text
stringlengths
1
1.05M
<filename>Include/KAI/Console/ConsoleColor.h #pragma once #include <KAI/Core/Config/Base.h> #include <sstream> #include <memory> #include <string> KAI_BEGIN class ConsoleColor { struct Impl; std::unique_ptr<Impl> _impl; public: enum EType { Normal, Error, Warning, Trace, StackNumber, Prompt, LanguageName, Pathname, Input, Last, }; enum EConsoleColor { Red, Green, Blue, }; std::string GetConsoleColor(EType type) const; }; std::ostream& operator<<(std::ostream &S, ConsoleColor::EType C); KAI_END
#!/usr/bin/env bash python make_face_gallery.py --model ../models/model-r50-am-lfw/model,0000 --data_dir ../dataset/images/ --gpu 0 --threshold 1.2 --gallery_dir ../gallery/my_gallery --det 0 --num_per_class 20
#!/bin/bash # # This file is part of the Simutrans-Extended project under the Artistic License. # (see LICENSE.txt) # # # script to fetch pak sets # # make sure that non-existing variables are not ignored set -u # fall back to "/tmp" if TEMP is not set TEMP=${TEMP:-/tmp} # parameter: url and filename do_download(){ if which curl >/dev/null; then curl -L "$1" > "$2" || { echo "Error: download of file $2 failed (curl returned $?)" >&2 rm -f "$2" exit 4 } else if which wget >/dev/null; then wget -q -N "$1" -O "$2" || { echo "Error: download of file $2 failed (wget returned $?)" >&2 rm -f "$2" exit 4 } else echo "Error: Neither curl or wget are available on your system, please install either and try again!" >&2 exit 6 fi fi } # two parameter, url, zipfilename DownloadInstallZip(){ echo "downloading from $1" do_download "$1" "$TEMP/$2" echo "installing from $2" # first try to extract all files in simutrans/ unzip -o -C -q "$TEMP/$2" "simutrans/*" -d . 2> /dev/null if [ $? -eq 11 ]; then # no simutrans folder in the zipfile # unzip directly into simutrans/ unzip -o -C -q "$TEMP/$2" -d simutrans fi rm "$TEMP/$2" } # generated list of pak sets paksets=( \ "http://downloads.sourceforge.net/project/simutrans/pak64/120-0/simupak64-120-0-1.zip" \ "http://downloads.sourceforge.net/project/simutrans/pak.german/pak64.german-112-3/pak64.german_0-112-3-beta3.zip" \ "http://downloads.sourceforge.net/project/simutrans/pak64.japan/120-0/simupak64.japan-120-0-1.zip" \ "http://downloads.sourceforge.net/project/simutrans/pakHAJO/pakHAJO_102-2-2/pakHAJO_0-102-2-2.zip" \ "http://downloads.sourceforge.net/project/simutrans/pak96.comic/pak96.comic%20for%20111-3/pak96.comic-0.4.10-plus.zip" \ "http://downloads.sourceforge.net/project/simutrans/pak128/pak128%20for%20RC%20120%20%282.5.2%2C%20bugfixes%29/pak128-2.5.2--RC_120.zip" \ "http://downloads.sourceforge.net/project/simutrans/PAK128.german/PAK128.german_0.6.1_112.x/PAK128.german_0.6.1_112.x.zip" \ 
"http://downloads.sourceforge.net/project/simutrans/pak192.comic/pak192comic%20for%20120-0/pak192comic-0.4-120-0up.zip" \ "http://downloads.sourceforge.net/project/simutrans/pak32.comic/pak32.comic%20for%20102-0/pak32.comic_102-0.zip" \ "http://downloads.sourceforge.net/project/simutrans/pak64.contrast/pak64.Contrast_910.zip" \ "http://hd.simutrans.com/release/PakHD_v04B_100-0.zip" \ "http://downloads.sourceforge.net/project/simutrans/pak128.britain/pak128.Britain%20for%20120-0/pak128.Britain.1.15-120-0.zip" \ "http://pak128.jpn.org/souko/pak128.japan.112.0b.cab" \ "http://downloads.sourceforge.net/project/simutrans/pak64.scifi/pak64.scifi_112.x_v0.2.zip" \ "http://downloads.sourceforge.net/project/ironsimu/pak48.Excentrique/v018/pak48-excentrique_v018.zip" \ ) tgzpaksets=( \ "http://simutrans.bilkinfo.de/pak64.ho-scale-latest.tar.gz" \ ) choices=() installpak=() echo "-- Choose at least one of these paks --" let setcount=0 let choicecount=0 let "maxcount = ${#paksets[*]}" while [ "$setcount" -lt "$maxcount" ]; do installpak[choicecount]=0 urlname=${paksets[$setcount]} zipname="${urlname##http*\/}" choicename="${zipname%.zip}" choicename="${choicename/simupak/pak}" choices[choicecount]=$choicename let "setcount += 1" let "choicecount += 1" echo "${choicecount}) ${choicename}" done while true; do read -p "Which paks to install? (enter number or (i) to install or (x) to exit)" pak #exit? if [[ $pak = [xX] ]]; then exit fi # test if installation now if [[ $pak = [iI] ]]; then echo "You will install now" let setcount=0 while [ $setcount -lt $choicecount ]; do if [ ${installpak[$setcount]} -gt 0 ]; then let "displaycount=$setcount+1" echo "${displaycount}) ${choices[$setcount]}" fi let "setcount += 1" done read -p "Is this correct? 
(y/n)" yn if [[ $yn = [yY] ]]; then break fi # edit again echo "-- Choose again one of these paks --" let setcount=0 while [ $setcount -lt $choicecount ]; do echo "${setcount}) ${choices[$setcount]}" let "setcount += 1" done let "pak=0" fi # otherwise it should be a number if [[ $pak =~ ^[0-9]+$ ]]; then let "setcount=pak-1" if [ $setcount -lt $choicecount ]; then if [ $setcount -ge 0 ]; then status=${installpak[$setcount]} if [ $status -lt 1 ]; then echo "adding ${choices[$setcount]}" installpak[$setcount]=1 else echo "not installing ${choices[$setcount]}" installpak[$setcount]=0 fi fi fi fi done # first the regular pak sets pushd .. let setcount=0 let "maxcount = ${#paksets[*]}" while [ "$setcount" -lt "$maxcount" ]; do if [ "${installpak[$setcount]}" -gt 0 ]; then urlname=${paksets[$setcount]} zipname="${urlname##http*\/}" DownloadInstallZip "$urlname" "$zipname" fi let "setcount += 1" done exit
def sum_factorial_digits(num): sum = 0 while num: digit = num % 10 num = num // 10 fact = 1 for i in range(1,digit+1): fact *= i sum += fact return sum # Output sum_factorial_digits(135) # Output: 1679
#!/usr/bin/env bash set -e curl -o "dartsdk-linux-x64-release.zip" "https://storage.googleapis.com/dart-archive/channels/dev/release/latest/sdk/dartsdk-linux-x64-release.zip" unzip "dartsdk-linux-x64-release.zip" rm "dartsdk-linux-x64-release.zip" cat <<EOF >analysis-server-dart-snapshot #!/usr/bin/env bash DIR=\$(cd \$(dirname \$0); pwd) \$DIR/bin/dart \$DIR/bin/snapshots/analysis_server.dart.snapshot --lsp \$* EOF chmod +x analysis-server-dart-snapshot
<reponame>eddie4941/servicetalk<gh_stars>100-1000 /* * Copyright © 2020-2021 Apple Inc. and the ServiceTalk project authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.servicetalk.concurrent.test.internal; import io.servicetalk.concurrent.Cancellable; import io.servicetalk.concurrent.PublisherSource; import org.junit.jupiter.api.Test; import java.util.concurrent.ThreadLocalRandom; import javax.annotation.Nullable; import static io.servicetalk.concurrent.internal.DeliberateException.DELIBERATE_EXCEPTION; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.junit.jupiter.api.Assertions.assertSame; import static org.mockito.Mockito.mock; class TestSingleSubscriberTest { @Test void onSubscribe() { TestSingleSubscriber<Integer> subscriber = new TestSingleSubscriber<>(); doOnSubscribe(subscriber); assertThat(subscriber.pollTerminal(200, MILLISECONDS), is(nullValue())); } @Test void onSubscribeOnComplete() { onSubscribeOnTerminal(true); } @Test void onSubscribeOnError() { onSubscribeOnTerminal(false); } private static void onSubscribeOnTerminal(boolean onComplete) { TestSingleSubscriber<Integer> subscriber = new TestSingleSubscriber<>(); doOnSubscribe(subscriber); assertThat(subscriber.pollTerminal(200, MILLISECONDS), is(nullValue())); 
doTerminalSignal(subscriber, onComplete); } @Test void singleItem() { singleItem(ThreadLocalRandom.current().nextInt()); } @Test void singleItemNull() { singleItem(null); } @Test void singleItemCancelBefore() { TestSingleSubscriber<Integer> subscriber = new TestSingleSubscriber<>(); doOnSubscribe(subscriber).cancel(); subscriber.onSuccess(10); assertThat(subscriber.awaitOnSuccess(), is(10)); } @Test void singleItemCancelAfter() { TestSingleSubscriber<Integer> subscriber = new TestSingleSubscriber<>(); Cancellable c = doOnSubscribe(subscriber); subscriber.onSuccess(10); c.cancel(); assertThat(subscriber.awaitOnSuccess(), is(10)); } private static void singleItem(@Nullable Integer i) { TestSingleSubscriber<Integer> subscriber = new TestSingleSubscriber<>(); doOnSubscribe(subscriber); subscriber.onSuccess(i); assertThat(subscriber.awaitOnSuccess(), is(i)); } private static Cancellable doOnSubscribe(TestSingleSubscriber<Integer> subscriber) { PublisherSource.Subscription subscription = mock(PublisherSource.Subscription.class); subscriber.onSubscribe(subscription); Cancellable realCancellable = subscriber.awaitSubscription(); assertThat(realCancellable, notNullValue()); return realCancellable; } private static void doTerminalSignal(TestSingleSubscriber<Integer> subscriber, boolean onComplete) { if (onComplete) { Integer value = ThreadLocalRandom.current().nextInt(); subscriber.onSuccess(value); assertThat(subscriber.awaitOnSuccess(), is(value)); } else { subscriber.onError(DELIBERATE_EXCEPTION); assertSame(DELIBERATE_EXCEPTION, subscriber.awaitOnError()); } } }
import speech_recognition as sr from os import system def listen_command(): r = sr.Recognizer() with sr.Microphone() as source: print("Listening...") audio = r.listen(source) try: command = r.recognize_google(audio) print("I heard: " + command) return command except sr.UnknownValueError: print("I could not understand audio") listen_command() if listen_command() == ‘Please turn on the lights’: system("/usr/bin/python3 /home/pi/script.py")
sudo apt install -y man-db
/* * App Actions * * Actions change things in your application * Since this boilerplate uses a uni-directional data flow, specifically redux, * we have these actions which are the only way your application interacts with * your application state. This guarantees that your state is up to date and nobody * messes it up weirdly somewhere. * * To add a new Action: * 1) Import your constant * 2) Add a function like this: * export function yourAction(var) { * return { type: YOUR_ACTION_CONSTANT, var: var } * } */ import { LOAD_LIST, LOAD_LIST_SUCCESS, LOAD_LIST_ERROR, ADD_ITEM, } from './constants'; /** * Load the list of strings, this action starts the request saga * * @return {object} An action object with a type of LOAD_REPOS */ export function loadList() { return { type: LOAD_LIST, }; } /** * Dispatched when the list of strings are loaded by the request saga * * @param {array} list The array of strings * * @return {object} An action object with a type of LOAD_REPOS_SUCCESS passing the repos */ export function loadListSuccess(list) { return { type: LOAD_LIST_SUCCESS, list, }; } /** * Dispatched when loading the list fails * * @param {object} error The error * * @return {object} An action object with a type of LOAD_REPOS_ERROR passing the error */ export function loadListError(error) { return { type: LOAD_LIST_ERROR, error, }; } /** * Add a string to the backend via the request saga, * this action starts the request saga * * @return {object} An action object with a type of LOAD_REPOS */ export function addItem(str) { return { type: ADD_ITEM, str, }; }
<reponame>RoyAl-Vitamin/Lab6 package vi.al.ro; import java.io.*; import java.net.InetAddress; import java.net.Socket; import java.text.SimpleDateFormat; import java.util.Date; public class Client implements Runnable { // порт сервера private final int SERVER_PORT; // адрес сервера private final String SERVER_ADDRESS; public Client() { Config config = Config.getInstance(); SERVER_ADDRESS = config.getClientHost(); SERVER_PORT = config.getClientPort(); System.out.println("CLIENT: SERVER_ADDRESS == " + SERVER_ADDRESS + " SERVER_PORT == " + SERVER_PORT); } public void run() { try { Thread.sleep(2000); } catch (InterruptedException e) { throw new RuntimeException(e); } SimpleDateFormat sdf = new SimpleDateFormat("yy.MM.dd HH:mm:ss -> "); try { InetAddress ipServerAddress = InetAddress.getByName(SERVER_ADDRESS); // создаем объект который отображает вышеописанный IP-адрес. Socket socket = new Socket(ipServerAddress, SERVER_PORT); // создаем сокет используя IP-адрес и порт сервера. System.out.println("Any of you heard of a socket with IP address " + SERVER_ADDRESS + " and port " + SERVER_PORT + "?"); System.out.println("Yes! I just got hold of the program."); // Берем входной и выходной потоки сокета, теперь можем получать и отсылать данные клиентом. // InputStream sin = socket.getInputStream(); OutputStream sout = socket.getOutputStream(); // Конвертируем потоки в другой тип, чтоб легче обрабатывать текстовые сообщения. // DataInputStream in = new DataInputStream(sin); DataOutputStream out = new DataOutputStream(sout); // Создаем поток для чтения с клавиатуры. BufferedReader keyboard = new BufferedReader(new InputStreamReader(System.in)); String line = null; // System.out.println("Type in something and press enter. Will send it to the server and tell ya what it thinks."); // System.out.println(); while (true) { line = keyboard.readLine(); // ждем пока пользователь введет что-то и нажмет кнопку Enter. 
// System.out.println("Sending this line to the server..."); out.writeUTF(line); // отсылаем введенную строку текста серверу. out.flush(); // заставляем поток закончить передачу данных. if ("exit".equalsIgnoreCase(line)) { break; } // line = in.readUTF(); // ждем пока сервер отошлет строку текста. // System.out.println("The server was very polite. It sent me this : " + line); // System.out.println(sdf.format(new Date()) + line); // System.out.println("Looks like the server is pleased with us. Go ahead and enter more lines."); // System.out.println(); } } catch (IOException e) { e.printStackTrace(); } } }
def create_prediction_model(dataset): X = dataset.iloc[:,:-1].values y = dataset.iloc[:,-1].values #Split into train and test X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2) #Train the model model = LinearRegression() model.fit(X_train, y_train) #Make the predictions predictions = model.predict(X_test) return predictions
import numpy as np from yt.frontends.stream.api import load_particles from yt.units.yt_array import uconcatenate, YTArray, \ YTQuantity, savetxt from yt.utilities.physical_ratios import keV_per_erg from scipy.interpolate import InterpolatedUnivariateSpline from six import string_types from pyxsim.photon_list import PhotonList from pyxsim.source_models import PowerLawSourceModel from pyxsim.utils import mylog, parse_value from soxs.utils import parse_prng """ Papers used in this code: <NAME>. 2004, MNRAS, 349, 146 <NAME>., <NAME>., & <NAME>. 2012, MNRAS, 419, 2095 """ # Function to calculate the scale factor for a power # law with F = K*E**-alpha (K in units of ct/s/keV) def get_scale_factor(ind, emin, emax): if ind == 2.0: k = np.log(emax/emin) else: k = (emax**(2.0-ind)-emin**(2.0-ind))/(2.0-ind) return keV_per_erg/k # Function to convert between two different energy # bands for a single power law def convert_bands(ind, emin_a, emax_a, emin_b, emax_b): if ind == 2.0: k = np.log(emax_a/emin_a) k /= np.log(emax_b/emin_b) else: k = (emax_a**(2.0-ind)-emin_a**(2.0-ind)) k /= (emax_b**(2.0-ind)-emin_b**(2.0-ind)) return k # Spectral indices for both types of XRBs alpha_lmxb = 1.56 alpha_hmxb = 2.0 # Energy bands for luminosities in XRB # distribution functions emin_lmxb = 0.5 emax_lmxb = 8.0 emin_hmxb = 2.0 emax_hmxb = 10.0 # Bolometric corrections bc_lmxb = convert_bands(alpha_lmxb, 0.03, 100.0, emin_lmxb, emax_lmxb) bc_hmxb = convert_bands(alpha_hmxb, 0.03, 100.0, emin_hmxb, emax_hmxb) # Range of luminosities common to both types of XRBs Lmin = 1.0e-3 Lcut = 1000.0 nbins = 1000 Lbins = np.logspace(np.log10(Lmin), np.log10(Lcut), nbins+1) logLbins = np.log10(Lbins) logLmid = 0.5*(logLbins[1:]+logLbins[:-1]) # LMXB distribution function from Gilfanov 2004 alpha1 = 1.0 alpha2 = 1.86 alpha3 = 4.8 # The luminosities from Gilfanov 2004 are # in the 0.5-8 keV band. 
Lb1 = 0.19 Lb2 = 5.0 K1 = 440.4 K2 = K1*(Lb1/Lb2)**alpha2 K3 = K2*(Lb2/Lcut)**alpha3 C1 = Lb1*K1 C2 = Lb2*K2/(1.0-alpha2) C3 = Lcut*K3/(1.0-alpha3) D2 = C2*(Lb1/Lb2)**(1.0-alpha2) D3 = C3*(Lb2/Lcut)**(1.0-alpha3) I1 = C1*np.log(Lb1/Lmin) I2 = C2 - D2 + I1 I3 = C3 - D3 + I2 def lmxb_cdf(L): if L < Lb1: N = C1*np.log(L/Lmin) elif Lb1 <= L < Lb2: N = C2*(L/Lb2)**(1.0-alpha2) - D2 + I1 elif Lb2 <= L < Lcut: N = C3*(L/Lcut)**(1.0-alpha3) - D3 + I2 else: N = I3 return N # HMXB distribution function from Mineo et al. 2012 chi = 1.49 gamma1 = 1.58 gamma2 = 2.73 Lb = 110.0 A = Lb**(gamma2-gamma1) E1 = chi/(1.0-gamma1) E2 = chi*A/(1.0-gamma2) F1 = E1*Lmin**(1.0-gamma1) F2 = E2*Lb**(1.0-gamma2) J1 = E1*Lb**(1.0-gamma1) - F1 J2 = E2*Lcut**(1.0-gamma2) - F2 + J1 def hmxb_cdf(L): if L < Lb: N = E1*L**(1.0-gamma1) - F1 elif Lb <= L < Lcut: N = E2*L**(1.0-gamma2) - F2 + J1 else: N = J2 return N def make_xrb_particles(data_source, age_field, scale_length, sfr_time_range=(1.0, "Gyr"), prng=None): r""" This routine generates an in-memory dataset composed of X-ray binary particles from an input data source containing star particles. Parameters ---------- data_source : :class:`~yt.data_objects.data_containers.YTSelectionContainer` The yt data source to obtain the data from, such as a sphere, box, disk, etc. age_field : string or (type, name) field tuple The stellar age field. Must be in some kind of time units. scale_length : string, (ftype, fname) tuple, (value, unit) tuple, :class:`~yt.units.yt_array.YTQuantity`, or :class:`~astropy.units.Quantity` The radial length scale over which to scatter the XRB particles from their parent star particle. Can be the name of a smoothing length field for the stars, a (value, unit) tuple, or a YTQuantity. 
sfr_time_range : string, (ftype, fname) tuple, (value, unit) tuple, :class:`~yt.units.yt_array.YTQuantity`, or :class:`~astropy.units.Quantity`, optional The recent time range over which to calculate the star formation rate from the current time in the dataset. Default: 1.0 Gyr prng : integer or :class:`~numpy.random.RandomState` object A pseudo-random number generator. Typically will only be specified if you have a reason to generate the same set of random numbers, such as for a test. Default is to use the :mod:`numpy.random` module. """ prng = parse_prng(prng) ds = data_source.ds ptype = data_source._determine_fields(age_field)[0][0] t = data_source[age_field].to("Gyr") m = data_source[(ptype, "particle_mass")].to("Msun") sfr_time_range = parse_value(sfr_time_range, "Gyr") recent = t < sfr_time_range n_recent = recent.sum() if n_recent == 0: sfr = 0.0 else: sfr = (m[recent].sum()/sfr_time_range).to("Msun/yr").v mylog.info("%d star particles were formed in the last " % n_recent + "%s for a SFR of %4.1f Msun/yr." 
% (sfr_time_range, sfr)) mtot = m.sum() npart = m.size scale_field = None if isinstance(scale_length, tuple): if isinstance(scale_length[0], string_types): scale_field = scale_length elif isinstance(scale_length, string_types): scale_field = (ptype, scale_length) if scale_field is None: if isinstance(scale_length, tuple): scale = YTArray([scale_length[0]]*npart, scale_length[1]) elif isinstance(scale_length, YTQuantity): scale = YTArray([scale_length]*npart) else: scale = YTArray([scale_length[0]]*npart, "kpc") else: scale = data_source[scale_length] scale = scale.to('kpc').d N_l = lmxb_cdf(Lcut)*mtot.v*1.0e-11 N_h = hmxb_cdf(Lcut)*sfr N_all = N_l+N_h if N_all == 0.0: raise RuntimeError("There are no X-ray binaries to generate!") # Compute conversion factors from luminosity to count rate lmxb_factor = get_scale_factor(alpha_lmxb, emin_lmxb, emax_lmxb) hmxb_factor = get_scale_factor(alpha_hmxb, emin_hmxb, emax_hmxb) xp = [] yp = [] zp = [] vxp = [] vyp = [] vzp = [] lp = [] rp = [] ap = [] if N_l > 0.0: F_l = np.zeros(nbins+1) for i in range(1, nbins+1): F_l[i] = lmxb_cdf(Lbins[i]) F_l /= F_l[-1] invcdf_l = InterpolatedUnivariateSpline(F_l, logLbins) n_l = prng.poisson(lam=N_l*m/mtot) mylog.info("Number of low-mass X-ray binaries: %s" % n_l.sum()) for i, n in enumerate(n_l): if n > 0: randvec = prng.uniform(size=n) l = YTArray(10**invcdf_l(randvec)*1.0e38, "erg/s") r = YTArray(l.v*lmxb_factor, "photons/s/keV") # Now convert output luminosities to bolometric l *= bc_lmxb x = YTArray(prng.normal(scale=scale[i], size=n), "kpc") y = YTArray(prng.normal(scale=scale[i], size=n), "kpc") z = YTArray(prng.normal(scale=scale[i], size=n), "kpc") x += data_source[ptype, "particle_position_x"][i].to("kpc") y += data_source[ptype, "particle_position_y"][i].to("kpc") z += data_source[ptype, "particle_position_z"][i].to("kpc") vx = YTArray([data_source[ptype, "particle_velocity_x"][i]]*n).to('km/s') vy = YTArray([data_source[ptype, "particle_velocity_y"][i]]*n).to('km/s') vz = 
YTArray([data_source[ptype, "particle_velocity_z"][i]]*n).to('km/s') xp.append(x) yp.append(y) zp.append(z) vxp.append(vx) vyp.append(vy) vzp.append(vz) lp.append(l) rp.append(r) ap.append(np.array([alpha_lmxb]*n)) if N_h > 0.0: F_h = np.zeros(nbins+1) for i in range(1, nbins+1): F_h[i] = hmxb_cdf(Lbins[i]) F_h /= F_h[-1] invcdf_h = InterpolatedUnivariateSpline(F_h, logLbins) n_h = prng.poisson(lam=N_h*m/mtot) mylog.info("Number of high-mass X-ray binaries: %s" % n_h.sum()) for i, n in enumerate(n_h): if n > 0: randvec = prng.uniform(size=n) l = YTArray(10**invcdf_h(randvec)*1.0e38, "erg/s") r = YTArray(l.v*hmxb_factor, "photons/s/keV") # Now convert output luminosities to bolometric l *= bc_hmxb x = YTArray(prng.normal(scale=scale[i], size=n), "kpc") y = YTArray(prng.normal(scale=scale[i], size=n), "kpc") z = YTArray(prng.normal(scale=scale[i], size=n), "kpc") x += data_source[ptype, "particle_position_x"][i].to("kpc") y += data_source[ptype, "particle_position_y"][i].to("kpc") z += data_source[ptype, "particle_position_z"][i].to("kpc") vx = YTArray([data_source[ptype, "particle_velocity_x"][i]]*n).to('km/s') vy = YTArray([data_source[ptype, "particle_velocity_y"][i]]*n).to('km/s') vz = YTArray([data_source[ptype, "particle_velocity_z"][i]]*n).to('km/s') xp.append(x) yp.append(y) zp.append(z) vxp.append(vx) vyp.append(vy) vzp.append(vz) lp.append(l) rp.append(r) ap.append(np.array([alpha_hmxb]*n)) xp = uconcatenate(xp) yp = uconcatenate(yp) zp = uconcatenate(zp) vxp = uconcatenate(vxp) vyp = uconcatenate(vyp) vzp = uconcatenate(vzp) lp = uconcatenate(lp) rp = uconcatenate(rp) ap = uconcatenate(ap) data = {"particle_position_x": (xp.d, str(xp.units)), "particle_position_y": (yp.d, str(yp.units)), "particle_position_z": (zp.d, str(zp.units)), "particle_velocity_x": (vxp.d, str(vxp.units)), "particle_velocity_y": (vyp.d, str(vyp.units)), "particle_velocity_z": (vzp.d, str(vzp.units)), "particle_luminosity": (lp.d, str(lp.units)), "particle_count_rate": (rp.d, 
str(rp.units)), "particle_spectral_index": ap} dle = ds.domain_left_edge.to("kpc").v dre = ds.domain_right_edge.to("kpc").v bbox = np.array([[dle[i], dre[i]] for i in range(3)]) new_ds = load_particles(data, bbox=bbox, length_unit="kpc", time_unit="Myr", mass_unit="Msun", velocity_unit="km/s") return new_ds def make_xrb_photons(ds, redshift, area, exp_time, emin, emax, center="c", cosmology=None, prng=None): r""" Take a dataset produced by :func:`~pyxsim.source_generators.xray_binaries.make_xrb_particles` and produce a :class:`~pyxsim.photon_list.PhotonList`. Parameters ---------- ds : :class:`~yt.data_objects.static_output.Dataset` The dataset of XRB particles to use to make the photons. redshift : float The cosmological redshift for the photons. area : float, (value, unit) tuple, :class:`~yt.units.yt_array.YTQuantity`, or :class:`~astropy.units.Quantity` The collecting area to determine the number of photons. If units are not specified, it is assumed to be in cm^2. exp_time : float, (value, unit) tuple, :class:`~yt.units.yt_array.YTQuantity`, or :class:`~astropy.units.Quantity` The exposure time to determine the number of photons. If units are not specified, it is assumed to be in seconds. emin : float, (value, unit) tuple, :class:`~yt.units.yt_array.YTQuantity`, or :class:`~astropy.units.Quantity` The minimum energy of the photons to be generated, in the rest frame of the source. If units are not given, they are assumed to be in keV. emax : float, (value, unit) tuple, :class:`~yt.units.yt_array.YTQuantity`, or :class:`~astropy.units.Quantity` The maximum energy of the photons to be generated, in the rest frame of the source. If units are not given, they are assumed to be in keV. center : string or array_like, optional The origin of the photon spatial coordinates. Accepts "c", "max", or a coordinate. If not specified, pyxsim attempts to use the "center" field parameter of the data_source. 
cosmology : :class:`~yt.utilities.cosmology.Cosmology`, optional Cosmological information. If not supplied, we try to get the cosmology from the dataset. Otherwise, LCDM with the default yt parameters is assumed. prng : integer or :class:`~numpy.random.RandomState` object A pseudo-random number generator. Typically will only be specified if you have a reason to generate the same set of random numbers, such as for a test. Default is to use the :mod:`numpy.random` module. """ dd = ds.all_data() e0 = (1.0, "keV") prng = parse_prng(prng) xrb_model = PowerLawSourceModel(e0, emin, emax, "particle_count_rate", "particle_spectral_index", prng=prng) photons = PhotonList.from_data_source(dd, redshift, area, exp_time, xrb_model, center=center, point_sources=True, cosmology=cosmology) return photons
package aufgabe12_9; // utf8: "Köpfchen in das Wasser, Schwänzchen in die Höh." -CIA-Verhörmethode public class Return extends Statement { private Expression expr; public Return(Expression expr) { super(); this.expr = expr; } public Expression getExpression() { return expr; } @Override public void accept(Visitor visitor) { visitor.visit(this); } }
#!/bin/bash xmllint --noout --schema zcl.xsd data-model/chip/matter-devices.xml
const SLICE = 'adverts' // Lenses const sliceLens = R.lensProp(SLICE) // Selectors export const getAdverts = R.view(sliceLens) // Actions export const ADVERTS_REQUESTED = 'adverts/ADVERTS_REQUESTED' export const advertsRequested = () => ({ type: ADVERTS_REQUESTED, payload: {} }) export const ADVERTS_FETCHING = 'adverts/ADVERTS_FETCHING' export const advertsFetching = () => ({ type: ADVERTS_FETCHING, payload: {} }) export const ADVERTS_FETCH_SUCCESS = 'adverts/ADVERTS_FETCH_SUCCESS' export const advertsFetchSuccess = ({ qmedia }) => ({ type: ADVERTS_FETCH_SUCCESS, payload: { qmedia }, }) const mergeLeft = R.flip(R.merge) const getReducer = ({ type, payload, error }) => { switch (type) { case ADVERTS_FETCHING: return mergeLeft({ fetching: true }) case ADVERTS_FETCH_SUCCESS: return mergeLeft({ fetching: false, ...payload }) default: return R.identity } } export default (state = { fetching: false }, action) => getReducer(action)(state)
<filename>core/src/mindustry/world/blocks/power/PowerDistributor.java package mindustry.world.blocks.power; import mindustry.world.blocks.PowerBlock; /** * 电力配送器 * */ public class PowerDistributor extends PowerBlock{ public PowerDistributor(String name){ super(name); consumesPower = false; outputsPower = true; } }
/* * Copyright (c) 2018, <NAME> <<EMAIL>>. * * This source code is licensed under the license found in the * LICENSE.md file in the root directory of this source tree. */ const { Route } = require('serverful') const Database = require('../../database') class GetButtons extends Route { constructor () { super('GET', '/buttons', 'Buttons', 'Returns all buttons') } handler (request, h) { return Database.buttons.findAll() } } module.exports = new GetButtons()
#!/bin/bash DIR='/opt/intel/openvino' ALL=false for v in "$@" do if [ "$v" = '-a' ]; then ALL=true else DIR=$v fi done if [ "$ALL" = true ]; then python3 $DIR/deployment_tools/tools/model_downloader/downloader.py --name face-detection-adas-0001 -o models python3 $DIR/deployment_tools/tools/model_downloader/downloader.py --name landmarks-regression-retail-0009 -o models python3 $DIR/deployment_tools/tools/model_downloader/downloader.py --name head-pose-estimation-adas-0001 -o models python3 $DIR/deployment_tools/tools/model_downloader/downloader.py --name gaze-estimation-adas-0002 -o models else python3 $DIR/deployment_tools/tools/model_downloader/downloader.py --name face-detection-adas-binary-0001 -o models --precisions FP32-INT1 python3 $DIR/deployment_tools/tools/model_downloader/downloader.py --name landmarks-regression-retail-0009 -o models --precisions FP16-INT8 python3 $DIR/deployment_tools/tools/model_downloader/downloader.py --name head-pose-estimation-adas-0001 -o models --precisions FP16-INT8 python3 $DIR/deployment_tools/tools/model_downloader/downloader.py --name gaze-estimation-adas-0002 -o models --precisions FP16-INT8 fi
def add_list(list_a, list_b): return [a + b for a, b in zip(list_a, list_b)]
import math from PySide import QtCore from guide import Guide PI2 = 2 * math.pi class GuideCircle(Guide): CW = 1 CCW = -1 def __init__(self, rect, startAngle=0.0, span=360.0, dir=CCW, follows=None): super(GuideCircle, self).__init__(follows) self.radiusX = rect.width() / 2.0 self.radiusY = rect.height() / 2.0 self.posX = rect.topLeft().x() self.posY = rect.topLeft().y() self.spanRad = span * PI2 / -360.0 if dir == GuideCircle.CCW: self.startAngleRad = startAngle * PI2 / -360.0 self.endAngleRad = self.startAngleRad + self.spanRad self.stepAngleRad = self.spanRad / self.length() else: self.startAngleRad = self.spanRad + (startAngle * PI2 / -360.0) self.endAngleRad = startAngle * PI2 / -360.0 self.stepAngleRad = -self.spanRad / self.length() def length(self): return abs(self.radiusX * self.spanRad) def startPos(self): return QtCore.QPointF((self.posX + self.radiusX + self.radiusX * math.cos(self.startAngleRad)) * self.scaleX, (self.posY + self.radiusY + self.radiusY * math.sin(self.startAngleRad)) * self.scaleY) def endPos(self): return QtCore.QPointF((self.posX + self.radiusX + self.radiusX * math.cos(self.endAngleRad)) * self.scaleX, (self.posY + self.radiusY + self.radiusY * math.sin(self.endAngleRad)) * self.scaleY) def guide(self, item, moveSpeed): frame = item.guideFrame - self.startLength end = QtCore.QPointF((self.posX + self.radiusX + self.radiusX * math.cos(self.startAngleRad + (frame * self.stepAngleRad))) * self.scaleX, (self.posY + self.radiusY + self.radiusY * math.sin(self.startAngleRad + (frame * self.stepAngleRad))) * self.scaleY) self.move(item, end, moveSpeed)
//Generate a set of unique random numbers. #include <stdio.h> #include <stdlib.h> // Function to generate a set of unique random numbers. int *uniqueRandoms(int numCount) { int *unique_nums = malloc(sizeof(int) * numCount); int i, index, r; for (i = 0; i < numCount; i++) { do { r = rand(); for(index = 0; index < i; index++) { if(r == unique_nums[index]) break; } } while (index != i); unique_nums[i] = r; } return unique_nums; } int main() { int *uniqueArr; int numCount; printf("Enter the number of random numbers you want to generate: "); scanf("%d", &numCount); uniqueArr = uniqueRandoms(numCount); int i; for (i = 0; i < numCount; i++) printf("%d ", uniqueArr[i]); printf("\n"); return 0; }
import requests


class ImageProcessor:
    """Thin client for a remote image-processing HTTP API.

    All calls authenticate with a bearer token and raise on any
    non-200 response.
    """

    def __init__(self, api_key: str, base_url: str):
        self.api_key = api_key
        self.base_url = base_url

    def _auth_headers(self) -> dict:
        # Shared Authorization header for every request.
        return {'Authorization': f"Bearer {self.api_key}"}

    def upload_image(self, image_path: str) -> str:
        """Upload a local file; return the server-assigned image id."""
        upload_url = f"{self.base_url}/upload"
        # BUG FIX: the file handle was previously opened without ever being
        # closed; use a context manager so it is released even on error.
        with open(image_path, 'rb') as fh:
            response = requests.post(upload_url, files={'file': fh},
                                     headers=self._auth_headers())
        if response.status_code == 200:
            return response.json()['image_id']
        else:
            raise Exception(f"Failed to upload image: {response.text}")

    def process_image(self, image_id: str, operation: str) -> str:
        """Apply ``operation`` to an uploaded image; return the new image id."""
        process_url = f"{self.base_url}/process"
        payload = {'image_id': image_id, 'operation': operation}
        response = requests.post(process_url, json=payload,
                                 headers=self._auth_headers())
        if response.status_code == 200:
            return response.json()['processed_image_id']
        else:
            raise Exception(f"Failed to process image: {response.text}")

    def download_image(self, image_id: str, destination_path: str) -> None:
        """Download ``image_id`` and write its bytes to ``destination_path``."""
        download_url = f"{self.base_url}/download/{image_id}"
        response = requests.get(download_url, headers=self._auth_headers())
        if response.status_code == 200:
            with open(destination_path, 'wb') as file:
                file.write(response.content)
        else:
            raise Exception(f"Failed to download image: {response.text}")


# Example usage — guarded so importing this module no longer performs
# network calls as a side effect.
if __name__ == "__main__":
    # SECURITY NOTE(review): hard-coded credential kept for the demo; in real
    # use load it from the environment or a secret store, not source code.
    api_key = "fpfGmGL99O+3bM1BpV8vSQLeHxxocf+IeLMcKHFwfXU="
    base_url = "http://localhost:5000"

    processor = ImageProcessor(api_key, base_url)
    image_id = processor.upload_image("grayscale.jpg")
    processed_image_id = processor.process_image(image_id, "grayscale")
    processor.download_image(processed_image_id, "processed_grayscale.jpg")
import styled from '@emotion/styled';

// Empty-state placeholder for sidebar panels: a full-size flex box that
// centers muted, horizontally padded text.
const SidebarPanelEmpty = styled('div')`
  display: flex;
  align-items: center;
  justify-content: center;
  color: ${p => p.theme.gray500};
  height: 100%;
  width: 100%;
  padding: 0 60px;
  text-align: center;
`;

export default SidebarPanelEmpty;
def is_substring(text1, text2):
    """Return True if either string is contained in the other."""
    return (text1 in text2) or (text2 in text1)
// Define the WebKitInputPurpose enum enum WebKitInputPurpose { Phone, Url, Email, Password, } // Implement the validate_input function fn validate_input(input_value: &str, input_purpose: WebKitInputPurpose) -> bool { match input_purpose { WebKitInputPurpose::Phone => { input_value.chars().all(|c| c.is_digit(10) || "+-() ".contains(c)) } WebKitInputPurpose::Url => { url::Url::parse(input_value).is_ok() } WebKitInputPurpose::Email => { regex::Regex::new(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$") .unwrap() .is_match(input_value) } WebKitInputPurpose::Password => { input_value.len() >= 8 && input_value.chars().any(|c| c.is_ascii_uppercase()) && input_value.chars().any(|c| c.is_ascii_lowercase()) && input_value.chars().any(|c| c.is_digit(10)) && input_value.chars().any(|c| !c.is_alphanumeric()) } } }
import rospy
from geometry_msgs.msg import Pose, Quaternion
from nav_msgs.msg import Odometry
from geometry_msgs.msg import Point, Pose, Quaternion, Twist, Vector3
from tf.transformations import euler_from_quaternion, quaternion_from_euler
from your_package.msg import LocalizationEstimate


class LocalizationNode:
    """ROS node that republishes /odometry poses as LocalizationEstimate.

    Subscribes to nav_msgs/Odometry on ``/odometry`` and publishes the pose
    portion on ``/localized_pose``.
    """

    def __init__(self):
        # anonymous=True lets several instances run without name clashes.
        rospy.init_node('localization_node', anonymous=True)
        self.pose_pub = rospy.Publisher('/localized_pose', LocalizationEstimate, queue_size=10)
        rospy.Subscriber('/odometry', Odometry, self.odometry_callback)

    def odometry_callback(self, data):
        """Convert one Odometry message and publish a LocalizationEstimate."""
        position = data.pose.pose.position
        orientation = data.pose.pose.orientation
        # Euler angles are computed but currently unused — presumably kept for
        # future logging or yaw-based fields; verify before removing.
        (roll, pitch, yaw) = euler_from_quaternion([orientation.x, orientation.y, orientation.z, orientation.w])

        # Process the received odometry data and create a LocalizationEstimate message
        # NOTE(review): no header/timestamp is copied from the Odometry message —
        # confirm whether LocalizationEstimate carries a header that should be set.
        localization_msg = LocalizationEstimate()
        localization_msg.position = Point(position.x, position.y, position.z)
        localization_msg.orientation = Quaternion(orientation.x, orientation.y, orientation.z, orientation.w)

        # Publish the LocalizationEstimate message
        self.pose_pub.publish(localization_msg)


if __name__ == '__main__':
    localization_node = LocalizationNode()
    rospy.spin()
<gh_stars>0 const distube = require('../index.js'); const { MessageEmbed } = require("discord.js"); const { EMOJI_DONE ,EMOJI_ERROR } = require("../config.json") module.exports = { name: "join", aliases: ["j", "247", "24/7", "24x7"], description: "Join your VC for 27/7", async execute(message, args) { const { channel } = message.member.voice; const serverQueue = message.client.queue.get(message.guild.id); if (!channel) return message.reply(`${EMOJI_ERROR} You need to join a voice channel first!`).catch(console.error); if (serverQueue && channel !== message.guild.me.voice.channel) return message.reply(`${EMOJI_ERROR} You must be in the same channel as ${message.client.user}`).catch(console.error); await message.member.voice.channel.join() return message.react(EMOJI_DONE); } } console.log("Join working")
package com.momo.momopermissiongateway.configuration;

import com.google.common.collect.Lists;
import lombok.*;
import org.apache.commons.lang3.StringUtils;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

import java.util.List;

/**
 * @ProjectName: momo-cloud-permission
 * @Package: com.momo.service.service
 * @Description: 拦截URL配置 (gateway URL interception configuration, bound
 * from properties under the "momo" prefix)
 * @Author: <NAME>
 * @CreateDate: 2019/8/24 0024 13:33
 * @UpdateDate: 2019/8/24 0024 13:33
 * @Version: 1.0
 * <p>Copyright: Copyright (c) 2019</p>
 */
@Configuration
@ConfigurationProperties("momo")
@Getter
@Setter
@ToString
@Builder
@NoArgsConstructor
@AllArgsConstructor
//@EqualsAndHashCode(of = {"id"})
public class InterceptUrlConfiguration {

    // NOTE(review): "teantId" looks like a typo for "tenantId", but renaming
    // would change the bound property name and the Lombok getter/setter —
    // callers and config files must be migrated together.
    private Long teantId;
    //忽略拦截URL — URLs exempt from interception
    private List<String> ignorerUrl = Lists.newArrayList();
    //#企业相关操作URL,只有MOMO企业下的员工才可以操作 — enterprise-only URLs
    private List<String> enterpriseUrl = Lists.newArrayList();

    /**
     * True if the URL should bypass interception.
     * A blank/null URL also returns true — presumably so malformed requests
     * are not blocked here; confirm this is the intended fail-open behavior.
     */
    public boolean checkIgnoreUrl(String gatewayUrl) {
        if (StringUtils.isBlank(gatewayUrl)) {
            return true;
        }
        return ignorerUrl.contains(gatewayUrl);
    }

    /**
     * True if the URL is restricted to MOMO-enterprise staff.
     * Blank/null URLs return true (same fail-open caveat as above).
     */
    public boolean checkEnterpriseUrl(String gatewayUrl) {
        if (StringUtils.isBlank(gatewayUrl)) {
            return true;
        }
        return enterpriseUrl.contains(gatewayUrl);
    }
}
#!/bin/sh
#experiments for prodcon ts hardware stack
#experiments -> 2 threads (min concurrency) , 10-30-50-70-100-300-500-10000 threads
#experiments -> threshold: depends on #operations
#operations/5, #operations/2, #operations/3, threshold=1 (always stealing)
#
# Usage: $1=producers $2=consumers $3=threshold $4=operations $5=output-suffix
cd ..; cd ..; cd out/Debug;

count=10
for i in $(seq "$count"); do
    # FIX: positional parameters are now quoted so empty/whitespace arguments
    # don't word-split or silently drop flags.
    ./prodcon-hc-ts-hardware-stack -producers="$1" -consumers="$2" -threshold="$3" -operations="$4" >> "./output/output-prodcon-ts-hardware-stack$5"
done
#!/bin/zsh set -euo pipefail # What is this update.sh? # It's a helper script you run whenever you've updated the demo versions or modified the Docker setup. # It generates all the Dockerfiles from templates and the post_push hooks containing image tags. # Once you commit and push the changes, Docker Hub will trigger automated builds of new images. # Usage: # `./update.sh` # As you can see in line 1, the script runs in zsh. This is the default shell in Mac OS since Catalina. # You could run it in a bash shell as well (`#!/bin/bash`), but it requires at least bash 4.x to deal # with associative arrays (hash tables). Check your version with `bash --version`. # configuration --------------------------------------------------------------- # declare commands and image bases for given variants variants=( base community onepage ) # declare package versions declare -A packageVersions=( [base]='2.10.1' [community]='3.3.1' [onepage]='1.5.1' ) # ----------------------------------------------------------------------------- # loop through image variants for variant in "${variants[@]}"; do # declare and make directory for given PHP version and image variant dir="$variant" mkdir -p "$dir" # declare tags for current version and variant tags=( "${variant}" ) packageName="demo_${variant}" packageVersion="${packageVersions[$variant]}" # bring out debug infos echo "- Image: $variant - $packageName@$packageVersion" echo " Tags:" printf " - %s\n" ${tags} # copy hook from template, replace placeholders mkdir -p "$dir/hooks" sed -r \ -e 's!%%TAGS%%!'"$tags"'!g' \ "templates/post_push.sh" > "$dir/hooks/post_push" # copy custom-setup file, replace placeholders sed -r \ -e 's!%%PACKAGE%%!'"$packageName"'!g' \ -e 's!%%VERSION%%!'"$packageVersion"'!g' \ "templates/custom-setup.sh" > "$dir/custom-setup.sh" chmod +x "$dir/custom-setup.sh" # copy remaining files cp "templates/Dockerfile" "$dir/Dockerfile" done
#!/usr/bin/env bash # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -ex airshipctl phase run deliver-network-policy
// Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.

package tikv

import (
	"context"

	. "github.com/pingcap/check"
	"github.com/pingcap/kvproto/pkg/coprocessor"
	"tidb/kv"
	"tidb/store/mockstore/mocktikv"
)

// testCoprocessorSuite exercises coprocessor task building and key-range
// splitting against a mocktikv cluster with known region boundaries.
type testCoprocessorSuite struct {
	OneByOneSuite
}

var _ = Suite(&testCoprocessorSuite{})

// TestBuildTasks checks that key ranges are partitioned into one copTask per
// region, clipped at region boundaries.
func (s *testCoprocessorSuite) TestBuildTasks(c *C) {
	// nil --- 'g' --- 'n' --- 't' --- nil
	// <-  0  -> <- 1 -> <- 2 -> <- 3 ->
	cluster := mocktikv.NewCluster()
	_, regionIDs, _ := mocktikv.BootstrapWithMultiRegions(cluster, []byte("g"), []byte("n"), []byte("t"))
	pdCli := &codecPDClient{mocktikv.NewPDClient(cluster)}
	cache := NewRegionCache(pdCli)

	bo := NewBackoffer(context.Background(), 3000)

	// Range inside a single region -> one task.
	tasks, err := buildCopTasks(bo, cache, buildCopRanges("a", "c"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 1)
	s.taskEqual(c, tasks[0], regionIDs[0], "a", "c")

	tasks, err = buildCopTasks(bo, cache, buildCopRanges("g", "n"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 1)
	s.taskEqual(c, tasks[0], regionIDs[1], "g", "n")

	tasks, err = buildCopTasks(bo, cache, buildCopRanges("m", "n"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 1)
	s.taskEqual(c, tasks[0], regionIDs[1], "m", "n")

	// Range crossing one boundary -> split into two tasks at 'g'.
	tasks, err = buildCopTasks(bo, cache, buildCopRanges("a", "k"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 2)
	s.taskEqual(c, tasks[0], regionIDs[0], "a", "g")
	s.taskEqual(c, tasks[1], regionIDs[1], "g", "k")

	// Range spanning all four regions.
	tasks, err = buildCopTasks(bo, cache, buildCopRanges("a", "x"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 4)
	s.taskEqual(c, tasks[0], regionIDs[0], "a", "g")
	s.taskEqual(c, tasks[1], regionIDs[1], "g", "n")
	s.taskEqual(c, tasks[2], regionIDs[2], "n", "t")
	s.taskEqual(c, tasks[3], regionIDs[3], "t", "x")

	// Multiple disjoint ranges in the same region stay in one task.
	tasks, err = buildCopTasks(bo, cache, buildCopRanges("a", "b", "b", "c"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 1)
	s.taskEqual(c, tasks[0], regionIDs[0], "a", "b", "b", "c")

	tasks, err = buildCopTasks(bo, cache, buildCopRanges("a", "b", "e", "f"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 1)
	s.taskEqual(c, tasks[0], regionIDs[0], "a", "b", "e", "f")

	tasks, err = buildCopTasks(bo, cache, buildCopRanges("g", "n", "o", "p"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 2)
	s.taskEqual(c, tasks[0], regionIDs[1], "g", "n")
	s.taskEqual(c, tasks[1], regionIDs[2], "o", "p")

	// A range straddling a boundary is clipped into both tasks.
	tasks, err = buildCopTasks(bo, cache, buildCopRanges("h", "k", "m", "p"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 2)
	s.taskEqual(c, tasks[0], regionIDs[1], "h", "k", "m", "n")
	s.taskEqual(c, tasks[1], regionIDs[2], "n", "p")
}

// TestSplitRegionRanges checks splitting of raw key ranges (no tasks) at
// region boundaries.
func (s *testCoprocessorSuite) TestSplitRegionRanges(c *C) {
	// nil --- 'g' --- 'n' --- 't' --- nil
	// <-  0  -> <- 1 -> <- 2 -> <- 3 ->
	cluster := mocktikv.NewCluster()
	mocktikv.BootstrapWithMultiRegions(cluster, []byte("g"), []byte("n"), []byte("t"))
	pdCli := &codecPDClient{mocktikv.NewPDClient(cluster)}
	cache := NewRegionCache(pdCli)

	bo := NewBackoffer(context.Background(), 3000)

	ranges, err := SplitRegionRanges(bo, cache, buildKeyRanges("a", "c"))
	c.Assert(err, IsNil)
	c.Assert(ranges, HasLen, 1)
	s.rangeEqual(c, ranges, "a", "c")

	ranges, err = SplitRegionRanges(bo, cache, buildKeyRanges("h", "y"))
	c.Assert(err, IsNil)
	c.Assert(len(ranges), Equals, 3)
	s.rangeEqual(c, ranges, "h", "n", "n", "t", "t", "y")

	ranges, err = SplitRegionRanges(bo, cache, buildKeyRanges("s", "z"))
	c.Assert(err, IsNil)
	c.Assert(len(ranges), Equals, 2)
	s.rangeEqual(c, ranges, "s", "t", "t", "z")

	// Empty range [s, s) is preserved as-is.
	ranges, err = SplitRegionRanges(bo, cache, buildKeyRanges("s", "s"))
	c.Assert(err, IsNil)
	c.Assert(len(ranges), Equals, 1)
	s.rangeEqual(c, ranges, "s", "s")

	ranges, err = SplitRegionRanges(bo, cache, buildKeyRanges("t", "t"))
	c.Assert(err, IsNil)
	c.Assert(len(ranges), Equals, 1)
	s.rangeEqual(c, ranges, "t", "t")

	ranges, err = SplitRegionRanges(bo, cache, buildKeyRanges("t", "u"))
	c.Assert(err, IsNil)
	c.Assert(len(ranges), Equals, 1)
	s.rangeEqual(c, ranges, "t", "u")

	ranges, err = SplitRegionRanges(bo, cache, buildKeyRanges("u", "z"))
	c.Assert(err, IsNil)
	c.Assert(len(ranges), Equals, 1)
	s.rangeEqual(c, ranges, "u", "z")

	// min --> max
	ranges, err = SplitRegionRanges(bo, cache, buildKeyRanges("a", "z"))
	c.Assert(err, IsNil)
	c.Assert(ranges, HasLen, 4)
	s.rangeEqual(c, ranges, "a", "g", "g", "n", "n", "t", "t", "z")
}

// TestRebuild verifies tasks are rebuilt correctly after a region split and
// a cache drop (note the rebuilt tasks come back in reversed order).
func (s *testCoprocessorSuite) TestRebuild(c *C) {
	// nil --- 'm' --- nil
	// <-  0  -> <- 1 ->
	cluster := mocktikv.NewCluster()
	storeID, regionIDs, peerIDs := mocktikv.BootstrapWithMultiRegions(cluster, []byte("m"))
	pdCli := &codecPDClient{mocktikv.NewPDClient(cluster)}
	cache := NewRegionCache(pdCli)

	bo := NewBackoffer(context.Background(), 3000)

	tasks, err := buildCopTasks(bo, cache, buildCopRanges("a", "z"), false, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 2)
	s.taskEqual(c, tasks[0], regionIDs[0], "a", "m")
	s.taskEqual(c, tasks[1], regionIDs[1], "m", "z")

	// nil -- 'm' -- 'q' -- nil
	// <-  0 -> <--1-> <-2-->
	regionIDs = append(regionIDs, cluster.AllocID())
	peerIDs = append(peerIDs, cluster.AllocID())
	cluster.Split(regionIDs[1], regionIDs[2], []byte("q"), []uint64{peerIDs[2]}, storeID)
	cache.DropRegion(tasks[1].region)

	// desc=true -> rebuilt task order is reversed.
	tasks, err = buildCopTasks(bo, cache, buildCopRanges("a", "z"), true, false)
	c.Assert(err, IsNil)
	c.Assert(tasks, HasLen, 3)
	s.taskEqual(c, tasks[2], regionIDs[0], "a", "m")
	s.taskEqual(c, tasks[1], regionIDs[1], "m", "q")
	s.taskEqual(c, tasks[0], regionIDs[2], "q", "z")
}

// buildKeyRanges turns a flat list of start/end key pairs into kv.KeyRanges.
func buildKeyRanges(keys ...string) []kv.KeyRange {
	var ranges []kv.KeyRange
	for i := 0; i < len(keys); i += 2 {
		ranges = append(ranges, kv.KeyRange{
			StartKey: []byte(keys[i]),
			EndKey:   []byte(keys[i+1]),
		})
	}
	return ranges
}

// buildCopRanges wraps buildKeyRanges in a copRanges (all in the mid slice).
func buildCopRanges(keys ...string) *copRanges {
	ranges := buildKeyRanges(keys...)
	return &copRanges{mid: ranges}
}

// taskEqual asserts a task's region id and its ranges (flat start/end pairs).
func (s *testCoprocessorSuite) taskEqual(c *C, task *copTask, regionID uint64, keys ...string) {
	c.Assert(task.region.id, Equals, regionID)
	for i := 0; i < task.ranges.len(); i++ {
		r := task.ranges.at(i)
		c.Assert(string(r.StartKey), Equals, keys[2*i])
		c.Assert(string(r.EndKey), Equals, keys[2*i+1])
	}
}

// rangeEqual asserts a slice of key ranges against flat start/end pairs.
func (s *testCoprocessorSuite) rangeEqual(c *C, ranges []kv.KeyRange, keys ...string) {
	for i := 0; i < len(ranges); i++ {
		r := ranges[i]
		c.Assert(string(r.StartKey), Equals, keys[2*i])
		c.Assert(string(r.EndKey), Equals, keys[2*i+1])
	}
}

// TestCopRanges checks copRanges indexing/slicing across every combination of
// first/mid/last storage.
func (s *testCoprocessorSuite) TestCopRanges(c *C) {
	ranges := []kv.KeyRange{
		{StartKey: []byte("a"), EndKey: []byte("b")},
		{StartKey: []byte("c"), EndKey: []byte("d")},
		{StartKey: []byte("e"), EndKey: []byte("f")},
	}

	s.checkEqual(c, &copRanges{mid: ranges}, ranges, true)
	s.checkEqual(c, &copRanges{first: &ranges[0], mid: ranges[1:]}, ranges, true)
	s.checkEqual(c, &copRanges{mid: ranges[:2], last: &ranges[2]}, ranges, true)
	s.checkEqual(c, &copRanges{first: &ranges[0], mid: ranges[1:2], last: &ranges[2]}, ranges, true)
}

// checkEqual verifies element equality and (optionally) every sub-slice.
func (s *testCoprocessorSuite) checkEqual(c *C, copRanges *copRanges, ranges []kv.KeyRange, slice bool) {
	c.Assert(copRanges.len(), Equals, len(ranges))
	for i := range ranges {
		c.Assert(copRanges.at(i), DeepEquals, ranges[i])
	}
	if slice {
		for i := 0; i <= copRanges.len(); i++ {
			for j := i; j <= copRanges.len(); j++ {
				s.checkEqual(c, copRanges.slice(i, j), ranges[i:j], false)
			}
		}
	}
}

// TestCopRangeSplit checks copRanges.split: the left/right halves produced by
// cutting the range set at a key.
func (s *testCoprocessorSuite) TestCopRangeSplit(c *C) {
	first := &kv.KeyRange{StartKey: []byte("a"), EndKey: []byte("b")}
	mid := []kv.KeyRange{
		{StartKey: []byte("c"), EndKey: []byte("d")},
		{StartKey: []byte("e"), EndKey: []byte("g")},
		{StartKey: []byte("l"), EndKey: []byte("o")},
	}
	last := &kv.KeyRange{StartKey: []byte("q"), EndKey: []byte("t")}
	left := true
	right := false

	// input range: [c-d) [e-g) [l-o)
	ranges := &copRanges{mid: mid}
	s.testSplit(c, ranges, right,
		splitCase{"c", buildCopRanges("c", "d", "e", "g", "l", "o")},
		splitCase{"d", buildCopRanges("e", "g", "l", "o")},
		splitCase{"f", buildCopRanges("f", "g", "l", "o")},
	)

	// input range: [a-b) [c-d) [e-g) [l-o)
	ranges = &copRanges{first: first, mid: mid}
	s.testSplit(c, ranges, right,
		splitCase{"a", buildCopRanges("a", "b", "c", "d", "e", "g", "l", "o")},
		splitCase{"c", buildCopRanges("c", "d", "e", "g", "l", "o")},
		splitCase{"m", buildCopRanges("m", "o")},
	)

	// input range: [a-b) [c-d) [e-g) [l-o) [q-t)
	ranges = &copRanges{first: first, mid: mid, last: last}
	s.testSplit(c, ranges, right,
		splitCase{"f", buildCopRanges("f", "g", "l", "o", "q", "t")},
		splitCase{"h", buildCopRanges("l", "o", "q", "t")},
		splitCase{"r", buildCopRanges("r", "t")},
	)

	// input range: [c-d) [e-g) [l-o)
	ranges = &copRanges{mid: mid}
	s.testSplit(c, ranges, left,
		splitCase{"m", buildCopRanges("c", "d", "e", "g", "l", "m")},
		splitCase{"g", buildCopRanges("c", "d", "e", "g")},
		splitCase{"g", buildCopRanges("c", "d", "e", "g")},
	)

	// input range: [a-b) [c-d) [e-g) [l-o)
	ranges = &copRanges{first: first, mid: mid}
	s.testSplit(c, ranges, left,
		splitCase{"d", buildCopRanges("a", "b", "c", "d")},
		splitCase{"d", buildCopRanges("a", "b", "c", "d")},
		splitCase{"o", buildCopRanges("a", "b", "c", "d", "e", "g", "l", "o")},
	)

	// input range: [a-b) [c-d) [e-g) [l-o) [q-t)
	ranges = &copRanges{first: first, mid: mid, last: last}
	s.testSplit(c, ranges, left,
		splitCase{"o", buildCopRanges("a", "b", "c", "d", "e", "g", "l", "o")},
		splitCase{"p", buildCopRanges("a", "b", "c", "d", "e", "g", "l", "o")},
		splitCase{"t", buildCopRanges("a", "b", "c", "d", "e", "g", "l", "o", "q", "t")},
	)
}

// coprocessorKeyRange builds a protobuf KeyRange from string endpoints.
func coprocessorKeyRange(start, end string) *coprocessor.KeyRange {
	return &coprocessor.KeyRange{
		Start: []byte(start),
		End:   []byte(end),
	}
}

// splitCase pairs a split key with the expected half (stored in mid).
type splitCase struct {
	key string
	*copRanges
}

// testSplit runs copRanges.split for each case, checking either the left or
// the right half depending on checkLeft.
func (s *testCoprocessorSuite) testSplit(c *C, ranges *copRanges, checkLeft bool, cases ...splitCase) {
	for _, t := range cases {
		left, right := ranges.split([]byte(t.key))
		expect := t.copRanges
		if checkLeft {
			s.checkEqual(c, left, expect.mid, false)
		} else {
			s.checkEqual(c, right, expect.mid, false)
		}
	}
}
/// Generates one HTTP route handler function per identifier passed in.
///
/// BUG FIX: the original emitted `fn $keyword_handler(...)`, but
/// `$keyword_handler` is a *different* (undeclared) metavariable — `macro_rules!`
/// cannot concatenate identifiers — so the macro failed to compile. The
/// generated function is now named exactly `$keyword`. The original also
/// compared `$keyword == get_swagger_index`, which is not a valid expression;
/// the special case is now detected via `stringify!`.
macro_rules! generate_route_handlers {
    ($($keyword:ident),*) => {
        $(
            fn $keyword(req: Request) -> Response {
                match req.method() {
                    "POST" => {
                        // Perform specific actions for the $keyword on POST request
                        // ...
                    }
                    "PUT" => {
                        // Perform specific actions for the $keyword on PUT request
                        // ...
                    }
                    "GET" => {
                        if stringify!($keyword) == "get_swagger_index" {
                            // Perform specific actions for get_swagger_index on GET request
                            // ...
                        } else {
                            // Perform specific actions for the $keyword on GET request
                            // ...
                        }
                    }
                    _ => {
                        // Handle other HTTP methods
                        // ...
                    }
                }
            }
        )*
    };
}
using System;

/// <summary>
/// Prints a fixed array and the average of its odd elements.
/// </summary>
public class GetOddAverage
{
    public static void Main()
    {
        int[] arr = { 1, 4, 9, 12, 23, 28 };
        int n = arr.Length;
        double sum = 0.0;
        int count = 0;

        Console.Write("Input Array Elements :\n");
        for (int i = 0; i < n; i++)
        {
            Console.Write(arr[i] + " ");
            if (arr[i] % 2 != 0)
            {
                sum = sum + arr[i];
                count++;
            }
        }

        // BUG FIX: guard against an array with no odd elements — the
        // unconditional division previously produced NaN (0.0 / 0).
        if (count > 0)
        {
            double avg = sum / count;
            Console.Write("\n\nAverage of Odd Elements of the Array = " + avg);
        }
        else
        {
            Console.Write("\n\nThe array contains no odd elements.");
        }
    }
}
package com.honyum.elevatorMan.listener;

/**
 * Callback interface for call-state changes.
 *
 * Created by Li Yougui (李有鬼) on 2017/1/10.
 */
public interface OnCallStateListener {

    /**
     * Invoked when the call state changes.
     * NOTE(review): carries no arguments, so implementers must query the
     * current state themselves — confirm whether a state parameter was intended.
     */
    void onCallStateListener();
}
import { NextApiHandler, NextApiRequest } from 'next';
import { GetSessionOpts, NAuth0Client } from '../client';
import { NAuth0Options } from './config';
import { Session } from '../lib';
import routes from './routes';
import { getSessionFromReq } from './session';

/**
 * Server-side NAuth0 client: dispatches Next.js API requests to the auth
 * route matching the `auth` query segment, and resolves sessions from
 * incoming requests.
 */
class ServerNAuth0Client implements NAuth0Client {
  constructor(private readonly opts: NAuth0Options) {}

  /** Extract the auth action (login/logout/callback/...) from `?auth=`. */
  private getActionFromRequest(req: NextApiRequest): string {
    return req.query.auth as string;
  }

  /** Build the catch-all API handler that routes each action. */
  handler(): NextApiHandler {
    const apiHandler: NextApiHandler = async (req, res) => {
      const action = this.getActionFromRequest(req);
      const handler = routes(action);
      await handler(req, res, this.opts);
    };
    return apiHandler;
  }

  /** Resolve the session for a request, or null when unauthenticated. */
  getSession(req: GetSessionOpts): Promise<Session | null> {
    return getSessionFromReq(req, this.opts);
  }
}

export default ServerNAuth0Client;
export * from './config';
/**
 * # profileSelectors-test.js
 *
 * This will confirm that given a specific action with a type and
 * payload, that the state object is modified accordingly.
 *
 * *Note*: in this app,```state``` is an Immutable.js object
 */
'use strict'

/**
 * ## Class under test
 *
 */
const profileSelectors = require('../profileSelectors')

/**
 * ## Tests
 *
 * profileSelectors
 */
describe('profileSelectors', () => {
  /**
   * ### Profile Request
   * getUserProfile should return the nested userProfile object by reference.
   */
  describe('getUserProfile', () => {
    it('gets user profile', () => {
      let currentState = {
        profile: {
          userProfile: {
            userProfile: {
              profileImage: 'https://d1m37qdzmw041i.cloudfront.net/photos/users/profile/image/318381-1505247817263.jpg',
              name: 'Amol',
              bio: 'Hey guys, how are you doing today?',
            }
          }
        }
      }
      let next = profileSelectors.getUserProfile(currentState)
      // toBe: the selector must return the same object, not a copy.
      expect(next).toBe(currentState.profile.userProfile.userProfile)
    })
  })

  // Selector should map the user's posts to a flat array of thumbnail URLs,
  // preserving post order.
  describe('getUserPhotosThumbnails', () => {
    it('gets user feed photos', () => {
      let currentState = {
        profile: {
          userPhotos: {
            userPhotos: {
              result: {
                posts: [
                  {
                    createdAt: '2016-09-16T22:18:13.091Z',
                    thumbnail: 'https://d1m37qdzmw041i.cloudfront.net/photos/posts/thumbnails/17517155-1474064295274.jpg',
                    className: 'Post',
                    objectId: '17517155',
                    __type: 'Object'
                  },
                  {
                    createdAt: '2016-03-23T20:42:20.304Z',
                    thumbnail: 'https://d1m37qdzmw041i.cloudfront.net/photos/posts/thumbnails/16080756-1458765641755.jpg',
                    className: 'Post',
                    objectId: '16080756',
                    __type: 'Object'
                  }
                ]
              }
            }
          }
        }
      }
      let expectedState = ['https://d1m37qdzmw041i.cloudfront.net/photos/posts/thumbnails/17517155-1474064295274.jpg',
        'https://d1m37qdzmw041i.cloudfront.net/photos/posts/thumbnails/16080756-1458765641755.jpg']
      let next = profileSelectors.getUserPhotosThumbnails(currentState)
      expect(next[0]).toBe(expectedState[0])
      expect(next[1]).toBe(expectedState[1])
    })
  })

  // Same mapping as above, but for the popular feed slice of state.
  describe('getPopularPhotosThumbnails', () => {
    it('gets popular feed photos', () => {
      let currentState = {
        profile: {
          popularPhotos: {
            popularPhotos: {
              result: {
                posts: [
                  {
                    createdAt: '2016-09-16T22:18:13.091Z',
                    thumbnail: 'https://d1m37qdzmw041i.cloudfront.net/photos/posts/thumbnails/17517155-1474064295274.jpg',
                    className: 'Post',
                    objectId: '17517155',
                    __type: 'Object'
                  },
                  {
                    createdAt: '2016-03-23T20:42:20.304Z',
                    thumbnail: 'https://d1m37qdzmw041i.cloudfront.net/photos/posts/thumbnails/16080756-1458765641755.jpg',
                    className: 'Post',
                    objectId: '16080756',
                    __type: 'Object'
                  }
                ]
              }
            }
          }
        }
      }
      let expectedState = ['https://d1m37qdzmw041i.cloudfront.net/photos/posts/thumbnails/17517155-1474064295274.jpg',
        'https://d1m37qdzmw041i.cloudfront.net/photos/posts/thumbnails/16080756-1458765641755.jpg']
      let next = profileSelectors.getPopularPhotosThumbnails(currentState)
      expect(next[0]).toBe(expectedState[0])
      expect(next[1]).toBe(expectedState[1])
    })
  })
})
<reponame>piznel/gladys-rtsp const queries = require('./rtsp.queries.js'); module.exports = function uninstall() { // delete dashboard box gladys.utils.sql(queries.deleteBox) .then(function (data) { sails.log.debug('RTSP box deleted !'); }) // delete boxType return gladys.utils.sql(queries.deleteBoxType) .then(function (data) { sails.log.debug('RTSP boxType deleted !'); }) };
#!/bin/bash set -eu executable_file="squaresdemo.out" # # pass program counter address as arg to this script to see disassembly around # # the program counter # pc_address="${1:-''}" mips64-elf-objdump -S --syms -m mips:4300 --prefix-addresses "$executable_file" > disassembly.txt # if [ -n "$pc_address" ]; then # grep --color=always -C 8 "${pc_address/0x/}" disassembly.txt # fi i=0 for arg in "${@:1}"; do if [[ "$i" = "0" ]]; then echo "exception at:" grep --color=always -C 8 "^${arg/0x/} <\w*\W" disassembly.txt else echo "called from:" grep --color=always "^${arg/0x/} <\w*\W" disassembly.txt fi ((i=i+1)) done
<gh_stars>1-10 package moe.xing.rvutils; import android.support.v7.util.SortedList; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.util.SortedListAdapterCallback; /** * Created by Hehanbo on 2016/5/17 0017. * <p> * RecyclerView 的基础适配器 */ @SuppressWarnings({"WeakerAccess", "unused"}) public abstract class BaseRecyclerViewAdapter<T, R extends RecyclerView.ViewHolder> extends BaseSortedRVAdapter<T, R> { private int addition = 0; public BaseRecyclerViewAdapter(Class<T> kClass) { //noinspection ComparatorMethodParameterNotUsed setDatas(new SortedList<>(kClass, new SortedListAdapterCallback<T>(this) { @Override public int compare(T o1, T o2) { return rvCompare(o1, o2); } @Override public boolean areContentsTheSame(T oldItem, T newItem) { return rvAreContentsTheSame(oldItem, newItem); } @Override public boolean areItemsTheSame(T item1, T item2) { return rvAreItemsTheSame(item1, item2); } })); } protected int rvCompare(T o1, T o2) { return 0; } public boolean rvAreContentsTheSame(T oldItem, T newItem) { return oldItem.toString().equals(newItem.toString()); } public boolean rvAreItemsTheSame(T item1, T item2) { return item1.equals(item2); } }
<filename>src/util/logging-wrap.js /*global module, require */ var listWrappableFunctions = require('./list-wrappable-functions'); module.exports = function loggingWrap(apiObject, options) { 'use strict'; var logPrefix = (options && options.logName && (options.logName + '.')) || '', magic = '__LOGGING_WRAP__', remapKey = function (key) { var oldFunc; if (!apiObject[key][magic]) { oldFunc = apiObject[key]; apiObject[key] = function () { var callArgs = arguments; options.log(logPrefix + key, Array.prototype.slice.call(callArgs)); return oldFunc.apply(apiObject, callArgs); }; apiObject[key][magic] = magic; } }; if (!options || !options.log) { return apiObject; } listWrappableFunctions(apiObject).forEach(remapKey); return apiObject; };
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import React from 'react';
import { render } from 'enzyme';
import { requiredProps } from '../../test/required_props';

import { EuiCommentEvent } from './comment_event';

// Snapshot tests: each case renders EuiCommentEvent with one prop of
// interest and compares against the stored snapshot.
describe('EuiCommentEvent', () => {
  test('is rendered', () => {
    const component = render(
      <EuiCommentEvent username="someuser" {...requiredProps} />
    );

    expect(component).toMatchSnapshot();
  });

  describe('props', () => {
    describe('type', () => {
      it('is rendered', () => {
        const component = render(
          <EuiCommentEvent username="someuser" type="update" />
        );

        expect(component).toMatchSnapshot();
      });
    });

    describe('timestamp', () => {
      it('is rendered', () => {
        const component = render(
          <EuiCommentEvent timestamp="21 days ago" username="someuser" />
        );

        expect(component).toMatchSnapshot();
      });
    });

    describe('event', () => {
      it('is rendered', () => {
        const component = render(
          <EuiCommentEvent event="commented" username="someuser" />
        );

        expect(component).toMatchSnapshot();
      });
    });
  });
});
import { Component, Inject } from '@angular/core';
// NOTE(review): the Navigation* symbols below appear unused in this
// component — candidates for cleanup once confirmed.
import { NavigationCancel, NavigationEnd, NavigationError, NavigationStart, Router } from '@angular/router';
import { popin } from './core/animations/animations';
import { LoginService } from '../app/services/login.service';

/**
 * Root shell component: owns the menu, the logged-in user's identity
 * fields, and role-based home navigation.
 */
@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.scss'],
  animations: [popin]
})
export class AppComponent {
  showMenu = false;
  // Current role ('doctor' | 'warrior' | 'driver' | 'staff' | 'student'),
  // mirrored from LoginService; set to null on logout.
  userType: string = "";
  userName: string = "";
  avatar: string;
  emailId: string;

  constructor(
    @Inject('loading') public loader,
    private route: Router,
    private login: LoginService) {
  }

  ngOnInit() {
    this.userType = this.login.userType;
  }

  // Scroll to top on every route activation.
  onActivate(event) {
    window.scroll(0, 0);
  }

  toggleMenu() {
    this.showMenu = !this.showMenu;
  }

  // Re-sync cached identity fields from LoginService (called after login).
  reload() {
    this.userType = this.login.userType;
    this.userName = this.login.userName;
    this.avatar = this.login.avatar;
    this.emailId = this.login.emailId;
  }

  // Clear all client-side session state and return to the landing page.
  logout() {
    this.showMenu = false;
    localStorage.clear();
    this.userType = null;
    this.login.surveyFlag = true;
    this.route.navigate(['/']);
  }

  // Route to the role-specific home page; does nothing for unknown roles.
  goToHome() {
    if (this.userType === 'doctor') {
      this.route.navigate(['/doctor']);
    } else if (this.userType === 'warrior') {
      this.route.navigate(['/warrior']);
    } else if (this.userType === 'driver') {
      this.route.navigate(['/drivHome']);
    } else if (this.userType === 'staff') {
      this.route.navigate(['/staffHome']);
    } else if (this.userType === 'student') {
      this.route.navigate(['/studentHome']);
    }
  }
}
// Notification widget: a fixed container holding a <ul>; each notification is
// an <li> that disappears after 3s or when clicked.
const host = document.createElement('div');
host.id = 'notification';

const messageList = document.createElement('ul');
messageList.addEventListener('click', dismissOnClick);
host.appendChild(messageList);
document.body.appendChild(host);

/**
 * Show `message` (suffixed with a ✖ glyph) as a new notification item,
 * auto-removed after 3 seconds.
 */
export function notify(message) {
    const item = document.createElement('li');
    item.className = 'notification';
    item.textContent = message + ' \u2716';
    messageList.appendChild(item);
    setTimeout(() => item.remove(), 3000);
}

// Event delegation: remove whichever <li> was clicked.
function dismissOnClick(event) {
    const clicked = event.target;
    if (clicked.tagName == 'LI') {
        clicked.remove();
    }
}
#!/bin/sh # Copyright(c) 2021-2022 Nexbridge Inc. All Rights Reserved. # Create a VPROC identifier. # Usage vproc.sh [ subproduct ] # PRODUCT_NAME=LIBYAML BUILDER=GCC while [ "$1" != "" ]; do case $1 in BUILDER) shift BUILDER=$1 ;; *) PRODUCT_NAME=NSGIT_$1 ;; esac shift done VERSION=`sh ./version BUILDER ${BUILDER}` MATCHER="unknown" case $VERSION in [0-9]*) PRODUCT_NUMBER=T9999 PRODUCT_PLATFORM= case `uname -p` in NSE) PRODUCT_PLATFORM=H01 ;; NSX) PRODUCT_PLATFORM=L01 ;; NSV) PRODUCT_PLATFORM=L01 ;; *) case $BUILDER in L??|J??) PRODUCT_PLATFORM=$BUILDER ;; *) PRODUCT_PLATFORM=G01 ;; esac ;; esac MATCHER="[0-9]*" ;; T[0-9]*) PRODUCT_NUMBER=`echo $VERSION | \ sed 's/^\(T[0-9]*\)[A-Z][0-9][0-9]\..*/\1/' | \ sed 's/^\(T[0-9]*\)[A-Z][0-9][0-9]_[A-Z][A-Z][A-Z]\..*/\1/'` PRODUCT_PLATFORM=`echo $VERSION | \ sed 's/^T[0-9]*\([A-Z][0-9][0-9]\)\..*/\1/' | \ sed 's/^T[0-9]*\([A-Z][0-9][0-9]\)_[A-Z][A-Z][A-Z]\..*/\1/'` MATCHER=${PRODUCT_NUMBER}${PRODUCT_PLATFORM} ;; *) echo 'Illegal VMATCH value' ;; esac COMMITTER_DATE=`date --date="@\`git show -s --format=%ct HEAD\`" +%d%b%g | tr '[:lower:]' '[:upper:]'` case $VERSION in [0-9]*) VERSION_STRING=`git describe --tags --long --match="${MATCHER}*" | \ sed 's/-.*-/_/' | sed 's/\./_/g' | sed 's/^v//'` ;; T[0-9]*) VERSION_STRING=`git describe --tags --long --match="${MATCHER}*" | \ sed 's/-/_/g' | \ sed 's/\./_/g' | \ sed -E 's/^T[0-9]{4}[A-Z][0-9][0-9].(.*)/\1/'` ;; *) echo 'Illegal VMATCH value' ;; esac echo ${PRODUCT_NUMBER}${PRODUCT_PLATFORM}_${COMMITTER_DATE}_${PRODUCT_NAME}_${VERSION_STRING}
# Imports and dependencies
# gtts stands for the Google-Text-To-Speech module, used to convert text to speech
from gtts import gTTS
import os
# These packages are used for OCR (Optical Character Recognition)
import pytesseract
from PIL import Image
# Wrapper around the Wikipedia API; basic information about a topic can be obtained
import wikipedia

# This script can be used to convert text to speech, either from a text file
# or when a user enters text. Text can be read from images using OCR.
print("Option 1, enter text and convert it to speech \n")
print("Option 2, convert the contents of a text file to speech \n")
print("Option 3, convert the text in an image to speech \n")
print("Option 4, convert information about a topic from wikipedia into speech \n")

# Conversion will be done to the English language
language = "en"


def convert_text_to_speech(option):
    """Gather text from the source selected by ``option`` and save it as speech.

    Options: 1 = typed text, 2 = text file, 3 = image via OCR,
    4 = Wikipedia summary. The audio is written to ``text_content.mp3``.
    """
    text = ""
    if option == 1:
        text = input("Enter the text, that has to be converted to speech ")
    elif option == 2:
        file_name = input(
            "Enter the name of the text file, that has to be converted to speech ")
        with open(file_name, "r") as handle:
            text = handle.read().replace("\n", "")
    elif option == 3:
        image_path = input(
            "Enter the path of the image that has to be read and converted to speech ")
        # Bug fix: .replace() must be applied to the OCR'd string, not to the
        # PIL Image object (Image has no .replace method).
        text = pytesseract.image_to_string(
            Image.open(image_path)).replace("\n", "")
    elif option == 4:
        # Bug fix: do not shadow the `wikipedia` module with the user's input,
        # which previously made wikipedia.summary() an AttributeError.
        topic = input(
            "Enter the topic about which information is to be obtained ")
        text = wikipedia.summary(topic)
    speech = gTTS(text=text, lang=language, slow=True)
    speech.save("text_content.mp3")


if __name__ == "__main__":
    option = int(input("Enter the option "))
    convert_text_to_speech(option)
#!/bin/bash
# Start the pwrstatd service (UPS status daemon), then launch the UPS API
# server in the foreground.
/etc/init.d/pwrstatd start
/src/ups-api.py
// Handler for a range-slider change: mirror values to text fields, then send.
function onChangeRange() {
    rangeToText();
    sendAngles();
}

// Copies slider values J1..J6 (J4 is skipped) into the global angles[] array
// and into the matching "J<n>value" numeric text fields.
function rangeToText() {
    var j = 0; // counter for filling the simulator's angles array
    for (i = 1; i <= 6; i++) {
        if (i == 4) continue;
        angle = document.getElementById("J" + i).value;
        angles[j] = angle;
        j++;
        document.getElementById("J" + i + "value").value = angle;
    }
}

// Sends the current angles (plus the ev flag) to /angles.py via GET and
// prepends the server's response to the on-page log. No-op when the page is
// opened from the local filesystem (file:// URL).
function sendAngles() {
    if (window.location.href.substr(0, 4) == "file") return;
    var httpReq = new XMLHttpRequest();
    httpReq.onreadystatechange = function() {
        if (httpReq.readyState != 4 || httpReq.status != 200) return;
        org = document.getElementById("angles_log").innerHTML;
        document.getElementById("angles_log").innerHTML = httpReq.responseText + "<br />" + org;
    }
    //var url = "/ajax/last_articles.cgi?num=" + num;
    var url = "/angles.py?angles="
    var j = 0; // counter for reading the simulator's angles array
    for (i = 1; i <= 6; i++) {
        if (i == 4) continue;
        url += angles[j] + ',';
        j++;
    }
    url = url.replace(/,$/, "");
    url = url + "&ev=" + ev;
    httpReq.open("GET", url, true);
    httpReq.send(null);
}

// Handler for the numeric text fields: push the value back onto the slider.
function numToSlide(obj) {
    target = obj.id.replace(/value/, "");
    document.getElementById(target).value = obj.value;
    sendAngles();
}

// Long-polls /ad.py for the two A/D channel readings and displays them,
// re-issuing the request as soon as the previous one completes.
function readAd() {
    if (window.location.href.substr(0, 4) == "file") return;
    var httpReq = new XMLHttpRequest();
    httpReq.onreadystatechange = function() {
        if (httpReq.readyState != 4 || httpReq.status != 200) return;
        vs = httpReq.responseText.split(" ");
        document.getElementById("ch0_value").innerHTML = vs[0];
        document.getElementById("ch1_value").innerHTML = vs[1];
        readAd();
    }
    // Bug fix: the random value must be concatenated outside the string
    // literal; it acts as a cache-buster for the poll request.
    var url = "/ad.py?" + Math.random();
    httpReq.open("GET", url, true);
    httpReq.send(null);
}

// Sends the on/off state `val` to /ev.py.
// NOTE(review): a global named `ev` is also assigned in oneStep() below,
// which clobbers this function after the first playback step — confirm
// which use of the name is intended.
function ev(val) {
    if (window.location.href.substr(0, 4) == "file") return;
    var httpReq = new XMLHttpRequest();
    httpReq.onreadystatechange = function() {
        if (httpReq.readyState != 4 || httpReq.status != 200) return;
    }
    url = "/ev.py?onoff=" + val;
    httpReq.open("GET", url, true);
    httpReq.send(null);
}

// Plays back the sequence textarea; each line is comma-separated with the
// step duration (ms) in field 6. Steps are scheduled with setTimeout.
function run() {
    seq = document.getElementById("sequence").value;
    lns = seq.split("\n");
    t = 0;
    for (i = 0; i < lns.length; i++) {
        as = lns[i].split(",");
        if (as.length < 6) continue;
        setTimeout(function(a) { oneStep(a); }, t, as);
        t += parseInt(as[5]);
    }
}

// Applies one sequence step: update angles, the ev flag, and the text
// fields, then transmit. NOTE(review): as[6] is undefined for 6-field
// lines even though run() accepts them — confirm expected line format.
function oneStep(as) {
    for (k = 0; k < 5; k++) {
        angles[k] = as[k];
    }
    ev = as[6];
    j = 0;
    for (k = 1; k <= 6; k++) {
        if (k == 4) continue;
        document.getElementById("J" + k + "value").value = angles[j++];
    }
    sendAngles();
}

// Page entry point: start A/D polling and draw the simulator.
function init() {
    readAd();
    drawRobot();
}

/*
function runCode() {
    document.getElementById("angles_log").innerHTML = "";
    var httpReq = new XMLHttpRequest();
    httpReq.onreadystatechange = function(){
        if(httpReq.readyState != 4 || httpReq.status != 200) return;
        document.getElementById("angles_log").innerHTML += httpReq.responseText;
    }
    httpReq.open("GET","/run.bash",true);
    httpReq.send(null);
}

function stopCode() {
    var httpReq = new XMLHttpRequest();
    httpReq.open("GET","/stop.bash",false);
    httpReq.send(null);
    document.getElementById("angles_log").innerHTML = "";
}
*/
#!/bin/bash
#
# Continuously rsync the current directory to $HOST:src/hkjn.me while a
# .sync_active marker file exists; delete the marker to stop the loop.

set -euo pipefail

HOST=${1:-""}
[[ "$HOST" ]] || {
	echo "FATAL: No HOST specified." >&2
	exit 1
}
DIR="$(pwd)"
echo "Syncing local directory $DIR to $HOST:src/hkjn.me. Remove the .sync_active file to stop the sync."
# Seconds between sync rounds; overridable via the PAUSE env var.
PAUSE=${PAUSE:-2s}
touch .sync_active
while true; do
	[[ -e .sync_active ]] || break
	# Bug fix: quote the expansions so directories/hosts containing spaces
	# or glob characters do not get word-split by the shell.
	rsync -az --exclude=.git/ --exclude=.sync_active "$DIR" "$HOST:src/hkjn.me/"
	sleep "$PAUSE"
done
echo "No .sync_active file. Exiting."
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-3040-1
#
# Security announcement date: 2016-07-21 00:00:00 UTC
# Script generation date:     2017-01-19 21:07:29 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - mysql-server-5.5:5.5.50-0ubuntu0.14.04.1
#
# Last versions recommended by security team:
# - mysql-server-5.5:5.5.54-0ubuntu0.14.04.1
#
# CVE List:
# - CVE-2016-3424
# - CVE-2016-3459
# - CVE-2016-3477
# - CVE-2016-3486
# - CVE-2016-3501
# - CVE-2016-3518
# - CVE-2016-3521
# - CVE-2016-3588
# - CVE-2016-3614
# - CVE-2016-3615
# - CVE-2016-5436
# - CVE-2016-5437
# - CVE-2016-5439
# - CVE-2016-5440
# - CVE-2016-5441
# - CVE-2016-5442
# - CVE-2016-5443
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Upgrade to the latest recommended version (newer than the version that
# first fixed the USN, per the header above), without installing new packages.
sudo apt-get install --only-upgrade mysql-server-5.5=5.5.54-0ubuntu0.14.04.1 -y
# Generated by Django 3.1.5 on 2021-01-23 18:56 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('camphoric', '0017_registrationtype_invitation_email_subject'), ] operations = [ migrations.AddField( model_name='invitation', name='recipient_name', field=models.CharField(blank=True, max_length=100), ), ]
#!/bin/bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# CI driver: finds every Go module inside the Tink repository, builds and
# tests it in place, then verifies it can be consumed as a dependency from a
# fresh external module.

set -euo pipefail

REPO_DIR="${KOKORO_ARTIFACTS_DIR}/git/tink"

cd "${REPO_DIR}"
./kokoro/testutils/copy_credentials.sh "go/testdata"
./kokoro/testutils/update_certs.sh
# Sourcing required to update callers environment.
source ./kokoro/testutils/install_go.sh

echo "Using go binary from $(which go): $(go version)"

# Tink version read from the TINK... variable in go/tink_version.bzl.
readonly TINK_VERSION="$(cat ${REPO_DIR}/go/tink_version.bzl \
  | grep ^TINK \
  | cut -f 2 -d \")"

# Create a temporary directory for performing module tests.
TMP_DIR="$(mktemp -dt go-module-test.XXXXXX)"
GO_MOD_DIR="${TMP_DIR}/go-mod-test"

REPO_URL_PREFIX="github.com/google/tink"

#######################################
# Test an individual Go module within the Tink repository.
# Globals:
#   REPO_DIR
#   TINK_VERSION
#   GO_MOD_DIR
#   REPO_URL_PREFIX
# Arguments:
#   The name of the Go module, relative to the repository root.
# Outputs:
#   Prints progress to STDOUT.
#######################################
function test_go_mod() {
  local mod_name="$1"
  local full_mod_name="${REPO_URL_PREFIX}/${mod_name}"

  echo "### Testing ${full_mod_name}..."
  (
    echo "Using go binary from $(which go): $(go version)"
    set -x
    cd "${REPO_DIR}/${mod_name}"
    go build -v ./...
    go test -v ./...
  )

  mkdir "${GO_MOD_DIR}"
  (
    cd "${GO_MOD_DIR}"
    echo "Using go binary from $(which go): $(go version)"
    # Display commands being run for the remainder of this subshell.
    set -x

    # Initialize a test Go module.
    go mod init tink-go-mod-test
    overlay_module "${mod_name}" "${full_mod_name}"
    overlay_internal_deps "${mod_name}"

    # Print the prepared go.mod.
    cat go.mod

    # Get the module at the latest commit and print graph output depicting
    # direct dependencies.
    go get -v "${full_mod_name}@master"

    # Print contextual information concerning dependencies.
    go mod graph | grep google/tink
    go list -m all | grep google/tink
  )

  # Leave a clean environment for subsequent tests.
  go clean -modcache
  rm -rf "${GO_MOD_DIR}"
}

#######################################
# Add a require statement for a Tink module and a replace statement to point it
# to the local copy.
# Globals:
#   REPO_DIR
#   TINK_VERSION
# Arguments:
#   The name of the Go module, relative to the repository root.
#   The full name of the Go module, as specified in import statements.
#######################################
function overlay_module() {
  local mod_name="$1"
  local full_mod_name="$2"

  go mod edit "-require=${full_mod_name}@v${TINK_VERSION}"
  go mod edit "-replace=${full_mod_name}=${REPO_DIR}/${mod_name}"
}

#######################################
# Search the go.mod being tested for internal dependencies and overlay them with
# the local copies.
# Globals:
#   REPO_DIR
#   REPO_URL_PREFIX
# Arguments:
#   The name of the Go module being tested, relative to the repository root.
#######################################
function overlay_internal_deps() {
  local mod_name="$1"

  declare -a internal_deps
  while read internal_dep; do
    internal_deps+=("${internal_dep}")
  done < <(grep "${REPO_URL_PREFIX}" "${REPO_DIR}/${mod_name}/go.mod" \
    | grep -v ^module \
    | awk '{print $1}')

  # If internal_deps are found...
  if [[ ! -z "${internal_deps+x}" ]]; then
    for full_dep_name in "${internal_deps[@]}"; do
      local dep_name="$(echo "${full_dep_name}" | sed "s#${REPO_URL_PREFIX}/##")"
      overlay_module "${dep_name}" "${full_dep_name}"
    done
  fi
}

function main() {
  # Extract all go.mod instances from the repository.
  declare -a go_mod_dirs
  while read go_mod_dir; do
    go_mod_dirs+=("${go_mod_dir}")
  done < <(find "${REPO_DIR}" -name "go.mod" \
    | sed "s#^${REPO_DIR}/##" \
    | xargs -n 1 dirname)

  echo "### Go modules found:"
  for go_mod_dir in "${go_mod_dirs[@]}"; do
    echo "${go_mod_dir}"
  done

  for go_mod_dir in "${go_mod_dirs[@]}"; do
    test_go_mod "${go_mod_dir}"
  done
}

main "$@"
import org.apache.sling.api.SlingHttpServletRequest; import org.apache.sling.api.resource.ResourceResolver; import org.apache.sling.api.resource.Resource; import org.apache.sling.api.resource.ResourceResolverFactory; import org.apache.sling.api.resource.ValueMap; import com.day.cq.search.QueryBuilder; import com.day.cq.search.Query; import com.day.cq.search.result.SearchResult; import com.day.cq.search.result.Hit; import java.util.*; public class SearchService implements SearchInterface { QueryBuilder queryBuilder; @Override public Iterator<Hit> searchResult(List<String> paths, String text, SlingHttpServletRequest slingHttpServletRequest) { Map<String, String> queryMap = new HashMap<>(); queryMap.put("path", paths.toArray(new String[0])); queryMap.put("type", "cq:Page"); queryMap.put("fulltext", text); queryMap.put("p.limit", "-1"); ResourceResolver resourceResolver = slingHttpServletRequest.getResourceResolver(); Query query = queryBuilder.createQuery(PredicateGroup.create(queryMap), resourceResolver.adaptTo(Session.class)); SearchResult result = query.getResult(); return result.getHits().iterator(); } }
<gh_stars>0
package com.netcracker.ncstore.dto.response;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.extern.jackson.Jacksonized;

import java.time.Instant;
import java.util.List;
import java.util.UUID;

/**
 * Immutable response DTO carrying detailed product information, including
 * supplier data, pricing (with an optional discount window) and categories.
 * Deserialized leniently from JSON: unknown properties are ignored.
 */
@Jacksonized
@Builder
@JsonIgnoreProperties(ignoreUnknown = true)
@Getter
@AllArgsConstructor
public class ProductGetInfoResponse {
    private final UUID productId;
    private final UUID supplierId;
    private final String supplierName;
    private final String productName;
    private final String productDescription;
    // Regular price; discountPrice is nullable (boxed Double) when no
    // discount applies.
    private final double normalPrice;
    private final Double discountPrice;
    private final String priceCurrency;
    // Discount validity window in UTC — presumably bounds for discountPrice;
    // confirm against the producing service.
    private final Instant startUtcTime;
    private final Instant endUtcTime;
    private final UUID parentProductId;
    private final List<String> categoriesNames;
}
<reponame>brighteningStar/rp
// Thin wrapper around axios GET requests: issuing a new request cancels any
// still-pending one, and failures are normalized into `this.errors`.
// NOTE(review): `axios` is not imported here — assumed to be a global or
// injected by the bundler; confirm.
export class HttpService {
    constructor() {
        this.cancelTokenSource = null // to control pending requests
        this.errors = {}
    }

    // Performs a GET to $uri with $params as the query string.
    // Resolves with response.data; rejects with the raw error response after
    // recording it via onFail().
    // NOTE(review): this.loading is set true here and never reset — confirm
    // whether callers rely on it.
    get($uri, $params) {
        this.loading = true

        // Cancel the previous in-flight request, if any, before starting a
        // new one with a fresh cancel token.
        if (this.cancelTokenSource) {
            this.cancelTokenSource.cancel()
        }
        this.cancelTokenSource = axios.CancelToken.source()

        return new Promise((resolve, reject) => {
            axios.get($uri, {
                params: $params,
                cancelToken: this.cancelTokenSource.token,
                headers: {
                },
            })
                .then(response => {
                    resolve(response.data);
                })
                .catch(errors => {
                    this.onFail(errors.response)
                    reject(errors.response)
                })
        });
    }

    // Records a failed response: 500s get a generic message, anything else
    // keeps the server-provided error payload.
    onFail(response) {
        if( response.status === 500) {
            this.errors = {
                message: 'Something went wrong'
            }
        } else {
            this.errors = response.data
        }
    }
}
// App.js import React from 'react'; import StepsCounter from './StepsCounter'; import StepsHistory from './StepsHistory'; export default class App extends React.Component { render() { return ( <React.NavigatorIOS initialRoute={{ component: StepsCounter, title: 'Steps Counter' }} style={{flex: 1}} /> ); } } // StepsCounter.js import React from 'react'; import {Alert, Text, View, StyleSheet} from 'react-native'; export default class StepsCounter extends React.Component { constructor(props) { super(props); this.state = { steps: 0 }; } incrementSteps = () => { this.setState({ steps: this.state.steps + 1 }); }; render() { return ( <View style={styles.container}> <Text style={styles.title}>Steps Taken Today</Text> <Text style={styles.count}>{this.state.steps}</Text> <Text style={styles.button} onPress={this.incrementSteps}> Tap here to increment </Text> <Text style={[styles.button, { color: 'blue' }]} onPress={() => this.props.navigator.push({ title: 'Steps History', component: StepsHistory }) }> Show Steps History </Text> </View> ); } } const styles = StyleSheet.create({ container: { flex: 1, justifyContent: 'center', alignItems: 'center', }, title: { fontSize: 20, }, count: { fontSize: 80, marginBottom: 20, }, button: { fontSize: 20, }, });
import os
import subprocess
from multiprocessing import Pool

import numpy as np
import pandas as pd
from scipy import sparse

from graphs import Graph
from Utils import ColourClass, FancyApp, Utilities


def parse_blast(filename, protein_format):
    """Parse a tabular BLAST file (outfmt "6 std qlen"), keeping only the
    best hit (lowest e-value) per query.

    Args:
        filename: path to the BLAST tabular output.
        protein_format: when 'uniprot', accessions are extracted from the
            raw query/target identifiers.

    Returns:
        dict mapping query id -> {'evalue', 'pi', 'perc', 'target'}.
    """
    best_hit_dict = {}
    for line in open(filename):
        parts = line.strip().split('\t')
        query = parts[0]
        if protein_format == 'uniprot':
            query = Utilities.extract_uniprot_accession(query)
        # Bug fix: compare e-values numerically; the string comparison
        # previously used here orders "1e-100" after "1e-20".
        evalue = float(parts[10])
        if query in best_hit_dict and best_hit_dict[query]['evalue'] < evalue:
            continue
        target = parts[1].strip()
        if protein_format == 'uniprot':
            target = Utilities.extract_uniprot_accession(target)
        pi = float(parts[2].strip())
        # Bug fix: parenthesize the alignment span before scaling — percent
        # of the query covered: 100 * (qend - qstart) / qlen. The original
        # expression divided only qstart by qlen due to operator precedence.
        perc = 100.0 * (float(parts[7]) - float(parts[6])) / float(parts[-1])
        best_hit_dict[query] = {
            'evalue': evalue,
            'pi': pi,
            'perc': perc,
            'target': target
        }
    return best_hit_dict


def compute_ortholog(fasta, db, string_fasta, string_db, out, out_dir,
                     protein_format):
    """Find reciprocal-best-hit orthologs between two proteomes via BLAST.

    Runs a forward and a backward blastp, keeps pairs that are each other's
    best hit, and pickles the resulting DataFrame to ``out_dir/out``.
    Skips all work if that file already exists.
    """
    outfile = os.path.join(out_dir, out)
    if not os.path.exists(outfile):
        blast_command = 'blastp -query {fasta} -db {blastdb} -out {out}' +\
                        ' -outfmt "6 std qlen" -max_target_seqs 1'
        # create temporary blast files
        out_1 = os.path.join(out_dir, out + 'forward')
        out_2 = os.path.join(out_dir, out + 'backward')
        subprocess.call(blast_command.format(fasta=fasta,
                                             blastdb=string_db,
                                             out=out_1), shell=True)
        subprocess.call(blast_command.format(fasta=string_fasta,
                                             blastdb=db,
                                             out=out_2), shell=True)
        # max_evalue = 1e-6
        # perc_th = 80.0
        # positives = 60.0
        orthologs_found = {}
        forward = parse_blast(out_1, protein_format)
        backward = parse_blast(out_2, protein_format)
        index = 0
        for query, infoq in forward.items():
            if infoq['target'] in backward.keys():
                infot = backward[infoq['target']]
                if infot['target'] == query:
                    # reciprocal best hit!
                    orthologs_found[index] = {
                        'query': query,
                        'target': infoq['target'],
                        'query_evalue': infoq['evalue'],
                        'query_pi': infoq['pi'],
                        'query_perc': infoq['perc'],
                        'target_evalue': infot['evalue'],
                        'target_pi': infot['pi'],
                        'target_perc': infot['perc'],
                    }
                    index += 1
        orth = pd.DataFrame.from_dict(orthologs_found, orient='index')
        # worst of the two reciprocal e-values, and geometric mean of the
        # two percent identities
        orth['max evalue'] = np.max(orth[['query_evalue', 'target_evalue']],
                                    axis=1)
        orth['pos'] = np.sqrt(orth['query_pi'] * orth['target_pi'])
        orth.to_pickle(outfile)
        # remove temporary files
        os.remove(out_1)
        os.remove(out_2)
    FancyApp.FancyApp.yell(ColourClass.bcolors.BOLD_CYAN, 'compute ortholog',
                           'Finished collecting orthologs:', out)


class Collection(Graph):
    """Builds a protein-protein graph collection by transferring STRING
    links onto the target proteome through reciprocal-best-hit orthologs."""

    def __init__(self, fasta, proteins, string_dir, string_links, core_ids,
                 output_dir, orthologs_dir, graphs_dir, alias, cpus,
                 blacklist, max_evalue, perc, positives, protein_format,
                 interesting_graphs=['neighborhood', 'experiments',
                                     'coexpression', 'textmining',
                                     'database']):
        super(Collection, self).__init__()
        self.fasta = fasta
        self.proteins = proteins
        self.string_dir = string_dir
        self.string_links = string_links
        self.core_ids = core_ids
        self.output_dir = output_dir
        self.graphs_dir = graphs_dir
        self.orthologs_dir = orthologs_dir
        self.alias = alias
        self.cpus = cpus
        self.db = self.fasta
        self.blacklist = blacklist
        if self.blacklist is None:
            self.blacklist = []
        self.max_evalue = max_evalue
        self.perc = perc
        self.positives = positives
        self.interesting_graphs = interesting_graphs
        self.collection = None
        self.protein_format = protein_format

    def get_graph(self, **kwargs):
        """Return one sparse adjacency matrix per interesting STRING channel.

        It is assumed that the collection was already made, so we can focus
        on the creation of the protein-protein graph only.
        """
        collection = {}
        for g in self.interesting_graphs:
            graph = self.collection[['query1', 'query2', g]]\
                .merge(self.proteins, left_on='query1', right_index=True)
            graph = graph.merge(self.proteins, left_on='query2',
                                right_index=True, suffixes=['1', '2'])
            p1_idx = graph['protein idx1'].values
            p2_idx = graph['protein idx2'].values
            collection[g] = sparse.coo_matrix(
                (graph[g], (p1_idx, p2_idx)),
                shape=(len(self.proteins), len(self.proteins)))
        return collection

    def should_be_processed(self, string_id):
        """Decide whether a STRING organism's links should be transferred:
        not blacklisted, ortholog file present, and more than 2 orthologs."""
        # self.tell('checking', string_id)
        if string_id in self.blacklist:
            # self.tell('blacklisted')
            return False
        orthologs_filename = os.path.join(self.orthologs_dir,
                                          self.alias + '_AND_' + string_id)
        if not os.path.exists(orthologs_filename):
            # self.tell('ortholog file does not exist')
            return False
        orthologs = pd.read_pickle(orthologs_filename)
        if orthologs.shape[0] <= 2:
            # self.tell('not enough orthologs')
            return False
        # self.tell(string_id, 'Should be processed')
        return True

    def clean_graph(self):
        """Return an empty edge-accumulator dict for one organism."""
        graph = {'protein 1': [], 'protein 2': []}
        for g in self.interesting_graphs:
            graph[g] = []
        return graph

    def process_graph(self, string_id, graph):
        """Map one organism's STRING edges onto the target proteome via the
        ortholog table and append them to ``self.collection``."""
        self.tell('Transferring links from', string_id)
        # load ortholog file
        orthologs = pd.read_pickle(os.path.join(
            self.orthologs_dir, self.alias + '_AND_' + string_id))
        orthologs['target_evalue'] = orthologs['target_evalue'].astype(float)
        orthologs['query_evalue'] = orthologs['query_evalue'].astype(float)
        # Bug fix: 'max_evalue' must be the worse (largest) of the two
        # reciprocal e-values, not a copy of the query e-value.
        orthologs['max_evalue'] = np.maximum(orthologs['query_evalue'],
                                             orthologs['target_evalue'])
        valid_orthologs = orthologs[
            (orthologs['max_evalue'] < self.max_evalue) &
            (orthologs['target_perc'] >= self.perc) &
            (orthologs['query_perc'] >= self.perc) &
            (orthologs['pos'] >= self.positives)
        ].copy()
        # valid_orthologs['protein 1'] = valid_orthologs['target']
        # valid_orthologs['protein 2'] = valid_orthologs['target']
        # keep only edges whose both endpoints have a valid ortholog
        valid_edges = graph[
            (graph['protein 1'].isin(valid_orthologs['target'])) &
            (graph['protein 2'].isin(valid_orthologs['target']))
        ]
        valid_edges = valid_edges.merge(
            valid_orthologs[['query', 'target', 'max_evalue']],
            left_on='protein 1', right_on='target')
        valid_edges = valid_edges.merge(
            valid_orthologs[['query', 'target', 'max_evalue']],
            left_on='protein 2', right_on='target', suffixes=['1', '2'])
        valid_edges['max_evalue'] = valid_edges[
            ['max_evalue1', 'max_evalue2']].max(axis=1)
        self.tell(valid_edges.shape[0], 'edges transferred from organism',
                  string_id)
        drop_cols = ['protein 1', 'protein 2', 'target1', 'target2',
                     'max_evalue1', 'max_evalue2']
        if self.collection is None:
            self.collection = valid_edges.drop(drop_cols, axis=1)
        else:
            self.collection = self.collection.append(
                valid_edges.drop(drop_cols, axis=1), ignore_index=True)
        self.tell('current collection has', self.collection.shape[0], 'edges')

    # TODO: rename "query" to "protein" for consistency
    def write_graph(self, filename):
        """Dump the accumulated collection as a TSV file."""
        self.collection.to_csv(filename, sep='\t')

    def compute_graph(self):
        """End-to-end pipeline: compute orthologs against every STRING core
        organism, then stream the STRING links file and transfer edges.
        Loads a cached pickle when one exists."""
        collection_file = os.path.join(self.graphs_dir, self.alias)
        if not os.path.exists(collection_file):
            self.tell('Loading STRING core id')
            core_ids = []
            for line in open(self.core_ids, 'r'):
                if line != '':
                    core_ids.append(line.strip())
            # check whether the database for the fasta file is created
            if not (os.path.exists(self.fasta + '.phr') and
                    os.path.exists(self.fasta + '.pin')):
                self.tell('Creating a blast database from the input fasta')
                fasta = self.fasta
                subprocess.call(f"makeblastdb -in {fasta} -out {fasta} " +
                                "-dbtype prot", shell=True)
            self.tell('Computing Orthologs using', self.cpus, 'cores')
            params = []
            for core_id in core_ids:
                fasta = os.path.join(self.string_dir, core_id + '.faa')
                out = self.alias + '_AND_' + core_id
                params.append([self.fasta, self.db, fasta, fasta, out,
                               self.orthologs_dir, self.protein_format])
            with Pool(self.cpus) as p:
                p.starmap(compute_ortholog, params)
            # once the orthologs are computed, we transfer links from STRING
            should_process = {}
            graph_index = {}
            graph = {}
            first_line = True
            current_organism = -1
            for line in open(self.string_links):
                fields = line.strip().split()
                if first_line:
                    first_line = False
                    # identify the index of the interesting
                    # graphs in the STRING file
                    for i, field in enumerate(fields):
                        if field in self.interesting_graphs:
                            graph_index[field] = i
                else:
                    org_id = fields[0].split('.')[0]
                    # check if the current organism should be processed
                    if org_id not in should_process.keys():
                        should_process[org_id] =\
                            self.should_be_processed(org_id)
                        if not should_process[org_id]:
                            self.tell('Ignoring organism', org_id)
                    if not should_process[org_id]:
                        continue
                    if current_organism == -1:
                        current_organism = org_id
                        graph = self.clean_graph()
                    elif current_organism != org_id:
                        # make a pandas from the string graph
                        graph_df = pd.DataFrame.from_dict(graph)
                        # keeping only the relevant links
                        # (those with some information in at
                        # least one of the models)
                        condition = None
                        for g in self.interesting_graphs:
                            if condition is None:
                                condition = graph_df[g] > 0
                            else:
                                condition |= graph_df[g] > 0
                        # transfer links
                        self.process_graph(current_organism,
                                           graph_df[condition])
                        # clean graph and update current organism
                        graph = self.clean_graph()
                        current_organism = org_id
                    if should_process[org_id]:
                        graph['protein 1'].append(fields[0])
                        graph['protein 2'].append(fields[1])
                        for g in self.interesting_graphs:
                            graph[g].append(int(fields[graph_index[g]]))
            # we make sure we don't miss possible links from
            # the last organism in STRING
            if should_process[current_organism]:
                # make a pandas from the string graph
                graph_df = pd.DataFrame.from_dict(graph)
                # keeping only the relevant links (those with some
                # information in at least one of the models)
                condition = None
                for g in self.interesting_graphs:
                    if condition is None:
                        condition = graph_df[g] > 0
                    else:
                        condition |= graph_df[g] > 0
                # transfer links
                self.process_graph(current_organism, graph_df[condition])
            Graph.assert_lexicographical_order(self.collection,
                                               p1='query1', p2='query2')
            self.collection.to_pickle(collection_file)
        else:
            self.tell('Graph collection file found, skipping computation...')
            self.collection = pd.read_pickle(collection_file)
            Graph.assert_lexicographical_order(self.collection,
                                               p1='query1', p2='query2')
<gh_stars>0 package validator import ( "errors" "fmt" "log" "path/filepath" "strings" "github.com/xeipuuv/gojsonschema" ) type SchemaProvider struct { validators map[string]*gojsonschema.Schema } func NewSchemaProvider() *SchemaProvider { p := SchemaProvider{validators: make(map[string]*gojsonschema.Schema)} return &p } func loadSchema(path string) (*gojsonschema.Schema, error) { path, err := filepath.Abs(path) if err != nil { log.Printf("Schema abs path error: %v\n", path) return nil, err } path = filepath.ToSlash(path) path = fmt.Sprintf("file://%s", path) loader := gojsonschema.NewReferenceLoader(path) return gojsonschema.NewSchema(loader) } func prepareUrl(url string) string { return strings.ToLower(strings.Trim(url, "/")) } func (p *SchemaProvider) Register(url, filePath string) error { schema, err := loadSchema(filePath) if err != nil { log.Printf("Schema loading error: %v\n", err) return err } log.Printf("Schema loaded from %v\n", filePath) p.validators[prepareUrl(url)] = schema return nil } func (p *SchemaProvider) Get(url string) (*gojsonschema.Schema, error) { s, ok := p.validators[prepareUrl(url)] if ok { return s, nil } else { return nil, errors.New("schema not found") } }
-- Track whether a version has been yanked; existing rows default to FALSE.
ALTER TABLE versions ADD COLUMN yanked BOOLEAN DEFAULT FALSE;
import os
import sys

# Require exactly one command-line argument: the path to inspect.
args = sys.argv[1:]
if len(args) != 1:
    print("Usage: python program_name.py <path>")
    sys.exit(1)

path = args[0]

# Bail out early when the path does not exist at all.
if not os.path.exists(path):
    print("No such file or directory")
    sys.exit(1)

if os.path.isfile(path):
    # A single file is processed as a one-element list.
    files = [path]
    # Perform file-specific processing here
elif os.path.isdir(path):
    # Perform directory-specific processing here
    pass
<filename>site/src/pages/index.js
// Landing page for the Purify documentation site: nav bar, hero with install
// command, three feature cards, a before/after refactoring demo, and footer.
import React from 'react'
import Link from 'gatsby-link'
import styled from 'styled-components'
import Meta from '../components/Meta'
import SyntaxHighlighter from 'react-syntax-highlighter'
import highlightStyle from 'react-syntax-highlighter/styles/hljs/tomorrow'

// --- Layout styled-components ------------------------------------------------

const Container = styled.div`
  display: flex;
  flex-direction: column;
  height: 100%;
`

const Title = styled.h1`
  margin: 0;
  font-style: italic;
`

const Subtitle = styled.h2`
  margin: 0;
  padding: 0 10px;

  @media only screen and (max-width : 768px) {
    font-size: 19px;
  }
`

const NavBar = styled.div`
  background-color: #3b74d7;
  height: 60px;
  min-height: 60px;
  display: flex;
  justify-content: flex-end;
  padding-right: 80px;

  @media only screen and (max-width : 768px) {
    justify-content: center;
    padding-right: 0;
    height: 50px;
    min-height: 50px;
  }
`

// Nav link wrapper; the inner selectors force white text across all anchor
// pseudo-classes.
const NavBarLink = styled.span`
  color: white !important;
  font-size: 22px;
  align-self: center;
  padding: 0 10px;

  > a {
    text-decoration: none;

    &:-webkit-any-link {
      color: white !important;
    }

    &:link, &:visited, &:focus, &:hover, &:active {
      color: white;
    }
  }
`

const Content = styled.div`
  text-align: center;
  background-color: #fbfbfb;
  height: 100%;

  @media only screen and (max-width : 768px) {
    padding-bottom: 25px;
    height: initial;
  }
`

const Heading = styled.div`
  padding-top: 4%;
`

const InstallBox = styled.div`
  padding: 12px 0;
  margin: 10px 0;
  background-color: white;
  border-top: 1px dashed #3b74d7;
  border-bottom: 1px dashed #3b74d7;
`

const FeaturesContainer = styled.div`
  display: flex;
  justify-content: center;
  flex-wrap: wrap;
`

const Feature = styled.div`
  flex: 1;
  padding: 0 10px;
  max-width: 380px;

  @media only screen and (max-width : 768px) {
    flex-basis: 100%;
  }
`

const Footer = styled.div`
  text-align: center;
  height: 40px;
  line-height: 40px;
  font-size: 14px;

  @media only screen and (max-width : 768px) {
    background-color: #fbfbfb;
    height: initial;
    line-height: initial;
    padding-bottom: 10px;
  }
`

const FeatureTitle = styled.h3`
`

const RefactoringContainer = styled.div`
  display: flex;
  justify-content: center;
  flex-wrap: wrap;
  padding-top: 25px;

  pre {
    text-align: left;
    align-self: center;
    font-size: 12px;
    background-color: rgba(0,0,0,.02) !important;
    border-radius: 5px;
    padding: 1em !important;
  }
`

const RefactoringText = styled.div`
  align-self: center;
  font-size: 27px;
  padding: 0 20px;
`

// --- Page component ----------------------------------------------------------

const IndexPage = () =>
  <Container>
    <Meta />
    <NavBar>
      <NavBarLink><Link to="/getting-started">Docs</Link></NavBarLink>
      <NavBarLink><a href="https://github.com/gigobyte/purify">Github</a></NavBarLink>
    </NavBar>
    <Content>
      <Heading>
        <Title><img src="https://raw.githubusercontent.com/gigobyte/purify/master/assets/logo.png" alt="Purify" /></Title>
        <Subtitle>Functional programming library for TypeScript</Subtitle>
        <InstallBox>
          $ npm install purify-ts
        </InstallBox>
      </Heading>
      <FeaturesContainer>
        <Feature>
          <FeatureTitle>Utility functions</FeatureTitle>
          Purify provides implementations for common typeclasses like Functor and Monad, along with utility functions that operate on them
        </Feature>
        <Feature>
          <FeatureTitle>Algebraic Data Types</FeatureTitle>
          Purify provides a collection of algebraic data structures that will help you tackle common problems that increase code complexity, such as conditional logic and error handling
        </Feature>
        <Feature>
          <FeatureTitle>Practical approach</FeatureTitle>
          Purify is a library focused on practical functional programming in TypeScript. You will find many examples and tutorials in the <Link to="/getting-started">docs</Link> section of this site.
        </Feature>
      </FeaturesContainer>
      <RefactoringContainer>
        <RefactoringText>Turn</RefactoringText>
        <SyntaxHighlighter language="typescript" style={highlightStyle}>
          {`const getUsers = (country: Country): User[] => {
    if (!country) {
        return []
    }

    const users = getUsersByCountry(country)

    if (!users) {
        return []
    }

    return users
}`}
        </SyntaxHighlighter>
        <RefactoringText>into</RefactoringText>
        {/* NOTE(review): `show` is not an obviously meaningful prop here —
            possibly a typo for showLineNumbers; confirm. */}
        <SyntaxHighlighter language="typescript" style={highlightStyle} show>
          {`import { Maybe } from 'purify-ts/adts/Maybe'

const getUsers = (country: Country): User[] =>
    Maybe.fromNullable(country)
        .chain(getUsersByCountry)
        .toList()`}
        </SyntaxHighlighter>
      </RefactoringContainer>
    </Content>
    <Footer>
      Purify is developed and maintained by <NAME>, distributed under the ISC License.
    </Footer>
  </Container>

export default IndexPage
<reponame>cinar/indicatorts<gh_stars>10-100
// Copyright (c) 2022 <NAME>. All Rights Reserved.
// https://github.com/cinar/indicatorts

import { checkSameLength } from '../../helper/numArray';
import { Trend } from '../trend';

// Acceleration-factor step and cap used by the classic PSAR definition.
const PSAR_AF_STEP = 0.02;
const PSAR_AF_MAX = 0.2;

/**
 * Parabolic SAR result object.
 */
export interface ParabolicSar {
  trends: Trend[];
  psar: number[];
}

/**
 * Parabolic SAR. It is a popular technical indicator for identifying the trend
 * and as a trailing stop.
 *
 * PSAR = PSAR[i - 1] - ((PSAR[i - 1] - EP) * AF)
 *
 * If the trend is Falling:
 *  - PSAR is the maximum of PSAR or the previous two high values.
 *  - If the current high is greather than or equals to PSAR, use EP.
 *
 * If the trend is Rising:
 *  - PSAR is the minimum of PSAR or the previous two low values.
 *  - If the current low is less than or equals to PSAR, use EP.
 *
 * If PSAR is greater than the closing, trend is falling, and the EP
 * is set to the minimum of EP or the low.
 *
 * If PSAR is lower than or equals to the closing, trend is rising, and the EP
 * is set to the maximum of EP or the high.
 *
 * If the trend is the same, and AF is less than 0.20, increment it by 0.02.
 * If the trend is not the same, set AF to 0.02.
 *
 * Based on video https://www.youtube.com/watch?v=MuEpGBAH7pw&t=0s.
 *
 * @param highs high values.
 * @param lows low values.
 * @param closings closing values.
 * @return psar result.
 */
export function parabolicSar(
  highs: number[],
  lows: number[],
  closings: number[]
): ParabolicSar {
  checkSameLength(highs, lows, closings);

  const trends = new Array<Trend>(highs.length);
  const psar = new Array<number>(highs.length);

  // Seed: assume a falling trend starting at the first high, with the
  // extreme point (EP) at the first low.
  trends[0] = Trend.FALLING;
  psar[0] = highs[0];

  let af = PSAR_AF_STEP;
  let ep = lows[0];

  for (let i = 1; i < psar.length; i++) {
    // Core recurrence: pull PSAR toward the extreme point by AF.
    psar[i] = psar[i - 1] - (psar[i - 1] - ep) * af;

    if (trends[i - 1] === Trend.FALLING) {
      // Falling: PSAR may not drop below the previous two highs; a breach
      // by the current high snaps PSAR to the extreme point.
      psar[i] = Math.max(psar[i], highs[i - 1]);
      if (i > 1) {
        psar[i] = Math.max(psar[i], highs[i - 2]);
      }

      if (highs[i] >= psar[i]) {
        psar[i] = ep;
      }
    } else {
      // Rising: mirror image using the previous two lows.
      psar[i] = Math.min(psar[i], lows[i - 1]);
      if (i > 1) {
        psar[i] = Math.min(psar[i], lows[i - 2]);
      }

      if (lows[i] <= psar[i]) {
        psar[i] = ep;
      }
    }

    // Determine the new trend from PSAR vs closing, updating the extreme
    // point accordingly; prevEp detects whether a new extreme was made.
    const prevEp = ep;

    if (psar[i] > closings[i]) {
      trends[i] = Trend.FALLING;
      ep = Math.min(ep, lows[i]);
    } else {
      trends[i] = Trend.RISING;
      ep = Math.max(ep, highs[i]);
    }

    // Reset AF on a trend flip; otherwise accelerate (up to the cap) only
    // when a new extreme point was set this bar.
    if (trends[i] !== trends[i - 1]) {
      af = PSAR_AF_STEP;
    } else if (prevEp !== ep && af < PSAR_AF_MAX) {
      af += PSAR_AF_STEP;
    }
  }

  return {
    trends,
    psar,
  };
}
#!/bin/bash
# Install the Python single-cell analysis stack without caching wheels,
# upgrading any already-installed versions.
py_packages=(
    cellphonedb
    scanpy
    phate
)
python3 -m pip --no-cache-dir install --upgrade "${py_packages[@]}"

# associated with PHATE - python is required
R -e 'install.packages("phateR")'
"""pytest suite for the pybind11 sequences_and_iterators test module.

Exercises generalized iterators, slicing, the sequence protocol, map
iteration, and passing Python iterators into C++. The assertions on
ConstructorStats are order-sensitive (they track live-object counts),
so statement order must not be changed.
"""
import pytest

from pybind11_tests import sequences_and_iterators as m
from pybind11_tests import ConstructorStats


def isclose(a, b, rel_tol=1e-05, abs_tol=0.0):
    """Like math.isclose() from Python 3.5"""
    return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)


def allclose(a_list, b_list, rel_tol=1e-05, abs_tol=0.0):
    # Element-wise isclose over two equally long sequences.
    return all(isclose(a, b, rel_tol=rel_tol, abs_tol=abs_tol) for a, b in zip(a_list, b_list))


def test_generalized_iterators():
    # nonzero() yields pairs until the first pair containing a zero.
    assert list(m.IntPairs([(1, 2), (3, 4), (0, 5)]).nonzero()) == [(1, 2), (3, 4)]
    assert list(m.IntPairs([(1, 2), (2, 0), (0, 3), (4, 5)]).nonzero()) == [(1, 2)]
    assert list(m.IntPairs([(0, 3), (1, 2), (3, 4)]).nonzero()) == []

    # nonzero_keys() yields only the first element of each such pair.
    assert list(m.IntPairs([(1, 2), (3, 4), (0, 5)]).nonzero_keys()) == [1, 3]
    assert list(m.IntPairs([(1, 2), (2, 0), (0, 3), (4, 5)]).nonzero_keys()) == [1]
    assert list(m.IntPairs([(0, 3), (1, 2), (3, 4)]).nonzero_keys()) == []

    # __next__ must continue to raise StopIteration
    it = m.IntPairs([(0, 0)]).nonzero()
    for _ in range(3):
        with pytest.raises(StopIteration):
            next(it)

    it = m.IntPairs([(0, 0)]).nonzero_keys()
    for _ in range(3):
        with pytest.raises(StopIteration):
            next(it)


def test_sliceable():
    # Sliceable echoes back the normalized (start, stop, step) of a slice.
    sliceable = m.Sliceable(100)
    assert sliceable[::] == (0, 100, 1)
    assert sliceable[10::] == (10, 100, 1)
    assert sliceable[:10:] == (0, 10, 1)
    assert sliceable[::10] == (0, 100, 10)
    assert sliceable[-10::] == (90, 100, 1)
    assert sliceable[:-10:] == (0, 90, 1)
    assert sliceable[::-10] == (99, -1, -10)
    assert sliceable[50:60:1] == (50, 60, 1)
    assert sliceable[50:60:-1] == (50, 60, -1)


def test_sequence():
    # ConstructorStats tracks constructions/destructions of m.Sequence;
    # the alive() checks below depend on the exact del order.
    cstats = ConstructorStats.get(m.Sequence)

    s = m.Sequence(5)
    assert cstats.values() == ['of size', '5']

    assert "Sequence" in repr(s)
    assert len(s) == 5
    assert s[0] == 0 and s[3] == 0
    assert 12.34 not in s
    s[0], s[3] = 12.34, 56.78
    assert 12.34 in s
    assert isclose(s[0], 12.34) and isclose(s[3], 56.78)

    rev = reversed(s)
    assert cstats.values() == ['of size', '5']

    rev2 = s[::-1]
    assert cstats.values() == ['of size', '5']

    it = iter(m.Sequence(0))
    for _ in range(3):
        # __next__ must continue to raise StopIteration
        with pytest.raises(StopIteration):
            next(it)
    assert cstats.values() == ['of size', '0']

    expected = [0, 56.78, 0, 0, 12.34]
    assert allclose(rev, expected)
    assert allclose(rev2, expected)
    assert rev == rev2

    # Slice assignment from another Sequence (built from a std::vector).
    rev[0::2] = m.Sequence([2.0, 2.0, 2.0])
    assert cstats.values() == ['of size', '3', 'from std::vector']

    assert allclose(rev, [2, 56.78, 2, 0, 2])

    assert cstats.alive() == 4
    del it
    assert cstats.alive() == 3
    del s
    assert cstats.alive() == 2
    del rev
    assert cstats.alive() == 1
    del rev2
    assert cstats.alive() == 0

    assert cstats.values() == []
    assert cstats.default_constructions == 0
    assert cstats.copy_constructions == 0
    assert cstats.move_constructions >= 1
    assert cstats.copy_assignments == 0
    assert cstats.move_assignments == 0


def test_map_iterator():
    # StringMap supports mapping-style access plus keys/items iteration.
    sm = m.StringMap({'hi': 'bye', 'black': 'white'})
    assert sm['hi'] == 'bye'
    assert len(sm) == 2
    assert sm['black'] == 'white'
    with pytest.raises(KeyError):
        assert sm['orange']
    sm['orange'] = 'banana'
    assert sm['orange'] == 'banana'

    expected = {'hi': 'bye', 'black': 'white', 'orange': 'banana'}
    for k in sm:
        assert sm[k] == expected[k]
    for k, v in sm.items():
        assert v == expected[k]

    it = iter(m.StringMap({}))
    for _ in range(3):
        # __next__ must continue to raise StopIteration
        with pytest.raises(StopIteration):
            next(it)


def test_python_iterator_in_cpp():
    # Python iterables/iterators consumed on the C++ side.
    t = (1, 2, 3)
    assert m.object_to_list(t) == [1, 2, 3]
    assert m.object_to_list(iter(t)) == [1, 2, 3]
    assert m.iterator_to_list(iter(t)) == [1, 2, 3]

    with pytest.raises(TypeError) as excinfo:
        m.object_to_list(1)
    assert "object is not iterable" in str(excinfo.value)

    with pytest.raises(TypeError) as excinfo:
        m.iterator_to_list(1)
    assert "incompatible function arguments" in str(excinfo.value)

    def bad_next_call():
        raise RuntimeError("py::iterator::advance() should propagate errors")

    # iter(callable, sentinel) form: the callable raises, and the error
    # must propagate unchanged through py::iterator::advance().
    with pytest.raises(RuntimeError) as excinfo:
        m.iterator_to_list(iter(bad_next_call, None))
    assert str(excinfo.value) == "py::iterator::advance() should propagate errors"

    lst = [1, None, 0, None]
    assert m.count_none(lst) == 2
    assert m.find_none(lst) is True
    assert m.count_nonzeros({"a": 0, "b": 1, "c": 2}) == 2

    r = range(5)
    assert all(m.tuple_iterator(tuple(r)))
    assert all(m.list_iterator(list(r)))
    assert all(m.sequence_iterator(r))


def test_iterator_passthrough():
    """#181: iterator passthrough did not compile"""
    from pybind11_tests.sequences_and_iterators import iterator_passthrough

    assert list(iterator_passthrough(iter([3, 5, 7, 9, 11, 13, 15]))) == [3, 5, 7, 9, 11, 13, 15]


def test_iterator_rvp():
    """#388: Can't make iterators via make_iterator() with different r/v policies """
    import pybind11_tests.sequences_and_iterators as m

    assert list(m.make_iterator_1()) == [1, 2, 3]
    assert list(m.make_iterator_2()) == [1, 2, 3]
    # Different return-value policies must yield distinct iterator types.
    assert not isinstance(m.make_iterator_1(), type(m.make_iterator_2()))
package io.opensphere.wms.envoy;

import io.opensphere.mantle.data.ActivationListener;
import io.opensphere.server.services.ServerConnectionParams;
import io.opensphere.wms.util.WMSQueryTracker;

/**
 * Interface to a WMS server envoy.
 */
public interface WMSEnvoy
{
    /**
     * Gets the active layer change listener, to activate or deactivate the
     * state layers.
     *
     * @return The active change listener.
     */
    ActivationListener getActiveLayerChangeListener();

    /**
     * Gets the query tracker for this layer.
     *
     * @return The query tracker.
     */
    WMSQueryTracker getQueryTracker();

    /**
     * Gets the server's connection configuration.
     *
     * @return The connection configuration.
     */
    ServerConnectionParams getServerConnectionConfig();

    /**
     * Get the WMS version used in requests made by this envoy.
     *
     * @return the WMS Version (either "1.1.1" or "1.3.0").
     */
    String getWMSVersion();
}
def find_longest_sequence(arr):
    """Find the longest run of consecutive 1's in a binary array.

    Fixes a bug in the original implementation: the post-loop handling
    recorded the trailing run without checking that it was a run of 1's,
    so an array ending in 0's (e.g. [1, 0, 0, 0]) reported the zeros as
    the longest sequence.

    Args:
        arr: sequence of 0/1 values.

    Returns:
        (start_index, end_index): half-open [start, end) bounds of the
        longest run of 1's. Ties keep the earliest run. Returns (0, 0)
        when the array contains no 1's.
    """
    start_index = 0
    end_index = 0
    longest_length = 0

    # Index where the current run of 1's began, or None while not in a run.
    run_start = None

    for i, value in enumerate(arr):
        if value == 1:
            if run_start is None:
                run_start = i
        elif run_start is not None:
            # A run of 1's just ended at index i (exclusive).
            if i - run_start > longest_length:
                longest_length = i - run_start
                start_index = run_start
                end_index = i
            run_start = None

    # Handle a run of 1's that extends to the end of the array.
    if run_start is not None and len(arr) - run_start > longest_length:
        longest_length = len(arr) - run_start
        start_index = run_start
        end_index = len(arr)

    return (start_index, end_index)
// Copyright 2007, 2008, 2009, 2013 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry5.internal.services;

import java.util.List;

import org.apache.tapestry5.ContentType;
import org.apache.tapestry5.MarkupWriter;
import org.apache.tapestry5.dom.DefaultMarkupModel;
import org.apache.tapestry5.dom.Html5MarkupModel;
import org.apache.tapestry5.dom.MarkupModel;
import org.apache.tapestry5.dom.XMLMarkupModel;
import org.apache.tapestry5.internal.parser.DTDToken;
import org.apache.tapestry5.internal.parser.TemplateToken;
import org.apache.tapestry5.internal.parser.TokenType;
import org.apache.tapestry5.internal.structure.Page;
import org.apache.tapestry5.model.ComponentModel;
import org.apache.tapestry5.services.MarkupWriterFactory;
import org.apache.tapestry5.services.pageload.ComponentRequestSelectorAnalyzer;
import org.apache.tapestry5.services.pageload.ComponentResourceSelector;

/**
 * Creates {@link MarkupWriter} instances, choosing a {@link MarkupModel}
 * based on the response content type (HTML vs. XML), whether the response
 * is a full page or a partial (Ajax) update, and whether the page's
 * template declares an HTML5 doctype.
 */
public class MarkupWriterFactoryImpl implements MarkupWriterFactory
{
    private final PageContentTypeAnalyzer pageContentTypeAnalyzer;

    private final RequestPageCache cache;

    private final ComponentTemplateSource templateSource;

    private final ComponentRequestSelectorAnalyzer componentRequestSelectorAnalyzer;

    // One shared, immutable model instance per (markup dialect, partial) combination.
    private final MarkupModel htmlModel = new DefaultMarkupModel();

    private final MarkupModel xmlModel = new XMLMarkupModel();

    private final MarkupModel htmlPartialModel = new DefaultMarkupModel(true);

    private final MarkupModel xmlPartialModel = new XMLMarkupModel(true);

    private final MarkupModel html5Model = new Html5MarkupModel();

    private final MarkupModel html5PartialModel = new Html5MarkupModel(true);

    public MarkupWriterFactoryImpl(PageContentTypeAnalyzer pageContentTypeAnalyzer, RequestPageCache cache,
                                   ComponentTemplateSource templateSource,
                                   ComponentRequestSelectorAnalyzer componentRequestSelectorAnalyzer)
    {
        this.pageContentTypeAnalyzer = pageContentTypeAnalyzer;
        this.cache = cache;
        this.templateSource = templateSource;
        this.componentRequestSelectorAnalyzer = componentRequestSelectorAnalyzer;
    }

    /** Full-response writer; HTML5 detection is not performed for this overload. */
    public MarkupWriter newMarkupWriter(ContentType contentType)
    {
        return constructMarkupWriter(contentType, false, false);
    }

    /** Partial (Ajax) writer; HTML5 detection is not performed for this overload. */
    public MarkupWriter newPartialMarkupWriter(ContentType contentType)
    {
        return constructMarkupWriter(contentType, true, false);
    }

    /**
     * Selects the markup model matching the mime type / partial / HTML5
     * combination and wraps it in a writer.
     */
    private MarkupWriter constructMarkupWriter(ContentType contentType, boolean partial, boolean HTML5)
    {
        final String mimeType = contentType.getMimeType();

        boolean isHTML = mimeType.equalsIgnoreCase("text/html");

        MarkupModel model;

        if (isHTML)
            model = HTML5 ? (partial ? html5PartialModel : html5Model) : (partial ? htmlPartialModel : htmlModel);
        else
            model = partial ? xmlPartialModel : xmlModel;

        // The charset parameter sets the encoding attribute of the XML declaration, if
        // not null and if using the XML model.

        return new MarkupWriterImpl(model, contentType.getCharset(), mimeType);
    }

    public MarkupWriter newMarkupWriter(String pageName)
    {
        Page page = cache.get(pageName);

        return newMarkupWriter(page);
    }

    /**
     * Returns true when the page template's DOCTYPE is exactly
     * {@code <!DOCTYPE html>} (name "html", no public or system id),
     * i.e. the HTML5 doctype.
     */
    private boolean hasHTML5Doctype(Page page)
    {
        ComponentModel componentModel = page.getRootComponent().getComponentResources().getComponentModel();

        ComponentResourceSelector selector = componentRequestSelectorAnalyzer.buildSelectorForRequest();

        List<TemplateToken> tokens = templateSource.getTemplate(componentModel, selector).getTokens();

        DTDToken dtd = null;

        for (TemplateToken token : tokens)
        {
            if (token.getTokenType() == TokenType.DTD)
            {
                dtd = (DTDToken) token;
                break;
            }
        }

        return dtd != null && dtd.name.equalsIgnoreCase("html") && dtd.publicId == null && dtd.systemId == null;
    }

    public MarkupWriter newMarkupWriter(Page page)
    {
        boolean isHTML5 = hasHTML5Doctype(page);

        ContentType contentType = pageContentTypeAnalyzer.findContentType(page);

        return constructMarkupWriter(contentType, false, isHTML5);
    }

    public MarkupWriter newPartialMarkupWriter(Page page)
    {
        boolean isHTML5 = hasHTML5Doctype(page);

        ContentType contentType = pageContentTypeAnalyzer.findContentType(page);

        return constructMarkupWriter(contentType, true, isHTML5);
    }

    public MarkupWriter newPartialMarkupWriter(String pageName)
    {
        Page page = cache.get(pageName);

        return newPartialMarkupWriter(page);
    }
}
<filename>collins/collins.go package collins import ( "encoding/json" "fmt" "io/ioutil" "log" "net/http" "net/url" ) const ( defaultMaxSize = "500" ) var defaultParams = &url.Values{} func init() { defaultParams.Set("size", defaultMaxSize) } type AssetState struct { ID int `json:"ID"` Status struct { Name string `json:"NAME"` Description string `json:"DESCRIPTION"` } `json:"STATUS,omitempty"` Name string `json:"NAME"` Label string `json:"LABEL,omitempty"` Description string `json:"DESCRIPTION,omitempty"` } type Status struct { Status string `json:"status"` } // incomplete type Asset struct { Status Data AssetDetails `json:"data"` } type AssetFlat struct { Status Data AssetCommon `json:"data"` } type AssetCommon struct { ID int `json:"ID"` Tag string `json:"TAG"` State AssetState Status string `json:"STATUS"` Type string `json:"TYPE"` Updated string `json:"UPDATED"` Created string `json:"CREATED"` Deleted string `json:"DELETED"` } type AssetDetails struct { Asset AssetCommon `json:"ASSET"` Attributes map[string]map[string]string `json:"ATTRIBS"` IPMI struct { Address string `json:"IPMI_ADDRESS"` Username string `json:"IPMI_USERNAME"` Password string `json:"IP<PASSWORD>"` } `json:"IPMI"` Addresses []AssetAddress `json:"ADDRESSES"` } type AssetAddress struct { ID int `json:"ID"` Pool string `json:"POOL"` Address string `json:"ADDRESS"` Netmask string `json:"NETMASK"` Gateway string `json:"GATEWAY"` } type AssetAddresses struct { Status Data struct { Addresses []AssetAddress } } type Assets struct { Status Data struct { Data []AssetDetails `json:"data"` } `json:"Data"` } // Error implements the error interface type Error struct { err string StatusCode int } func (e Error) Error() string { return fmt.Sprintf("%d/%s: %s", e.StatusCode, http.StatusText(e.StatusCode), e.err) } type Client struct { client *http.Client user string password string url string } func New(user, password, url string) *Client { return &Client{ client: &http.Client{}, user: user, password: 
password, url: url, } } func (c *Client) Request(method string, path string, params *url.Values) ([]byte, error) { url := c.url + path if params != nil { url = url + "?" + params.Encode() } req, err := http.NewRequest(method, url, nil) if err != nil { return nil, err } log.Printf("> %s", req.URL) req.SetBasicAuth(c.user, c.password) req.Close = true resp, err := c.client.Do(req) if err != nil { return nil, err } defer resp.Body.Close() body, err := ioutil.ReadAll(resp.Body) if err != nil { return nil, err } if resp.StatusCode < 200 || resp.StatusCode >= 400 { return body, Error{err: string(body), StatusCode: resp.StatusCode} } return body, nil } func (c *Client) GetAssetAddresses(tag string) (*AssetAddresses, error) { body, err := c.Request("GET", "/asset/"+tag+"/addresses", defaultParams) if err != nil { return nil, err } adresses := &AssetAddresses{} return adresses, json.Unmarshal(body, &adresses) } func (c *Client) GetAsset(tag string) (*Asset, error) { if tag == "" { return nil, fmt.Errorf("Tag required") } body, err := c.Request("GET", "/asset/"+tag, nil) if err != nil { if cerr, ok := err.(Error); ok { if cerr.StatusCode == http.StatusNotFound { // not an error, just no asset return nil, nil } } return nil, err } asset := &Asset{} return asset, json.Unmarshal(body, &asset) } func (c *Client) GetAssetFromAddress(addr string) (*Asset, error) { body, err := c.Request("GET", "/asset/with/address/"+addr, nil) if err != nil { return nil, err } if body == nil { return nil, nil } asset := &AssetFlat{} if err := json.Unmarshal(body, &asset); err != nil { return nil, err } return c.GetAsset(asset.Data.Tag) } func (c *Client) FindAllAssets() (*Assets, error) { return c.FindAssets(defaultParams) } func (c *Client) FindAssets(params *url.Values) (*Assets, error) { if params.Get("size") == "" { params.Set("size", defaultMaxSize) } body, err := c.Request("GET", "/assets", params) if err != nil { return nil, err } assets := &Assets{} return assets, json.Unmarshal(body, 
&assets) } func (c *Client) AddAssetLog(tag, mtype, message string) error { v := url.Values{} v.Set("message", message) v.Set("type", mtype) req, err := http.NewRequest("PUT", c.url+"/asset/"+tag+"/log?"+v.Encode(), nil) if err != nil { return err } log.Printf("> %s", req.URL) req.SetBasicAuth(c.user, c.password) resp, err := c.client.Do(req) if err != nil { return err } if resp.StatusCode != http.StatusCreated { return fmt.Errorf("Status code %d unexpected", resp.StatusCode) } return nil } func (c *Client) SetStatus(tag, status, reason string) error { params := &url.Values{} params.Set("status", status) params.Set("reason", reason) body, err := c.Request("POST", "/asset/"+tag+"/status", params) if err != nil { return err } s := &Status{} if err := json.Unmarshal(body, &s); err != nil { return fmt.Errorf("Couldn't unmarshal %s: %s", body, err) } if s.Status != "success:ok" { return fmt.Errorf("Couldn't set status to %s", status) } return nil }
""" Helper functions for exercise 1.3 """ import numpy as np import tensorflow as tf def get_clipped_noise(shape, noise_scale=0.1, noise_clip=0.5): """ get normally distributed noise with std=noise_scale, clipped to +- noise_clip """ noise = noise_scale*tf.random.normal(shape=shape) return tf.clip_by_value(noise, -noise_clip, noise_clip) def get_pi_noise_clipped(pi, noise_scale=0.1, noise_clip=0.5, act_limit=10.): """ Add clipped noise to sampled target action, and then clip to stay within the valid value of the action """ clipped_noise = get_clipped_noise( tf.shape(pi), noise_scale=noise_scale, noise_clip=noise_clip) pi_noise = pi + clipped_noise pi_noise_clipped = tf.clip_by_value(pi_noise, -act_limit, act_limit) return pi_noise_clipped def get_q_target(q1, q2, r, d, gamma=0.99): """ calculate q target """ return r + gamma*(1 - d)*tf.minimum(q1, q2, name='min_q')
def is_prime(n):
    """Return True if n is a prime number, False otherwise.

    Numbers <= 1 (including negatives) are not prime. Trial division is
    performed only up to sqrt(n), testing 2 and then odd candidates,
    which is O(sqrt(n)) instead of the original O(n) scan.
    """
    if n <= 1:
        return False
    if n < 4:
        # 2 and 3 are prime.
        return True
    if n % 2 == 0:
        return False
    i = 3
    while i * i <= n:
        if n % i == 0:
            return False
        i += 2
    return True
// Package terraform wraps running the `terraform` CLI against a prepared
// working directory: template substitution, init, apply, destroy, output.
package terraform

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"os/exec"
	"os/signal"
	"path"
	"strings"
	"syscall"
	"time"

	"github.com/pkg/errors"

	"github.com/k11n/konstellation/pkg/utils/files"
)

// Flags are simple on/off options applicable to an Action.
type Flags string

const (
	// OptionDisplayOutput connects terraform's stdout to this process.
	OptionDisplayOutput Flags = "display_output"
	// OptionRequireApproval runs terraform interactively (no -auto-approve).
	OptionRequireApproval Flags = "require_approval"
)

// Action represents one terraform working directory plus the variables and
// environment used when invoking the terraform CLI in it.
type Action struct {
	WorkingDir string
	Vars       []Var

	values map[string]string // resolved values keyed by Var.Name
	env    map[string]string // extra environment variables for terraform

	displayOutput   bool
	requireApproval bool
	initialized     bool // set once `terraform init` has run successfully
}

// Var declares one variable the action expects.
type Var struct {
	Name         string
	CreationOnly bool // required for Apply but not passed to Destroy
	TemplateOnly bool // only substituted into template files, never passed via -var
}

// Values maps declared Vars to their concrete values.
type Values map[Var]interface{}

// Option mutates an Action during construction.
type Option interface {
	Apply(*Action)
}

// Apply turns a flag into the corresponding Action setting.
func (f Flags) Apply(a *Action) {
	switch f {
	case OptionDisplayOutput:
		a.displayOutput = true
	case OptionRequireApproval:
		a.requireApproval = true
	}
}

// Apply stores the values on the Action; non-string values are JSON-encoded.
func (v Values) Apply(a *Action) {
	if a.values == nil {
		a.values = make(map[string]string)
	}
	for key, val := range v {
		// if not a string, encode to json
		if strVal, ok := val.(string); ok {
			a.values[key.Name] = strVal
		} else {
			data, _ := json.Marshal(val)
			a.values[key.Name] = string(data)
		}
	}
}

// EnvVar supplies additional environment variables for terraform runs.
type EnvVar map[string]string

// Apply merges the environment variables into the Action.
func (v EnvVar) Apply(a *Action) {
	if a.env == nil {
		a.env = make(map[string]string)
	}
	for key, val := range v {
		a.env[key] = val
	}
}

// NewTerraformAction builds an Action for dir with the declared vars and
// applies the given options.
func NewTerraformAction(dir string, vars []Var, opts ...Option) *Action {
	a := &Action{
		WorkingDir: dir,
		Vars:       vars,
	}
	for _, o := range opts {
		o.Apply(a)
	}
	return a
}

// Option applies an additional option after construction; returns the
// Action for chaining.
func (a *Action) Option(opt Option) *Action {
	opt.Apply(a)
	return a
}

// replaceTemplates substitutes values into every top-level, non-hidden file
// in the working directory.
func (a *Action) replaceTemplates() error {
	files, err := ioutil.ReadDir(a.WorkingDir)
	if err != nil {
		return err
	}

	for _, fi := range files {
		if strings.HasPrefix(fi.Name(), ".") {
			continue
		} else if !fi.IsDir() {
			if err = a.replaceTemplate(path.Join(a.WorkingDir, fi.Name())); err != nil {
				return err
			}
		}
	}
	return nil
}

// replaceTemplate rewrites occurrences of the literal placeholder
// `$${name}` with the value for each stored key; the file is only written
// back when at least one replacement happened.
func (a *Action) replaceTemplate(filePath string) error {
	content, err := ioutil.ReadFile(filePath)
	if err != nil {
		return err
	}

	s := string(content)
	hasReplacements := false
	for key, val := range a.values {
		search := fmt.Sprintf("$${%s}", key)
		if strings.Contains(s, search) {
			hasReplacements = true
			s = strings.ReplaceAll(s, search, val)
		}
	}

	if hasReplacements {
		return ioutil.WriteFile(filePath, []byte(s), files.DefaultFileMode)
	}
	return nil
}

// Apply runs `terraform apply`, passing every non-template-only var via
// -var. Approval is skipped unless OptionRequireApproval was set.
func (a *Action) Apply() error {
	if err := a.checkRequiredVars(true); err != nil {
		return err
	}

	args := []string{
		"apply",
		"-compact-warnings",
	}
	if !a.requireApproval {
		args = append(args, "-auto-approve")
	}

	for _, v := range a.Vars {
		if v.TemplateOnly {
			continue
		}
		args = append(args, "-var")
		args = append(args, fmt.Sprintf("%s=%s", v.Name, a.values[v.Name]))
	}

	if err := a.runAction(args...); err != nil {
		return errors.Wrap(err, "error with terraform apply")
	}

	return nil
}

// GetOutput returns `terraform output -json` as raw bytes, initializing
// the working directory first if needed.
func (a *Action) GetOutput() (content []byte, err error) {
	// first initialize terraform
	if err = a.initIfNeeded(); err != nil {
		return
	}

	buf := bytes.NewBuffer(nil)
	cmd := exec.Command("terraform", "output", "-json")
	cmd.Dir = a.WorkingDir
	cmd.Stderr = os.Stderr
	cmd.Stdout = buf
	cmd.Env = a.getEnvVars()
	if err = cmd.Run(); err != nil {
		return
	}

	content = buf.Bytes()
	return
}

// Destroy runs `terraform destroy`. Creation-only and template-only vars
// are not passed.
func (a *Action) Destroy() error {
	if err := a.checkRequiredVars(false); err != nil {
		return err
	}

	args := []string{
		"destroy",
	}
	if !a.requireApproval {
		args = append(args, "-auto-approve")
	}

	for _, v := range a.Vars {
		if v.CreationOnly || v.TemplateOnly {
			continue
		}
		args = append(args, "-var")
		args = append(args, fmt.Sprintf("%s=%s", v.Name, a.values[v.Name]))
	}

	return a.runAction(args...)
}

// RemoveDir deletes the working directory and everything in it.
func (a *Action) RemoveDir() error {
	return os.RemoveAll(a.WorkingDir)
}

// checkRequiredVars verifies a value is present for every declared var;
// creation-only vars are exempt when destroying (creation == false).
func (a *Action) checkRequiredVars(creation bool) error {
	for _, v := range a.Vars {
		if v.CreationOnly && !creation {
			// creation vars don't have to be passed in during destroys
			continue
		}
		if _, ok := a.values[v.Name]; !ok {
			return fmt.Errorf("value not found for required var: %s", v.Name)
		}
	}
	return nil
}

// runAction runs `terraform <args...>` in the working directory, wiring up
// stdio according to the display/approval options and force-killing the
// process group if the user interrupts and terraform has not exited
// within 10 seconds.
func (a *Action) runAction(args ...string) error {
	// first initialize terraform
	if err := a.initIfNeeded(); err != nil {
		return err
	}

	connectStdOut := false
	connectStdIn := false
	if a.displayOutput || a.requireApproval {
		connectStdOut = true
	}
	if a.requireApproval {
		connectStdIn = true
	}

	fmt.Printf("Generated terraform plan: %s\n", a.WorkingDir)
	fmt.Printf("Running: terraform %s\n", strings.Join(args, " "))
	cmd := exec.Command("terraform", args...)
	cmd.Dir = a.WorkingDir
	cmd.Stderr = os.Stderr
	cmd.Env = a.getEnvVars()
	if connectStdIn {
		cmd.Stdin = os.Stdin
	}
	if connectStdOut {
		cmd.Stdout = os.Stdout
	}

	// intercept CTL+C and kill underlying processes
	sigchan := make(chan os.Signal, 1)
	signal.Notify(sigchan, os.Interrupt, syscall.SIGTERM)
	go func() {
		<-sigchan
		if cmd.Process == nil {
			return
		}
		// give terraform a grace period to clean up before force-killing
		time.Sleep(10 * time.Second)
		cmd.Process.Kill()
		syscall.Kill(-cmd.Process.Pid, syscall.SIGKILL)
	}()
	return cmd.Run()
}

// initIfNeeded substitutes templates and runs `terraform init` once per
// Action lifetime.
func (a *Action) initIfNeeded() error {
	if a.initialized {
		return nil
	}

	fmt.Println("Preparing terraform...")
	if err := a.replaceTemplates(); err != nil {
		return err
	}

	cmd := exec.Command("terraform", "init")
	cmd.Dir = a.WorkingDir
	cmd.Env = a.getEnvVars()
	//cmd.Stderr = os.Stderr
	//cmd.Stdout = os.Stdout
	if err := cmd.Run(); err != nil {
		return errors.Wrapf(err, "Could not init terraform. Path: %s", a.WorkingDir)
	}

	a.initialized = true
	return nil
}

// getEnvVars renders the stored env map as KEY=VALUE strings for exec.Cmd.
// NOTE(review): this replaces (not extends) the parent environment when
// a.env is non-empty is not the case — an empty slice means the child
// inherits os.Environ(); a non-empty slice replaces it entirely. Confirm
// callers always pass a complete environment via EnvVar.
func (a *Action) getEnvVars() []string {
	envVars := make([]string, 0, len(a.env))
	for key, val := range a.env {
		envVars = append(envVars, fmt.Sprintf("%s=%s", key, val))
	}
	return envVars
}
#!/bin/bash
# Build script for OpenApoc: clones pinned revisions of the game and its
# toolchain dependencies (boost, CMake, libunwind), builds them, then
# stages the binaries and data into the distribution directory.
# NOTE(review): relies on $diststart being set by the calling environment —
# confirm it is exported before this script runs.

# CLONE PHASE
git clone https://github.com/OpenApoc/OpenApoc.git source
pushd source
# Pin to a known-good commit and apply local compatibility patches.
git checkout -f 9183a08d34c90d2b9e91c1c941aa8459f3f5393f
git am < ../patches/0001-Workaround-for-missing-PRId64.patch
git am < ../patches/0002-Workaround-for-missing-PRIu64.patch
git submodule update --init --recursive
popd
git clone https://github.com/boostorg/boost boost
pushd boost
git checkout -f 62a4b7f
git submodule update --init --recursive
popd
git clone https://github.com/Kitware/CMake.git cmake
pushd cmake
git checkout -f 39c6ac5
popd
git clone https://github.com/libunwind/libunwind.git libunwind
pushd libunwind
git checkout -f 1847559
popd

# Local install prefix for the dependencies built below.
readonly pfx="$PWD/local"
mkdir -p "$pfx"

# BUILD PHASE
# Build and install a recent CMake system-wide.
pushd cmake
./bootstrap
make
sudo make install
popd
export CMAKE_ROOT=/usr/local/share/cmake-3.16/
/usr/local/bin/cmake --version

# Build boost into $pfx (external helper script).
./build-boost.sh

# Build libunwind and copy its shared libraries into the local prefix.
pushd libunwind
./autogen.sh
./configure
make
make install
popd
cp -rfv /usr/local/lib/libunwind*.so* "$pfx/lib"
export LD_LIBRARY_PATH="$pfx/lib:$LD_LIBRARY_PATH"

# Build OpenApoc itself against the locally built dependencies; the game
# data ISO is fetched and decompressed into the source tree first.
pushd source
wget http://s2.jonnyh.net/pub/cd_minimal.iso.xz -O data/cd.iso.xz
xz -d data/cd.iso.xz
mkdir build
cd build
/usr/local/bin/cmake \
  -DCMAKE_PREFIX_PATH="$pfx;$pfx/qt5" \
  -DBUILD_LAUNCHER=ON \
  -DBoost_LIBRARY_DIRS="$pfx/lib" \
  ..
make -j "$(nproc)"
popd

# COPY PHASE
# Stage binaries, libraries, and game data into the dist layout; the ISO
# itself is not shipped.
rm -rf "source/data/cd.iso"
mkdir -p "$diststart/7660/dist/lib/"
mkdir -p "$diststart/7660/dist/bin/"
cp -rfv "source/build/bin/OpenApoc" "$diststart/7660/dist/bin/"
cp -rfv "source/build/bin/OpenApoc_Launcher" "$diststart/7660/dist/bin/"
cp -rfv "$pfx/lib/"*.so* "$diststart/7660/dist/lib/"
cp -rfv "source/data" "$diststart/7660/dist/"
cp -rfv "assets/"* "$diststart/7660/dist/"
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2016, 2017 by MemSQL. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import print_function from __future__ import absolute_import import urwid import argparse import curses import logging import sys import pkg_resources from distutils.version import LooseVersion from .database import connect from .DatabasePoller import DatabasePoller from .QueryListBox import QueryListBox from .ResourceMonitor import ResourceMonitor from .WrappingPopUpViewer import WrappingPopUpViewer from .ColumnHeadings import ColumnHeadings from .columns import DetectColumnsMetaOrExit def main(args=None): if args is None: parser = argparse.ArgumentParser(add_help=False) parser.add_argument("-h", "--host", default="127.0.0.1") parser.add_argument("-P", "--port", default=3306, type=int) parser.add_argument("-p", "--password", default="") parser.add_argument("-u", "--user", default="root") parser.add_argument("-v", "--version", action="store_true") parser.add_argument("-?", "--help", action="store_true", help="Show this help message and exit") parser.add_argument("--update-interval", default=3.0, type=float, help="How frequently to update the screen.") args = parser.parse_args() if args.help: parser.print_help() sys.exit(0) elif args.version: print(pkg_resources.require("memsql-top")[0].version) sys.exit(0) try: conn = connect(host=args.host, port=args.port, database="information_schema", 
password=<PASSWORD>.password, user=args.user) except Exception as e: sys.exit("Unexpected error when connecting to database: %s" % e) columnsMeta = DetectColumnsMetaOrExit(conn) # Run any check system queries before we start the DatabasePoller and # start tracking queries. # if not conn.get('select @@forward_aggregator_plan_hash as f').f: sys.exit("forward_aggregator_plan_hash is required") BLACK = 'h16' _BLACK = 'black' BLUE = 'h24' _BLUE = 'light blue' ACCENT_BLUE = 'h74' _ACCENT_BLUE = 'dark cyan' LIGHT_GRAY = 'h255' _LIGHT_GRAY = 'light gray' GRAY = 'h253' _GRAY = 'light gray' TEXT_GRAY = 'h240' _TEXT_GRAY = 'dark gray' HEAD_GRAY = 'h102' _HEAD_GRAY = 'light gray' DARK_GRAY = 'h234' _DARK_GRAY = 'dark gray' WHITE = 'h231' _WHITE = 'white' palette = [ ('popup', _TEXT_GRAY, _WHITE, '', TEXT_GRAY, WHITE), ('head', _HEAD_GRAY, _DARK_GRAY, 'bold', HEAD_GRAY, DARK_GRAY), ('head_key', _ACCENT_BLUE, _DARK_GRAY, 'bold,underline', ACCENT_BLUE, DARK_GRAY), ('head_so', _WHITE, _DARK_GRAY, 'bold,standout', WHITE, DARK_GRAY), ('resource_bar_empty', _GRAY, _BLACK, '', GRAY, BLACK), ('resource_bar', _GRAY, _BLUE, '', GRAY, BLUE), ('foot', _TEXT_GRAY, _LIGHT_GRAY, 'bold', TEXT_GRAY, LIGHT_GRAY), ('foot_key', _ACCENT_BLUE, _LIGHT_GRAY, 'bold,underline', ACCENT_BLUE, LIGHT_GRAY), ('body', _TEXT_GRAY, _WHITE, '', TEXT_GRAY, WHITE), ('body_focus', _TEXT_GRAY, _LIGHT_GRAY, 'underline', TEXT_GRAY, LIGHT_GRAY), ] for tup in [(0, _TEXT_GRAY, TEXT_GRAY), (1, 'light green', 'h77'), (2, 'yellow', 'h220'), (3, 'light magenta', 'h202'), (4, 'light red', 'h160')]: code, old_color, color = tup palette.append(('body_%d' % code, old_color, _WHITE, '', color, WHITE)) palette.append(('body_focus_%d' % code, old_color, _LIGHT_GRAY, 'underline', color, LIGHT_GRAY)) dbpoller = DatabasePoller(args, columnsMeta) column_headings = ColumnHeadings(columnsMeta) resources = ResourceMonitor(columnsMeta.GetMaxCpuTotal(conn), columnsMeta.GetMaxMemTotal(conn)) headerElems = [urwid.Text("MemSQL - MemSQL Top")] 
# 5.7 did not give us enough info for resource bars. if columnsMeta.minimum_version >= LooseVersion("5.8"): headerElems += [urwid.Divider(), resources] headerElems += [urwid.Divider(), column_headings] header = urwid.Pile(headerElems) qlistbox = QueryListBox(columnsMeta) footer = urwid.Columns([ urwid.Text([ ('foot_key', "UP"), ", ", ('foot_key', "DOWN"), ", ", " move view ", ('foot_key', "F#"), " sorts by column ", ('foot_key', "Q"), " exits", ]), urwid.Text("Send feedback to <EMAIL>.", align="right") ]) view = WrappingPopUpViewer(urwid.Frame( urwid.AttrMap(qlistbox, "body"), header=urwid.AttrMap(header, "head"), footer=urwid.AttrMap(footer, "foot"))) urwid.connect_signal(qlistbox, 'sort_column_changed', column_headings.update_sort_column) urwid.connect_signal(qlistbox, 'query_selected', lambda w, q: view.show_popup(w, columnsMeta.GetPopUpText(conn, q))) def handle_keys(input): if input in ('q', 'Q'): raise urwid.ExitMainLoop() if input in qlistbox.sort_keys(): qlistbox.update_sort_column(input) loop = urwid.MainLoop(view, palette, unhandled_input=handle_keys) def update_widgets(plancache, cpu, mem): qlistbox.update_entries(plancache) resources.update_cpu_util(cpu) resources.update_mem_usage(mem) dbpoller.start(loop.watch_pipe(lambda _: update_widgets(*dbpoller.get_database_data()))) try: curses.setupterm() if curses.tigetnum("colors") == 256: loop.screen.set_terminal_properties(colors=256) except curses.error: logging.warn("Failed to identify terminal color support -- falling back to ANSI terminal colors.") logging.warn("Set TERM=xterm-256color or equivalent for best the experience.") loop.run() if __name__ == "__main__": main()
package pl.decerto.drools; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.Objects; /** * @author <NAME> on 25.06.2019 */ public class QxDto { private final String gender; private final int age; private BigDecimal qx; public QxDto(String gender, int age) { this.gender = gender; this.age = age; } public String getGender() { return gender; } public int getAge() { return age; } public BigDecimal getQx() { return qx; } public void setQx(double qx) { this.qx = BigDecimal.valueOf(qx).setScale(6, RoundingMode.HALF_UP); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } QxDto qxDto = (QxDto) o; return age == qxDto.age && Objects.equals(gender, qxDto.gender) && Objects.equals(qx, qxDto.qx); } @Override public int hashCode() { return Objects.hash(gender, age, qx); } }
/*
 * angelscript_InputOutputOps.cpp
 */

#include <angelscript.h>

#include "logging/log_Logger.h"
#include "text/text_StringConversion.h"
#include "text/text_ExternalText.h"
#include "channels/events_TextChannel.h"

#include "primitives/primitives_PrimitivesAccess.h"
#include "primitives/primitives_EventPrims.h"
#include "primitives/primitives_SystemPrims.h"

#include "angelscript_AngelException.h"
#include "angelscript_AEntity.h"
#include "angelscript_ScriptContext.h"
#include "angelscript_ScriptUtilities.h"
#include "angelscript_InputOutputOps.h"

namespace
{
    // Type name exposed to AngelScript and used as the namespace for the
    // room-targeted functions below; also reported in exception messages.
    const std::string AS_OBJECT_TYPE_NAME = "InputOutputOps";

    // Human-readable method signatures, used only for error reporting.
    const std::string EMIT_TO_ROOM_METHOD_SIG =
        "emit_to_room(Entity, string, bool)";
    const std::string BROADCAST_TO_ROOM_METHOD_SIG =
        "broadcast_to_room(Entity, string, bool)";
    const std::string SEND_TO_ENTITY_METHOD_SIG =
        "send_to_entity(Entity, string, bool)";
    const std::string PRINTLN_METHOD_SIG = "println(string)";
    const std::string MPRINTLN_METHOD_SIG = "mprintln(string)";
}

namespace mutgos
{
namespace angelscript
{
    // ----------------------------------------------------------------------
    // Registers all I/O functions with the scripting engine.  The room/entity
    // functions live in the InputOutputOps namespace; println/mprintln are
    // registered in the global namespace.  Returns false if any registration
    // failed (individual failures are logged by check_register_rc).
    bool InputOutputOps::register_methods(asIScriptEngine &engine)
    {
        bool result = true;
        int rc = 0;

        engine.SetDefaultNamespace(AS_OBJECT_TYPE_NAME.c_str());

        // Register the functions
        //
        rc = engine.RegisterGlobalFunction(
            "void emit_to_room(Entity &in room, const string &in text, const bool prepend_self)",
            asFUNCTION(emit_to_room),
            asCALL_GENERIC);
        check_register_rc(rc, __LINE__, result);

        rc = engine.RegisterGlobalFunction(
            "void broadcast_to_room(Entity &in room, const string &in text, const bool prepend_self)",
            asFUNCTION(broadcast_to_room),
            asCALL_GENERIC);
        check_register_rc(rc, __LINE__, result);

        rc = engine.RegisterGlobalFunction(
            "void send_to_entity(Entity &in target, const string &in text, const bool prepend_self)",
            asFUNCTION(send_to_entity),
            asCALL_GENERIC);
        check_register_rc(rc, __LINE__, result);

        // Back to the global namespace for the print helpers.
        engine.SetDefaultNamespace("");

        rc = engine.RegisterGlobalFunction(
            "void println(const string &in text)",
            asFUNCTION(println),
            asCALL_GENERIC);
        check_register_rc(rc, __LINE__, result);

        rc = engine.RegisterGlobalFunction(
            "void mprintln(const string &in text)",
            asFUNCTION(mprintln),
            asCALL_GENERIC);
        check_register_rc(rc, __LINE__, result);

        return result;
    }

    // ----------------------------------------------------------------------
    // Generic-call thunk: sends text to everyone in a room, excluding the
    // requester (final 'true' argument to send_event).  Args: (room, text,
    // prepend_self).  Throws AngelException on null script arguments.
    void InputOutputOps::emit_to_room(asIScriptGeneric *gen_ptr)
    {
        if (not gen_ptr)
        {
            // Engine should never hand us a null generic interface.
            LOG(fatal, "angelscript", "emit_to_room", "gen_ptr is null");
            return;
        }

        asIScriptEngine * const engine_ptr = gen_ptr->GetEngine();
        AEntity * const room_entity = reinterpret_cast<AEntity *>(
            gen_ptr->GetArgObject(0));
        AString * const raw_text = reinterpret_cast<AString *>(
            gen_ptr->GetArgObject(1));
        const bool prepend_self = *(bool*)gen_ptr->GetAddressOfArg(2);

        if ((not room_entity) or (not raw_text))
        {
            throw AngelException(
                "AngelScript passed null pointers to us",
                AS_OBJECT_TYPE_NAME,
                EMIT_TO_ROOM_METHOD_SIG);
        }

        send_event(
            engine_ptr,
            EMIT_TO_ROOM_METHOD_SIG,
            *room_entity,
            true,
            *raw_text,
            prepend_self,
            true);
    }

    // ----------------------------------------------------------------------
    // Generic-call thunk: like emit_to_room, but the requester also receives
    // the text (exclude_requester == false).  Args: (room, text,
    // prepend_self).
    void InputOutputOps::broadcast_to_room(asIScriptGeneric *gen_ptr)
    {
        if (not gen_ptr)
        {
            LOG(fatal, "angelscript", "broadcast_to_room", "gen_ptr is null");
            return;
        }

        asIScriptEngine * const engine_ptr = gen_ptr->GetEngine();
        AEntity * const room_entity = reinterpret_cast<AEntity *>(
            gen_ptr->GetArgObject(0));
        AString * const raw_text = reinterpret_cast<AString *>(
            gen_ptr->GetArgObject(1));
        const bool prepend_self = *(bool*)gen_ptr->GetAddressOfArg(2);

        if ((not room_entity) or (not raw_text))
        {
            throw AngelException(
                "AngelScript passed null pointers to us",
                AS_OBJECT_TYPE_NAME,
                BROADCAST_TO_ROOM_METHOD_SIG);
        }

        send_event(
            engine_ptr,
            BROADCAST_TO_ROOM_METHOD_SIG,
            *room_entity,
            true,
            *raw_text,
            prepend_self,
            false);
    }

    // ----------------------------------------------------------------------
    void
InputOutputOps::send_to_entity(asIScriptGeneric *gen_ptr) { if (not gen_ptr) { LOG(fatal, "angelscript", "send_to_entity", "gen_ptr is null"); return; } asIScriptEngine * const engine_ptr = gen_ptr->GetEngine(); AEntity * const entity = reinterpret_cast<AEntity *>( gen_ptr->GetArgObject(0)); AString * const raw_text = reinterpret_cast<AString *>( gen_ptr->GetArgObject(1)); const bool prepend_self = *(bool*)gen_ptr->GetAddressOfArg(2); if ((not entity) or (not raw_text)) { throw AngelException( "AngelScript passed null pointers to us", AS_OBJECT_TYPE_NAME, SEND_TO_ENTITY_METHOD_SIG); } send_event( engine_ptr, BROADCAST_TO_ROOM_METHOD_SIG, *entity, false, *raw_text, prepend_self); } // ---------------------------------------------------------------------- void InputOutputOps::println(asIScriptGeneric *gen_ptr) { if (not gen_ptr) { LOG(fatal, "angelscript", "println", "gen_ptr is null"); return; } asIScriptEngine * const engine_ptr = gen_ptr->GetEngine(); AString * const raw_text = reinterpret_cast<AString *>( gen_ptr->GetArgObject(0)); if (not raw_text) { throw AngelException( "AngelScript passed null pointers to us", AS_OBJECT_TYPE_NAME, PRINTLN_METHOD_SIG); } text::ExternalTextLine text_line; try { ScriptContext * const script_context_ptr = ScriptUtilities::get_my_script_context(engine_ptr); // Convert text to ExternalText // const primitives::Result convert_result = primitives::PrimitivesAccess::instance()-> system_prims().to_external_text( script_context_ptr->get_security_context(), raw_text->export_to_string(), text_line); if (not convert_result.is_success()) { throw AngelException( "Failed to convert text to ExternalText", convert_result, AS_OBJECT_TYPE_NAME, PRINTLN_METHOD_SIG); } else { const bool success_send = script_context_ptr->get_output_channel() ? 
script_context_ptr->get_output_channel()->send_item( text_line) : false; if (not success_send) { // Should never happen throw AngelException( "Output Channel is closed or blocked", convert_result, AS_OBJECT_TYPE_NAME, PRINTLN_METHOD_SIG); } } } catch (std::exception &ex) { text::ExternalText::clear_text_line(text_line); ScriptUtilities::set_exception_info(engine_ptr, ex); throw; } catch (...) { text::ExternalText::clear_text_line(text_line); ScriptUtilities::set_exception_info(engine_ptr); throw; } } // ---------------------------------------------------------------------- void InputOutputOps::mprintln(asIScriptGeneric *gen_ptr) { if (not gen_ptr) { LOG(fatal, "angelscript", "mprintln", "gen_ptr is null"); return; } asIScriptEngine * const engine_ptr = gen_ptr->GetEngine(); AString * const raw_text = reinterpret_cast<AString *>( gen_ptr->GetArgObject(0)); if (not raw_text) { throw AngelException( "AngelScript passed null pointers to us", AS_OBJECT_TYPE_NAME, MPRINTLN_METHOD_SIG); } text::ExternalTextMultiline text_lines; try { ScriptContext * const script_context_ptr = ScriptUtilities::get_my_script_context(engine_ptr); // Convert text to ExternalText // const primitives::Result convert_result = primitives::PrimitivesAccess::instance()-> system_prims().to_external_text_multiline_unformatted( script_context_ptr->get_security_context(), raw_text->export_to_string(), text_lines); if (not convert_result.is_success()) { throw AngelException( "Failed to convert text to ExternalText", convert_result, AS_OBJECT_TYPE_NAME, MPRINTLN_METHOD_SIG); } else { events::TextChannel * const output_channel = script_context_ptr->get_output_channel(); bool success_send = output_channel; if (success_send) { // Send each line one at a time until we're done or errored // out. 
// for (text::ExternalTextMultiline::iterator line_iter = text_lines.begin(); line_iter != text_lines.end(); ++line_iter) { success_send = output_channel->send_item(*line_iter); if (not success_send) { break; } } } if (not success_send) { // Should never happen throw AngelException( "Output Channel is closed or blocked", convert_result, AS_OBJECT_TYPE_NAME, PRINTLN_METHOD_SIG); } } } catch (std::exception &ex) { text::ExternalText::clear_text_lines(text_lines); ScriptUtilities::set_exception_info(engine_ptr, ex); throw; } catch (...) { text::ExternalText::clear_text_lines(text_lines); ScriptUtilities::set_exception_info(engine_ptr); throw; } } // ---------------------------------------------------------------------- void InputOutputOps::send_event( asIScriptEngine *const engine_ptr, const std::string &method, AEntity &entity, const bool entity_is_room, AString &raw_text, const bool prepend_self, const bool exclude_requester) { text::ExternalTextLine text_line; try { security::Context * const security_context_ptr = ScriptUtilities::get_my_security_context(engine_ptr); // Convert text to ExternalText // const primitives::Result convert_result = primitives::PrimitivesAccess::instance()-> system_prims().to_external_text( *security_context_ptr, raw_text.export_to_string(), text_line); if (not convert_result.is_success()) { throw AngelException( "Failed to convert text to ExternalText", convert_result, AS_OBJECT_TYPE_NAME, method); } else { // Insert name in the front if requested. 
// if (prepend_self) { text::ExternalIdText *id_text_ptr = 0; const primitives::Result id_make_result = primitives::PrimitivesAccess::instance()-> system_prims().make_id_text( *security_context_ptr, security_context_ptr->get_requester(), id_text_ptr); if ((not id_text_ptr) or (not id_make_result.is_success())) { delete id_text_ptr; id_text_ptr = 0; throw AngelException( "Failed to construct ID Text with requester", id_make_result, AS_OBJECT_TYPE_NAME, method); } else { text_line.insert(text_line.begin(), id_text_ptr); id_text_ptr = 0; } } // Send off the text. // const primitives::Result prim_result = (entity_is_room ? primitives::PrimitivesAccess::instance()-> event_prims().send_text_to_room( *security_context_ptr, entity.get_id(), text_line, exclude_requester) : primitives::PrimitivesAccess::instance()-> event_prims().send_text_to_entity( *security_context_ptr, entity.get_id(), text_line)); if (not prim_result.is_success()) { throw AngelException( "", prim_result, AS_OBJECT_TYPE_NAME, method); } else { text::ExternalText::clear_text_line(text_line); } } } catch (std::exception &ex) { text::ExternalText::clear_text_line(text_line); ScriptUtilities::set_exception_info(engine_ptr, ex); throw; } catch (...) { text::ExternalText::clear_text_line(text_line); ScriptUtilities::set_exception_info(engine_ptr); throw; } } // ---------------------------------------------------------------------- void InputOutputOps::check_register_rc( const int rc, const size_t line, bool &current_result) { if (rc < 0) { current_result = false; LOG(fatal, "angelscript", "check_register_rc", "Failed to register with AngelScript. rc = " + text::to_string(rc) + ", line = " + text::to_string(line)); } } } }
package randomizer.fates.model.processors.chapter; import randomizer.common.enums.ItemType; import randomizer.common.fs.model.Decompiler; import randomizer.common.fs.model.ScriptCompiler; import randomizer.common.structures.Chapter; import randomizer.fates.model.structures.FatesCharacter; import randomizer.fates.singletons.*; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; public class ScriptHandler { private static boolean[] options = FatesGui.getInstance().getSelectedOptions(); private static FatesItems fatesItems = FatesItems.getInstance(); private static FatesChapters fatesChapters = FatesChapters.getInstance(); private static FatesFiles fileData = FatesFiles.getInstance(); public static void randomizeScript() { List<FatesCharacter> characters = FatesCharacters.getInstance().getWorkingCharacters(); List<Chapter> chapters = fatesChapters.getSelectedChapters(); Decompiler decompiler = new Decompiler(); ScriptCompiler compiler; for(Chapter c : chapters) { try { // Swap PID values within the script. Path path = fileData.getScript().get(c.getCid()).toPath(); String script = decompiler.decompile(path); for(FatesCharacter ch : characters) { script = script.replaceAll(ch.getPid(), ch.getAid() + "RANDOMIZERTMP"); } for(FatesCharacter ch : characters) { script = script.replaceAll(ch.getTargetPid().replace("PID_", "AID_") + "RANDOMIZERTMP", ch.getPid()); // Replace the forced reclass in chapter 5 with the player's newly // assigned class. if(ch.getId() == 1 && c.getCid().equals("A005")) { script = script.replaceAll("JID_ダークプリンス男", ch.getCharacterClass().getJid()); } else if(ch.getId() == 2 && c.getCid().equals("A005")) { script = script.replaceAll("JID_ダークプリンセス女", ch.getCharacterClass().getJid()); } } // Patch unusual map scripts. if(c.getCid().equals("A011") && options[3]) script = patchA011Script(script); // Recompile for use in game. 
compiler = new ScriptCompiler(path.toFile().getName()); compiler.compile(path, script); } catch (Exception e) { e.printStackTrace(); } if(options[7]) randomizeBev(c, characters); if(options[2]) randomizeTerrain(c); } } private static void randomizeBev(Chapter chapter, List<FatesCharacter> characters) { if(chapter == null) throw new IllegalArgumentException("Violation of precondidition: " + "randomizeBev. chapter must not be null."); if(characters == null) throw new IllegalArgumentException("Violation of precondidition: " + "randomizeBev. characters must not be null."); ArrayList<File> arrfile = new ArrayList<>(); File[] tempFiles = fileData.getBev().listFiles((dir, name) -> name.startsWith(chapter.getCid())); if(tempFiles != null) { Collections.addAll(arrfile, tempFiles); } Decompiler decompiler = new Decompiler(); ScriptCompiler compiler; for(File f : arrfile) { try { // Swap tagless PIDs. String script = decompiler.decompile(f.toPath()); script = script.replaceAll("\"法衣裏返しレオン\"", "\"レオン\""); // Leo chapter 1 model. for(FatesCharacter ch : characters) { script = script.replaceAll(ch.getTaglessPid(), ch.getAid() + "RANDOMIZERTMP"); } for(FatesCharacter ch : characters) { script = script.replaceAll(ch.getTargetPid().replace("PID_", "AID_") + "RANDOMIZERTMP", ch.getTaglessPid()); } // Recompile the script for use in game. compiler = new ScriptCompiler(f.getName()); compiler.compile(f.toPath(), script); } catch (Exception e) { e.printStackTrace(); } } } private static void randomizeTerrain(Chapter chapter) { if(chapter == null) throw new IllegalArgumentException("Violation of precondidition: " + "randomizeTerrain. chapter must not be null."); if(fileData.getTerrain().containsKey(chapter.getCid())) { try { // Generate new chest items. 
Decompiler decompiler = new Decompiler(); ScriptCompiler compiler; Path path = fileData.getTerrain().get(chapter.getCid()).toPath(); String tmp = decompiler.decompile(path); String lines[] = tmp.split("\\r?\\n"); StringBuilder script = new StringBuilder(); for(String line : lines) { if(line.startsWith("ev::ItemGain(string\"")) { line = "ev::ItemGain(string(\"" + fatesItems.getSelectedItems(ItemType.Treasure) + "\"))"; } script.append(line).append(System.lineSeparator()); } script.append(System.lineSeparator()); compiler = new ScriptCompiler(path.toFile().getName()); compiler.compile(path, script.toString()); } catch (Exception e) { e.printStackTrace(); } } } private static String patchA011Script(String script) { if(script == null) throw new IllegalArgumentException("Violation of precondidition: " + "patchA011Script. chapter must not be null."); // Remove all references to the Birthright handover file. String[] arr = script.split("\\r?\\n"); List<String> lines = new ArrayList<>(); lines.addAll(Arrays.asList(arr)); lines.removeIf(s -> s.contains("A_HANDOVER")); StringBuilder builder = new StringBuilder(); for(String line : lines) builder.append(line).append(System.lineSeparator()); builder.append(System.lineSeparator()); return builder.toString(); } }
// BUGFIX: require('fs') returns the module object itself; destructuring a
// non-existent `fs` property out of it (`const { fs } = ...`) left all three
// bindings undefined, so `express()` threw "express is not a function".
const fs = require('fs');
const path = require('path');
const express = require('express');

const app = express();
package com.unboundid.ldap.sdk; public class LDAPException extends Exception { }
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, <NAME> (@jborean93) <<EMAIL>>
# MIT License (see LICENSE or https://opensource.org/licenses/MIT)

from __future__ import division

import collections
import errno
import io
import ntpath
import operator
import os
import stat as py_stat
import time

from smbclient._io import (
    ioctl_request,
    query_info,
    set_info,
    SMBDirectoryIO,
    SMBFileIO,
    SMBFileTransaction,
    SMBPipeIO,
    SMBRawIO,
)

from smbprotocol import (
    MAX_PAYLOAD_SIZE,
)

from smbprotocol._text import (
    to_bytes,
    to_native,
    to_text,
)

from smbprotocol.exceptions import (
    NtStatus,
    SMBOSError,
    SMBResponseException,
)

from smbprotocol.file_info import (
    FileAttributeTagInformation,
    FileBasicInformation,
    FileDispositionInformation,
    FileFsVolumeInformation,
    FileFullEaInformation,
    FileIdFullDirectoryInformation,
    FileInformationClass,
    FileInternalInformation,
    FileLinkInformation,
    FileRenameInformation,
    FileStandardInformation,
)

from smbprotocol.ioctl import (
    CtlCode,
    IOCTLFlags,
    SMB2SrvCopyChunk,
    SMB2SrvCopyChunkResponse,
    SMB2SrvCopyChunkCopy,
    SMB2SrvRequestResumeKey
)

from smbprotocol.open import (
    CreateOptions,
    FileAttributes,
    FilePipePrinterAccessMask,
    QueryInfoFlags,
)

from smbprotocol.reparse_point import (
    ReparseDataBuffer,
    ReparseTags,
    SymbolicLinkFlags,
    SymbolicLinkReparseDataBuffer,
)

from smbprotocol.structure import (
    DateTimeField,
)

# Fall back to fixed values when the host os module does not define the
# xattr flag constants (e.g. on Windows).
XATTR_CREATE = getattr(os, 'XATTR_CREATE', 1)
XATTR_REPLACE = getattr(os, 'XATTR_REPLACE', 2)

MAX_COPY_CHUNK_SIZE = 1 * 1024 * 1024  # maximum chunksize 1M from 3.3.3 in MS-SMB documentation
MAX_COPY_CHUNK_COUNT = 16  # maximum total chunksize 16M from 3.3.3 in MS-SMB documentation

# os.stat_result lookalike returned by stat()/lstat(), extended with SMB
# specific fields (change time, nanosecond timestamps, file attributes,
# reparse tag).
SMBStatResult = collections.namedtuple('SMBStatResult', [
    'st_mode',
    'st_ino',
    'st_dev',
    'st_nlink',
    'st_uid',
    'st_gid',
    'st_size',
    'st_atime',
    'st_mtime',
    'st_ctime',

    # Extra attributes not part of the base stat_result
    'st_chgtime',  # ChangeTime, change of file metadata and not just data (mtime)
    'st_atime_ns',
    'st_mtime_ns',
    'st_ctime_ns',
    'st_chgtime_ns',
    'st_file_attributes',
    'st_reparse_tag',
])


def copyfile(src, dst, **kwargs):
    """
    Copy a file to a different location on the same server share. This will fail if the src and dst paths are to a
    different server or share. This will replace the file at dst if it already exists.

    This is not normally part of the builtin os package but because it relies on some SMB IOCTL commands it is useful
    to expose here.

    :param src: The full UNC path of the source file.
    :param dst: The full UNC path of the target file.
    :param kwargs: Common SMB Session arguments for smbclient.
    """
    norm_src = ntpath.normpath(src)
    norm_dst = ntpath.normpath(dst)

    if not norm_src.startswith('\\\\'):
        raise ValueError("src must be an absolute path to where the file should be copied from.")

    if not norm_dst.startswith('\\\\'):
        raise ValueError("dst must be an absolute path to where the file should be copied to.")

    src_root = ntpath.splitdrive(norm_src)[0]
    dst_root, dst_name = ntpath.splitdrive(norm_dst)
    if src_root.lower() != dst_root.lower():
        raise ValueError("Cannot copy a file to a different root than the src.")

    with open_file(norm_src, mode='rb', share_access='r', buffering=0, **kwargs) as src_fd:
        # Ask the server for a resume key that identifies the open source
        # file for the server-side copy-chunk operation.
        with SMBFileTransaction(src_fd) as transaction_src:
            ioctl_request(transaction_src, CtlCode.FSCTL_SRV_REQUEST_RESUME_KEY,
                          flags=IOCTLFlags.SMB2_0_IOCTL_IS_FSCTL, output_size=32)

        resume_response = SMB2SrvRequestResumeKey()
        resume_response.unpack(transaction_src.results[0])
        resume_key = resume_response['resume_key'].get_value()

        # Pre-compute the chunk descriptors covering the whole source file,
        # each at most MAX_COPY_CHUNK_SIZE bytes.
        chunks = []
        offset = 0
        while offset < src_fd.fd.end_of_file:
            copychunk_struct = SMB2SrvCopyChunk()
            copychunk_struct['source_offset'] = offset
            copychunk_struct['target_offset'] = offset
            copychunk_struct['length'] = min(MAX_COPY_CHUNK_SIZE, src_fd.fd.end_of_file - offset)

            chunks.append(copychunk_struct)
            offset += MAX_COPY_CHUNK_SIZE

        with open_file(norm_dst, mode='wb', share_access='r', buffering=0, **kwargs) as dst_fd:
            # Issue the chunks in batches of MAX_COPY_CHUNK_COUNT per IOCTL.
            for i in range(0, len(chunks), MAX_COPY_CHUNK_COUNT):
                batch = chunks[i:i + MAX_COPY_CHUNK_COUNT]
                with SMBFileTransaction(dst_fd) as transaction_dst:
                    copychunkcopy_struct = SMB2SrvCopyChunkCopy()
                    copychunkcopy_struct['source_key'] = resume_key
                    copychunkcopy_struct['chunks'] = batch

                    ioctl_request(transaction_dst, CtlCode.FSCTL_SRV_COPYCHUNK_WRITE,
                                  flags=IOCTLFlags.SMB2_0_IOCTL_IS_FSCTL, output_size=12,
                                  input_buffer=copychunkcopy_struct)

                for result in transaction_dst.results:
                    copychunk_response = SMB2SrvCopyChunkResponse()
                    copychunk_response.unpack(result)
                    if copychunk_response['chunks_written'].get_value() != len(batch):
                        raise IOError("Failed to copy all the chunks in a server side copyfile: '%s' -> '%s'"
                                      % (norm_src, norm_dst))


def link(src, dst, follow_symlinks=True, **kwargs):
    """
    Create a hard link pointing to src named dst. The src argument must be an absolute path in the same share as
    src.

    :param src: The full UNC path to used as the source of the hard link.
    :param dst: The full UNC path to create the hard link at.
    :param follow_symlinks: Whether to link to the src target (True) or src itself (False) if src is a symlink.
    :param kwargs: Common arguments used to build the SMB Session.
""" norm_src = ntpath.normpath(src) norm_dst = ntpath.normpath(dst) if not norm_src.startswith('\\\\'): raise ValueError("src must be the absolute path to where the file is hard linked to.") src_root = ntpath.splitdrive(norm_src)[0] dst_root, dst_name = ntpath.splitdrive(norm_dst) if src_root.lower() != dst_root.lower(): raise ValueError("Cannot hardlink a file to a different root than the src.") raw = SMBFileIO(norm_src, mode='r', share_access='rwd', desired_access=FilePipePrinterAccessMask.FILE_WRITE_ATTRIBUTES, create_options=0 if follow_symlinks else CreateOptions.FILE_OPEN_REPARSE_POINT, **kwargs) with SMBFileTransaction(raw) as transaction: link_info = FileLinkInformation() link_info['replace_if_exists'] = False link_info['file_name'] = to_text(dst_name[1:]) set_info(transaction, link_info) def listdir(path, search_pattern="*", **kwargs): """ Return a list containing the names of the entries in the directory given by path. The list is in arbitrary order, and does not include the special entries '.' and '..' even if they are present in the directory. :param path: The path to the directory to list. :param search_pattern: THe search string to match against the names of directories or files. This pattern can use '*' as a wildcard for multiple chars and '?' as a wildcard for a single char. Does not support regex patterns. :param kwargs: Common SMB Session arguments for smbclient. :return: A list containing the names of the entries in the directory. 
""" with SMBDirectoryIO(path, mode='r', share_access='r', **kwargs) as dir_fd: try: raw_filenames = dir_fd.query_directory(search_pattern, FileInformationClass.FILE_NAMES_INFORMATION) return list(e['file_name'].get_value().decode('utf-16-le') for e in raw_filenames if e['file_name'].get_value().decode('utf-16-le') not in ['.', '..']) except SMBResponseException as exc: if exc.status == NtStatus.STATUS_NO_SUCH_FILE: return [] raise def lstat(path, **kwargs): """ Perform the equivalent of an lstat() system call on the given path. Similar to stat(), but does not follow symbolic links. :param path: The path to the file or directory to stat. :param kwargs: Common SMB Session arguments for smbclient. :return: See stat() for the return values. """ return stat(path, follow_symlinks=False, **kwargs) def mkdir(path, **kwargs): """ Create a directory named path. If the directory already exists, OSError(errno.EEXIST) is raised. :param path: The path to the directory to create. :param kwargs: Common SMB Session arguments for smbclient. """ raw = SMBDirectoryIO(path, mode='x', **kwargs) with SMBFileTransaction(raw): pass def makedirs(path, exist_ok=False, **kwargs): """ Recursive directory creation function. Like mkdir(), but makes all intermediate-level directories needed to contain the leaf directory. If exist_ok is False (the default), an OSError is raised if the target directory already exists. :param path: The path to the directory to create. :param exist_ok: Set to True to not fail if the target directory already exists. :param kwargs: Common SMB Session arguments for smbclient. 
""" create_queue = [ntpath.normpath(path)] present_parent = None while create_queue: mkdir_path = create_queue[-1] try: mkdir(mkdir_path, **kwargs) except OSError as err: if err.errno == errno.EEXIST: present_parent = mkdir_path create_queue.pop(-1) if not create_queue and not exist_ok: raise elif err.errno == errno.ENOENT: # Check if the parent path has already been created to avoid getting in an endless loop. parent_path = ntpath.dirname(mkdir_path) if present_parent == parent_path: raise else: create_queue.append(parent_path) else: raise else: create_queue.pop(-1) def open_file(path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, share_access=None, desired_access=None, file_attributes=None, file_type='file', **kwargs): """ Open a file on an SMB share and return a corresponding file object. If the file cannot be opened, an OSError is raised. This function is designed to mimic the builtin open() function but limits some functionality based on what is available over SMB. It is recommended to call this function with a 'with' statement to ensure the file is closed when not required: with smbclient.open_file("\\\\server\\share\\file.txt") as fd: fd.read() Otherwise the .close() function will also close the handle to the file. :param path: The absolute pathname of the file to be opened. :param mode: Optional string that specifies the mode in which the file is opened. It defaults to 'r' which means for reading in text mode. Other common values are 'w' for writing (truncating the file if it already exists), 'x' for exclusive creation and 'a' for appending. The available modes are: Open Mode 'r': Open for reading (default). 'w': Open for writing, truncating the file first. 'x': Open for exclusive creation, failing if the file already exists. 'a': Open for writing, appending to the end of the file if it exists. '+': Open for updating (reading and writing), can be used in conjunction with any of the above. 
Open Type - can be specified with the OpenMode 't': Text mode (default). 'b': Binary mode. :param buffering: An optional integer used to set the buffering policy. Pass 0 to switch buffering off (only allowed in binary mode), 1 to select line buffering (only usable in text mode), and an integer > 1 to indicate the size in bytes of a fixed-size chunk buffer. When no buffering argument is given, the default buffering is max size for a single SMB2 packet (65536). This can be higher but is dependent on the credits available from the server. :param encoding: The name of the encoding used to decode or encode the file. This should only be used in text mode. The default encoding is platform dependent (whatever locale.getpreferredencoding() returns), but any text encoding types supported by Python can be used. :param errors: Specifies how encoding encoding and decoding errors are to be handled. This cannot be used in binary mode. A variety of standard error handlers are available, though any error handling name that has been registered with codecs.register_error() is also valid. See the open() docs for a list of builtin error handlers for your Python version. :param newline: Controls how universal newlines mode works. This should only be used in text mode. It can be 'None', '', '\n', '\r', and '\r\n'. :param share_access: String that specifies the type of access that is allowed when a handle to this file is opened by another process. The default is 'None' which exclusively locks the file until the file is closed. The available access values are: 'r': Allow other handles to be opened with read access. 'w': Allow other handles to be opened with write access. 'd': Allow other handles to be opened with delete access. A combination of values can be set to allow multiple access types together. :param desired_access: Override the access mask used when opening the file. :param file_attributes: Set custom file attributes when opening the file. 
    :param file_type: The type of file to access, supports 'file' (default), 'dir', and 'pipe'.
    :param kwargs: Common arguments used to build the SMB Session.
    :return: The file object returned by the open() function, the type depends on the mode that was used to open the
        file.
    """
    file_class = {
        'file': SMBFileIO,
        'dir': SMBDirectoryIO,
        'pipe': SMBPipeIO,
    }[file_type]

    # buffer_size for this is not the same as the buffering value. We choose the max between the input and
    # MAX_PAYLOAD_SIZE (SMB2 payload size) to ensure a user can set a higher size but not limit single payload
    # requests. This is only used readall() requests to the underlying open.
    raw_fd = file_class(path, mode=mode, share_access=share_access, desired_access=desired_access,
                        file_attributes=file_attributes, buffer_size=max(buffering, MAX_PAYLOAD_SIZE), **kwargs)
    try:
        raw_fd.open()

        line_buffering = buffering == 1

        if buffering == 0:
            # Unbuffered access is only valid for binary mode, mirroring the
            # builtin open().
            if 'b' not in raw_fd.mode:
                raise ValueError("can't have unbuffered text I/O")
            return raw_fd

        # Pick the buffered wrapper matching the raw handle's capabilities.
        if raw_fd.readable() and raw_fd.writable():
            buff_type = io.BufferedRandom
        elif raw_fd.readable():
            buff_type = io.BufferedReader
        else:
            buff_type = io.BufferedWriter

        if buffering == -1:
            buffering = MAX_PAYLOAD_SIZE

        fd_buffer = buff_type(raw_fd, buffer_size=buffering)

        if 'b' in raw_fd.mode:
            return fd_buffer

        return io.TextIOWrapper(fd_buffer, encoding, errors, newline, line_buffering=line_buffering)
    except Exception:
        # If there was a failure in the setup, make sure the file is closed.
        raw_fd.close()
        raise


def readlink(path, **kwargs):
    """
    Return a string representing the path to which the symbolic link points. If the link is relative it will be
    converted to an absolute pathname relative to the link itself. The link target may point to a local path and not
    another UNC path.

    :param path: The path to the symbolic link to read.
    :param kwargs: Common SMB Session arguments for smbclient.
    :return: The link target path.
    """
    norm_path = ntpath.normpath(path)
    reparse_buffer = _get_reparse_point(norm_path, **kwargs)
    reparse_tag = reparse_buffer['reparse_tag']
    # Only symlink reparse points can be read as links; other tags (mount
    # points etc.) are rejected.
    if reparse_tag.get_value() != ReparseTags.IO_REPARSE_TAG_SYMLINK:
        raise ValueError(to_native("Cannot read link of reparse point with tag %s at '%s'"
                                   % (str(reparse_tag), norm_path)))

    symlink_buffer = SymbolicLinkReparseDataBuffer()
    symlink_buffer.unpack(reparse_buffer['data_buffer'].get_value())
    return symlink_buffer.resolve_link(norm_path)


def remove(path, **kwargs):
    """
    Remove (delete) the file path. If path is a directory, an IsADirectoryError is raised. Use rmdir() to remove
    directories. Trying to remove a file that is in use causes an exception to be raised unless the existing handle
    was opened with the Delete share access. In that case the file will be removed once all handles are closed.

    :param path: The full UNC path to the file to remove.
    :param kwargs: Common SMB Session arguments for smbclient.
    """
    _delete(SMBFileIO, path, **kwargs)


def removedirs(name, **kwargs):
    """
    Remove directories recursively. Works like rmdir() except that, if the leaf directory is successfully removed,
    removedirs() tries to successively remove every parent directory mentioned in path until an error is raised
    (which is ignored, because it generally means that a parent directory is not empty).

    :param name: The directory to start removing recursively from.
    :param kwargs: Common SMB Session arguments for smbclient.
    """
    remove_dir = ntpath.normpath(name)

    while True:
        try:
            rmdir(remove_dir, **kwargs)
        except (SMBResponseException, OSError):
            # Deliberate best-effort: a non-empty (or missing) parent ends the
            # walk silently, matching os.removedirs semantics.
            return
        else:
            remove_dir = ntpath.dirname(remove_dir)


def rename(src, dst, **kwargs):
    """
    Rename the file or directory src to dst. If dst exists, the operation will fail with an OSError subclass in a
    number of cases.

    :param src: The path to the file or directory to rename.
    :param dst: The path to rename the file or directory to.
    :param kwargs: Common SMB Session arguments for smbclient.
    """
    _rename_information(src, dst, replace_if_exists=False, **kwargs)


def renames(old, new, **kwargs):
    """
    Recursive directory or file renaming function. Works like rename(), except creation of any intermediate
    directories needed to make the new pathname good is attempted first. After the rename, directories corresponding
    to rightmost path segments of the old name will be pruned away using removedirs().

    :param old: The path to the file or directory to rename.
    :param new: The path to rename the file or directory to.
    :param kwargs: Common SMB Session arguments for smbclient.
    """
    makedirs(ntpath.dirname(new), exist_ok=True, **kwargs)
    rename(old, new, **kwargs)
    removedirs(ntpath.dirname(old), **kwargs)


def replace(src, dst, **kwargs):
    """
    Rename the file or directory src to dst. If dst exists and is a directory, OSError will be raised. If dst exists
    and is a file, it will be replaced silently if the user has permission. The path at dst must be on the same share
    as the src file or folder.

    :param src: The path to the file or directory to rename.
    :param dst: The path to rename the file or directory to.
    :param kwargs: Common SMB Session arguments for smbclient.
    """
    _rename_information(src, dst, replace_if_exists=True, **kwargs)


def rmdir(path, **kwargs):
    """
    Remove (delete) the directory path. If the directory does not exist or is not empty, an FileNotFoundError or an
    OSError is raised respectively.

    :param path: The path to the directory to remove.
    :param kwargs: Common SMB Session arguments for smbclient.
    """
    _delete(SMBDirectoryIO, path, **kwargs)


def scandir(path, search_pattern="*", **kwargs):
    """
    Return an iterator of DirEntry objects corresponding to the entries in the directory given by path. The entries
    are yielded in arbitrary order, and the special entries '.' and '..' are not included.

    Using scandir() instead of listdir() can significantly increase the performance of code that also needs file type
    or file attribute information, because DirEntry objects expose this information if the SMB server provides it
    when scanning a directory. All DirEntry methods may perform a SMB request, but is_dir(), is_file(), is_symlink()
    usually only require a one system call unless the file or directory is a reparse point which requires 2 calls.

    See the Python documentation for how DirEntry is set up and the methods and attributes that are available.

    :param path: The path to a directory to scan.
    :param search_pattern: The search string to match against the names of directories or files. This pattern can use
        '*' as a wildcard for multiple chars and '?' as a wildcard for a single char. Does not support regex patterns.
    :param kwargs: Common SMB Session arguments for smbclient.
    :return: An iterator of DirEntry objects in the directory.
    """
    with SMBDirectoryIO(path, share_access='rwd', **kwargs) as fd:
        for dir_info in fd.query_directory(search_pattern,
                                           FileInformationClass.FILE_ID_FULL_DIRECTORY_INFORMATION):
            filename = dir_info['file_name'].get_value().decode('utf-16-le')
            if filename in [u'.', u'..']:
                continue

            # The raw (unopened) handle lets DirEntry lazily fetch extra info
            # only when a method actually needs it.
            dir_entry = SMBDirEntry(SMBRawIO(u"%s\\%s" % (path, filename), **kwargs), dir_info)
            yield dir_entry


def stat(path, follow_symlinks=True, **kwargs):
    """
    Get the status of a file. Perform the equivalent of a stat() system call on the given path. This function
    normally follows symlinks; to stat a symlink add the argument follow_symlinks=False.

    :param path: The path to the file or directory to stat.
    :param follow_symlinks: Whether to open the file's reparse point if present during the open. In most scenarios
        this means to stat() the symlink target if the path is a symlink or not.
    :param kwargs: Common SMB Session arguments for smbclient.
    :return: A tuple representing the stat result of the path.
This contains the standard tuple entries as os.stat_result as well as: st_chgtime: The time, seconds since EPOCH, when the file's metadata was last changed. st_atime_ns: Same as st_atime but measured in nanoseconds st_mtime_ns: Same as st_mtime but measured in nanoseconds st_ctime_ns: Same as st_ctime but measured in nanoseconds st_chgtime_ns: Same as st_chgtime but measured in nanoseconds st_file_attributes: An int representing the Windows FILE_ATTRIBUTES_* constants. st_reparse_tag: An int representing the Windows IO_REPARSE_TAG_* constants. This is set to 0 unless follow_symlinks=False and the path is a reparse point. See smbprotocol.reparse_point.ReparseTags. """ raw = SMBRawIO(path, mode='r', share_access='rwd', desired_access=FilePipePrinterAccessMask.FILE_READ_ATTRIBUTES, create_options=0 if follow_symlinks else CreateOptions.FILE_OPEN_REPARSE_POINT, **kwargs) with SMBFileTransaction(raw) as transaction: query_info(transaction, FileBasicInformation) # volume_label is variable and can return up to the first 32 chars (32 * 2 for UTF-16) + null padding query_info(transaction, FileFsVolumeInformation, output_buffer_length=88) query_info(transaction, FileInternalInformation) query_info(transaction, FileStandardInformation) query_info(transaction, FileAttributeTagInformation) basic_info, fs_volume, internal_info, standard_info, attribute_tag = transaction.results reparse_tag = attribute_tag['reparse_tag'].get_value() file_attributes = basic_info['file_attributes'] st_mode = 0 # Permission bits are mostly symbolic, holdover from python stat behaviour if file_attributes.has_flag(FileAttributes.FILE_ATTRIBUTE_DIRECTORY): st_mode |= py_stat.S_IFDIR | 0o111 else: st_mode |= py_stat.S_IFREG if file_attributes.has_flag(FileAttributes.FILE_ATTRIBUTE_READONLY): st_mode |= 0o444 else: st_mode |= 0o666 if reparse_tag == ReparseTags.IO_REPARSE_TAG_SYMLINK: # Python behaviour is to remove the S_IFDIR and S_IFREG is the file is a symbolic link. 
It also only sets # S_IFLNK for symbolic links and not other reparse point tags like junction points. st_mode ^= py_stat.S_IFMT(st_mode) st_mode |= py_stat.S_IFLNK # The time fields are 100s of nanoseconds since 1601-01-01 UTC and we need to convert to nanoseconds since EPOCH. epoch_ft = DateTimeField.EPOCH_FILETIME atime_ns = (basic_info['last_access_time'].get_value() - epoch_ft) * 100 mtime_ns = (basic_info['last_write_time'].get_value() - epoch_ft) * 100 ctime_ns = (basic_info['creation_time'].get_value() - epoch_ft) * 100 chgtime_ns = (basic_info['change_time'].get_value() - epoch_ft) * 100 return SMBStatResult( st_mode=st_mode, st_ino=internal_info['index_number'].get_value(), st_dev=fs_volume['volume_serial_number'].get_value(), st_nlink=standard_info['number_of_links'].get_value(), st_uid=0, st_gid=0, st_size=standard_info['end_of_file'].get_value(), st_atime=atime_ns / 1000000000, st_mtime=mtime_ns / 1000000000, st_ctime=ctime_ns / 1000000000, st_chgtime=chgtime_ns / 1000000000, st_atime_ns=atime_ns, st_mtime_ns=mtime_ns, st_ctime_ns=ctime_ns, st_chgtime_ns=chgtime_ns, st_file_attributes=file_attributes.get_value(), st_reparse_tag=reparse_tag, ) def symlink(src, dst, target_is_directory=False, **kwargs): """ Create a symbolic link pointing to src named dst. The src argument must be an absolute path in the same share as src. If the target src exists, then the symlink type is created based on the target type. If the target does not exist then the target_is_directory var can be used to control the type of symlink created. Note the server must support creating a reparse point using the FSCTL_SET_REPARSE_POINT code. This is typically only Windows servers. :param src: The target of the symlink. :param dst: The path where the symlink is to be created. :param target_is_directory: If src does not exist, controls whether a file or directory symlink is created. :param kwargs: Common SMB Session arguments for smbclient. 
""" norm_dst = ntpath.normpath(dst) if not norm_dst.startswith('\\\\'): raise ValueError("The link dst must be an absolute UNC path for where the link is to be created") norm_src = ntpath.normpath(src) print_name = norm_src if not norm_src.startswith('\\\\'): flags = SymbolicLinkFlags.SYMLINK_FLAG_RELATIVE substitute_name = norm_src dst_dir = ntpath.dirname(norm_dst) norm_src = ntpath.abspath(ntpath.join(dst_dir, norm_src)) else: flags = SymbolicLinkFlags.SYMLINK_FLAG_ABSOLUTE substitute_name = '\\??\\UNC\\%s' % norm_src[2:] src_drive = ntpath.splitdrive(norm_src)[0] dst_drive = ntpath.splitdrive(norm_dst)[0] if src_drive.lower() != dst_drive.lower(): raise ValueError(to_native("Resolved link src root '%s' must be the same as the dst root '%s'" % (src_drive, dst_drive))) try: src_stat = stat(norm_src, **kwargs) except OSError as err: if err.errno != errno.ENOENT: raise else: # If the src actually exists, override the target_is_directory with whatever type src actually is. target_is_directory = py_stat.S_ISDIR(src_stat.st_mode) symlink_buffer = SymbolicLinkReparseDataBuffer() symlink_buffer['flags'] = flags symlink_buffer.set_name(substitute_name, print_name) reparse_buffer = ReparseDataBuffer() reparse_buffer['reparse_tag'] = ReparseTags.IO_REPARSE_TAG_SYMLINK reparse_buffer['data_buffer'] = symlink_buffer co = CreateOptions.FILE_OPEN_REPARSE_POINT if target_is_directory: co |= CreateOptions.FILE_DIRECTORY_FILE else: co |= CreateOptions.FILE_NON_DIRECTORY_FILE raw = SMBRawIO(norm_dst, mode='x', desired_access=FilePipePrinterAccessMask.FILE_WRITE_ATTRIBUTES, create_options=co, **kwargs) with SMBFileTransaction(raw) as transaction: ioctl_request(transaction, CtlCode.FSCTL_SET_REPARSE_POINT, flags=IOCTLFlags.SMB2_0_IOCTL_IS_FSCTL, input_buffer=reparse_buffer) def truncate(path, length, **kwargs): """ Truncate the file corresponding to path, so that it is at most length bytes in size. :param path: The path for the file to truncate. 
:param length: The length in bytes to truncate the file to. :param kwargs: Common SMB Session arguments for smbclient. """ with open_file(path, mode='ab', **kwargs) as fd: fd.truncate(length) def unlink(path, **kwargs): """ Remove (delete) the file path. This function is semantically identical to remove(); the unlink name is its traditional Unix name. Please see the documentation for remove() for further information. :param path: The full UNC path to the file to remove. :param kwargs: Common SMB Session arguments for smbclient. """ remove(path, **kwargs) def utime(path, times=None, ns=None, follow_symlinks=True, **kwargs): """ Set the access and modified times of the file specified by path. utime() takes two optional parameters, times and ns. These specify the times set on path and are used as follows: * If ns is specified, it must be a 2-tuple of the form (atime_ns, mtime_ns) where each member is an int expressing nanoseconds. Note SMB has a precision of 100's of nanoseconds. * If times is not None, it must be a 2-tuple of the form (atime, mtime) where each member is an int or float expressing seconds. * If times and ns is None, this is equivalent to specifying ns=(atime_ns, mtime_ns) where both times are the current time. It is an error to specify tuples for both times and ns. :param path: The full UNC path to the file or directory to update the time. :param times: A 2-tuple of the form (atime, mtime) :param ns: A 2-tuple of the form (atime_ns, mtime_ns) :param follow_symlinks: Whether to follow symlinks when opening path. :param kwargs: Common SMB Session arguments for smbclient. 
""" if times and ns: raise ValueError("Both times and ns have been set for utime.") elif times or ns: if times: time_tuple = times # seconds in 100s of nanoseonds op = operator.mul op_amt = 10000000 else: time_tuple = ns # nanoseconds in 100s of nanoseconds op = operator.floordiv op_amt = 100 if len(time_tuple) != 2: raise ValueError("The time tuple should be a 2-tuple of the form (atime, mtime).") # EPOCH_FILETIME is EPOCH represented as MS FILETIME (100s of nanoseconds since 1601-01-01 atime, mtime = tuple([op(t, op_amt) + DateTimeField.EPOCH_FILETIME for t in time_tuple]) else: # time_ns() was only added in Python 3.7 time_ns = getattr(time, 'time_ns', None) if not time_ns: def time_ns(): # pragma: no cover return int(time.time()) * 1000000000 atime = mtime = (time_ns() // 100) + DateTimeField.EPOCH_FILETIME _set_basic_information(path, last_access_time=atime, last_write_time=mtime, follow_symlinks=follow_symlinks, **kwargs) def walk(top, topdown=True, onerror=None, follow_symlinks=False, **kwargs): """ Generate the file names in a directory tree by walking the tree either top-down or bottom-up. For each directory in the tree rooted at directory top (including top itself), it yields a 3-tuple (dirpath, dirnames, filenames). dirpath is a string, the path to the directory, dirnames is a list of names of the subdirectories in dirpath (excluding '.' and '..''). filenames is a list of names of the non-directory files in dirpath. Note that the names in the lists contain no path components. To get a full path (which beings with top) to a file or directory in dirpath, do ntpath.join(dirpath, name). If optional argument topdown is True or not specified, the triple for a directory is generated before the triples for any of its subdirectories (directories are generated top-down). If topdown is False, the triple for a directory is generated after the triples for all of its subdirectories (directories are generated bottom-up). 
No matter the value of topdown, the list of subdirectories is retrieved before the tuples for the directory and its subdirectories are generated. When topdown is True, the caller can modify the dirnames list in-place (perhaps using del or slice assignment) and walk() will only recurse into the subdirectories whose names remain in dirnames; this can be used to prune the search, impose a specific order of visting, or even to inform walk() about directories the caller creates or renames before it resumes walk() again. Modifying dirnames when topdown is False has no effect on the behaviour of the walk, because in bottom-up mode the directories in dirnames are generated before dirpath itself is generated. By default, errors from scandir() call are ignored. If optional argument onerror is specified, it should be a function; It will be called with one argument, an OSError instance. It can report the error to continue with the walk, or raise the exception to abort the walk. Note that the filename is available as the filename attribute of the exception object. By default walk() will not walk down into symbolic links that resolve to directories, Set follow_symlinks to True to visit directories pointed to by symlinks. Be aware that setting follow_symlinks to True can lead to infinite recursion if a link points to a parent directory of itself. walk() does not keep track of the directories it visited already. :param top: The full UNC path to the directory to walk. :param topdown: Controls whether to run in top-down (True) or bottom-up mode (False) :param onerror: A function that takes in 1 argument of OSError that is called when an error is encountered. :param follow_symlinks: Whether to follow symlinks that point to directories that are encountered. :param kwargs: Common SMB Session arguments for smbclient. 
""" try: scandir_gen = scandir(top, **kwargs) except OSError as err: if onerror is not None: onerror(err) return dirs = [] files = [] bottom_up_dirs = [] while True: try: try: entry = next(scandir_gen) except StopIteration: break except OSError as err: if onerror is not None: onerror(err) return if not entry.is_dir(): files.append(entry.name) continue dirs.append(entry.name) if not topdown and (follow_symlinks or not entry.is_symlink()): # Add the directory to the bottom up list which is recursively walked below, we exclude symlink dirs if # follow_symlinks is False. bottom_up_dirs.append(entry.path) walk_kwargs = { 'topdown': topdown, 'onerror': onerror, 'follow_symlinks': follow_symlinks } walk_kwargs.update(kwargs) if topdown: yield top, dirs, files for dirname in dirs: dirpath = ntpath.join(top, dirname) # In case the dir was changed in the yield we need to re-check if the dir is now a symlink and skip it if # it is not and follow_symlinks=False. if not follow_symlinks and py_stat.S_ISLNK(lstat(dirpath, **kwargs).st_mode): continue for dir_top, dir_dirs, dir_files in walk(dirpath, **walk_kwargs): yield dir_top, dir_dirs, dir_files else: # On a bottom up approach we yield the sub directories before the top path. for dirpath in bottom_up_dirs: for dir_top, dir_dirs, dir_files in walk(dirpath, **walk_kwargs): yield dir_top, dir_dirs, dir_files yield top, dirs, files def getxattr(path, attribute, follow_symlinks=True, **kwargs): """ Return the value of the extended filesystem attribute attribute for path :param path: The full UNC path to the file to get the extended attribute for. :param attribute: The extended attribute to lookup. :param follow_symlinks: Whether to follow the symlink at path if encountered :param kwargs: Common SMB Session arguments for smbclient. :return: The value fo the attribute. 
""" # I could use FileGetEaInformation() to select the attribute to return but that behaviour varies across different # SMB server, Samba returns all regardless of the ea_name set in the list and Windows returns a blank entry even # if the xattr is not set. Instead we just get them all and filter it from there. extended_attributes = _get_extended_attributes(path, follow_symlinks, **kwargs) # Convert the input attribute name to bytes and default to using utf-8. If a different encoding is desired then the # user should pass in a byte string themselves. b_attribute = to_bytes(attribute) b_xattr = next((b_val for b_name, b_val in extended_attributes if b_name == b_attribute), None) if b_xattr is None: raise SMBOSError(NtStatus.STATUS_END_OF_FILE, path) return b_xattr def listxattr(path, follow_symlinks=True, **kwargs): """ Return a list of attributes on path. :param path: The full UNC path to the file to get the list of extended attributes for. :param follow_symlinks: Whether to follow the symlink at path if encountered. :param kwargs: Common SMB Session arguments for smbclient. :return: List of attributes on the file with each attribute entry being a byte string. """ return [b_name for b_name, _ in _get_extended_attributes(path, follow_symlinks, **kwargs)] def removexattr(path, attribute, follow_symlinks=True, **kwargs): """ Removes the extended filesystem attribute attribute from path. :param path: The full UNC path to the file to remove the extended attribute from. :param attribute: The attribute to remove, if not a byte string the text is encoded using utf-8. :param follow_symlinks: Whether to follow the symlink at path if encountered. :param kwargs: Common SMB Session arguments for smbclient. """ # Setting a null byte value will remove the extended attribute, we also run with XATTR_REPLACE as that will raise # an exception if the attribute was not already set. 
setxattr(path, attribute, b"", flags=XATTR_REPLACE, follow_symlinks=follow_symlinks, **kwargs) def setxattr(path, attribute, value, flags=0, follow_symlinks=True, **kwargs): """ Set the extended filesystem attribute on path to value. flags may be XATTR_REPLACE or XATTR_CREATE. if XATTR_REPLACE is given and the attribute does not exists, EEXISTS will be raised. If XATTR_CREATE is given and the attribute already exists, the attribute will not be created and ENODATA will be raised. :param path: The full UNC path to the file to set the extended attribute on. :param attribute: The attribute to set, if not a byte string the text is encoded using utf-8. :param value: The value to set on the attribute, if not a byte string the text is encoded using utf-8. :param flags: Set to XATTR_REPLACE to replace an attribute or XATTR_CREATE to create an attribute or 0 for both. :param follow_symlinks: Whether to follow the symlink at path if encountered. :param kwargs: Common SMB Session arguments for smbclient. """ # Make sure we are dealing with a byte string, defaults to using utf-8 to encode a text string, a user can use # another encoding by passing in a byte string directly. b_attribute = to_bytes(attribute) b_value = to_bytes(value) # If flags are set we need to verify whether the attribute already exists or not. SMB doesn't have a native # create/replace mechanism so we need to implement that ourselves. 
if flags: xattrs = _get_extended_attributes(path, follow_symlinks=follow_symlinks, **kwargs) present = next((True for b_name, _ in xattrs if b_name == b_attribute), False) if flags == XATTR_CREATE and present: raise SMBOSError(NtStatus.STATUS_OBJECT_NAME_COLLISION, path) elif flags == XATTR_REPLACE and not present: raise SMBOSError(NtStatus.STATUS_OBJECT_NAME_NOT_FOUND, path) raw = SMBRawIO(path, mode='r', share_access='r', desired_access=FilePipePrinterAccessMask.FILE_WRITE_EA, create_options=0 if follow_symlinks else CreateOptions.FILE_OPEN_REPARSE_POINT, **kwargs) with SMBFileTransaction(raw) as transaction: ea_info = FileFullEaInformation() ea_info['ea_name'] = b_attribute ea_info['ea_value'] = b_value set_info(transaction, ea_info) def _delete(raw_type, path, **kwargs): # Ensures we delete the symlink (if present) and don't follow it down. co = CreateOptions.FILE_OPEN_REPARSE_POINT co |= { 'dir': CreateOptions.FILE_DIRECTORY_FILE, 'file': CreateOptions.FILE_NON_DIRECTORY_FILE, }.get(raw_type.FILE_TYPE, 0) # Setting a shared_access of rwd means we can still delete a file that has an existing handle open, the file will # be deleted when that handle is closed. This replicates the os.remove() behaviour when running on Windows locally. raw = raw_type(path, mode='r', share_access='rwd', desired_access=FilePipePrinterAccessMask.DELETE | FilePipePrinterAccessMask.FILE_WRITE_ATTRIBUTES, create_options=co, **kwargs) with SMBFileTransaction(raw) as transaction: # Make sure the file does not have the FILE_ATTRIBUTE_READONLY flag as Windows will fail to delete these files. 
basic_info = FileBasicInformation() basic_info['creation_time'] = 0 basic_info['last_access_time'] = 0 basic_info['last_write_time'] = 0 basic_info['change_time'] = 0 basic_info['file_attributes'] = FileAttributes.FILE_ATTRIBUTE_NORMAL if raw_type.FILE_TYPE == 'file' else \ FileAttributes.FILE_ATTRIBUTE_DIRECTORY set_info(transaction, basic_info) info_buffer = FileDispositionInformation() info_buffer['delete_pending'] = True set_info(transaction, info_buffer) def _get_extended_attributes(path, follow_symlinks=True, **kwargs): raw = SMBRawIO(path, mode='r', share_access='r', desired_access=FilePipePrinterAccessMask.FILE_READ_EA, create_options=0 if follow_symlinks else CreateOptions.FILE_OPEN_REPARSE_POINT, **kwargs) try: with SMBFileTransaction(raw) as transaction: # We don't know the EA size and FileEaInformation is too unreliable so just set the max size to the SMB2 # payload length. It seems to fail if it goes any higher than this. query_info(transaction, FileFullEaInformation, flags=QueryInfoFlags.SL_RESTART_SCAN, output_buffer_length=MAX_PAYLOAD_SIZE) except SMBOSError as err: if err.ntstatus == NtStatus.STATUS_NO_EAS_ON_FILE: return [] raise return [(e['ea_name'].get_value(), e['ea_value'].get_value()) for e in transaction.results[0]] def _get_reparse_point(path, **kwargs): raw = SMBRawIO(path, mode='r', desired_access=FilePipePrinterAccessMask.FILE_READ_ATTRIBUTES, create_options=CreateOptions.FILE_OPEN_REPARSE_POINT, **kwargs) with SMBFileTransaction(raw) as transaction: ioctl_request(transaction, CtlCode.FSCTL_GET_REPARSE_POINT, output_size=16384, flags=IOCTLFlags.SMB2_0_IOCTL_IS_FSCTL) reparse_buffer = ReparseDataBuffer() reparse_buffer.unpack(transaction.results[0]) return reparse_buffer def _rename_information(src, dst, replace_if_exists=False, **kwargs): verb = 'replace' if replace_if_exists else 'rename' norm_src = ntpath.normpath(src) norm_dst = ntpath.normpath(dst) if not norm_dst.startswith('\\\\'): raise ValueError("dst must be an absolute path to 
where the file or directory should be %sd." % verb) src_root = ntpath.splitdrive(norm_src)[0] dst_root, dst_name = ntpath.splitdrive(norm_dst) if src_root.lower() != dst_root.lower(): raise ValueError("Cannot %s a file to a different root than the src." % verb) raw = SMBRawIO(src, mode='r', share_access='rwd', desired_access=FilePipePrinterAccessMask.DELETE, create_options=CreateOptions.FILE_OPEN_REPARSE_POINT, **kwargs) with SMBFileTransaction(raw) as transaction: file_rename = FileRenameInformation() file_rename['replace_if_exists'] = replace_if_exists file_rename['file_name'] = to_text(dst_name[1:]) # dst_name has \ prefix from splitdrive, we remove that. set_info(transaction, file_rename) def _set_basic_information(path, creation_time=0, last_access_time=0, last_write_time=0, change_time=0, file_attributes=0, follow_symlinks=True, **kwargs): raw = SMBRawIO(path, mode='r', share_access='rwd', desired_access=FilePipePrinterAccessMask.FILE_WRITE_ATTRIBUTES, create_options=0 if follow_symlinks else CreateOptions.FILE_OPEN_REPARSE_POINT, **kwargs) with SMBFileTransaction(raw) as transaction: basic_info = FileBasicInformation() basic_info['creation_time'] = creation_time basic_info['last_access_time'] = last_access_time basic_info['last_write_time'] = last_write_time basic_info['change_time'] = change_time basic_info['file_attributes'] = file_attributes set_info(transaction, basic_info) class SMBDirEntry(object): def __init__(self, raw, dir_info): self._smb_raw = raw self._dir_info = dir_info self._stat = None self._lstat = None def __str__(self): return '<{0}: {1!r}>'.format(self.__class__.__name__, to_native(self.name)) @property def name(self): """ The entry's base filename, relative to the scandir() path argument. """ return self._smb_raw.name.split("\\")[-1] @property def path(self): """ The entry's full path name. """ return self._smb_raw.name def inode(self): """ Return the inode number of the entry. The result is cached on the 'smcblient.DirEntry' object. 
Use 'smbclient.stat(entry.path, follow_symlinks=False).st_ino' to fetch up-to-date information. """ return self._dir_info['file_id'].get_value() def is_dir(self, follow_symlinks=True): """ Return 'True' if this entry is a directory or a symbolic link pointing to a directory; return 'False' if the entry is or points to any other kind of file, or if it doesn't exist anymore. If follow_symlinks is 'False', return 'True' only if this entry is a directory (without following symlinks); return 'False' if the entry is any other kind of file. The result is cached on the 'smcblient.DirEntry' object, with a separate cache for follow_symlinks 'True' and 'False'. Call 'smbclient.path.isdir(entry.path)' to fetch up-to-date information. On the first, uncached call, no SMB call is required unless the path is a reparse point. :param follow_symlinks: Whether to check if the entry's target is a directory (True) or the entry itself (False) if the entry is a symlink. :return: bool that states whether the entry is a directory or not. """ is_lnk = self.is_symlink() if follow_symlinks and is_lnk: return self._link_target_type_check(py_stat.S_ISDIR) else: # Python behaviour is to consider a symlink not a directory even if it has the DIRECTORY attribute. return not is_lnk and self._dir_info['file_attributes'].has_flag(FileAttributes.FILE_ATTRIBUTE_DIRECTORY) def is_file(self, follow_symlinks=True): """ Return 'True' if this entry is a file or a symbolic link pointing to a file; return 'False' if the entry is or points to a directory or other non-file entry. If follow_symlinks is 'False', return 'True' only if this entry is a file (without following symlinks); return 'False' if entry is a directory or other non-file entry. The result is cached on the 'smcblient.DirEntry' object, with a separate cache for follow_symlinks 'True' and 'False'. Call 'smbclient.path.isfile(entry.path)' to fetch up-to-date information. 
On the first, uncached call, no SMB call is required unless the path is a reparse point. :param follow_symlinks: Whether to check if the entry's target is a file (True) or the entry itself (False) if the entry is a symlink. :return: bool that states whether the entry is a file or not. """ is_lnk = self.is_symlink() if follow_symlinks and is_lnk: return self._link_target_type_check(py_stat.S_ISREG) else: # Python behaviour is to consider a symlink not a file even if it does not have the DIRECTORY attribute. return not is_lnk and \ not self._dir_info['file_attributes'].has_flag(FileAttributes.FILE_ATTRIBUTE_DIRECTORY) def is_symlink(self): """ Return 'True' if this entry is a symbolic link (even if broken); return 'False' if the entry points to a directory or any kind of file. The result is cached on the 'smcblient.DirEntry' object. Call 'smcblient.path.islink()' to fetch up-to-date information. On the first, uncached call, only files or directories that are reparse points requires another SMB call. The result is cached for subsequent calls. :return: Whether the path is a symbolic link. """ if self._dir_info['file_attributes'].has_flag(FileAttributes.FILE_ATTRIBUTE_REPARSE_POINT): # While a symlink is a reparse point, all reparse points aren't symlinks. We need to get the reparse tag # to use as our check. Unlike WIN32_FILE_DATA scanned locally, we don't get the reparse tag in the original # query result. We need to do a separate stat call to get this information. lstat = self.stat(follow_symlinks=False) return lstat.st_reparse_tag == ReparseTags.IO_REPARSE_TAG_SYMLINK else: return False def stat(self, follow_symlinks=True): """ Return a SMBStatResult object for this entry. This method follows symbolic links by default; to stat a symbolic link without following add the 'follow_symlinks=False' argument. This method always requires an extra SMB call or 2 if the path is a reparse point. 
The result is cached on the 'smcblient.DirEntry' object, with a separate cache for follow_symlinks 'True' and 'False'. Call 'smbclient.stat(entry.path)' to fetch up-to-date information. :param follow_symlinks: Whether to stat() the symlink target (True) or the symlink itself (False) if path is a symlink or not. :return: SMBStatResult object, see smbclient.stat() for more information. """ if follow_symlinks: if not self._stat: if self.is_symlink(): self._stat = stat(self.path) else: # Because it's not a symlink lstat will be the same as stat so set both. if self._lstat is None: self._lstat = lstat(self._smb_raw.name) self._stat = self._lstat return self._stat else: if not self._lstat: self._lstat = lstat(self.path) return self._lstat @classmethod def from_path(cls, path, **kwargs): file_stat = stat(path, **kwargs) # A DirEntry only needs these 2 properties to be set dir_info = FileIdFullDirectoryInformation() dir_info['file_attributes'] = file_stat.st_file_attributes dir_info['file_id'] = file_stat.st_ino dir_entry = cls(SMBRawIO(path, **kwargs), dir_info) dir_entry._stat = file_stat return dir_entry def _link_target_type_check(self, check): try: return check(self.stat(follow_symlinks=True).st_mode) except OSError as err: if err.errno == errno.ENOENT: # Missing target, broken symlink just return False return False raise
<reponame>kelaltech/passport-kelal-test import { userModelFactory as factory, IUserMethods } from './UserModel' export const userMethods: IUserMethods = { getFullName(): string { const p = factory.documentify(this) return `${p.name.first} ${p.name.last}` }, getEnglishGender(): string { const p = factory.documentify(this) return p.gender === 'F' ? 'Female' : p.gender === 'M' ? 'Male' : 'Not specified' }, getEnglishBirthdate( day = false, month = true, date = true, year = true ): string { const p = factory.documentify(this) const bd = new Date(p.birthdate) let str = '' if (day) { const days = [ 'Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' ] str += days[bd.getDay()] } if (month) { if (day) str += ', ' const months = [ 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December' ] str += months[bd.getMonth()] } if (date) { if (day || month) str += ' ' str += String(bd.getDate()) } if (year) { if (day || month || date) str += ', ' str += String(bd.getFullYear()) } return str }, getAge(): number { const p = factory.documentify(this) const ageInMs = Date.now() - p.birthdate.getTime() return new Date(ageInMs).getFullYear() - 1970 }, getEmailsList(): string[] { const p = factory.documentify(this) const list: string[] = [] for (const email of p.emails) list.push(email.address) return list }, getPhonesList(): string[] { const p = factory.documentify(this) const list: string[] = [] for (const phone of p.phones) list.push(`(+${phone.code}) ${phone.number}`) return list } }
#!/bin/bash echo '$#: ' $# echo '$@: ' $@ echo '$*: ' $* echo echo '$1 $2 $9 $10 are: ' $1 $2 $9 $10 echo shift echo '$#: ' $# echo '$@: ' $@ echo '$*: ' $* echo echo '$1 $2 $9 are: ' $1 $2 $9 shift 2 echo '$#: ' $# echo '$@: ' $@ echo '$*: ' $* echo echo '$1 $2 $9 are: ' $1 $2 $9 echo '${10}: ' ${10}
<gh_stars>1-10 // // Created by JinWen on 2019/4/29. // #ifndef JETANALYSOR_JETSELECTOR_H #define JETANALYSOR_JETSELECTOR_H #include <vector> #include <fastjet/PseudoJet.hh> #include <fastjet/ClusterSequence.hh> #include <fastjet/Selector.hh> #include "utils.h" using namespace std; using namespace fastjet; namespace JetAnalysor { vector<PseudoJet> SelectJet(const vector<PseudoJet> &hadrons, JetDefinition jet_def, Selector select); vector<PseudoJet> SelectJet(const vector<PseudoJet> &hadrons, JetDefinition jet_def, Selector select) { ClusterSequence cluster(hadrons, jet_def); vector<PseudoJet> jets = sorted_by_pt(select(cluster.inclusive_jets())); return jets; } } #endif //JETANALYSOR_JETSELECTOR_H
package com.slimgears.rxrepo.expressions.internal;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.auto.value.AutoValue;
import com.slimgears.rxrepo.expressions.ConstantExpression;
import com.slimgears.rxrepo.expressions.StringExpression;

/**
 * AutoValue-backed constant expression holding a {@link String} value.
 *
 * @param <S> the expression's source entity type
 */
@AutoValue
public abstract class StringConstantExpression<S> implements
        ConstantExpression<S, String>,
        StringExpression<S> {
    /**
     * Jackson creator used when deserializing the expression tree.
     *
     * @param type  serialized expression type discriminator
     * @param value the constant string payload
     */
    @JsonCreator
    public static <S> StringConstantExpression<S> create(
            @JsonProperty("type") Type type,
            // Fix: the property name was missing; a bare @JsonProperty on a
            // creator parameter cannot be bound without implicit-name support,
            // so deserialization of "value" would fail.
            @JsonProperty("value") String value) {
        return new AutoValue_StringConstantExpression<>(type, value);
    }
}
#!/bin/bash

# this script works on a specialized type of git checkout that has been configured
# to push to a "downstream" repository, while still pulling from its normal origin.
# the downstream destination might be a github or sourceforge repository that is
# loaded from a personal repository or server.
#
# it is assumed that you have already added your ssh key to your github account.
#
# to set up the repository for relaying downstream, just do the normal checkout
# or clone on it from the real origin.  for example:
#
#   $ git clone git://feistymeow.org/feisty_meow feisty_relay
#
# change into that new directory:
#
#   $ pushd feisty_relay
#
# and then add the downstream remote repository:
#
#   # github example of add:
#   $ git remote add downstream git@github.com:fredhamster/feisty_meow.git
#
#   # sourceforge example of add:
#   $ git remote add downstream ssh://fred_t_hamster@git.code.sf.net/p/feistymeow/trunk
#
# once the repository has been created, you can synch all updates that
# have been checked into the origin repository with the downstream version
# by running this command:
#
#   push_repo_downstream ~/relay_repo_folder

# Pull in the feisty_meow helper functions used below
# (save_terminal_title, exit_on_error, generate_rev_ctrl_filelist, ...).
source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"

save_terminal_title

# turn off occasionally troublesome setting before checkin.
unset GIT_SSH

##############

# first (optional) argument: the repository directory to operate on;
# defaults to the current directory when omitted.
dir="$1"; shift
if [ -z "$dir" ]; then
  dir=.
fi

pushd "$dir" &>/dev/null
exit_on_error "changing to directory: $dir"

# build the list of files under revision control and run a careful
# update pass over them before relaying anything downstream.
tempfile=$(generate_rev_ctrl_filelist)
exit_on_error "generating revision control file list"

perform_revctrl_action_on_file "$tempfile" do_revctrl_careful_update
exit_on_error "doing a careful update on: $tempfile"

rm "$tempfile"

# seems to be needed to cause a merge to be resolved.
git pull downstream main
# -m "unfortunate merge"
exit_on_error "running the git pull downstream main"

# send our little boat down the stream to the dependent repository.
git push --tags downstream main
exit_on_error "running the git push downstream main"

# do our dev branch also.
# continue_on_error: a missing dev branch downstream is tolerated.
git push --tags downstream dev
continue_on_error "running the git push downstream dev: is there a dev branch?"

popd &>/dev/null

restore_terminal_title
import cv2
import numpy as np


def process_image(input_image: np.ndarray) -> np.ndarray:
    """Shrink an image to 64x64 and convert it from BGR to RGB.

    Args:
        input_image: Source image in OpenCV's default BGR channel order.

    Returns:
        The resized image with channels reordered to RGB.
    """
    # Resize first, then swap channels — the order matters for interpolation
    # cost only, but it matches the original pipeline exactly.
    shrunk = cv2.resize(input_image, (64, 64))
    return cv2.cvtColor(shrunk, cv2.COLOR_BGR2RGB)
package common.service;

import io.netty.channel.ChannelHandlerContext;

import java.io.File;
import java.io.RandomAccessFile;

/**
 * Transfers files between peers in fixed-size chunks over a Netty channel,
 * using a {@code FileLoad} message as the chunk envelope (payload bytes,
 * offsets and part-accounting).
 */
public class FileLoadService {

    /** Bytes transferred per chunk. */
    private static final int PART_SIZE = 1024 * 10;

    /**
     * Writes the chunk carried by {@code fileLoad} into the destination file
     * at the chunk's start offset, then — unless this was the last part —
     * advances the offset and echoes the message back to request the next
     * chunk.
     *
     * @param fileLoad chunk envelope: bytes, read count, offset, dst path
     * @param ctx      channel used to request the next chunk
     * @throws Exception on any I/O failure
     */
    public void writeFile(FileLoad fileLoad, ChannelHandlerContext ctx) throws Exception {
        byte[] bytes = fileLoad.getBytes();
        int byteRead = fileLoad.getByteRead();
        int startPos = fileLoad.getStartPos();
        File file = new File(fileLoad.getDstPath());
        // Fix: try-with-resources — the original leaked the file handle
        // whenever seek/write threw.
        try (RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw")) {
            randomAccessFile.seek(startPos);
            randomAccessFile.write(bytes);
        }
        if (fileLoad.isNotLastPart()) {
            fileLoad.setStartPos(startPos + byteRead);
            ctx.writeAndFlush(fileLoad);
        }
    }

    /**
     * Reads the next chunk of the source file starting at the envelope's
     * offset, fills in the payload and progress bookkeeping, and flushes the
     * envelope to the peer. Marks the envelope as the last part when fewer
     * than {@link #PART_SIZE} bytes remain.
     *
     * @param fileLoad chunk envelope: offset, source path, progress counters
     * @param ctx      channel the filled chunk is written to
     * @throws Exception on any I/O failure
     */
    public void readFile(FileLoad fileLoad, ChannelHandlerContext ctx) throws Exception {
        int startPos = fileLoad.getStartPos();
        try (RandomAccessFile randomAccessFile =
                     new RandomAccessFile(fileLoad.getSourcePath(), "r")) {
            randomAccessFile.seek(startPos);
            long remaining = randomAccessFile.length() - startPos;
            // Fix: chunk length is now a local. The original stored it in a
            // mutable instance field ("arrayLength") that was reset after each
            // call — racy if the service is shared between channels.
            int chunkLength;
            if (remaining > PART_SIZE) {
                chunkLength = PART_SIZE;
            } else {
                chunkLength = (int) remaining;
                fileLoad.setNotLastPart(false);
            }
            byte[] buffer = new byte[chunkLength];
            int byteRead = randomAccessFile.read(buffer);
            fileLoad.setByteRead(byteRead);
            fileLoad.setBytes(buffer);
            fileLoad.setCountParts((int) (randomAccessFile.length() / PART_SIZE));
            fileLoad.setCountProgress(fileLoad.getCountProgress() + 1);
            ctx.writeAndFlush(fileLoad);
        }
    }
}
module Oxcelix
  module Numformats
    # Built-in Excel number formats that carry real formatting information,
    # keyed by numeric format id. Each value is a triple:
    #   [Excel format string, strftime/sprintf-style output string (nil if
    #    none), value class ('numeric' | 'date' | 'rational' | 'string')].
    #
    # Ids missing from this table — including the reserved CJK/locale slots
    # (5..8, 23..36, 41..44) and the implementation-defined range 50..163 —
    # fall back to the plain-string placeholder below, exactly as the
    # previous hand-written table did.
    BUILTIN_FORMATS = {
      0  => ['General', nil, 'string'],
      1  => ['0', '%1d', 'numeric'],
      2  => ['0.00', '%1.2f', 'numeric'],
      3  => ['#,##0', '%#4d', 'numeric'],
      4  => ['#,##0.00', '%#4.2f', 'numeric'],
      9  => ['0%', '%1d%', 'numeric'],
      10 => ['0.00%', '%1.2f%', 'numeric'],
      11 => ['0.00E+00', '%1.2fE+', 'numeric'],
      12 => ['# ?/?', '%#1d', 'rational'],
      13 => ['# ??/??', '%#1d', 'rational'],
      14 => ['d/m/yyyy', '%-d/%-m/%Y', 'date'],
      15 => ['d-mmm-yy', '%-d-%b-%y', 'date'],
      16 => ['d-mmm', '%-d-%b', 'date'],
      17 => ['mmm-yy', '%b-%y', 'date'],
      18 => ['h:mm tt', '%-k:%M tt', 'date'],
      19 => ['h:mm:ss tt', '%-k:%M:%-S tt', 'date'],
      20 => ['H:mm', '%-k:%M', 'date'],
      21 => ['H:mm:ss', '%-k:%M:%-S', 'date'],
      22 => ['m/d/yyyy H:mm', '%-m/%-d/%Y %-k:%M', 'date'],
      37 => ['#,##0 ;(#,##0)', '%#4d', 'numeric'],
      38 => ['#,##0 ;[Red](#,##0)', '%#4d', 'numeric'],
      39 => ['#,##0.00;(#,##0.00)', '%#4.2f', 'numeric'],
      40 => ['#,##0.00;[Red](#,##0.00)', '%#4.2f', 'numeric'],
      45 => ['mm:ss', '%M:%-S', 'date'],
      46 => ['[h]:mm:ss', '%-k:%M:%-S', 'date'],
      47 => ['mmss.0', '%M%-S.%1n', 'date'],
      48 => ['##0.0E+0', '%#3.1E', 'numeric'],
      49 => ['Text', nil, 'string'],
    }.freeze

    # Formatarray keeps the historical public shape: an array of 164 hashes
    # (ids '0'..'163', in order), generated instead of hand-duplicated —
    # the old file repeated the identical empty-string placeholder entry
    # well over a hundred times.
    Formatarray = (0..163).map do |id|
      xl, ostring, cls = BUILTIN_FORMATS.fetch(id, ['', nil, 'string'])
      { :id => id.to_s, :xl => xl, :ostring => ostring, :cls => cls }
    end
  end
end
#!/usr/bin/env bash
# Build the Game Boy Color ROM in this directory with the rgbds toolchain
# (rgbasm -> rgblink -> rgbfix). The ROM is named after the directory.
#
# Fix: the shebang was #!/usr/bin/sh, but pushd/popd are bash builtins and
# are not guaranteed by POSIX sh — the script could fail under dash etc.

FULL_PATH=$(realpath "$0")
BASE_DIR=$(dirname "$FULL_PATH")
ROM_NAME=$(basename "$BASE_DIR")

# Quoted so paths containing spaces work.
pushd "$BASE_DIR"

# Assemble each translation unit; abort on the first failure.
rgbasm -Werror -Weverything -o main.o main.rgbasm
[ $? -eq 0 ] || exit 1
rgbasm -Werror -Weverything -o sample.o sample.rgbasm
[ $? -eq 0 ] || exit 1

# Link into a 32 KiB ("tiny") ROM, emitting map and symbol files for debugging.
rgblink --tiny --map $ROM_NAME.map --sym $ROM_NAME.sym -o $ROM_NAME.gbc main.o sample.o
[ $? -eq 0 ] || exit 1

# Patch the header (title, CGB flag, padding) and verify it.
rgbfix --title game --color-only --pad-value 0 --validate $ROM_NAME.gbc
[ $? -eq 0 ] || exit 1

popd
exit 0
let sqlite3 = require("sqlite3").verbose();

const path = "./Backend/datasource/food-app.db";

// Development helper for creating/dropping tables. Uncomment exactly one of
// the statements below before calling.
// Fix: previously `query` was never defined (all candidates commented out),
// so calling this threw a ReferenceError; now it fails with a clear message.
let createTable = () => {
  let db = new sqlite3.Database(path);
  let query;
  // query = `CREATE TABLE user(
  //     USERID INT PRIMARY KEY NOT NULL,
  //     USERNAME TEXT NOT NULL,
  //     USERPASS INT NOT NULL
  //   );`;
  // query = `CREATE TABLE inventory(
  //     USERID INT NOT NULL,
  //     ITEM TEXT NOT NULL,
  //     QTY INT NOT NULL,
  //     LOCATION TEXT NULL,
  //     UOM TEXT NULL,
  //     EXPIRES TEXT NOT NULL,
  //     ITEMID INT PRIMARY KEY NOT NULL
  //   );`;
  // query = `DROP TABLE inventory`;
  if (!query) {
    console.log("createTable: no statement selected; uncomment one query above.");
    db.close();
    return;
  }
  db.run(query, (err) => {
    if (err) return console.log(err);
    db.close();
    console.log(`Table Created`);
  });
};

// Inserts a new user row. Resolves with no value on success, rejects with the
// sqlite error message on failure.
let addUser = function (user) {
  return new Promise(function (resolve, reject) {
    // Fix: `db` must be declared outside the try — the original declared it
    // inside, so the catch block's db.close() referenced an out-of-scope name.
    let db = new sqlite3.Database(path);
    try {
      // NOTE(review): random id in [900, 1000] can collide; consider
      // INTEGER PRIMARY KEY AUTOINCREMENT instead — confirm schema intent.
      let userID = Math.floor(Math.random() * (1000 - 900 + 1) + 900);
      // Fix: parameter binding replaces string interpolation, closing the
      // SQL-injection hole in userName/userPass.
      let insertQuery = `INSERT INTO user (userID,userName,userPass) VALUES (?,?,?);`;
      db.run(insertQuery, [userID, user.userName, user.userPass], function (err) {
        if (err) return reject(err.message);
        resolve();
        db.close();
        console.log(`A user has been inserted with rowid ${this.lastID}`);
      });
    } catch (err) {
      console.log(err);
      db.close();
      reject(err);
    }
  });
};

// Inserts a new inventory row. Resolves with no value on success.
let addItem = function (item) {
  return new Promise(function (resolve, reject) {
    let db = new sqlite3.Database(path);
    try {
      // NOTE(review): random item id can collide, same as addUser.
      let itemID = Math.floor(Math.random() * (10000 - 9000 + 1) + 9000);
      // Fix: the original bound unitOfMeasure into ITEM and itemName into UOM
      // (columns 2 and 5 swapped — editItem's UPDATE confirms the intended
      // mapping). Also parameterized against SQL injection.
      let insertQuery = `INSERT INTO inventory(USERID,ITEM,QTY,LOCATION,UOM,EXPIRES,ITEMID) VALUES (?,?,?,?,?,?,?);`;
      let params = [
        item.userid,
        item.itemName,
        item.quantity,
        item.location,
        item.unitOfMeasure,
        item.expires,
        itemID,
      ];
      db.run(insertQuery, params, function (err) {
        if (err) return reject(err.message);
        resolve();
        db.close();
        console.log(`A row has been inserted with rowid ${this.lastID}`);
      });
    } catch (err) {
      console.log(err);
      db.close();
      reject(err);
    }
  });
};
// Lists every inventory row (sorted by item name), flagging with `canEdit`
// the rows owned by `userID`. Resolves with the row array.
let listItems = function (userID) {
  return new Promise(function (resolve, reject) {
    let db = new sqlite3.Database(path);
    try {
      let list = [];
      db.serialize(function () {
        let sqlQuery = `SELECT * FROM inventory ORDER BY item;`;
        db.all(sqlQuery, function (err, rows) {
          if (err) return reject(err.message);
          rows.forEach(function (row) {
            // Loose equality kept on purpose: ids may arrive as strings.
            row.canEdit = row.USERID == userID ? true : false;
            list.push(row);
          });
          db.close();
          resolve(list);
        });
      });
    } catch (err) {
      console.log(err);
      db.close();
      reject(err);
    }
  });
};

// Lists only the inventory rows owned by `userID`.
let listItemsByUser = function (userID) {
  return new Promise(function (resolve, reject) {
    let db = new sqlite3.Database(path);
    try {
      let list = [];
      db.serialize(function () {
        // Fix: userID is now bound as a parameter instead of interpolated
        // into the SQL string (injection hole).
        let sqlQuery = `SELECT * FROM inventory WHERE USERID = ? ORDER BY item;`;
        db.all(sqlQuery, [userID], function (err, rows) {
          if (err) return reject(err.message);
          rows.forEach(function (row) {
            list.push(row);
          });
          db.close();
          resolve(list);
          console.log(list);
        });
      });
    } catch (err) {
      console.log(err);
      db.close();
      reject(err);
    }
  });
};

// Ad-hoc seeding helper used during development; toggle the calls as needed.
let massageData = async () => {
  let itemObject = {};
  itemObject.userid = 0;
  itemObject.unitOfMeasure = "bucket";
  itemObject.quantity = 4;
  itemObject.location = "bin";
  itemObject.itemName = "chili";
  itemObject.expires = "04/25/2022";

  let userObject = {};
  userObject.userName = "steve";
  userObject.userPass = "<PASSWORD>"; // placeholder — never commit a real credential
  await addUser(userObject);
  // await addItem(itemObject);
  // await editItem(itemObject);
};

// Updates an existing inventory row identified by itemData.itemID.
let editItem = function (itemData) {
  return new Promise(function (resolve, reject) {
    let db = new sqlite3.Database(path);
    try {
      let params = [
        itemData.unitOfMeasure,
        itemData.quantity,
        itemData.location,
        itemData.itemName,
        itemData.expires,
        // Fix: itemID bound as a parameter rather than interpolated.
        itemData.itemID,
      ];
      let updateQuery = `UPDATE inventory SET UOM=?,QTY=?,LOCATION=?,ITEM=?,EXPIRES=?
        WHERE ITEMID = ?`;
      // Fix: db.run, not db.get — UPDATE returns no row.
      db.run(updateQuery, params, function (err) {
        if (err) return reject(err.message);
        resolve();
        db.close();
        console.log(`An item has been edited`);
      });
    } catch (err) {
      console.log(err);
      db.close();
      reject(err);
    }
  });
};

// Deletes the inventory row with the given item id.
let deleteItem = function (itemID) {
  return new Promise(function (resolve, reject) {
    let db = new sqlite3.Database(path);
    try {
      // Fix: parameterized, and db.run for a statement with no result row.
      let deleteQuery = `DELETE FROM inventory WHERE ITEMID = ?`;
      db.run(deleteQuery, [itemID], function (err) {
        if (err) return reject(err.message);
        resolve();
        db.close();
        console.log(`Item[${itemID}] has been deleted`);
      });
    } catch (err) {
      console.log(err);
      db.close();
      reject(err);
    }
  });
};

// Resolves with the single inventory row matching itemID (or undefined).
let findItem = function (itemID) {
  return new Promise(function (resolve, reject) {
    let db = new sqlite3.Database(path);
    try {
      // Fix: parameterized lookup.
      let findQuery = `SELECT * FROM inventory WHERE ITEMID = ?`;
      db.get(findQuery, [itemID], function (err, item) {
        if (err) return reject(err.message);
        resolve(item);
        db.close();
        console.log(`Found Item[${itemID}]\r\n`);
      });
    } catch (err) {
      console.log(err);
      db.close();
      reject(err);
    }
  });
};

// Convenience wrapper: fetch and dump the full list for a user.
let getItems = async (userID) => {
  let items = await listItems(userID);
  console.log(items);
};

// Manual smoke-test toggles.
// massageData();
// createTable();
// getItems(1);
// deleteItem("");
findItem("9969");
// listItemsByUser("1");
def sort_array(arr):
    """Sort ``arr`` in ascending order, in place, and return it.

    Args:
        arr: List of mutually comparable items.

    Returns:
        The same list object, sorted ascending.
    """
    # list.sort() (Timsort, O(n log n)) replaces the hand-rolled O(n^2)
    # bubble sort while keeping the original in-place-and-return contract.
    arr.sort()
    return arr

arr = [3,9,12,1,10]
print(sort_array(arr))

# Output: [1, 3, 9, 10, 12]
<reponame>lananh265/social-network "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.flag = void 0; var flag = { "viewBox": "0 0 16 16", "children": [{ "name": "path", "attribs": { "fill": "#000000", "d": "M0 0h2v16h-2v-16z" } }, { "name": "path", "attribs": { "fill": "#000000", "d": "M13 10.047c1.291 0 2.415-0.312 3-0.773v-8c-0.585 0.461-1.709 0.773-3 0.773s-2.415-0.312-3-0.773v8c0.585 0.461 1.709 0.773 3 0.773z" } }, { "name": "path", "attribs": { "fill": "#000000", "d": "M9.5 0.508c-0.733-0.312-1.805-0.508-3-0.508-1.506 0-2.818 0.312-3.5 0.773v8c0.682-0.461 1.994-0.773 3.5-0.773 1.195 0 2.267 0.197 3 0.508v-8z" } }] }; exports.flag = flag;
import ExpressServer from './express-server.js'
import PostgresReviewsService from './postgres-reviews-service.js'
import logger from './logger.js'

// Fallbacks used when neither Cloud Foundry nor Kubernetes provide config.
const DB_CONNECTION_URI_DEFAULT = 'postgres://postgres@localhost:6543/postgres'
const PORT_DEFAULT = 9090

// Cloud Foundry exposes bound services through the VCAP_SERVICES JSON blob;
// returns undefined when the variable is absent so the next source wins.
function cloudFoundryDbUri () {
  if (!process.env.VCAP_SERVICES) return undefined
  return JSON.parse(process.env.VCAP_SERVICES).postgresql[0].credentials.uri
}

// Precedence: Cloud Foundry binding > Kubernetes env var > local default.
const dbConnectionUri =
  cloudFoundryDbUri() || process.env.POSTGRES_URI || DB_CONNECTION_URI_DEFAULT

const defaultLogger = logger.create()
const reviewsService = new PostgresReviewsService(dbConnectionUri, defaultLogger)

const server = new ExpressServer(reviewsService, defaultLogger)
server.start(process.env.PORT || PORT_DEFAULT)
// tasks.component.ts import { Component, OnInit } from '@angular/core'; @Component({ selector: 'app-tasks', templateUrl: './tasks.component.html', styleUrls: ['./tasks.component.scss'] }) export class TasksComponent implements OnInit { tasks: string[]; constructor() { this.tasks = []; } ngOnInit(): void { // Fetch and populate the task list } } // login.component.ts import { Component, OnInit } from '@angular/core'; @Component({ selector: 'app-login', templateUrl: './login.component.html', styleUrls: ['./login.component.scss'] }) export class LoginComponent implements OnInit { username: string; password: string; constructor() { } ngOnInit(): void { } onSubmit() { // Handle login logic } }
package dev.p0ke.fkcounter.config;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;

import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

/**
 * Persists {@code ConfigSetting} values — each setting's boolean flag plus,
 * when present, its on-screen position — to a JSON config file.
 */
public class ConfigHandler {

	private File configFile;
	private JsonObject configJson;

	/** @param file the JSON file settings are read from / written to */
	public ConfigHandler(File file) {
		configFile = file;
	}

	/**
	 * Loads settings from the config file into the {@code ConfigSetting}
	 * enum. If the file does not exist yet, writes a fresh one with the
	 * current (default) values instead. Read/parse failures are logged and
	 * otherwise ignored, leaving defaults in place.
	 */
	public void loadConfig() {
		if (!configFile.exists()) {
			saveConfig();
			return;
		}
		// Fix: try-with-resources — the original leaked the reader whenever
		// reading or JSON parsing threw before br.close().
		try (BufferedReader br = new BufferedReader(new FileReader(configFile))) {
			StringBuilder builder = new StringBuilder();
			String line;
			while ((line = br.readLine()) != null) {
				builder.append(line);
			}
			configJson = new JsonParser().parse(builder.toString()).getAsJsonObject();
			for (ConfigSetting setting : ConfigSetting.values()) {
				if (configJson.has(setting.getTitle())) {
					setting.setValue(configJson.get(setting.getTitle()).getAsBoolean());
				}
				// Positions are stored alongside the flag under "<TITLE>_POS"
				// as a [relativeX, relativeY] array.
				String posKey = setting.getTitle() + "_POS";
				if (configJson.has(posKey)) {
					JsonArray posArray = configJson.getAsJsonArray(posKey);
					setting.getData().setScreenPos(posArray.get(0).getAsDouble(), posArray.get(1).getAsDouble());
				}
			}
		} catch (Exception e) {
			System.out.println("[FKCounter] Failed to read config!");
		}
	}

	/**
	 * Serializes every {@code ConfigSetting} (and its screen position, when
	 * it has associated data) to the config file, creating the file first if
	 * necessary. Failures are logged and swallowed.
	 */
	public void saveConfig() {
		configJson = new JsonObject();
		try {
			configFile.createNewFile();
			// Fix: try-with-resources — the original leaked the writer when
			// any serialization step threw before bw.close().
			try (BufferedWriter bw = new BufferedWriter(new FileWriter(configFile))) {
				for (ConfigSetting setting : ConfigSetting.values()) {
					configJson.addProperty(setting.getTitle(), setting.getValue());
					if (setting.getData() != null) {
						JsonArray posArray = new JsonArray();
						// Route the doubles through Gson's tree model so the
						// stored representation matches what loadConfig reads.
						posArray.add(new GsonBuilder().create().toJsonTree(setting.getData().getScreenPos().getRelativeX()));
						posArray.add(new GsonBuilder().create().toJsonTree(setting.getData().getScreenPos().getRelativeY()));
						configJson.add(setting.getTitle() + "_POS", posArray);
					}
				}
				bw.write(configJson.toString());
			}
		} catch (Exception e) {
			System.out.println("[FKCounter] Failed to save config!");
		}
	}
}
package com.ylesb.config; /** * @title: ExceptionHandlerPage * @projectName springcloud-alibaba * @description: TODO * @author White * @site : [www.ylesb.com] * @date 2022/1/1215:48 */ import com.alibaba.csp.sentinel.adapter.spring.webmvc.callback.BlockExceptionHandler; import com.alibaba.csp.sentinel.slots.block.BlockException; import com.alibaba.csp.sentinel.slots.block.authority.AuthorityException; import com.alibaba.csp.sentinel.slots.block.degrade.DegradeException; import com.alibaba.csp.sentinel.slots.block.flow.FlowException; import com.alibaba.csp.sentinel.slots.block.flow.param.ParamFlowException; import com.alibaba.csp.sentinel.slots.system.SystemBlockException; import com.alibaba.fastjson.JSON; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * @className : ExceptionHandlerPage * @description : [描述说明该类的功能] * @author : [XuGuangchao] * @site : [www.ylesb.com] * @version : [v1.0] * @createTime : [2022/1/12 15:48] * @updateUser : [XuGuangchao] * @updateTime : [2022/1/12 15:48] * @updateRemark : [描述说明本次修改内容] */ public class ExceptionHandlerPage implements BlockExceptionHandler { @Override public void handle(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, BlockException e) throws Exception { //解决中文乱码 httpServletResponse.setContentType("application/json;charset=utf-8"); ResponseData data = null; if (e instanceof FlowException) { data = new ResponseData(-1, "限流了"); } else if (e instanceof DegradeException) { data = new ResponseData(-2, "降级了"); } else if (e instanceof ParamFlowException) { data = new ResponseData(-3, "参数限流了"); } else if (e instanceof SystemBlockException) { data = new ResponseData(-4, "系统负载异常了"); } else if (e instanceof AuthorityException) { data = new ResponseData(-5, "授权异常"); } httpServletResponse.getWriter().write(JSON.toJSONString(data)); } } @Data @AllArgsConstructor//全参构造 
@NoArgsConstructor//无参构造 class ResponseData{ private int code; private String message; }
#! /bin/sh
#
# Copyright (c) 1999, 2006 Tanuki Software Inc.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of the Java Service Wrapper and associated
# documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sub-license,
# and/or sell copies of the Software, and to permit persons to
# whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# Java Service Wrapper sh script.  Suitable for starting and stopping
# wrapped Java applications on UNIX platforms.
#
# This file is originally from Java Service Wrapper 3.2.3 distribution
# with alteration to fit the needs of AppAssembler Maven Plugin
#
#-----------------------------------------------------------------------------
# These settings can be modified to fit the needs of your application

# Application
APP_NAME="app"
APP_LONG_NAME="Test Project"

# discover BASEDIR (parent of the directory holding this script)
BASEDIR=`dirname "$0"`/..
BASEDIR=`(cd "$BASEDIR"; pwd)`

# If the script itself was invoked through a symlink, re-derive BASEDIR
# from the symlink target instead of the symlink location.
ls -l "$0" | grep -e '->' > /dev/null 2>&1
if [ $? = 0 ]; then
    #this is softlink
    _PWD=`pwd`
    _EXEDIR=`dirname "$0"`
    cd "$_EXEDIR"
    _BASENAME=`basename "$0"`
    _REALFILE=`ls -l "$_BASENAME" | sed 's/.*->\ //g'`
    BASEDIR=`dirname "$_REALFILE"`/..
    BASEDIR=`(cd "$BASEDIR"; pwd)`
    cd "$_PWD"
fi

# Wrapper
WRAPPER_CMD="./wrapper"
WRAPPER_CONF="$BASEDIR/conf/wrapper.conf"

# Priority at which to run the wrapper.  See "man nice" for valid priorities.
#  nice is only used if a priority is specified.
PRIORITY=

# Location of the pid file.
PIDDIR="$BASEDIR/test"

# If uncommented, causes the Wrapper to be shutdown using an anchor file.
#  When launched with the 'start' command, it will also ignore all INT and
#  TERM signals.
#IGNORE_SIGNALS=true

# If specified, the Wrapper will be run as the specified user.
# IMPORTANT - Make sure that the user has the required privileges to write
#  the PID file and wrapper.log files.  Failure to be able to write the log
#  file will cause the Wrapper to exit without any way to write out an error
#  message.
# NOTE - This will set the user which is used to run the Wrapper as well as
#  the JVM and is not useful in situations where a privileged resource or
#  port needs to be allocated prior to the user being changed.
#RUN_AS_USER=

# The following two lines are used by the chkconfig command. Change as is
#  appropriate for your application.  They should remain commented.
# chkconfig: 2345 20 80
# description: Test Project

# Do not modify anything beyond this point
#-----------------------------------------------------------------------------

# Get the fully qualified path to the script
case $0 in
    /*)
        SCRIPT="$0"
        ;;
    *)
        PWD=`pwd`
        SCRIPT="$PWD/$0"
        ;;
esac

# Resolve the true real path without any sym links.
# Each pass resolves one level of links; CHANGED stays set until a pass
# makes no further change.
CHANGED=true
while [ "X$CHANGED" != "X" ]
do
    # Change spaces to ":" so the tokens can be parsed.
    SAFESCRIPT=`echo $SCRIPT | sed -e 's; ;:;g'`
    # Get the real path to this script, resolving any symbolic links
    TOKENS=`echo $SAFESCRIPT | sed -e 's;/; ;g'`
    REALPATH=
    for C in $TOKENS; do
        # Change any ":" in the token back to a space.
        C=`echo $C | sed -e 's;:; ;g'`
        REALPATH="$REALPATH/$C"
        # If REALPATH is a sym link, resolve it.  Loop for nested links.
        while [ -h "$REALPATH" ] ; do
            LS="`ls -ld "$REALPATH"`"
            LINK="`expr "$LS" : '.*-> \(.*\)$'`"
            if expr "$LINK" : '/.*' > /dev/null; then
                # LINK is absolute.
                REALPATH="$LINK"
            else
                # LINK is relative.
                REALPATH="`dirname "$REALPATH"`""/$LINK"
            fi
        done
    done

    if [ "$REALPATH" = "$SCRIPT" ]
    then
        CHANGED=""
    else
        SCRIPT="$REALPATH"
    fi
done

# Change the current directory to the location of the script
cd "`dirname "$REALPATH"`"
REALDIR=`pwd`

# If the PIDDIR is relative, set its value relative to the full REALPATH to avoid problems if
#  the working directory is later changed.
FIRST_CHAR=`echo $PIDDIR | cut -c1,1`
if [ "$FIRST_CHAR" != "/" ]
then
    PIDDIR=$REALDIR/$PIDDIR
fi
# Same test for WRAPPER_CMD
FIRST_CHAR=`echo $WRAPPER_CMD | cut -c1,1`
if [ "$FIRST_CHAR" != "/" ]
then
    WRAPPER_CMD=$REALDIR/$WRAPPER_CMD
fi
# Same test for WRAPPER_CONF
FIRST_CHAR=`echo $WRAPPER_CONF | cut -c1,1`
if [ "$FIRST_CHAR" != "/" ]
then
    WRAPPER_CONF=$REALDIR/$WRAPPER_CONF
fi

# Process ID
ANCHORFILE="$PIDDIR/$APP_NAME.anchor"
PIDFILE="$PIDDIR/$APP_NAME.pid"
LOCKDIR="/var/lock/subsys"
LOCKFILE="$LOCKDIR/$APP_NAME"
pid=""

# Resolve the location of the 'ps' command
PSEXE="/usr/bin/ps"
if [ ! -x "$PSEXE" ]
then
    PSEXE="/bin/ps"
    if [ ! -x "$PSEXE" ]
    then
        echo "Unable to locate 'ps'."
        echo "Please report this message along with the location of the command on your system."
        exit 1
    fi
fi

# Resolve the os
DIST_OS=`uname -s | tr "[A-Z]" "[a-z]" | tr -d ' '`
case "$DIST_OS" in
    'sunos')
        DIST_OS="solaris"
        ;;
    'hp-ux' | 'hp-ux64')
        DIST_OS="hpux"
        ;;
    'darwin')
        DIST_OS="macosx"
        ;;
    'unix_sv')
        DIST_OS="unixware"
        ;;
esac

# Resolve the architecture
DIST_ARCH=`uname -p | tr "[A-Z]" "[a-z]" | tr -d ' '`
if [ "$DIST_ARCH" = "unknown" ]
then
    DIST_ARCH=`uname -m | tr "[A-Z]" "[a-z]" | tr -d ' '`
fi
case "$DIST_ARCH" in
    'amd64' | 'athlon' | 'ia32' | 'ia64' | 'i386' | 'i486' | 'i586' | 'i686' | 'x86_64')
        DIST_ARCH="x86"
        ;;
    'ip27')
        DIST_ARCH="mips"
        ;;
    'power' | 'powerpc' | 'power_pc' | 'ppc64')
        DIST_ARCH="ppc"
        ;;
    'pa_risc' | 'pa-risc')
        DIST_ARCH="parisc"
        ;;
    'sun4u' | 'sparcv9')
        DIST_ARCH="sparc"
        ;;
    '9000/800')
        DIST_ARCH="parisc"
        ;;
esac

# Prints a candidate wrapper binary path, annotated when the file exists
# but is not executable.
outputFile() {
    if [ -f "$1" ]
    then
        echo "  $1 (Found but not executable.)";
    else
        echo "  $1"
    fi
}

# Decide on the wrapper binary to use.
# Start with 64 bit wrapper binary, fall back to 32-bit or the default one as needed
# For macosx, we also want to look for universal binaries.
if [ "$DIST_OS" = "macosx" ]
then
    DIST_ARCH="universal"
fi
WRAPPER_TEST_CMD="$WRAPPER_CMD-$DIST_OS-$DIST_ARCH-64"
"$WRAPPER_TEST_CMD" -v > /dev/null 2>&1
if [ "$?" = "0" ]
then
    WRAPPER_CMD="$WRAPPER_TEST_CMD"
else
    WRAPPER_TEST_CMD="$WRAPPER_CMD-$DIST_OS-$DIST_ARCH-32"
    "$WRAPPER_TEST_CMD" -v > /dev/null 2>&1
    if [ "$?" = "0" ]
    then
        WRAPPER_CMD="$WRAPPER_TEST_CMD"
    else
        WRAPPER_TEST_CMD="$WRAPPER_CMD"
        "$WRAPPER_TEST_CMD" -v > /dev/null 2>&1
        if [ "$?" != "0" ]
        then
            echo "Unable to locate any of the following operational binaries:"
            outputFile "$WRAPPER_CMD-$DIST_OS-$DIST_ARCH-64"
            outputFile "$WRAPPER_CMD-$DIST_OS-$DIST_ARCH-32"
            outputFile "$WRAPPER_CMD"
            exit 1
        fi
    fi
fi

# Build the nice clause
if [ "X$PRIORITY" = "X" ]
then
    CMDNICE=""
else
    CMDNICE="nice -$PRIORITY"
fi

# Build the anchor file clause.
if [ "X$IGNORE_SIGNALS" = "X" ]
then
    ANCHORPROP=
    IGNOREPROP=
else
    ANCHORPROP=wrapper.anchorfile=\"$ANCHORFILE\"
    IGNOREPROP=wrapper.ignore_signals=TRUE
fi

# Build the lock file clause.  Only create a lock file if the lock directory exists on this platform.
LOCKPROP=
if [ -d $LOCKDIR ]
then
    if [ -w $LOCKDIR ]
    then
        LOCKPROP=wrapper.lockfile=\"$LOCKFILE\"
    fi
fi

# If RUN_AS_USER is configured and we are not already that user, re-execute
# this script via su as that user (preparing the lock file first when $1 is
# set) and exit with the child's status.
checkUser() {
    # $1 touchLock flag
    # $2 command

    # Check the configured user.  If necessary rerun this script as the desired user.
    if [ "X$RUN_AS_USER" != "X" ]
    then
        # Resolve the location of the 'id' command
        IDEXE="/usr/xpg4/bin/id"
        if [ ! -x "$IDEXE" ]
        then
            IDEXE="/usr/bin/id"
            if [ ! -x "$IDEXE" ]
            then
                echo "Unable to locate 'id'."
                echo "Please report this message along with the location of the command on your system."
                exit 1
            fi
        fi

        if [ "`$IDEXE -u -n`" = "$RUN_AS_USER" ]
        then
            # Already running as the configured user.  Avoid password prompts by not calling su.
            RUN_AS_USER=""
        fi
    fi
    if [ "X$RUN_AS_USER" != "X" ]
    then
        # If LOCKPROP and $RUN_AS_USER are defined then the new user will most likely not be
        # able to create the lock file.  The Wrapper will be able to update this file once it
        # is created but will not be able to delete it on shutdown.  If $2 is defined then
        # the lock file should be created for the current command
        if [ "X$LOCKPROP" != "X" ]
        then
            if [ "X$1" != "X" ]
            then
                # Resolve the primary group
                RUN_AS_GROUP=`groups $RUN_AS_USER | awk '{print $3}' | tail -1`
                if [ "X$RUN_AS_GROUP" = "X" ]
                then
                    RUN_AS_GROUP=$RUN_AS_USER
                fi
                touch $LOCKFILE
                chown $RUN_AS_USER:$RUN_AS_GROUP $LOCKFILE
            fi
        fi

        # Still want to change users, recurse.  This means that the user will only be
        #  prompted for a password once. Variables shifted by 1
        su -m $RUN_AS_USER -c "\"$REALPATH\" $2"
        RETVAL=$?

        # Now that we are the original user again, we may need to clean up the lock file.
        if [ "X$LOCKPROP" != "X" ]
        then
            getpid
            if [ "X$pid" = "X" ]
            then
                # Wrapper is not running so make sure the lock file is deleted.
                if [ -f "$LOCKFILE" ]
                then
                    rm "$LOCKFILE"
                fi
            fi
        fi

        exit $RETVAL
    fi
}

# Sets $pid from the pid file; clears it (and removes the file) when the
# recorded pid no longer belongs to a wrapper process.
getpid() {
    if [ -f "$PIDFILE" ]
    then
        if [ -r "$PIDFILE" ]
        then
            pid=`cat "$PIDFILE"`
            if [ "X$pid" != "X" ]
            then
                # It is possible that 'a' process with the pid exists but that it is not the
                #  correct process.  This can happen in a number of cases, but the most
                #  common is during system startup after an unclean shutdown.
                # The ps statement below looks for the specific wrapper command running as
                #  the pid.  If it is not found then the pid file is considered to be stale.
                if [ "$DIST_OS" = "macosx" ]; then
                    pidtest=`$PSEXE -p $pid -o command -ww | grep "$WRAPPER_CMD" | tail -1`
                else
                    pidtest=`$PSEXE -p $pid -o args | grep "$WRAPPER_CMD" | tail -1`
                fi
                if [ "X$pidtest" = "X" ]
                then
                    # This is a stale pid file.
                    rm -f "$PIDFILE"
                    echo "Removed stale pid file: $PIDFILE"
                    pid=""
                fi
            fi
        else
            echo "Cannot read $PIDFILE."
            exit 1
        fi
    fi
}

# Re-checks that the process in $pid is still alive; clears $pid and
# removes the pid file when it is gone.
testpid() {
    pid=`$PSEXE -p $pid | grep $pid | grep -v grep | awk '{print $1}' | tail -1`
    if [ "X$pid" = "X" ]
    then
        # Process is gone so remove the pid file.
        rm -f "$PIDFILE"
        pid=""
    fi
}

# Runs the wrapper in the foreground (no daemonize).
console() {
    echo "Running $APP_LONG_NAME..."
    getpid
    if [ "X$pid" = "X" ]
    then
        # The string passed to eval must handles spaces in paths correctly.
        COMMAND_LINE="$CMDNICE \"$WRAPPER_CMD\" \"$WRAPPER_CONF\" wrapper.syslog.ident=$APP_NAME wrapper.pidfile=\"$PIDFILE\" $ANCHORPROP $LOCKPROP $WRAPPER_CONF_OVERRIDES"
        eval $COMMAND_LINE
    else
        echo "$APP_LONG_NAME is already running."
        exit 1
    fi
}

# Starts the wrapper as a background daemon.
start() {
    echo "Starting $APP_LONG_NAME..."
    getpid
    if [ "X$pid" = "X" ]
    then
        # The string passed to eval must handles spaces in paths correctly.
        COMMAND_LINE="$CMDNICE \"$WRAPPER_CMD\" \"$WRAPPER_CONF\" wrapper.syslog.ident=$APP_NAME wrapper.pidfile=\"$PIDFILE\" wrapper.daemonize=TRUE $ANCHORPROP $IGNOREPROP $LOCKPROP $WRAPPER_CONF_OVERRIDES"
        eval $COMMAND_LINE
    else
        echo "$APP_LONG_NAME is already running."
        exit 1
    fi
}

# Stops the wrapper (via kill, or by removing the anchor file when
# IGNORE_SIGNALS is set) and waits until the process actually exits.
stopit() {
    echo "Stopping $APP_LONG_NAME..."
    getpid
    if [ "X$pid" = "X" ]
    then
        echo "$APP_LONG_NAME was not running."
    else
        if [ "X$IGNORE_SIGNALS" = "X" ]
        then
            # Running so try to stop it.
            kill $pid
            if [ $? -ne 0 ]
            then
                # An explanation for the failure should have been given
                echo "Unable to stop $APP_LONG_NAME."
                exit 1
            fi
        else
            rm -f "$ANCHORFILE"
            if [ -f "$ANCHORFILE" ]
            then
                # An explanation for the failure should have been given
                echo "Unable to stop $APP_LONG_NAME."
                exit 1
            fi
        fi

        # We can not predict how long it will take for the wrapper to
        #  actually stop as it depends on settings in wrapper.conf.
        #  Loop until it does.
        savepid=$pid
        CNT=0
        TOTCNT=0
        while [ "X$pid" != "X" ]
        do
            # Show a waiting message every 5 seconds.
            if [ "$CNT" -lt "5" ]
            then
                CNT=`expr $CNT + 1`
            else
                echo "Waiting for $APP_LONG_NAME to exit..."
                CNT=0
            fi
            TOTCNT=`expr $TOTCNT + 1`

            sleep 1

            testpid
        done

        pid=$savepid
        testpid
        if [ "X$pid" != "X" ]
        then
            echo "Failed to stop $APP_LONG_NAME."
            exit 1
        else
            echo "Stopped $APP_LONG_NAME."
        fi
    fi
}

# Reports whether the wrapper is running; exit status 0 when running.
status() {
    getpid
    if [ "X$pid" = "X" ]
    then
        echo "$APP_LONG_NAME is not running."
        exit 1
    else
        echo "$APP_LONG_NAME is running ($pid)."
        exit 0
    fi
}

# Sends SIGQUIT (kill -3) to request a JVM thread dump.
dump() {
    echo "Dumping $APP_LONG_NAME..."
    getpid
    if [ "X$pid" = "X" ]
    then
        echo "$APP_LONG_NAME was not running."
    else
        kill -3 $pid
        if [ $? -ne 0 ]
        then
            echo "Failed to dump $APP_LONG_NAME."
            exit 1
        else
            echo "Dumped $APP_LONG_NAME."
        fi
    fi
}

# Command dispatcher.
case "$1" in

    'console')
        checkUser touchlock $1
        console
        ;;

    'start')
        checkUser touchlock $1
        start
        ;;

    'stop')
        checkUser "" $1
        stopit
        ;;

    'restart')
        checkUser touchlock $1
        stopit
        start
        ;;

    'status')
        checkUser "" $1
        status
        ;;

    'dump')
        checkUser "" $1
        dump
        ;;

    *)
        echo "Usage: $0 { console | start | stop | restart | status | dump }"
        exit 1
        ;;
esac

exit 0
package com.ipec.trazactivo.model;

import java.io.Serializable;

import javax.persistence.*;
import javax.validation.constraints.NotEmpty;

import lombok.Data;

/**
 * JPA entity mapped to the {@code numero_aula} table: a classroom number
 * with its description. Getters/setters/equals/hashCode/toString are
 * generated by Lombok's {@code @Data}.
 */
@Entity
@Data
@Table(name="numero_aula")
public class NumeroAula implements Serializable {

    private static final long serialVersionUID = 1L;

    // Auto-generated primary key (database identity column).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id_numero_aula")
    private int idNumeroAula;

    // Classroom-number description; bean validation rejects empty values.
    @NotEmpty
    @Column(name = "detalle_numero_aula")
    private String detalleNumeroAula;
}
import React, { Component, PropTypes } from 'react' import moment from 'moment' import Event from 'components/event' const SECTION_DATE_FORMAT = 'MMM DD, YYYY' const EVENT_TIME_FORMAT = 'YYYY-MM-DD' export default class extends Component { static get propTypes () { return { events: PropTypes.array } } render () { return ( <div className='events-list'> <div className='timeline'> {this.renderTimeline()} </div> </div> ) } renderTimeline () { let day let timeline = [] this.props.events.forEach((event) => { const eventDay = formatEventDate(event.event_time) if (day !== eventDay) { day = eventDay timeline.push(this.renderDayHeader(day)) } timeline.push( <Event key={`event-${event.id}`} event={event}/> ) }) return timeline } renderDayHeader (day) { return ( <div key={`day-header-${day}`} className='day-header'> <span className='dot' /> {formatSectionDate(day)} </div> ) } } function formatEventDate (eventdate) { if (!eventdate) return null return moment(new Date(eventdate)).format(EVENT_TIME_FORMAT) } function formatSectionDate (date) { if (!date) return null return moment(new Date(date)).format(SECTION_DATE_FORMAT) }
import React from 'react' import './header.scss' const Header = () => ( <div className='hero-image' > <div className='hero-text' > <h1><NAME></h1> <h2>Actor. Writer. America's Gay Sweetheart.</h2> </div> </div> ) export default Header
/* Copyright © 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package controller import ( "fmt" "os" "github.com/awslabs/clencli/cobra/aid" "github.com/awslabs/clencli/helper" "github.com/sirupsen/logrus" "github.com/spf13/cobra" ) var gitIgnoreArgs = []string{"list"} // GitIgnoreCmd .... func GitIgnoreCmd() *cobra.Command { man, err := helper.GetManual("gitignore") if err != nil { fmt.Println(err) os.Exit(1) } cmd := &cobra.Command{ Use: man.Use, Short: man.Short, Long: man.Long, Example: man.Example, PreRunE: gitIgnorePreRun, RunE: gitIgnoreRun, } cmd.Flags().StringP("input", "i", "", "Gitignore input. 
If multiple, comma-separated") return cmd } func gitIgnorePreRun(cmd *cobra.Command, args []string) error { logrus.Traceln("start: command gitignore pre-run") if len(args) == 0 { input, err := cmd.Flags().GetString("input") if err != nil { logrus.Errorf("unable to access flag input\n%v", err) return err } if input == "" { logrus.Errorln("no flag or argument provided") return fmt.Errorf("no flag or argument provided") } } else if len(args) == 1 && args[0] != "list" { logrus.Errorf("unknow argument passed: %v", args) return fmt.Errorf("unknown argument provided: %s", args[0]) } logrus.Traceln("end: command gitignore pre-run") return nil } func gitIgnoreRun(cmd *cobra.Command, args []string) error { logrus.Traceln("start: command gitignore run") if len(args) > 0 && args[0] == "list" { list := aid.GetGitIgnoreList() if list == "" { return fmt.Errorf("unable to get gitignore list") } cmd.Println(list) } else { input, err := cmd.Flags().GetString("input") if err != nil { return err } downloaded, err := aid.DownloadGitIgnore(cmd, input) if err != nil { logrus.Errorf("unable to download gitignore\n%v", err) return err } if downloaded { cmd.Println(".gitignore created successfully") } } logrus.Traceln("end: command gitignore run") return nil }