text stringlengths 1 1.05M |
|---|
package controllers
import (
"github.com/revel/modules/orm/gorp/app/controllers"
"github.com/revel/revel"
)
// Boards is the Revel controller for board pages. It embeds the gorp
// module's Controller (instead of the plain *revel.Controller) so that
// actions on this controller get the gorp database plumbing for free.
type Boards struct {
	//*revel.Controller
	gorpController.Controller
}
// Index renders the template associated with this action with no
// additional view arguments.
func (c Boards) Index() revel.Result {
	return c.Render()
}
|
#!/bin/sh
# Public domain
# http://unlicense.org/
# Created by Grigore Stefan <g_stefan@yahoo.com>

echo "-> release quantum-script-extension-url"

# Run a command; abort the whole release on the first failure.
cmdX() {
	"$@" && return 0
	echo "Error: release"
	exit 1
}

export XYO_PATH_RELEASE=release

# Build, install, then package the release artifacts.
cmdX /bin/sh ./build/ubuntu.sh make
cmdX /bin/sh ./build/ubuntu.sh install
cmdX /bin/sh ./build/ubuntu.sh install-release
cmdX xyo-cc quantum-script-extension-url --archive-release-sha512 --version-file=version.ini
|
#!/usr/bin/env python3
"""Upload every .jpeg image in the current user's supplier-data images
directory to the local web service at /upload/."""
import os
import requests
import getpass

url = "http://localhost/upload/"
images_directory = "/home/{}/supplier-data/images".format(getpass.getuser())

for file_name in os.listdir(images_directory):
    # FIX: was `'.jpeg' in file`, which also matched names that merely
    # contain ".jpeg" anywhere (e.g. "photo.jpeg.bak"); only true .jpeg
    # files should be uploaded.
    if file_name.endswith('.jpeg'):
        # os.path.join instead of manual "{}/{}" string assembly.
        with open(os.path.join(images_directory, file_name), 'rb') as opened:
            # Response is intentionally ignored (best-effort upload),
            # matching the original behavior.
            requests.post(url, files={'file': opened})
|
import { Vue } from 'vue-property-decorator';
/**
* A control button used by the pager.
*/
/**
 * A control button used by the pager.
 *
 * Declaration file only: member types are erased here; see the
 * implementation for the concrete prop types.
 */
export declare class VueDatatablePagerButton extends Vue {
    /**
     * Defines if the button is triggerable or not.
     *
     * @vue-prop
     */
    private readonly disabled;
    /**
     * Represents if the pager button is the currently selected one.
     *
     * @vue-prop
     */
    private readonly selected;
    /**
     * The page index of the button.
     *
     * @vue-prop
     */
    private readonly value;
    /**
     * The [[TableType]] instance provided through [[TableTypeConsumer.tableType]].
     *
     * @vue Inject `table-type`
     */
    private readonly tableType;
    /** HTML classes to set on list items tags. */
    get liClasses(): string[];
    /** CSS styles to apply on the list items tags. */
    get liStyles(): {
        cursor: string;
    };
    /**
     * Emits an event if the button is not [[VueDatatablePagerButton.disabled]].
     *
     * @vue-event vuejs-datatable::set-page.
     * @returns Nothing.
     */
    sendClick(): void;
}
|
# X11 package
# Termux build recipe for libICE (the version/SHA pin below must be
# updated together whenever the upstream tarball changes).
TERMUX_PKG_HOMEPAGE=https://xorg.freedesktop.org/
TERMUX_PKG_DESCRIPTION="X11 Inter-Client Exchange library"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="Leonid Pliushch <leonid.pliushch@gmail.com>"
TERMUX_PKG_VERSION=1.0.10
TERMUX_PKG_REVISION=15
TERMUX_PKG_SRCURL=https://xorg.freedesktop.org/releases/individual/lib/libICE-${TERMUX_PKG_VERSION}.tar.bz2
TERMUX_PKG_SHA256=6f86dce12cf4bcaf5c37dddd8b1b64ed2ddf1ef7b218f22b9942595fb747c348
TERMUX_PKG_BUILD_DEPENDS="xorgproto, xorg-util-macros, xtrans"
|
package com.decathlon.ara.postman.bean;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Wither;
/**
 * Identifier wrapper for a Postman collection item (folder or request).
 * Lombok generates the accessors, constructors and withers; unknown JSON
 * properties are ignored when deserializing Postman exports.
 */
@Data
@Wither
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class ItemId {

    /**
     * UUID of the folder or request, used to map a request with its execution and failure(s).
     */
    private String id;
}
|
from typing import Dict, List
def path_first_order(graph: Dict[int, List[int]], start: int) -> List[int]:
    """Return the vertices reachable from ``start`` in depth-first preorder.

    Neighbors are explored in the order they appear in each adjacency list,
    matching a classic recursive DFS.

    Improvements over the recursive original:
    - iterative (explicit stack), so deep graphs no longer hit Python's
      recursion limit;
    - vertices that appear only as neighbors (no adjacency-list entry of
      their own) no longer raise KeyError.

    :param graph: adjacency list mapping vertex -> list of neighbors
    :param start: vertex to start the traversal from
    :return: vertices in DFS preorder
    """
    visited = set()
    order = []
    # Pushing neighbors in reverse makes the stack pop them in list order,
    # reproducing the recursive preorder exactly.
    stack = [start]
    while stack:
        vertex = stack.pop()
        if vertex in visited:
            continue
        visited.add(vertex)
        order.append(vertex)
        for neighbor in reversed(graph.get(vertex, [])):
            if neighbor not in visited:
                stack.append(neighbor)
    return order
<reponame>syncrase/meta-plants
package fr.syncrase.ecosyst.domain;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.*;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
/**
 * A plant is either a botanical plant, which corresponds to a single
 * classification (and is associated with no other plant — it is unique),
 * or a vegetable-garden plant, which corresponds to a single botanical plant.
 */
@ApiModel(
    description = "Une plante est soit une plante botanique qui correspond à une unique classification (et n'est associée à aucune autre plante, elle est unique) soit une plante potagère qui correspond à une unique plante botanique"
)
@Entity
@Table(name = "plante")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public class Plante implements Serializable {

    private static final long serialVersionUID = 1L;

    // Sequence-generated primary key.
    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "sequenceGenerator")
    @SequenceGenerator(name = "sequenceGenerator")
    @Column(name = "id")
    private Long id;

    // Free-text attributes of the plant.
    @Column(name = "entretien")
    private String entretien;

    @Column(name = "histoire")
    private String histoire;

    @Column(name = "vitesse_croissance")
    private String vitesseCroissance;

    @Column(name = "exposition")
    private String exposition;

    // Plants this one can be confused with; owning side is
    // Ressemblance.planteRessemblant.
    @OneToMany(mappedBy = "planteRessemblant")
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    @JsonIgnoreProperties(value = { "planteRessemblant" }, allowSetters = true)
    private Set<Ressemblance> confusions = new HashSet<>();

    @OneToMany(mappedBy = "plante")
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    @JsonIgnoreProperties(value = { "plante" }, allowSetters = true)
    private Set<Ensoleillement> ensoleillements = new HashSet<>();

    /**
     * A plant can have many vegetable-garden variants. A vegetable-garden
     * plant corresponds to exactly one botanical plant and is associated
     * with no classification (that is carried by the botanical plant).
     */
    @ApiModelProperty(
        value = "Une plante peut avoir beaucoup de variantes potagère\nUne plante potagère ne correspond qu'à une seule plante botanique et n'est associé à aucune classification (contenu dans le plante botanique)"
    )
    @OneToMany(mappedBy = "planteBotanique")
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    @JsonIgnoreProperties(
        value = {
            "confusions",
            "ensoleillements",
            "plantesPotageres",
            "cycleDeVie",
            "sol",
            "temperature",
            "racine",
            "strate",
            "feuillage",
            "nomsVernaculaires",
            "classificationCronquist",
            "planteBotanique",
        },
        allowSetters = true
    )
    private Set<Plante> plantesPotageres = new HashSet<>();

    @ManyToOne
    @JsonIgnoreProperties(
        value = {
            "semis", "apparitionFeuilles", "floraison", "recolte", "croissance", "maturite", "plantation", "rempotage", "reproduction",
        },
        allowSetters = true
    )
    private CycleDeVie cycleDeVie;

    // Simple many-to-one characteristics of the plant.
    @ManyToOne
    private Sol sol;

    @ManyToOne
    private Temperature temperature;

    @ManyToOne
    private Racine racine;

    @ManyToOne
    private Strate strate;

    @ManyToOne
    private Feuillage feuillage;

    /**
     * The same vernacular name can designate several distinct and very
     * different plants.
     */
    @ApiModelProperty(value = "Un même nom vernaculaire peut qualifier plusieurs plantes distinctes et très différentes")
    @ManyToMany
    @JoinTable(
        name = "rel_plante__noms_vernaculaires",
        joinColumns = @JoinColumn(name = "plante_id"),
        inverseJoinColumns = @JoinColumn(name = "noms_vernaculaires_id")
    )
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    @JsonIgnoreProperties(value = { "plantes" }, allowSetters = true)
    private Set<NomVernaculaire> nomsVernaculaires = new HashSet<>();

    // One-to-one owned by ClassificationCronquist.plante.
    @JsonIgnoreProperties(value = { "plante" }, allowSetters = true)
    @OneToOne(mappedBy = "plante")
    private ClassificationCronquist classificationCronquist;

    // The botanical plant this vegetable-garden plant derives from
    // (inverse side of plantesPotageres).
    @ManyToOne
    @JsonIgnoreProperties(
        value = {
            "confusions",
            "ensoleillements",
            "plantesPotageres",
            "cycleDeVie",
            "sol",
            "temperature",
            "racine",
            "strate",
            "feuillage",
            "nomsVernaculaires",
            "classificationCronquist",
            "planteBotanique",
        },
        allowSetters = true
    )
    private Plante planteBotanique;

    // jhipster-needle-entity-add-field - JHipster will add fields here

    public Long getId() {
        return this.id;
    }

    // Fluent setter, returns this for chaining.
    public Plante id(Long id) {
        this.setId(id);
        return this;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getEntretien() {
        return this.entretien;
    }

    public Plante entretien(String entretien) {
        this.setEntretien(entretien);
        return this;
    }

    public void setEntretien(String entretien) {
        this.entretien = entretien;
    }

    public String getHistoire() {
        return this.histoire;
    }

    public Plante histoire(String histoire) {
        this.setHistoire(histoire);
        return this;
    }

    public void setHistoire(String histoire) {
        this.histoire = histoire;
    }

    public String getVitesseCroissance() {
        return this.vitesseCroissance;
    }

    public Plante vitesseCroissance(String vitesseCroissance) {
        this.setVitesseCroissance(vitesseCroissance);
        return this;
    }

    public void setVitesseCroissance(String vitesseCroissance) {
        this.vitesseCroissance = vitesseCroissance;
    }

    public String getExposition() {
        return this.exposition;
    }

    public Plante exposition(String exposition) {
        this.setExposition(exposition);
        return this;
    }

    public void setExposition(String exposition) {
        this.exposition = exposition;
    }

    public Set<Ressemblance> getConfusions() {
        return this.confusions;
    }

    // Replaces the collection while keeping both sides of the
    // bidirectional association in sync.
    public void setConfusions(Set<Ressemblance> ressemblances) {
        if (this.confusions != null) {
            this.confusions.forEach(i -> i.setPlanteRessemblant(null));
        }
        if (ressemblances != null) {
            ressemblances.forEach(i -> i.setPlanteRessemblant(this));
        }
        this.confusions = ressemblances;
    }

    public Plante confusions(Set<Ressemblance> ressemblances) {
        this.setConfusions(ressemblances);
        return this;
    }

    public Plante addConfusions(Ressemblance ressemblance) {
        this.confusions.add(ressemblance);
        ressemblance.setPlanteRessemblant(this);
        return this;
    }

    public Plante removeConfusions(Ressemblance ressemblance) {
        this.confusions.remove(ressemblance);
        ressemblance.setPlanteRessemblant(null);
        return this;
    }

    public Set<Ensoleillement> getEnsoleillements() {
        return this.ensoleillements;
    }

    // Same both-sides synchronization as setConfusions.
    public void setEnsoleillements(Set<Ensoleillement> ensoleillements) {
        if (this.ensoleillements != null) {
            this.ensoleillements.forEach(i -> i.setPlante(null));
        }
        if (ensoleillements != null) {
            ensoleillements.forEach(i -> i.setPlante(this));
        }
        this.ensoleillements = ensoleillements;
    }

    public Plante ensoleillements(Set<Ensoleillement> ensoleillements) {
        this.setEnsoleillements(ensoleillements);
        return this;
    }

    public Plante addEnsoleillements(Ensoleillement ensoleillement) {
        this.ensoleillements.add(ensoleillement);
        ensoleillement.setPlante(this);
        return this;
    }

    public Plante removeEnsoleillements(Ensoleillement ensoleillement) {
        this.ensoleillements.remove(ensoleillement);
        ensoleillement.setPlante(null);
        return this;
    }

    public Set<Plante> getPlantesPotageres() {
        return this.plantesPotageres;
    }

    public void setPlantesPotageres(Set<Plante> plantes) {
        if (this.plantesPotageres != null) {
            this.plantesPotageres.forEach(i -> i.setPlanteBotanique(null));
        }
        if (plantes != null) {
            plantes.forEach(i -> i.setPlanteBotanique(this));
        }
        this.plantesPotageres = plantes;
    }

    public Plante plantesPotageres(Set<Plante> plantes) {
        this.setPlantesPotageres(plantes);
        return this;
    }

    public Plante addPlantesPotageres(Plante plante) {
        this.plantesPotageres.add(plante);
        plante.setPlanteBotanique(this);
        return this;
    }

    public Plante removePlantesPotageres(Plante plante) {
        this.plantesPotageres.remove(plante);
        plante.setPlanteBotanique(null);
        return this;
    }

    public CycleDeVie getCycleDeVie() {
        return this.cycleDeVie;
    }

    public void setCycleDeVie(CycleDeVie cycleDeVie) {
        this.cycleDeVie = cycleDeVie;
    }

    public Plante cycleDeVie(CycleDeVie cycleDeVie) {
        this.setCycleDeVie(cycleDeVie);
        return this;
    }

    public Sol getSol() {
        return this.sol;
    }

    public void setSol(Sol sol) {
        this.sol = sol;
    }

    public Plante sol(Sol sol) {
        this.setSol(sol);
        return this;
    }

    public Temperature getTemperature() {
        return this.temperature;
    }

    public void setTemperature(Temperature temperature) {
        this.temperature = temperature;
    }

    public Plante temperature(Temperature temperature) {
        this.setTemperature(temperature);
        return this;
    }

    public Racine getRacine() {
        return this.racine;
    }

    public void setRacine(Racine racine) {
        this.racine = racine;
    }

    public Plante racine(Racine racine) {
        this.setRacine(racine);
        return this;
    }

    public Strate getStrate() {
        return this.strate;
    }

    public void setStrate(Strate strate) {
        this.strate = strate;
    }

    public Plante strate(Strate strate) {
        this.setStrate(strate);
        return this;
    }

    public Feuillage getFeuillage() {
        return this.feuillage;
    }

    public void setFeuillage(Feuillage feuillage) {
        this.feuillage = feuillage;
    }

    public Plante feuillage(Feuillage feuillage) {
        this.setFeuillage(feuillage);
        return this;
    }

    public Set<NomVernaculaire> getNomsVernaculaires() {
        return this.nomsVernaculaires;
    }

    public void setNomsVernaculaires(Set<NomVernaculaire> nomVernaculaires) {
        this.nomsVernaculaires = nomVernaculaires;
    }

    public Plante nomsVernaculaires(Set<NomVernaculaire> nomVernaculaires) {
        this.setNomsVernaculaires(nomVernaculaires);
        return this;
    }

    public Plante addNomsVernaculaires(NomVernaculaire nomVernaculaire) {
        this.nomsVernaculaires.add(nomVernaculaire);
        nomVernaculaire.getPlantes().add(this);
        return this;
    }

    public Plante removeNomsVernaculaires(NomVernaculaire nomVernaculaire) {
        this.nomsVernaculaires.remove(nomVernaculaire);
        nomVernaculaire.getPlantes().remove(this);
        return this;
    }

    public ClassificationCronquist getClassificationCronquist() {
        return this.classificationCronquist;
    }

    // Detaches the old classification and attaches the new one so the
    // one-to-one stays consistent on both sides.
    public void setClassificationCronquist(ClassificationCronquist classificationCronquist) {
        if (this.classificationCronquist != null) {
            this.classificationCronquist.setPlante(null);
        }
        if (classificationCronquist != null) {
            classificationCronquist.setPlante(this);
        }
        this.classificationCronquist = classificationCronquist;
    }

    public Plante classificationCronquist(ClassificationCronquist classificationCronquist) {
        this.setClassificationCronquist(classificationCronquist);
        return this;
    }

    public Plante getPlanteBotanique() {
        return this.planteBotanique;
    }

    public void setPlanteBotanique(Plante plante) {
        this.planteBotanique = plante;
    }

    public Plante planteBotanique(Plante plante) {
        this.setPlanteBotanique(plante);
        return this;
    }

    // jhipster-needle-entity-add-getters-setters - JHipster will add getters and setters here

    // Identity is based solely on the database id, per JPA best practice
    // for generated identifiers.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof Plante)) {
            return false;
        }
        return id != null && id.equals(((Plante) o).id);
    }

    @Override
    public int hashCode() {
        // see https://vladmihalcea.com/how-to-implement-equals-and-hashcode-using-the-jpa-entity-identifier/
        return getClass().hashCode();
    }

    // prettier-ignore
    @Override
    public String toString() {
        return "Plante{" +
            "id=" + getId() +
            ", entretien='" + getEntretien() + "'" +
            ", histoire='" + getHistoire() + "'" +
            ", vitesseCroissance='" + getVitesseCroissance() + "'" +
            ", exposition='" + getExposition() + "'" +
            "}";
    }
}
|
def process_geolocation_file(file_path: str) -> dict:
    """Aggregate statistics over a CSV-like geolocation file.

    Each non-blank line is expected to be ``latitude,longitude,location_name``.

    Fixes over the original:
    - an empty file no longer raises ZeroDivisionError (averages are 0.0);
    - blank lines are skipped instead of crashing float();
    - location names containing commas are kept whole (split limited to 2);
    - the file is streamed line by line instead of read fully into memory.

    :param file_path: path to the geolocation file
    :return: dict with total_entries, average_latitude, average_longitude
        and longest_location_name
    """
    total_entries = 0
    total_latitude = 0.0
    total_longitude = 0.0
    longest_location_name = ""
    with open(file_path, 'r') as file:
        for line in file:
            line = line.strip()
            if not line:
                continue
            # maxsplit=2: everything after the second comma is the name.
            data = line.split(',', 2)
            latitude, longitude, location_name = float(data[0]), float(data[1]), data[2]
            total_entries += 1
            total_latitude += latitude
            total_longitude += longitude
            if len(location_name) > len(longest_location_name):
                longest_location_name = location_name
    average_latitude = total_latitude / total_entries if total_entries else 0.0
    average_longitude = total_longitude / total_entries if total_entries else 0.0
    return {
        "total_entries": total_entries,
        "average_latitude": average_latitude,
        "average_longitude": average_longitude,
        "longest_location_name": longest_location_name
    }
<gh_stars>1-10
package io.github.antolius.engine.kotlin2;
import org.jetbrains.kotlin.script.jsr223.KotlinJsr223JvmLocalScriptEngineFactory;
import javax.script.ScriptEngine;
import java.util.function.Supplier;
/**
 * Supplier that produces Kotlin JSR-223 script engines.
 * A fresh engine instance is created on every {@link #get()} call.
 */
public class ScriptEngineSupplier implements Supplier<ScriptEngine> {

    /** Returns a new local (in-process) Kotlin script engine. */
    @Override
    public ScriptEngine get() {
        return new KotlinJsr223JvmLocalScriptEngineFactory().getScriptEngine();
    }
}
|
#!/bin/bash
# Switch the active rc.conf and scope.sh symlinks to the "original"/"light"
# variants, relative to this script's own directory.

# Absolute directory containing this script, regardless of the caller's cwd.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

rm "${DIR}/rc.conf"
ln -s "${DIR}/rc.original.conf" "${DIR}/rc.conf"

# FIX: was `rm scope.sh` (relative to the caller's cwd) while the matching
# ln -s below targets ${DIR}/scope.sh — the removal must be anchored to the
# script directory too, or the symlink creation fails when scope.sh exists.
rm "${DIR}/scope.sh"
ln -s "${DIR}/scope-light.sh" "${DIR}/scope.sh"
|
// For menu styling
//used vanilla just to check if I can or not.

// Apply a set of inline style properties to the element with the given id.
// Like the original getElementById + direct assignments, this throws if the
// element is missing.
function applyStyles(id, styles) {
  var el = document.getElementById(id);
  for (var key in styles) {
    el.style[key] = styles[key];
  }
}

// FIX: the original redeclared `var meny` / `var section` four and three
// times with copy-pasted bodies; fold the repetition into data + a loop.
applyStyles("List", { color: "coral", fontSize: "20px" });
["postOne", "postTwo", "postThree"].forEach(function (id) {
  applyStyles(id, { color: "coral", fontSize: "16px" });
});

// For section content styling
// (fontSize was commented out in the original; intentionally not applied.)
["Lean", "Agila-metoder", "Projektmetodiken"].forEach(function (id) {
  applyStyles(id, { background: "bisque" });
});
|
<gh_stars>0
/**
*/
package edu.kit.ipd.sdq.kamp4hmi.model.Kamp4hmiModel.tests;
import edu.kit.ipd.sdq.kamp4hmi.model.Kamp4hmiModel.Kamp4hmiModelFactory;
import edu.kit.ipd.sdq.kamp4hmi.model.Kamp4hmiModel.SystemStep;
import junit.textui.TestRunner;
/**
 * <!-- begin-user-doc -->
 * A test case for the model object '<em><b>System Step</b></em>'.
 * <!-- end-user-doc -->
 * @generated
 */
public class SystemStepTest extends HMIElementTest {

    /**
     * Entry point so the test can be run standalone via JUnit's TestRunner.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public static void main(String[] args) {
        TestRunner.run(SystemStepTest.class);
    }

    /**
     * Constructs a new System Step test case with the given name.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public SystemStepTest(String name) {
        super(name);
    }

    /**
     * Returns the fixture for this System Step test case,
     * narrowed to the SystemStep type.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected SystemStep getFixture() {
        return (SystemStep)fixture;
    }

    /**
     * Creates a fresh SystemStep fixture before each test.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see junit.framework.TestCase#setUp()
     * @generated
     */
    @Override
    protected void setUp() throws Exception {
        setFixture(Kamp4hmiModelFactory.eINSTANCE.createSystemStep());
    }

    /**
     * Clears the fixture after each test.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see junit.framework.TestCase#tearDown()
     * @generated
     */
    @Override
    protected void tearDown() throws Exception {
        setFixture(null);
    }
} //SystemStepTest
|
#!/usr/bin/env sh
# generated from catkin/python/catkin/environment_cache.py
# based on a snapshot of the environment before and after calling the setup script
# it emulates the modifications of the setup script without recurring computations
# NOTE: paths below are machine-specific (captured at generation time);
# regenerate rather than hand-edit.
# new environment variables
# modified environment variables
export CMAKE_PREFIX_PATH="/home/venom/ros/hrwros_ws/devel/.private/turtlebot_gazebo:$CMAKE_PREFIX_PATH"
export LD_LIBRARY_PATH="/home/venom/ros/hrwros_ws/devel/.private/turtlebot_gazebo/lib:$LD_LIBRARY_PATH"
export PKG_CONFIG_PATH="/home/venom/ros/hrwros_ws/devel/.private/turtlebot_gazebo/lib/pkgconfig:$PKG_CONFIG_PATH"
export PWD='/home/venom/ros/hrwros_ws/build/turtlebot_gazebo'
export ROSLISP_PACKAGE_DIRECTORIES="/home/venom/ros/hrwros_ws/devel/.private/turtlebot_gazebo/share/common-lisp:$ROSLISP_PACKAGE_DIRECTORIES"
export ROS_PACKAGE_PATH="/home/venom/ros/hrwros_ws/src/turtlebot_simulator/turtlebot_gazebo:$ROS_PACKAGE_PATH"
#compdef myWatchNoBlinkColorized.sh
# Zsh completion: offers the -h and -b flags (the latter described as
# "bold") and completes remaining arguments as filenames.
local arguments
arguments=(
    '-h[]'
    '-b[bold]'
    '*:filename:_files'
)
_arguments -s $arguments
|
<filename>frontend/components/MainPage.js
import React, { Component, Fragment } from 'react'
import styled, { ThemeProvider, createGlobalStyle } from 'styled-components'
import Meta from './Meta'
// Shared styled-components theme: palette plus layout constants.
const theme = {
  grey: '#9FB3C8',
  lightgrey: '#F0F4F8',
  darkgrey: '#334E68',
  green: '#3EBD93',
  darkgreen: '#014D40',
  // FIX: `red` was declared twice ('#FF0000' then '#CF1124'); only the last
  // value ever took effect, so the dead duplicate is dropped.
  red: '#CF1124',
  // FIX: was '##1992D4' (double hash) — an invalid CSS color value.
  blue: '#1992D4',
  maxWidth: '1000px',
  height: '100%',
  bs: '0 12px 24px 0 rgba(0, 0, 0, 0.09)' // box-shadow used by cards
}
// Page wrapper applying the theme's default text color.
const StyledPage = styled.div`
  color: ${props => props.theme.darkgrey};
`
// Centered content column, capped at the theme's maxWidth.
const Inner = styled.div`
  max-width: ${props => props.theme.maxWidth};
  margin: 0 auto;
  padding: 2rem;
`
// Global stylesheet: resets, typography, and the bouncing-loader animation.
const GlobalStyle = createGlobalStyle`
  @font-face {
    font-family: 'Open Sans', sans-serif;
    font-weight: normal;
    font-style: normal;
  }
  html {
    height: ${props => props.theme.height};
    box-sizing: border-box;
  }
  *, *:before, *:after {
    box-sizing: inherit;
  }
  body {
    background: ${props => props.theme.lightgrey};
    height: ${props => props.theme.height};
    padding: 0;
    margin: 0;
    font-size: 1.5rem;
    line-height: 2;
    font-family: 'Open Sans', sans-serif;
  }
  a {
    text-decoration: none;
    /* FIX: was ${'${theme.black}'} — the theme has no \`black\` key, so this
       interpolated to "color: undefined"; use the same darkgrey as the page
       text (NOTE(review): confirm intended link color). */
    color: ${theme.darkgrey};
  }
  input:focus {
    outline: none;
  }
  /* Those rules are specific to the loading component
     (FIX: was a '#'-prefixed line, which is not valid CSS comment syntax
     and produced a broken rule in the emitted stylesheet). */
  @keyframes bouncing-loader {
    from {
      opacity: 1;
      transform: translateY(0);
    }
    to {
      opacity: 0.1;
      transform: translateY(-1rem);
    }
  }
  .bouncing-loader {
    display: flex;
    justify-content: center;
    align-self: center;
  }
  .bouncing-loader > div {
    width: 1rem;
    height: 1rem;
    margin: 0.7rem 0.2rem;
    background: #8385aa;
    border-radius: 50%;
    animation: bouncing-loader 0.6s infinite alternate;
  }
  .bouncing-loader > div:nth-child(2) {
    animation-delay: 0.1s;
  }
  .bouncing-loader > div:nth-child(3) {
    animation-delay: 0.2s;
  }
`
/**
 * Top-level page shell: provides the theme, injects the global stylesheet,
 * renders the shared <Meta> tags and centers children inside <Inner>.
 */
class MainPage extends Component {
  render() {
    return (
      <ThemeProvider theme={theme}>
        <Fragment>
          <GlobalStyle />
          <StyledPage>
            <Meta />
            <Inner>{this.props.children}</Inner>
          </StyledPage>
        </Fragment>
      </ThemeProvider>
    )
  }
}
export default MainPage
|
package proptics.specs
import proptics.instances.reverse._
import proptics.law.discipline.ReverseTests
// Law-checks the Reverse instance for Stream[Int] via the discipline suite.
class ReverseSpec extends ReverseSpec0 {
  checkAll("ReverseTests[Stream[Int], Stream[Int]] reverse", ReverseTests[Stream[Int], Stream[Int]].reverse)
}
|
<filename>Lib/site-packages/PyQt5/examples/widgets/icons/icons.py
#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt5.QtCore import QFileInfo, QRegExp, QSize, Qt
from PyQt5.QtGui import QIcon, QImage, QPalette, QPixmap
from PyQt5.QtWidgets import (QAbstractItemView, QAction, QActionGroup,
QApplication, QComboBox, QFileDialog, QFrame, QGridLayout, QGroupBox,
QHBoxLayout, QHeaderView, QItemDelegate, QLabel, QMainWindow,
QMessageBox, QRadioButton, QSizePolicy, QSpinBox, QStyle,
QStyleFactory, QTableWidget, QTableWidgetItem, QVBoxLayout, QWidget)
class IconSizeSpinBox(QSpinBox):
    """Spin box whose value is displayed and parsed as an "N x N" size."""

    @staticmethod
    def valueFromText(text):
        # Accept either a bare number ("64") or an "N x N" form; only the
        # first number is used. NOTE(review): the character class [xx]
        # repeats 'x' — upstream Qt examples match 'x' and the multiplication
        # sign here; a character may have been lost in transcription. Confirm.
        regExp = QRegExp("(\\d+)(\\s*[xx]\\s*\\d+)?")
        if regExp.exactMatch(text):
            return int(regExp.cap(1))
        else:
            # Unparseable text maps to 0.
            return 0

    @staticmethod
    def textFromValue(value):
        # Render the (square) icon size as "N x N".
        return "%d x %d" % (value, value)
class ImageDelegate(QItemDelegate):
    """Item delegate that edits the Mode (column 1) and State (column 2)
    cells of the images table with combo boxes."""

    def createEditor(self, parent, option, index):
        comboBox = QComboBox(parent)
        # Column 1 holds the icon mode, column 2 the on/off state.
        if index.column() == 1:
            comboBox.addItem("Normal")
            comboBox.addItem("Active")
            comboBox.addItem("Disabled")
            comboBox.addItem("Selected")
        elif index.column() == 2:
            comboBox.addItem("Off")
            comboBox.addItem("On")
        # Commit as soon as a choice is made, not only on focus-out.
        comboBox.activated.connect(self.emitCommitData)
        return comboBox

    def setEditorData(self, editor, index):
        comboBox = editor
        if not comboBox:
            return
        # Select the entry matching the model's current text exactly.
        pos = comboBox.findText(index.model().data(index), Qt.MatchExactly)
        comboBox.setCurrentIndex(pos)

    def setModelData(self, editor, model, index):
        comboBox = editor
        if not comboBox:
            return
        model.setData(index, comboBox.currentText())

    def emitCommitData(self):
        # Forward the combo box's activation as a commitData signal.
        self.commitData.emit(self.sender())
class IconPreviewArea(QWidget):
    """Grid of labels previewing one icon in every mode/state combination:
    one column per mode (Normal/Active/Disabled/Selected), one row per
    state (Off/On)."""

    def __init__(self, parent=None):
        super(IconPreviewArea, self).__init__(parent)
        mainLayout = QGridLayout()
        self.setLayout(mainLayout)
        self.icon = QIcon()
        self.size = QSize()
        # Row headers (states), column headers (modes), and the 2D list of
        # pixmap cells indexed [mode][state].
        self.stateLabels = []
        self.modeLabels = []
        self.pixmapLabels = []
        self.stateLabels.append(self.createHeaderLabel("Off"))
        self.stateLabels.append(self.createHeaderLabel("On"))
        self.modeLabels.append(self.createHeaderLabel("Normal"))
        self.modeLabels.append(self.createHeaderLabel("Active"))
        self.modeLabels.append(self.createHeaderLabel("Disabled"))
        self.modeLabels.append(self.createHeaderLabel("Selected"))
        for j, label in enumerate(self.stateLabels):
            mainLayout.addWidget(label, j + 1, 0)
        for i, label in enumerate(self.modeLabels):
            mainLayout.addWidget(label, 0, i + 1)
            # One column of pixmap labels per mode, one row per state.
            self.pixmapLabels.append([])
            for j in range(len(self.stateLabels)):
                self.pixmapLabels[i].append(self.createPixmapLabel())
                mainLayout.addWidget(self.pixmapLabels[i][j], j + 1, i + 1)

    def setIcon(self, icon):
        # Replace the previewed icon and refresh every cell.
        self.icon = icon
        self.updatePixmapLabels()

    def setSize(self, size):
        # Only refresh when the requested size actually changed.
        if size != self.size:
            self.size = size
            self.updatePixmapLabels()

    def createHeaderLabel(self, text):
        # Bold, centered header for a row or column.
        label = QLabel("<b>%s</b>" % text)
        label.setAlignment(Qt.AlignCenter)
        return label

    def createPixmapLabel(self):
        # Framed, auto-filled cell that will hold one rendered pixmap.
        label = QLabel()
        label.setEnabled(False)
        label.setAlignment(Qt.AlignCenter)
        label.setFrameShape(QFrame.Box)
        label.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
        label.setBackgroundRole(QPalette.Base)
        label.setAutoFillBackground(True)
        label.setMinimumSize(132, 132)
        return label

    def updatePixmapLabels(self):
        # Re-render the icon for every (mode, state) pair; cells whose
        # pixmap comes back null are shown disabled.
        for i in range(len(self.modeLabels)):
            if i == 0:
                mode = QIcon.Normal
            elif i == 1:
                mode = QIcon.Active
            elif i == 2:
                mode = QIcon.Disabled
            else:
                mode = QIcon.Selected
            for j in range(len(self.stateLabels)):
                state = QIcon.Off if j == 0 else QIcon.On
                pixmap = self.icon.pixmap(self.size, mode, state)
                self.pixmapLabels[i][j].setPixmap(pixmap)
                self.pixmapLabels[i][j].setEnabled(not pixmap.isNull())
class MainWindow(QMainWindow):
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
self.centralWidget = QWidget()
self.setCentralWidget(self.centralWidget)
self.createPreviewGroupBox()
self.createImagesGroupBox()
self.createIconSizeGroupBox()
self.createActions()
self.createMenus()
self.createContextMenu()
mainLayout = QGridLayout()
mainLayout.addWidget(self.previewGroupBox, 0, 0, 1, 2)
mainLayout.addWidget(self.imagesGroupBox, 1, 0)
mainLayout.addWidget(self.iconSizeGroupBox, 1, 1)
self.centralWidget.setLayout(mainLayout)
self.setWindowTitle("Icons")
self.checkCurrentStyle()
self.otherRadioButton.click()
self.resize(self.minimumSizeHint())
def about(self):
QMessageBox.about(self, "About Icons",
"The <b>Icons</b> example illustrates how Qt renders an icon "
"in different modes (active, normal, disabled and selected) "
"and states (on and off) based on a set of images.")
def changeStyle(self, checked):
if not checked:
return
action = self.sender()
style = QStyleFactory.create(action.data())
if not style:
return
QApplication.setStyle(style)
self.setButtonText(self.smallRadioButton, "Small (%d x %d)",
style, QStyle.PM_SmallIconSize)
self.setButtonText(self.largeRadioButton, "Large (%d x %d)",
style, QStyle.PM_LargeIconSize)
self.setButtonText(self.toolBarRadioButton, "Toolbars (%d x %d)",
style, QStyle.PM_ToolBarIconSize)
self.setButtonText(self.listViewRadioButton, "List views (%d x %d)",
style, QStyle.PM_ListViewIconSize)
self.setButtonText(self.iconViewRadioButton, "Icon views (%d x %d)",
style, QStyle.PM_IconViewIconSize)
self.setButtonText(self.tabBarRadioButton, "Tab bars (%d x %d)",
style, QStyle.PM_TabBarIconSize)
self.changeSize()
@staticmethod
def setButtonText(button, label, style, metric):
metric_value = style.pixelMetric(metric)
button.setText(label % (metric_value, metric_value))
def changeSize(self, checked=True):
if not checked:
return
if self.otherRadioButton.isChecked():
extent = self.otherSpinBox.value()
else:
if self.smallRadioButton.isChecked():
metric = QStyle.PM_SmallIconSize
elif self.largeRadioButton.isChecked():
metric = QStyle.PM_LargeIconSize
elif self.toolBarRadioButton.isChecked():
metric = QStyle.PM_ToolBarIconSize
elif self.listViewRadioButton.isChecked():
metric = QStyle.PM_ListViewIconSize
elif self.iconViewRadioButton.isChecked():
metric = QStyle.PM_IconViewIconSize
else:
metric = QStyle.PM_TabBarIconSize
extent = QApplication.style().pixelMetric(metric)
self.previewArea.setSize(QSize(extent, extent))
self.otherSpinBox.setEnabled(self.otherRadioButton.isChecked())
def changeIcon(self):
icon = QIcon()
for row in range(self.imagesTable.rowCount()):
item0 = self.imagesTable.item(row, 0)
item1 = self.imagesTable.item(row, 1)
item2 = self.imagesTable.item(row, 2)
if item0.checkState() == Qt.Checked:
if item1.text() == "Normal":
mode = QIcon.Normal
elif item1.text() == "Active":
mode = QIcon.Active
elif item1.text() == "Disabled":
mode = QIcon.Disabled
else:
mode = QIcon.Selected
if item2.text() == "On":
state = QIcon.On
else:
state = QIcon.Off
fileName = item0.data(Qt.UserRole)
image = QImage(fileName)
if not image.isNull():
icon.addPixmap(QPixmap.fromImage(image), mode, state)
self.previewArea.setIcon(icon)
def addImage(self):
fileNames, _ = QFileDialog.getOpenFileNames(self, "Open Images", '',
"Images (*.png *.xpm *.jpg);;All Files (*)")
for fileName in fileNames:
row = self.imagesTable.rowCount()
self.imagesTable.setRowCount(row + 1)
imageName = QFileInfo(fileName).baseName()
item0 = QTableWidgetItem(imageName)
item0.setData(Qt.UserRole, fileName)
item0.setFlags(item0.flags() & ~Qt.ItemIsEditable)
item1 = QTableWidgetItem("Normal")
item2 = QTableWidgetItem("Off")
if self.guessModeStateAct.isChecked():
if '_act' in fileName:
item1.setText("Active")
elif '_dis' in fileName:
item1.setText("Disabled")
elif '_sel' in fileName:
item1.setText("Selected")
if '_on' in fileName:
item2.setText("On")
self.imagesTable.setItem(row, 0, item0)
self.imagesTable.setItem(row, 1, item1)
self.imagesTable.setItem(row, 2, item2)
self.imagesTable.openPersistentEditor(item1)
self.imagesTable.openPersistentEditor(item2)
item0.setCheckState(Qt.Checked)
def removeAllImages(self):
self.imagesTable.setRowCount(0)
self.changeIcon()
def createPreviewGroupBox(self):
self.previewGroupBox = QGroupBox("Preview")
self.previewArea = IconPreviewArea()
layout = QVBoxLayout()
layout.addWidget(self.previewArea)
self.previewGroupBox.setLayout(layout)
def createImagesGroupBox(self):
self.imagesGroupBox = QGroupBox("Images")
self.imagesTable = QTableWidget()
self.imagesTable.setSelectionMode(QAbstractItemView.NoSelection)
self.imagesTable.setItemDelegate(ImageDelegate(self))
self.imagesTable.horizontalHeader().setDefaultSectionSize(90)
self.imagesTable.setColumnCount(3)
self.imagesTable.setHorizontalHeaderLabels(("Image", "Mode", "State"))
self.imagesTable.horizontalHeader().setSectionResizeMode(0, QHeaderView.Stretch)
self.imagesTable.horizontalHeader().setSectionResizeMode(1, QHeaderView.Fixed)
self.imagesTable.horizontalHeader().setSectionResizeMode(2, QHeaderView.Fixed)
self.imagesTable.verticalHeader().hide()
self.imagesTable.itemChanged.connect(self.changeIcon)
layout = QVBoxLayout()
layout.addWidget(self.imagesTable)
self.imagesGroupBox.setLayout(layout)
    def createIconSizeGroupBox(self):
        """Build the "Icon Size" group box: six preset radio buttons plus an
        "Other" radio button paired with a spin box (8-128 px).

        The preset buttons are created without labels here; presumably
        their text is filled in elsewhere from style metrics — not
        visible in this block.
        """
        self.iconSizeGroupBox = QGroupBox("Icon Size")
        self.smallRadioButton = QRadioButton()
        self.largeRadioButton = QRadioButton()
        self.toolBarRadioButton = QRadioButton()
        self.listViewRadioButton = QRadioButton()
        self.iconViewRadioButton = QRadioButton()
        self.tabBarRadioButton = QRadioButton()
        self.otherRadioButton = QRadioButton("Other:")
        self.otherSpinBox = IconSizeSpinBox()
        self.otherSpinBox.setRange(8, 128)
        self.otherSpinBox.setValue(64)
        # Every control change re-renders the preview at the new size.
        self.smallRadioButton.toggled.connect(self.changeSize)
        self.largeRadioButton.toggled.connect(self.changeSize)
        self.toolBarRadioButton.toggled.connect(self.changeSize)
        self.listViewRadioButton.toggled.connect(self.changeSize)
        self.iconViewRadioButton.toggled.connect(self.changeSize)
        self.tabBarRadioButton.toggled.connect(self.changeSize)
        self.otherRadioButton.toggled.connect(self.changeSize)
        self.otherSpinBox.valueChanged.connect(self.changeSize)
        otherSizeLayout = QHBoxLayout()
        otherSizeLayout.addWidget(self.otherRadioButton)
        otherSizeLayout.addWidget(self.otherSpinBox)
        otherSizeLayout.addStretch()
        # Two columns of three presets, with the "Other" row spanning both.
        layout = QGridLayout()
        layout.addWidget(self.smallRadioButton, 0, 0)
        layout.addWidget(self.largeRadioButton, 1, 0)
        layout.addWidget(self.toolBarRadioButton, 2, 0)
        layout.addWidget(self.listViewRadioButton, 0, 1)
        layout.addWidget(self.iconViewRadioButton, 1, 1)
        layout.addWidget(self.tabBarRadioButton, 2, 1)
        layout.addLayout(otherSizeLayout, 3, 0, 1, 2)
        layout.setRowStretch(4, 1)
        self.iconSizeGroupBox.setLayout(layout)
    def createActions(self):
        """Create all menu actions, including one checkable style action per
        available QStyle (grouped so at most one is active at a time).
        """
        self.addImagesAct = QAction("&Add Images...", self, shortcut="Ctrl+A",
                triggered=self.addImage)
        self.removeAllImagesAct = QAction("&Remove All Images", self,
                shortcut="Ctrl+R", triggered=self.removeAllImages)
        self.exitAct = QAction("&Quit", self, shortcut="Ctrl+Q",
                triggered=self.close)
        self.styleActionGroup = QActionGroup(self)
        for styleName in QStyleFactory.keys():
            action = QAction(self.styleActionGroup,
                    text="%s Style" % styleName, checkable=True,
                    triggered=self.changeStyle)
            # Stash the factory key so changeStyle/checkCurrentStyle can
            # recover which style this action represents.
            action.setData(styleName)
        self.guessModeStateAct = QAction("&Guess Image Mode/State", self,
                checkable=True, checked=True)
        self.aboutAct = QAction("&About", self, triggered=self.about)
        self.aboutQtAct = QAction("About &Qt", self,
                triggered=QApplication.instance().aboutQt)
    def createMenus(self):
        """Assemble the File, View and Help menus from the created actions."""
        self.fileMenu = self.menuBar().addMenu("&File")
        self.fileMenu.addAction(self.addImagesAct)
        self.fileMenu.addAction(self.removeAllImagesAct)
        self.fileMenu.addSeparator()
        self.fileMenu.addAction(self.exitAct)
        # View menu lists one entry per installed style plus the
        # mode/state guessing toggle.
        self.viewMenu = self.menuBar().addMenu("&View")
        for action in self.styleActionGroup.actions():
            self.viewMenu.addAction(action)
        self.viewMenu.addSeparator()
        self.viewMenu.addAction(self.guessModeStateAct)
        self.menuBar().addSeparator()
        self.helpMenu = self.menuBar().addMenu("&Help")
        self.helpMenu.addAction(self.aboutAct)
        self.helpMenu.addAction(self.aboutQtAct)
    def createContextMenu(self):
        """Expose the add/remove actions as the images table's context menu."""
        self.imagesTable.setContextMenuPolicy(Qt.ActionsContextMenu)
        self.imagesTable.addAction(self.addImagesAct)
        self.imagesTable.addAction(self.removeAllImagesAct)
    def checkCurrentStyle(self):
        """Tick the style-menu action that matches the application's current
        style.

        Compares QStyle metaObject class names because factory keys and
        class names differ in letter case.
        """
        for action in self.styleActionGroup.actions():
            styleName = action.data()
            # Instantiate a candidate purely for the class-name comparison.
            candidate = QStyleFactory.create(styleName)
            if candidate is None:
                # Factory could not create this style; give up the search
                # (mirrors the upstream Qt example's behavior).
                return
            if candidate.metaObject().className() == QApplication.style().metaObject().className():
                action.trigger()
if __name__ == '__main__':
    import sys

    # Standard Qt bootstrap: create the application, show the main
    # window, then hand control to the Qt event loop until it exits.
    app = QApplication(sys.argv)
    mainWin = MainWindow()
    mainWin.show()
    sys.exit(app.exec_())
|
/*******************************************************************************
* KindEditor - WYSIWYG HTML Editor for Internet
* Copyright (C) 2006-2011 kindsoft.net
*
* @author Roddy <<EMAIL>>
* @site http://www.kindsoft.net/
* @licence http://www.kindsoft.net/license.php
*******************************************************************************/
// Registers the "quickformat" toolbar button: applies a 2em text-indent
// to existing block elements and wraps runs of loose inline nodes in
// indented <p> blocks.  Branches that start with an <img> are skipped.
KindEditor.plugin('quickformat', function(K) {
	var self = this, name = 'quickformat',
		// Block-level tags that get text-indent applied in place.
		blockMap = K.toMap('blockquote,center,div,h1,h2,h3,h4,h5,h6,p');
	// Walk down to the deepest first descendant of a node.
	function getFirstChild(knode) {
		var child = knode.first();
		while (child && child.first()) {
			child = child.first();
		}
		return child;
	}
	self.clickToolbar(name, function() {
		self.focus();
		var doc = self.edit.doc,
			range = self.cmd.range,
			child = K(doc.body).first(), next,
			nodeList = [], subList = [],
			// Save the caret so it can be restored after rewriting the DOM.
			bookmark = range.createBookmark(true);
		while(child) {
			next = child.next();
			var firstChild = getFirstChild(child);
			if (!firstChild || firstChild.name != 'img') {
				if (blockMap[child.name]) {
					// Strip leading whitespace from the serialized markup.
					// html() renders non-breaking spaces as the entity text
					// "&nbsp;", so the pattern must match the entity strings;
					// literal NBSP characters are already covered by \s.
					// (The previous pattern contained raw decoded characters,
					// a result of HTML-entity corruption, and never matched
					// the entities html() actually returns.)
					child.html(child.html().replace(/^(\s|&nbsp;|&ensp;)+/ig, ''));
					child.css('text-indent', '2em');
				} else {
					// Loose inline node: queue it for wrapping.
					subList.push(child);
				}
				if (!next || (blockMap[next.name] || blockMap[child.name] && !blockMap[next.name])) {
					// A block boundary ends the current run of inline nodes.
					if (subList.length > 0) {
						nodeList.push(subList);
					}
					subList = [];
				}
			}
			child = next;
		}
		// Wrap each collected run of inline nodes in an indented <p>.
		K.each(nodeList, function(i, subList) {
			var wrapper = K('<p style="text-indent:2em;"></p>', doc);
			subList[0].before(wrapper);
			K.each(subList, function(i, knode) {
				wrapper.append(knode);
			});
		});
		range.moveToBookmark(bookmark);
		self.addBookmark();
	});
});
/**
--------------------------
abcd<br />
1234<br />
to
<p style="text-indent:2em;">
abcd<br />
1234<br />
</p>
--------------------------
abcd<img>1233
<p>1234</p>
to
<p style="text-indent:2em;">abcd<img>1233</p>
<p style="text-indent:2em;">1234</p>
--------------------------
*/ |
/*
* Copyright 2022 Creek Contributors (https://github.com/creek-service)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.creekservice.api.base.type;
import static org.creekservice.api.base.type.JarVersion.jarVersion;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import java.net.URI;
import java.util.Optional;
import org.creekservice.api.base.annotation.VisibleForTesting;
import org.junit.jupiter.api.Test;
// Tests for JarVersion.jarVersion: extracting a semantic version from the
// name of the jar a class (or class-path URL) was loaded from.
class JarVersionTest {

    @Test
    void shouldGetVersionFromJar() {
        // VisibleForTesting comes from a versioned jar on the test
        // classpath, so a version should be found.
        assertThat(JarVersion.jarVersion(VisibleForTesting.class), is(not(Optional.empty())));
    }

    @Test
    void shouldReturnEmptyWhenRunningInTheBuildAndThereIsNoJar() throws Exception {
        // Class-directory URLs (no jar at all) yield no version.
        assertThat(
                jarVersion(URI.create("file:/blah/blah/type/build/classes/java/test/").toURL()),
                is(Optional.empty()));
    }

    @Test
    void shouldReturnEmptyIfSomeWeirdJarName() throws Exception {
        // Jar names without a trailing -<version> component yield empty.
        assertThat(
                jarVersion(URI.create("file:/blah/blah/a-version-less-jar.jar").toURL()),
                is(Optional.empty()));
    }

    @Test
    void shouldReturnVersionForSemantic() throws Exception {
        assertThat(
                jarVersion(URI.create("file:/blah/blah/some-jar-0.1.3.jar").toURL()),
                is(Optional.of("0.1.3")));
    }

    @Test
    void shouldReturnVersionForSemanticSnapshot() throws Exception {
        // Pre-release qualifiers are kept as part of the version string.
        assertThat(
                jarVersion(URI.create("file:/blah/blah/some-jar-0.1.3-SNAPSHOT.jar").toURL()),
                is(Optional.of("0.1.3-SNAPSHOT")));
    }
}
|
package flect
var singularRules = []rule{}
// AddSingular adds a rule that will replace the given suffix with the replacement suffix.
func AddSingular(ext string, repl string) {
	singularMoot.Lock()
	defer singularMoot.Unlock()
	// Two rules are registered: the first strips the plural suffix and
	// attaches the singular replacement; the second maps the replacement
	// to itself so already-singular words pass through unchanged.
	singularRules = append(singularRules,
		rule{
			suffix: ext,
			fn: func(word string) string {
				return word[:len(word)-len(ext)] + repl
			},
		},
		rule{
			suffix: repl,
			fn: func(word string) string {
				return word
			},
		},
	)
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.streampipes.smp.constants;
/**
 * Types of StreamPipes pipeline elements, each paired with a
 * human-readable display name.
 */
public enum PeType {

  ADAPTER("Adapter"),
  PROCESSOR("Data Processor"),
  SINK("Data Sink");

  // final: enum constants are shared singletons, so their state must be
  // immutable.
  private final String friendly;

  PeType(String friendly) {
    this.friendly = friendly;
  }

  /**
   * Returns the human-readable display name of this element type.
   */
  public String getFriendly() {
    return friendly;
  }
}
|
from http.server import BaseHTTPRequestHandler
from io import BytesIO
class HTTPResponse(BaseHTTPRequestHandler):
    """A BaseHTTPRequestHandler that works without a live socket.

    The stock handler expects ``(request, client_address, server)`` and
    reads/writes real socket files.  This subclass instead fabricates just
    enough state (in-memory rfile/wfile plus the attributes that
    ``parse_request()`` would normally set) for the base class's header
    machinery to run offline.  The original version set none of this up,
    so the first ``send_response()`` raised AttributeError on the missing
    ``wfile``/``request_version``.
    """

    def __init__(self, status_code, message):
        # Deliberately do NOT call BaseHTTPRequestHandler.__init__: it
        # would try to service a real connection.
        self.status_code = status_code
        self.message = message
        # In-memory streams standing in for the socket files.
        self.rfile = BytesIO()
        self.wfile = BytesIO()
        # Attributes normally produced by parse_request(); required by
        # send_response_only(), send_header() and end_headers().
        self.request_version = 'HTTP/1.1'
        self.protocol_version = 'HTTP/1.1'
        self.requestline = ''
        self.command = 'GET'
        self.client_address = ('127.0.0.1', 0)

    def send_response(self, code, message=None):
        """Emit the status line plus Server/Date headers, then end headers.

        Overrides the base implementation to skip request logging, which
        would fail without a real parsed request.
        """
        self.send_response_only(code, message)
        self.send_header('Server', self.version_string())
        self.send_header('Date', self.date_time_string())
        self.end_headers()

    def do_GET(self):
        """Write the configured status line, headers and message body."""
        self.send_response(self.status_code)
        self.wfile.write(self.message.encode('utf-8'))
def create_http_response(status_code, message=''):
    """Render a complete HTTP response (status line, headers, body) as bytes.

    :param status_code: numeric HTTP status, e.g. 200
    :param message: optional response body text
    :return: the raw response bytes

    The original implementation never connected its BytesIO buffer to the
    handler, so it returned empty bytes (and crashed earlier on the
    handler's missing socket state).
    """
    response = BytesIO()
    handler = HTTPResponse(status_code, message)
    # The handler never sees a real socket, so wire its output stream to
    # our buffer and supply the attributes parse_request() would set.
    handler.wfile = response
    handler.rfile = BytesIO()
    handler.request_version = 'HTTP/1.1'
    handler.protocol_version = 'HTTP/1.1'
    handler.requestline = ''
    handler.command = 'GET'
    handler.client_address = ('127.0.0.1', 0)
    # send_response() already terminates the header block; the original's
    # extra end_headers() call appended a stray blank line.
    handler.send_response(status_code)
    if message:
        handler.wfile.write(message.encode('utf-8'))
    return response.getvalue()
<filename>spacegraphcats/catlas/converter.py
#!/usr/bin/env python3
"""Convert graph with nonconsecutive ids to have consecutive ids."""
import sys
from .graph_parser import write
class IdentityHash:
    """Mapping-like object that returns every key unchanged.

    Stands in for a real id-translation table when vertex ids are kept
    as-is (``consecutive_ids=False``).
    """

    def __getitem__(self, item):
        return item
def _parse_line(line):
return list(map(str.strip, line.split(',')))
def parse(graph_file, add_vertex=None, add_edge=None, consecutive_ids=False):
    """Parser for (old) simple graph format.
    Parse a graph and call provided methods with vertices and edges.

    :param graph_file: file-like object positioned at the vertex header line
    :param add_vertex: callback(vertex_id, second_int_column, attr_names,
        attr_values); NOTE(review): the meaning of the second int column is
        not visible here -- confirm against graph_parser
    :param add_edge: callback(src_id, dst_id, attr_names, attr_values)
    :param consecutive_ids: if True, renumber vertex ids densely from 0
        in first-seen order
    :return: list mapping consecutive id -> original id when
        consecutive_ids is True, otherwise an IdentityHash
    """
    # read vertices
    vertex_attributes = _parse_line(graph_file.readline())[2:]
    # consecutive id to original id
    if consecutive_ids:
        id_map = []
    else:
        id_map = IdentityHash()
    # original id to consecutive id
    id_map_reverse = {}

    def _get_consecutive_id(id):
        # Identity when renumbering is off; otherwise allocate new dense
        # ids in order of first appearance.
        if not consecutive_ids:
            return id
        if id in id_map_reverse:
            return id_map_reverse[id]
        else:
            consecutive_id = len(id_map)
            id_map_reverse[id] = consecutive_id
            id_map.append(id)
            return consecutive_id

    next_line = graph_file.readline()
    # A line of length <= 1 (empty or a bare newline) ends the section.
    while len(next_line) > 1:
        if add_vertex is not None:
            parsed = _parse_line(next_line)
            add_vertex(_get_consecutive_id(int(parsed[0])), int(parsed[1]),
                       vertex_attributes, parsed[2:])
        next_line = graph_file.readline()
    if add_edge is None:
        # we won't be doing anything with the edges anyway
        return id_map
    # read edges
    edge_attributes = _parse_line(graph_file.readline())[2:]
    next_line = graph_file.readline()
    while len(next_line) > 1:
        parsed = _parse_line(next_line)
        add_edge(_get_consecutive_id(int(parsed[0])),
                 _get_consecutive_id(int(parsed[1])),
                 edge_attributes, parsed[2:])
        next_line = graph_file.readline()
    return id_map
def main():
    """Apply to graph.

    Reads a graph in the old simple format from stdin, renumbers vertex
    ids to be consecutive, and writes the converted graph to stdout.
    """
    edges = []
    num_vertices = 0

    def add_edge(u, v, *args):
        # Collect renumbered endpoints; attributes are discarded.
        edges.append((u, v))

    def add_vertex(u, *args):
        # Only the total vertex count is needed for the output header.
        nonlocal num_vertices
        num_vertices += 1

    parse(sys.stdin, add_vertex, add_edge, consecutive_ids=True)
    write(sys.stdout, num_vertices, edges)
if __name__ == '__main__':
main()
|
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <gtest/gtest.h>
#include <string.h>
#include <ie_builders.hpp>
#include "builder_test.hpp"
using namespace testing;
using namespace InferenceEngine;
// Builds a small network (input + memory-in -> concat -> fully-connected
// -> memory-out) via the builder API and checks that ports and layers
// survive conversion to ICNNNetwork.
class MemoryLayerBuilderTest : public BuilderTestCommon {};

TEST_F(MemoryLayerBuilderTest, getExistsLayerFromNetworkBuilder) {
    Builder::Network network("Test");
    // Paired memory layers: MemoryIn1 feeds the graph, MemoryOut1 stores
    // the fc output for later iterations.
    Builder::MemoryLayer memoryInBuilder("MemoryIn1"), memoryOutBuilder("MemoryOut1");
    Builder::ConcatLayer concat("concat");
    Builder::InputLayer input("inLayer");
    Builder::FullyConnectedLayer fc("fc0");
    // Two {1,30} inputs are concatenated into {1,60}, then reduced back
    // to {1,30} by the fully-connected layer.
    memoryInBuilder.setOutputPort(Port({1, 30}));
    memoryOutBuilder.setInputPort(Port({1, 30}));
    input.setPort(Port({1, 30}));
    concat.setInputPorts({Port({1,30}), Port({1, 30})});
    concat.setOutputPort(Port({1, 60}));
    fc.setInputPort(Port({1, 60}));
    fc.setOutputPort(Port({1, 30}));
    size_t inId = network.addLayer(memoryInBuilder);
    size_t outId = network.addLayer(memoryOutBuilder);
    size_t inId2 = network.addLayer(concat);
    size_t inId3 = network.addLayer(input);
    size_t inIdfc = network.addLayer(fc);
    // Wire: input -> concat:0, memory-in -> concat:1, concat -> fc -> memory-out.
    network.connect({inId3}, {inId2, 0});
    network.connect({inId}, {inId2, 1});
    network.connect({inId2}, {inIdfc});
    network.connect({inIdfc}, {outId});
    ASSERT_EQ(memoryInBuilder.getOutputPort().shape(), Port({1, 30}).shape());
    auto cnn_network = Builder::convertToICNNNetwork(network.build());
    // The concat layer must still exist and expose exactly one output.
    CNNLayerPtr layer;
    cnn_network->getLayerByName("concat", layer, nullptr);
    ASSERT_EQ(layer->outData.size(), 1);
}
<filename>calculator.py
import os
import logging
from pathlib import Path
from operations import catalogue
from parsers import XMLSpecParser
class ExpressionCalculator:
    """
    Processes all expression files with given extension in source directory
    Assumes that all files with given extension are expression files
    """

    __slots__ = ('source_dir', 'target_dir', 'extension', 'operations', 'spec_parser')

    def __init__(self, source: str, target: str, extension: str):
        """
        :param source: Path to source directory
        :param target: Path to target directory
        :param extension: extension of the files to be processed, including
            the leading dot (it is compared against ``Path.suffix``)
        :return: None
        """
        self.operations = None
        self.spec_parser = None
        self.source_dir = None
        self.target_dir = None
        self.extension = extension
        self.validate(source, target)  # Validate the inputs
        self.operations = catalogue()  # Build the operations catalogues
        self.spec_parser = XMLSpecParser(self.operations, extension)  # Initialize the parser

    def process(self):
        """
        This function acts as a coordinator for actual execution:
        parse each spec, evaluate its operations, persist the results.
        """
        logger = logging.getLogger(__name__)
        logger.info('Collecting names of files that will be processed')
        entries = self.entries()
        logger.info('Found %d files to process', len(entries))
        for spec in entries:
            logger.info('Parsing spec: %s', spec)
            operations = self.spec_parser.parse(spec)
            logger.info('Evaluating operations found in the spec')
            results = self.evaluate(operations)
            logger.info('Persisting results for the spec')
            self.persist(spec, results)

    def entries(self) -> list:
        """
        Walk through the file system and find all suitable files
        :return: a list of files that have to be processed
        """
        entries = []
        logger = logging.getLogger(__name__)
        logger.debug('Traversing the source directory')
        for root, _, names in os.walk(self.source_dir):
            for name in names:
                # Path.suffix includes the leading dot, so self.extension
                # must have been given as e.g. '.xml'.
                if Path(name).suffix == self.extension:
                    entries.append(os.path.join(root, name))
        return entries

    def evaluate(self, operations: dict) -> dict:
        """
        Execute operations.
        :param operations: a mapping of id and operations objects
        :return: a map of id and results
        """
        return {oid: obj() for oid, obj in operations.items()}

    def persist(self, spec: str, results: dict):
        """
        Transform the results using spec parser and save
        them to the target directory
        :param spec: name of the spec to be used for generating target file name
        :param results: mapping of top-level operation id and their results
        :return: information on the resultant file
        """
        logger = logging.getLogger(__name__)
        logger.debug('Preparing results for persistence')
        serialized_result = self.spec_parser.serialize(results, 'expressions', 'result')
        if serialized_result:
            logger.debug('Determining result file path')
            spec_path = Path(spec)
            # Path.stem is the file name without its final suffix; the
            # previous name[:-len(suffix)] slicing collapsed to '' when a
            # file had no suffix at all.
            result_file_name = '%s_result%s' % (spec_path.stem, spec_path.suffix)
            result_file_path = self.target_dir.joinpath(result_file_name)
            with open(result_file_path, 'w') as rf:
                rf.write(serialized_result)
            logger.info('Results for spec %s have been saved to: %s', spec_path.name, result_file_path)
        else:
            logger.error("Failed to serialize results")

    def validate(self, source: str, target: str):
        """
        Validates inputs to the application
        :param source: path to source directory
        :param target: path to target directory
        :raises ValueError: if either path is missing, is not a directory,
            or lacks the required read/write permissions
        """
        logger = logging.getLogger(__name__)
        source_path = Path(source)  # Path to source directory
        target_path = Path(target)  # Path to destination directory
        logger.debug('Validating paths are valid and are directories')
        if not (source_path.exists() and source_path.is_dir()):
            raise ValueError('Path to source directory is not valid.')
        if not (target_path.exists() and target_path.is_dir()):
            raise ValueError('Path to target directory is not valid.')
        source = source_path.resolve()
        target = target_path.resolve()
        logger.debug('Checking if the directories are accessible to current user')
        if not os.access(source, os.R_OK):
            raise ValueError('Read permissions on source directory is missing.')
        if not os.access(target, os.R_OK | os.W_OK):
            raise ValueError('Read/write permissions on target directory are missing.')
        self.source_dir = source
        self.target_dir = target
|
<filename>airbnb-pricing/src/App.js<gh_stars>1-10
import React from 'react';
import PrivateRoute from "./components/PrivateRoute";
import { logout } from "./store/actions";
import { connect } from "react-redux";
import {
BrowserRouter as Router,
Route,
withRouter,
Switch,
Redirect
} from "react-router-dom";
import Login from "./components/Login";
import Signup from "./components/Signup";
import PropertyList from "./components/PropertyList";
import AddProperty from './components/AddProperty';
// Root component: defines client-side routing.  Routes behind
// PrivateRoute additionally substitute Signup when no auth token is in
// localStorage, so unauthenticated visitors land on registration.
function App(props) {
  return (
    <Router>
      <Switch>
        <PrivateRoute
          path="/properties"
          exact
          component={localStorage.getItem("token") ? PropertyList : Signup}
        />
        <PrivateRoute
          path="/add"
          exact
          component={localStorage.getItem("token") ? AddProperty : Signup}
        />
        {/* <PrivateRoute path="/properties" exact component={PropertyList} />
        <PrivateRoute path="/add" exact component={AddProperty} /> */}
        <Route path="/login" component={Login} />
        <Route path="/signup" component={Signup} />
        <Route
          exact
          path="/"
          render={() =>
            localStorage.getItem("token") ? <Redirect to="/properties" /> : <Signup />
          }
        />
      </Switch>
    </Router>
  );
}

// Expose auth-related state from the Redux store as props.
const mapStateToProps = state => ({
  loginStart: state.loginStart,
  token: state.token
});

export default connect(mapStateToProps, { logout })(withRouter(App));
|
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { AuthGuard } from './auth.guard';
import { AppComponent } from './app.component';
import { LoginComponent } from './login/login.component';
import { LoginPageComponent } from './routes/login-page/login-page.component';
// Top-level routes.  Feature areas are lazy-loaded via loadChildren;
// unknown URLs fall through to the 404 exception page.
const routes: Routes = [
  // {
  //   path: '',
  //   component: AppComponent,
  //   children: [
  {path: '', redirectTo: 'account', pathMatch: 'full'},
  {path: 'account', loadChildren: './routes/account-page/account-page.module#AccountPageModule'},
  {path: 'config', loadChildren: './routes/upload-config/upload-config.module#UploadConfigModule'},
  {path: 'tool', loadChildren: './routes/tool/tool.module#ToolModule'},
  // ]
  // },
  {path: 'login', component: LoginPageComponent},
  {
    // Catch-all: anything unmatched redirects to the 404 page.
    path: '**',
    redirectTo: 'exception/404',
    pathMatch: 'full'
  },
  // { path: 'exception', loadChildren: './exception/exception.module#ExceptionModule' }
];

// Registers the routes with the root router and re-exports RouterModule
// so directives like routerLink are available to the app module.
@NgModule({
  imports: [RouterModule.forRoot(routes)],
  exports: [RouterModule]
})
export class AppRoutingModule { }
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.management.entitlement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.Beta;
import brooklyn.entity.Entity;
import brooklyn.management.entitlement.Entitlements.EntityAndItem;
/**
* provides an easy entry point to supplying entitlements, by providing the dispatch and defining the additional methods
* which have to be supplied.
* <p>
* note that this class may change as versions change, deliberately breaking backwards compatibility
* to ensure all permissions are used.
* <p>
* @since 0.7.0 */
@Beta
public abstract class EntitlementManagerAdapter implements EntitlementManager {

    private static final Logger log = LoggerFactory.getLogger(EntitlementManagerAdapter.class);

    @SuppressWarnings("unchecked")
    @Override
    public <T> boolean isEntitled(EntitlementContext context, EntitlementClass<T> entitlementClass, T entitlementClassArgument) {
        if (log.isTraceEnabled()) {
            log.trace("Checking entitlement of "+context+" to "+entitlementClass+" "+entitlementClassArgument);
        }
        // Root entitlement short-circuits every other check.
        if (isEntitledToRoot( context )) return true;
        // Dispatch on the entitlement class.  The unchecked casts follow
        // from EntitlementClassesEnum.of(...) pairing each constant with
        // the argument type of its entitlement class.
        switch (Entitlements.EntitlementClassesEnum.of(entitlementClass)) {
        case ENTITLEMENT_SEE_ENTITY:
            return isEntitledToSeeEntity( context, (Entity)entitlementClassArgument );
        case ENTITLEMENT_SEE_SENSOR:
            return isEntitledToSeeSensor( context, (EntityAndItem<String>)entitlementClassArgument );
        case ENTITLEMENT_INVOKE_EFFECTOR:
            return isEntitledToInvokeEffector( context, (EntityAndItem<String>)entitlementClassArgument );
        case ENTITLEMENT_DEPLOY_APPLICATION:
            return isEntitledToDeploy( context, entitlementClassArgument );
        case ENTITLEMENT_SEE_ALL_SERVER_INFO:
            return isEntitledToSeeAllServerInfo( context );
        default:
            // Unknown entitlement types are denied rather than allowed.
            log.warn("Unsupported permission type: "+entitlementClass+" / "+entitlementClassArgument);
            return false;
        }
    }

    // Subclasses supply the concrete policy decision for each
    // entitlement kind below.
    protected abstract boolean isEntitledToSeeSensor(EntitlementContext context, EntityAndItem<String> sensorInfo);
    protected abstract boolean isEntitledToSeeEntity(EntitlementContext context, Entity entity);
    protected abstract boolean isEntitledToInvokeEffector(EntitlementContext context, EntityAndItem<String> effectorInfo);
    protected abstract boolean isEntitledToDeploy(EntitlementContext context, Object app);
    protected abstract boolean isEntitledToSeeAllServerInfo(EntitlementContext context);
    protected abstract boolean isEntitledToRoot(EntitlementContext context);
}
|
#!/usr/bin/env bash
#
# Install dependencies for the package plus the coverage tool.
go get -d -v ./...

# The cover tool lived at code.google.com for Go 1.3 and moved to
# golang.org/x/tools in later releases.  Match "go1.3" exactly: the old
# unanchored `grep 1.3` also matched go1.13, go1.30, etc., and the
# separate `[ $? == 0 ]` check used a non-POSIX comparison.
if go version | grep -q 'go1\.3\([^0-9]\|$\)'; then
    go get code.google.com/p/go.tools/cmd/cover
else
    go get golang.org/x/tools/cmd/cover
fi
|
# Echo commands and abort on the first failure.
set -xe

# Build and install libspatialindex from source, then clean up the tree.
git clone https://github.com/libspatialindex/libspatialindex.git
cd libspatialindex
# Pin to a known-good revision for reproducible builds.
git checkout afabefc21d7f486a50089db306d82152aa8cc6a7
cmake .
make
make install
cd ..
rm -rf libspatialindex
|
<reponame>louiethe17th/data-structures-and-algorithms
package day19;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
// Tests BinaryTree.isIdentical for equal trees, two empty trees, and an
// empty tree compared with a populated one.
class BinaryTreeTest {

    @Test
    void isIdentical() {
        // Two trees built from the same insertion sequence must be identical.
        BinaryTree tree1 = new BinaryTree();
        tree1.add(21);
        tree1.add(12);
        tree1.add(56);
        tree1.add(10);
        tree1.add(1);
        tree1.add(3);
        tree1.add(75);
        tree1.add(5);
        BinaryTree tree = new BinaryTree();
        tree.add(21);
        tree.add(12);
        tree.add(56);
        tree.add(10);
        tree.add(1);
        tree.add(3);
        tree.add(75);
        tree.add(5);
        System.out.println(tree.toString());
        System.out.println(tree1.toString());
        boolean results = tree.isIdentical(tree1.root);
        assertEquals(true, results);
    }

    @Test
    void isIdenticalEmpty() {
        // Two empty trees count as identical.
        BinaryTree tree1 = new BinaryTree();
        BinaryTree tree = new BinaryTree();
        boolean results = tree.isIdentical(tree1.root);
        assertEquals(true, results);
    }

    @Test
    void isIdenticalEmptyOneEmpty() {
        // A populated tree is not identical to an empty one.
        BinaryTree tree1 = new BinaryTree();
        BinaryTree tree = new BinaryTree();
        tree.add(21);
        tree.add(12);
        tree.add(56);
        tree.add(10);
        boolean results = tree.isIdentical(tree1.root);
        assertEquals(false, results);
    }
}
#!/bin/bash

# Strip QDIMACS quantifier lines (those starting with 'a' or 'e') so the
# remainder is plain DIMACS CNF that kissat can read.
grep -v "^a\|^e" $1 > temp.cnf
echo "DETECT"
# Run kissat's gate extraction, capped at 10 seconds.
timeout 10s ./../build/kissat temp.cnf --gateextraction=1 --gatexorrecurse=1
# Rename the emitted gate file after the input file and move it into the
# output directory given as $2.
mv "kissat_gates.gate" $2"/"$(basename $1 .qdimacs)".gate"
|
<gh_stars>1-10
import { Arrayable } from '@0x-jerry/utils'
/**
* User snippet configuration
*/
export interface VSCodeSchemasGlobalSnippets {
  // Snippet name -> snippet definition.
  [k: string]: Snippet
}

export interface Snippet {
  /**
   * The prefix to use when selecting the snippet in intellisense
   */
  prefix?: Arrayable<string>
  /**
   * Maybe dynamic: a function is invoked to produce the body lines.
   */
  body: Arrayable<string> | (() => Arrayable<string>)
  /**
   * The snippet description.
   */
  description?: Arrayable<string>
  /**
   * A list of language names to which this snippet applies, e.g. 'typescript,javascript'.
   */
  scope?: string
}

/**
 * Location of a snippet file and, optionally, the language it targets.
 */
export interface SnippetConfig {
  path: string
  language?: string
}

/**
 * Shape of a remote snippet package manifest (VS Code extension style).
 */
export interface RemoteSnippetsConfig {
  contributes?: {
    snippets?: SnippetConfig[]
  }
}
|
<gh_stars>1-10
// Copyright 2018 Cobaro Pty Ltd. All Rights Reserved.
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package main
import (
"flag"
"github.com/cobaro/elvin/elog"
"github.com/cobaro/elvin/elvin"
"os"
"os/signal"
"time"
)
// Manager bundles the daemon's configuration, its router, the protocols
// (listen endpoints) it serves, and the failover protocol.
type Manager struct {
	config    *Configuration
	router    Router
	protocols map[string]*elvin.Protocol // keyed by protocol address
	failover  *elvin.Protocol            // destination used on failover
}
// main loads configuration, configures logging and protocols, starts the
// router, then blocks handling OS signals until interrupted.
func main() {
	var manager Manager
	var err error

	// Argument parsing
	configFile := flag.String("config", "elvind.json", "JSON config file path")
	verbosity := flag.Int("verbose", 3, "Verbosity level (0-8)")
	flag.Parse()

	manager.router.elog.Logf(elog.LogLevelError, "testing")
	if manager.config, err = LoadConfig(*configFile); err != nil {
		// Logf is printf-style; the original passed err and the trailing
		// text as stray arguments with no matching verbs.
		manager.router.elog.Logf(elog.LogLevelError, "config load failed: %v, using defaults", err)
		manager.config = DefaultConfig()
	}
	manager.router.elog.SetLogLevel(*verbosity)
	manager.router.elog.Logf(elog.LogLevelInfo1, "Logging at log level %d", manager.router.elog.LogLevel())
	manager.router.elog.SetLogDateFormat(elog.LogDateEpochMilli)
	manager.router.elog.Logf(elog.LogLevelInfo2, "Loaded config: %+v", *manager.config)

	manager.router.SetMaxConnections(manager.config.MaxConnections)
	manager.router.SetDoFailover(manager.config.DoFailover)
	manager.router.SetTestConnInterval(time.Duration(manager.config.TestConnInterval) * time.Second)
	manager.router.SetTestConnTimeout(time.Duration(manager.config.TestConnTimeout) * time.Second)

	manager.protocols = make(map[string]*elvin.Protocol)
	for _, url := range manager.config.Protocols {
		if protocol, e := elvin.URLToProtocol(url); e != nil {
			// Include the offending URL: the original format string had
			// two verbs but was passed only the error.
			manager.router.elog.Logf(elog.LogLevelWarning, "Can't convert url %s to protocol: %v", url, e)
		} else {
			manager.protocols[protocol.Address] = protocol
			manager.router.AddProtocol(protocol.Address, protocol)
		}
	}

	// Fixed inverted branches: the original installed the failover
	// protocol when parsing FAILED and called err.Error() on a nil
	// error (a panic) on success.
	if manager.failover, err = elvin.URLToProtocol(manager.config.Failover); err != nil {
		manager.router.elog.Logf(elog.LogLevelWarning, err.Error())
	} else {
		manager.router.SetFailoverProtocol(manager.failover)
	}

	manager.router.elog.Logf(elog.LogLevelInfo1, "Start router")
	go manager.router.Start()

	// Set up sigint handling and wait for one.  signal.Notify requires a
	// buffered channel so a signal arriving while we are not receiving
	// is not dropped.
	ch := make(chan os.Signal, 1)
	signal.Notify(ch, os.Interrupt)

	// FIXME: SIGUSR[12] not supported on windows. Bring this back
	// via REST api at some point
	// State reporting on SIGUSR1 (testing/debugging)
	// signal.Notify(ch, syscall.SIGUSR1)
	// Failover on SIGUSR2 (testing)
	// if manager.router.doFailover {
	// FIXME: elvin://
	// signal.Notify(ch, syscall.SIGUSR2)
	// }

	for {
		sig := <-ch
		switch sig {
		case os.Interrupt:
			manager.router.elog.Logf(elog.LogLevelInfo1, "Exiting on %v", sig)
			// FIXME: Flush logs
			// FIXME: wait group
			os.Exit(0)
			// case syscall.SIGUSR1:
			//	manager.router.LogClients()
			// case syscall.SIGUSR2:
			//	manager.router.Failover()
		}
	}
}
|
import numpy as np
from phonopy.structure.tetrahedron_method import (
get_all_tetrahedra_relative_grid_address,
get_tetrahedra_integration_weight)
rel_ga_ref = [0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1,
0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1,
0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1,
0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1,
0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1,
0, 0, 0, 0, 1, 0, 0, 1, 1, -1, 0, 0,
0, 0, 0, 0, 0, 1, 0, 1, 1, -1, 0, 0,
0, 0, 0, 1, 0, 0, 1, 0, 1, 0, -1, 0,
0, 0, 0, 0, 0, 1, 1, 0, 1, 0, -1, 0,
0, 0, 0, 0, 0, 1, -1, -1, 0, 0, -1, 0,
0, 0, 0, 0, 0, 1, -1, -1, 0, -1, 0, 0,
0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, -1,
0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, -1,
0, 0, 0, 0, 1, 0, -1, 0, -1, 0, 0, -1,
0, 0, 0, 0, 1, 0, -1, 0, -1, -1, 0, 0,
0, 0, 0, 1, 0, 0, 0, -1, -1, 0, 0, -1,
0, 0, 0, 1, 0, 0, 0, -1, -1, 0, -1, 0,
0, 0, 0, -1, -1, -1, 0, -1, -1, 0, 0, -1,
0, 0, 0, -1, -1, -1, 0, -1, -1, 0, -1, 0,
0, 0, 0, -1, -1, -1, -1, 0, -1, 0, 0, -1,
0, 0, 0, -1, -1, -1, -1, 0, -1, -1, 0, 0,
0, 0, 0, -1, -1, -1, -1, -1, 0, 0, -1, 0,
0, 0, 0, -1, -1, -1, -1, -1, 0, -1, 0, 0,
0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1,
0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
0, 0, 0, -1, 1, 0, -1, 1, 1, -1, 0, 0,
0, 0, 0, -1, 0, 1, -1, 1, 1, -1, 0, 0,
0, 0, 0, -1, 1, 0, 0, 1, 0, -1, 1, 1,
0, 0, 0, 0, 1, 0, -1, 1, 1, 0, 1, 1,
0, 0, 0, -1, 0, 1, 0, 0, 1, -1, 1, 1,
0, 0, 0, 0, 0, 1, -1, 1, 1, 0, 1, 1,
0, 0, 0, 0, 0, 1, 0, -1, 0, 1, -1, 0,
0, 0, 0, 1, 0, 0, 0, 0, 1, 1, -1, 0,
0, 0, 0, -1, 0, 1, 0, -1, 0, -1, 0, 0,
0, 0, 0, -1, 0, 1, 0, 0, 1, 0, -1, 0,
0, 0, 0, 0, 1, 0, 0, 0, -1, 1, 0, -1,
0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, -1,
0, 0, 0, -1, 1, 0, 0, 0, -1, -1, 0, 0,
0, 0, 0, -1, 1, 0, 0, 1, 0, 0, 0, -1,
0, 0, 0, 0, -1, -1, 1, -1, -1, 0, 0, -1,
0, 0, 0, 0, -1, -1, 1, -1, -1, 0, -1, 0,
0, 0, 0, 1, -1, -1, 0, 0, -1, 1, 0, -1,
0, 0, 0, 1, 0, 0, 1, -1, -1, 1, 0, -1,
0, 0, 0, 1, -1, -1, 0, -1, 0, 1, -1, 0,
0, 0, 0, 1, 0, 0, 1, -1, -1, 1, -1, 0,
0, 0, 0, 0, -1, -1, 0, 0, -1, -1, 0, 0,
0, 0, 0, 0, -1, -1, 0, -1, 0, -1, 0, 0,
0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1,
0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1,
0, 0, 0, -1, 1, 0, 0, 0, 1, -1, 0, 0,
0, 0, 0, -1, 1, 0, 0, 1, 0, 0, 0, 1,
0, 0, 0, 1, -1, 1, 0, -1, 0, 1, -1, 0,
0, 0, 0, 0, -1, 1, 1, -1, 1, 0, -1, 0,
0, 0, 0, 1, 0, 0, 1, -1, 1, 1, -1, 0,
0, 0, 0, 1, 0, 0, 1, -1, 1, 1, 0, 1,
0, 0, 0, 0, -1, 1, 1, -1, 1, 0, 0, 1,
0, 0, 0, 1, -1, 1, 0, 0, 1, 1, 0, 1,
0, 0, 0, 0, -1, 1, 0, -1, 0, -1, 0, 0,
0, 0, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0, -1, 0, 1, -1,
0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, -1,
0, 0, 0, -1, 0, -1, 0, 0, -1, -1, 1, -1,
0, 0, 0, -1, 0, -1, -1, 1, -1, -1, 0, 0,
0, 0, 0, 0, 0, -1, -1, 1, -1, 0, 1, -1,
0, 0, 0, 0, 1, 0, -1, 1, -1, 0, 1, -1,
0, 0, 0, -1, 1, 0, -1, 1, -1, -1, 0, 0,
0, 0, 0, -1, 1, 0, 0, 1, 0, -1, 1, -1,
0, 0, 0, 0, 0, -1, 0, -1, 0, 1, -1, 0,
0, 0, 0, 1, 0, 0, 0, 0, -1, 1, -1, 0,
0, 0, 0, -1, 0, -1, 0, 0, -1, 0, -1, 0,
0, 0, 0, -1, 0, -1, 0, -1, 0, -1, 0, 0,
0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1,
0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1,
0, 0, 0, 0, 1, 0, -1, 0, 1, -1, 0, 0,
0, 0, 0, 0, 1, 0, -1, 0, 1, 0, 0, 1,
0, 0, 0, 1, 0, 0, 0, -1, 1, 0, -1, 0,
0, 0, 0, 1, 0, 0, 0, -1, 1, 0, 0, 1,
0, 0, 0, -1, -1, 1, -1, -1, 0, 0, -1, 0,
0, 0, 0, -1, -1, 1, -1, -1, 0, -1, 0, 0,
0, 0, 0, -1, -1, 1, 0, -1, 1, 0, -1, 0,
0, 0, 0, -1, -1, 1, -1, 0, 1, -1, 0, 0,
0, 0, 0, -1, -1, 1, 0, -1, 1, 0, 0, 1,
0, 0, 0, -1, -1, 1, -1, 0, 1, 0, 0, 1,
0, 0, 0, 0, 0, -1, 1, 0, -1, 1, 1, -1,
0, 0, 0, 0, 0, -1, 0, 1, -1, 1, 1, -1,
0, 0, 0, 1, 0, 0, 1, 0, -1, 1, 1, -1,
0, 0, 0, 0, 1, 0, 0, 1, -1, 1, 1, -1,
0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, -1,
0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, -1,
0, 0, 0, 0, 0, -1, 0, 1, -1, -1, 0, 0,
0, 0, 0, 0, 1, 0, 0, 1, -1, -1, 0, 0,
0, 0, 0, 0, 0, -1, 1, 0, -1, 0, -1, 0,
0, 0, 0, 1, 0, 0, 1, 0, -1, 0, -1, 0,
0, 0, 0, 0, 0, -1, -1, -1, 0, 0, -1, 0,
0, 0, 0, 0, 0, -1, -1, -1, 0, -1, 0, 0]
freqs = [7.75038996, 8.45225776]
tetra_freqs = [[8.31845176, 8.69248151, 8.78939432, 8.66179133],
[8.31845176, 8.69248151, 8.57211855, 8.66179133],
[8.31845176, 8.3073908, 8.78939432, 8.66179133],
[8.31845176, 8.3073908, 8.16360975, 8.66179133],
[8.31845176, 8.15781566, 8.57211855, 8.66179133],
[8.31845176, 8.15781566, 8.16360975, 8.66179133],
[8.31845176, 8.3073908, 8.16360975, 7.23665561],
[8.31845176, 8.15781566, 8.16360975, 7.23665561],
[8.31845176, 8.69248151, 8.57211855, 8.25247917],
[8.31845176, 8.15781566, 8.57211855, 8.25247917],
[8.31845176, 8.15781566, 7.40609306, 8.25247917],
[8.31845176, 8.15781566, 7.40609306, 7.23665561],
[8.31845176, 8.69248151, 8.78939432, 8.55165578],
[8.31845176, 8.3073908, 8.78939432, 8.55165578],
[8.31845176, 8.3073908, 7.56474684, 8.55165578],
[8.31845176, 8.3073908, 7.56474684, 7.23665561],
[8.31845176, 8.69248151, 8.60076148, 8.55165578],
[8.31845176, 8.69248151, 8.60076148, 8.25247917],
[8.31845176, 7.72920193, 8.60076148, 8.55165578],
[8.31845176, 7.72920193, 8.60076148, 8.25247917],
[8.31845176, 7.72920193, 7.56474684, 8.55165578],
[8.31845176, 7.72920193, 7.56474684, 7.23665561],
[8.31845176, 7.72920193, 7.40609306, 8.25247917],
[8.31845176, 7.72920193, 7.40609306, 7.23665561]]
iw_I_ref = [0.37259443, 1.79993056]
iw_J_ref = [0.05740597, 0.78096241]
def test_get_all_tetrahedra_relative_grid_address():
    """Relative grid addresses must match the stored reference table."""
    relative_addresses = get_all_tetrahedra_relative_grid_address()
    expected = np.array(rel_ga_ref).ravel()
    np.testing.assert_array_equal(relative_addresses.ravel(), expected)
def test_get_tetrahedra_integration_weight():
    """I- and J-function integration weights match the reference values."""
    for function, reference in (('I', iw_I_ref), ('J', iw_J_ref)):
        iw = get_tetrahedra_integration_weight(
            freqs, tetra_freqs, function=function)
        np.testing.assert_allclose(reference, iw, atol=1e-5)
def test_get_tetrahedra_integration_weight_one_freq():
    """Weights computed one frequency at a time agree with the references."""
    iw_I = [get_tetrahedra_integration_weight(f, tetra_freqs, function='I')
            for f in freqs]
    iw_J = [get_tetrahedra_integration_weight(f, tetra_freqs, function='J')
            for f in freqs]
    np.testing.assert_allclose(iw_I_ref, iw_I, atol=1e-5)
    np.testing.assert_allclose(iw_J_ref, iw_J, atol=1e-5)
|
<reponame>rooojs/views<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.htmlspecialchars = void 0;
const MAP = {
'&': '&',
'<': '<',
'>': '>',
'"': '"',
'\'': ''',
};
const replacer = ((match) => MAP[match]);
const htmlspecialchars = (value) => {
return `${value}`.replace(/[&<>"']/g, replacer);
};
exports.htmlspecialchars = htmlspecialchars;
|
<reponame>Trevonte/network
class CheckInsController < ApplicationController
  # Upcoming events: today through two weeks from now.
  def index
    @network_events = NetworkEvent.where(scheduled_at: Date.today..2.weeks.from_now)
  end

  # Check-in form for one event. Only invitees who do not already have a
  # Participation record for this event are offered for check-in.
  def new
    @level = params[:level]
    @network_event = NetworkEvent.find(params[:network_event_id])
    @network_event_id = @network_event.id

    invitees = @network_event.invitees.sort_by(&:first_name)
    # Idiomatic replacement for the original `for ... if not ... push` loop.
    @members = invitees.reject do |invitee|
      Participation.where(member_id: invitee.id, network_event_id: @network_event.id).present?
    end
  end

  # Creates Participation records for the submitted members. Accepts either
  # a list (member_ids) or a single member_id. Members with an identical
  # existing participation are skipped silently; save failures are counted.
  def create
    participation_details = params[:participation]
    member_ids = participation_details[:member_ids] || Array(participation_details[:member_id])
    network_event_id = participation_details[:network_event_id]
    level = participation_details[:level]
    participation_type = participation_details[:participation_type]

    error_ids = []
    member_ids.each do |member_id|
      # Renamed from `participation_params`, which shadowed the private
      # strong-params method of the same name.
      attrs = { member_id: member_id, network_event_id: network_event_id, level: level, participation_type: participation_type }
      next if Participation.where(attrs).exists?

      @participation = Participation.new(attrs)
      @participation.user = current_user
      error_ids << member_id unless @participation.save
    end

    update_media_waivers

    respond_to do |format|
      if error_ids.empty?
        flash[:check_in_message] = "#{member_ids.size} members successfully checked in"
        format.json { head :no_content }
        format.html { render :body => nil, status: 200, :notice => 'Check In Successful!' }
      else
        flash[:check_in_message] = "Error Checking In #{error_ids.size} members"
        format.json { render :json => current_user.errors, :status => :unprocessable_entity }
        format.html { render :action => "edit" }
      end
    end
  end

  private

  # Strong parameters for a single participation record.
  def participation_params
    params.require(:participation).permit(:member_id, :level, :network_event_id, :participation_type)
  end

  # Bulk-sets media_waiver on any members submitted under params[:waiver].
  def update_media_waivers
    if waiver_params[:member_ids].present?
      Member.where(id: waiver_params[:member_ids]).update_all(media_waiver: true)
    end
  end

  # Optional waiver params; empty hash when no :waiver key was submitted.
  def waiver_params
    if params.key?(:waiver)
      params.require(:waiver).permit(member_ids: [])
    else
      {}
    end
  end
end
|
const { execFile } = require('child_process');
module.exports = function compileNib(loader, nibPath, cb) {
// Tell webpack to cache the compiled file and to track changes to the source files
if (loader.cacheable) {
loader.cacheable();
loader.addDependency(loader.resourcePath);
}
execFile('/usr/bin/ibtool', ['--compile', nibPath, loader.resourcePath], { encoding: 'utf8' }, cb);
};
|
// Application configuration values.
module.exports = {
  mongoURI: "mongodb://localhost/dbqasim", // this is my local Mongodb database
  // NOTE(review): presumably the signing secret for auth tokens
  // (passport-jwt uses this key name) — confirm against consumers.
  secretOrKey: "secret",
};
|
<filename>radiaTest-server/server/utils/message_util.py
import json, yaml, os
from flask import g, jsonify, current_app
from server.utils.response_util import RET
from server.model import (
ReUserOrganization,
Message,
MsgLevel,
MsgType,
Role,
ReUserRole
)
from server.utils.db import Insert, Precise
from server.utils.table_adapter import TableAdapter
class MessageManager:
    """Builds and dispatches permission-request messages.

    When a user attempts an operation on a resource they do not own, the
    request is recorded as a message addressed to an administrator of the
    resource's scope, who can later replay the stored script on approval.
    """

    @staticmethod
    def get_cur_api_msg(uri, method):
        """Return the API descriptor from config/api_infos.yaml matching
        ``uri``/``method``, with the resource id extracted into
        ``_api["id"]``.

        Returns None when nothing matches or when the target resource is
        person-scoped (no admin-approval flow applies).
        """
        base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        with open(os.path.join(base_dir, 'config/api_infos.yaml'), 'r', encoding='utf-8') as f:
            api_infos = yaml.load(f.read(), Loader=yaml.FullLoader)
        for _api in api_infos:
            if method == _api.get("act") and _api.get("uri").split("%")[0] in uri:
                # The template looks like "<prefix>%<suffix>"; strip both
                # literal parts from the request uri to recover the id.
                uri_s = _api.get("uri").split("%")
                uri_t1 = uri_s[0]
                uri_t2 = uri_s[1] if len(uri_s) > 1 else ""
                indx = uri.index(uri_t1)
                _api["id"] = uri[indx:].replace(uri_t1, "").replace(uri_t2, "")
                _instance = Precise(
                    getattr(TableAdapter, _api["table"]), {"id": int(_api["id"])},
                ).first()
                if _instance.permission_type == "person":
                    return None
                # (An unreachable `continue` that followed this return has
                # been removed.)
                return _api
        return None

    @staticmethod
    def run(_api):
        """Resolve the administrator responsible for the resource described
        by ``_api`` and send them a script message for the request.

        Raises RuntimeError when the resource, the administrator role, or a
        holder of that role cannot be found.
        """
        from server import redis_client
        from server.utils.redis_util import RedisKey
        _instance = Precise(
            getattr(TableAdapter, _api["table"]), {"id": int(_api["id"])},
        ).first()
        if not _instance:
            raise RuntimeError("the data does not exist")
        cur_org_id = redis_client.hget(RedisKey.user(g.gitee_id), 'current_org_id')
        re_user_org = Precise(
            ReUserOrganization, {"gitee_id": _instance.creator_id, "org_id": cur_org_id},
        ).first()
        if not re_user_org:
            # Best-effort: log and continue; the role lookup below may still
            # find an administrator for the scope.
            current_app.logger.info("the creator user does not exist in current org")
        with open('server/config/role_init.yaml', 'r', encoding='utf-8') as f:
            role_infos = yaml.load(f.read(), Loader=yaml.FullLoader)
        role = Precise(
            Role, {"name": role_infos.get(_instance.permission_type).get("administrator"), "type": _instance.permission_type},
        ).first()
        if not role:
            raise RuntimeError("the role does not exist.")
        re_role_user = Precise(
            ReUserRole, {"role_id": role.id},
        ).first()
        if not re_role_user:
            raise RuntimeError("the user with this role does not exist.")
        MessageManager.send_scrpt_msg(re_role_user.user_id, MsgLevel.user.value, _api, _instance.permission_type)

    @staticmethod
    def send_scrpt_msg(to_user_id, msg_leve: MsgLevel, _api, permission_type):
        """Persist a 'script' message addressed to ``to_user_id``.

        The JSON payload stores the original request (uri rendered with the
        resource id, HTTP method, and body) so the recipient can replay it.
        """
        from server import redis_client
        from server.utils.redis_util import RedisKey
        _message = dict(
            data=json.dumps(
                dict(
                    permission_type=permission_type,
                    info=f'<b>{redis_client.hget(RedisKey.user(g.gitee_id), "gitee_name")}</b>请求{_api.get("alias")}<b>{_api["id"]}</b>。',
                    script=_api.get("uri") % int(_api["id"]),  # e.g. "/api/v1/product/%d" % instance_id
                    method=_api.get("act"),  # e.g. "delete"
                    _alias=_api.get("alias"),
                    _id=_api.get("id"),
                    body=_api["body"]
                )
            ),
            level=msg_leve,
            from_id=g.gitee_id,
            to_id=to_user_id,
            type=MsgType.script.value
        )
        Insert(Message, _message).single()
|
<reponame>googleinterns/supply-chain-map
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { StatsTabComponent } from './stats-tab.component';
import { MockStore, provideMockStore } from '@ngrx/store/testing';
import { MemoizedSelector } from '@ngrx/store';
import { HomeState } from 'src/app/home/store/state';
import { selectHomeFormQueryResult, selectHomeFormQuery, selectHomeFormQueryResultStats } from 'src/app/home/store/selectors';
import { FormQueryResultStats } from 'src/app/home/home.models';
describe('StatsTabComponent', () => {
    let component: StatsTabComponent;
    let fixture: ComponentFixture<StatsTabComponent>;
    let mockStore: MockStore;
    // Overridden NgRx selectors so the component receives deterministic data.
    let mockFormQuerySelector: MemoizedSelector<HomeState, string>;
    let mockFormQueryStatsSelector: MemoizedSelector<HomeState, FormQueryResultStats>;

    beforeEach(async(() => {
        // Declare the component with a mock store in place of the real one.
        TestBed.configureTestingModule({
            declarations: [ StatsTabComponent ],
            providers: [
                provideMockStore()
            ]
        })
        .compileComponents();

        mockStore = TestBed.inject(MockStore);
        // Empty query string by default.
        mockFormQuerySelector = mockStore.overrideSelector(
            selectHomeFormQuery,
            ''
        );
        // Minimal stats object representing a completed, cache-hit job
        // that processed zero bytes.
        mockFormQueryStatsSelector = mockStore.overrideSelector(
            selectHomeFormQueryResultStats,
            {
                jobId: '',
                jobComplete: true,
                projectId: '',
                cacheHit: true,
                totalBytesProcessed: '0'
            }
        );
    }));

    beforeEach(() => {
        fixture = TestBed.createComponent(StatsTabComponent);
        component = fixture.componentInstance;
        fixture.detectChanges();
    });

    // Smoke test: component constructs and renders with the mocked store.
    it('should create', () => {
        expect(component).toBeTruthy();
    });
});
|
/* Copyright 2009-2015 <NAME>
*
* This file is part of the MOEA Framework.
*
* The MOEA Framework is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* The MOEA Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
*/
package org.moeaframework.core.spi;
import java.util.Properties;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.moeaframework.analysis.sensitivity.ProblemStub;
import org.moeaframework.core.Problem;
import org.moeaframework.core.Solution;
import org.moeaframework.core.Variable;
import org.moeaframework.core.Variation;
import org.moeaframework.core.operator.RandomInitialization;
import org.moeaframework.core.variable.BinaryVariable;
import org.moeaframework.core.variable.Grammar;
import org.moeaframework.core.variable.Permutation;
import org.moeaframework.core.variable.RealVariable;
/**
* Tests the {@link OperatorFactory} class.
*/
public class OperatorFactoryTest {

    // Operator specifications exercised by testCommonOperators(); each entry
    // is a '+'-separated chain of operator names understood by the factory.
    private static final String[] operators = { "sbx+pm", "hux+bf",
            "pmx+insertion+swap", "de", "de+pm", "pcx", "spx", "undx", "pm", "um",
            "1x+um", "2x+um", "ux+um", "gx+gm", "bx", "ptm", "bx+ptm" };

    // Problem shared by the common-operator tests; recreated for each test.
    private Problem problem;

    @Before
    public void setUp() {
        problem = ProblemFactory.getInstance().getProblem("DTLZ2_2");
    }

    @After
    public void tearDown() {
        problem = null;
    }

    // Every named operator must be constructible by the factory and able to
    // produce offspring on the DTLZ2_2 problem.
    @Test
    public void testCommonOperators() {
        for (String operator : operators) {
            Variation variation = OperatorFactory.getInstance().getVariation(
                    operator, new Properties(), problem);
            test(variation);
        }
    }

    // Applies the variation to randomly initialized parents and verifies
    // that a (non-null) offspring array is produced.
    private void test(Variation variation) {
        RandomInitialization initialization = new RandomInitialization(problem,
                variation.getArity());
        Solution[] parents = initialization.initialize();
        Solution[] offspring = variation.evolve(parents);
        Assert.assertNotNull(offspring);
    }

    // A null operator name must resolve to a default operator for a problem
    // with only real-valued variables.
    @Test
    public void testDefaultReal() {
        Problem problem = new ProblemStub(1) {

            @Override
            public Solution newSolution() {
                Solution solution = new Solution(1, 0);
                solution.setVariable(0, new RealVariable(0, 1));
                return solution;
            }

        };

        Assert.assertNotNull(OperatorFactory.getInstance().getVariation(null,
                new Properties(), problem));
    }

    // Default operator resolution for an all-binary problem.
    @Test
    public void testDefaultBinary() {
        Problem problem = new ProblemStub(1) {

            @Override
            public Solution newSolution() {
                Solution solution = new Solution(1, 0);
                solution.setVariable(0, new BinaryVariable(10));
                return solution;
            }

        };

        Assert.assertNotNull(OperatorFactory.getInstance().getVariation(null,
                new Properties(), problem));
    }

    // Default operator resolution for an all-permutation problem.
    @Test
    public void testDefaultPermutation() {
        Problem problem = new ProblemStub(1) {

            @Override
            public Solution newSolution() {
                Solution solution = new Solution(1, 0);
                solution.setVariable(0, new Permutation(4));
                return solution;
            }

        };

        Assert.assertNotNull(OperatorFactory.getInstance().getVariation(null,
                new Properties(), problem));
    }

    // Default operator resolution for an all-grammar problem.
    @Test
    public void testDefaultGrammar() {
        Problem problem = new ProblemStub(1) {

            @Override
            public Solution newSolution() {
                Solution solution = new Solution(1, 0);
                solution.setVariable(0, new Grammar(4));
                return solution;
            }

        };

        Assert.assertNotNull(OperatorFactory.getInstance().getVariation(null,
                new Properties(), problem));
    }

    // Mixed decision-variable types have no default operator; the factory
    // must throw ProviderNotFoundException.
    @Test(expected = ProviderNotFoundException.class)
    public void testMixedType() {
        Problem problem = new ProblemStub(5) {

            @Override
            public Solution newSolution() {
                Solution solution = new Solution(5, 0);
                solution.setVariable(0, new RealVariable(0, 1));
                solution.setVariable(1, new BinaryVariable(10));
                solution.setVariable(2, new Permutation(4));
                solution.setVariable(3, new Grammar(4));
                return solution;
            }

        };

        OperatorFactory.getInstance().getVariation(null, new Properties(),
                problem);
    }

    // An unrecognized Variable implementation must also fail resolution.
    @Test(expected = ProviderNotFoundException.class)
    public void testUnknownType() {
        Problem problem = new ProblemStub(1) {

            @Override
            public Solution newSolution() {
                Solution solution = new Solution(1, 0);
                solution.setVariable(0, new Variable() {

                    private static final long serialVersionUID = -5453570189207466169L;

                    @Override
                    public Variable copy() {
                        throw new UnsupportedOperationException();
                    }

                });
                return solution;
            }

        };

        OperatorFactory.getInstance().getVariation(null, new Properties(),
                problem);
    }

    // A problem with no decision variables cannot select a default operator.
    @Test(expected = ProviderNotFoundException.class)
    public void testEmptyType() {
        Problem problem = new ProblemStub(0);

        OperatorFactory.getInstance().getVariation(null, new Properties(),
                problem);
    }

    // A chain containing a nonexistent operator name must fail.
    @Test(expected = ProviderNotFoundException.class)
    public void testNonexistentOperator() {
        Problem problem = new ProblemStub(0);

        OperatorFactory.getInstance().getVariation("sbx+test_fake_operator",
                new Properties(), problem);
    }

}
|
// By KRT girl xiplus
#include <bits/stdc++.h>
#define endl '\n'
using namespace std;
// Collatz cycle-step counter: number of steps needed to reach 1 from n
// (0 for n == 1). Rewritten iteratively (the recursive original could
// exhaust the stack on long chains) and computed in long long so that the
// 3n+1 step cannot overflow int for inputs near INT_MAX.
int count(int n){
    int steps = 0;
    long long v = n;
    while (v != 1) {
        v = (v & 1) ? v * 3 + 1 : v / 2;
        ++steps;
    }
    return steps;
}
int main(){
// ios::sync_with_stdio(false);
// cin.tie(0);
int a,b;
while(cin>>a>>b){
cout<<a<<" "<<b<<" ";
if(a>b)swap(a,b);
int ans=0;
for(int q=a;q<=b;q++){
ans=max(count(q)+1,ans);
}
cout<<ans<<endl;
}
}
|
#!/usr/bin/bash
# Run the xmem memory benchmark once per power-of-two multiplier (1..32):
# CSV results go to <i>.csv, console output to <i>.txt.
for i in 1 2 4 8 16 32
do
	./xmem-win-x64.exe -j1 -l -w4 -M$i -f$i.csv >$i.txt
done
|
def generate_migration_operation(app_name, model_name, field_name,
                                 field="models.CharField(max_length=100)"):
    """Render a Django ``migrations.AlterField`` operation as source code.

    Parameters
    ----------
    app_name : str
        Name of the Django app. Currently unused by the rendering; kept
        for backward compatibility with existing callers.
    model_name : str
        Model whose field is being altered.
    field_name : str
        Name of the field to alter.
    field : str, optional
        Source text of the replacement field definition. Defaults to the
        previously hard-coded ``models.CharField(max_length=100)`` so
        existing callers see identical output.

    Returns
    -------
    str
        The ``migrations.AlterField(...)`` call as a single-line string.
    """
    return (
        "migrations.AlterField("
        f"model_name='{model_name}', "
        f"name='{field_name}', "
        f"field={field}, "
        "preserve_default=True"
        ")"
    )
<filename>src/commands/settings/ProtectedRolesCommand.js
const ConfigCommand = require('../ConfigCommand');
const AddProtectedRoleCommand = require('./protectedroles/AddProtectedRoleCommand');
const ListProtectedRoleCommand = require('./protectedroles/ListProtectedRoleCommand');
const RemoveProtectedRoleCommand = require('./protectedroles/RemoveProtectedRoleCommand');

/**
 * Parent command grouping the protected-role sub-commands
 * (add / list / remove). Requires the MANAGE_GUILD permission.
 */
class ProtectedRolesCommand extends ConfigCommand {

    static description = 'Manage protected roles';

    static usage = 'add|list|remove';

    static names = ['protectedroles'];

    static userPerms = ['MANAGE_GUILD'];

    // Sub-commands dispatched by the ConfigCommand base class.
    static getSubCommands() {
        return [
            AddProtectedRoleCommand,
            ListProtectedRoleCommand,
            RemoveProtectedRoleCommand,
        ];
    }
}

module.exports = ProtectedRolesCommand;
|
// Re-export the contract construct modules under stable, descriptive names.
export {default as MasterDAOContractConstruct} from './MasterDAO'
export {default as SubDAOContractConstruct} from './SubDAO'
export {default as MemberERC721ContractConstruct} from './MemberERC721'
|
<reponame>micpez/c3po
/*
* Copyright (c) 2017 Sprint
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <iostream>
#include <vector>
#include <pistache/net.h>
#include <pistache/http.h>
#include <pistache/client.h>
#include "options.h"
#include "logger.h"
#include "rule.h"
#include "session.h"
#include "chronos.h"
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// Map of rule name -> Rule*. Owns the Rule objects it contains.
RulesMap::RulesMap()
{
}

// Deletes every contained Rule before clearing the map.
RulesMap::~RulesMap()
{
    RulesMap::iterator it;

    while ( (it = begin()) != end() )
    {
        delete it->second;
        erase( it );
    }
}

// Looks up a rule by name; on success stores the pointer in `rule` and
// returns true, otherwise returns false and leaves `rule` untouched.
bool RulesMap::getRule( const std::string &rulename, Rule* &rule )
{
    auto it = find( rulename );
    if ( it == end() )
        return false;
    rule = it->second;
    return true;
}
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// Ordered list of Rule pointers. When constructed with free_on_destroy set,
// the destructor deletes the contained rules (the list takes ownership).
RulesList::RulesList( bool free_on_destroy )
    : m_free_on_destroy( free_on_destroy )
{
}

RulesList::~RulesList()
{
    if ( m_free_on_destroy )
    {
        std::list<Rule*>::iterator it;

        while ( (it = m_rules.begin()) != m_rules.end() )
        {
            delete *it;
            m_rules.erase( it );
        }
    }
}

// Appends a rule; ownership policy is governed solely by free_on_destroy.
void RulesList::push_back( Rule *r )
{
    m_rules.push_back( r );
}

// True if this exact Rule pointer is already in the list (linear scan).
bool RulesList::exists( Rule *r )
{
    return find( r ) != m_rules.end();
}

// Removes the rule if present; returns whether anything was removed.
bool RulesList::erase( Rule *r )
{
    std::list<Rule*>::iterator it = find( r );
    if ( it != m_rules.end() )
    {
        m_rules.erase( it );
        return true;
    }
    return false;
}

// Iterator-based erase; returns the iterator following the removed element.
std::list<Rule*>::iterator RulesList::erase( std::list<Rule*>::iterator &it )
{
    return m_rules.erase( it );
}

// Registers `gx` with the timer of every time-sensitive rule in the list.
void RulesList::addGxSession( GxSession *gx )
{
    for ( auto r : m_rules )
        if ( r->getRuleTimer() )
            r->getRuleTimer()->addSession( gx );
}

// Reverse of addGxSession(): deregisters `gx` from every rule timer.
void RulesList::removeGxSession( GxSession *gx )
{
    for ( auto r : m_rules )
        if ( r->getRuleTimer() )
            r->getRuleTimer()->removeSession( gx );
}
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
RuleEvaluator::RuleEvaluator()
{
}

RuleEvaluator::~RuleEvaluator()
{
}

// Distributes `rules` across the three enforcement points. Each rule is
// assigned to the first element whose supported-features mask fully covers
// the rule's feature mask, checked in the order PCEF (Gx), TDF (Sd),
// TSSF (St). For the matched element the rule is queued for install when
// it is currently active (or rule timers are disabled) and not yet
// installed, or queued for removal when installed but no longer active.
// When a rule matches no element and failOnUninstallableRule is set, the
// evaluation aborts and returns false.
bool RuleEvaluator::evaluate( GxSession &pcef, const RulesList &rules, RulesList &gxInstalled, RulesList &sdInstalled, RulesList &stInstalled, bool failOnUninstallableRule )
{
    bool result = true;

    uint64_t pcefFeatures = pcef.getSupportedFeatures();
    uint64_t tdfFeatures = pcef.getTdfSession().getSupportedFeatures();
    uint64_t tssfFeatures = pcef.getTssfSession().getSupportedFeatures();
    bool tdfRequired = pcef.getTdfSession().required();
    bool tssfRequired = pcef.getTssfSession().required();

    auto ruleit = rules.begin();

    while ( result && ruleit != rules.end() )
    {
        // does the rule apply to the PCEF?
        if ( ( (*ruleit)->getFeatureMask() & pcefFeatures ) == (*ruleit)->getFeatureMask() )
        {
            if ( !Options::enableRuleTimers() || (*ruleit)->activeNow() )
            {
                if ( !gxInstalled.exists( *ruleit ) )
                    addGxInstallRule( *ruleit );
            }
            else
            {
                if ( gxInstalled.exists( *ruleit ) )
                    addGxRemoveRule( *ruleit );
            }
        }
        // does the rule apply to the TDF?
        else if ( tdfRequired && ( (*ruleit)->getFeatureMask() & tdfFeatures ) == (*ruleit)->getFeatureMask() )
        {
            if ( !Options::enableRuleTimers() || (*ruleit)->activeNow() )
            {
                if ( !sdInstalled.exists( *ruleit ) )
                    addSdInstallRule( *ruleit );
            }
            else
            {
                if ( sdInstalled.exists( *ruleit ) )
                    addSdRemoveRule( *ruleit );
            }
        }
        // does the rule apply to the TSSF?
        else if ( tssfRequired && ( (*ruleit)->getFeatureMask() & tssfFeatures ) == (*ruleit)->getFeatureMask() )
        {
            if ( !Options::enableRuleTimers() || (*ruleit)->activeNow() )
            {
                if ( !stInstalled.exists( *ruleit ) )
                    addStInstallRule( *ruleit );
            }
            else
            {
                if ( stInstalled.exists( *ruleit ) )
                    addStRemoveRule( *ruleit );
            }
        }
        // the rule could not be assigned anywhere
        else if ( failOnUninstallableRule )
        {
            // BUG FIX: the arguments were previously passed as imsi, apn,
            // rulename, which mislabeled every field of the log line; the
            // format expects rule first, then imsi and apn.
            Logger::gx().error( "%s:%d - Aborting, unable to install rule=[%s] for imsi=[%s] apn=[%s]",
                __FILE__, __LINE__, (*ruleit)->getRuleName().c_str(), pcef.getImsi().c_str(), pcef.getApn().c_str() );
            result = false;
        }

        ruleit++;
    }

    return result;
}
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// Tracks the set of Gx sessions affected by one time-sensitive rule and
// drives periodic re-evaluation of that rule via the Chronos timer service.
RuleTimer::RuleTimer()
{
}

RuleTimer::~RuleTimer()
{
}

// Registers a session with this rule's timer; returns false if the session
// was already registered. Thread-safe via m_mutex.
bool RuleTimer::addSession( GxSession *gx )
{
    SMutexLock l( m_mutex );
    auto res = m_sessions.insert( gx );
    return res.second;
}

// Deregisters a session. Thread-safe via m_mutex.
void RuleTimer::removeSession( GxSession *gx )
{
    SMutexLock l( m_mutex );
    m_sessions.erase( gx );
}

// Expands to a statement block that renders the Chronos timer-creation JSON
// body into `postbody`. The "opaque" value is echoed back by Chronos on
// expiration so the callback can identify the rule.
#define CHRONOS_POST( interval, repeat_for, uri, opaque, postbody ) \
{ \
    std::stringstream ss; \
    ss \
        << "{" \
        << "\"timing\": {" \
        << "\"interval\": " << interval << "," \
        << "\"repeat-for\": " << repeat_for \
        << "}," \
        << "\"callback\": {" \
        << "\"http\": {" \
        << "\"uri\": \"" << uri << "\"," \
        << "\"opaque\": \"" << opaque << "\"" \
        << "}" \
        << "}," \
        << "\"reliability\": {" \
        << "\"replication-factor\": 99" \
        << "}" \
        << "}"; \
    postbody = ss.str(); \
}

// Schedules the next expiration with Chronos using the rule's configured
// interval. The response's Location header (the Chronos timer id) is stored
// asynchronously; POST failures are only logged.
void RuleTimer::setNextInterval()
{
    int interval = getRule()->getTimerInterval();

    if ( interval < 0 )
    {
        Logger::chronos().error( "%s:%d - the timer interval for [%s] is less than zero (%d)",
            __FILE__, __LINE__, getRule()->getRuleName().c_str(), interval );
        return;
    }

    //
    // build the request json
    //
    std::string postbody;
    CHRONOS_POST( interval, interval, Options::ruleCallbackUrl(), getRuleName(), postbody );

    //
    // setup the http client
    //
    std::string chronos = Options::chronosApiUrl() + "/timers";

    Logger::chronos().debug( "%s:%d - sending request to Chronos for [%s] interval [%d]",
        __FILE__, __LINE__, getRule()->getRuleName().c_str(), interval );

    auto resp = Chronos::client().post(chronos).body(postbody).send();

    // NOTE: the lambda captures `this` implicitly via [&]; this RuleTimer
    // must outlive the asynchronous response.
    resp.then( [&](Pistache::Http::Response response) {
        Logger::chronos().debug( "%s:%d - processing Chronos response",
            __FILE__, __LINE__ );
        if ( response.code() == Pistache::Http::Code::Ok ) {
            if ( response.headers().has("Location") ) {
                auto loc = response.headers().get<ChronosLocation>();
                setTimerId( loc->getLocation() );
                Logger::chronos().debug( "%s:%d - the TimerId for [%s] is [%s]",
                    __FILE__, __LINE__, getRule()->getRuleName().c_str(), loc->getLocation().c_str() );
            }
            else {
                Logger::chronos().error( "%s:%d - the Chronos POST response for [%s] does not contain the Location header",
                    __FILE__, __LINE__, getRule()->getRuleName().c_str() );
            }
        }
        else {
            Logger::chronos().error( "%s:%d - the Chronos POST for [%s] returned (%d)",
                __FILE__, __LINE__, getRule()->getRuleName().c_str(), response.code() );
        }
    }, Pistache::Async::IgnoreException );

    Logger::chronos().debug( "%s:%d - RuleTimer::setNextInterval() - exiting",
        __FILE__, __LINE__ );
}

// Chronos callback: re-evaluates this rule against every registered session
// and, where the active/inactive state changed, kicks off the Gx/Sd/St
// update flows. Holds m_mutex for the whole pass.
void RuleTimer::processIntervalExpiration()
{
    Logger::chronos().debug( "%s:%d - RuleTimer::processIntervalExpiration() for [%s]",
        __FILE__, __LINE__, getRule()->getRuleName().c_str() );

    SMutexLock l( m_mutex );
    Rule *r = getRule();
    RulesList rl;

    for ( auto gx : m_sessions )
    {
        RuleEvaluator re;
        rl.clear();
        rl.push_back( r );
        bool evres = re.evaluate( *gx, rl, gx->getInstalledRules(),
            gx->getTdfSession().getInstalledRules(),
            gx->getTssfSession().getInstalledRules(),
            gx->getSubscriber().getFailOnUninstallableRule() );
        if ( evres )
        {
            // NOTE(review): when processPhase1() returns true the event is
            // presumably queued and owned elsewhere (only failures are
            // deleted here) — confirm against the event framework.
            GxProcessRulesUpdate *gxe = new GxProcessRulesUpdate( gx->getPCRF(), gx );
            SdProcessRulesUpdate *sde = new SdProcessRulesUpdate( gx->getPCRF(), &gx->getTdfSession() );
            StProcessRulesUpdate *ste = new StProcessRulesUpdate( gx->getPCRF(), &gx->getTssfSession() );

            if ( !gxe->processPhase1( re ) )
                delete gxe;
            if ( !sde->processPhase1( re ) )
                delete sde;
            if ( !ste->processPhase1( re ) )
                delete ste;
        }
    }
}
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// Process-wide singleton registry mapping rule names to their RuleTimer.
SMutex RuleTimers::m_mutex;
RuleTimers *RuleTimers::m_singleton = NULL;

RuleTimers::RuleTimers()
{
}

RuleTimers::~RuleTimers()
{
}

// Creates (or reuses) the RuleTimer for a time-sensitive rule and attaches
// it to the rule. Returns NULL when rule timers are disabled or the rule is
// not time-sensitive. Thread-safe via the class-level mutex.
RuleTimer *RuleTimers::addRuleTimer( Rule *rule )
{
    if ( !Options::enableRuleTimers() || !rule->getTimeSensitive() )
        return NULL;

    SMutexLock l( m_mutex );
    auto it = m_map.find( rule->getRuleName() );
    if ( it == m_map.end() )
    {
        RuleTimer *rt = new RuleTimer();
        rt->setRule( rule );
        auto res = m_map.insert( std::pair<std::string,RuleTimer*>( rt->getRuleName(), rt ) );
        if ( res.second ) // insert succeeded
        {
            rule->setRuleTimer( rt );
        }
        else
        {
            Logger::gx().error( "%s:%d - Unable to insert RuleTimer for rule [%s]",
                __FILE__, __LINE__, rule->getRuleName().c_str() );
            delete rt;
        }
    }
    else
    {
        // A timer for this rule name already exists; share it.
        rule->setRuleTimer( it->second );
    }

    return rule->getRuleTimer();
}

// Looks up a RuleTimer by rule name; NULL when not registered.
RuleTimer *RuleTimers::getRuleTimer( const std::string &name )
{
    SMutexLock l( m_mutex );
    auto it = m_map.find( name );
    if ( it == m_map.end() )
        return NULL;
    return it->second;
}

// Detaches and deletes the rule's timer, if any.
// NOTE(review): deletion is not synchronized with any in-flight Chronos
// callback that may still reference the timer — confirm the shutdown order.
void RuleTimers::removeRuleTimer( Rule *rule )
{
    RuleTimer *rt = rule->getRuleTimer();
    if ( rt )
    {
        SMutexLock l( m_mutex );
        m_map.erase( rule->getRuleName() );
        rule->setRuleTimer( NULL );
        delete rt;
    }
}
|
#!/bin/bash
# Run exploit08 and capture all output (stdout and stderr) to its log file.
python exploit08/exploit08.py &> exploit08/exploit08.log
|
'use strict';

const path = require('path');
const webpack = require('webpack'); // kept as-is (unused in this file)
const HtmlWebpackPlugin = require('html-webpack-plugin');
const { mapConfigToTargets, getBundleLocationWithId } = require('../../../../utils/env-bundles');

// Produces one webpack config per browser-target set supplied by
// mapConfigToTargets; each bundle is emitted into a per-id dist directory.
module.exports = mapConfigToTargets({ root: __dirname }, ({ browsers, id }) => {
    return {
        entry: [path.join(__dirname, 'app/main.js')],
        output: {
            path: getBundleLocationWithId(`${__dirname}/dist/`, id),
            filename: '[name].js',
            publicPath: '/',
        },
        plugins: [
            new HtmlWebpackPlugin(),
        ],
        module: {
            rules: [
                {
                    test: /\.js/,
                    exclude: /node_modules/,
                    loader: 'babel-loader',
                    // NOTE(review): `query` is the legacy (pre-webpack-4)
                    // name for loader options — presumably this project pins
                    // an older webpack; confirm before upgrading.
                    query: {
                        presets: [
                            [
                                '@babel/preset-env',
                                {
                                    // Transpile/polyfill only for the
                                    // browsers of this target set.
                                    targets: {
                                        browsers,
                                    },
                                    shippedProposals: true,
                                    useBuiltIns: 'usage',
                                },
                            ],
                        ],
                    },
                },
                {
                    test: /\.html$/,
                    loader: 'html',
                },
            ],
        },
    };
});
|
<gh_stars>1-10
"use strict"
import crypto from "crypto"
module.exports = {
getTxHash(txstring) {
let s256Buffer = crypto
.createHash(`sha256`)
.update(Buffer.from(txstring, `base64`))
.digest()
let txbytes = new Uint8Array(s256Buffer)
return Buffer.from(txbytes.slice(0, 20)).toString(`hex`)
}
}
|
<reponame>Nazar910/assignment-system
import IAssignmentService from "./interface";
import { injectable, inject } from "inversify";
import TYPES from '../../types';
import IAssignmentRepo from "../../repos/assignment/interface";
import * as assert from 'assert';
import 'reflect-metadata';
import { IAssignmentValidator } from "../../validation/ajv/interfaces";
@injectable()
export default class AssignmentService implements IAssignmentService {
    constructor(
        @inject(TYPES.AssignmentRepo) private assignmentRepo: IAssignmentRepo,
        @inject(TYPES.AssignmentValidator) private validator: IAssignmentValidator
    ) { }

    /**
     * Shared guard asserting that an id-like argument is a non-empty string.
     * Extracted because the identical pair of assertions was repeated in
     * findById, deleteById and findByAssigneeId. Error messages are
     * byte-identical to the originals.
     */
    private static assertStringField(value: any, field: string): void {
        assert.ok(value, `"${field}" field is required`);
        assert.ok(typeof value === 'string', `"${field}" field should be a string`);
    }

    /** Returns all assignments. */
    async findAll() {
        const assignments = await this.assignmentRepo.findAll();
        return assignments;
    }

    /** Returns the assignment with the given string id. */
    async findById(id: any) {
        AssignmentService.assertStringField(id, 'id');
        const assignment = await this.assignmentRepo.findById(id);
        return assignment;
    }

    /** Validates and persists a new assignment. */
    async create(data: object) {
        this.validator.create(data);
        return this.assignmentRepo.create(data);
    }

    /** Validates and applies a partial update; returns the updated record. */
    async updateById(id: any, data: Object) {
        this.validator.update(id, data);
        const assignment = await this.assignmentRepo.updateById(id, data);
        return assignment;
    }

    /** Deletes the assignment with the given string id. */
    async deleteById(id: any) {
        AssignmentService.assertStringField(id, 'id');
        await this.assignmentRepo.deleteById(id);
    }

    /** Returns all assignments belonging to the given assignee. */
    async findByAssigneeId(assigneeId: string) {
        AssignmentService.assertStringField(assigneeId, 'assigneeId');
        return this.assignmentRepo.findByAssigneeId(assigneeId);
    }
}
|
<filename>src/main/java/de/lmu/cis/ocrd/cli/AlignCommand.java<gh_stars>1-10
package de.lmu.cis.ocrd.cli;
import com.google.gson.Gson;
import de.lmu.cis.ocrd.align.Lines;
import org.pmw.tinylog.Logger;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class AlignCommand extends AbstractIOCommand {
    // JSON-deserialized CLI parameter block: n is the number of input lines
    // per alignment group.
    public static class Parameter {
        public int n;
    }

    public AlignCommand() {
        super();
    }

    // Validates the n parameter (> 0) and aligns the input in groups of n.
    @Override
    public void execute(CommandLineArguments args) throws Exception {
        final Parameter p = args.mustGetParameter(Parameter.class);
        if (p.n <= 0) {
            throw new Exception("invalid n: " + p.n);
        }
        align(p.n);
    }

    @Override
    public String getName() {
        return "align";
    }

    // Reads groups of n lines until EOF, aligns each full group, and prints
    // the accumulated alignments as one JSON array.
    private void align(int n) throws IOException {
        String[] lines = new String[n];
        final List<Lines.Alignment> data = new ArrayList<>();
        // read input
        while (readLines(lines)) {
            data.add(Lines.align(lines));
        }
        println(new Gson().toJson(data));
        flush();
    }

    // Fills `lines` with the next lines.length input lines, trimmed and with
    // '#' and '$' replaced by spaces. Returns false if EOF is reached before
    // a full group has been read (a partial group is discarded).
    private boolean readLines(String[] lines) throws IOException {
        for (int i = 0; i < lines.length; i++) {
            lines[i] = readLine();
            if (lines[i] == null) {
                return false;
            }
            lines[i] = lines[i].trim();
            lines[i] = lines[i].replace('#', ' ');
            lines[i] = lines[i].replace('$', ' ');
            Logger.info("read line: {}", lines[i]);
        }
        return true;
    }
}
|
// Compiled by ClojureScript 1.9.946 {}
goog.provide('cljs.repl');
goog.require('cljs.core');
goog.require('cljs.spec.alpha');
cljs.repl.print_doc = (function cljs$repl$print_doc(p__25569){
var map__25570 = p__25569;
var map__25570__$1 = ((((!((map__25570 == null)))?((((map__25570.cljs$lang$protocol_mask$partition0$ & (64))) || ((cljs.core.PROTOCOL_SENTINEL === map__25570.cljs$core$ISeq$)))?true:false):false))?cljs.core.apply.call(null,cljs.core.hash_map,map__25570):map__25570);
var m = map__25570__$1;
var n = cljs.core.get.call(null,map__25570__$1,new cljs.core.Keyword(null,"ns","ns",441598760));
var nm = cljs.core.get.call(null,map__25570__$1,new cljs.core.Keyword(null,"name","name",1843675177));
cljs.core.println.call(null,"-------------------------");
cljs.core.println.call(null,[cljs.core.str.cljs$core$IFn$_invoke$arity$1((function (){var temp__4657__auto__ = new cljs.core.Keyword(null,"ns","ns",441598760).cljs$core$IFn$_invoke$arity$1(m);
if(cljs.core.truth_(temp__4657__auto__)){
var ns = temp__4657__auto__;
return [cljs.core.str.cljs$core$IFn$_invoke$arity$1(ns),"/"].join('');
} else {
return null;
}
})()),cljs.core.str.cljs$core$IFn$_invoke$arity$1(new cljs.core.Keyword(null,"name","name",1843675177).cljs$core$IFn$_invoke$arity$1(m))].join(''));
if(cljs.core.truth_(new cljs.core.Keyword(null,"protocol","protocol",652470118).cljs$core$IFn$_invoke$arity$1(m))){
cljs.core.println.call(null,"Protocol");
} else {
}
if(cljs.core.truth_(new cljs.core.Keyword(null,"forms","forms",2045992350).cljs$core$IFn$_invoke$arity$1(m))){
var seq__25572_25594 = cljs.core.seq.call(null,new cljs.core.Keyword(null,"forms","forms",2045992350).cljs$core$IFn$_invoke$arity$1(m));
var chunk__25573_25595 = null;
var count__25574_25596 = (0);
var i__25575_25597 = (0);
while(true){
if((i__25575_25597 < count__25574_25596)){
var f_25598 = cljs.core._nth.call(null,chunk__25573_25595,i__25575_25597);
cljs.core.println.call(null," ",f_25598);
var G__25599 = seq__25572_25594;
var G__25600 = chunk__25573_25595;
var G__25601 = count__25574_25596;
var G__25602 = (i__25575_25597 + (1));
seq__25572_25594 = G__25599;
chunk__25573_25595 = G__25600;
count__25574_25596 = G__25601;
i__25575_25597 = G__25602;
continue;
} else {
var temp__4657__auto___25603 = cljs.core.seq.call(null,seq__25572_25594);
if(temp__4657__auto___25603){
var seq__25572_25604__$1 = temp__4657__auto___25603;
if(cljs.core.chunked_seq_QMARK_.call(null,seq__25572_25604__$1)){
var c__24378__auto___25605 = cljs.core.chunk_first.call(null,seq__25572_25604__$1);
var G__25606 = cljs.core.chunk_rest.call(null,seq__25572_25604__$1);
var G__25607 = c__24378__auto___25605;
var G__25608 = cljs.core.count.call(null,c__24378__auto___25605);
var G__25609 = (0);
seq__25572_25594 = G__25606;
chunk__25573_25595 = G__25607;
count__25574_25596 = G__25608;
i__25575_25597 = G__25609;
continue;
} else {
var f_25610 = cljs.core.first.call(null,seq__25572_25604__$1);
cljs.core.println.call(null," ",f_25610);
var G__25611 = cljs.core.next.call(null,seq__25572_25604__$1);
var G__25612 = null;
var G__25613 = (0);
var G__25614 = (0);
seq__25572_25594 = G__25611;
chunk__25573_25595 = G__25612;
count__25574_25596 = G__25613;
i__25575_25597 = G__25614;
continue;
}
} else {
}
}
break;
}
} else {
if(cljs.core.truth_(new cljs.core.Keyword(null,"arglists","arglists",1661989754).cljs$core$IFn$_invoke$arity$1(m))){
var arglists_25615 = new cljs.core.Keyword(null,"arglists","arglists",1661989754).cljs$core$IFn$_invoke$arity$1(m);
if(cljs.core.truth_((function (){var or__23539__auto__ = new cljs.core.Keyword(null,"macro","macro",-867863404).cljs$core$IFn$_invoke$arity$1(m);
if(cljs.core.truth_(or__23539__auto__)){
return or__23539__auto__;
} else {
return new cljs.core.Keyword(null,"repl-special-function","repl-special-function",1262603725).cljs$core$IFn$_invoke$arity$1(m);
}
})())){
cljs.core.prn.call(null,arglists_25615);
} else {
cljs.core.prn.call(null,((cljs.core._EQ_.call(null,new cljs.core.Symbol(null,"quote","quote",1377916282,null),cljs.core.first.call(null,arglists_25615)))?cljs.core.second.call(null,arglists_25615):arglists_25615));
}
} else {
}
}
if(cljs.core.truth_(new cljs.core.Keyword(null,"special-form","special-form",-1326536374).cljs$core$IFn$_invoke$arity$1(m))){
cljs.core.println.call(null,"Special Form");
cljs.core.println.call(null," ",new cljs.core.Keyword(null,"doc","doc",1913296891).cljs$core$IFn$_invoke$arity$1(m));
if(cljs.core.contains_QMARK_.call(null,m,new cljs.core.Keyword(null,"url","url",276297046))){
if(cljs.core.truth_(new cljs.core.Keyword(null,"url","url",276297046).cljs$core$IFn$_invoke$arity$1(m))){
return cljs.core.println.call(null,["\n Please see http://clojure.org/",cljs.core.str.cljs$core$IFn$_invoke$arity$1(new cljs.core.Keyword(null,"url","url",276297046).cljs$core$IFn$_invoke$arity$1(m))].join(''));
} else {
return null;
}
} else {
return cljs.core.println.call(null,["\n Please see http://clojure.org/special_forms#",cljs.core.str.cljs$core$IFn$_invoke$arity$1(new cljs.core.Keyword(null,"name","name",1843675177).cljs$core$IFn$_invoke$arity$1(m))].join(''));
}
} else {
if(cljs.core.truth_(new cljs.core.Keyword(null,"macro","macro",-867863404).cljs$core$IFn$_invoke$arity$1(m))){
cljs.core.println.call(null,"Macro");
} else {
}
if(cljs.core.truth_(new cljs.core.Keyword(null,"repl-special-function","repl-special-function",1262603725).cljs$core$IFn$_invoke$arity$1(m))){
cljs.core.println.call(null,"REPL Special Function");
} else {
}
cljs.core.println.call(null," ",new cljs.core.Keyword(null,"doc","doc",1913296891).cljs$core$IFn$_invoke$arity$1(m));
if(cljs.core.truth_(new cljs.core.Keyword(null,"protocol","protocol",652470118).cljs$core$IFn$_invoke$arity$1(m))){
var seq__25576_25616 = cljs.core.seq.call(null,new cljs.core.Keyword(null,"methods","methods",453930866).cljs$core$IFn$_invoke$arity$1(m));
var chunk__25577_25617 = null;
var count__25578_25618 = (0);
var i__25579_25619 = (0);
while(true){
if((i__25579_25619 < count__25578_25618)){
var vec__25580_25620 = cljs.core._nth.call(null,chunk__25577_25617,i__25579_25619);
var name_25621 = cljs.core.nth.call(null,vec__25580_25620,(0),null);
var map__25583_25622 = cljs.core.nth.call(null,vec__25580_25620,(1),null);
var map__25583_25623__$1 = ((((!((map__25583_25622 == null)))?((((map__25583_25622.cljs$lang$protocol_mask$partition0$ & (64))) || ((cljs.core.PROTOCOL_SENTINEL === map__25583_25622.cljs$core$ISeq$)))?true:false):false))?cljs.core.apply.call(null,cljs.core.hash_map,map__25583_25622):map__25583_25622);
var doc_25624 = cljs.core.get.call(null,map__25583_25623__$1,new cljs.core.Keyword(null,"doc","doc",1913296891));
var arglists_25625 = cljs.core.get.call(null,map__25583_25623__$1,new cljs.core.Keyword(null,"arglists","arglists",1661989754));
cljs.core.println.call(null);
cljs.core.println.call(null," ",name_25621);
cljs.core.println.call(null," ",arglists_25625);
if(cljs.core.truth_(doc_25624)){
cljs.core.println.call(null," ",doc_25624);
} else {
}
var G__25626 = seq__25576_25616;
var G__25627 = chunk__25577_25617;
var G__25628 = count__25578_25618;
var G__25629 = (i__25579_25619 + (1));
seq__25576_25616 = G__25626;
chunk__25577_25617 = G__25627;
count__25578_25618 = G__25628;
i__25579_25619 = G__25629;
continue;
} else {
var temp__4657__auto___25630 = cljs.core.seq.call(null,seq__25576_25616);
if(temp__4657__auto___25630){
var seq__25576_25631__$1 = temp__4657__auto___25630;
if(cljs.core.chunked_seq_QMARK_.call(null,seq__25576_25631__$1)){
var c__24378__auto___25632 = cljs.core.chunk_first.call(null,seq__25576_25631__$1);
var G__25633 = cljs.core.chunk_rest.call(null,seq__25576_25631__$1);
var G__25634 = c__24378__auto___25632;
var G__25635 = cljs.core.count.call(null,c__24378__auto___25632);
var G__25636 = (0);
seq__25576_25616 = G__25633;
chunk__25577_25617 = G__25634;
count__25578_25618 = G__25635;
i__25579_25619 = G__25636;
continue;
} else {
var vec__25585_25637 = cljs.core.first.call(null,seq__25576_25631__$1);
var name_25638 = cljs.core.nth.call(null,vec__25585_25637,(0),null);
var map__25588_25639 = cljs.core.nth.call(null,vec__25585_25637,(1),null);
var map__25588_25640__$1 = ((((!((map__25588_25639 == null)))?((((map__25588_25639.cljs$lang$protocol_mask$partition0$ & (64))) || ((cljs.core.PROTOCOL_SENTINEL === map__25588_25639.cljs$core$ISeq$)))?true:false):false))?cljs.core.apply.call(null,cljs.core.hash_map,map__25588_25639):map__25588_25639);
var doc_25641 = cljs.core.get.call(null,map__25588_25640__$1,new cljs.core.Keyword(null,"doc","doc",1913296891));
var arglists_25642 = cljs.core.get.call(null,map__25588_25640__$1,new cljs.core.Keyword(null,"arglists","arglists",1661989754));
cljs.core.println.call(null);
cljs.core.println.call(null," ",name_25638);
cljs.core.println.call(null," ",arglists_25642);
if(cljs.core.truth_(doc_25641)){
cljs.core.println.call(null," ",doc_25641);
} else {
}
var G__25643 = cljs.core.next.call(null,seq__25576_25631__$1);
var G__25644 = null;
var G__25645 = (0);
var G__25646 = (0);
seq__25576_25616 = G__25643;
chunk__25577_25617 = G__25644;
count__25578_25618 = G__25645;
i__25579_25619 = G__25646;
continue;
}
} else {
}
}
break;
}
} else {
}
if(cljs.core.truth_(n)){
var temp__4657__auto__ = cljs.spec.alpha.get_spec.call(null,cljs.core.symbol.call(null,[cljs.core.str.cljs$core$IFn$_invoke$arity$1(cljs.core.ns_name.call(null,n))].join(''),cljs.core.name.call(null,nm)));
if(cljs.core.truth_(temp__4657__auto__)){
var fnspec = temp__4657__auto__;
cljs.core.print.call(null,"Spec");
var seq__25590 = cljs.core.seq.call(null,new cljs.core.PersistentVector(null, 3, 5, cljs.core.PersistentVector.EMPTY_NODE, [new cljs.core.Keyword(null,"args","args",1315556576),new cljs.core.Keyword(null,"ret","ret",-468222814),new cljs.core.Keyword(null,"fn","fn",-1175266204)], null));
var chunk__25591 = null;
var count__25592 = (0);
var i__25593 = (0);
while(true){
if((i__25593 < count__25592)){
var role = cljs.core._nth.call(null,chunk__25591,i__25593);
var temp__4657__auto___25647__$1 = cljs.core.get.call(null,fnspec,role);
if(cljs.core.truth_(temp__4657__auto___25647__$1)){
var spec_25648 = temp__4657__auto___25647__$1;
cljs.core.print.call(null,["\n ",cljs.core.str.cljs$core$IFn$_invoke$arity$1(cljs.core.name.call(null,role)),":"].join(''),cljs.spec.alpha.describe.call(null,spec_25648));
} else {
}
var G__25649 = seq__25590;
var G__25650 = chunk__25591;
var G__25651 = count__25592;
var G__25652 = (i__25593 + (1));
seq__25590 = G__25649;
chunk__25591 = G__25650;
count__25592 = G__25651;
i__25593 = G__25652;
continue;
} else {
var temp__4657__auto____$1 = cljs.core.seq.call(null,seq__25590);
if(temp__4657__auto____$1){
var seq__25590__$1 = temp__4657__auto____$1;
if(cljs.core.chunked_seq_QMARK_.call(null,seq__25590__$1)){
var c__24378__auto__ = cljs.core.chunk_first.call(null,seq__25590__$1);
var G__25653 = cljs.core.chunk_rest.call(null,seq__25590__$1);
var G__25654 = c__24378__auto__;
var G__25655 = cljs.core.count.call(null,c__24378__auto__);
var G__25656 = (0);
seq__25590 = G__25653;
chunk__25591 = G__25654;
count__25592 = G__25655;
i__25593 = G__25656;
continue;
} else {
var role = cljs.core.first.call(null,seq__25590__$1);
var temp__4657__auto___25657__$2 = cljs.core.get.call(null,fnspec,role);
if(cljs.core.truth_(temp__4657__auto___25657__$2)){
var spec_25658 = temp__4657__auto___25657__$2;
cljs.core.print.call(null,["\n ",cljs.core.str.cljs$core$IFn$_invoke$arity$1(cljs.core.name.call(null,role)),":"].join(''),cljs.spec.alpha.describe.call(null,spec_25658));
} else {
}
var G__25659 = cljs.core.next.call(null,seq__25590__$1);
var G__25660 = null;
var G__25661 = (0);
var G__25662 = (0);
seq__25590 = G__25659;
chunk__25591 = G__25660;
count__25592 = G__25661;
i__25593 = G__25662;
continue;
}
} else {
return null;
}
}
break;
}
} else {
return null;
}
} else {
return null;
}
}
});
//# sourceMappingURL=repl.js.map
|
//
//  VHCFavor.h
//  VHCFoundation
//
//  Favor ("like"/upvote) helper.
//  Created by vhall on 2019/9/25.
//  Copyright © 2019 vhall. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "VHCError.h"
NS_ASSUME_NONNULL_BEGIN
/// Sends a "favor" (like) to the host and reports the locally known favor count.
@interface VHCFavor : NSObject
/// Sends one favor to the host.
/// @param sucess Called with the raw response dictionary on success.
/// @param failed Called with a VHCError describing the failure.
+ (void)favorHostSucessed:(void(^)(NSDictionary *result))sucess failed:(void(^)(VHCError *error))failed;
/// Returns the favor count. NOTE(review): the source of this count (cache vs.
/// server) is not visible in this header — confirm in the implementation.
+ (NSInteger)getFavorNum;
@end
NS_ASSUME_NONNULL_END
|
#!/bin/bash
# Run one googlenet inference pass per attached camera, all writing the same
# timestamped output image (later cameras overwrite earlier ones, as before).
DATE=$(date +"%Y-%m-%d_%H%M")
for CAMERA in /dev/video0 /dev/video1 /dev/video2; do
    python3 -W ignore /home/nvidia/nvme/minifi-jetson-xavier/demo.py --camera "$CAMERA" --network googlenet /home/nvidia/nvme/images/$DATE.jpg 2>/dev/null
done
|
def generateSchemaSQL(table_columns):
    """Render CREATE TABLE statements for a schema description.

    Args:
        table_columns: mapping of table name -> mapping of column name -> SQL
            datatype string.

    Returns:
        One string containing a ``CREATE TABLE`` statement per table, each
        followed by a blank line.

    Fix over the original: the original trimmed the trailing comma with
    ``sql_code[:-2]`` on the *accumulated* string, which chopped two valid
    characters (the "(\\n") whenever a table had no columns. Joining the
    column lines per table avoids the trailing comma entirely.
    """
    statements = []
    for table, columns in table_columns.items():
        column_lines = ",\n".join(
            f"  {column} {datatype}" for column, datatype in columns.items()
        )
        statements.append(f"CREATE TABLE {table} (\n{column_lines}\n);\n\n")
    return "".join(statements)
package ai.knearestneighbour;
import datageneration.DataPoint;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * One-dimensional k-nearest-neighbour classifier over pressure readings.
 * Each unlabelled point receives the most frequent label among the k closest
 * training points, where distance is the absolute pressure difference.
 */
public class KNearestNeighbour {
    /**
     * Labels every point in {@code listOfPoints} via k-NN against
     * {@code trainingListOfPoints} and returns the (mutated) input list.
     */
    public static List<DataPoint> findErrors(List<DataPoint> listOfPoints,List<DataPoint> trainingListOfPoints){
        // first find out what value k is (odd, roughly sqrt(n)/2 of the training set)
        int k = determineK(trainingListOfPoints);
        for (DataPoint testDataPoint : listOfPoints) {
            //creates a list of distances between each value
            List<KNNDataPoint> distanceList = calculateDistance(trainingListOfPoints,testDataPoint);
            //sort the list according to distance
            Collections.sort(distanceList);
            //then according to whatever number k is compare test data to k values
            // assign pressure label to mode of test data labels
            List<String> labelList = new ArrayList<>();
            for (int i = 0; i < k; i++) {
                KNNDataPoint distancePoint = distanceList.get(i);
                labelList.add(distancePoint.getPressureLabel());
            }
            String testDataLabel = determineMode(labelList);
            testDataPoint.setPressureLabel(testDataLabel);
        }
        return listOfPoints;
    }
    /**
     * Returns the most frequent label (O(n^2) scan).
     * NOTE(review): on a tie the first label to reach the maximal count wins;
     * "undetermined" is returned only for an empty list — the original
     * comment claiming ties return "undetermined" was inaccurate.
     */
    private static String determineMode(List<String> listOfLabels){
        String maxValue = "undetermined";
        int maxCount = 0;
        for (int i = 0; i < listOfLabels.size(); ++i) {
            int count = 0;
            for (int j = 0; j < listOfLabels.size(); ++j) {
                if (listOfLabels.get(j).equals(listOfLabels.get(i))) ++count;
            }
            // strictly greater: earlier labels win ties
            if (count > maxCount) {
                maxCount = count;
                maxValue = listOfLabels.get(i);
            }
        }
        return maxValue;
    }
    /**
     * Chooses k as round(sqrt(trainingSize) / 2), forced odd (an even result
     * is decremented) so majority voting cannot split evenly between two labels.
     */
    private static int determineK (List<DataPoint> listOfPoints) {
        // gets size of list
        double sizeDouble = listOfPoints.size();
        double root = Math.sqrt(sizeDouble);
        //determines the size of k
        double rawK = root / 2 ;
        int num = Math.round( ( float )rawK ) ;
        if ( num%2 != 0 ) {
            return num ;
        }
        else {
            return num - 1 ;
        }
    }
    /**
     * Builds one KNNDataPoint per training point carrying the absolute
     * pressure difference to {@code dataPoint} (BigDecimal for exactness).
     */
    private static List<KNNDataPoint> calculateDistance(List<DataPoint> trainingDataList, DataPoint dataPoint){
        // returns a list of how far each training point is to the dataPoint given
        List<KNNDataPoint> distanceList = new ArrayList<>();
        for (DataPoint trainingPoint:trainingDataList) {
            BigDecimal tPressure = BigDecimal.valueOf(trainingPoint.getPressure());
            BigDecimal dpPressure = BigDecimal.valueOf(dataPoint.getPressure());
            BigDecimal difference = tPressure.subtract(dpPressure);
            // convert training point to a KNN point and add difference value
            //convert difference to absolute
            KNNDataPoint distanceCalculatedPoint = new KNNDataPoint(trainingPoint);
            distanceCalculatedPoint.setDistance(difference.abs());
            distanceList.add(distanceCalculatedPoint);
        }
        return distanceList;
    }
}
|
# Generates per-channel calibration constants (calib.csv) for a dual-readout
# calorimeter: column 2 = Cherenkov factor, column 3 = scintillation factor.
ccalib=`python -c "print( (1535.0 / 23.1700))"` # hit/edep energy for GeV unit
scalib=`python -c 'print((26310.0 / 23.1700))'`
# after run analysis. apply the below codes. You must change the scale factors. use fit mean value
cccalib=`python -c "print($ccalib * 25.54 / 30)"`
sscalib=`python -c "print($scalib * 26.43 / 30)"`
# Truncate the raw ratios to 7 characters (only used by the commented-out
# equal_cons.csv variant below).
ccalib=${ccalib:0:7}
scalib=${scalib:0:7}
# echo "1 $ccalib $scalib" > equal_cons.csv
##############33
# echo "1 $ccalib $scalib" > calib.csv
##############33
# echo "1 $cccalib $sscalib" > calib_new2.csv
##############33
# Channel 1 starts (overwrites) the file; channels 2..92 are appended below.
echo "1 $cccalib $sscalib" > calib.csv
##############33
for ((i=2;i<=92;i++))
do
##############33
# echo "1 $ccalib $scalib" >> calib.csv
##############33
# echo "$i $ccalib $scalib" >> equal_cons.csv
# echo "$i $cccalib $sscalib" >> calib_new2.csv
##############33
# Every channel currently gets the same fitted pair of constants.
echo "$i $cccalib $sscalib" >> calib.csv
##############33
done
#!/bin/sh
# Bootstrap Open vSwitch: stamp version/system metadata into the singleton
# Open_vSwitch record and expose the database to managers on TCP 6640.

ovs_version=$(ovs-vsctl -V | grep ovs-vsctl | awk '{print $4}')
ovs_db_version=$(ovsdb-tool schema-version /usr/share/openvswitch/vswitch.ovsschema)

# Give ovsdb-server and vswitchd a moment to come up before configuring.
sleep 3

# Set one column on the root Open_vSwitch record without waiting for vswitchd.
set_root_column() {
    ovs-vsctl --no-wait -- set Open_vSwitch . "$1"
}

ovs-vsctl --no-wait -- init
set_root_column db-version="${ovs_db_version}"
set_root_column ovs-version="${ovs_version}"
set_root_column system-type="docker-ovs"
set_root_column system-version="0.1"
set_root_column external-ids:system-id="$(cat /proc/sys/kernel/random/uuid)"
ovs-vsctl --no-wait -- set-manager ptcp:6640
ovs-appctl -t ovsdb-server ovsdb-server/add-remote db:Open_vSwitch,Open_vSwitch,manager_options
|
#!/bin/bash
set -ex

# Installs /usr/bin/start-bosh, a helper that prepares loop devices / cgroups
# and an ext4-backed graph directory for garden's aufs, then execs runsvdir.
# The heredoc delimiter is quoted ('BASH'), so nothing inside the script body
# is expanded at install time — it is written to disk verbatim.
cat > /usr/bin/start-bosh <<'BASH'
#!/bin/bash

set -e

if [ ! "$BOSH_LITE_NO_AUFS" ]; then
  (
    set -e

    mount_path=/tmp/self-cgroups
    cgroups_path=`cat /proc/self/cgroup|grep devices|cut -d: -f3`

    # Clean up possibly leftover cgroups mount
    [ -d $mount_path ] && umount $mount_path && rmdir $mount_path

    # Make new mount for cgroups
    mkdir -p $mount_path
    mount -t cgroup -o devices none $mount_path

    # Allow loop devices
    echo 'b 7:* rwm' > $mount_path/$cgroups_path/devices.allow

    # Clean up cgroups mount
    umount $mount_path
    rmdir $mount_path

    for i in $(seq 0 260); do
      mknod -m660 /dev/loop${i} b 7 $i 2>/dev/null || true
    done
  )

  # Aufs on aufs doesnt work
  truncate -s 10G /tmp/garden-disk
  mkfs -t ext4 -F /tmp/garden-disk

  mkdir -p /var/vcap/data/garden/aufs_graph/
  mount /tmp/garden-disk /var/vcap/data/garden/aufs_graph/
fi

# Global package cache configuration
mkdir -p /vagrant
chmod 777 /vagrant

# Start agent & monit
exec /usr/sbin/runsvdir-start <&- >/dev/null 2>&1
BASH

# Executable by all, owned by root, like any system binary.
chmod 755 /usr/bin/start-bosh
chown root:root /usr/bin/start-bosh
|
import numpy as np


class GaussianQuadrature:
    """Caches Gauss-Legendre nodes paired with custom, normalized weights."""

    def __init__(self):
        # numPoints -> (nodes, weights summing to 1)
        self._gaussPoints = {}

    def calculate_gauss_points(self, numPoints, weightFunction):
        """Evaluate weightFunction at the Legendre nodes, normalize, and cache."""
        nodes, _ = np.polynomial.legendre.leggauss(numPoints)
        raw = weightFunction(nodes)
        self._gaussPoints[numPoints] = (nodes, raw / np.sum(raw))


# Example usage
def weightFunction(x):
    return np.exp(-x**2)  # Example weight function


gaussian = GaussianQuadrature()
gaussian.calculate_gauss_points(3, weightFunction)
print(gaussian._gaussPoints)
<filename>lib/helpers/index.js<gh_stars>1-10
// Barrel file: re-exports each helper's default export under one entry point.
export { default as choiceOfChoices } from './choiceOfChoices';
export { default as expandChildren } from './expandChildren';
export { default as useIsMobile } from './useIsMobile';
export { default as useMatchMedia } from './useMatchMedia';
export { default as usePortal } from './usePortal';
|
<reponame>akokhanovskyi/kaa<filename>server/node/src/main/java/org/kaaproject/kaa/server/admin/client/mvp/data/EventMapDataProvider.java<gh_stars>0
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.admin.client.mvp.data;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.kaaproject.avro.ui.gwt.client.widget.grid.AbstractGrid;
import org.kaaproject.kaa.common.dto.event.ApplicationEventAction;
import org.kaaproject.kaa.common.dto.event.ApplicationEventMapDto;
import org.kaaproject.kaa.common.dto.event.EcfInfoDto;
import org.kaaproject.kaa.common.dto.event.EventClassDto;
import org.kaaproject.kaa.common.dto.event.EventClassType;
import org.kaaproject.kaa.server.admin.client.KaaAdmin;
import org.kaaproject.kaa.server.admin.client.mvp.activity.grid.AbstractDataProvider;
import org.kaaproject.kaa.server.admin.client.util.HasErrorMessage;
import com.google.gwt.user.client.rpc.AsyncCallback;
/**
 * Grid data provider for application event maps.
 * If an explicit event-map list was injected via {@link #setEventMaps}, it is
 * served as-is; otherwise the maps are derived asynchronously from the event
 * classes of the configured ECF (event class family) version, each defaulting
 * to {@link ApplicationEventAction#BOTH}.
 */
public class EventMapDataProvider extends AbstractDataProvider<ApplicationEventMapDto, String>{
    // Event class family descriptor used to look up event classes when no
    // explicit eventMaps list has been set.
    private EcfInfoDto ecf;
    private List<ApplicationEventMapDto> eventMaps;
    public EventMapDataProvider(AbstractGrid<ApplicationEventMapDto,String> dataGrid,
            HasErrorMessage hasErrorMessage) {
        super(dataGrid, hasErrorMessage);
    }
    /** Sets the event class family whose classes seed the grid. */
    public void setEcf(EcfInfoDto ecf) {
        this.ecf = ecf;
    }
    /** Overrides the derived data with an explicit list (takes precedence). */
    public void setEventMaps(List<ApplicationEventMapDto> eventMaps) {
        this.eventMaps = eventMaps;
    }
    @Override
    protected void loadData(final LoadCallback callback) {
        // Only fetch remotely when no explicit list exists AND an ECF is set.
        if (this.eventMaps == null && ecf != null) {
            KaaAdmin.getDataSource().getEventClassesByFamilyIdVersionAndType(ecf.getEcfId(), ecf.getVersion(), EventClassType.EVENT, new AsyncCallback<List<EventClassDto>>() {
                @Override
                public void onFailure(Throwable caught) {
                    callback.onFailure(caught);
                }
                @Override
                public void onSuccess(List<EventClassDto> result) {
                    // Wrap each event class in a map entry with BOTH actions.
                    List<ApplicationEventMapDto> eventMaps = new ArrayList<>(result.size());
                    for (EventClassDto eventClass : result) {
                        ApplicationEventMapDto eventMap = new ApplicationEventMapDto();
                        eventMap.setEventClassId(eventClass.getId());
                        eventMap.setFqn(eventClass.getFqn());
                        eventMap.setAction(ApplicationEventAction.BOTH);
                        eventMaps.add(eventMap);
                    }
                    callback.onSuccess(eventMaps);
                }
            });
        } else if (this.eventMaps != null) {
            callback.onSuccess(this.eventMaps);
        } else {
            // No source configured at all: report an empty result.
            List<ApplicationEventMapDto> data = Collections.emptyList();
            callback.onSuccess(data);
        }
    }
}
|
package string_handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
 * BOJ 17176: prints "y" when the plain text's character histogram matches the
 * encrypted code histogram, "n" otherwise.
 * Code mapping: 0 = space, 1..26 = 'A'..'Z', 27..52 = 'a'..'z'.
 */
public class Boj17176 {

    private static final char SPACE = ' ';

    public static void main(String[] args) throws Exception{
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        int count = Integer.parseInt(reader.readLine());

        // Histogram of the encrypted codes read from the second input line.
        int[] encrypted = new int[53];
        StringTokenizer tokens = new StringTokenizer(reader.readLine());
        for (int i = 0; i < count; i++) {
            encrypted[Integer.parseInt(tokens.nextToken())]++;
        }

        System.out.println(decode(count, encrypted, reader.readLine().toCharArray()));
    }

    /** Compares the plain text's histogram against the encrypted one. */
    private static String decode(int n, int[] encrypt, char[] plain) {
        int[] histogram = new int[53];
        for (char c : plain) {
            int slot;
            if (c == SPACE) {
                slot = 0;
            } else if (c >= 'A' && c <= 'Z') {
                slot = (c - 'A') + 1;
            } else {
                slot = (c - 'a') + 27;
            }
            histogram[slot]++;
        }
        for (int i = 0; i < histogram.length; i++) {
            if (histogram[i] != encrypt[i]) {
                return "n";
            }
        }
        return "y";
    }
}
|
#!/bin/bash
# SLURM job: LSGAN training run (gaussian filter, discriminator ratio 0.25).
# Fixes over the original: the duplicate "#SBATCH --output" directive is
# removed (only the last one took effect), and the individual
# BEGIN/END/FAIL/REQUEUE mail-type lines are dropped because
# "--mail-type=ALL" already subsumes them.
#SBATCH --account=def-lombaert
#SBATCH --gres=gpu:v100l:1              # Number of GPUs (per node)
#SBATCH --cpus-per-task=8               # CPU cores/threads
#SBATCH --mem=64G                       # memory (per node)
#SBATCH --time=05-00:00                 # time (DD-HH:MM)
#SBATCH --mail-user=pierre-luc.delisle@live.com
#SBATCH --mail-type=ALL                 # ALL covers BEGIN/END/FAIL/REQUEUE
#SBATCH --output=LSGAN_school_gaussian_filter_disc_ratio_0_25.out
#SBATCH --job-name=LSGAN_school_gaussian_filter_disc_ratio_0_25

# Log the GPU state before training starts.
nvidia-smi

source /home/pld2602/venv/bin/activate

CUDA_VISIBLE_DEVICES=0 python /project/def-lombaert/pld2602/code/deepNormalizev5/main_cc.py --config=/project/def-lombaert/pld2602/code/deepNormalizev5/deepNormalize/experiments/experiments_school/LSGAN/disc_ratio_gaussian_filter/config_disc_ratio_0.25.yaml
|
# Sample usage of the ElectricityMonitor class
# (ElectricityMonitor is defined elsewhere; this snippet only exercises its API.)
monitor = ElectricityMonitor()
monitor.add_household(1)  # register household ids before recording usage
monitor.add_household(2)
monitor.record_consumption(1, 100.5)
monitor.record_consumption(2, 75.3)
monitor.record_consumption(1, 50.2)
# NOTE(review): the meaning of total_consumption(1, 3) is not visible here, and
# the claimed output 225.0 matches neither household 1's total (150.7),
# household 2's (75.3), nor the grand total (226.0) — verify against the class.
print(monitor.total_consumption(1, 3))  # Output: 225.0
years = ["2000", "2001", "2002", "2003", "2004", "2005", "2006", "2007", "2008", "2009", "2010", "2011", "2012", "2013", "2014", "2015", "2016", "2017", "2018", "2019", "2020", "2021", "2022", "2023", "2024", "2025"] |
import { Routes, RouterModule } from '@angular/router';
import { HomeComponent } from './components/home';
import { TodosContainerComponent } from './components/todosContainer';
import { ToDosComponent } from './components/todos';
/**
 * Application route table.
 *
 * The todo views all render ToDosComponent and differ only via their `data`
 * payload: `days` selects the time window (0 = no limit) and `title` is the
 * page heading. Unknown paths fall back to the todos container.
 */
export const ROUTES: Routes = [
    {
        path: '',
        component: TodosContainerComponent,
        children: [
            {
                // Default child view: today's todos.
                path: '',
                component: ToDosComponent,
                data: { days: 1, title: 'Today' }
            },
            {
                path: 'all',
                component: ToDosComponent,
                data: { days: 0, title: 'All' }
            },
            {
                path: 'today',
                component: ToDosComponent,
                data: { days: 1, title: 'Today' }
            },
            {
                path: 'timeList',
                component: ToDosComponent,
                data: { days: 7, title: 'Next 7 Days' }
            }
        ]
    },
    {
        path: 'home',
        component: HomeComponent
    },
    {
        // Catch-all: unmatched URLs show the todos container.
        path: '**',
        component: TodosContainerComponent
    },
];
|
#!/bin/bash
# Runs one JMeter scenario against a remote JMeter cluster and tails its log.
#
# Positional parameters:
#   1 jmeterHome     JMeter install dir        2 ips          remote server IPs (comma-sep)
#   3 scenario_name  .jmx scenario stem/dir    4 scenario_id  unique run id (log dir name)
#   5 numThreads  6 rampupTime  7 ctrlLoops  8 apiKey         substituted into the JMX
#   9 csvFileHost 10 csvFileRequest 11 createGroupApi         substituted into the JMX
jmeterHome=$1
ips=$2
scenario_name=$3
scenario_id=$4
numThreads=$5
rampupTime=$6
ctrlLoops=$7
apiKey=$8
csvFileHost=${9}
csvFileRequest=${10}
createGroupApi=${11}
JMETER_HOME=${jmeterHome}
SCENARIO_LOGS=~/sunbird-perf-tests/sunbird-platform/logs/$scenario_name
JMETER_CLUSTER_IPS=$ips
echo "Executing $scenario_id"
# Remove a stale per-run log if one exists.
if [ -f ~/logs/$scenario_id ]
then
    rm ~/logs/$scenario_id
fi
JMX_FILE_PATH=~/current_scenario/$scenario_name.jmx
# mkdir -p creates intermediate directories and is idempotent (the original
# bare mkdir calls errored when the directories already existed).
mkdir -p $SCENARIO_LOGS/$scenario_id/logs
mkdir -p $SCENARIO_LOGS/$scenario_id/server/
rm ~/current_scenario/*.jmx
cp ~/sunbird-perf-tests/sunbird-platform/$scenario_name/$scenario_name.jmx $JMX_FILE_PATH
echo "ip = " ${ips}
echo "scenario_name = " ${scenario_name}
echo "scenario_id = " ${scenario_id}
echo "numThreads = " ${numThreads}
echo "rampupTime = " ${rampupTime}
echo "ctrlLoops = " ${ctrlLoops}
echo "apiKey = " ${apiKey}
echo "csvFileHost = " ${csvFileHost}
echo "csvFileRequest = " ${csvFileRequest}
echo "createGroupApi = " ${createGroupApi}
# Substitute all placeholders in ONE sed pass instead of seven
# rewrite-and-rename round trips. '#' delimiters are used for the
# expressions whose replacement values may contain '/' (paths/URLs).
sed -e "s/THREADS_COUNT/${numThreads}/g" \
    -e "s/RAMPUP_TIME/${rampupTime}/g" \
    -e "s/CTRL_LOOPS/${ctrlLoops}/g" \
    -e "s/API_KEY/${apiKey}/g" \
    -e "s#DOMAIN_FILE#${csvFileHost}#g" \
    -e "s#CSV_FILE#${csvFileRequest}#g" \
    -e "s#PATH_PREFIX#${createGroupApi}#g" \
    $JMX_FILE_PATH > jmx.tmp
mv jmx.tmp $JMX_FILE_PATH
###Copy JMX File to Logs dir ###
cp $JMX_FILE_PATH $SCENARIO_LOGS/$scenario_id/logs
echo "Running ... "
echo "$JMETER_HOME/bin/jmeter.sh -n -t $JMX_FILE_PATH -R ${ips} -l $SCENARIO_LOGS/$scenario_id/logs/output.xml -j $SCENARIO_LOGS/$scenario_id/logs/jmeter.log > $SCENARIO_LOGS/$scenario_id/logs/scenario.log"
### Create HTML reports for every run ###
nohup $JMETER_HOME/bin/jmeter.sh -n -t $JMX_FILE_PATH -R ${ips} -l $SCENARIO_LOGS/$scenario_id/logs/output.xml -e -o $SCENARIO_LOGS/$scenario_id/logs/summary -j $SCENARIO_LOGS/$scenario_id/logs/jmeter.log > $SCENARIO_LOGS/$scenario_id/logs/scenario.log 2>&1 &
echo "Log file ..."
echo "$SCENARIO_LOGS/$scenario_id/logs/scenario.log"
echo "Execution of $scenario_id Complete."
tail -f $SCENARIO_LOGS/$scenario_id/logs/scenario.log
|
<reponame>Eugene-Fedorenko/prebid-server
package admixer
import (
"encoding/json"
"fmt"
"net/http"
"github.com/mxmCherry/openrtb"
"github.com/eugene-fedorenko/prebid-server/adapters"
"github.com/eugene-fedorenko/prebid-server/config"
"github.com/eugene-fedorenko/prebid-server/errortypes"
"github.com/eugene-fedorenko/prebid-server/openrtb_ext"
)
// AdmixerAdapter sends OpenRTB bid requests to the configured Admixer endpoint.
type AdmixerAdapter struct {
	endpoint string
}
// Builder builds a new instance of the Admixer adapter for the given bidder with the given config.
func Builder(bidderName openrtb_ext.BidderName, config config.Adapter) (adapters.Bidder, error) {
	return &AdmixerAdapter{endpoint: config.Endpoint}, nil
}
// admixerImpExt is the impression-level ext payload forwarded to Admixer.
type admixerImpExt struct {
	CustomParams map[string]interface{} `json:"customParams"`
}
// MakeRequests builds at most one outgoing HTTP request for the bid request.
// Any preprocessing errors abort the request entirely.
func (a *AdmixerAdapter) MakeRequests(request *openrtb.BidRequest, reqInfo *adapters.ExtraRequestInfo) ([]*adapters.RequestData, []error) {
	rq, errs := a.makeRequest(request)
	if len(errs) > 0 {
		return nil, errs
	}
	if rq == nil {
		return nil, nil
	}
	return []*adapters.RequestData{rq}, nil
}
// makeRequest validates/rewrites each impression, drops invalid ones, and
// serializes the (mutated) bid request into a POST to the Admixer endpoint.
// Per-impression errors are accumulated and returned alongside the request.
func (a *AdmixerAdapter) makeRequest(request *openrtb.BidRequest) (*adapters.RequestData, []error) {
	if len(request.Imp) == 0 {
		return nil, []error{&errortypes.BadInput{
			Message: "No impressions in request",
		}}
	}

	var errs []error
	validImps := make([]openrtb.Imp, 0, len(request.Imp))
	for i := range request.Imp {
		// Work on a copy so a failed preprocess leaves request.Imp untouched.
		imp := request.Imp[i]
		if err := preprocess(&imp); err != nil {
			errs = append(errs, err)
			continue
		}
		validImps = append(validImps, imp)
	}
	if len(validImps) == 0 {
		return nil, errs
	}
	request.Imp = validImps

	body, err := json.Marshal(request)
	if err != nil {
		return nil, append(errs, err)
	}

	headers := http.Header{}
	headers.Add("Content-Type", "application/json;charset=utf-8")
	headers.Add("Accept", "application/json")
	return &adapters.RequestData{
		Method:  "POST",
		Uri:     a.endpoint,
		Body:    body,
		Headers: headers,
	}, errs
}
// preprocess validates the Admixer bidder ext of one impression and rewrites
// the impression in place: TagID <- ZoneId, BidFloor <- CustomBidFloor, and
// Ext <- {"customParams": ...} (or nil when no custom params are present).
func preprocess(imp *openrtb.Imp) error {
	var bidderExt adapters.ExtImpBidder
	if err := json.Unmarshal(imp.Ext, &bidderExt); err != nil {
		return &errortypes.BadInput{
			Message: err.Error(),
		}
	}

	var admixerExt openrtb_ext.ExtImpAdmixer
	if err := json.Unmarshal(bidderExt.Bidder, &admixerExt); err != nil {
		return &errortypes.BadInput{
			Message: "Wrong Admixer bidder ext",
		}
	}

	// A canonical UUID/GUID is exactly 36 characters; a length check avoids
	// the cost of a regexp on this hot path (comment preserved from original).
	if len(admixerExt.ZoneId) != 36 {
		return &errortypes.BadInput{
			Message: "ZoneId must be UUID/GUID",
		}
	}

	imp.TagID = admixerExt.ZoneId
	imp.BidFloor = admixerExt.CustomBidFloor
	imp.Ext = nil

	if admixerExt.CustomParams == nil {
		return nil
	}
	ext, err := json.Marshal(admixerImpExt{CustomParams: admixerExt.CustomParams})
	if err != nil {
		return &errortypes.BadInput{
			Message: err.Error(),
		}
	}
	imp.Ext = ext
	return nil
}
// MakeBids maps the exchange's HTTP response onto typed bids.
// 204 (and an empty seat-bid list) means "no bids"; 5xx / 4xx / any other
// non-200 status are reported as errors.
func (a *AdmixerAdapter) MakeBids(internalRequest *openrtb.BidRequest, externalRequest *adapters.RequestData, response *adapters.ResponseData) (*adapters.BidderResponse, []error) {
	switch {
	case response.StatusCode == http.StatusNoContent:
		return nil, nil
	case response.StatusCode >= http.StatusInternalServerError:
		return nil, []error{&errortypes.BadServerResponse{
			Message: fmt.Sprintf("Unexpected status code: %d. Dsp server internal error", response.StatusCode),
		}}
	case response.StatusCode >= http.StatusBadRequest:
		return nil, []error{&errortypes.BadInput{
			Message: fmt.Sprintf("Unexpected status code: %d. Bad request to dsp", response.StatusCode),
		}}
	case response.StatusCode != http.StatusOK:
		return nil, []error{&errortypes.BadServerResponse{
			Message: fmt.Sprintf("Unexpected status code: %d", response.StatusCode),
		}}
	}

	var bidResp openrtb.BidResponse
	if err := json.Unmarshal(response.Body, &bidResp); err != nil {
		return nil, []error{err}
	}

	// Additional no-content check: a 200 with no bids is still "no bids".
	if len(bidResp.SeatBid) == 0 || len(bidResp.SeatBid[0].Bid) == 0 {
		return nil, nil
	}

	out := adapters.NewBidderResponseWithBidsCapacity(len(bidResp.SeatBid[0].Bid))
	for _, seat := range bidResp.SeatBid {
		for i := range seat.Bid {
			out.Bids = append(out.Bids, &adapters.TypedBid{
				Bid:     &seat.Bid[i],
				BidType: getMediaTypeForImp(seat.Bid[i].ImpID, internalRequest.Imp),
			})
		}
	}
	return out, nil
}
// getMediaTypeForImp derives the bid's media type from the matching request
// impression. If no impression matches (or the matching one declares no known
// media object), banner is assumed, mirroring the original behavior.
func getMediaTypeForImp(impID string, imps []openrtb.Imp) openrtb_ext.BidType {
	for i := range imps {
		if imps[i].ID != impID {
			continue
		}
		switch {
		case imps[i].Banner != nil:
			return openrtb_ext.BidTypeBanner
		case imps[i].Video != nil:
			return openrtb_ext.BidTypeVideo
		case imps[i].Native != nil:
			return openrtb_ext.BidTypeNative
		case imps[i].Audio != nil:
			return openrtb_ext.BidTypeAudio
		}
	}
	return openrtb_ext.BidTypeBanner
}
|
import numpy as np
def hashPeaks(A, songID, delay_time, delta_time, delta_freq):
    """Hash spectrogram peaks into anchor/target pairs for fingerprinting.

    For every ordered pair of distinct peaks (anchor, other) whose time gap
    lies in [delay_time, delay_time + delta_time] and whose frequency gap is
    at most delta_freq, emit one row:
        [freq_anchor, freq_other, time_diff, time_anchor, songID]

    Args:
        A: sequence of (frequency, time) peak tuples.
        songID: identifier stored in the last column of each row.
        delay_time, delta_time, delta_freq: pairing window parameters.

    Returns:
        float ndarray of shape (num_pairs, 5); shape (0, 5) when no pair
        qualifies.

    Fix over the original: the fixed ``len(A) * 100`` preallocation raised
    IndexError whenever a peak participated in more than 100 pairings (dense
    peak sets easily exceed that); rows are now accumulated in a list and
    converted once at the end.
    """
    rows = []
    numPeaks = len(A)
    for i in range(numPeaks):
        freq_anchor, time_anchor = A[i]  # current anchor peak
        for j in range(numPeaks):
            if i == j:
                continue
            freq_other, time_other = A[j]  # candidate target peak
            time_diff = abs(time_other - time_anchor)
            freq_diff = abs(freq_other - freq_anchor)
            # Keep pairs inside the time window and frequency band.
            if delay_time <= time_diff <= delay_time + delta_time and freq_diff <= delta_freq:
                rows.append([freq_anchor, freq_other, time_diff, time_anchor, songID])
    if not rows:
        # Preserve the original empty-result shape: (0, 5) float array.
        return np.zeros((0, 5))
    return np.array(rows, dtype=float)
<gh_stars>1-10
# File: D (Python 2.4)
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from direct.interval.ProjectileInterval import *
from direct.directnotify import DirectNotifyGlobal
from direct.task.Task import Task
from direct.distributed.GridChild import GridChild
from pirates.piratesbase import PLocalizer
from pirates.battle.CannonballProjectile import CannonballProjectile
from pirates.piratesbase import PiratesGlobals
from pirates.minigame import CannonDefenseGlobals, DistributedDefendWorld
from pirates.effects.SimpleSmokeCloud import SimpleSmokeCloud
from pirates.effects.FireTrail import FireTrail
from pirates.effects.FireballHit import FireballHit
from pirates.audio.SoundGlobals import loadSfx
from pirates.audio import SoundGlobals
import random
from direct.distributed.DistributedObject import DistributedObject
class DistributedFlamingBarrel(DistributedObject, GridChild):
    """Client-side flaming powder keg lobbed from an enemy ship at a player
    during the cannon-defense minigame.

    The barrel flies on a ProjectileInterval from the firing ship toward the
    targeted pirate.  It can be shot down mid-flight (shotDown) by player
    cannon fire, or land and daze its target (hitTarget).
    """

    def __init__(self, cr):
        DistributedObject.__init__(self, cr)
        GridChild.__init__(self)
        self.collNode = None
        self.destroyed = False
        self.smokeVfx = None
        self.barrelModel = None
        self.trailEffect = None
        # Populated by the required fields (setShipDoId / setTargetDoId)
        # before announceGenerate(); initialized here so the guard in
        # announceGenerate() and the cleanup paths never see a missing
        # attribute.
        self.ship = None
        self.pirateTarget = None
        self._initAudio()

    def _initAudio(self):
        # Preload every sound effect used over the barrel's lifetime.
        self.launchSound = loadSfx(SoundGlobals.SFX_MINIGAME_CANNON_BARREL_LAUNCH)
        self.hitSound = loadSfx(SoundGlobals.SFX_MINIGAME_CANNON_BARREL_HIT)
        self.shotDownSound = loadSfx(SoundGlobals.SFX_MINIGAME_CANNON_BARREL_SHOTDOWN)
        self.closeSound = loadSfx(SoundGlobals.SFX_MINIGAME_CANNON_BARREL_CLOSE)

    def setShipDoId(self, shipDoId):
        # Required field: the distributed ship that fired this barrel.
        self.ship = self.cr.getDo(shipDoId)

    def setTargetDoId(self, targetDoId):
        # Required field: the pirate avatar the barrel is aimed at.
        self.pirateTarget = self.cr.getDo(targetDoId)

    def setFlightDuration(self, duration):
        # Required field: total flight time in seconds, chosen by the AI.
        self.flightDuration = duration

    def announceGenerate(self):
        DistributedObject.announceGenerate(self)
        # Abort (and report the barrel as gone to the AI) if either endpoint
        # of the flight is missing or already cleaned up.
        # NOTE(review): the decompiled original read
        # ``a is None and b is None and a.isEmpty() or b.isEmpty()``, which
        # raises AttributeError when both DOs are None and fails to guard a
        # missing ship; rewritten as the clearly intended or-chain (the
        # short-circuit order keeps the isEmpty() calls safe).
        if self.ship is None or self.pirateTarget is None or self.ship.isEmpty() or self.pirateTarget.isEmpty():
            self.sendUpdate('shotDown')
            return None
        self.barrelModel = loader.loadModel('models/ammunition/pir_m_gam_can_powderKeg')
        self.barrelModel.setScale(2.0)
        base.playSfx(self.launchSound, node = self.barrelModel, cutoff = 2000)
        # Spawn above the ship's deck, then reparent (keeping the world
        # transform) to the target so the projectile math runs in the
        # target's coordinate space.
        self.barrelModel.reparentTo(self.ship)
        self.barrelModel.setPos(0, 0, 10)
        self.barrelModel.wrtReparentTo(self.pirateTarget)
        self.makeCollNode()
        self.barrelModel.setTag('objType', str(PiratesGlobals.COLL_FLAMING_BARREL))
        self.collNode.setPythonTag('barrel', self)
        # Arc toward the target while tumbling; play the "incoming" whistle
        # 1.2 s before impact, then resolve the hit.
        self.projectileInterval = Parallel(ProjectileInterval(self.barrelModel, endPos = Point3(0.0, 0.0, 4.5), duration = self.flightDuration, gravityMult = CannonDefenseGlobals.BARREL_GRAVITY), self.barrelModel.hprInterval(self.flightDuration, Vec3(720, 640, 440)), Sequence(Wait(self.flightDuration - 1.2), Func(base.playSfx, self.closeSound, node = self.barrelModel, cutoff = 2000), Wait(1.2), Func(self.hitTarget)), name = self.uniqueName('FlamingBarrelFlying'))
        self.collNode.reparentTo(self.barrelModel)
        self.projectileInterval.start()
        base.cTrav.addCollider(self.collNode, self.collHandler)
        base.cr.activeWorld.flamingBarrels.append(self)
        self.trailEffect = FireTrail.getEffect()
        if self.trailEffect:
            self.trailEffect.reparentTo(self.barrelModel)
            # Glow/blur on the trail scale with the user's effects setting.
            self.trailEffect.wantGlow = base.options.getSpecialEffectsSetting() >= base.options.SpecialEffectsMedium
            self.trailEffect.wantBlur = base.options.getSpecialEffectsSetting() >= base.options.SpecialEffectsHigh
            self.trailEffect.startLoop()

    def shotDown(self, s = None):
        # Player cannon fire destroyed the barrel mid-air: stop the flight,
        # swap the model for a smoke puff, and (slightly delayed, so the
        # effect reads on screen) tell the AI it was shot down.
        base.playSfx(self.shotDownSound, node = self.barrelModel, cutoff = 2000)
        self.destroyed = True
        if self in base.cr.activeWorld.flamingBarrels:
            base.cr.activeWorld.flamingBarrels.remove(self)
        self.startSmoke()
        self.projectileInterval.pause()
        self.projectileInterval.clearToInitial()
        self.barrelModel.removeNode()
        base.cTrav.removeCollider(self.collNode)
        self.collNode.removeNode()
        taskMgr.doMethodLater(0.40000000000000002, self.sendUpdate, name = self.uniqueName('SendShotDown'), extraArgs = [
            'shotDown'])

    def startSmoke(self):
        # Linger a smoke cloud at the point where the barrel was destroyed.
        self.smokeVfx = SimpleSmokeCloud.getEffect(unlimited = True)
        if self.smokeVfx:
            self.smokeVfx.reparentTo(self.pirateTarget)
            self.smokeVfx.setPos(self.barrelModel.getPos())
            self.smokeVfx.setEffectScale(0.59999999999999998)
            self.smokeVfx.play()

    def hitTarget(self):
        # Flight finished: the barrel lands on the targeted pirate.
        base.playSfx(self.hitSound, node = self.barrelModel, cutoff = 2000)
        if not self.destroyed:
            base.talkAssistant.receiveGameMessage(PLocalizer.CannonDefense['DizzyChatNotification'] % self.pirateTarget.name)
            # Only daze during active gameplay, never on result screens.
            if CannonDefenseGlobals.DAZED_ENABLED and isinstance(base.cr.activeWorld, DistributedDefendWorld.DistributedDefendWorld) and base.cr.activeWorld.fsm.getCurrentOrNextState() not in ('ResultScreen', 'Defeat', 'Victory'):
                self.pirateTarget.setPirateDazed(True)
        self.barrelModel.removeNode()
        self.barrelModel = None
        base.cTrav.removeCollider(self.collNode)
        self.collNode.removeNode()
        self.projectileInterval.pause()
        self.projectileInterval.clearToInitial()
        effect = SimpleSmokeCloud.getEffect(unlimited = True)
        if effect:
            effect.reparentTo(self.pirateTarget)
            effect.setEffectScale(0.59999999999999998)
            effect.play()
        effect = FireballHit.getEffect()
        if effect:
            effect.reparentTo(self.pirateTarget)
            effect.setScale(2.0)
            effect.play()

    def delete(self):
        # Cancel the delayed shotDown notification and release everything
        # that the normal shotDown/hitTarget paths did not already clean up.
        taskMgr.remove(self.uniqueName('SendShotDown'))
        if not self.destroyed:
            if isinstance(base.cr.activeWorld, DistributedDefendWorld.DistributedDefendWorld) and self in base.cr.activeWorld.flamingBarrels:
                base.cr.activeWorld.flamingBarrels.remove(self)
            self.destroyed = True
            if self.barrelModel is not None:
                self.barrelModel.removeNode()
                base.cTrav.removeCollider(self.collNode)
                self.collNode.removeNode()
                self.projectileInterval.pause()
                self.projectileInterval.clearToInitial()
        if self.smokeVfx:
            self.smokeVfx.cleanUpEffect()
            self.smokeVfx = None
        if self.trailEffect:
            self.trailEffect.stopLoop()
            self.trailEffect = None
        GridChild.delete(self)
        DistributedObject.delete(self)

    def makeCollNode(self):
        # Lazily build the tangible collision sphere that player cannonballs
        # test against while the barrel is in flight.
        if self.collNode == None:
            node = CollisionNode('flamingBarrelCollNode')
            node.setFromCollideMask(BitMask32.allOff())
            node.setIntoCollideMask(PiratesGlobals.TargetBitmask)
            weaponSphere = CollisionSphere(0.0, 0.0, 0.0, 2.0)
            weaponSphere.setTangible(1)
            node.clearSolids()
            node.addSolid(weaponSphere)
            self.collNode = NodePath(node)
            self.collHandler = CollisionHandlerEvent()
|
package ua.stqa.pft.addressbook.Tests;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import ua.stqa.pft.addressbook.Models.ContactData;
import ua.stqa.pft.addressbook.Models.Contacts;
import ua.stqa.pft.addressbook.Models.GroupData;
import ua.stqa.pft.addressbook.Models.Groups;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.*;
public class ContactToGroupAddingTests extends TestBase {

    /**
     * Guarantees at least one group and one contact exist before the test
     * runs, creating them through the UI when the database is empty.
     */
    @BeforeMethod
    public void ensurePreconditions() {
        if (app.db().groups().size() == 0) {
            app.goTo().group();
            app.group().create(new GroupData().withName("precName").withHeader("precHeader").withFooter("precFooter"));
        }
        if (app.db().contacts().size() == 0) {
            app.goTo().home();
            app.contact().create(new ContactData().withFirstname("test1").withLastname("test2")
                    .withAddress("someadrress").withPhoto("src/test/resources/test.png")
                    .withHomePhone("111").withWorkPhone("222").withMobilePhone("333")
                    .withEmail("email").withEmail2("email2").withEmail3("email3").inGroup(app.db().groups().iterator().next()), true);
        }
    }

    /**
     * Adds an arbitrary contact to an arbitrary group through the UI, then
     * verifies from the database that both sides of the relation grew by
     * exactly that association.
     */
    @Test
    public void testContactToGroupAdding() {
        ContactData pickedContact = app.db().contacts().iterator().next();
        GroupData pickedGroup = app.db().groups().iterator().next();
        // Snapshot both sides of the relation before acting.
        Groups groupsOfContactBefore = app.db().contacts().iterator().next().getGroups();
        Contacts contactsOfGroupBefore = app.db().groups().iterator().next().getContacts();
        app.goTo().home();
        app.contact().addContactToGroup(pickedContact.getId(), pickedGroup.getName());
        Groups groupsOfContactAfter = app.db().contacts().iterator().next().getGroups();
        Contacts contactsOfGroupAfter = app.db().groups().iterator().next().getContacts();
        // The expected state is the "before" snapshot plus the new link.
        contactsOfGroupBefore.add(pickedContact);
        groupsOfContactBefore.add(pickedGroup);
        assertThat(contactsOfGroupAfter, equalTo(contactsOfGroupBefore));
        assertThat(groupsOfContactAfter, equalTo(groupsOfContactBefore));
    }
}
|
<gh_stars>0
package cyclops.pure.typeclasses.taglessfinal;
import cyclops.function.higherkinded.Higher;
import cyclops.container.control.Option;
/**
 * Tagless-final algebra describing a key/value store.
 *
 * <p>Operations return their results wrapped in the higher-kinded witness
 * {@code W}, so a single program written against this interface can be
 * interpreted in different effect types.
 *
 * @param <W> witness type of the effect the interpreter runs in
 * @param <K> key type
 * @param <V> value type
 */
public interface StoreAlgebra<W,K,V> {
    /** Looks up the value bound to {@code key}; absent keys yield an empty Option inside the effect. */
    Higher<W, Option<V>> get(K key);
    /** Stores {@code value} under {@code key} as an effect with no meaningful result. */
    Higher<W, Void> put(K key, V value);
}
|
#!/bin/sh
# Copyright (C) 2014 Red Hat, Inc. All rights reserved.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
. lib/inittest

# This scenario needs more than lvmpolld provides; skip in that config.
test -e LOCAL_LVMPOLLD && skip

aux prepare_devs 2

# Put dev1 into a VG, then force-recreate the PV on it.
pvcreate "$dev1"
vgcreate foo "$dev1"

# -ff -y re-initializes the PV even though the device still belongs to
# VG "foo", wiping the old metadata without asking.
pvcreate -ff -y "$dev1"

# Report state after the forced recreate (foo has lost its only PV).
vgs

# The device is an orphan again, so the VG name can be reused.
vgcreate foo "$dev1"
|
const Component = require('./component');
/**
 * Key-switch footprint component.
 *
 * Wraps a layout key in a 4-pad 'switch' component whose reference is
 * derived from the key's composite name (`K_<name>`).
 */
class Switch extends Component {
    /**
     * @param {object} key  Layout key this switch belongs to.
     * @param {boolean} [leds=false]  Whether the switch carries an LED.
     */
    constructor(key, leds = false) {
        super('switch', `K_${key.compositeName}`, 4);
        this.key = key;
        this.leds = leds;
    }

    /**
     * Placement data for the switch at grid cell (x, y).
     * Positions are the cell center (wide keys shift by half the extra
     * width), converted from grid units via the 19.05 mm key pitch.
     */
    getAdditionalData(x, y, rotation) {
        const centerX = x + 0.5 + ((this.key.size - 1) / 2);
        const centerY = y + 0.5;
        return {
            key: this.key,
            leds: this.leds,
            x: (centerX * 1905) / 100,
            y: (centerY * 1905) / 100,
        };
    }
}

module.exports = Switch;
|
/* ------------------------------------------------------------------------
_codecs -- Provides access to the codec registry and the builtin
codecs.
This module should never be imported directly. The standard library
module "codecs" wraps this builtin module for use within Python.
The codec registry is accessible via:
register(search_function) -> None
lookup(encoding) -> CodecInfo object
The builtin Unicode codecs use the following interface:
<encoding>_encode(Unicode_object[,errors='strict']) ->
(string object, bytes consumed)
<encoding>_decode(char_buffer_obj[,errors='strict']) ->
(Unicode object, bytes consumed)
<encoding>_encode() interfaces also accept non-Unicode object as
input. The objects are then converted to Unicode using
PyUnicode_FromObject() prior to applying the conversion.
These <encoding>s are available: utf_8, unicode_escape,
raw_unicode_escape, unicode_internal, latin_1, ascii (7-bit),
mbcs (on win32).
Written by <NAME> (<EMAIL>).
Copyright (c) Corporation for National Research Initiatives.
------------------------------------------------------------------------ */
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#ifdef MS_WINDOWS
#include <windows.h>
#endif
/*[clinic input]
module _codecs
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=e1390e3da3cb9deb]*/
#include "clinic/_codecsmodule.c.h"
/* --- Registry ----------------------------------------------------------- */
/*[clinic input]
_codecs.register
search_function: object
/
Register a codec search function.
Search functions are expected to take one argument, the encoding name in
all lower case letters, and either return None, or a tuple of functions
(encoder, decoder, stream_reader, stream_writer) (or a CodecInfo object).
[clinic start generated code]*/
static PyObject *
_codecs_register(PyModuleDef *module, PyObject *search_function)
/*[clinic end generated code: output=d17608b6ad380eb8 input=369578467955cae4]*/
{
    /* PyCodec_Register returns 0 on success and -1 with an exception set
       on failure (e.g. a non-callable argument). */
    if (PyCodec_Register(search_function))
        return NULL;
    Py_RETURN_NONE;
}

/*[clinic input]
_codecs.lookup
    encoding: str
    /

Looks up a codec tuple in the Python codec registry and returns a CodecInfo object.
[clinic start generated code]*/

static PyObject *
_codecs_lookup_impl(PyModuleDef *module, const char *encoding)
/*[clinic end generated code: output=798e41aff0c04ef6 input=3c572c0db3febe9c]*/
{
    /* _PyCodec_Lookup caches results, so repeated lookups are cheap. */
    return _PyCodec_Lookup(encoding);
}

/*[clinic input]
_codecs.encode
    obj: object
    encoding: str(c_default="NULL") = sys.getdefaultencoding()
    errors: str(c_default="NULL") = "strict"

Encodes obj using the codec registered for encoding.

encoding defaults to the default encoding. errors may be given to set a
different error handling scheme. Default is 'strict' meaning that encoding
errors raise a ValueError. Other possible values are 'ignore', 'replace'
and 'backslashreplace' as well as any other name registered with
codecs.register_error that can handle ValueErrors.
[clinic start generated code]*/

static PyObject *
_codecs_encode_impl(PyModuleDef *module, PyObject *obj, const char *encoding,
                    const char *errors)
/*[clinic end generated code: output=5c073f62249c8d7c input=2440d769df020a0e]*/
{
    /* NULL encoding means the clinic default was used; resolve it to the
       interpreter-wide default at call time. */
    if (encoding == NULL)
        encoding = PyUnicode_GetDefaultEncoding();

    /* Encode via the codec registry */
    return PyCodec_Encode(obj, encoding, errors);
}

/*[clinic input]
_codecs.decode
    obj: object
    encoding: str(c_default="NULL") = sys.getdefaultencoding()
    errors: str(c_default="NULL") = "strict"

Decodes obj using the codec registered for encoding.

encoding defaults to the default encoding. errors may be given to set a
different error handling scheme. Default is 'strict' meaning that encoding
errors raise a ValueError. Other possible values are 'ignore', 'replace'
and 'backslashreplace' as well as any other name registered with
codecs.register_error that can handle ValueErrors.
[clinic start generated code]*/

static PyObject *
_codecs_decode_impl(PyModuleDef *module, PyObject *obj, const char *encoding,
                    const char *errors)
/*[clinic end generated code: output=c81cbf6189a7f878 input=a351e5f5baad1544]*/
{
    /* Same default-encoding resolution as _codecs.encode above. */
    if (encoding == NULL)
        encoding = PyUnicode_GetDefaultEncoding();

    /* Decode via the codec registry */
    return PyCodec_Decode(obj, encoding, errors);
}
/* --- Helpers ------------------------------------------------------------ */
/*[clinic input]
_codecs._forget_codec
encoding: str
/
Purge the named codec from the internal codec lookup cache
[clinic start generated code]*/
static PyObject *
_codecs__forget_codec_impl(PyModuleDef *module, const char *encoding)
/*[clinic end generated code: output=b56a9b99d2d28080 input=18d5d92d0e386c38]*/
{
    /* Test-support helper: drop one entry from the codec lookup cache. */
    if (_PyCodec_Forget(encoding) < 0) {
        return NULL;
    };
    Py_RETURN_NONE;
}

/* Build the (result_object, length) 2-tuple that every codec entry point
   in this module returns.  The "N" format steals the reference to
   'decoded', so callers must not DECREF it themselves; passing NULL
   simply propagates an already-set error. */
static
PyObject *codec_tuple(PyObject *decoded,
                      Py_ssize_t len)
{
    if (decoded == NULL)
        return NULL;
    return Py_BuildValue("Nn", decoded, len);
}
/* --- String codecs ------------------------------------------------------ */
/*[clinic input]
_codecs.escape_decode
data: Py_buffer(accept={str, buffer})
errors: str(accept={str, NoneType}) = NULL
/
[clinic start generated code]*/
static PyObject *
_codecs_escape_decode_impl(PyModuleDef *module, Py_buffer *data,
                           const char *errors)
/*[clinic end generated code: output=648fa3e78d03e658 input=0018edfd99db714d]*/
{
    /* Interpret backslash escapes in a bytes-like object; the whole input
       is always consumed, hence data->len as the second tuple item. */
    PyObject *decoded = PyBytes_DecodeEscape(data->buf, data->len,
                                             errors, 0, NULL);
    return codec_tuple(decoded, data->len);
}

/*[clinic input]
_codecs.escape_encode
    data: object(subclass_of='&PyBytes_Type')
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_escape_encode_impl(PyModuleDef *module, PyObject *data,
                           const char *errors)
/*[clinic end generated code: output=fcd6f34fe4111c50 input=da9ded00992f32f2]*/
{
    Py_ssize_t size;
    Py_ssize_t newsize;
    PyObject *v;

    size = PyBytes_GET_SIZE(data);
    /* Worst case each input byte expands to a 4-char \xNN escape, so
       guard the 4*size computation against overflow first. */
    if (size > PY_SSIZE_T_MAX / 4) {
        PyErr_SetString(PyExc_OverflowError,
                        "string is too large to encode");
        return NULL;
    }
    newsize = 4*size;
    v = PyBytes_FromStringAndSize(NULL, newsize);

    if (v == NULL) {
        return NULL;
    }
    else {
        Py_ssize_t i;
        char c;
        char *p = PyBytes_AS_STRING(v);

        for (i = 0; i < size; i++) {
            /* There's at least enough room for a hex escape */
            assert(newsize - (p - PyBytes_AS_STRING(v)) >= 4);
            c = PyBytes_AS_STRING(data)[i];
            /* Quote and backslash are escaped; common control characters
               get their mnemonic escapes; everything outside printable
               ASCII falls back to \xNN. */
            if (c == '\'' || c == '\\')
                *p++ = '\\', *p++ = c;
            else if (c == '\t')
                *p++ = '\\', *p++ = 't';
            else if (c == '\n')
                *p++ = '\\', *p++ = 'n';
            else if (c == '\r')
                *p++ = '\\', *p++ = 'r';
            else if (c < ' ' || c >= 0x7f) {
                *p++ = '\\';
                *p++ = 'x';
                *p++ = Py_hexdigits[(c & 0xf0) >> 4];
                *p++ = Py_hexdigits[c & 0xf];
            }
            else
                *p++ = c;
        }
        *p = '\0';
        /* Shrink the over-allocated buffer down to what was written. */
        if (_PyBytes_Resize(&v, (p - PyBytes_AS_STRING(v)))) {
            return NULL;
        }
    }

    return codec_tuple(v, size);
}
/* --- Decoder ------------------------------------------------------------ */
/*[clinic input]
_codecs.unicode_internal_decode
obj: object
errors: str(accept={str, NoneType}) = NULL
/
[clinic start generated code]*/
static PyObject *
_codecs_unicode_internal_decode_impl(PyModuleDef *module, PyObject *obj,
                                     const char *errors)
/*[clinic end generated code: output=9fe47c2cd8807d92 input=8d57930aeda170c6]*/
{
    /* A str decodes to itself: just return a new reference.  Anything
       else must expose a simple buffer, which is decoded from the
       internal Py_UNICODE representation. */
    if (PyUnicode_Check(obj)) {
        if (PyUnicode_READY(obj) < 0)
            return NULL;
        Py_INCREF(obj);
        return codec_tuple(obj, PyUnicode_GET_LENGTH(obj));
    }
    else {
        Py_buffer view;
        PyObject *result;
        if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) != 0)
            return NULL;

        result = codec_tuple(
                _PyUnicode_DecodeUnicodeInternal(view.buf, view.len, errors),
                view.len);
        PyBuffer_Release(&view);
        return result;
    }
}
/*[clinic input]
_codecs.utf_7_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = NULL
final: int(c_default="0") = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_7_decode_impl(PyModuleDef *module, Py_buffer *data,
                          const char *errors, int final)
/*[clinic end generated code: output=ca945e907e72e827 input=bc4d6247ecdb01e6]*/
{
    /* Chunked-decoding pattern used by all the UTF decoders below: when
       'final' is false the stateful decoder reports back how many bytes
       it really consumed (a trailing incomplete sequence is left for the
       caller's next chunk); when 'final' is true it must consume all of
       the input, so 'consumed' keeps its initial value data->len. */
    Py_ssize_t consumed = data->len;
    PyObject *decoded = PyUnicode_DecodeUTF7Stateful(data->buf, data->len,
                                                     errors,
                                                     final ? NULL : &consumed);
    return codec_tuple(decoded, consumed);
}

/*[clinic input]
_codecs.utf_8_decode
    data: Py_buffer
    errors: str(accept={str, NoneType}) = NULL
    final: int(c_default="0") = False
    /
[clinic start generated code]*/

static PyObject *
_codecs_utf_8_decode_impl(PyModuleDef *module, Py_buffer *data,
                          const char *errors, int final)
/*[clinic end generated code: output=7309f9ff4ef5c9b6 input=39161d71e7422ee2]*/
{
    Py_ssize_t consumed = data->len;
    PyObject *decoded = PyUnicode_DecodeUTF8Stateful(data->buf, data->len,
                                                     errors,
                                                     final ? NULL : &consumed);
    return codec_tuple(decoded, consumed);
}

/*[clinic input]
_codecs.utf_16_decode
    data: Py_buffer
    errors: str(accept={str, NoneType}) = NULL
    final: int(c_default="0") = False
    /
[clinic start generated code]*/

static PyObject *
_codecs_utf_16_decode_impl(PyModuleDef *module, Py_buffer *data,
                           const char *errors, int final)
/*[clinic end generated code: output=8d2fa0507d9bef2c input=f3cf01d1461007ce]*/
{
    /* byteorder 0 = auto-detect from a BOM (native order otherwise). */
    int byteorder = 0;
    /* This is overwritten unless final is true. */
    Py_ssize_t consumed = data->len;
    PyObject *decoded = PyUnicode_DecodeUTF16Stateful(data->buf, data->len,
                                                      errors, &byteorder,
                                                      final ? NULL : &consumed);
    return codec_tuple(decoded, consumed)
/*[clinic input]
_codecs.unicode_escape_decode
data: Py_buffer(accept={str, buffer})
errors: str(accept={str, NoneType}) = NULL
/
[clinic start generated code]*/
static PyObject *
_codecs_unicode_escape_decode_impl(PyModuleDef *module, Py_buffer *data,
                                   const char *errors)
/*[clinic end generated code: output=d1aa63f2620c4999 input=49fd27d06813a7f5]*/
{
    /* Non-stateful decoders: the full input is always consumed, so the
       tuple's length item is simply data->len. */
    PyObject *decoded = PyUnicode_DecodeUnicodeEscape(data->buf, data->len,
                                                      errors);
    return codec_tuple(decoded, data->len);
}

/*[clinic input]
_codecs.raw_unicode_escape_decode
    data: Py_buffer(accept={str, buffer})
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_raw_unicode_escape_decode_impl(PyModuleDef *module, Py_buffer *data,
                                       const char *errors)
/*[clinic end generated code: output=0bf96cc182d81379 input=770903a211434ebc]*/
{
    PyObject *decoded = PyUnicode_DecodeRawUnicodeEscape(data->buf, data->len,
                                                         errors);
    return codec_tuple(decoded, data->len);
}

/*[clinic input]
_codecs.latin_1_decode
    data: Py_buffer
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_latin_1_decode_impl(PyModuleDef *module, Py_buffer *data,
                            const char *errors)
/*[clinic end generated code: output=66b916f5055aaf13 input=5cad0f1759c618ec]*/
{
    PyObject *decoded = PyUnicode_DecodeLatin1(data->buf, data->len, errors);
    return codec_tuple(decoded, data->len);
}

/*[clinic input]
_codecs.ascii_decode
    data: Py_buffer
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_ascii_decode_impl(PyModuleDef *module, Py_buffer *data,
                          const char *errors)
/*[clinic end generated code: output=7f213a1b5cdafc65 input=ad1106f64037bd16]*/
{
    PyObject *decoded = PyUnicode_DecodeASCII(data->buf, data->len, errors);
    return codec_tuple(decoded, data->len);
}

/*[clinic input]
_codecs.charmap_decode
    data: Py_buffer
    errors: str(accept={str, NoneType}) = NULL
    mapping: object = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_charmap_decode_impl(PyModuleDef *module, Py_buffer *data,
                            const char *errors, PyObject *mapping)
/*[clinic end generated code: output=87d27f365098bbae input=19712ca35c5a80e2]*/
{
    PyObject *decoded;

    /* A None mapping selects the Latin-1 fast path inside
       PyUnicode_DecodeCharmap. */
    if (mapping == Py_None)
        mapping = NULL;

    decoded = PyUnicode_DecodeCharmap(data->buf, data->len, mapping, errors);
    return codec_tuple(decoded, data->len);
}
#ifdef HAVE_MBCS
/*[clinic input]
_codecs.mbcs_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = NULL
final: int(c_default="0") = False
/
[clinic start generated code]*/
static PyObject *
_codecs_mbcs_decode_impl(PyModuleDef *module, Py_buffer *data,
                         const char *errors, int final)
/*[clinic end generated code: output=0ebaf3a5b20e53fa input=d492c1ca64f4fa8a]*/
{
    /* Windows-only: decode via the current ANSI code page.  Same
       chunked 'consumed' protocol as the UTF decoders. */
    Py_ssize_t consumed = data->len;
    PyObject *decoded = PyUnicode_DecodeMBCSStateful(data->buf, data->len,
                                                     errors, final ? NULL : &consumed);
    return codec_tuple(decoded, consumed);
}

/*[clinic input]
_codecs.code_page_decode
    codepage: int
    data: Py_buffer
    errors: str(accept={str, NoneType}) = NULL
    final: int(c_default="0") = False
    /
[clinic start generated code]*/

static PyObject *
_codecs_code_page_decode_impl(PyModuleDef *module, int codepage,
                              Py_buffer *data, const char *errors, int final)
/*[clinic end generated code: output=4318e3d9971e31ba input=4f3152a304e21d51]*/
{
    /* Windows-only: decode via an explicitly numbered code page. */
    Py_ssize_t consumed = data->len;
    PyObject *decoded = PyUnicode_DecodeCodePageStateful(codepage,
                                                         data->buf, data->len,
                                                         errors,
                                                         final ? NULL : &consumed);
    return codec_tuple(decoded, consumed);
}
#endif /* HAVE_MBCS */
/* --- Encoder ------------------------------------------------------------ */
/*[clinic input]
_codecs.readbuffer_encode
data: Py_buffer(accept={str, buffer})
errors: str(accept={str, NoneType}) = NULL
/
[clinic start generated code]*/
static PyObject *
_codecs_readbuffer_encode_impl(PyModuleDef *module, Py_buffer *data,
                               const char *errors)
/*[clinic end generated code: output=319cc24083299859 input=b7c322b89d4ab923]*/
{
    /* Trivial "encoder": copy the raw buffer bytes into a bytes object. */
    PyObject *result = PyBytes_FromStringAndSize(data->buf, data->len);
    return codec_tuple(result, data->len);
}

/*[clinic input]
_codecs.unicode_internal_encode
    obj: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_unicode_internal_encode_impl(PyModuleDef *module, PyObject *obj,
                                     const char *errors)
/*[clinic end generated code: output=be08457068ad503b input=8628f0280cf5ba61]*/
{
    /* The codec is deprecated; warn once per call site (raises if the
       warning is turned into an error). */
    if (PyErr_WarnEx(PyExc_DeprecationWarning,
                     "unicode_internal codec has been deprecated",
                     1))
        return NULL;

    if (PyUnicode_Check(obj)) {
        Py_UNICODE *u;
        Py_ssize_t len, size;

        if (PyUnicode_READY(obj) < 0)
            return NULL;

        /* Serialize the legacy wchar-based representation. */
        u = PyUnicode_AsUnicodeAndSize(obj, &len);
        if (u == NULL)
            return NULL;
        /* Guard the len * sizeof(Py_UNICODE) multiplication. */
        if ((size_t)len > (size_t)PY_SSIZE_T_MAX / sizeof(Py_UNICODE))
            return PyErr_NoMemory();
        size = len * sizeof(Py_UNICODE);
        return codec_tuple(PyBytes_FromStringAndSize((const char*)u, size),
                           PyUnicode_GET_LENGTH(obj));
    }
    else {
        /* Non-str inputs are passed through as their raw buffer bytes. */
        Py_buffer view;
        PyObject *result;
        if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) != 0)
            return NULL;

        result = codec_tuple(PyBytes_FromStringAndSize(view.buf, view.len),
                             view.len);
        PyBuffer_Release(&view);
        return result;
    }
}
/*[clinic input]
_codecs.utf_7_encode
str: object
errors: str(accept={str, NoneType}) = NULL
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_7_encode_impl(PyModuleDef *module, PyObject *str,
                          const char *errors)
/*[clinic end generated code: output=a7accc496a32b759 input=fd91a78f103b0421]*/
{
    PyObject *v;

    /* Coerce to str first (PyUnicode_FromObject accepts str subclasses);
       this pattern is shared by all the encoders below.  The second tuple
       item is the number of *characters* consumed, not bytes produced. */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(_PyUnicode_EncodeUTF7(str, 0, 0, errors),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}

/*[clinic input]
_codecs.utf_8_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_utf_8_encode_impl(PyModuleDef *module, PyObject *str,
                          const char *errors)
/*[clinic end generated code: output=ec831d80e7aedede input=2c22d40532f071f3]*/
{
    PyObject *v;

    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(PyUnicode_AsEncodedString(str, "utf-8", errors),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
/* This version provides access to the byteorder parameter of the
builtin UTF-16 codecs as optional third argument. It defaults to 0
which means: use the native byte order and prepend the data with a
BOM mark.
*/
/*[clinic input]
_codecs.utf_16_encode
str: object
errors: str(accept={str, NoneType}) = NULL
byteorder: int = 0
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_16_encode_impl(PyModuleDef *module, PyObject *str,
                           const char *errors, int byteorder)
/*[clinic end generated code: output=93ac58e960a9ee4d input=3935a489b2d5385e]*/
{
    PyObject *v;

    /* byteorder: 0 = native order with a BOM, -1 = little-endian,
       +1 = big-endian (the latter two write no BOM). */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(_PyUnicode_EncodeUTF16(str, errors, byteorder),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}

/*[clinic input]
_codecs.utf_16_le_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_utf_16_le_encode_impl(PyModuleDef *module, PyObject *str,
                              const char *errors)
/*[clinic end generated code: output=422bedb8da34fb66 input=bc27df05d1d20dfe]*/
{
    PyObject *v;

    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    /* Fixed little-endian variant of utf_16_encode above. */
    v = codec_tuple(_PyUnicode_EncodeUTF16(str, errors, -1),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}

/*[clinic input]
_codecs.utf_16_be_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_utf_16_be_encode_impl(PyModuleDef *module, PyObject *str,
                              const char *errors)
/*[clinic end generated code: output=3aa7ee9502acdd77 input=5a69d4112763462b]*/
{
    PyObject *v;

    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    /* Fixed big-endian variant of utf_16_encode above. */
    v = codec_tuple(_PyUnicode_EncodeUTF16(str, errors, +1),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
/* This version provides access to the byteorder parameter of the
builtin UTF-32 codecs as optional third argument. It defaults to 0
which means: use the native byte order and prepend the data with a
BOM mark.
*/
/*[clinic input]
_codecs.utf_32_encode
str: object
errors: str(accept={str, NoneType}) = NULL
byteorder: int = 0
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_32_encode_impl(PyModuleDef *module, PyObject *str,
                           const char *errors, int byteorder)
/*[clinic end generated code: output=3e7d5a003b02baed input=434a1efa492b8d58]*/
{
    PyObject *v;

    /* byteorder: 0 = native order with a BOM, -1 = little-endian,
       +1 = big-endian (the latter two write no BOM). */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(_PyUnicode_EncodeUTF32(str, errors, byteorder),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}

/*[clinic input]
_codecs.utf_32_le_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_utf_32_le_encode_impl(PyModuleDef *module, PyObject *str,
                              const char *errors)
/*[clinic end generated code: output=5dda641cd33dbfc2 input=dfa2d7dc78b99422]*/
{
    PyObject *v;

    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    /* Fixed little-endian variant of utf_32_encode above. */
    v = codec_tuple(_PyUnicode_EncodeUTF32(str, errors, -1),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}

/*[clinic input]
_codecs.utf_32_be_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/

static PyObject *
_codecs_utf_32_be_encode_impl(PyModuleDef *module, PyObject *str,
                              const char *errors)
/*[clinic end generated code: output=ccca8b44d91a7c7a input=4595617b18169002]*/
{
    PyObject *v;

    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    /* Fixed big-endian variant of utf_32_encode above. */
    v = codec_tuple(_PyUnicode_EncodeUTF32(str, errors, +1),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
/* Encode `str` using the 'unicode_escape' codec (backslash escapes). */
/*[clinic input]
_codecs.unicode_escape_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/
static PyObject *
_codecs_unicode_escape_encode_impl(PyModuleDef *module, PyObject *str,
                                   const char *errors)
/*[clinic end generated code: output=389f23d2b8f8d80b input=8273506f14076912]*/
{
    PyObject *v;
    /* Coerce to str and ensure the canonical representation is ready.
       Note: `errors` is accepted for API symmetry but is not forwarded,
       since PyUnicode_AsUnicodeEscapeString takes no errors argument. */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(PyUnicode_AsUnicodeEscapeString(str),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
/* Encode `str` using the 'raw_unicode_escape' codec. As with
   unicode_escape above, `errors` is accepted but not forwarded. */
/*[clinic input]
_codecs.raw_unicode_escape_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/
static PyObject *
_codecs_raw_unicode_escape_encode_impl(PyModuleDef *module, PyObject *str,
                                       const char *errors)
/*[clinic end generated code: output=fec4e39d6ec37a62 input=181755d5dfacef3c]*/
{
    PyObject *v;
    /* Coerce to str and ensure the canonical representation is ready. */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(PyUnicode_AsRawUnicodeEscapeString(str),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
/* Encode `str` as Latin-1 (ISO 8859-1). */
/*[clinic input]
_codecs.latin_1_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/
static PyObject *
_codecs_latin_1_encode_impl(PyModuleDef *module, PyObject *str,
                            const char *errors)
/*[clinic end generated code: output=ecf00eb8e48c889c input=f03f6dcf1d84bee4]*/
{
    PyObject *v;
    /* Coerce to str and ensure the canonical representation is ready. */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(_PyUnicode_AsLatin1String(str, errors),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
/* Encode `str` as ASCII. */
/*[clinic input]
_codecs.ascii_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/
static PyObject *
_codecs_ascii_encode_impl(PyModuleDef *module, PyObject *str,
                          const char *errors)
/*[clinic end generated code: output=a9d18fc6b6b91cfb input=d87e25a10a593fee]*/
{
    PyObject *v;
    /* Coerce to str and ensure the canonical representation is ready. */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(_PyUnicode_AsASCIIString(str, errors),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
/* Encode `str` through a character mapping object. */
/*[clinic input]
_codecs.charmap_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    mapping: object = NULL
    /
[clinic start generated code]*/
static PyObject *
_codecs_charmap_encode_impl(PyModuleDef *module, PyObject *str,
                            const char *errors, PyObject *mapping)
/*[clinic end generated code: output=14ca42b83853c643 input=85f4172661e8dad9]*/
{
    PyObject *v;
    /* Passing None for the mapping is treated the same as omitting it:
       a NULL mapping is handed down to the C encoder. */
    if (mapping == Py_None)
        mapping = NULL;
    /* Coerce to str and ensure the canonical representation is ready. */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(_PyUnicode_EncodeCharmap(str, mapping, errors),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
/* Build an encoding map object from a decoding string `map`; thin wrapper
   around PyUnicode_BuildEncodingMap. */
/*[clinic input]
_codecs.charmap_build
    map: unicode
    /
[clinic start generated code]*/
static PyObject *
_codecs_charmap_build_impl(PyModuleDef *module, PyObject *map)
/*[clinic end generated code: output=9485b58fa44afa6a input=d91a91d1717dbc6d]*/
{
    return PyUnicode_BuildEncodingMap(map);
}
#ifdef HAVE_MBCS
/* Windows-only codecs: encode via the operating system's code page API. */
/*[clinic input]
_codecs.mbcs_encode
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/
static PyObject *
_codecs_mbcs_encode_impl(PyModuleDef *module, PyObject *str,
                         const char *errors)
/*[clinic end generated code: output=d1a013bc68798bd7 input=65c09ee1e4203263]*/
{
    PyObject *v;
    /* Coerce to str and ensure the canonical representation is ready. */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    /* CP_ACP selects the system's current ANSI code page. */
    v = codec_tuple(PyUnicode_EncodeCodePage(CP_ACP, str, errors),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
/* Like mbcs_encode, but with a caller-supplied Windows code page number. */
/*[clinic input]
_codecs.code_page_encode
    code_page: int
    str: object
    errors: str(accept={str, NoneType}) = NULL
    /
[clinic start generated code]*/
static PyObject *
_codecs_code_page_encode_impl(PyModuleDef *module, int code_page,
                              PyObject *str, const char *errors)
/*[clinic end generated code: output=3b406618dbfbce25 input=c8562ec460c2e309]*/
{
    PyObject *v;
    /* Coerce to str and ensure the canonical representation is ready. */
    str = PyUnicode_FromObject(str);
    if (str == NULL || PyUnicode_READY(str) < 0) {
        Py_XDECREF(str);
        return NULL;
    }
    v = codec_tuple(PyUnicode_EncodeCodePage(code_page,
                                             str,
                                             errors),
                    PyUnicode_GET_LENGTH(str));
    Py_DECREF(str);
    return v;
}
#endif /* HAVE_MBCS */
/* --- Error handler registry --------------------------------------------- */
/*[clinic input]
_codecs.register_error
    errors: str
    handler: object
    /
Register the specified error handler under the name errors.
handler must be a callable object, that will be called with an exception
instance containing information about the location of the encoding/decoding
error and must return a (replacement, new position) tuple.
[clinic start generated code]*/
static PyObject *
_codecs_register_error_impl(PyModuleDef *module, const char *errors,
                            PyObject *handler)
/*[clinic end generated code: output=be00d3b1849ce68a input=5e6709203c2e33fe]*/
{
    /* A non-zero return signals failure; the exception is assumed to be
       set by PyCodec_RegisterError itself. */
    if (PyCodec_RegisterError(errors, handler))
        return NULL;
    Py_RETURN_NONE;
}
/* NOTE(review): the docstring signature below advertises the parameter as
   "errors" while the clinic declaration names it "name". Changing either
   requires regenerating the Argument Clinic checksums, so the mismatch is
   only flagged here. */
/*[clinic input]
_codecs.lookup_error
    name: str
    /
lookup_error(errors) -> handler
Return the error handler for the specified error handling name or raise a
LookupError, if no handler exists under this name.
[clinic start generated code]*/
static PyObject *
_codecs_lookup_error_impl(PyModuleDef *module, const char *name)
/*[clinic end generated code: output=731e6df8c83c6158 input=4775dd65e6235aba]*/
{
    return PyCodec_LookupError(name);
}
/* --- Module API --------------------------------------------------------- */
/* Module method table. Each METHODDEF macro expands to the PyMethodDef
   entry emitted by Argument Clinic, or to nothing when the corresponding
   function is compiled out (e.g. the MBCS/code-page entries, which are
   guarded by HAVE_MBCS above). */
static PyMethodDef _codecs_functions[] = {
    _CODECS_REGISTER_METHODDEF
    _CODECS_LOOKUP_METHODDEF
    _CODECS_ENCODE_METHODDEF
    _CODECS_DECODE_METHODDEF
    _CODECS_ESCAPE_ENCODE_METHODDEF
    _CODECS_ESCAPE_DECODE_METHODDEF
    _CODECS_UTF_8_ENCODE_METHODDEF
    _CODECS_UTF_8_DECODE_METHODDEF
    _CODECS_UTF_7_ENCODE_METHODDEF
    _CODECS_UTF_7_DECODE_METHODDEF
    _CODECS_UTF_16_ENCODE_METHODDEF
    _CODECS_UTF_16_LE_ENCODE_METHODDEF
    _CODECS_UTF_16_BE_ENCODE_METHODDEF
    _CODECS_UTF_16_DECODE_METHODDEF
    _CODECS_UTF_16_LE_DECODE_METHODDEF
    _CODECS_UTF_16_BE_DECODE_METHODDEF
    _CODECS_UTF_16_EX_DECODE_METHODDEF
    _CODECS_UTF_32_ENCODE_METHODDEF
    _CODECS_UTF_32_LE_ENCODE_METHODDEF
    _CODECS_UTF_32_BE_ENCODE_METHODDEF
    _CODECS_UTF_32_DECODE_METHODDEF
    _CODECS_UTF_32_LE_DECODE_METHODDEF
    _CODECS_UTF_32_BE_DECODE_METHODDEF
    _CODECS_UTF_32_EX_DECODE_METHODDEF
    _CODECS_UNICODE_ESCAPE_ENCODE_METHODDEF
    _CODECS_UNICODE_ESCAPE_DECODE_METHODDEF
    _CODECS_UNICODE_INTERNAL_ENCODE_METHODDEF
    _CODECS_UNICODE_INTERNAL_DECODE_METHODDEF
    _CODECS_RAW_UNICODE_ESCAPE_ENCODE_METHODDEF
    _CODECS_RAW_UNICODE_ESCAPE_DECODE_METHODDEF
    _CODECS_LATIN_1_ENCODE_METHODDEF
    _CODECS_LATIN_1_DECODE_METHODDEF
    _CODECS_ASCII_ENCODE_METHODDEF
    _CODECS_ASCII_DECODE_METHODDEF
    _CODECS_CHARMAP_ENCODE_METHODDEF
    _CODECS_CHARMAP_DECODE_METHODDEF
    _CODECS_CHARMAP_BUILD_METHODDEF
    _CODECS_READBUFFER_ENCODE_METHODDEF
    _CODECS_MBCS_ENCODE_METHODDEF
    _CODECS_MBCS_DECODE_METHODDEF
    _CODECS_CODE_PAGE_ENCODE_METHODDEF
    _CODECS_CODE_PAGE_DECODE_METHODDEF
    _CODECS_REGISTER_ERROR_METHODDEF
    _CODECS_LOOKUP_ERROR_METHODDEF
    _CODECS__FORGET_CODEC_METHODDEF
    {NULL, NULL} /* sentinel */
};
/* Module definition for the _codecs extension module. */
static struct PyModuleDef codecsmodule = {
        PyModuleDef_HEAD_INIT,
        "_codecs",              /* m_name */
        NULL,                   /* m_doc: no module docstring */
        -1,                     /* m_size: module keeps global state; does
                                   not support multiple instances */
        _codecs_functions,      /* m_methods */
        NULL,                   /* m_reload */
        NULL,                   /* m_traverse */
        NULL,                   /* m_clear */
        NULL                    /* m_free */
};
/* Module entry point: create and return the _codecs module object. */
PyMODINIT_FUNC
PyInit__codecs(void)
{
    return PyModule_Create(&codecsmodule);
}
|
#!/bin/bash
# Strict mode: exit on any error (-e), on use of an undefined variable (-u),
# and on a failure anywhere inside a pipeline (-o pipefail).
set -e
set -u
set -o pipefail
# Dataset configuration. tr_/cv_/tt_ prefixes select the train / validation /
# test partitions respectively (see the *_set variables below).
min_or_max=min # "min" or "max". This is to determine how the mixtures are generated in local/data.sh.
sample_rate=8k
train_set="tr_${min_or_max}_${sample_rate}"
valid_set="cv_${min_or_max}_${sample_rate}"
# Trailing space presumably allows appending further space-separated test
# sets -- TODO confirm.
test_sets="tt_${min_or_max}_${sample_rate} "
# Delegate to the generic enhancement recipe; any extra command-line
# arguments are forwarded verbatim via "$@" and can override the defaults.
./enh.sh \
    --train_set "${train_set}" \
    --valid_set "${valid_set}" \
    --test_sets "${test_sets}" \
    --fs "${sample_rate}" \
    --lang en \
    --ngpu 1 \
    --local_data_opts "--sample_rate ${sample_rate} --min_or_max ${min_or_max}" \
    --enh_config ./conf/tuning/train_enh_rnn_tf.yaml \
    "$@"
|
<filename>internal/ceres/block_jacobian_writer.cc
// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2022 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
// used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: <EMAIL> (<NAME>)
#include "ceres/block_jacobian_writer.h"
#include <algorithm>
#include <memory>
#include "ceres/block_evaluate_preparer.h"
#include "ceres/block_sparse_matrix.h"
#include "ceres/internal/eigen.h"
#include "ceres/internal/export.h"
#include "ceres/parameter_block.h"
#include "ceres/program.h"
#include "ceres/residual_block.h"
namespace ceres {
namespace internal {
using std::vector;
namespace {
// Given the residual block ordering, build a lookup table to determine which
// per-parameter jacobian goes where in the overall program jacobian.
//
// Since we expect to use a Schur type linear solver to solve the LM step, take
// extra care to place the E blocks and the F blocks contiguously. E blocks are
// the first num_eliminate_blocks parameter blocks as indicated by the parameter
// block ordering. The remaining parameter blocks are the F blocks.
//
// TODO(keir): Consider if we should use a boolean for each parameter block
// instead of num_eliminate_blocks.
void BuildJacobianLayout(const Program& program,
                         int num_eliminate_blocks,
                         vector<int*>* jacobian_layout,
                         vector<int>* jacobian_layout_storage) {
  const vector<ResidualBlock*>& residual_blocks = program.residual_blocks();
  // Iterate over all the active residual blocks and determine how many E blocks
  // are there. This will determine where the F blocks start in the jacobian
  // matrix. Also compute the number of jacobian blocks.
  int f_block_pos = 0;
  int num_jacobian_blocks = 0;
  for (auto* residual_block : residual_blocks) {
    const int num_residuals = residual_block->NumResiduals();
    const int num_parameter_blocks = residual_block->NumParameterBlocks();
    // Advance f_block_pos over each E block for this residual.
    for (int j = 0; j < num_parameter_blocks; ++j) {
      ParameterBlock* parameter_block = residual_block->parameter_blocks()[j];
      if (!parameter_block->IsConstant()) {
        // Only count blocks for active parameters.
        num_jacobian_blocks++;
        if (parameter_block->index() < num_eliminate_blocks) {
          f_block_pos += num_residuals * parameter_block->TangentSize();
        }
      }
    }
  }
  // After the first pass, f_block_pos equals the total number of scalar
  // jacobian entries occupied by all E blocks, i.e. the offset at which the
  // first F block begins.
  // We now know that the E blocks are laid out starting at zero, and the F
  // blocks are laid out starting at f_block_pos. Iterate over the residual
  // blocks again, and this time fill the jacobian_layout array with the
  // position information.
  jacobian_layout->resize(program.NumResidualBlocks());
  jacobian_layout_storage->resize(num_jacobian_blocks);
  int e_block_pos = 0;
  // jacobian_pos walks the flat storage vector; each residual block's entry
  // in jacobian_layout points at the first of its positions there (constant
  // parameter blocks get no slot at all).
  int* jacobian_pos = &(*jacobian_layout_storage)[0];
  for (int i = 0; i < residual_blocks.size(); ++i) {
    const ResidualBlock* residual_block = residual_blocks[i];
    const int num_residuals = residual_block->NumResiduals();
    const int num_parameter_blocks = residual_block->NumParameterBlocks();
    (*jacobian_layout)[i] = jacobian_pos;
    for (int j = 0; j < num_parameter_blocks; ++j) {
      ParameterBlock* parameter_block = residual_block->parameter_blocks()[j];
      const int parameter_block_index = parameter_block->index();
      if (parameter_block->IsConstant()) {
        continue;
      }
      const int jacobian_block_size =
          num_residuals * parameter_block->TangentSize();
      // E blocks are packed from offset 0, F blocks from f_block_pos, each
      // advanced by the size of the block just placed.
      if (parameter_block_index < num_eliminate_blocks) {
        *jacobian_pos = e_block_pos;
        e_block_pos += jacobian_block_size;
      } else {
        *jacobian_pos = f_block_pos;
        f_block_pos += jacobian_block_size;
      }
      jacobian_pos++;
    }
  }
}
} // namespace
// Precomputes the jacobian layout for `program` so that per-residual
// jacobian blocks can later be written directly into their final positions.
// `program` must outlive this writer.
BlockJacobianWriter::BlockJacobianWriter(const Evaluator::Options& options,
                                         Program* program)
    : program_(program) {
  // CHECK_GE permits zero (no E blocks to eliminate); the previous message
  // ("must be greater than 0") contradicted the actual check.
  CHECK_GE(options.num_eliminate_blocks, 0)
      << "num_eliminate_blocks must be non-negative.";
  BuildJacobianLayout(*program,
                      options.num_eliminate_blocks,
                      &jacobian_layout_,
                      &jacobian_layout_storage_);
}
// Create evaluate preparers that point directly into the final jacobian,
// which makes the final Write() a nop. One preparer is built per thread so
// they can be used concurrently.
std::unique_ptr<BlockEvaluatePreparer[]>
BlockJacobianWriter::CreateEvaluatePreparers(int num_threads) {
  const int max_derivatives =
      program_->MaxDerivativesPerResidualBlock();
  auto preparers = std::make_unique<BlockEvaluatePreparer[]>(num_threads);
  for (int t = 0; t != num_threads; ++t) {
    preparers[t].Init(jacobian_layout_.data(), max_derivatives);
  }
  return preparers;
}
// Allocate a BlockSparseMatrix whose block structure mirrors the program:
// one column block per parameter block, one row block per residual block,
// and one cell per active (non-constant) parameter of each residual.
std::unique_ptr<SparseMatrix> BlockJacobianWriter::CreateJacobian() const {
  // NOTE: `bs` is handed to the BlockSparseMatrix constructor below, which
  // is assumed to take ownership of the raw pointer -- confirm against the
  // BlockSparseMatrix API.
  auto* bs = new CompressedRowBlockStructure;
  const vector<ParameterBlock*>& parameter_blocks =
      program_->parameter_blocks();
  // Construct the column blocks.
  bs->cols.resize(parameter_blocks.size());
  for (int i = 0, cursor = 0; i < parameter_blocks.size(); ++i) {
    // The program is expected to contain only active (indexed, non-constant)
    // parameter blocks at this point; these checks enforce that invariant.
    CHECK_NE(parameter_blocks[i]->index(), -1);
    CHECK(!parameter_blocks[i]->IsConstant());
    bs->cols[i].size = parameter_blocks[i]->TangentSize();
    bs->cols[i].position = cursor;
    cursor += bs->cols[i].size;
  }
  // Construct the cells in each row.
  const vector<ResidualBlock*>& residual_blocks = program_->residual_blocks();
  int row_block_position = 0;
  bs->rows.resize(residual_blocks.size());
  for (int i = 0; i < residual_blocks.size(); ++i) {
    const ResidualBlock* residual_block = residual_blocks[i];
    CompressedRow* row = &bs->rows[i];
    row->block.size = residual_block->NumResiduals();
    row->block.position = row_block_position;
    row_block_position += row->block.size;
    // Size the row by the number of active parameters in this residual.
    const int num_parameter_blocks = residual_block->NumParameterBlocks();
    int num_active_parameter_blocks = 0;
    for (int j = 0; j < num_parameter_blocks; ++j) {
      if (residual_block->parameter_blocks()[j]->index() != -1) {
        num_active_parameter_blocks++;
      }
    }
    row->cells.resize(num_active_parameter_blocks);
    // Add layout information for the active parameters in this row.
    for (int j = 0, k = 0; j < num_parameter_blocks; ++j) {
      const ParameterBlock* parameter_block =
          residual_block->parameter_blocks()[j];
      if (!parameter_block->IsConstant()) {
        Cell& cell = row->cells[k];
        cell.block_id = parameter_block->index();
        // Scalar offset precomputed by BuildJacobianLayout for the k-th
        // active parameter of residual block i.
        cell.position = jacobian_layout_[i][k];
        // Only increment k for active parameters, since there is only layout
        // information for active parameters.
        k++;
      }
    }
    // Keep cells ordered by block id, as expected by downstream consumers.
    std::sort(row->cells.begin(), row->cells.end(), CellLessThan);
  }
  return std::make_unique<BlockSparseMatrix>(bs);
}
} // namespace internal
} // namespace ceres
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.2-147
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.01.26 at 02:04:22 PM MST
//
package net.opengis.gml._311;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
* An abstract operation on coordinates that does not include any change of datum. The best-known example of a coordinate conversion is a map projection. The parameters describing coordinate conversions are defined rather than empirically derived. Note that some conversions have no parameters.
*
* This abstract complexType is expected to be extended for well-known operation methods with many Conversion instances, in Application Schemas that define operation-method-specialized element names and contents. This conversion uses an operation method, usually with associated parameter values. However, operation methods and parameter values are directly associated with concrete subtypes, not with this abstract type. All concrete types derived from this type shall extend this type to include a "usesMethod" element that references the "OperationMethod" element. Similarly, all concrete types derived from this type shall extend this type to include zero or more elements each named "uses...Value" that each use the type of an element substitutable for the "_generalParameterValue" element.
*
* <p>Java class for AbstractGeneralConversionType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="AbstractGeneralConversionType">
* <complexContent>
* <restriction base="{http://www.opengis.net/gml}AbstractCoordinateOperationType">
* <sequence>
* <element ref="{http://www.opengis.net/gml}metaDataProperty" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://www.opengis.net/gml}coordinateOperationName"/>
* <element ref="{http://www.opengis.net/gml}coordinateOperationID" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://www.opengis.net/gml}remarks" minOccurs="0"/>
* <element ref="{http://www.opengis.net/gml}validArea" minOccurs="0"/>
* <element ref="{http://www.opengis.net/gml}scope" minOccurs="0"/>
* <element ref="{http://www.opengis.net/gml}_positionalAccuracy" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* <attribute ref="{http://www.opengis.net/gml}id use="required""/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "AbstractGeneralConversionType")
@XmlSeeAlso({
    ConversionType.class
})
public abstract class AbstractGeneralConversionType
    extends AbstractCoordinateOperationType
{
    // Intentionally empty: the schema restriction adds no new content, so
    // all state is inherited from AbstractCoordinateOperationType. Concrete
    // conversions (see @XmlSeeAlso) extend this marker type. This class is
    // JAXB-generated; hand edits are lost on schema recompilation.
}
|
<reponame>seongchangkim/board_laravel
/******/ (() => { // webpackBootstrap
var __webpack_exports__ = {};
/*!******************************!*\
  !*** ./resources/js/show.js ***!
  \******************************/
// Wires up the update/delete/back buttons on the board "show" page.
// NOTE: this is webpack build output; edit resources/js/show.js instead.
var updateBtn = document.querySelector('.updateBtn');
var deleteBtn = document.querySelector('.deleteBtn');
var backBtn = document.querySelector('.backBtn');
// Extract the post id from the current URL. Splitting an href such as
// "scheme://host/<page>/<id>" on '/' puts the id at index 4 -- assumes
// exactly that route shape; TODO confirm against the route definitions.
var url = window.location.href.split('/');
var number = url[4];
updateBtn.addEventListener('click', function () {
  location.href = "/update/".concat(number);
});
deleteBtn.addEventListener('click', function () {
  location.href = "/delete/".concat(number);
});
// Back simply returns to the previous history entry.
backBtn.addEventListener('click', function () {
  history.back();
});
/******/ })()
; |
#!/bin/sh
# Build the fat capsule from a clean state and launch it.
# Exit immediately if any command fails.
set -e
# Remove stale build outputs of the sibling subprojects before rebuilding.
rm -rf ../subproject1/my-api/build ../subproject2/my-impl/build
./gradlew clean
# -i: info-level logging for the capsule packaging task.
./gradlew -i packageFatCapsule
# Run the freshly built self-contained capsule jar.
java -jar my-app/build/libs/my-app-capsule.jar
|
<gh_stars>1-10
import React, { Component } from 'react'
import BarPieChart from '../BarPieCharts'
import BarChart from '../BarChart'
import D3Chart from '../D3Chart'
import BudgetChartD3 from '../BudgetChartD3'
import RadialD3Chart from '../RadialD3Chart'
export default class BarChartWrapper extends Component {
  // Instantiate the chart implementation matching props.type once the DOM
  // node is available. NOTE(review): the constructed chart instance is only
  // the return value of componentDidMount, which React ignores, so it is
  // never stored for later updates or teardown -- confirm this is intended.
  componentDidMount () {
    switch (this.props.type) {
      case 'budget':
        return new BudgetChartD3(this.refs.chart, this.props.data)
      case 'bar-pie':
        // NOTE(review): render() only declares ref='chart', so
        // this.refs.container is undefined here -- verify that BarPieChart
        // tolerates a missing container argument.
        return new BarPieChart(this.refs.chart, this.props.data, this.props.categories, this.refs.container)
      case 'bar':
        return new BarChart(this.refs.chart)
      case 'gauge':
        return new RadialD3Chart(this.refs.chart, this.props.data)
      // Fallback: generic D3 chart for any unrecognized type.
      default: return new D3Chart(this.refs.chart, this.props.data, this.props.categoryType)
    }
  }
  // Mount point targeted by the chart classes above. Uses a legacy string
  // ref (deprecated in modern React); consider migrating to React.createRef.
  render () {
    return (
      <div ref='chart' id='chart' />
    )
  }
}
|
#!/bin/bash
# Smoke-test driver for fpm: builds, runs and tests every bundled example
# package. Uses the fpm executable passed as $1, or `fpm` from $PATH.
# -e: abort on first failure; -x: echo each command for CI logs.
set -ex
# Run from the repository root (this script lives one level below it).
cd "$(dirname $0)/.."
if [ "$1" ]; then
  fpm="$1"
else
  fpm=fpm
fi
# Build example packages
pushd example_packages/
# Start from a clean slate for every example.
rm -rf ./*/build
pushd hello_world
"$fpm" build
"$fpm" run --target hello_world
"$fpm" run
popd
pushd hello_fpm
"$fpm" build
"$fpm" run --target hello_fpm
popd
pushd circular_test
"$fpm" build
popd
pushd circular_example
"$fpm" build
popd
pushd hello_complex
"$fpm" build
"$fpm" test
"$fpm" run --target say_Hello
"$fpm" run --target say_goodbye
"$fpm" test --target greet_test
"$fpm" test --target farewell_test
popd
pushd hello_complex_2
"$fpm" build
"$fpm" run --target say_hello_world
"$fpm" run --target say_goodbye
"$fpm" test --target greet_test
"$fpm" test --target farewell_test
popd
pushd with_examples
"$fpm" build
"$fpm" run --example --target demo-prog
"$fpm" run --target demo-prog
popd
pushd auto_discovery_off
"$fpm" build
"$fpm" run --target auto_discovery_off
"$fpm" test --target my_test
# With auto-discovery disabled, undeclared sources must NOT be built:
# assert the executables were not produced.
test ! -x ./build/gfortran_*/app/unused
test ! -x ./build/gfortran_*/test/unused_test
popd
pushd with_c
"$fpm" build
"$fpm" run --target with_c
popd
pushd submodules
"$fpm" build
popd
pushd program_with_module
"$fpm" build
"$fpm" run --target Program_with_module
popd
pushd link_executable
"$fpm" build
"$fpm" run --target gomp_test
popd
pushd fortran_includes
"$fpm" build
popd
pushd c_includes
"$fpm" build
popd
pushd c_header_only
"$fpm" build
popd
# Cleanup
rm -rf ./*/build
|
<gh_stars>10-100
/**
* (C) Copyright IBM Corp. 2015, 2016
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.ibm.stocator.fs.swift2d.systemtests;
import java.net.URI;
import java.nio.ByteBuffer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import com.ibm.stocator.fs.ObjectStoreFileSystem;
import static com.ibm.stocator.fs.common.Utils.getHost;
/**
 * System tests that stream generated CSV-like records into object storage
 * through {@link ObjectStoreFileSystem}, rolling over to a new object each
 * time a background timer marks the current one as expired.
 */
public class StreamingSwiftTest {
  /**
   * Flag set by the timer thread and polled by the writer loop.
   * Declared {@code volatile} so the write in
   * {@link #objectTimerExpired(String)} (timer thread) is guaranteed to
   * become visible to the polling loop in
   * {@link #accessObjectWithSpaceTest()} (test thread); without it the loop
   * could spin forever on a stale cached value.
   */
  private volatile boolean objectExpired = false;
  /**
   * Start a background thread that sleeps for {@code time} milliseconds and
   * then flags the current object as expired.
   *
   * @param time sleep duration in milliseconds
   * @param file name of the object the timer belongs to (currently unused by
   *             the expiration callback)
   */
  private void createObjectTimer(final double time, final String file) {
    // This thread handles the timePerFile expiration policy
    // This thread sleeps for the time specified.
    // When it wakes up, it will set the file as expired and close it
    // When the next tuple comes in, we check that the file has
    // expired and will create a new file for writing
    Thread fObjectTimerThread = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          System.out.println("Object Timer Started: " + time);
          Thread.sleep((long) time);
          objectTimerExpired(file);
        } catch (Exception e) {
          System.out.println("Exception in object timer thread: " + e.getMessage());
        }
      }
    });
    // Non-daemon: keep the JVM alive until the timer fires.
    fObjectTimerThread.setDaemon(false);
    fObjectTimerThread.start();
  }
  /** Timer callback: mark the object currently being written as expired. */
  public synchronized void objectTimerExpired(String file) {
    System.out.println("Timer Expired!!!");
    objectExpired = true;
  }
  // Disabled test: kept for reference; exercises access to a public Swift
  // container whose name contains a space.
  // @Test
  public void accessPublicSwiftContainerWithSpaceTest() throws Exception {
    FileSystem fs = new ObjectStoreFileSystem();
    Configuration conf = new Configuration();
    String uriString = conf.get("fs.swift2d.test.uri");
    // Skip silently when the test URI is not configured.
    Assume.assumeNotNull(uriString);
    // adding suffix with space to the container name
    String scheme = "swift2d";
    String host = getHost(URI.create(uriString));
    // String origContainerName = getContainerName(host);
    // String newContainerName = origContainerName + " t";
    // uriString = uriString.replace(origContainerName, newContainerName);
    // use URI ctor that encodes authority according to the rules specified
    // in RFC 2396, section 5.2, step 7
    URI publicContainerURI = new URI(scheme, getHost(URI.create(uriString)), "/", null, null);
    fs.initialize(publicContainerURI, conf);
    FileStatus objectFS = null;
    try {
      objectFS = fs.getFileStatus(new Path(publicContainerURI));
    } catch (Exception e) {
      e.printStackTrace();
      Assert.assertNotNull("Unable to access public object ", objectFS);
    }
  }
  /**
   * Streams records into five successive objects; each object is written
   * until its 90-second timer expires, then the stream is closed and the
   * next object begins.
   */
  @Test
  public void accessObjectWithSpaceTest() throws Exception {
    FileSystem fs = new ObjectStoreFileSystem();
    Configuration conf = new Configuration();
    String uriString = conf.get("fs.swift2d.test.uri");
    Assume.assumeNotNull(uriString);
    // adding suffix with space to the container name
    String scheme = "swift2d";
    String objectName = "/a/testObject.txt";
    URI publicContainerURI = new URI(uriString + objectName);
    // initialize file system
    fs.initialize(publicContainerURI, conf);
    FileStatus objectFS = null;
    Path f = null;
    try {
      FSDataOutputStream fsDataOutputStream = null;
      String currObjName = null;
      for (int i = 0; i < 5; i++) {
        currObjName = objectName + String.valueOf(i);
        // create timer
        createObjectTimer(90000.0, currObjName);
        publicContainerURI = new URI(scheme + "://"
            + getHost(URI.create(uriString)) + "/" + currObjName);
        f = new Path(publicContainerURI.toString());
        fsDataOutputStream = fs.create(f);
        String line = null;
        while (!objectExpired) {
          // generates input
          byte[] bytes = new byte[0];
          line = "\"2017-7-15 3:6:43\"," + String.valueOf(Math.random()) + ",6,18" + "\n";
          ByteBuffer linesBB = ByteBuffer.wrap(line.getBytes());
          bytes = new byte[linesBB.limit()];
          linesBB.get(bytes);
          // writes to output
          fsDataOutputStream.write(bytes);
          // simulate delays in input
          Thread.sleep(50);
        }
        fsDataOutputStream.close();
        // Reset the flag for the next object/timer pair.
        objectExpired = false;
      }
    } catch (Exception e) {
      e.printStackTrace();
      // objectFS is still null here, so this fails the test with the message.
      Assert.assertNotNull("Unable to access public object.", objectFS);
    } finally {
      fs.delete(f, true);
    }
  }
}
|
#!/bin/bash
# Check for the --yes command line argument to skip yes/no prompts
if [ "$1" = "--yes" ]
then
	YES=1
else
	YES=0
fi
# From here on, referencing an unset variable is a fatal error.
set -o nounset
# Fallback shim: emulate `lsb_release` from /etc/lsb-release when the
# utility itself is not installed.
if ! which lsb_release > /dev/null
then
	function lsb_release {
		if [ -f /etc/lsb-release ]
		then
			cat /etc/lsb-release | grep DISTRIB_ID | cut -d= -f 2
		else
			echo Unknown
		fi
	}
fi
# Detect the distribution and its major version; a positional argument
# ($1 without --yes, $2 with it) overrides autodetection.
if [ $YES -eq 0 ]
then
	distro="${1:-$(lsb_release -i|cut -f 2)}"
	distro_version="${1:-$(lsb_release -r|cut -f 2|cut -c1-2)}"
else
	distro="${2:-$(lsb_release -i|cut -f 2)}"
	distro_version="${2:-$(lsb_release -r|cut -f 2|cut -c1-2)}"
fi
REQUIRED_UTILS="wget tar python"
APTCMD="apt"
APTGETCMD="apt-get"
YUMCMD="yum"
# Per-distro package candidate lists (Kali and Ubuntu 17.x lack some
# packages present elsewhere, e.g. cramfsprogs).
if [ $distro = "Kali" ]
then
	APT_CANDIDATES="git build-essential libqt4-opengl mtd-utils gzip bzip2 tar arj lhasa p7zip p7zip-full cabextract util-linux firmware-mod-kit cramfsswap squashfs-tools zlib1g-dev liblzma-dev liblzo2-dev sleuthkit default-jdk lzop cpio"
elif [ $distro_version = "17" ]
then
	APT_CANDIDATES="git build-essential libqt4-opengl mtd-utils gzip bzip2 tar arj lhasa p7zip p7zip-full cabextract cramfsswap squashfs-tools zlib1g-dev liblzma-dev liblzo2-dev sleuthkit default-jdk lzop srecord cpio"
else
	APT_CANDIDATES="git build-essential libqt4-opengl mtd-utils gzip bzip2 tar arj lhasa p7zip p7zip-full cabextract cramfsprogs cramfsswap squashfs-tools zlib1g-dev liblzma-dev liblzo2-dev sleuthkit default-jdk lzop srecord cpio"
fi
PYTHON2_APT_CANDIDATES="python-crypto python-lzo python-lzma python-pip python-tk"
PYTHON3_APT_CANDIDATES="python3-crypto python3-pip python3-tk"
PYTHON3_YUM_CANDIDATES=""
YUM_CANDIDATES="git gcc gcc-c++ make openssl-devel qtwebkit-devel qt-devel gzip bzip2 tar arj p7zip p7zip-plugins cabextract squashfs-tools zlib zlib-devel lzo lzo-devel xz xz-compat-libs xz-libs xz-devel xz-lzma-compat python-backports-lzma lzip pyliblzma perl-Compress-Raw-Lzma lzop srecord"
PYTHON2_YUM_CANDIDATES="python-pip python-Bottleneck cpio"
APT_CANDIDATES="$APT_CANDIDATES $PYTHON2_APT_CANDIDATES"
YUM_CANDIDATES="$YUM_CANDIDATES $PYTHON2_YUM_CANDIDATES"
PIP_COMMANDS="pip"
# Check for root privileges
if [ $UID -eq 0 ]
then
	SUDO=""
else
	SUDO="sudo"
	REQUIRED_UTILS="sudo $REQUIRED_UTILS"
fi
# Build and install yaffshiv (YAFFS filesystem extractor) from source.
function install_yaffshiv
{
	git clone https://github.com/devttys0/yaffshiv
	(cd yaffshiv && $SUDO python2 setup.py install)
	$SUDO rm -rf yaffshiv
}
# Build and install sasquatch (patched unsquashfs for vendor formats).
function install_sasquatch
{
	git clone https://github.com/devttys0/sasquatch
	(cd sasquatch && $SUDO ./build.sh)
	$SUDO rm -rf sasquatch
}
# Install jefferson (JFFS2 extractor); it requires cstruct 1.0 via pip.
function install_jefferson
{
	install_pip_package "cstruct==1.0"
	git clone https://github.com/sviehb/jefferson
	(cd jefferson && $SUDO python2 setup.py install)
	$SUDO rm -rf jefferson
}
# Download the proprietary StuffIt binary and copy `unstuff` into
# /usr/local/bin (note: fetched over plain HTTP).
function install_unstuff
{
	mkdir -p /tmp/unstuff
	cd /tmp/unstuff
	wget -O - http://my.smithmicro.com/downloads/files/stuffit520.611linux-i386.tar.gz | tar -zxv
	$SUDO cp bin/unstuff /usr/local/bin/
	cd -
	rm -rf /tmp/unstuff
}
# Install ubi_reader (UBI/UBIFS extractor) pinned to a known-good commit.
function install_ubireader
{
	git clone https://github.com/jrspruitt/ubi_reader
	# Some UBIFS extraction breaks after this commit, due to "Added fatal error check if UBI block extends beyond file size"
	# (see this commit: https://github.com/jrspruitt/ubi_reader/commit/af678a5234dc891e8721ec985b1a6e74c77620b6)
	# Reset to a known working commit.
	(cd ubi_reader && git reset --hard 0955e6b95f07d849a182125919a1f2b6790d5b51 && $SUDO python setup.py install)
	$SUDO rm -rf ubi_reader
}
# Install a pip package with every configured pip command (pip, and pip3
# when python3 is present -- see PIP_COMMANDS below).
function install_pip_package
{
	PACKAGE="$1"
	for PIP_COMMAND in $PIP_COMMANDS
	do
		$SUDO $PIP_COMMAND install $PACKAGE
	done
}
# find_path NAME
# Prints "checking for NAME...yes" and returns 0 if NAME is found on $PATH,
# otherwise prints "...no" and returns 1.
# Fixes vs. original: the argument is quoted (names containing spaces or
# glob characters no longer word-split), and the exit status of `which` is
# tested directly instead of via a separate `$?` check.
function find_path
{
	FILE_NAME="$1"
	echo -ne "checking for $FILE_NAME..."
	if which "$FILE_NAME" > /dev/null
	then
		echo "yes"
		return 0
	else
		echo "no"
		return 1
	fi
}
# Make sure the user really wants to do this
if [ $YES -eq 0 ]
then
	echo ""
	echo "WARNING: This script will download and install all required and optional dependencies for binwalk."
	echo "         This script has only been tested on, and is only intended for, Debian based systems."
	echo "         Some dependencies are downloaded via unsecure (HTTP) protocols."
	echo "         This script requires internet access."
	echo "         This script requires root privileges."
	echo ""
	if [ $distro != Unknown ]
	then
		echo "         $distro $distro_version detected"
	else
		echo "WARNING: Distro not detected, using package-manager defaults"
	fi
	echo ""
	echo -n "Continue [y/N]? "
	read YN
	# Anything not matching y/yes (case-insensitive) aborts.
	if [ "$(echo "$YN" | grep -i -e 'y' -e 'yes')" == "" ]
	then
		echo "Quitting..."
		exit 1
	fi
elif [ $distro != Unknown ]
then
	echo "$distro $distro_version detected"
else
	echo "WARNING: Distro not detected, using package-manager defaults"
fi
# Check to make sure we have all the required utilities installed
NEEDED_UTILS=""
for UTIL in $REQUIRED_UTILS
do
	find_path $UTIL
	if [ $? -eq 1 ]
	then
		NEEDED_UTILS="$NEEDED_UTILS $UTIL"
	fi
done
# Check for supported package managers and set the PKG_* envars appropriately
# Preference order: apt, then apt-get, then yum.
find_path $APTCMD
if [ $? -eq 1 ]
then
	find_path $APTGETCMD
	if [ $? -eq 1 ]
	then
		find_path $YUMCMD
		if [ $? -eq 1 ]
		then
			NEEDED_UTILS="$NEEDED_UTILS $APTCMD/$APTGETCMD/$YUMCMD"
		else
			PKGCMD="$YUMCMD"
			PKGCMD_OPTS="-y install"
			PKG_CANDIDATES="$YUM_CANDIDATES"
			PKG_PYTHON3_CANDIDATES="$PYTHON3_YUM_CANDIDATES"
		fi
	else
		PKGCMD="$APTGETCMD"
		PKGCMD_OPTS="install -y"
		PKG_CANDIDATES="$APT_CANDIDATES"
		PKG_PYTHON3_CANDIDATES="$PYTHON3_APT_CANDIDATES"
	fi
else
	# apt exists: dry-run (-s) a trivial install to verify it is usable,
	# falling back to apt-get otherwise.
	if "$APTCMD" install -s -y dpkg > /dev/null
	then
		PKGCMD="$APTCMD"
		PKGCMD_OPTS="install -y"
		PKG_CANDIDATES="$APT_CANDIDATES"
		PKG_PYTHON3_CANDIDATES="$PYTHON3_APT_CANDIDATES"
	else
		PKGCMD="$APTGETCMD"
		PKGCMD_OPTS="install -y"
		PKG_CANDIDATES="$APT_CANDIDATES"
		PKG_PYTHON3_CANDIDATES="$PYTHON3_APT_CANDIDATES"
	fi
fi
if [ "$NEEDED_UTILS" != "" ]
then
	echo "Please install the following required utilities: $NEEDED_UTILS"
	exit 1
fi
# Check to see if we should install modules for python3 as well
find_path python3
if [ $? -eq 0 ]
then
	PKG_CANDIDATES="$PKG_CANDIDATES $PKG_PYTHON3_CANDIDATES"
	PIP_COMMANDS="pip3 $PIP_COMMANDS"
fi
# Do the install(s)
cd /tmp
$SUDO $PKGCMD $PKGCMD_OPTS $PKG_CANDIDATES
if [ $? -ne 0 ]
then
	echo "Package installation failed: $PKG_CANDIDATES"
	exit 1
fi
# Python modules and from-source tools (defined above).
install_pip_package matplotlib
install_pip_package capstone
install_sasquatch
install_yaffshiv
install_jefferson
install_unstuff
install_ubireader
|
#!/bin/bash
# Sign a previously generated client CSR with the auth CA, then assemble the
# full certificate chain (client + auth CA + root CA) into a single PEM.
# Abort immediately if any step fails, so a failed `openssl ca` cannot
# silently produce a chain file built from stale or missing certificates.
set -e
read -p 'Entrez le nom du certificat (le même que le CSR) : ' cert_name
# Quote the user-supplied name so certificates whose names contain spaces
# or shell metacharacters do not word-split the paths.
# NOTE(review): -enddate 20200101000000Z is in the past, so newly signed
# certificates are already expired -- confirm the intended validity end.
openssl ca \
	-config ../etc/auth-ca.conf \
	-in "../certs/$cert_name.csr" \
	-out "../certs/$cert_name.crt" \
	-policy extern_pol \
	-extensions client_ext \
	-enddate 20200101000000Z
# Chain order: leaf first, then intermediate (auth) CA, then root CA.
cat "../certs/$cert_name.crt" ../ca/auth-ca.crt ../ca/root-ca.crt > \
	"../certs/$cert_name-chain.pem"
|
#!/bin/bash
# Launcher/stopper for dbreplicator2. Maintains a pid file so only one
# instance runs at a time; `dbreplicator2 stop` terminates a running one.
DBREP_HOME=$(cd "`dirname $0`/.." && pwd)
export LANG=ru_RU.UTF-8
export LANGUAGE=ru
export LC_CTYPE=ru_RU.UTF-8
export TZ=UTC
logfile=/dev/stdout
pidfile=$DBREP_HOME/run/dbreplicator2.pid
# Print a message, remove the pid file and exit. The exit status is that of
# the last command executed before quit was invoked.
quit() {
    local ex=$?
    echo -e "${1}"
    rm $pidfile &>/dev/null
    exit ${ex}
}
# Handle shutdown request: `dbreplicator2 stop`
if [ x"$1" == x"stop" ]; then
    echo -n Stopping dbreplicator2...
    if [ -f $pidfile ]; then
        read pid < $pidfile
        if ps $pid &>/dev/null ; then
            echo Process $pid found. Sending SIGTERM
            kill $pid && quit "Process dbreplicator2 terminated success."
        fi
    else
        echo " Error: pid file ($pidfile) not found. Please, stop dbreplicator2 manually!"
    fi
    exit
fi
# Refuse to start when an instance already owns the pid file.
# BUGFIX: the original called an undefined `die` function (the helper is
# named `quit`) and interpolated an unset $cmd into the messages.
test -f $pidfile && {
    read pid < $pidfile
    ps $pid &>/dev/null &&
        quit "dbreplicator2 is already running... exiting" ||
        quit "dbreplicator2 is dead but pidfile exists... exiting"
}
echo -n "$$" > $pidfile
classdir=$DBREP_HOME/lib/
jcmd="java -Xmx512m -cp ""${classdir}dbreplicator2.jar:${classdir}log4j-1.2.17.jar:${classdir}mail-1.4.jar:${classdir}commons-cli-1.2.jar:${classdir}hibernate-core-4.2.6.Final.jar:${classdir}hibernate-commons-annotations-4.0.2.Final.jar:${classdir}hibernate-jpa-2.0-api-1.0.1.Final.jar:${classdir}h2-1.3.173.jar:${classdir}bonecp-0.7.1.RELEASE.jar:${classdir}dom4j-1.6.1.jar:${classdir}jboss-logging-3.1.0.GA.jar:${classdir}jboss-transaction-api_1.1_spec-1.0.1.Final.jar:${classdir}javassist-3.15.0-GA.jar:${classdir}antlr-2.7.7.jar:${classdir}slf4j-log4j12-1.5.5.jar:${classdir}slf4j-api-1.5.10.jar:${classdir}guava-r08.jar:${DBREP_HOME}/resources/"""
# BUGFIX: the original used `exec`, which replaced this shell and made the
# pid-file cleanup below unreachable dead code. Run the JVM in the
# foreground instead, then clean up and propagate its exit status.
${jcmd} ru.taximaxim.dbreplicator2.Application "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" "${10}" "${11}" >> $logfile 2>&1
status=$?
rm $pidfile
exit $status
|
<reponame>minuk8932/Algorithm_BaekJoon<filename>src/implementation/Boj2480.java<gh_stars>1-10
package implementation;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
*
* @author minchoba
* 백준 2480번: 주사위 세개
*
* @see https://www.acmicpc.net/problem/2480/
*
*/
public class Boj2480 {
    private static final int TRIPLE = 10_000;
    private static final int DOUBLE = 1_000;
    private static final int UNI = 100;

    /**
     * Reads three dice values from standard input and prints the prize:
     * 10000 + v*1000 when all three match, 1000 + v*100 when exactly two
     * match (v being the matched value), otherwise 100 * max value.
     */
    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());

        int a = Integer.parseInt(st.nextToken());
        int b = Integer.parseInt(st.nextToken());
        int c = Integer.parseInt(st.nextToken());

        System.out.println(prize(a, b, c));
    }

    /**
     * Computes the prize for a single roll of three dice.
     *
     * @param a first die
     * @param b second die
     * @param c third die
     * @return prize money for the roll
     */
    private static int prize(int a, int b, int c) {
        if (a == b && b == c) {
            // All three dice show the same face.
            return TRIPLE + a * DOUBLE;
        }
        // Exactly one pair (a is part of it when a matches either other die).
        if (a == b || a == c) {
            return DOUBLE + UNI * a;
        }
        if (b == c) {
            return DOUBLE + UNI * b;
        }
        // All different: prize depends on the largest face only.
        return UNI * Math.max(a, Math.max(b, c));
    }
}
|
const { MessageEmbed } = require("discord.js");
const discord = require("discord.js");
const config = require("../../../config.json");
const Viola = require("../../structures/utils.js");
// Discord command that reports bot-wide statistics (guild count, member
// count, memory use) aggregated across all shards in a rich embed.
module.exports = {
    config: {
        name: 'botinfo',
        aliases: ['bt', 'bi', 'bot'],
        cooldown: 10,
        usage: 'botinfo',
        category: 'Info',
        description: 'My informations',
        userPerms: [],
        clientPerms: ['EMBED_LINKS']
    },
    run: async (client, message, args) => {
        // Creation timestamp of the bot user, formatted later via Viola.time.
        let bot = client.user.createdAt;
        // Collect per-shard guild counts, member counts and RSS memory; each
        // promise resolves to an array with one entry per shard.
        const promises = [
            client.shard.fetchClientValues('guilds.cache.size'),
            client.shard.broadcastEval('this.guilds.cache.reduce((acc, guild) => acc + guild.memberCount, 0)'),
            client.shard.broadcastEval(`process.memoryUsage().rss / 1024 / 1024`)
        ];
        return Promise.all(promises)
            .then(results => {
                // Sum each metric across shards.
                const totalGuilds = results[0].reduce((acc, guildCount) => acc + guildCount, 0);
                const totalMembers = results[1].reduce((acc, memberCount) => acc + memberCount, 0);
                const memory = results[2].reduce((acc, memoryusage) => acc + memoryusage, 0);
                // NOTE(review): the first argument to Viola.time below mixes a
                // single quote with backticks ('MM-DD-YYYY`, `HH:mm:ss'), so a
                // single string containing a backtick-comma-backtick sequence
                // is passed rather than two separate format arguments —
                // presumably 'MM-DD-YYYY', 'HH:mm:ss' was intended; verify
                // against Viola.time's signature before changing.
                const embed = new MessageEmbed()
                    .setAuthor(client.user.tag, client.user.avatarURL())
                    .setThumbnail(client.user.avatarURL())
                    .addField('Main', `> **ID: \`${client.user.id}\`\n> My creation date is \`${Viola.time('MM-DD-YYYY`, `HH:mm:ss', bot)}\`\n> Servers: \`${totalGuilds}\`\n> Users: \`${totalMembers}\`\n> Avatar source: ${Viola.arts}\n> GitHub: [\`pixis-star/Viola\`](${Viola.github})\n> Developer: \`${client.users.cache.get('410385863030341653').tag}\`**`, false)
                    .addField('System', `> **Language: [\`JavaScript\`](https://www.javascript.com/)\n> Node version: \`${process.env.NODE_VERSION}\`\n> Discord.js version: \`${discord.version}\`\n> CPU: \`${(process.cpuUsage().system / 1024 / 1024).toFixed(2)}%\`\n> RAM: \`${(memory).toFixed(2)}MB / Unlimited\`\n> Prefix: \`${config.prefix}\`**`, false)
                    .setTimestamp()
                    .setColor(config.color)
                message.channel.send(embed)
            })
            .catch(console.error);
    },
};
|
<filename>src/packages/@ncigdc/components/QuickSearch/types.ts
/** Per-shard statistics attached to a search response. */
export interface IShard {
  /** Number of shards on which the query failed. */
  failed: number;
  /** Number of shards that answered successfully. */
  successful: number;
  /** Total number of shards queried. */
  total: number;
}
/** Entity categories a quick-search hit can belong to. */
export type ISearchHitType =
  | 'gene'
  | 'case'
  | 'ssm'
  | 'occurrence'
  | 'project'
  | 'file'
  | 'annotation'
  | 'report';
/** A single quick-search result. */
export interface ISearchHit {
  /** Unique identifier of the matched entity. */
  _id: string;
  /** Relevance score assigned by the search backend. */
  _score: number;
  /** Category of the matched entity. */
  _type: ISearchHitType;
}
/** Envelope returned by the quick-search endpoint. */
export interface ISearchResponse {
  data: {
    /** Shard-level success/failure statistics. */
    _shards: IShard;
    /** Matching results, ordered by score. */
    hits: ISearchHit[];
    /** Total number of matches (may exceed hits.length). */
    total: number;
    /** True when the query hit the backend time limit. */
    timed_out: boolean;
    /** Query duration reported by the backend. */
    took: number;
  };
  // NOTE(review): "warings" is presumably a typo for "warnings", but the
  // field name is part of the wire/consumer contract — renaming it here
  // would break existing callers.  Fix in coordination with consumers.
  warings: {};
}
|
<reponame>mkralik3/syndesis-qe
package io.syndesis.qe.steps.other;
import io.syndesis.qe.utils.SlackUtils;
import io.syndesis.qe.utils.TestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import com.github.seratch.jslack.api.methods.SlackApiException;
import java.io.IOException;
import io.cucumber.java.en.When;
/**
 * Cucumber step definitions for interacting with Slack during tests.
 */
public class SlackSteps {
    @Lazy
    @Autowired
    private SlackUtils slack;

    /**
     * Asserts that the most recent message on the given channel equals the
     * expected text.
     *
     * @param message expected message text
     * @param channel Slack channel name to inspect
     */
    @When("^.*checks? that last slack message equals \"([^\"]*)\" on channel \"([^\"]*)\"$")
    public void checkMessage(String message, String channel) throws SlackApiException, IOException {
        // Allow Slack delivery to settle when running on slower CI machines.
        TestUtils.sleepForJenkinsDelayIfHigher(2);
        slack.checkLastMessageFromChannel(message, channel);
    }

    /**
     * Sends a message to the given Slack channel.
     * <p>
     * Fix: the pattern previously read {@code send?}, which makes only the
     * "d" optional (matching "sen message" / "send message" but not
     * "sends message").  {@code sends?} matches both "send message" and
     * "sends message", mirroring the {@code checks?} pattern above.
     *
     * @param message text to send
     * @param channel Slack channel name to post to
     */
    @When("^.*sends? message \"([^\"]*)\" on channel \"([^\"]*)\"$")
    public void sendMessage(String message, String channel) throws InterruptedException, SlackApiException, IOException {
        TestUtils.sleepForJenkinsDelayIfHigher(2);
        slack.sendMessage(message, channel);
    }
}
|
import json
from serial_io import SerialIO
from gui_module import run_gui
def simulate_gui(seq):
    """Load JSON sequence files, open the serial connection and run the GUI.

    Args:
        seq: Iterable of paths to JSON sequence files.

    Returns:
        A human-readable status string: a success message, or a description
        of the error that prevented the simulation from running.
    """
    try:
        # Load each sequence file, closing every handle (the original used
        # bare open() calls inside a comprehension and leaked the handles).
        seqs = []
        for path in seq:
            with open(path) as fh:
                seqs.append(json.load(fh))
        # Fix: the original called SerialIO.new_and_start(ser), referencing
        # `ser` before it was ever assigned (NameError, silently converted to
        # a generic error string by the broad except below).  Create the
        # connection without it — TODO confirm new_and_start's expected
        # arguments against serial_io's API.
        ser = SerialIO.new_and_start()
        # Run the GUI with the serial connection and the loaded sequences.
        run_gui(ser, seqs)
        return "GUI simulation completed successfully."
    except FileNotFoundError:
        return "Error: One or more sequence files not found."
    except json.JSONDecodeError:
        return "Error: Invalid JSON format in one or more sequence files."
    except Exception as e:
        return f"Error: {str(e)}"
# Test the function
seq = ["sequence1.json", "sequence2.json"]
result = simulate_gui(seq)
print(result) |
const arr = ["one", "two", "one", "three", "four", "two"];
const filteredArray = arr.filter((value, index) => arr.indexOf(value) === index);
//filteredArray = ["one", "two", "three", "four"] |
/**
 * Encoder and decoder implementations that transform a {@link ChannelBuffer}
 * into a Stream and vice versa.
 * <br>
 * Legacy frameworks that require an InputStream or OutputStream — such as XML
 * parsers, serializers, etc. — can thus be easily integrated into Netty.
 */
package org.rzo.netty.ahessian.io; |
package org.jaudiotagger.audio.asf.data;
import java.io.Serializable;
import java.util.Comparator;
/**
* @author <NAME>
*
*/
/**
 * Orders {@link MetadataDescriptor} instances by name, then type, then
 * language index, then stream number.
 */
public class MetadataDescriptorComparator implements
        Comparator<MetadataDescriptor>, Serializable {

    /** Serialization version marker. */
    private static final long serialVersionUID = 4503738612948660496L;

    /**
     * {@inheritDoc}
     * <p>
     * Fix: the original computed the difference of the container-type
     * ordinals first and then immediately overwrote that result with
     * {@code result = 0;}, so container type never participated in the
     * ordering.  The dead computation has been removed; runtime behaviour
     * is unchanged.
     */
    public int compare(MetadataDescriptor o1, MetadataDescriptor o2) {
        // NOTE(review): these assertions forbid comparing an element with
        // itself, which some sort implementations legitimately do; they only
        // fire when the JVM runs with assertions enabled (-ea).
        assert o1 != o2;
        assert o1 != null && o2 != null;
        int result = o1.getName().compareTo(o2.getName());
        if (result == 0) {
            result = o1.getType() - o2.getType();
        }
        if (result == 0) {
            result = o1.getLanguageIndex() - o2.getLanguageIndex();
        }
        if (result == 0) {
            result = o1.getStreamNumber() - o2.getStreamNumber();
        }
        return result;
    }
}
|
<filename>src/interface.js
import * as helpers from './helpers';
import { isEmpty, isArray, map, clone, each } from 'lodash'
// Augments the prototype of `Target` (a query/schema builder) with the shared
// "interface" methods: SQL formatting, promise coercion, streaming, and
// connection/transaction plumbing.  `Target.prototype.client` and
// `Target.prototype.toSQL` are expected to exist already.
module.exports = function(Target) {

  // Returns the fully-formatted SQL string(s) for this builder with bindings
  // interpolated; multiple statements are joined with ";\n".
  Target.prototype.toQuery = function(tz) {
    let data = this.toSQL(this._method, tz);
    if (!isArray(data)) data = [data];
    return map(data, (statement) => {
      return this.client._formatQuery(statement.sql, statement.bindings, tz);
    }).join(';\n');
  };

  // Create a new instance of the `Runner`, passing in the current object.
  // Making the builder "thenable" is what lets callers `await` it directly.
  Target.prototype.then = function(/* onFulfilled, onRejected */) {
    const result = this.client.runner(this).run()
    return result.then.apply(result, arguments);
  };

  // Add additional "options" to the builder. Typically used for client specific
  // items, like the `mysql` and `sqlite3` drivers.
  Target.prototype.options = function(opts) {
    this._options = this._options || [];
    this._options.push(clone(opts) || {});
    return this;
  };

  // Sets an explicit "connnection" we wish to use for this query.
  Target.prototype.connection = function(connection) {
    this._connection = connection;
    return this;
  };

  // Set a debug flag for the current schema query stack.
  // Called with no arguments it enables debugging.
  Target.prototype.debug = function(enabled) {
    this._debug = arguments.length ? enabled : true;
    return this;
  };

  // Set the transaction object for this query.  A valid transacting client
  // replaces this builder's client; a falsy/empty value is a hard error so
  // misuse fails fast rather than silently running outside the transaction.
  Target.prototype.transacting = function(t) {
    if (t && t.client) {
      if (!t.client.transacting) {
        helpers.warn(`Invalid transaction value: ${t.client}`)
      } else {
        this.client = t.client
      }
    }
    if (isEmpty(t)) {
      helpers.error('Invalid value on transacting call, potential bug')
      throw Error('Invalid transacting value (null, undefined or empty object)')
    }
    return this;
  };

  // Initializes a stream.
  Target.prototype.stream = function(options) {
    return this.client.runner(this).stream(options);
  };

  // Initialize a stream & pipe automatically.
  Target.prototype.pipe = function(writable, options) {
    return this.client.runner(this).pipe(writable, options);
  };

  // Creates a method which "coerces" to a promise, by calling a
  // "then" method on the current `Target`.  Each listed bluebird-style
  // method is forwarded to the promise produced by `then()`.
  each(['bind', 'catch', 'finally', 'asCallback',
    'spread', 'map', 'reduce', 'tap', 'thenReturn',
    'return', 'yield', 'ensure', 'reflect',
    'get', 'mapSeries', 'delay'], function(method) {
    Target.prototype[method] = function() {
      const promise = this.then();
      return promise[method].apply(promise, arguments);
    };
  });

}
|
<gh_stars>1-10
export default class Canvas {
  /**
   * Wraps a 2D canvas context with a math-style coordinate system: the
   * origin is moved to the centre and the y axis points up.
   *
   * @param canvas - the <canvas> element to draw on.
   * @param options - axis: draw coordinate axes on clear (default true);
   *   size: logical width/height of the view (default 512); dash: dash
   *   length for dashed lines (default 8); strokeStyle / fillStyle:
   *   default drawing styles.
   */
  constructor (canvas, {
    axis = true,
    size,
    dash = 8,
    strokeStyle = 'black',
    fillStyle = 'black'
  } = {}) {
    const ctx = this.ctx = canvas.getContext('2d')
    if (!size) size = 512
    this.size = size
    this.dash = dash
    this.axis = axis
    // Fix: remember the default styles.  lineSeg() and polygon() restore
    // `this.strokeStyle` / `this.fillStyle` after a temporary override, but
    // the original never stored them, so the restore assigned `undefined`
    // (silently ignored by the canvas) and the override leaked into
    // subsequent drawing calls.
    this.strokeStyle = strokeStyle
    this.fillStyle = fillStyle
    // Scale to logical units, flip the y axis, and centre the origin.
    const scale = this.scale = canvas.width / size
    ctx.scale(scale, -scale)
    this.translate(size / 2, -size / 2)
    ctx.strokeStyle = strokeStyle
    ctx.fillStyle = fillStyle
    this.clear()
  }

  /** Clears the whole view and redraws the axes when enabled. */
  clear () {
    const { ctx, size, axis } = this
    ctx.clearRect(-size / 2, -size / 2, size, size)
    if (axis) this.drawAxis()
  }

  /** Saves the current context state (styles, transform). */
  save () {
    this.ctx.save()
  }

  /** Restores the most recently saved context state. */
  restore () {
    this.ctx.restore()
  }

  /** Translates the coordinate system by (dx, dy) in logical units. */
  translate (dx, dy) {
    this.ctx.translate(dx, dy)
  }

  /** Draws the x and y axes as dashed lines and labels the origin. */
  drawAxis () {
    // (Unused `ctx`/`size`/`dash` destructuring removed from the original.)
    const O = {x: 0, y: 0}
    const X = {x: 1, y: 0}
    const Y = {x: 0, y: 1}
    this.line(O, X, true)
    this.line(O, Y, true)
    this.point(O, 'O')
  }

  /**
   * Draws text at (x, y).  The context is temporarily re-flipped so the
   * glyphs are not rendered upside-down by the inverted y axis.
   */
  text (x, y, text, filled = false) {
    const { ctx } = this
    ctx.scale(1, -1)
    ctx.font = `16px serif`
    filled ? ctx.fillText(text, x, -y) : ctx.strokeText(text, x, -y)
    ctx.scale(1, -1)
  }

  /**
   * Draws the infinite line through points A and B by clipping it against
   * all four edges of the view and drawing each resulting segment.
   * Coincident points are ignored (no line is defined).
   */
  line (A, B, dashed = false, strokeStyle) {
    if (A.x === B.x && A.y === B.y) return
    const p = this.size / 2
    // Intersections of the line with the top, bottom, right and left edges.
    const ends = [
      { x: A.x + (p - A.y) / (B.y - A.y) * (B.x - A.x), y: p},
      { x: A.x + (-p - A.y) / (B.y - A.y) * (B.x - A.x), y: -p},
      { x: p, y: A.y + (p - A.x) / (B.x - A.x) * (B.y - A.y) },
      { x: -p, y: A.y + (-p - A.x) / (B.x - A.x) * (B.y - A.y) }
    ]
    ends.forEach(P => this.lineSeg(A, P, dashed, strokeStyle))
  }

  /**
   * Draws a straight segment between two points, optionally dashed and/or
   * with a temporary stroke style; the default style is restored afterwards.
   */
  lineSeg ({x: x1, y: y1}, {x: x2, y: y2}, dashed = false, strokeStyle) {
    const { ctx, dash } = this
    ctx.beginPath()
    ctx.moveTo(x1, y1)
    ctx.lineTo(x2, y2)
    if (dashed) ctx.setLineDash([dash, dash])
    if (strokeStyle) ctx.strokeStyle = strokeStyle
    ctx.stroke()
    ctx.setLineDash([])
    ctx.strokeStyle = this.strokeStyle
  }

  /** Draws a circle of radius r centred at (x, y), outlined or filled. */
  circle (x, y, r, filled = false) {
    const { ctx } = this
    ctx.beginPath()
    ctx.ellipse(x, y, r, r, 0, 0, Math.PI * 2)
    filled ? ctx.fill() : ctx.stroke()
  }

  /** Marks a point with a small filled dot and an optional label. */
  point ({x, y}, name) {
    this.circle(x, y, 2, true)
    if (name) this.text(x, y, name, true)
  }

  /**
   * Draws a polygon from an array of [x, y] points, optionally closed,
   * filled with the given fill rule, and with a temporary fill style; the
   * default fill style is restored afterwards.
   */
  polygon (points, { fillStyle = 'black',
    close = true,
    rule = 'nonzero',
    filled = false
  } = {}) {
    const { ctx } = this
    ctx.beginPath()
    ctx.moveTo(...points[0])
    points.slice(1).forEach(point => ctx.lineTo(...point))
    if (close) ctx.closePath()
    ctx.fillStyle = fillStyle
    filled ? ctx.fill(rule) : ctx.stroke()
    ctx.fillStyle = this.fillStyle
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.