text stringlengths 1 1.05M |
|---|
<gh_stars>0
import {
Column,
Entity,
JoinColumn,
ManyToOne,
OneToMany,
OneToOne,
RelationId,
} from 'typeorm';
import { AbstractEntity } from '../data/entities/abstract.entity';
import { User } from 'src/user/user.entity';
import { Image } from '../image/image.entity';
import { PostReaction } from 'src/post-reaction/post-reaction.entity';
// A user-authored post: free-text body, one attached image, and a set of
// reactions. (Indentation appears to have been stripped from this file;
// original formatting is preserved as-is.)
@Entity()
export class Post extends AbstractEntity {
// Free-text content of the post.
@Column()
body: string;
// FK column of the attached image, exposed read-only next to the relation.
@RelationId((post: Post) => post.image)
@Column()
readonly imageId: string;
// One image per post. NOTE(review): onDelete: 'CASCADE' governs what happens
// when the referenced Image row is deleted — confirm the intended direction.
@OneToOne(() => Image, { onDelete: 'CASCADE' })
@JoinColumn()
public image: Image;
// FK column of the authoring user.
@RelationId((post: Post) => post.user)
@Column()
readonly userId: string;
// Author of the post; inverse side is User.posts.
@ManyToOne(() => User, (user) => user.posts, {
onDelete: 'CASCADE',
})
user: User;
// Reactions on this post; cascade persists/removes them together with it.
@OneToMany(() => PostReaction, (postReaction) => postReaction.post, {
cascade: true,
})
postReactions: PostReaction[];
}
|
-- | Number of occurrences of @n@ in the given list.
count :: Int -> [Int] -> Int
count n = length . filter (== n)
-- Demo entry point: prints how many times 2 occurs in the sample list.
main :: IO ()
main = print (count 2 sample)
  where
    sample = [1, 2, 2, 3, 2, 4]
#!/bin/bash
# Copyright Istio Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eux
# ANSI color escapes (prompt-style \[ \] wrappers) used for warnings below.
red='\[\033[0;31m\]'
clr='\[\033[0m\]'
# Required caller-supplied inputs: app name and target namespace.
VM_APP="${VM_APP:?}"
VM_NAME="${VM_NAME:-${VM_APP}}"
VM_NAMESPACE="${VM_NAMESPACE:?}"
# Istio dev build version used to fetch the sidecar .deb further down.
VERSION="${VERSION:?"version, like 1.10-alpha.45c5661eb8c96cebe8fcb467b4c1be3262b0de4c"}"
# GCP project/zone defaults; exported so gcloud picks them up implicitly.
PROJECT="${PROJECT:-mixologist-142215}"
ZONE="${ZONE:-us-central1-c}"
# Scratch directory for generated configs copied to the VM later.
WORK_DIR=/tmp/vm
SERVICE_ACCOUNT=default
export CLOUDSDK_COMPUTE_ZONE="${ZONE}"
export CLOUDSDK_CORE_PROJECT="${PROJECT}"
mkdir -p "${WORK_DIR}"
# docker-copy IMAGE SRC DST
# Copies file SRC out of a container created (but never started) from IMAGE
# into local path DST.
docker-copy() {
  image="${1:?image}"
  src="${2:?src}"
  dst="${3:?dst}"
  # Capture the created container's id instead of relying on a fixed name:
  # the old hard-coded "temp-docker-copy" name collided across concurrent
  # runs, and --rm at create time conflicted with the explicit rm below.
  local cid
  cid="$(docker create "${image}")"
  docker cp "${cid}:${src}" "${dst}"
  # The container was never started, so there is nothing to stop; just
  # remove it (the previous "docker stop" was a no-op on a created container).
  docker rm "${cid}"
}
# Bail out early (successfully) if the VM already exists; the script does not
# reconcile an existing instance.
gcloud compute instances describe "${VM_APP:?}" > /dev/null 2>&1 && { echo "${red}Instance already configured! Warning: script will not update VM.${clr}"; exit 0; }
gcloud compute instances create "${VM_NAME}" \
--image-family debian-10 --image-project debian-cloud \
--machine-type e2-standard-2
# Namespace/service account may already exist; ignore AlreadyExists errors.
kubectl create namespace "${VM_NAMESPACE}" || true
kubectl create serviceaccount "${SERVICE_ACCOUNT}" -n "${VM_NAMESPACE}" || true
# Extract the isotope service graph from the in-cluster config map.
kubectl get cm -n "${VM_NAMESPACE}" service-graph-config -ojsonpath='{.data.service-graph}' > "${WORK_DIR}"/service-graph.yaml
istioctl x workload group create --name "${VM_APP}" --namespace "${VM_NAMESPACE}" --labels app="${VM_APP}" --serviceAccount "${SERVICE_ACCOUNT}" > "${WORK_DIR}"/workloadgroup.yaml
# NOTE(review): the heredoc below immediately overwrites the file generated by
# the istioctl call above (adding a readiness probe); the istioctl output is
# discarded — confirm whether that call is still needed.
cat <<EOF > "${WORK_DIR}"/workloadgroup.yaml
apiVersion: networking.istio.io/v1alpha3
kind: WorkloadGroup
metadata:
name: "${VM_APP}"
namespace: "${VM_NAMESPACE}"
spec:
metadata:
labels:
app: "${VM_APP}"
template:
serviceAccount: "${SERVICE_ACCOUNT}"
probe:
httpGet:
path: /metrics
port: 8080
initialDelaySeconds: 5
periodSeconds: 5
EOF
kubectl --namespace "${VM_NAMESPACE}" apply -f "${WORK_DIR}"/workloadgroup.yaml
# Generate the VM-side bootstrap files (root-cert.pem, cluster.env, mesh.yaml,
# hosts, istio-token) into WORK_DIR for auto-registration.
istioctl x workload entry configure -f "${WORK_DIR}"/workloadgroup.yaml -o "${WORK_DIR}" --autoregister
# Wait until we can ssh
sleep 15
# systemd unit that runs the isotope service on the VM.
cat <<EOF > "${WORK_DIR}"/isotope.service
[Unit]
Description=Isotope
After=network.target
StartLimitIntervalSec=0
[Service]
Type=simple
Restart=always
Environment="SERVICE_NAME=${VM_APP}"
RestartSec=1
ExecStart=/usr/bin/isotope_service --max-idle-connections-per-host=32
[Install]
WantedBy=multi-user.target
EOF
# Pull the isotope binary out of the published image, then ship everything in
# WORK_DIR to the VM's home directory.
docker-copy gcr.io/istio-testing/isotope:0.0.2 /usr/local/bin/isotope_service "${WORK_DIR}"/isotope_service
gcloud compute scp "${WORK_DIR}"/* "${VM_APP}":
# Remote provisioning: install the sidecar, lay out certs/config, and start
# both the istio sidecar and the isotope service.
gcloud compute ssh "${VM_APP}" -- sudo bash -c "\"
mkdir -p /etc/certs /var/run/secrets/tokens /etc/istio/config/ /etc/istio/proxy /etc/config
curl -LO https://storage.googleapis.com/istio-build/dev/${VERSION}/deb/istio-sidecar.deb
sudo dpkg -i istio-sidecar.deb
cp root-cert.pem /etc/certs/root-cert.pem
cp istio-token /var/run/secrets/tokens/istio-token
cp cluster.env /var/lib/istio/envoy/cluster.env
cp mesh.yaml /etc/istio/config/mesh
cp service-graph.yaml /etc/config/service-graph.yaml
cp isotope_service /usr/bin/isotope_service
cp isotope.service /etc/systemd/system/isotope.service
chmod 777 /etc/config/service-graph.yaml
cat hosts >> /etc/hosts
chown -R istio-proxy /var/lib/istio /etc/certs /etc/istio/proxy /etc/istio/config /var/run/secrets /etc/certs/root-cert.pem
systemctl start istio
systemctl start isotope
\""
|
package org.openapitools.model;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.net.URI;
import org.openapitools.model.License;
import org.openapitools.model.ToolType;
import org.openapitools.jackson.nullable.JsonNullable;
import javax.validation.Valid;
import javax.validation.constraints.*;
/**
* Information about an NLP tool
*/
// Generated model class (OpenAPI Generator, SpringCodegen — see @Generated
// below). Prefer regenerating from the OpenAPI spec over hand-editing.
@ApiModel(description = "Information about an NLP tool")
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.SpringCodegen", date = "2021-06-20T21:46:49.832722-07:00[America/Los_Angeles]")
public class Tool {
@JsonProperty("name")
private String name;
@JsonProperty("version")
private String version;
@JsonProperty("license")
private License license;
@JsonProperty("repository")
private String repository;
@JsonProperty("description")
private String description;
@JsonProperty("author")
private String author;
@JsonProperty("authorEmail")
private String authorEmail;
@JsonProperty("url")
private URI url;
@JsonProperty("type")
private ToolType type;
@JsonProperty("apiVersion")
private String apiVersion;
// Fluent setter: assigns the field and returns this for chaining.
public Tool name(String name) {
this.name = name;
return this;
}
/**
* The tool name
* @return name
*/
@ApiModelProperty(required = true, value = "The tool name")
@NotNull
@Pattern(regexp="^[a-z0-9]+(?:-[a-z0-9]+)*$") @Size(min=3,max=60)
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Tool version(String version) {
this.version = version;
return this;
}
/**
* The version of the tool (SemVer string)
* @return version
*/
@ApiModelProperty(required = true, value = "The version of the tool (SemVer string)")
@NotNull
@Pattern(regexp="^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$") @Size(min=1)
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public Tool license(License license) {
this.license = license;
return this;
}
/**
* Get license
* @return license
*/
@ApiModelProperty(required = true, value = "")
@NotNull
@Valid
public License getLicense() {
return license;
}
public void setLicense(License license) {
this.license = license;
}
public Tool repository(String repository) {
this.repository = repository;
return this;
}
/**
* The place where the code lives
* @return repository
*/
@ApiModelProperty(required = true, value = "The place where the code lives")
@NotNull
public String getRepository() {
return repository;
}
public void setRepository(String repository) {
this.repository = repository;
}
public Tool description(String description) {
this.description = description;
return this;
}
/**
* A short, one-sentence summary of the tool
* @return description
*/
@ApiModelProperty(required = true, value = "A short, one-sentence summary of the tool")
@NotNull
@Size(max=280)
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Tool author(String author) {
this.author = author;
return this;
}
/**
* The author of the tool
* @return author
*/
@ApiModelProperty(required = true, value = "The author of the tool")
@NotNull
public String getAuthor() {
return author;
}
public void setAuthor(String author) {
this.author = author;
}
public Tool authorEmail(String authorEmail) {
this.authorEmail = authorEmail;
return this;
}
/**
* The email address of the author
* @return authorEmail
*/
@ApiModelProperty(required = true, value = "The email address of the author")
@NotNull
@javax.validation.constraints.Email
public String getAuthorEmail() {
return authorEmail;
}
public void setAuthorEmail(String authorEmail) {
this.authorEmail = authorEmail;
}
public Tool url(URI url) {
this.url = url;
return this;
}
/**
* The URL to the homepage of the tool
* @return url
*/
@ApiModelProperty(required = true, value = "The URL to the homepage of the tool")
@NotNull
@Valid
public URI getUrl() {
return url;
}
public void setUrl(URI url) {
this.url = url;
}
public Tool type(ToolType type) {
this.type = type;
return this;
}
/**
* Get type
* @return type
*/
@ApiModelProperty(required = true, value = "")
@NotNull
@Valid
public ToolType getType() {
return type;
}
public void setType(ToolType type) {
this.type = type;
}
public Tool apiVersion(String apiVersion) {
this.apiVersion = apiVersion;
return this;
}
/**
* The version of the tool OpenAPI specification
* @return apiVersion
*/
@ApiModelProperty(required = true, value = "The version of the tool OpenAPI specification")
@NotNull
@Pattern(regexp="^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$") @Size(min=1)
public String getApiVersion() {
return apiVersion;
}
public void setApiVersion(String apiVersion) {
this.apiVersion = apiVersion;
}
// Value equality over all ten properties.
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Tool tool = (Tool) o;
return Objects.equals(this.name, tool.name) &&
Objects.equals(this.version, tool.version) &&
Objects.equals(this.license, tool.license) &&
Objects.equals(this.repository, tool.repository) &&
Objects.equals(this.description, tool.description) &&
Objects.equals(this.author, tool.author) &&
Objects.equals(this.authorEmail, tool.authorEmail) &&
Objects.equals(this.url, tool.url) &&
Objects.equals(this.type, tool.type) &&
Objects.equals(this.apiVersion, tool.apiVersion);
}
// Hash code over the same ten properties used by equals.
@Override
public int hashCode() {
return Objects.hash(name, version, license, repository, description, author, authorEmail, url, type, apiVersion);
}
// Multi-line debug representation; nested values are indented via
// toIndentedString.
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class Tool {\n");
sb.append(" name: ").append(toIndentedString(name)).append("\n");
sb.append(" version: ").append(toIndentedString(version)).append("\n");
sb.append(" license: ").append(toIndentedString(license)).append("\n");
sb.append(" repository: ").append(toIndentedString(repository)).append("\n");
sb.append(" description: ").append(toIndentedString(description)).append("\n");
sb.append(" author: ").append(toIndentedString(author)).append("\n");
sb.append(" authorEmail: ").append(toIndentedString(authorEmail)).append("\n");
sb.append(" url: ").append(toIndentedString(url)).append("\n");
sb.append(" type: ").append(toIndentedString(type)).append("\n");
sb.append(" apiVersion: ").append(toIndentedString(apiVersion)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
|
package com.cgfy.mybatis.base.bean.select.condition;
import com.cgfy.mybatis.base.bean.select.Condition;
import io.swagger.annotations.ApiModelProperty;
import tk.mybatis.mapper.entity.Example;
import tk.mybatis.mapper.entity.Example.Criteria;
/**
 * Default query condition: maps a single (property, value) pair onto an
 * Example criterion. The "title" property is matched with LIKE (substring
 * match); "type" and "status" are matched with equality; any other property
 * is ignored.
 */
public class Default extends Condition {
    @ApiModelProperty(value = "值", required = true)
    private String value;

    @ApiModelProperty(value = "属性", required = true)
    private String property;

    public Default() {
    }

    public Default(String property, String value) {
        super();
        this.value = value;
        this.property = property;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getProperty() {
        return property;
    }

    public void setProperty(String property) {
        this.property = property;
    }

    /**
     * Appends this condition to the given criteria.
     *
     * @param example  the Example being built (not used directly here; kept
     *                 for the Condition contract)
     * @param criteria criteria the condition is appended to
     * @param clazz    entity class passed through to getValue for conversion
     */
    public void addCondition(Example example, Criteria criteria, Class clazz) {
        // Guard: a condition without a property name cannot be applied.
        // (Previously a null property threw a NullPointerException here.)
        if (property == null) {
            return;
        }
        switch (property) {
            case "title":
                // Substring match on the title.
                criteria.andLike(property, "%" + getValue(value, clazz) + "%");
                break;
            case "type":
            case "status":
                // Exact match for enumeration-like fields; the two branches
                // were previously duplicated and are merged here.
                criteria.andEqualTo(property, getValue(value, clazz));
                break;
            default:
                // Other properties are intentionally ignored (original behavior).
                break;
        }
    }
}
|
#!/bin/sh
set -e
set -u
set -o pipefail
# NOTE(review): the shebang says /bin/sh but the script uses bash-only
# features (the `function` keyword, arrays, [[ ]], the ERR trap); it relies
# on /bin/sh being bash, as is the case on macOS build machines.
function on_error {
echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: framework path (absolute, or relative to BUILT_PRODUCTS_DIR).
# Copies it into the app's Frameworks folder, strips architectures not being
# built, re-signs if needed, and (Xcode < 7) embeds Swift runtime dylibs.
install_framework()
{
# Resolve the source: try the given path under BUILT_PRODUCTS_DIR, then just
# its basename there, then the literal path.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
# Fall back to a bare binary, and resolve a symlinked destination binary.
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
# $1: path to a .framework.dSYM bundle. Copies it to DERIVED_FILES_DIR,
# strips architectures not being built, and places the result (or a stamp
# file when no stripping happened) in DWARF_DSYM_FOLDER_PATH.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 when stripping ran.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Copies the bcsymbolmap files of a vendored framework
# $1: path to a .bcsymbolmap file; copied into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # Escape the inner quotes so the echoed command matches what is executed.
    # The previous version used unescaped nested quotes, which closed the
    # outer string at every filter argument (unlike the escaped echo lines
    # used by the other install_* helpers in this script).
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. No-op unless an identity is set and
# code signing is both required and allowed by the build settings.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
# When parallel signing is enabled, background the codesign invocation; the
# end of the script waits for all backgrounded jobs.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# $1: path to a Mach-O binary. Removes slices not present in ARCHS, in place.
# Sets STRIP_BINARY_RETVAL to 1 when processing completed, 0 when the binary
# shares no architectures with the current build.
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary"
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Install the vendored frameworks for the active configuration. The Debug
# and Release branches previously duplicated the exact same list; they are
# merged here so the two configurations cannot drift apart. Behavior for
# any other configuration is unchanged (nothing is installed).
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/CocoaLumberjack/CocoaLumberjack.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/KWBaseViewController/KWBaseViewController.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/KWCategoriesLib/KWCategoriesLib.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/KWLogger/KWLogger.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/KWPublicUISDK/KWPublicUISDK.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/MJRefresh/MJRefresh.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Masonry/Masonry.framework"
fi
# If parallel code signing was enabled, codesign invocations were
# backgrounded; wait for all of them before this build phase finishes.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<reponame>tchaik/tchaik<gh_stars>100-1000
var keyMirror = require("keymirror");

// Filter-related action type names. keyMirror assigns each key its own name
// as its value (e.g. SET_FILTER_ITEM === "SET_FILTER_ITEM").
var actionTypes = {
  SET_FILTER_ITEM: null,
  FILTER_PATHS: null,
  FILTER_LIST: null,
};

module.exports = keyMirror(actionTypes);
|
<filename>traveldb/abstract/def_f_msgakt.sql<gh_stars>0
/**************************************************************
*
* yafra.org DEF_F_msgakt.sql
*
* action messages definition
*
* DEF_F_msgakt
*
**************************************************************/
-- NOTE: the source file had gone through a lossy charset conversion
-- (accented Latin-1 characters were replaced by U+FFFD); the French
-- message texts below have been restored to proper UTF-8.
delete from msg where msg.msg_typ = 6 and msg.s_id = 2;
insert into msg values (6, 0, 2, 1,
'Tout est en ordre');
insert into msg values (6, 1, 2, 1,
'**** Message d''erreur *****');
insert into msg values (6, 2, 2, 1,
'**** Erreur du système d''exploitation ****');
insert into msg values (6, 3, 2, 1,
'**** Erreur base de données ****');
insert into msg values (6, 4, 2, 1,
'Avertissez votre service technique de cette erreur');
insert into msg values (6, 5, 2, 1,
'Produit le : %s %s');
insert into msg values (6, 6, 2, 1,
'Produit par : %s');
insert into msg values (6, 7, 2, 1,
'Prestation : %d %s');
insert into msg values (6, 8, 2, 1,
'Support : %d %s %s');
insert into msg values (6, 9, 2, 1,
'Numéro FAX/TLX : %s %s');
insert into msg values (6, 10, 2, 1,
'Pages : %d');
insert into msg values (6, 11, 2, 1,
'Sortie sur : %s');
insert into msg values (6, 12, 2, 1,
'Action Terminée');
insert into msg values (6, 13, 2, 1,
'Mode d''execution : %s');
insert into msg values (6, 14, 2, 1,
'L''action n''a pu être démarrée: soit elle a déjà été lancée par une');
insert into msg values (6, 14, 2, 2,
' autre personne, soit elle n''est pas executable');
insert into msg values (6, 15, 2, 1,
'Erreur de lecture d''un objet: soit l''objet n''existe pas');
insert into msg values (6, 15, 2, 2,
'soit il y a un problème du système de base de donnée');
insert into msg values (6, 16, 2, 1,
'Paramètres insuffisants ou erronnés lors de l''invocation de l''action');
insert into msg values (6, 16, 2, 2,
'Relancez en vérifiant les paramètres !');
insert into msg values (6, 17, 2, 1,
'Nom du fichier : %s');
insert into msg values (6, 18, 2, 1,
'L''action du dossier %d est déjà lancée');
insert into msg values (6, 19, 2, 1,
'L''action du dossier %d a déjà été correctement executée');
insert into msg values (6, 20, 2, 1,
'L''action du dossier %d avec un status: %s ne peut être executée');
insert into msg values (6, 21, 2, 1,
'L''action du dossier %d n''est pas executable');
insert into msg values (6, 22, 2, 1,
'Une erreur base de données empêche la préparation de l''action');
insert into msg values (6, 22, 2, 2,
'pour le dossier %d');
insert into msg values (6, 23, 2, 1,
'L''action du dossier %d est passée à l''état -en erreur- ');
insert into msg values (6, 24, 2, 1,
'L''action du dossier %d est passée à l''état -non executable- ');
insert into msg values (6, 25, 2, 1,
'Erreur dans la confirmation de l''action de la réservation %d');
insert into msg values (6, 26, 2, 1,
'Une erreur base de données empêche la réalisation de l''action.');
insert into msg values (6, 26, 2, 2,
'Interruption');
insert into msg values (6, 27, 2, 1,
'Offre : %d %s');
insert into msg values (6, 28, 2, 1,
'Prestation partielle: %d %s');
insert into msg values (6, 29, 2, 1,
'Dossier : %d %s');
insert into msg values (6, 30, 2, 1,
'Il y a un problème dans le traitement de l''appareil de sortie');
insert into msg values (6, 31, 2, 1,
'Le dossier %s a plus que 2 interruptions, des parties manquent !');
insert into msg values (6, 32, 2, 1,
'Dans le dossier %s, le prix n''est pas correct pour la prestation: %s');
insert into msg values (6, 33, 2, 1,
'Status courant : %s');
insert into msg values (6, 34, 2, 1,
'Status visé : %s');
insert into msg values (6, 35, 2, 1,
'Il y a un problème dans l''allocation du fichier de sortie: OutBuffer');
insert into msg values (6, 36, 2, 1,
'Arrangement : %d %s');
insert into msg values (6, 37, 2, 1,
'Les paramètres choisis ne sont pas compatible pour l''execution de l''action');
insert into msg values (6, 38, 2, 1,
'Dans le dossier %s, il y a trop de commission %s');
insert into msg values (6, 39, 2, 1,
'Dans le dossier %s, il doit exister au moins une commission %s');
insert into msg values (6, 40, 2, 1,
'Prix pour ID=%d, DLTT_ID=%d, KAT_ID=%d, RKR_ID=%d, DLA_ID=%d, DL_ID=%d !');
insert into msg values (6, 41, 2, 1,
'%d Prix trouvee.');
|
# Validates — or, when the current mode permits modification, provisions —
# everything the installation depends on: GCP APIs, fleet registration,
# GCP components (workload identity, Stackdriver, service mesh feature),
# cluster labels, and the istio namespace. The can_* / should_validate /
# exit_if_* helpers are defined elsewhere in the script.
x_validate_dependencies() {
local FLEET_ID; FLEET_ID="$(context_get-option "FLEET_ID")"
# Enable required GCP APIs if allowed; otherwise only verify them.
if can_modify_gcp_apis; then
enable_gcloud_apis
elif should_validate; then
exit_if_apis_not_enabled
fi
# Register the cluster with the fleet, or verify it is registered.
if can_register_cluster; then
register_cluster
elif should_validate; then
exit_if_cluster_unregistered
fi
# Enable workload identity / Stackdriver / the mesh feature, or verify them.
if can_modify_gcp_components; then
enable_workload_identity
if ! is_stackdriver_enabled; then
enable_stackdriver_kubernetes
fi
enable_service_mesh_feature
else
exit_if_no_workload_identity
exit_if_stackdriver_not_enabled
exit_if_service_mesh_feature_not_enabled
fi
get_project_number "${FLEET_ID}"
# Apply (or verify) the required cluster labels.
if can_modify_cluster_labels; then
add_cluster_labels
elif should_validate; then
exit_if_cluster_unlabeled
fi
# Create (or verify) the istio namespace.
if can_create_namespace; then
create_istio_namespace
elif should_validate; then
exit_if_istio_namespace_not_exists
fi
}
|
import numpy as np
import pandas as pd
import keras
from keras import Sequential
from keras.layers import Dense, Embedding, Input, LSTM
# Binary sequence classifier: embedding -> LSTM -> sigmoid.
model = Sequential()
# Vocabulary of 1000 token ids, each embedded into a 256-dim vector.
model.add(Embedding(1000, 256))
model.add(LSTM(256))
# Single sigmoid unit: outputs a probability for the positive class.
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# NOTE(review): X_train and y_train are not defined anywhere in this file —
# they must be provided (e.g. loaded/preprocessed) before this line can run.
model.fit(X_train, y_train, epochs=10, batch_size=32, validation_split=0.1, verbose=1)
#
# Defines Docker aliases.
#
# Author:
# François Vantomme <akarzim@gmail.com>
#
# Return if requirements are not found.
# zsh: (( $+commands[docker] )) is true only when a `docker` executable is
# found in PATH; stop sourcing this module otherwise.
if (( ! $+commands[docker] )); then
return 1
fi
#
# Functions
#
# Set Docker Machine environment
# $1: machine name. Evaluates the export statements printed by
# `docker-machine env` into the current shell.
function dkme {
  if (( ! $+commands[docker-machine] )); then
    return 1
  fi
  # Quote both expansions: the machine name may contain special characters,
  # and the env output is multi-line (the previous unquoted form was subject
  # to word splitting).
  eval "$(docker-machine env "$1")"
}
# Set Docker Machine default machine
# $1: machine name. Points the "default" machine symlink at the named
# machine directory under ~/.docker/machine/machines.
function dkmd {
  if (( ! $+commands[docker-machine] )); then
    return 1
  fi
  pushd ~/.docker/machine/machines
  if [[ ! -d "$1" ]]; then
    echo "Docker machine '$1' does not exists. Abort."
    popd
    return 1
  fi
  if [[ -L default ]]; then
    # Replace the existing default symlink. The previous version wrapped
    # this in `eval $(rm -f default)`, which evaluated rm's (empty) output
    # instead of simply running the command.
    rm -f default
  elif [[ -d default ]]; then
    echo "A default machine already exists. Abort."
    popd
    return 1
  elif [[ -e default ]]; then
    echo "A file named 'default' already exists. Abort."
    popd
    return 1
  fi
  # Same fix as above: run ln directly instead of eval'ing its output,
  # and quote the machine name.
  ln -s "$1" default
  popd
}
# Source module files.
# source "${0:h}/alias.zsh"
|
#!/usr/bin/env bash
# Run robo tasks inside a throwaway PHP CLI container, as the invoking user,
# with the current directory mounted at /app.
php_tag='7.4.14-cli-alpine3.11'
# "$@" (quoted) forwards each argument as-is; the previous unquoted $@ split
# and re-glob-expanded arguments containing spaces. $PWD is quoted for the
# same reason.
sudo docker run --rm -it \
  --user "$(id -u):$(id -g)" \
  --volume "$PWD:/app" \
  -w "/app" \
  php:"${php_tag}" bin/robo.phar "$@"
|
module Ecm
module Links
module Backend
# Backend resource controller for Ecm::Links::Link records.
class LinksController < Itsf::Backend::Resource::BaseController
# Mixes in list-sorting behavior (presumably for the index action —
# see ResourcesController::Sorting).
include ResourcesController::Sorting
# Model class this resource controller manages.
def self.resource_class
Ecm::Links::Link
end
private
# Strong parameters: only these link attributes may be mass-assigned.
def permitted_params
params
.require(:link)
.permit(:category_id, :name, :url, :markup_language, :description)
end
end
end
end
end
|
<gh_stars>0
package cmd
import (
"context"
"errors"
"fmt"
"log"
"net"
"strings"
"time"
"github.com/spf13/cobra"
"github.com/cretz/bine/tor"
"github.com/direct-connect/go-dcpp/adc"
)
const torHubPort = 1411
// init registers the "hub" subcommand, which publishes a local ADC hub as a
// Tor onion service. Root and Tor are defined elsewhere in this package.
func init() {
cmd := &cobra.Command{
Use: "hub adc://localhost:411",
Short: "register the hub in Tor network",
RunE: func(cmd *cobra.Command, args []string) error {
// Exactly one positional argument: the local hub address.
if len(args) != 1 {
return errors.New("local hub address must be specified")
}
return runHub(Tor, args[0])
},
}
Root.AddCommand(cmd)
}
// runHub creates an onion service on the given Tor instance and proxies all
// incoming connections to the local hub at localHub. Blocks until the proxy
// stops (or returns an error earlier if setup fails).
func runHub(t *tor.Tor, localHub string) error {
proxy, err := NewHubProxy(localHub)
if err != nil {
return err
}
fmt.Println("Registering onion service...")
// Wait at most a few minutes to publish the service
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
defer cancel()
// Expose the onion service on the Tor-side port torHubPort (1411).
onion, err := t.Listen(ctx, &tor.ListenConf{
RemotePorts: []int{torHubPort},
})
if err != nil {
return fmt.Errorf("failed to create onion service: %v", err)
}
defer onion.Close()
fmt.Printf("Tor address: adc://%v.onion\n", onion.ID)
// Run a hub service until terminated
return proxy.Serve(onion)
}
// NewHubProxy validates the hub address and builds a proxy forwarding to it.
// Only adc:// addresses are accepted; the stored address has the scheme
// prefix stripped.
func NewHubProxy(addr string) (*HubProxy, error) {
	switch {
	case addr == "":
		return nil, errors.New("hub address should be set")
	case !strings.HasPrefix(addr, adc.SchemaADC+"://"):
		return nil, errors.New("only adc is currently supported")
	}
	hostPort := strings.TrimPrefix(addr, adc.SchemaADC+"://")
	return &HubProxy{addr: hostPort}, nil
}
// HubProxy forwards ADC connections to a local hub.
type HubProxy struct {
addr string // host:port of the local hub (scheme already stripped)
}
// Serve accepts connections from l (the onion listener) and proxies each one
// to the local hub on its own goroutine. It returns only when Accept fails
// (for example, when the listener is closed).
func (p *HubProxy) Serve(l net.Listener) error {
for {
conn, err := l.Accept()
if err != nil {
return err
}
go func() {
defer conn.Close()
// Per-connection errors are logged, not fatal to the accept loop.
if err := p.ServeConn(conn); err != nil {
log.Println(err)
}
}()
}
}
// ServeConn proxies a single client connection to the local hub: it wraps
// both sides in ADC connections, pumps data in both directions, and returns
// when either direction fails (the deferred Closes then tear down the other).
func (p *HubProxy) ServeConn(conn net.Conn) error {
ac, err := adc.NewConn(conn)
if err != nil {
return err
}
defer ac.Close()
// connect to the real hub
hconn, err := net.Dial("tcp", p.addr)
if err != nil {
return err
}
defer hconn.Close()
ah, err := adc.NewConn(hconn)
if err != nil {
return err
}
defer ah.Close()
c := &hubProxyConn{
c: ac, hc: ah,
}
// Buffered channel so neither goroutine blocks when the other side has
// already reported its error.
errc := make(chan error, 2)
go func() {
errc <- c.hubLoop()
}()
go func() {
errc <- c.clientLoop()
}()
// Return the first error from either direction.
return <-errc
}
// hubProxyConn holds both ends of a proxied session.
type hubProxyConn struct {
c *adc.Conn // client side of the connection (tor)
hc *adc.Conn // hub side of the connection (local)
}
// hubLoop forwards traffic from the client side to the hub side
// (proxyTo is defined elsewhere in this package).
func (c *hubProxyConn) hubLoop() error {
// FIXME: rewrite all IPs
return proxyTo(c.c, c.hc)
}
// clientLoop forwards traffic from the hub side back to the client side.
func (c *hubProxyConn) clientLoop() error {
// FIXME: rewrite all IPs
return proxyTo(c.hc, c.c)
}
|
<filename>src/parseconfig.js
// @flow
import url from 'url';
import fs from 'fs';
import readline from 'readline';
import path from 'path';
import program from 'commander';
import axios from 'axios';
import type { $AxiosXHR } from 'axios';
import type {
Schema,
CollectionDefinition,
FunctionDefinition,
TriggerDefinition
} from './schema';
import {
AddIndex,
UpdateIndex,
DeleteIndex,
prettyPrintCommand,
} from './command';
import type {
Command
} from './command';
import type { Options } from './actions';
import { plan } from './planner';
import { execute } from './executor';
import { getPlan, check } from './actions';
import { CliError, MissingParameterError } from './errors';
import { consoleLogger } from './logger';
// NOTE(review): read but not otherwise used in this chunk — presumably a
// default for the parseUrl argument; confirm against the rest of the file.
const PARSE_SERVER_URL = process.env.PARSE_SERVER_URL;
// TODO: consolidate printing of status into a function
// TODO: support indexes
// Options shared by the CLI subcommands (populated by commander from the
// --flags declared on each command).
export type CliOptions = {
applicationId: ?string,
key: ?string,
hookUrl: ?string,
ignoreIndexes: boolean,
disallowColumnRedefine: boolean,
disallowIndexRedefine: boolean,
verbose: boolean
}
program.usage('parseconfig [commands]');
// Central CLI error sink: known CliErrors print their message and honor the
// error's own exit policy; anything else is unexpected and exits with code 2.
function handleError(e) {
  if (!(e instanceof CliError)) {
    console.error('Unexpected error:', e);
    process.exit(2);
  }
  console.error(e.message);
  if (e.shouldExit) {
    process.exit(e.exitCode);
  }
}
// commands:
// plan: outputs a gameplan in JSON format that can be executed
// --disallow-column-redefine: returns an error if the definition of a column changes
// --disallow-index-redefine: returns an error if the definition of an index changes
// execute: takes a gameplan in JSON and executes it.
// apply: Converts a schema into a gameplan then executes it, asks for confirmation first.
// --dry-run: prints commands instead of executing them.
// --non-interactive: doesn't ask for confirmation before applying gameplan
// --disallow-column-redefine: returns an error if the definition of a column changes
// --disallow-index-redefine: returns an error if the definition of an index changes
// plan: resolve the target schema against the live Parse server and print the
// resulting command list (the "gameplan") as JSON.
program
  .command('plan <parseUrl> <schema>')
  .description('Generate a gameplan that can be run using the execute command')
  .option('-i, --application-id <s>', 'Application id of the parse server')
  .option('-k, --key <s>', 'Parse access key')
  .option('-u, --hook-url <s>', 'Base url for functions and triggers')
  .option('--ignore-indexes', 'Skips verification and updating of indices')
  .option('--disallow-column-redefine', 'Prevents columns from being updated or deleted')
  .option('--disallow-index-redefine', 'Prevents indices from being updated or deleted')
  .action(async (parseUrl, schema, cliOptions: CliOptions) => {
    try {
      const options = validateOptions(cliOptions);
      const newSchema = getNewSchema(schema);
      const gamePlan = await getPlan(newSchema, parseUrl, options, consoleLogger);
      // Only pure JSON goes to stdout so the output can be piped to `execute`.
      console.log(JSON.stringify(gamePlan));
    } catch (e) {
      handleError(e);
    }
  });
// check: exit non-zero (via handleError) when Parse differs from the schema.
// Status text goes to stderr, keeping stdout clean for machine consumption.
program
  .command('check <parseUrl> <schema>')
  .description('Return an error if Parse is out of sync with the given schema')
  .option('-i, --application-id <s>', 'Application id of the parse server')
  .option('-k, --key <s>', 'Parse access key')
  .option('-u, --hook-url <s>', 'Base url for functions and triggers')
  .option('--ignore-indexes', 'Skips verification and updating of indices')
  .action(async (parseUrl, schema, cliOptions: CliOptions) => {
    try {
      const options = validateOptions(cliOptions);
      const newSchema = getNewSchema(schema);
      await check(newSchema, parseUrl, options, consoleLogger);
      console.error('Parse is up-to-date');
    } catch (e) {
      handleError(e);
    }
  });
// apply: plan + confirm + execute in one step. Shares its confirmation/execute
// flow with the `execute` command below — NOTE(review): the two action bodies
// are near-duplicates and could share a helper.
program
  .command('apply <parseUrl> <schema>')
  .description('Apply the given schema to Parse')
  .option('-i, --application-id <s>', 'Application id of the parse server')
  .option('-k, --key <s>', 'Parse access key')
  .option('-u, --hook-url <s>', 'Base url for functions and triggers')
  .option('--non-interactive', 'Do not ask for confirmation before applying changes')
  .option('--ignore-indexes', 'Skips verification and updating of indices')
  .option('--disallow-column-redefine', 'Prevents columns from being updated or deleted')
  .option('--disallow-index-redefine', 'Prevents indices from being updated or deleted')
  .option('--verbose', 'Output extra logging')
  .action(async (parseUrl, schema, cliOptions: CliOptions) => {
    try {
      // readline is created up front; on the process.exit paths below it is
      // never closed, which is moot since the process dies anyway.
      const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout
      });
      const options = validateOptions(cliOptions);
      const newSchema = getNewSchema(schema);
      const gamePlan = await getPlan(newSchema, parseUrl, options, consoleLogger);
      if (gamePlan.length === 0) {
        console.error('No changes to make');
        process.exit();
      }
      // Preview the plan on stderr before asking for confirmation.
      gamePlan.forEach((command) => console.error(prettyPrintCommand(command)));
      if (cliOptions.nonInteractive) {
        execute(
          gamePlan,
          parseUrl,
          options.applicationId,
          options.key,
          consoleLogger,
          !!cliOptions.verbose
        ).then(() => rl.close(), handleError);
      } else {
        // Anything other than exactly "y"/"Y" aborts without changes.
        rl.question('Do you want to execute these commands? [y/N] ', (answer) => {
          if (answer.toLowerCase() !== 'y') {
            console.error('Exiting without making changes');
            process.exit();
          }
          execute(
            gamePlan,
            parseUrl,
            options.applicationId,
            options.key,
            consoleLogger,
            !!cliOptions.verbose
          ).then(() => rl.close(), handleError);
        });
      }
    } catch (e) {
      handleError(e);
    }; // NOTE(review): stray empty statement after the catch block
  });
// execute: run a previously generated gameplan file (output of `plan`).
// Mirrors the confirmation flow of `apply` above.
program
  .command('execute <parseUrl> <commands>')
  .description('Execute the given gameplan against Parse')
  .option('-i, --application-id <s>', 'Application id of the parse server')
  .option('-k, --key <s>', 'Parse access key')
  .option('-u, --hook-url <s>', 'Base url for functions and triggers')
  .option('--non-interactive', 'Do not ask for confirmation before applying changes')
  .option('--ignore-indexes', 'Skips verification and updating of indices')
  .option('--verbose', 'Output extra logging')
  .action(async (parseUrl, commandsFile, cliOptions: CliOptions) => {
    try {
      const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout
      });
      const options = validateOptions(cliOptions);
      // Commands come from a JSON file, not from the live server.
      const gamePlan = getCommands(commandsFile);
      if (gamePlan.length === 0) {
        console.error('No changes to make');
        process.exit();
      }
      gamePlan.forEach((command) => console.error(prettyPrintCommand(command)));
      if (cliOptions.nonInteractive) {
        execute(
          gamePlan,
          parseUrl,
          options.applicationId,
          options.key,
          consoleLogger,
          !!cliOptions.verbose
        ).then(() => rl.close(), handleError);
      } else {
        rl.question('Do you want to execute these commands? [y/N] ', (answer) => {
          if (answer.toLowerCase() !== 'y') {
            console.error('Exiting without making changes');
            process.exit();
          }
          execute(
            gamePlan,
            parseUrl,
            options.applicationId,
            options.key,
            consoleLogger,
            !!cliOptions.verbose
          ).then(() => rl.close(), handleError);
        });
      }
    } catch (e) {
      handleError(e);
    }; // NOTE(review): stray empty statement after the catch block
  });
program
.command('display <commands>')
.description('Display the given gameplan')
.option('--verbose', 'Output extra logging')
.action(async (commandsFile, cliOptions: CliOptions) => {
try {
const gamePlan = getCommands(commandsFile);
if (gamePlan.length === 0) {
console.error('No changes to make');
process.exit();
}
gamePlan.forEach((command) => console.error(prettyPrintCommand(command)));
} catch (e) {
handleError(e);
};
});
const parseSchemaJSON = (jsonSchema: string): Schema => {
const schema = JSON.parse(jsonSchema);
const newSchema = schema;
for (let i = 0; i < schema.collections.length; i++) {
// NOTE: parse-server stores indices in _SCHEMA in a naive way
// (name => key), we store indices with their options for
// posterity. Since we don't use parse-server to apply these
// indices, munge the shape to what we expect
const simpleIndices = {};
const indices = schema.collections[i].indexes;
// $FlowFixMe
const indexEntries: Array<[string, { key: { [string]: number }, options: Object }]> = Object.entries(indices);
for (const [key, value] of indexEntries) {
simpleIndices[key] = value.key;
}
newSchema.collections[i].indexes = simpleIndices;
}
return newSchema;
};
const getNewSchema = (schemaFile: string): Schema => {
try {
const fileContents = fs.readFileSync(schemaFile, {encoding: 'UTF-8'});
return parseSchemaJSON(fileContents);
} catch (err) {
console.error(err.message);
process.exit(1);
throw err; // Satisfy flow
}
};
const getCommands = (commandFile: string): Array<Command> => {
try {
const fileContents = fs.readFileSync(commandFile, {encoding: 'UTF-8'});
console.log(fileContents)
return JSON.parse(fileContents);
} catch (err) {
console.error(err.message);
process.exit(1);
throw err; // Satisfy flow
}
};
const validateOptions = (options: CliOptions): Options => {
const applicationId: ?string = options.applicationId || process.env.PARSE_APPLICATION_ID;
const key: ?string = options.key || process.env.PARSE_MASTER_KEY;
const hookUrl: ?string = options.hookUrl || process.env.PARSE_HOOK_URL || null;
const ignoreIndexes = options.ignoreIndexes;
const disallowColumnRedefine = options.disallowColumnRedefine;
const disallowIndexRedefine = options.disallowIndexRedefine;
if (applicationId === null || applicationId === undefined) {
throw new MissingParameterError('Application id', '-i', 'PARSE_APPLICATION_ID');
}
if (key === null || key === undefined) {
throw new MissingParameterError('Parse Master Key', '-k', 'PARSE_MASTER_KEY');
}
return {
applicationId,
key,
hookUrl,
ignoreIndexes,
disallowColumnRedefine,
disallowIndexRedefine
};
};
program.parse(process.argv);
// No subcommand given (just `node script`): print usage and exit.
if (process.argv.length < 3) {
  program.help()
}
|
#!/bin/bash
# Thin wrapper: run the shared Linux install steps, then apply the
# LGPL-default configuration. Both scripts are resolved relative to this
# file's own directory via $BASH_SOURCE, so it works from any CWD.
source "$(dirname "$BASH_SOURCE")"/linux-install-shared.sh
source "$(dirname "$BASH_SOURCE")"/defaults-lgpl-shared.sh
|
def find_average(nums):
    """Return the arithmetic mean of ``nums``.

    Args:
        nums: A non-empty sequence of numbers (must support ``len()``).

    Returns:
        The mean, following Python's true-division rules.

    Raises:
        ValueError: If ``nums`` is empty. (Previously this surfaced as a
            bare ``ZeroDivisionError`` from the division by ``len(nums)``.)
    """
    if len(nums) == 0:
        raise ValueError("find_average() requires a non-empty sequence")
    return sum(nums) / len(nums)
<!-- Static list of links to popular sites; each anchor points at the site root. -->
<ul>
 <li><a href="https://www.google.com/">Google</a></li>
 <li><a href="https://www.facebook.com/">Facebook</a></li>
 <li><a href="https://www.youtube.com/">YouTube</a></li>
 <li><a href="https://www.amazon.com/">Amazon</a></li>
 <li><a href="https://www.twitter.com/">Twitter</a></li>
</ul> |
#!/usr/bin/env bash
# Copyright (C) 2016 Kernc, Google Inc., authors, and contributors
# Licensed under http://www.apache.org/licenses/LICENSE-2.0
# Created By: miha@reciprocitylabs.com

# Compare pylint score / radon cyclomatic complexity between two commits and
# fail (exit 1) when a metric got worse. See print_help below for usage.
set -o pipefail
set -o nounset
set -o errexit

ARG1=${1:-}                         # optional: commit to compare against
GIT_REPO="$(pwd)"
TMP_REPO="$GIT_REPO/$(mktemp -d pylint_diff.XXXXXXX)"  # scratch clone, removed on exit
CACHE_DIR="$GIT_REPO/.pylint_cache"                    # memoized reports per commit
UNCOMMITED_PATCH="$TMP_REPO/uncommited.patch"
SCRIPT=$(basename "$0")
# Tool paths; empty string when the tool is not installed.
PYLINT="$(command -v pylint 2>/dev/null || true)"
RADON="$(command -v radon 2>/dev/null || true)"
PYLINT_ARGS="--output-format=parseable"
RADON_ARGS='cc --min C --no-assert --show-closures --show-complexity --average'

# Always clean up the scratch clone, preserving the script's exit status.
trap "status=\$?; cd '$GIT_REPO'; rm -rf '$TMP_REPO'; exit \$status" EXIT

mkdir -p "$CACHE_DIR"

# Print usage (the quoted block below is runtime output — kept verbatim) and exit 0.
print_help ()
{
    echo "
Usage: $SCRIPT [TEST_COMMIT | -h]
This script will compare pylint error count from two different commits.
Note: all changes that are not committed will be ignored.
The script will work only if the current commit is a merge commit, or if the
second test_commit argument is provided.
Given the commit tree:
       D---E---F---G---H
            \\         /
             A---B---C
- Running '$SCRIPT' on H will check the diff between G and H.
- Running '$SCRIPT F' on H will check the diff between F and H.
- Running '$SCRIPT F' on C will check the diff between E and C. The E commit is
  set by the merge base of the current head and the specified commit F.
"
    exit 0
}

case $ARG1 in -h|--help) print_help ; esac

# At least one of the two analyzers is required to do anything useful.
if [ ! "$PYLINT$RADON" ]; then
    echo 'Error: pylint and/or radon required'
    exit 3
fi
# Make a local clone: prevents copying of objects
# Handle shallow git clones
# (git refuses --local cloning tricks from a shallow repo, so the shallow
# marker is hidden during the clone and restored afterwards.)
is_shallow=$([ -f "$GIT_REPO/.git/shallow" ] && echo true || echo)
if [ "$is_shallow" ]; then
    mv "$GIT_REPO/.git/shallow" "$GIT_REPO/.git/shallow-bak"
fi
git clone -q --local --depth=50 "$GIT_REPO" "$TMP_REPO" 2>/dev/null
if [ "$is_shallow" ]; then
    mv "$GIT_REPO/.git/shallow-bak" "$GIT_REPO/.git/shallow"
    cp "$GIT_REPO/.git/shallow" "$TMP_REPO/.git/shallow"
fi

# Move over any modified but uncommited files ...
# (stash round-trip captures the working-tree diff without losing it locally)
if ! git diff-index --quiet HEAD; then
    git stash save -q --keep-index
    git stash show -p stash@\{0\} > "$UNCOMMITED_PATCH"
    git stash pop -q --index
fi

cd "$TMP_REPO"

# ... and commit them
if [ "$(cat "$UNCOMMITED_PATCH" 2>/dev/null || true)" ]; then
    git apply "$UNCOMMITED_PATCH"
    git commit -a -m 'Commit changed files'
    was_dirty='+'   # marker appended to the commit hash in the banner below
fi >/dev/null 2>&1

git reset --hard -q HEAD
CURRENT_COMMIT=$(git rev-parse HEAD)
# Baseline: merge-base with ARG1 when given, else the first parent of HEAD.
if [ "$ARG1" ]; then
    PREVIOUS_COMMIT=$(git merge-base HEAD "$ARG1")
else
    PREVIOUS_COMMIT=$(git show --pretty=raw HEAD |
                      awk '/^parent /{ print $2; exit }')
fi

echo
echo "Comparing commits ${CURRENT_COMMIT:0:10}${was_dirty:-} and ${PREVIOUS_COMMIT:0:10}"
# Only python files are relevant to pylint/radon.
CHANGED_FILES=$(git diff --name-only $CURRENT_COMMIT $PREVIOUS_COMMIT |
                grep "\.py$" || true )
[ ! "$(command -v md5sum 2>/dev/null)" ] && md5sum() { md5; } # for OS X
# Hash of the changed-file list — part of the cache key in the loop below.
CHANGED_FILES_HASH=$(echo "$CHANGED_FILES" | md5sum | cut -d ' ' -f 1)
if [ ! "$CHANGED_FILES" ]; then
    echo "No python files changed. Skipping lint checks."
    exit 0
fi
echo
echo "Comparing files"
echo "==============="
echo "$CHANGED_FILES"
echo
# Run pylint on the old and new code, to compare the quality.
# If pylint is run multiple times it will store the previous results and show
# the change in quality with a non-negative number if code was improved or not
# changed, and a negative number if more code issues have been introduced.

# checkout COMMIT: silently check out COMMIT in the scratch clone and discard
# any local modifications.
checkout ()
{
    { git checkout -q "$1"
      git reset --hard -q HEAD
    } 2>/dev/null
}

# Number_of_issues CACHE_FILE: reuse the cached pylint report if present,
# otherwise run pylint over CHANGED_FILES and cache the output. Prints the
# integer part of the "rated at X/10" score (field 7 when split on dots/spaces).
Number_of_issues ()
{
    cached="$1"
    { cat "$cached" 2>/dev/null ||
      echo "$CHANGED_FILES" |
      xargs "$PYLINT" $PYLINT_ARGS |
      tee "$cached"
    } | awk -F'[\\. ]' '/^Your code has been rated at /{ print $7 }' || true
}

# Cyclomatic_complexity CACHE_FILE: same caching scheme for radon; prints the
# sum of the per-block complexity numbers "(N)" at line ends.
Cyclomatic_complexity ()
{
    cached="$1"
    { cat "$cached" 2>/dev/null ||
      echo "$CHANGED_FILES" |
      xargs "$RADON" $RADON_ARGS |
      tee "$cached"
    } | awk -F'[()]' '/ .+\([0-9]+\)$/ { tot += $2 } END { print tot }' || true
}

# Get_diffable: normalize a report for diffing — drop everything from the
# trailing summary block on, sort, and strip line numbers so pure moves don't
# register as changes. Relies on diff_block_end / match_line_num /
# replace_line_num set by the caller's loop.
Get_diffable ()
{
    sed -E "/$diff_block_end/,\$d" |
    sort |
    sed -E "s/$match_line_num/$replace_line_num/"
}
# Each entry packs: check-name, metric-function, report-end-regex,
# line-number-regex, its replacement, and the diff-line filter — unpacked by
# the IFS=',' read just inside the loop.
for check in \
    'Pylint,Number_of_issues,^Report$,^([^:]+:)[0-9]+:,\\1,^\\+' \
    'radon,Cyclomatic_complexity,^[0-9]+ blocks,^( +[MCF]) [0-9:]+,\\1:,^[+-]'
do
    IFS=',' read check \
               func \
               diff_block_end \
               match_line_num \
               replace_line_num \
               show_diff_lines < <(echo "$check")

    # If command not available, skip it
    # ($check upper-cased names the PYLINT/RADON variable holding the tool path.)
    if [ ! "$(eval echo \$$(echo $check | tr '[:lower:]' '[:upper:]') )" ]; then
        continue
    fi

    # Cache files are keyed on commit + changed-file-list hash; entries for
    # any other commit are purged so at most one previous/current pair lives
    # in the cache at a time.
    cached_previous="$CACHE_DIR/previous.$check.$PREVIOUS_COMMIT.$CHANGED_FILES_HASH"
    cached_current="$CACHE_DIR/current.$check.$CURRENT_COMMIT.$CHANGED_FILES_HASH"
    [ -f "$cached_previous" ] || rm -r "$CACHE_DIR/previous."* 2>/dev/null || true
    [ -f "$cached_current" ] || rm -r "$CACHE_DIR/current."* 2>/dev/null || true

    # Measure baseline first, then the current commit; checkout is skipped
    # entirely on a cache hit.
    [ -f "$cached_previous" ] || checkout $PREVIOUS_COMMIT
    RESULT_PARENT=$($func "$cached_previous")
    [ -f "$cached_current" ] || checkout $CURRENT_COMMIT
    RESULT_CURRENT=$($func "$cached_current")

    echo
    echo "$check result"
    echo "================================================================="
    cat "$cached_current"
    echo
    echo
    echo "$check diff"
    echo "================================================================="
    diff --unified=0 --minimal \
        <(Get_diffable < "$cached_previous") \
        <(Get_diffable < "$cached_current") |
        grep -E "$show_diff_lines" | tail -n +3 || true
    echo
    echo
    echo "$check results"
    echo "================================================================="
    echo "${func//_/ } on parent commit: $RESULT_PARENT"
    echo "${func//_/ } on the pull request: $RESULT_CURRENT ($(printf "%+d" $((RESULT_CURRENT - RESULT_PARENT))))"
    echo

    # Fail when the current value exceeds the parent's.
    # NOTE(review): for radon (complexity) higher is worse, which matches this
    # test; but Number_of_issues actually prints the pylint *rating*, where
    # higher is better — confirm the intended comparison direction for pylint.
    if awk "BEGIN { exit ${RESULT_CURRENT:-0} > ${RESULT_PARENT:-0} ? 0 : 1 }"; then
        echo "FAIL: ${func//_/ } got worse"
        exit 1
    fi
done

echo "OK"
|
#import "OpenImSdkRn.h"
// React Native headers resolve differently depending on how the pod is
// integrated; prefer the local headers when available.
#if __has_include("RCTBridgeModule.h")
#import "RCTBridgeModule.h"
#import "RCTEventEmitter.h"
#else
#import <React/RCTBridgeModule.h>
#import <React/RCTEventEmitter.h>
#endif
@import OpenIMCore;

NS_ASSUME_NONNULL_BEGIN

// Adapter object conforming to OpenIM's send-message callback protocol,
// carrying a React Native promise pair. Presumably the protocol's
// success/error callbacks resolve/reject the promise — implementation is in
// the .m file, not visible here; confirm there.
@interface RNSendMessageCallbackProxy : NSObject <Open_im_sdk_callbackSendMsgCallBack>

// msg: message payload being sent; module: the RN module (used for event
// emission); resolver/rejecter: the JS promise completion blocks.
- (id)initWithMessage:(NSString *)msg module:(OpenIMSDKRN *)module resolver:(RCTPromiseResolveBlock)resolver rejecter:(RCTPromiseRejectBlock)rejecter;

@end

NS_ASSUME_NONNULL_END
|
#!/bin/sh
# `$*` expands the `args` supplied in an `array` individually
# or splits `args` in a string separated by whitespace.
echo ">>> Running serverless-tools"
# NOTE(review): interpolating $* into the -c string re-splits arguments that
# contain spaces or shell metacharacters — confirm callers never pass such
# args, or forward them safely with: sh -c 'serverless-tools "$@"' -- "$@"
sh -c "serverless-tools $*"
|
package nike.platform.persion;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
/**
 * Serves the "persion" page: a single listing view paged 100 rows at a time.
 * (The name "persion" [sic] is preserved — it is the request mapping and the
 * view name, so renaming would break existing URLs and templates.)
 */
@Controller
public class PersionController {

    @Autowired
    PersionDao persionDao;

    /**
     * Renders one page of results into the "persion" view.
     *
     * @param page zero-based page index; Spring's {@code defaultValue = "0"}
     *             substitutes 0 for a missing or empty parameter, so the value
     *             can never be null here — the old {@code page==null} re-check
     *             was dead code and has been removed.
     * @return the view name to render
     */
    @RequestMapping("persion")
    public String persion(Model model, @RequestParam(name = "page", defaultValue = "0") Integer page) {
        model.addAttribute("persions", persionDao.findAll(PageRequest.of(page, 100)));
        return "persion";
    }
}
|
#!/bin/bash
# SLURM batch job: single-core run of one hyper-parameter trial of meta.py
# ("cosper" activation, trial id 1), 2 GB RAM, just under 24 h wall time.
# Failures are mailed to the address below; stdout/stderr land in the
# scratch log directory keyed by job id (%j).
#SBATCH -J Act_cosper_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1     # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00         # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins

#module load intel python/3.5

# Positional arguments are the trial's hyper-parameters; their meanings are
# defined by meta.py (not visible here — consult that script).
python3 /home/se55gyhe/Act_func/progs/meta.py cosper 1 Adamax 2 0.7163439571102154 232 0.0018506530341485222 lecun_uniform PE-infersent 0.01
|
package com.booleanbites.tabsample.legacy;
import android.app.Activity;
import android.os.Bundle;
import com.booleanbites.tabsample.R;
/**
* @author <NAME>
*
*/
public class HomeActivity extends Activity {
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.home_activity);
}
} |
<gh_stars>0
import java.util.*;
public class Main {
    /**
     * Exercises AuthenticationSystem persistence round-trips (text file and
     * object serialization), then echoes console input back prefixed with
     * "Test: " until stdin is exhausted.
     */
    public static void main(String[] args) {
        AuthenticationSystem authenticationSystem = new AuthenticationSystem();
        authenticationSystem.load("test_set.txt");
        authenticationSystem.register("Andrea", "Meister");
        authenticationSystem.save("test_set2.txt");
        authenticationSystem.saveObjs("test_set.dat");
        //Employee9934170;5927181
        authenticationSystem = new AuthenticationSystem();
        authenticationSystem.loadObjs("test_set.dat", 10000);
        // Fix: guard with hasNextLine() so EOF (e.g. piped/redirected stdin)
        // ends the loop cleanly instead of throwing NoSuchElementException
        // from nextLine(); try-with-resources closes the scanner.
        try (Scanner scanner = new Scanner(System.in)) {
            while (scanner.hasNextLine()) {
                String input = scanner.nextLine();
                System.out.println("Test: " + input);
            }
        }
    }
}
|
import { makeStyles } from '@material-ui/core/styles';

// Styles for a searchable card-list screen: vertical root layout, centered
// search bar, wrapping card grid, a floating action button pinned to the
// bottom-right, and a "load more" footer button.
export const useStyles = makeStyles(theme => ({
  root: {
    display: 'flex',
    flexDirection: 'column',
    margin: theme.spacing(6, 0, 3)
  },
  searchBar: {
    display: 'flex',
    justifyContent: 'center',
    margin: 8
  },
  gridCards: {
    display: 'flex',
    flexWrap: 'wrap',
    justifyContent: 'center'
  },
  fabAddRule: {
    // Fixed so the add button stays visible while the list scrolls.
    position: 'fixed',
    bottom: theme.spacing(5),
    right: theme.spacing(5),
    zIndex: 3
  },
  loadMoreButtonView: {
    padding: 16
  },
  loadMoreButton: {
    backgroundColor: theme.palette.primary.main
  }
}));

export default useStyles;
|
import Whatsapp from "../../models/Whatsapp";

/**
 * Replaces the queue associations of a WhatsApp connection with exactly the
 * given queue ids, then reloads the instance so callers see fresh relations.
 *
 * @param whatsapp model instance to update
 * @param queueIds the complete new set of associated queue ids ($set removes
 *                 any association not listed)
 */
const AssociateWhatsappQueue = async (
  whatsapp: Whatsapp,
  queueIds: number[]
): Promise<void> => {
  // Order matters: $set rewrites the association first, then reload pulls the
  // updated relations into this in-memory instance.
  await whatsapp.$set("queues", queueIds);

  await whatsapp.reload();
};

export default AssociateWhatsappQueue;
|
package weixin.tenant.entity;
import java.math.BigDecimal;
import java.util.Date;
import java.lang.String;
import java.lang.Double;
import java.lang.Integer;
import java.math.BigDecimal;
import javax.xml.soap.Text;
import java.sql.Blob;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.SequenceGenerator;
import org.jeecgframework.poi.excel.annotation.Excel;
/**
 * @Title: Entity
 * @Description: Service-plan ("套餐") definition: a named plan with a price
 *               and the usage quotas it grants a tenant.
 * @author onlineGenerator
 * @date 2015-03-05 12:59:58
 * @version V1.0
 *
 */
@Entity
@Table(name = "weixin_product", schema = "")
@SuppressWarnings("serial")
public class WeixinProductEntity implements java.io.Serializable {
	/** Primary key: 36-char generated UUID. */
	private java.lang.String id;
	/** Plan name. (Generator's original comment mislabeled this "creator name"; the column is PRODUCT_NAME.) */
	private java.lang.String productName;
	/** Price. */
	@Excel(exportName="价格")
	private java.lang.Integer price;
	private Integer groupSMSNum;//quota: number of broadcast (mass) message sends
	private Integer newsTemplateNum;//quota: max rich-media (news) templates
	private Integer textTemplateNum;//quota: max text templates
	private Integer requestNum;//quota: max API requests
	/** Free-form description. */
	@Excel(exportName="描述")
	private java.lang.String remark;
	/**
	 * Getter for the primary key.
	 *@return: java.lang.String the UUID primary key
	 */
	@Id
	@GeneratedValue(generator = "paymentableGenerator")
	@GenericGenerator(name = "paymentableGenerator", strategy = "uuid")
	@Column(name ="ID",nullable=false,length=36)
	public java.lang.String getId(){
		return this.id;
	}
	/**
	 * Setter for the primary key.
	 *@param: java.lang.String the UUID primary key
	 */
	public void setId(java.lang.String id){
		this.id = id;
	}
	/**
	 * Getter for the plan name.
	 *@return: java.lang.String plan name
	 */
	@Column(name ="PRODUCT_NAME",nullable=true,length=50)
	public java.lang.String getProductName(){
		return this.productName;
	}
	/**
	 * Setter for the plan name.
	 *@param: java.lang.String plan name
	 */
	public void setProductName(java.lang.String productName){
		this.productName = productName;
	}
	/**
	 * Getter for the price.
	 *@return: java.lang.Integer price
	 */
	@Column(name ="PRICE",nullable=true,length=8)
	public java.lang.Integer getPrice(){
		return this.price;
	}
	/**
	 * Setter for the price.
	 *@param: java.lang.Integer price
	 */
	public void setPrice(java.lang.Integer price){
		this.price = price;
	}
	/**
	 * Getter for the description.
	 *@return: java.lang.String description
	 */
	@Column(name ="REMARK",nullable=true,length=200)
	public java.lang.String getRemark(){
		return this.remark;
	}
	/**
	 * Setter for the description.
	 *@param: java.lang.String description
	 */
	public void setRemark(java.lang.String remark){
		this.remark = remark;
	}
	/** Getter for the API-request quota. */
	@Column(name ="REQUEST_NUM",nullable=true,length=8)
	public Integer getRequestNum() {
		return requestNum;
	}
	public void setRequestNum(Integer requestNum) {
		this.requestNum = requestNum;
	}
	/** Getter for the text-template quota. */
	@Column(name ="TEXT_TEMPLATE_NUM",nullable=true,length=8)
	public Integer getTextTemplateNum() {
		return textTemplateNum;
	}
	public void setTextTemplateNum(Integer textTemplateNum) {
		this.textTemplateNum = textTemplateNum;
	}
	/** Getter for the rich-media (news) template quota. */
	@Column(name ="NEWS_TEMPLATE_NUM",nullable=true,length=8)
	public Integer getNewsTemplateNum() {
		return newsTemplateNum;
	}
	public void setNewsTemplateNum(Integer newsTemplateNum) {
		this.newsTemplateNum = newsTemplateNum;
	}
	/** Getter for the broadcast (mass message) quota. */
	@Column(name ="GROUP_SMS_NUM",nullable=true,length=8)
	public Integer getGroupSMSNum() {
		return groupSMSNum;
	}
	public void setGroupSMSNum(Integer groupSMSNum) {
		this.groupSMSNum = groupSMSNum;
	}
}
|
# Model spec for Group: declares its expected associations and required
# attributes via shoulda-matchers one-liners.
RSpec.describe Group, type: :model do
  it { should have_many(:workflowgroups) }
  it { should have_many(:workflows).through(:workflowgroups) }
  it { should have_many(:stages) }
  it { should validate_presence_of(:name) }
  it { should validate_presence_of(:contact_method) }
  it { should validate_presence_of(:contact_setting) }
end
|
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* <EMAIL>
*/
#include <stdbool.h>
#include <pthread.h>
#include <unistd.h>
#include <string.h>
#include <errno.h>
#include <stdint.h>
#include <inttypes.h>
#include <arpa/inet.h>
#include "tree.h"
#include "gcc_diag.h"
#include "dynamic_memory_check.h"
#include "assertions.h"
#include "log.h"
#include "msc.h"
#include "common_types.h"
#include "conversions.h"
#include "intertask_interface.h"
#include "mme_config.h"
#include "enum_string.h"
#include "mme_app_extern.h"
#include "mme_app_ue_context.h"
#include "mme_app_defs.h"
#include "s1ap_mme.h"
//------------------------------------------------------------------------------
/* Allocates a fresh, zero-initialized UE context: every id, flag and pointer
 * starts out 0/NULL. Returns NULL on allocation failure. */
ue_context_t *mme_create_new_ue_context (void)
{
  return calloc (1, sizeof (ue_context_t));
}
//------------------------------------------------------------------------------
/* Releases the heap-owned members of a ue_context_t without freeing the
 * struct itself. Only the two pending-PDN bstrings are owned here; the long
 * commented-out field list below is kept as documentation of members that are
 * plain values or managed elsewhere and therefore deliberately untouched. */
void mme_app_ue_context_free_content (ue_context_t * const mme_ue_context_p)
{
//  imsi64_t         imsi;
//  unsigned               imsi_auth:1;
//  enb_ue_s1ap_id_t       enb_ue_s1ap_id:24;
//  mme_ue_s1ap_id_t       mme_ue_s1ap_id;
//  uint32_t               ue_id;
//  uint8_t                nb_of_vectors;
//  eutran_vector_t       *vector_list;
//  eutran_vector_t       *vector_in_use;
//  unsigned               subscription_known:1;
//  uint8_t                msisdn[MSISDN_LENGTH+1];
//  uint8_t                msisdn_length;
//  mm_state_t             mm_state;
//  guti_t                 guti;
//  me_identity_t          me_identity;
//  ecgi_t                  e_utran_cgi;
//  time_t                 cell_age;
//  network_access_mode_t  access_mode;
//  apn_config_profile_t   apn_profile;
//  ard_t                  access_restriction_data;
//  subscriber_status_t    sub_status;
//  ambr_t                 subscribed_ambr;
//  ambr_t                 used_ambr;
//  rau_tau_timer_t        rau_tau_timer;
//if (mme_ue_context_p->ue_radio_capabilities) free_wrapper(mme_ue_context_p->ue_radio_capabilities);
// int                    ue_radio_cap_length;
//  teid_t                 mme_s11_teid;
//  teid_t                 sgw_s11_teid;
//  PAA_t                  paa;
//  char                   pending_pdn_connectivity_req_imsi[16];
//  uint8_t                pending_pdn_connectivity_req_imsi_length;
  /* The two bstrings below are the only heap allocations owned by the
   * context; bdestroy() tolerates NULL. */
  bdestroy(mme_ue_context_p->pending_pdn_connectivity_req_apn);
  bdestroy(mme_ue_context_p->pending_pdn_connectivity_req_pdn_addr);
//  int                    pending_pdn_connectivity_req_pti;
//  unsigned               pending_pdn_connectivity_req_ue_id;
//  network_qos_t          pending_pdn_connectivity_req_qos;
//  pco_flat_t             pending_pdn_connectivity_req_pco;
  // DO NOT FREE THE FOLLOWING POINTER, IT IS esm_proc_data_t*
//  void                  *pending_pdn_connectivity_req_proc_data;
//int                    pending_pdn_connectivity_req_request_type;
//ebi_t                  default_bearer_id;
//bearer_context_t       eps_bearers[BEARERS_PER_UE];
}
//------------------------------------------------------------------------------
/* Looks up a UE context by its eNB S1AP id key; returns NULL when absent.
 * The hashtable_ts_get status is ignored here: ue_context_p is initialized to
 * NULL and presumably only written on a successful lookup — confirm against
 * the hashtable implementation (the sibling lookups do check the rc). */
ue_context_t *
mme_ue_context_exists_enb_ue_s1ap_id (
  mme_ue_context_t * const mme_ue_context_p,
  const enb_s1ap_id_key_t enb_key)
{
  struct ue_context_s                    *ue_context_p = NULL;

  hashtable_ts_get (mme_ue_context_p->enb_ue_s1ap_id_ue_context_htbl, (const hash_key_t)enb_key, (void **)&ue_context_p);
  return ue_context_p;
}
//------------------------------------------------------------------------------
/* Resolves an MME UE S1AP id to its context via two hops: the id maps to an
 * eNB key in one table, and that key resolves to the context through the
 * eNB-key table. Returns NULL when the id is unknown. */
ue_context_t *
mme_ue_context_exists_mme_ue_s1ap_id (
  mme_ue_context_t * const mme_ue_context_p,
  const mme_ue_s1ap_id_t mme_ue_s1ap_id)
{
  void                                   *id = NULL;

  if (hashtable_ts_get (mme_ue_context_p->mme_ue_s1ap_id_ue_context_htbl,
                        (const hash_key_t)mme_ue_s1ap_id, (void **)&id) != HASH_TABLE_OK) {
    return NULL;
  }
  return mme_ue_context_exists_enb_ue_s1ap_id (mme_ue_context_p, (enb_s1ap_id_key_t)(uintptr_t)id);
}
//------------------------------------------------------------------------------
/* Resolves an IMSI to its UE context: the IMSI table yields an MME UE S1AP
 * id, which is then resolved through the id lookup above. NULL when absent. */
struct ue_context_s *
mme_ue_context_exists_imsi (
  mme_ue_context_t * const mme_ue_context_p,
  const imsi64_t imsi)
{
  void                                   *id = NULL;

  if (hashtable_ts_get (mme_ue_context_p->imsi_ue_context_htbl,
                        (const hash_key_t)imsi, (void **)&id) != HASH_TABLE_OK) {
    return NULL;
  }
  return mme_ue_context_exists_mme_ue_s1ap_id (mme_ue_context_p, (mme_ue_s1ap_id_t)(uintptr_t)id);
}
//------------------------------------------------------------------------------
/* Resolves an S11 tunnel endpoint id to its UE context via the S11 TEID
 * table and the MME UE S1AP id lookup. NULL when the TEID is unknown. */
struct ue_context_s *
mme_ue_context_exists_s11_teid (
  mme_ue_context_t * const mme_ue_context_p,
  const s11_teid_t teid)
{
  void                                   *id = NULL;

  if (hashtable_ts_get (mme_ue_context_p->tun11_ue_context_htbl,
                        (const hash_key_t)teid, (void **)&id) != HASH_TABLE_OK) {
    return NULL;
  }
  return mme_ue_context_exists_mme_ue_s1ap_id (mme_ue_context_p, (mme_ue_s1ap_id_t)(uintptr_t)id);
}
//------------------------------------------------------------------------------
/* Resolves a GUTI to its UE context. The GUTI table is an object-keyed
 * hashtable (keys compared by value over sizeof(guti_t)); a hit yields the
 * MME UE S1AP id, resolved through the id lookup above. NULL when absent. */
ue_context_t *
mme_ue_context_exists_guti (
  mme_ue_context_t * const mme_ue_context_p,
  const guti_t * const guti_p)
{
  void                                   *id = NULL;

  if (obj_hashtable_ts_get (mme_ue_context_p->guti_ue_context_htbl,
                            (const void *)guti_p, sizeof (*guti_p), (void **)&id) != HASH_TABLE_OK) {
    return NULL;
  }
  return mme_ue_context_exists_mme_ue_s1ap_id (mme_ue_context_p, (mme_ue_s1ap_id_t)(uintptr_t)id);
}
//------------------------------------------------------------------------------
/* Moves the contents of UE context src into dst (used below when a duplicate
 * eNB UE S1AP id is detected). Plain-value fields are copied; heap-owned
 * pointers (the pending-PDN APN/address bstrings and the ESM proc data) are
 * transferred and NULLed in src so a later free of src's content cannot
 * double-free them. The S1AP id/key fields are deliberately NOT copied —
 * see the field-name comments below. */
void mme_app_move_context (ue_context_t *dst, ue_context_t *src)
{
  OAILOG_FUNC_IN (LOG_MME_APP);
  if ((dst) && (src)) {
    dst->imsi                = src->imsi;
    dst->imsi_auth           = src->imsi_auth;
    //enb_s1ap_id_key
    //enb_ue_s1ap_id
    //mme_ue_s1ap_id
    dst->sctp_assoc_id_key   = src->sctp_assoc_id_key;
    dst->subscription_known  = src->subscription_known;
    memcpy((void *)dst->msisdn, (const void *)src->msisdn, sizeof(src->msisdn));
    /* Length moves with the buffer; zeroing src marks its msisdn as empty. */
    dst->msisdn_length       = src->msisdn_length;src->msisdn_length = 0;
    dst->mm_state            = src->mm_state;
    dst->is_guti_set         = src->is_guti_set;
    dst->guti                = src->guti;
    dst->me_identity         = src->me_identity;
    dst->e_utran_cgi         = src->e_utran_cgi;
    dst->cell_age            = src->cell_age;
    dst->access_mode         = src->access_mode;
    dst->apn_profile         = src->apn_profile;
    dst->access_restriction_data = src->access_restriction_data;
    dst->sub_status          = src->sub_status;
    dst->subscribed_ambr     = src->subscribed_ambr;
    dst->used_ambr           = src->used_ambr;
    dst->rau_tau_timer       = src->rau_tau_timer;
    dst->mme_s11_teid        = src->mme_s11_teid;
    dst->sgw_s11_teid        = src->sgw_s11_teid;
    memcpy((void *)dst->pending_pdn_connectivity_req_imsi, (const void *)src->pending_pdn_connectivity_req_imsi, sizeof(src->pending_pdn_connectivity_req_imsi));
    dst->pending_pdn_connectivity_req_imsi_length = src->pending_pdn_connectivity_req_imsi_length;
    /* bstring ownership transfer: dst takes the pointer, src is NULLed. */
    dst->pending_pdn_connectivity_req_apn = src->pending_pdn_connectivity_req_apn;
    src->pending_pdn_connectivity_req_apn = NULL;
    /* bstring ownership transfer (see above). */
    dst->pending_pdn_connectivity_req_pdn_addr = src->pending_pdn_connectivity_req_pdn_addr;
    src->pending_pdn_connectivity_req_pdn_addr = NULL;
    dst->pending_pdn_connectivity_req_pti = src->pending_pdn_connectivity_req_pti;
    dst->pending_pdn_connectivity_req_ue_id = src->pending_pdn_connectivity_req_ue_id;
    dst->pending_pdn_connectivity_req_qos = src->pending_pdn_connectivity_req_qos;
    dst->pending_pdn_connectivity_req_pco = src->pending_pdn_connectivity_req_pco;
    /* Aliased esm_proc_data_t pointer: moved, and src cleared so it is not
     * touched twice (it is owned by another layer — see free_content above). */
    dst->pending_pdn_connectivity_req_proc_data = src->pending_pdn_connectivity_req_proc_data;
    src->pending_pdn_connectivity_req_proc_data = NULL;
    dst->pending_pdn_connectivity_req_request_type= src->pending_pdn_connectivity_req_request_type;
    dst->default_bearer_id   = src->default_bearer_id;
    memcpy((void *)dst->eps_bearers, (const void *)src->eps_bearers, sizeof(bearer_context_t)*BEARERS_PER_UE);
    OAILOG_DEBUG (LOG_MME_APP,
           "mme_app_move_context("ENB_UE_S1AP_ID_FMT " <- " ENB_UE_S1AP_ID_FMT ") done\n",
           dst->enb_ue_s1ap_id, src->enb_ue_s1ap_id);
  }
  OAILOG_FUNC_OUT (LOG_MME_APP);
}
//------------------------------------------------------------------------------
// this is detected only while receiving an INITIAL UE message
// Resolves a duplicate eNB S1AP id binding detected while receiving an INITIAL
// UE message: mme_ue_s1ap_id is already bound to a different eNB key than the
// one just received. If is_remove_old is true, the previously registered UE
// context is dropped (its content first moved into the newly created context);
// otherwise the freshly registered context under enb_key is removed instead.
void
mme_ue_context_duplicate_enb_ue_s1ap_id_detected (
const enb_s1ap_id_key_t enb_key,
const mme_ue_s1ap_id_t mme_ue_s1ap_id,
const bool is_remove_old)
{
hashtable_rc_t h_rc = HASH_TABLE_OK;
void *id = NULL;
enb_ue_s1ap_id_t enb_ue_s1ap_id = 0;
enb_s1ap_id_key_t old_enb_key = 0;
OAILOG_FUNC_IN (LOG_MME_APP);
enb_ue_s1ap_id = MME_APP_ENB_S1AP_ID_KEY2ENB_S1AP_ID(enb_key);
// Without a valid MME UE S1AP id there is no binding to inspect.
if (INVALID_MME_UE_S1AP_ID == mme_ue_s1ap_id) {
OAILOG_ERROR (LOG_MME_APP,
"Error could not associate this enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " with mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
enb_ue_s1ap_id, mme_ue_s1ap_id);
OAILOG_FUNC_OUT (LOG_MME_APP);
}
// The mme_ue_s1ap_id table stores the eNB key itself (cast into the value
// pointer), not a context pointer — see the (uintptr_t) casts below.
h_rc = hashtable_ts_get (mme_app_desc.mme_ue_contexts.mme_ue_s1ap_id_ue_context_htbl, (const hash_key_t)mme_ue_s1ap_id, (void **)&id);
if (HASH_TABLE_OK == h_rc) {
old_enb_key = (enb_s1ap_id_key_t)(uintptr_t) id;
if (old_enb_key != enb_key) {
if (is_remove_old) {
// Keep the new context: rebind mme_ue_s1ap_id to the new eNB key, move
// the old context's content into the new one, then free the old content.
ue_context_t *old = NULL;
h_rc = hashtable_ts_remove (mme_app_desc.mme_ue_contexts.mme_ue_s1ap_id_ue_context_htbl, (const hash_key_t)mme_ue_s1ap_id, (void **)&id);
h_rc = hashtable_ts_insert (mme_app_desc.mme_ue_contexts.mme_ue_s1ap_id_ue_context_htbl, (const hash_key_t)mme_ue_s1ap_id, (void *)(uintptr_t)enb_key);
h_rc = hashtable_ts_remove (mme_app_desc.mme_ue_contexts.enb_ue_s1ap_id_ue_context_htbl, (const hash_key_t)old_enb_key, (void **)&old);
if (HASH_TABLE_OK == h_rc) {
ue_context_t *new = NULL;
h_rc = hashtable_ts_get (mme_app_desc.mme_ue_contexts.enb_ue_s1ap_id_ue_context_htbl, (const hash_key_t)enb_key, (void **)&new);
mme_app_move_context(new, old);
mme_app_ue_context_free_content(old);
OAILOG_DEBUG (LOG_MME_APP,
"Removed old UE context enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
MME_APP_ENB_S1AP_ID_KEY2ENB_S1AP_ID(old_enb_key), mme_ue_s1ap_id);
}
} else {
// Keep the old context: drop and free the newly registered one.
ue_context_t *new = NULL;
h_rc = hashtable_ts_remove (mme_app_desc.mme_ue_contexts.enb_ue_s1ap_id_ue_context_htbl, (const hash_key_t)enb_key, (void **)&new);
if (HASH_TABLE_OK == h_rc) {
mme_app_ue_context_free_content(new);
OAILOG_DEBUG (LOG_MME_APP,
"Removed new UE context enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
enb_ue_s1ap_id, mme_ue_s1ap_id);
}
}
} else {
// Same eNB key already bound: nothing to reconcile.
OAILOG_DEBUG (LOG_MME_APP,
"No duplicated context found enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " with mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
enb_ue_s1ap_id, mme_ue_s1ap_id);
}
} else {
OAILOG_ERROR (LOG_MME_APP,
"Error could find this mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
mme_ue_s1ap_id);
}
OAILOG_FUNC_OUT (LOG_MME_APP);
}
//------------------------------------------------------------------------------
// Records a first-time mme_ue_s1ap_id <-> enb_key association once S1AP has
// allocated the MME UE id: looks the context up by eNB key and, only if the
// context has no MME UE id yet, inserts the binding into
// mme_ue_s1ap_id_ue_context_htbl and notifies the S1AP task.
// Returns RETURNok on success, RETURNerror otherwise.
int
mme_ue_context_notified_new_ue_s1ap_id_association (
const enb_s1ap_id_key_t enb_key,
const mme_ue_s1ap_id_t mme_ue_s1ap_id)
{
hashtable_rc_t h_rc = HASH_TABLE_OK;
ue_context_t *ue_context_p = NULL;
enb_ue_s1ap_id_t enb_ue_s1ap_id = 0;
OAILOG_FUNC_IN (LOG_MME_APP);
enb_ue_s1ap_id = MME_APP_ENB_S1AP_ID_KEY2ENB_S1AP_ID(enb_key);
if (INVALID_MME_UE_S1AP_ID == mme_ue_s1ap_id) {
OAILOG_ERROR (LOG_MME_APP,
"Error could not associate this enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " with mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
enb_ue_s1ap_id, mme_ue_s1ap_id);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNerror);
}
ue_context_p = mme_ue_context_exists_enb_ue_s1ap_id (&mme_app_desc.mme_ue_contexts, enb_key);
if (ue_context_p) {
if (ue_context_p->enb_s1ap_id_key == enb_key) { // useless
// Only a fresh association is accepted here; a change of an already set
// mme_ue_s1ap_id falls through to the error path below.
if (INVALID_MME_UE_S1AP_ID == ue_context_p->mme_ue_s1ap_id) {
// new insertion of mme_ue_s1ap_id, not a change in the id
h_rc = hashtable_ts_insert (mme_app_desc.mme_ue_contexts.mme_ue_s1ap_id_ue_context_htbl, (const hash_key_t)mme_ue_s1ap_id, (void *)(uintptr_t)enb_key);
if (HASH_TABLE_OK == h_rc) {
ue_context_p->mme_ue_s1ap_id = mme_ue_s1ap_id;
OAILOG_DEBUG (LOG_MME_APP,
"Associated this enb_ue_s1ap_ue_id " ENB_UE_S1AP_ID_FMT " with mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id);
s1ap_notified_new_ue_mme_s1ap_id_association (ue_context_p->sctp_assoc_id_key, enb_ue_s1ap_id, mme_ue_s1ap_id);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNok);
}
}
}
}
OAILOG_ERROR (LOG_MME_APP,
"Error could not associate this enb_ue_s1ap_ue_id " ENB_UE_S1AP_ID_FMT " with mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
enb_ue_s1ap_id, mme_ue_s1ap_id);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNerror);
}
//------------------------------------------------------------------------------
// Updates all lookup-table keys of a UE context in one place: the
// mme_ue_s1ap_id, IMSI, S11 TEID and GUTI tables are re-keyed to the new
// values and the corresponding fields of ue_context_p are updated.
// The enb_s1ap_id_key itself must not change here (asserted below).
// guti_p is never NULL per the caller contract stated on the parameter.
void
mme_ue_context_update_coll_keys (
mme_ue_context_t * const mme_ue_context_p,
ue_context_t * const ue_context_p,
const enb_s1ap_id_key_t enb_s1ap_id_key,
const mme_ue_s1ap_id_t mme_ue_s1ap_id,
const imsi64_t imsi,
const s11_teid_t mme_s11_teid,
const guti_t * const guti_p) // never NULL, if none put &ue_context_p->guti
{
hashtable_rc_t h_rc = HASH_TABLE_OK;
void *id = NULL;
OAILOG_FUNC_IN(LOG_MME_APP);
OAILOG_TRACE (LOG_MME_APP, "Update ue context.enb_ue_s1ap_id " ENB_UE_S1AP_ID_FMT " ue context.mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " ue context.IMSI " IMSI_64_FMT " ue context.GUTI "GUTI_FMT"\n",
ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id, ue_context_p->imsi, GUTI_ARG(&ue_context_p->guti));
OAILOG_TRACE (LOG_MME_APP, "Update ue context %p enb_ue_s1ap_id " ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " IMSI " IMSI_64_FMT " GUTI " GUTI_FMT "\n",
ue_context_p, ue_context_p->enb_ue_s1ap_id, mme_ue_s1ap_id, imsi, GUTI_ARG(guti_p));
AssertFatal(ue_context_p->enb_s1ap_id_key == enb_s1ap_id_key,
"Mismatch in UE context enb_s1ap_id_key "MME_APP_ENB_S1AP_ID_KEY_FORMAT"/"MME_APP_ENB_S1AP_ID_KEY_FORMAT"\n",
ue_context_p->enb_s1ap_id_key, enb_s1ap_id_key);
if (INVALID_MME_UE_S1AP_ID != mme_ue_s1ap_id) {
// mme_ue_s1ap_id changed: re-key the mme_ue_s1ap_id table (value is the
// eNB key cast into the pointer slot) and refresh the dependent tables.
if (ue_context_p->mme_ue_s1ap_id != mme_ue_s1ap_id) {
// remove the stale binding, then insert under the new id
h_rc = hashtable_ts_remove (mme_ue_context_p->mme_ue_s1ap_id_ue_context_htbl, (const hash_key_t)ue_context_p->mme_ue_s1ap_id, (void **)&id);
h_rc = hashtable_ts_insert (mme_ue_context_p->mme_ue_s1ap_id_ue_context_htbl, (const hash_key_t)mme_ue_s1ap_id, (void *)(uintptr_t)enb_s1ap_id_key);
if (HASH_TABLE_OK != h_rc) {
OAILOG_ERROR (LOG_MME_APP,
"Error could not update this ue context %p enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " %s\n",
ue_context_p, ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id, hashtable_rc_code2string(h_rc));
}
ue_context_p->mme_ue_s1ap_id = mme_ue_s1ap_id;
}
// The IMSI, S11 and GUTI tables all map to mme_ue_s1ap_id, so their
// values are refreshed whenever a valid mme_ue_s1ap_id is supplied.
if (INVALID_IMSI64 != ue_context_p->imsi) {
h_rc = hashtable_ts_remove (mme_ue_context_p->imsi_ue_context_htbl, (const hash_key_t)ue_context_p->imsi, (void **)&id);
h_rc = hashtable_ts_insert (mme_ue_context_p->imsi_ue_context_htbl, (const hash_key_t)ue_context_p->imsi, (void *)(uintptr_t)mme_ue_s1ap_id);
}
h_rc = hashtable_ts_remove (mme_ue_context_p->tun11_ue_context_htbl, (const hash_key_t)ue_context_p->mme_s11_teid, (void **)&id);
h_rc = hashtable_ts_insert (mme_ue_context_p->tun11_ue_context_htbl, (const hash_key_t)ue_context_p->mme_s11_teid, (void *)(uintptr_t)mme_ue_s1ap_id);
h_rc = obj_hashtable_ts_remove (mme_ue_context_p->guti_ue_context_htbl, (const void *const)&ue_context_p->guti, sizeof (ue_context_p->guti), (void **)&id);
if (INVALID_MME_UE_S1AP_ID != mme_ue_s1ap_id) {
h_rc = obj_hashtable_ts_insert (mme_ue_context_p->guti_ue_context_htbl, (const void *const)&ue_context_p->guti, sizeof (ue_context_p->guti), (void *)(uintptr_t)mme_ue_s1ap_id);
}
}
// IMSI changed (or the id it maps to changed): re-key the IMSI table.
if ((ue_context_p->imsi != imsi)
|| (ue_context_p->mme_ue_s1ap_id != mme_ue_s1ap_id)) {
h_rc = hashtable_ts_remove (mme_ue_context_p->imsi_ue_context_htbl, (const hash_key_t)ue_context_p->imsi, (void **)&id);
if (INVALID_MME_UE_S1AP_ID != mme_ue_s1ap_id) {
h_rc = hashtable_ts_insert (mme_ue_context_p->imsi_ue_context_htbl, (const hash_key_t)imsi, (void *)(uintptr_t)mme_ue_s1ap_id);
} else {
h_rc = HASH_TABLE_KEY_NOT_EXISTS;
}
if (HASH_TABLE_OK != h_rc) {
OAILOG_TRACE (LOG_MME_APP,
"Error could not update this ue context %p enb_ue_s1ap_ue_id " ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " imsi " IMSI_64_FMT ": %s\n",
ue_context_p, ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id, imsi, hashtable_rc_code2string(h_rc));
}
ue_context_p->imsi = imsi;
}
// S11 TEID changed (or the id it maps to changed): re-key the S11 table.
if ((ue_context_p->mme_s11_teid != mme_s11_teid)
|| (ue_context_p->mme_ue_s1ap_id != mme_ue_s1ap_id)) {
h_rc = hashtable_ts_remove (mme_ue_context_p->tun11_ue_context_htbl, (const hash_key_t)ue_context_p->mme_s11_teid, (void **)&id);
if (INVALID_MME_UE_S1AP_ID != mme_ue_s1ap_id) {
h_rc = hashtable_ts_insert (mme_ue_context_p->tun11_ue_context_htbl, (const hash_key_t)mme_s11_teid, (void *)(uintptr_t)mme_ue_s1ap_id);
} else {
h_rc = HASH_TABLE_KEY_NOT_EXISTS;
}
if (HASH_TABLE_OK != h_rc) {
OAILOG_TRACE (LOG_MME_APP,
"Error could not update this ue context %p enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " mme_s11_teid " TEID_FMT " : %s\n",
ue_context_p, ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id, mme_s11_teid, hashtable_rc_code2string(h_rc));
}
ue_context_p->mme_s11_teid = mme_s11_teid;
}
// GUTI changed in any of its compared components (or the id it maps to
// changed): re-key the GUTI object table.
if (guti_p) {
if ((guti_p->gummei.mme_code != ue_context_p->guti.gummei.mme_code)
|| (guti_p->gummei.mme_gid != ue_context_p->guti.gummei.mme_gid)
|| (guti_p->m_tmsi != ue_context_p->guti.m_tmsi)
|| (guti_p->gummei.plmn.mcc_digit1 != ue_context_p->guti.gummei.plmn.mcc_digit1)
|| (guti_p->gummei.plmn.mcc_digit2 != ue_context_p->guti.gummei.plmn.mcc_digit2)
|| (guti_p->gummei.plmn.mcc_digit3 != ue_context_p->guti.gummei.plmn.mcc_digit3)
|| (ue_context_p->mme_ue_s1ap_id != mme_ue_s1ap_id)) {
// may check guti_p with a kind of instanceof()?
h_rc = obj_hashtable_ts_remove (mme_ue_context_p->guti_ue_context_htbl, &ue_context_p->guti, sizeof (*guti_p), (void **)&id);
if (INVALID_MME_UE_S1AP_ID != mme_ue_s1ap_id) {
h_rc = obj_hashtable_ts_insert (mme_ue_context_p->guti_ue_context_htbl, (const void *const)guti_p, sizeof (*guti_p), (void *)(uintptr_t)mme_ue_s1ap_id);
} else {
h_rc = HASH_TABLE_KEY_NOT_EXISTS;
}
if (HASH_TABLE_OK != h_rc) {
OAILOG_TRACE (LOG_MME_APP, "Error could not update this ue context %p enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " guti " GUTI_FMT " %s\n",
ue_context_p, ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id, GUTI_ARG(guti_p), hashtable_rc_code2string(h_rc));
}
memcpy(&ue_context_p->guti , guti_p, sizeof(ue_context_p->guti));
}
}
OAILOG_FUNC_OUT(LOG_MME_APP);
}
//------------------------------------------------------------------------------
// Logs the content of every UE-context lookup table (IMSI, S11 TEID,
// mme_ue_s1ap_id, enb_ue_s1ap_id, GUTI) at TRACE level. Debug aid only.
void mme_ue_context_dump_coll_keys(void)
{
// Scratch bstring reused for each dump; truncated to empty between tables.
bstring tmp = bfromcstr(" ");
btrunc(tmp, 0);
hashtable_ts_dump_content (mme_app_desc.mme_ue_contexts.imsi_ue_context_htbl, tmp);
OAILOG_TRACE (LOG_MME_APP,"imsi_ue_context_htbl %s\n", bdata(tmp));
btrunc(tmp, 0);
hashtable_ts_dump_content (mme_app_desc.mme_ue_contexts.tun11_ue_context_htbl, tmp);
OAILOG_TRACE (LOG_MME_APP,"tun11_ue_context_htbl %s\n", bdata(tmp));
btrunc(tmp, 0);
hashtable_ts_dump_content (mme_app_desc.mme_ue_contexts.mme_ue_s1ap_id_ue_context_htbl, tmp);
OAILOG_TRACE (LOG_MME_APP,"mme_ue_s1ap_id_ue_context_htbl %s\n", bdata(tmp));
btrunc(tmp, 0);
hashtable_ts_dump_content (mme_app_desc.mme_ue_contexts.enb_ue_s1ap_id_ue_context_htbl, tmp);
OAILOG_TRACE (LOG_MME_APP,"enb_ue_s1ap_id_ue_context_htbl %s\n", bdata(tmp));
btrunc(tmp, 0);
obj_hashtable_ts_dump_content (mme_app_desc.mme_ue_contexts.guti_ue_context_htbl, tmp);
OAILOG_TRACE (LOG_MME_APP,"guti_ue_context_htbl %s", bdata(tmp));
// Fix: the scratch bstring was previously leaked on every invocation.
bdestroy(tmp);
}
//------------------------------------------------------------------------------
// Registers a UE context in the lookup tables. The eNB S1AP id key is
// mandatory; the mme_ue_s1ap_id / IMSI / S11 TEID / GUTI bindings are only
// inserted when those fields are set. Returns RETURNerror on any duplicate
// key or insertion failure — note that earlier insertions are NOT rolled
// back in that case. On success the UE statistics counters are bumped.
int
mme_insert_ue_context (
mme_ue_context_t * const mme_ue_context_p,
const struct ue_context_s *const ue_context_p)
{
hashtable_rc_t h_rc = HASH_TABLE_OK;
OAILOG_FUNC_IN (LOG_MME_APP);
DevAssert (mme_ue_context_p );
DevAssert (ue_context_p );
// filled ENB UE S1AP ID
h_rc = hashtable_ts_is_key_exists (mme_ue_context_p->enb_ue_s1ap_id_ue_context_htbl, (const hash_key_t)ue_context_p->enb_s1ap_id_key);
if (HASH_TABLE_OK == h_rc) {
OAILOG_DEBUG (LOG_MME_APP, "This ue context %p already exists enb_ue_s1ap_id " ENB_UE_S1AP_ID_FMT "\n",
ue_context_p, ue_context_p->enb_ue_s1ap_id);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNerror);
}
// The eNB table is the only one storing the context pointer itself; the
// other tables below store id values cast into the pointer slot.
h_rc = hashtable_ts_insert (mme_ue_context_p->enb_ue_s1ap_id_ue_context_htbl,
(const hash_key_t)ue_context_p->enb_s1ap_id_key,
(void *)ue_context_p);
if (HASH_TABLE_OK != h_rc) {
OAILOG_DEBUG (LOG_MME_APP, "Error could not register this ue context %p enb_ue_s1ap_id " ENB_UE_S1AP_ID_FMT " ue_id 0x%x\n",
ue_context_p, ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNerror);
}
if ( INVALID_MME_UE_S1AP_ID != ue_context_p->mme_ue_s1ap_id) {
h_rc = hashtable_ts_is_key_exists (mme_ue_context_p->mme_ue_s1ap_id_ue_context_htbl, (const hash_key_t)ue_context_p->mme_ue_s1ap_id);
if (HASH_TABLE_OK == h_rc) {
OAILOG_DEBUG (LOG_MME_APP, "This ue context %p already exists mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
ue_context_p, ue_context_p->mme_ue_s1ap_id);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNerror);
}
//OAI_GCC_DIAG_OFF(discarded-qualifiers);
// maps mme_ue_s1ap_id -> eNB key
h_rc = hashtable_ts_insert (mme_ue_context_p->mme_ue_s1ap_id_ue_context_htbl,
(const hash_key_t)ue_context_p->mme_ue_s1ap_id,
(void *)((uintptr_t)ue_context_p->enb_s1ap_id_key));
//OAI_GCC_DIAG_ON(discarded-qualifiers);
if (HASH_TABLE_OK != h_rc) {
OAILOG_DEBUG (LOG_MME_APP, "Error could not register this ue context %p mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
ue_context_p, ue_context_p->mme_ue_s1ap_id);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNerror);
}
// filled IMSI
if (ue_context_p->imsi) {
// maps IMSI -> mme_ue_s1ap_id
h_rc = hashtable_ts_insert (mme_ue_context_p->imsi_ue_context_htbl,
(const hash_key_t)ue_context_p->imsi,
(void *)((uintptr_t)ue_context_p->mme_ue_s1ap_id));
if (HASH_TABLE_OK != h_rc) {
OAILOG_DEBUG (LOG_MME_APP, "Error could not register this ue context %p mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " imsi %" SCNu64 "\n",
ue_context_p, ue_context_p->mme_ue_s1ap_id, ue_context_p->imsi);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNerror);
}
}
// filled S11 tun id
if (ue_context_p->mme_s11_teid) {
// maps S11 TEID -> mme_ue_s1ap_id
h_rc = hashtable_ts_insert (mme_ue_context_p->tun11_ue_context_htbl,
(const hash_key_t)ue_context_p->mme_s11_teid,
(void *)((uintptr_t)ue_context_p->mme_ue_s1ap_id));
if (HASH_TABLE_OK != h_rc) {
OAILOG_DEBUG (LOG_MME_APP, "Error could not register this ue context %p mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " mme_s11_teid " TEID_FMT "\n",
ue_context_p, ue_context_p->mme_ue_s1ap_id, ue_context_p->mme_s11_teid);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNerror);
}
}
// filled guti
if ((0 != ue_context_p->guti.gummei.mme_code) || (0 != ue_context_p->guti.gummei.mme_gid) || (0 != ue_context_p->guti.m_tmsi) || (0 != ue_context_p->guti.gummei.plmn.mcc_digit1) || // MCC 000 does not exist in ITU table
(0 != ue_context_p->guti.gummei.plmn.mcc_digit2)
|| (0 != ue_context_p->guti.gummei.plmn.mcc_digit3)) {
// maps GUTI (by value) -> mme_ue_s1ap_id
h_rc = obj_hashtable_ts_insert (mme_ue_context_p->guti_ue_context_htbl,
(const void *const)&ue_context_p->guti,
sizeof (ue_context_p->guti),
(void *)((uintptr_t)ue_context_p->mme_ue_s1ap_id));
if (HASH_TABLE_OK != h_rc) {
OAILOG_DEBUG (LOG_MME_APP, "Error could not register this ue context %p mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " guti "GUTI_FMT"\n",
ue_context_p, ue_context_p->mme_ue_s1ap_id, GUTI_ARG(&ue_context_p->guti));
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNerror);
}
}
}
/*
* Updating statistics
*/
__sync_fetch_and_add (&mme_ue_context_p->nb_ue_managed, 1);
__sync_fetch_and_add (&mme_ue_context_p->nb_ue_since_last_stat, 1);
OAILOG_FUNC_RETURN (LOG_MME_APP, RETURNok);
}
//------------------------------------------------------------------------------
// Bookkeeping hook invoked when a UE context release completes: decrements
// the managed-UE statistics counters. Resource freeing is still TODO here.
void mme_notify_ue_context_released (
mme_ue_context_t * const mme_ue_context_p,
struct ue_context_s *ue_context_p)
{
OAILOG_FUNC_IN (LOG_MME_APP);
DevAssert (mme_ue_context_p);
DevAssert (ue_context_p);
// Statistics update: one UE fewer since the last report, one fewer managed.
__sync_fetch_and_sub (&mme_ue_context_p->nb_ue_since_last_stat, 1);
__sync_fetch_and_sub (&mme_ue_context_p->nb_ue_managed, 1);
// TODO HERE free resources
OAILOG_FUNC_OUT (LOG_MME_APP);
}
//------------------------------------------------------------------------------
// Fully removes a UE context: unbinds it from every lookup table it may be
// registered in (IMSI, mme_ue_s1ap_id, S11 TEID, GUTI, eNB S1AP id), updates
// statistics, then frees the context content and the context itself.
// ue_context_p is invalid after this call.
void mme_remove_ue_context (
mme_ue_context_t * const mme_ue_context_p,
struct ue_context_s *ue_context_p)
{
unsigned int *id = NULL;
hashtable_rc_t hash_rc = HASH_TABLE_OK;
OAILOG_FUNC_IN (LOG_MME_APP);
DevAssert (mme_ue_context_p );
DevAssert (ue_context_p );
/*
* Updating statistics
*/
__sync_fetch_and_sub (&mme_ue_context_p->nb_ue_managed, 1);
__sync_fetch_and_sub (&mme_ue_context_p->nb_ue_since_last_stat, 1);
if (ue_context_p->imsi) {
hash_rc = hashtable_ts_remove (mme_ue_context_p->imsi_ue_context_htbl, (const hash_key_t)ue_context_p->imsi, (void **)&id);
if (HASH_TABLE_OK != hash_rc)
OAILOG_DEBUG(LOG_MME_APP, "UE context enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT ", IMSI %" SCNu64 " not in IMSI collection",
ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id, ue_context_p->imsi);
}
// filled NAS UE ID
if (INVALID_MME_UE_S1AP_ID != ue_context_p->mme_ue_s1ap_id) {
hash_rc = hashtable_ts_remove (mme_ue_context_p->mme_ue_s1ap_id_ue_context_htbl, (const hash_key_t)ue_context_p->mme_ue_s1ap_id, (void **)&id);
if (HASH_TABLE_OK != hash_rc)
OAILOG_DEBUG(LOG_MME_APP, "UE context enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT ", mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT " not in MME UE S1AP ID collection",
ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id);
}
// filled S11 tun id
if (ue_context_p->mme_s11_teid) {
hash_rc = hashtable_ts_remove (mme_ue_context_p->tun11_ue_context_htbl, (const hash_key_t)ue_context_p->mme_s11_teid, (void **)&id);
if (HASH_TABLE_OK != hash_rc)
OAILOG_DEBUG(LOG_MME_APP, "UE context enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT ", MME S11 TEID " TEID_FMT " not in S11 collection",
ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id, ue_context_p->mme_s11_teid);
}
// filled guti
if ((ue_context_p->guti.gummei.mme_code) || (ue_context_p->guti.gummei.mme_gid) || (ue_context_p->guti.m_tmsi) ||
(ue_context_p->guti.gummei.plmn.mcc_digit1) || (ue_context_p->guti.gummei.plmn.mcc_digit2) || (ue_context_p->guti.gummei.plmn.mcc_digit3)) { // MCC 000 does not exist in ITU table
hash_rc = obj_hashtable_ts_remove (mme_ue_context_p->guti_ue_context_htbl, (const void *const)&ue_context_p->guti, sizeof (ue_context_p->guti), (void **)&id);
if (HASH_TABLE_OK != hash_rc)
OAILOG_DEBUG(LOG_MME_APP, "UE context enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT ", GUTI not in GUTI collection",
ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id);
}
// NOTE(review): this remove receives &ue_context_p itself as out pointer —
// the eNB table stores the context pointer, so the value written back is
// the same context; confirm the hashtable leaves it untouched on failure.
hash_rc = hashtable_ts_remove (mme_ue_context_p->enb_ue_s1ap_id_ue_context_htbl, (const hash_key_t)ue_context_p->enb_s1ap_id_key, (void **)&ue_context_p);
if (HASH_TABLE_OK != hash_rc)
OAILOG_DEBUG(LOG_MME_APP, "UE context enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT ", ENB_UE_S1AP_ID not ENB_UE_S1AP_ID collection",
ue_context_p->enb_ue_s1ap_id, ue_context_p->mme_ue_s1ap_id);
mme_app_ue_context_free_content(ue_context_p);
free_wrapper (ue_context_p);
OAILOG_FUNC_OUT (LOG_MME_APP);
}
//------------------------------------------------------------------------------
// Hashtable-apply callback that pretty-prints one UE context to the debug
// log: identity (IMSI/GUTI), S1AP ids, TEIDs, serving cell, and — when the
// subscription is known — access restrictions, AMBR, APN profile and the
// active bearer list. Returns false for a valid context, true when the
// entry is NULL (return value consumed by the hashtable iteration).
bool
mme_app_dump_ue_context (
const hash_key_t keyP,
void *const ue_context_pP,
void *unused_param_pP,
void** unused_result_pP)
//------------------------------------------------------------------------------
{
struct ue_context_s *const context_p = (struct ue_context_s *)ue_context_pP;
uint8_t j = 0;
OAILOG_DEBUG (LOG_MME_APP, "-----------------------UE context %p --------------------\n", ue_context_pP);
if (context_p) {
OAILOG_DEBUG (LOG_MME_APP, " - IMSI ...........: %" IMSI_64_FMT "\n", context_p->imsi);
OAILOG_DEBUG (LOG_MME_APP, " | m_tmsi | mmec | mmegid | mcc | mnc |\n");
OAILOG_DEBUG (LOG_MME_APP, " - GUTI............: | %08x | %02x | %04x | %03u | %03u |\n", context_p->guti.m_tmsi, context_p->guti.gummei.mme_code, context_p->guti.gummei.mme_gid,
/*
* TODO check if two or three digits MNC...
*/
context_p->guti.gummei.plmn.mcc_digit3 * 100 +
context_p->guti.gummei.plmn.mcc_digit2 * 10 + context_p->guti.gummei.plmn.mcc_digit1,
context_p->guti.gummei.plmn.mnc_digit3 * 100 + context_p->guti.gummei.plmn.mnc_digit2 * 10 + context_p->guti.gummei.plmn.mnc_digit1);
OAILOG_DEBUG (LOG_MME_APP, " - Authenticated ..: %s\n", (context_p->imsi_auth == IMSI_UNAUTHENTICATED) ? "FALSE" : "TRUE");
OAILOG_DEBUG (LOG_MME_APP, " - eNB UE s1ap ID .: %08x\n", context_p->enb_ue_s1ap_id);
OAILOG_DEBUG (LOG_MME_APP, " - MME UE s1ap ID .: %08x\n", context_p->mme_ue_s1ap_id);
OAILOG_DEBUG (LOG_MME_APP, " - MME S11 TEID ...: %08x\n", context_p->mme_s11_teid);
OAILOG_DEBUG (LOG_MME_APP, " - SGW S11 TEID ...: %08x\n", context_p->sgw_s11_teid);
OAILOG_DEBUG (LOG_MME_APP, " | mcc | mnc | cell identity |\n");
OAILOG_DEBUG (LOG_MME_APP, " - E-UTRAN CGI ....: | %03u | %03u | %05x.%02x |\n",
context_p->e_utran_cgi.plmn.mcc_digit3 * 100 +
context_p->e_utran_cgi.plmn.mcc_digit2 * 10 +
context_p->e_utran_cgi.plmn.mcc_digit1,
context_p->e_utran_cgi.plmn.mnc_digit3 * 100 + context_p->e_utran_cgi.plmn.mnc_digit2 * 10 + context_p->e_utran_cgi.plmn.mnc_digit1,
context_p->e_utran_cgi.cell_identity.enb_id, context_p->e_utran_cgi.cell_identity.cell_id);
/*
* Ctime return a \n in the string
*/
OAILOG_DEBUG (LOG_MME_APP, " - Last acquired ..: %s", ctime (&context_p->cell_age));
/*
* Display UE info only if we know them
*/
if (SUBSCRIPTION_KNOWN == context_p->subscription_known) {
OAILOG_DEBUG (LOG_MME_APP, " - Status .........: %s\n", (context_p->sub_status == SS_SERVICE_GRANTED) ? "Granted" : "Barred");
// Renders one access-restriction bit as 'X' (restricted) or 'O' (allowed).
#define DISPLAY_BIT_MASK_PRESENT(mASK) \
((context_p->access_restriction_data & mASK) ? 'X' : 'O')
OAILOG_DEBUG (LOG_MME_APP, " (O = allowed, X = !O) |UTRAN|GERAN|GAN|HSDPA EVO|E_UTRAN|HO TO NO 3GPP|\n");
OAILOG_DEBUG (LOG_MME_APP,
" - Access restriction | %c | %c | %c | %c | %c | %c |\n",
DISPLAY_BIT_MASK_PRESENT (ARD_UTRAN_NOT_ALLOWED),
DISPLAY_BIT_MASK_PRESENT (ARD_GERAN_NOT_ALLOWED),
DISPLAY_BIT_MASK_PRESENT (ARD_GAN_NOT_ALLOWED), DISPLAY_BIT_MASK_PRESENT (ARD_I_HSDPA_EVO_NOT_ALLOWED), DISPLAY_BIT_MASK_PRESENT (ARD_E_UTRAN_NOT_ALLOWED), DISPLAY_BIT_MASK_PRESENT (ARD_HO_TO_NON_3GPP_NOT_ALLOWED));
OAILOG_DEBUG (LOG_MME_APP, " - Access Mode ....: %s\n", ACCESS_MODE_TO_STRING (context_p->access_mode));
OAILOG_DEBUG (LOG_MME_APP, " - MSISDN .........: %-*s\n", MSISDN_LENGTH, context_p->msisdn);
OAILOG_DEBUG (LOG_MME_APP, " - RAU/TAU timer ..: %u\n", context_p->rau_tau_timer);
OAILOG_DEBUG (LOG_MME_APP, " - IMEISV .........: %*s\n", IMEISV_DIGITS_MAX, context_p->me_identity.imeisv);
OAILOG_DEBUG (LOG_MME_APP, " - AMBR (bits/s) ( Downlink | Uplink )\n");
OAILOG_DEBUG (LOG_MME_APP, " Subscribed ...: (%010" PRIu64 "|%010" PRIu64 ")\n", context_p->subscribed_ambr.br_dl, context_p->subscribed_ambr.br_ul);
OAILOG_DEBUG (LOG_MME_APP, " Allocated ....: (%010" PRIu64 "|%010" PRIu64 ")\n", context_p->used_ambr.br_dl, context_p->used_ambr.br_ul);
OAILOG_DEBUG (LOG_MME_APP, " - PDN List:\n");
for (j = 0; j < context_p->apn_profile.nb_apns; j++) {
struct apn_configuration_s *apn_config_p;
apn_config_p = &context_p->apn_profile.apn_configuration[j];
/*
* Default APN ?
*/
OAILOG_DEBUG (LOG_MME_APP, " - Default APN ...: %s\n", (apn_config_p->context_identifier == context_p->apn_profile.context_identifier)
? "TRUE" : "FALSE");
OAILOG_DEBUG (LOG_MME_APP, " - APN ...........: %s\n", apn_config_p->service_selection);
OAILOG_DEBUG (LOG_MME_APP, " - AMBR (bits/s) ( Downlink | Uplink )\n");
OAILOG_DEBUG (LOG_MME_APP, " (%010" PRIu64 "|%010" PRIu64 ")\n", apn_config_p->ambr.br_dl, apn_config_p->ambr.br_ul);
OAILOG_DEBUG (LOG_MME_APP, " - PDN type ......: %s\n", PDN_TYPE_TO_STRING (apn_config_p->pdn_type));
OAILOG_DEBUG (LOG_MME_APP, " - QOS\n");
OAILOG_DEBUG (LOG_MME_APP, " QCI .........: %u\n", apn_config_p->subscribed_qos.qci);
OAILOG_DEBUG (LOG_MME_APP, " Prio level ..: %u\n", apn_config_p->subscribed_qos.allocation_retention_priority.priority_level);
OAILOG_DEBUG (LOG_MME_APP, " Pre-emp vul .: %s\n", (apn_config_p->subscribed_qos.allocation_retention_priority.pre_emp_vulnerability == PRE_EMPTION_VULNERABILITY_ENABLED) ? "ENABLED" : "DISABLED");
OAILOG_DEBUG (LOG_MME_APP, " Pre-emp cap .: %s\n", (apn_config_p->subscribed_qos.allocation_retention_priority.pre_emp_capability == PRE_EMPTION_CAPABILITY_ENABLED) ? "ENABLED" : "DISABLED");
if (apn_config_p->nb_ip_address == 0) {
OAILOG_DEBUG (LOG_MME_APP, " IP addr .....: Dynamic allocation\n");
} else {
int i;
OAILOG_DEBUG (LOG_MME_APP, " IP addresses :\n");
for (i = 0; i < apn_config_p->nb_ip_address; i++) {
if (apn_config_p->ip_address[i].pdn_type == IPv4) {
OAILOG_DEBUG (LOG_MME_APP, " [" IPV4_ADDR "]\n", IPV4_ADDR_DISPLAY_8 (apn_config_p->ip_address[i].address.ipv4_address));
} else {
char ipv6[40];
inet_ntop (AF_INET6, apn_config_p->ip_address[i].address.ipv6_address, ipv6, 40);
OAILOG_DEBUG (LOG_MME_APP, " [%s]\n", ipv6);
}
}
}
OAILOG_DEBUG (LOG_MME_APP, "\n");
}
OAILOG_DEBUG (LOG_MME_APP, " - Bearer List:\n");
for (j = 0; j < BEARERS_PER_UE; j++) {
bearer_context_t *bearer_context_p;
bearer_context_p = &context_p->eps_bearers[j];
// Only slots with a non-zero S-GW TEID are considered active bearers.
if (bearer_context_p->s_gw_teid != 0) {
OAILOG_DEBUG (LOG_MME_APP, " Bearer id .......: %02u\n", j);
OAILOG_DEBUG (LOG_MME_APP, " S-GW TEID (UP)...: %08x\n", bearer_context_p->s_gw_teid);
OAILOG_DEBUG (LOG_MME_APP, " P-GW TEID (UP)...: %08x\n", bearer_context_p->p_gw_teid);
OAILOG_DEBUG (LOG_MME_APP, " QCI .............: %u\n", bearer_context_p->qci);
OAILOG_DEBUG (LOG_MME_APP, " Priority level ..: %u\n", bearer_context_p->prio_level);
OAILOG_DEBUG (LOG_MME_APP, " Pre-emp vul .....: %s\n", (bearer_context_p->pre_emp_vulnerability == PRE_EMPTION_VULNERABILITY_ENABLED) ? "ENABLED" : "DISABLED");
OAILOG_DEBUG (LOG_MME_APP, " Pre-emp cap .....: %s\n", (bearer_context_p->pre_emp_capability == PRE_EMPTION_CAPABILITY_ENABLED) ? "ENABLED" : "DISABLED");
}
}
}
OAILOG_DEBUG (LOG_MME_APP, "---------------------------------------------------------\n");
return false;
}
OAILOG_DEBUG (LOG_MME_APP, "---------------------------------------------------------\n");
return true;
}
//------------------------------------------------------------------------------
// Dumps every registered UE context to the debug log by iterating the
// eNB S1AP id table with the mme_app_dump_ue_context callback.
void
mme_app_dump_ue_contexts (
const mme_ue_context_t * const mme_ue_context_p)
//------------------------------------------------------------------------------
{
hashtable_ts_apply_callback_on_elements (mme_ue_context_p->enb_ue_s1ap_id_ue_context_htbl, mme_app_dump_ue_context, NULL, NULL);
}
//------------------------------------------------------------------------------
void
mme_app_handle_s1ap_ue_context_release_req (
const itti_s1ap_ue_context_release_req_t const *s1ap_ue_context_release_req)
//------------------------------------------------------------------------------
{
struct ue_context_s *ue_context_p = NULL;
MessageDef *message_p = NULL;
OAILOG_FUNC_IN (LOG_MME_APP);
ue_context_p = mme_ue_context_exists_mme_ue_s1ap_id(&mme_app_desc.mme_ue_contexts, s1ap_ue_context_release_req->mme_ue_s1ap_id);
if (!ue_context_p) {
MSC_LOG_EVENT (MSC_MMEAPP_MME, "0 S1AP_UE_CONTEXT_RELEASE_REQ Unknown mme_ue_s1ap_id 0x%06" PRIX32 " ", s1ap_ue_context_release_req->mme_ue_s1ap_id);
OAILOG_ERROR (LOG_MME_APP, "UE context doesn't exist for enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
s1ap_ue_context_release_req->enb_ue_s1ap_id, s1ap_ue_context_release_req->mme_ue_s1ap_id);
OAILOG_FUNC_OUT (LOG_MME_APP);
}
if ((ue_context_p->mme_s11_teid == 0) && (ue_context_p->sgw_s11_teid == 0)) {
// no session was created, no need for releasing bearers in SGW
message_p = itti_alloc_new_message (TASK_MME_APP, S1AP_UE_CONTEXT_RELEASE_COMMAND);
AssertFatal (message_p , "itti_alloc_new_message Failed");
memset ((void *)&message_p->ittiMsg.s1ap_ue_context_release_command, 0, sizeof (itti_s1ap_ue_context_release_command_t));
S1AP_UE_CONTEXT_RELEASE_COMMAND (message_p).mme_ue_s1ap_id = ue_context_p->mme_ue_s1ap_id;
S1AP_UE_CONTEXT_RELEASE_COMMAND (message_p).enb_ue_s1ap_id = ue_context_p->enb_ue_s1ap_id;
MSC_LOG_TX_MESSAGE (MSC_MMEAPP_MME, MSC_S1AP_MME, NULL, 0, "0 S1AP_UE_CONTEXT_RELEASE_COMMAND mme_ue_s1ap_id %06" PRIX32 " ",
S1AP_UE_CONTEXT_RELEASE_COMMAND (message_p).mme_ue_s1ap_id);
itti_send_msg_to_task (TASK_S1AP, INSTANCE_DEFAULT, message_p);
} else {
mme_app_send_s11_release_access_bearers_req (ue_context_p);
}
OAILOG_FUNC_OUT (LOG_MME_APP);
}
/*
From 3GPP TS 23.401 version 11.11.0 Release 11, section 5.3.5 S1 release procedure, point 6:
The MME deletes any eNodeB related information ("eNodeB Address in Use for S1-MME" and "eNB UE S1AP
ID") from the UE's MME context, but, retains the rest of the UE's MME context including the S-GW's S1-U
configuration information (address and TEIDs). All non-GBR EPS bearers established for the UE are preserved
in the MME and in the Serving GW.
If the cause of S1 release is because of User inactivity, Inter-RAT Redirection, the MME shall preserve the
GBR bearers. If the cause of S1 release is because of CS Fallback triggered, further details about bearer handling
are described in TS 23.272 [58]. Otherwise, e.g. Radio Connection With UE Lost, S1 signalling connection lost,
eNodeB failure the MME shall trigger the MME Initiated Dedicated Bearer Deactivation procedure
(clause 5.4.4.2) for the GBR bearer(s) of the UE after the S1 Release procedure is completed.
*/
//------------------------------------------------------------------------------
void
mme_app_handle_s1ap_ue_context_release_complete (
const itti_s1ap_ue_context_release_complete_t const
*s1ap_ue_context_release_complete)
//------------------------------------------------------------------------------
{
struct ue_context_s *ue_context_p = NULL;
OAILOG_FUNC_IN (LOG_MME_APP);
ue_context_p = mme_ue_context_exists_mme_ue_s1ap_id (&mme_app_desc.mme_ue_contexts, s1ap_ue_context_release_complete->mme_ue_s1ap_id);
if (!ue_context_p) {
MSC_LOG_EVENT (MSC_MMEAPP_MME, "0 S1AP_UE_CONTEXT_RELEASE_COMPLETE Unknown mme_ue_s1ap_id 0x%06" PRIX32 " ", s1ap_ue_context_release_complete->mme_ue_s1ap_id);
OAILOG_ERROR (LOG_MME_APP, "UE context doesn't exist for enb_ue_s1ap_ue_id "ENB_UE_S1AP_ID_FMT " mme_ue_s1ap_id " MME_UE_S1AP_ID_FMT "\n",
s1ap_ue_context_release_complete->enb_ue_s1ap_id, s1ap_ue_context_release_complete->mme_ue_s1ap_id);
OAILOG_FUNC_OUT (LOG_MME_APP);
}
mme_notify_ue_context_released(&mme_app_desc.mme_ue_contexts, ue_context_p);
//mme_remove_ue_context(&mme_app_desc.mme_ue_contexts, ue_context_p);
// TODO remove in context GBR bearers
OAILOG_FUNC_OUT (LOG_MME_APP);
}
|
#include <utility/utility.h>
#include <utility/logger.h>
#include <fstream>
#include <iostream>
#include <algorithm>
#ifdef WIN32
# include <Windows.h>
# include <direct.h>
# define GetCurrentDir _getcwd
# define ChangeWorkingDir _chdir
#else
# include <unistd.h>
# define GetCurrentDir getcwd
# define ChangeWorkingDir chdir
#endif
#ifdef __APPLE__
# include <mach-o/dyld.h>
#endif
namespace helios
{
namespace utility
{
static std::string g_exe_path = "";
// -----------------------------------------------------------------------------------------------------------------------------------
std::string path_for_resource(const std::string& resource)
{
std::string exe_path = executable_path();
#ifdef __APPLE__
return exe_path + "/Contents/Resources/" + resource;
#else
return exe_path + "/" + resource;
#endif
}
// -----------------------------------------------------------------------------------------------------------------------------------
#ifdef WIN32
std::string executable_path()
{
if (g_exe_path == "")
{
char buffer[1024];
GetModuleFileName(NULL, &buffer[0], 1024);
g_exe_path = buffer;
g_exe_path = path_without_file(g_exe_path);
}
return g_exe_path;
}
#elif __APPLE__
std::string executable_path()
{
if (g_exe_path == "")
{
char path[1024];
uint32_t size = sizeof(path);
if (_NSGetExecutablePath(path, &size) == 0)
{
g_exe_path = path;
// Substring three times to get back to root path.
for (int i = 0; i < 3; i++)
{
std::size_t found = g_exe_path.find_last_of("/");
g_exe_path = g_exe_path.substr(0, found);
}
}
}
return g_exe_path;
}
#else
std::string executable_path()
{
if (g_exe_path == "")
{
}
return g_exe_path;
}
#endif
// -----------------------------------------------------------------------------------------------------------------------------------
// Returns the process's current working directory, or "" on failure.
std::string current_working_directory()
{
    char cwd[FILENAME_MAX];
    if (!GetCurrentDir(cwd, sizeof(cwd)))
        return "";
    cwd[sizeof(cwd) - 1] = '\0'; // defensive: guarantee termination
    return std::string(cwd);
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Change the process's working directory to 'path'. Failures are silently
// ignored (the underlying chdir/_chdir return value is not checked).
void change_current_working_directory(std::string path)
{
    ChangeWorkingDir(path.c_str());
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Strip the trailing file component: "a/b/c.txt" -> "a/b". When no
// separator is present the input is returned unchanged (substr(0, npos)).
std::string path_without_file(std::string filepath)
{
#ifdef WIN32
    // Normalise Windows separators first so the search below is uniform.
    std::replace(filepath.begin(), filepath.end(), '\\', '/');
#endif
    const std::size_t sep = filepath.find_last_of("/\\");
    return filepath.substr(0, sep);
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Returns the extension of 'filepath' including the leading dot
// (e.g. ".png"), or an empty string when the path contains no dot.
// The previous version called substr(npos, ...) in the no-dot case,
// which throws std::out_of_range.
std::string file_extension(std::string filepath)
{
    std::size_t found = filepath.find_last_of(".");
    if (found == std::string::npos)
        return "";
    return filepath.substr(found);
}
// -----------------------------------------------------------------------------------------------------------------------------------
// Returns the bare file name without directories or extension:
// "a/b/c.txt" -> "c". Previously, a missing dot or a dot belonging to a
// directory component ("a.b/file") only produced the right answer via
// unsigned wrap-around of (dot - slash) being clamped by substr; both
// cases are now handled explicitly with identical results.
std::string file_name_from_path(std::string filepath)
{
    std::size_t slash = filepath.find_last_of("/");
    slash = (slash == std::string::npos) ? 0 : slash + 1;
    std::size_t dot = filepath.find_last_of(".");
    if (dot == std::string::npos || dot < slash)
        return filepath.substr(slash); // no extension to strip
    return filepath.substr(slash, dot - slash);
}
// -----------------------------------------------------------------------------------------------------------------------------------
} // namespace utility
} // namespace helios |
<gh_stars>0
import numpy as np
class ReplayMemory(object):
    """
    This class implements function to manage a replay memory as the one used in
    "Human-Level Control Through Deep Reinforcement Learning" by <NAME>. et al..

    Samples are stored in fixed-size python lists used as a circular
    buffer: once ``max_size`` elements have been added, new samples
    overwrite the oldest ones.
    """
    def __init__(self, initial_size, max_size):
        """
        Constructor.

        Args:
            initial_size (int): initial number of elements in the replay memory;
            max_size (int): maximum number of elements that the replay memory
                can contain.
        """
        self._initial_size = initial_size
        self._max_size = max_size

        self.reset()

    def add(self, dataset):
        """
        Add elements to the replay memory.

        Args:
            dataset (list): list of elements to add to the replay memory.
                Each element is indexed positionally as
                (state, action, reward, next_state, absorbing, last).
        """
        for i in range(len(dataset)):
            self._states[self._idx] = dataset[i][0]
            self._actions[self._idx] = dataset[i][1]
            self._rewards[self._idx] = dataset[i][2]
            self._next_states[self._idx] = dataset[i][3]
            self._absorbing[self._idx] = dataset[i][4]
            self._last[self._idx] = dataset[i][5]

            # Circular overwrite: wrap the write index once the buffer
            # reaches capacity and remember that it is full.
            self._idx += 1
            if self._idx == self._max_size:
                self._full = True
                self._idx = 0

    def get(self, n_samples):
        """
        Returns the provided number of states from the replay memory.

        Args:
            n_samples (int): the number of samples to return.

        Returns:
            The requested number of samples.
        """
        # Epoch-style sampling: a shuffled permutation of all stored
        # indices is consumed in chunks; once exhausted it is reshuffled.
        if self._current_sample_idx + n_samples >= len(self._sample_idxs):
            self._sample_idxs = np.random.choice(self.size, self.size,
                                                 replace=False)
            self._current_sample_idx = 0

        start = self._current_sample_idx
        stop = start + n_samples
        self._current_sample_idx = stop

        s = list()
        a = list()
        r = list()
        ss = list()
        ab = list()
        last = list()
        for i in self._sample_idxs[start:stop]:
            s.append(np.array(self._states[i]))
            a.append(self._actions[i])
            r.append(self._rewards[i])
            ss.append(np.array(self._next_states[i]))
            ab.append(self._absorbing[i])
            last.append(self._last[i])

        return np.array(s), np.array(a), np.array(r), np.array(ss),\
            np.array(ab), np.array(last)

    def reset(self):
        """
        Reset the replay memory.
        """
        # _idx: next slot to write; _full: whether the buffer has wrapped.
        self._idx = 0
        self._full = False
        self._states = [None for _ in range(self._max_size)]
        self._actions = [None for _ in range(self._max_size)]
        self._rewards = [None for _ in range(self._max_size)]
        self._next_states = [None for _ in range(self._max_size)]
        self._absorbing = [None for _ in range(self._max_size)]
        self._last = [None for _ in range(self._max_size)]
        # NOTE(review): this initial permutation only covers the first
        # ``initial_size`` indices; ``get`` is presumably called only once
        # ``initialized`` is True — confirm against callers.
        self._sample_idxs = np.random.choice(self._initial_size,
                                             self._initial_size,
                                             replace=False)
        self._current_sample_idx = 0

    @property
    def initialized(self):
        """
        Returns:
            Whether the replay memory has reached the number of elements that
            allows it to be used.
        """
        # NOTE(review): strictly greater than initial_size (not >=),
        # mirroring the check as written — confirm this is intended.
        return self.size > self._initial_size

    @property
    def size(self):
        """
        Returns:
            The number of elements contained in the replay memory.
        """
        return self._idx if not self._full else self._max_size
|
#!/bin/bash
set -euo pipefail
dn=$(dirname "$0")
# This is invoked by Dockerfile
echo "Installing base build requirements"
# xargs and the dnf builddep plugin are needed by the steps below.
dnf -y install /usr/bin/xargs 'dnf-command(builddep)'
# Package lists are one entry per line; '#' lines are comments.
deps=$(grep -v '^#' "${dn}"/buildroot-reqs.txt)
echo "${deps}" | xargs dnf -y install
echo "Installing build dependencies of primary packages"
brs=$(grep -v '^#' "${dn}"/buildroot-buildreqs.txt)
echo "${brs}" | xargs dnf -y builddep
echo "Installing build dependencies from canonical spec files"
# Download each listed spec file into a throwaway directory, then install
# its BuildRequires; the trap removes the tempdir on any exit path.
specs=$(grep -v '^#' "${dn}"/buildroot-specs.txt)
tmpd=$(mktemp -d) && trap 'rm -rf ${tmpd}' EXIT
(cd "${tmpd}" && echo "${specs}" | xargs curl -L --remote-name-all)
(cd "${tmpd}" && find . -type f -print0 | xargs -0 dnf -y builddep --spec)
echo 'Done!'
#!/bin/bash
#
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
set -e
# nullglob: a glob with no matches expands to nothing instead of itself,
# so loops over out/license_script_output/licenses_* simply skip.
shopt -s nullglob
# Needed because if it is set, cd may print the path it changed to.
unset CDPATH
# On Mac OS, readlink -f doesn't work, so follow_links traverses the path one
# link at a time, and then cds into the link destination and find out where it
# ends up.
#
# The function is enclosed in a subshell to avoid changing the working directory
# of the caller.
function follow_links() (
  # Resolve $1 to a physical path, dereferencing each symlink hop until a
  # real file is reached (portable replacement for `readlink -f`).
  cd -P "$(dirname -- "$1")"
  file="$PWD/$(basename -- "$1")"
  while [[ -h "$file" ]]; do
    cd -P "$(dirname -- "$file")"
    file="$(readlink -- "$file")"
    cd -P "$(dirname -- "$file")"
    file="$PWD/$(basename -- "$file")"
  done
  echo "$file"
)
SCRIPT_DIR=$(follow_links "$(dirname -- "${BASH_SOURCE[0]}")")
SRC_DIR="$(cd "$SCRIPT_DIR/../.."; pwd -P)"
DART_BIN="$SRC_DIR/third_party/dart/tools/sdks/dart-sdk/bin"
PATH="$DART_BIN:$PATH"
# Use:
#   env VERBOSE=1 ./ci/licenses.sh
# to turn on verbose progress report printing.
QUIET="--quiet"
if [[ "${VERBOSE}" == "1" ]]; then
  QUIET=""
fi
echo "Verifying license script is still happy..."
echo "Using pub from $(command -v pub), dart from $(command -v dart)"
untracked_files="$(cd "$SRC_DIR/flutter"; git status --ignored --short | grep -E "^!" | awk "{print\$2}")"
# `echo "" | wc -l` yields 1, so an empty list used to be miscounted as one
# untracked file and triggered the warning below spuriously; only count
# lines when the list is non-empty.
untracked_count=0
if [[ -n "$untracked_files" ]]; then
  untracked_count="$(echo "$untracked_files" | wc -l)"
fi
if [[ $untracked_count -gt 0 ]]; then
  echo ""
  echo "WARNING: There are $untracked_count untracked/ignored files or directories in the flutter repository."
  echo "False positives may occur."
  echo "You can use 'git clean -dxf' in the flutter dir to clean out these files."
  echo "BUT, be warned that this will recursively remove all these files and directories:"
  echo "$untracked_files"
  echo ""
fi
dart --version
# Collects the license information from the repo.
# Runs in a subshell.
function collect_licenses() (
  cd "$SRC_DIR/flutter/tools/licenses"
  pub get
  # Writes per-component license files plus a tool signature into
  # out/license_script_output; these are diffed against ci/licenses_golden.
  dart --enable-asserts lib/main.dart \
       --src ../../.. \
       --out ../../../out/license_script_output \
       --golden ../../ci/licenses_golden \
       "${QUIET}"
)
# Verifies the licenses in the repo.
# Runs in a subshell.
function verify_licenses() (
  local exitStatus=0
  cd "$SRC_DIR"
  # These files trip up the script on Mac OS X.
  find . -name ".DS_Store" -exec rm {} \;
  collect_licenses
  # Compare each generated per-component license file against its golden.
  for f in out/license_script_output/licenses_*; do
    if ! cmp -s "flutter/ci/licenses_golden/$(basename "$f")" "$f"; then
      echo "============================= ERROR ============================="
      echo "License script got different results than expected for $f."
      echo "Please rerun the licenses script locally to verify that it is"
      echo "correctly catching any new licenses for anything you may have"
      echo "changed, and then update this file:"
      echo "  flutter/sky/packages/sky_engine/LICENSE"
      echo "For more information, see the script in:"
      echo "  https://github.com/flutter/engine/tree/master/tools/licenses"
      echo ""
      diff -U 6 "flutter/ci/licenses_golden/$(basename "$f")" "$f"
      echo "================================================================="
      echo ""
      exitStatus=1
    fi
  done
  echo "Verifying license tool signature..."
  if ! cmp -s "flutter/ci/licenses_golden/tool_signature" "out/license_script_output/tool_signature"; then
    echo "============================= ERROR ============================="
    echo "The license tool signature has changed. This is expected when"
    echo "there have been changes to the license tool itself. Licenses have"
    echo "been re-computed for all components. If only the license script has"
    echo "changed, no diffs are typically expected in the output of the"
    echo "script. Verify the output, and if it looks correct, update the"
    echo "license tool signature golden file:"
    echo "  ci/licenses_golden/tool_signature"
    echo "For more information, see the script in:"
    echo "  https://github.com/flutter/engine/tree/master/tools/licenses"
    echo ""
    diff -U 6 "flutter/ci/licenses_golden/tool_signature" "out/license_script_output/tool_signature"
    echo "================================================================="
    echo ""
    exitStatus=1
  fi
  echo "Checking license count in licenses_flutter..."
  local actualLicenseCount
  # The golden file's last line carries the total; strip all non-digits.
  actualLicenseCount="$(tail -n 1 flutter/ci/licenses_golden/licenses_flutter | tr -dc '0-9')"
  local expectedLicenseCount=16 # When changing this number: Update the error message below as well describing all expected license types.
  if [[ $actualLicenseCount -ne $expectedLicenseCount ]]; then
    echo "=============================== ERROR ==============================="
    echo "The total license count in flutter/ci/licenses_golden/licenses_flutter"
    echo "changed from $expectedLicenseCount to $actualLicenseCount."
    echo "It's very likely that this is an unintentional change. Please"
    echo "double-check that all newly added files have a BSD-style license"
    echo "header with the following copyright:"
    echo "    Copyright 2013 The Flutter Authors. All rights reserved."
    echo "Files in 'third_party/txt' may have an Apache license header instead."
    echo "If you're absolutely sure that the change in license count is"
    echo "intentional, update 'flutter/ci/licenses.sh' with the new count."
    echo "================================================================="
    echo ""
    exitStatus=1
  fi
  if [[ $exitStatus -eq 0 ]]; then
    echo "Licenses are as expected."
  fi
  return $exitStatus
)
verify_licenses
|
-- Top five products by selling price, most expensive first.
-- (TOP is T-SQL / SQL Server syntax; other dialects use LIMIT or FETCH FIRST.)
SELECT TOP 5 * FROM product ORDER BY sellingPrice DESC;
#!/bin/bash
# SPDX-License-Identifier: Apache-2.0
set -e
# Print usage
function print_help() {
    echo "Usage: "
    echo "  main.sh <mode> [-v]"
    echo "  <mode> - one of 'install', 'test' or 'clean'"
    echo "         - 'install' - install all dependencies of the project"
    echo "         - 'test' - run unit tests of the application and client code"
    echo "         - 'clean' - clean the project directory of installed dependencies"
    echo "  -v - enable verbose output"
    echo "  -h - print this message"
}
# Install server and client dependencies, then build the client. VERBOSE
# (set via -v below) is mapped to npm's -ddd (very verbose) flag.
function do_install() {
    VERBOSE=${VERBOSE:+-ddd}
    npm install $VERBOSE
    (cd client && npm install $VERBOSE && npm run build)
}
# Run server unit tests, then client tests in CI mode with coverage.
function do_test() {
    (npm run test)
    (cd client && npm run test:ci -- -u --coverage)
}
# Remove installed dependencies and build/coverage artifacts.
function do_clean() {
    rm -rf node_modules
    rm -rf client/node_modules client/build client/coverage
}
# Get subcommand
SUBCOMMAND=$1
shift
# Validate the subcommand before parsing the remaining options.
case $SUBCOMMAND in
    install | test | clean)
        ;;
    *)
        print_help
        exit 1
        ;;
esac
OPTIONS="hv"
VERBOSE=
while getopts "$OPTIONS" opt; do
    case "$opt" in
    v) VERBOSE=true ;;
    h)
        print_help
        exit 1
        ;;
    *)
        echo "Unrecognized option: $opt"
        exit 2
        ;;
    esac
done
# Dispatch. The validation above makes the fallback branch unreachable.
case $SUBCOMMAND in
    install)
        do_install
        ;;
    test)
        do_test
        ;;
    clean)
        do_clean
        ;;
    *)
        echo "Logic Error"
        exit 3
        ;;
esac
import java.lang.reflect.Constructor;
public class SubscriptionManager {
    /**
     * Instantiate a subscription object by reflection from the plan name:
     * "Basic" resolves to a class named "BasicSubscription", etc.
     *
     * @param planName plan prefix; "&lt;planName&gt;Subscription" must name a
     *                 loadable class with a public no-arg constructor
     * @return the new instance, or {@code null} when the class is missing
     *         or cannot be instantiated
     */
    public Object getSubscriptionObject(String planName) {
        try {
            // SECURITY NOTE(review): the class name is built from the
            // caller-supplied string without validation; do not pass
            // untrusted input here.
            Class<?> subscriptionClass = Class.forName(planName + "Subscription");
            Constructor<?> constructor = subscriptionClass.getConstructor();
            return constructor.newInstance();
        } catch (Exception e) {
            // Any reflection failure is deliberately collapsed to null.
            return null;
        }
    }
    // Example usage
    public static void main(String[] args) {
        SubscriptionManager manager = new SubscriptionManager();
        Object basicSubscription = manager.getSubscriptionObject("Basic");
        Object standardSubscription = manager.getSubscriptionObject("Standard");
        Object premiumSubscription = manager.getSubscriptionObject("Premium");
        Object invalidSubscription = manager.getSubscriptionObject("InvalidPlan");
        System.out.println(basicSubscription); // Output: BasicSubscription@1f7030a
        System.out.println(standardSubscription); // Output: StandardSubscription@6f75e721
        System.out.println(premiumSubscription); // Output: PremiumSubscription@3764951d
        System.out.println(invalidSubscription); // Output: null
    }
}
#!/bin/bash
# Teal Dulcet
# wget https://raw.github.com/tdulcet/Distributed-Computing-Scripts/master/mprime.sh -qO - | bash -s --
# ./mprime.sh [PrimeNet User ID] [Computer name] [Type of work] [Idle time to run (mins)]
# ./mprime.sh "$USER" "$HOSTNAME" 150 10
# ./mprime.sh ANONYMOUS
DIR="mprime"
FILE="p95v303b6.linux64.tar.gz"
# Pinned SHA-256 of the Prime95 tarball, verified after download.
SUM="EE54B56062FEB05C9F80963A4E3AE8555D0E59CA60DDBCBA65CE05225C9B9A79"
if [[ $# -gt 4 ]]; then
    echo "Usage: $0 [PrimeNet User ID] [Computer name] [Type of work] [Idle time to run (mins)]" >&2
    exit 1
fi
# Positional arguments with defaults.
USERID=${1:-$USER}
COMPUTER=${2:-$HOSTNAME}
TYPE=${3:-150}
TIME=${4:-10}
# Accepted PrimeNet work-type codes.
RE='^([024568]|1(0[0124]|5[0123]|6[01])?)$'
if ! [[ $TYPE =~ $RE ]]; then
    echo "Usage: [Type of work] is not a valid number" >&2
    exit 1
fi
# Idle time must be a (possibly fractional) number of minutes.
RE='^([0-9]*[.])?[0-9]+$'
if ! [[ $TIME =~ $RE ]]; then
    echo "Usage: [Idle time to run] must be a number" >&2
    exit 1
fi
echo -e "PrimeNet User ID:\t$USERID"
echo -e "Computer name:\t\t$COMPUTER"
echo -e "Type of work:\t\t$TYPE"
echo -e "Idle time to run:\t$TIME minutes\n"
# Report system idle time, fetching the helper script if not present locally.
if [[ -e idletime.sh ]]; then
    bash -- idletime.sh
else
    wget https://raw.github.com/tdulcet/Distributed-Computing-Scripts/master/idletime.sh -qO - | bash -s
fi
if [[ -d "$DIR" ]]; then
    echo "Error: Prime95 is already downloaded" >&2
    exit 1
fi
# Expect drives the interactive Prime95 setup further below.
if ! command -v expect >/dev/null; then
    echo -e "Installing Expect"
    echo -e "Please enter your password if prompted.\n"
    sudo apt-get update -y
    sudo apt-get install expect -y
fi
# Convert idle minutes to seconds for the cron check installed later.
TIME=$(echo "$TIME" | awk '{ printf "%g", $1 * 60 }')
MEMINFO=$(</proc/meminfo)
TOTAL_PHYSICAL_MEM=$(echo "$MEMINFO" | awk '/^MemTotal:/ {print $2}')
echo -e "\nTotal memory (RAM):\t\t$(printf "%'d" $((TOTAL_PHYSICAL_MEM / 1024))) MiB ($(printf "%'d" $((((TOTAL_PHYSICAL_MEM * 1024) / 1000) / 1000))) MB)\n"
if ! mkdir "$DIR"; then
    echo "Error: Failed to create directory $DIR" >&2
    exit 1
fi
cd "$DIR"
DIR=$PWD
echo -e "Downloading Prime95\n"
wget https://www.mersenne.org/ftp_root/gimps/$FILE
# Refuse to continue when the tarball does not match the pinned checksum.
if [[ "$(sha256sum $FILE | head -c 64 | tr 'a-z' 'A-Z')" != "$SUM" ]]; then
    echo "Error: sha256sum does not match" >&2
    echo "Please run \"rm -r $DIR\" make sure you are using the latest version of this script and try running it again" >&2
    echo "If you still get this error, please create an issue: https://github.com/tdulcet/Distributed-Computing-Scripts/issues" >&2
    exit 1
fi
echo -e "\nDecompressing the files\n"
tar -xzvf $FILE
echo -e "\nSetting up Prime95\n"
if [[ -e ../mprime.exp ]]; then
    cp ../mprime.exp .
else
    wget https://raw.github.com/tdulcet/Distributed-Computing-Scripts/master/mprime.exp -q
fi
# Patch the expect script so the "stage 2 memory" prompt is answered with
# ~80% of physical RAM, converted from KiB to GB.
sed -i '/^expect {/a \\t"stage 2 memory in GB (*):" { sleep 1; send -- "'"$(echo "$TOTAL_PHYSICAL_MEM" | awk '{ printf "%g", ($1 * 0.8) / 1024 / 1024 }')"'\\r"; exp_continue }' mprime.exp
expect mprime.exp -- "$USERID" "$COMPUTER" "$TYPE"
echo -e "\nStarting Prime95\n"
nohup ./mprime &
echo -e "\nSetting it to start if the computer has not been used in the specified idle time and stop it when someone uses the computer\n"
#crontab -l | { cat; echo "cd '$DIR' && nohup ./mprime &"; } | crontab -
# Cron job, every minute: if no login session was active within $TIME
# seconds, (re)start mprime; otherwise kill it.
crontab -l | { cat; echo "* * * * * if who -s | awk '{ print \$2 }' | (cd /dev && xargs -r stat -c '\%U \%X') | awk '{if ('\"\${EPOCHSECONDS:-\$(date +\%s)}\"'-\$2<$TIME) { print \$1\"\t\"'\"\${EPOCHSECONDS:-\$(date +\%s)}\"'-\$2; ++count }} END{if (count>0) { exit 1 }}' >/dev/null; then pgrep -x mprime >/dev/null || (cd '$DIR' && exec nohup ./mprime &); else pgrep -x mprime >/dev/null && killall mprime; fi"; } | crontab -
# Classify a sample image with ResNet-50 via Caffe's standalone C++
# classification example.
# NOTE(review): "nice -20" is the historic adjustment form — with GNU
# coreutils it requests niceness -20 (highest priority, root only).
# Confirm that "nice -n 20" (lower priority) was not intended.
nice -20 \
./build/examples/cpp_classification/standalone_classify.bin \
models/ResNet/ResNet-50-deploy.prototxt \
models/ResNet/ResNet-50-model.caffemodel \
models/ResNet/ResNet_mean.binaryproto \
data/ilsvrc12/synset_words_short.txt \
examples/images/cat.jpg
|
<filename>test/aggregate/alpha-aggregate-data.ts
import { OakAction } from '../../src/model';
import { version } from './version';
// Test fixture: a minimal OakAction describing the European data
// aggregation job. Values here are asserted against by the aggregate
// specs, so keep them stable.
const aggregateDataAction: OakAction = {
  name: 'alpha-aggregate-europe',
  functionName: 'aggregateData',
  version,
  description: 'Aggregate european data',
  flags: [],
  customStatusDict: {},
  // Maps internal system-flag keys to their external flag names.
  systemFlagsDict: {
    circuitBreaking: 'circuit-breaking',
  },
};
export { aggregateDataAction };
|
<gh_stars>0
package org.synaptra.sdd;
import java.util.ArrayList;
import java.util.List;
/**
 * Simple mutable container for a row of float values.
 */
public class FloatRow {
    private List<Float> values = new ArrayList<>();
    public FloatRow() {
    }
    /**
     * NOTE(review): the supplied list is stored by reference (and later
     * returned by reference from {@link #getValues()}), so callers share
     * mutable state with this row — confirm that is intended.
     */
    public FloatRow(List<Float> values) {
        this.values = values;
    }
    public List<Float> getValues() {
        return values;
    }
    public void setValues(List<Float> values) {
        this.values = values;
    }
}
|
# Configure Streamlit for deployment (e.g. Heroku): write credentials and
# server settings into ~/.streamlit/.
mkdir -p ~/.streamlit/
# Heredocs replace the previous `echo "...\n..."` form: bash's builtin
# echo does not interpret \n escapes, which left literal "\n" sequences
# in the generated TOML files.
cat > ~/.streamlit/credentials.toml <<EOF
[general]
email = "youremail@domain.com"
EOF
# $PORT is injected by the hosting platform.
cat > ~/.streamlit/config.toml <<EOF
[server]
headless = true
enableCORS=false
port = $PORT
EOF
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for CESA-2015:0249
#
# Security announcement date: 2015-02-23 14:18:15 UTC
# Script generation date: 2017-01-01 21:11:21 UTC
#
# Operating System: CentOS 5
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - samba3x-winbind.i386:3.6.23-9.el5_11
# - samba3x-winbind-devel.i386:3.6.23-9.el5_11
# - samba3x.x86_64:3.6.23-9.el5_11
# - samba3x-client.x86_64:3.6.23-9.el5_11
# - samba3x-common.x86_64:3.6.23-9.el5_11
# - samba3x-doc.x86_64:3.6.23-9.el5_11
# - samba3x-domainjoin-gui.x86_64:3.6.23-9.el5_11
# - samba3x-swat.x86_64:3.6.23-9.el5_11
# - samba3x-winbind.x86_64:3.6.23-9.el5_11
# - samba3x-winbind-devel.x86_64:3.6.23-9.el5_11
#
# Last versions recommanded by security team:
# - samba3x-winbind.i386:3.6.23-12.el5_11
# - samba3x-winbind-devel.i386:3.6.23-12.el5_11
# - samba3x.x86_64:3.6.23-12.el5_11
# - samba3x-client.x86_64:3.6.23-12.el5_11
# - samba3x-common.x86_64:3.6.23-12.el5_11
# - samba3x-doc.x86_64:3.6.23-12.el5_11
# - samba3x-domainjoin-gui.x86_64:3.6.23-12.el5_11
# - samba3x-swat.x86_64:3.6.23-12.el5_11
# - samba3x-winbind.x86_64:3.6.23-12.el5_11
# - samba3x-winbind-devel.x86_64:3.6.23-12.el5_11
#
# CVE List:
# - CVE-2015-0240
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Install the fixed samba3x package set for CESA-2015:0249 (CVE-2015-0240).
# NOTE(review): the "name.arch-version" spec format below is unusual for
# yum (the common form is "name-version.arch"); confirm these specs
# actually resolve before relying on this script.
sudo yum install samba3x-winbind.i386-3.6.23 -y
sudo yum install samba3x-winbind-devel.i386-3.6.23 -y
sudo yum install samba3x.x86_64-3.6.23 -y
sudo yum install samba3x-client.x86_64-3.6.23 -y
sudo yum install samba3x-common.x86_64-3.6.23 -y
sudo yum install samba3x-doc.x86_64-3.6.23 -y
sudo yum install samba3x-domainjoin-gui.x86_64-3.6.23 -y
sudo yum install samba3x-swat.x86_64-3.6.23 -y
sudo yum install samba3x-winbind.x86_64-3.6.23 -y
sudo yum install samba3x-winbind-devel.x86_64-3.6.23 -y
|
<reponame>webdevbyjoss/html5-gravity-car
define([
    'app/objects/Platform',
    'app/objects/Car',
    'app/objects/RallyCar',
    'app/objects/Road'
], function(
    Platform,
    Car,
    RallyCar,
    Road
){
    // Level controller: owns the car and road, and eases the camera after
    // the car's physics body every frame.
    var fn = function(scene, camera, b2world) {
        if (!scene || !camera || !b2world) {
            // NOTE(review): throws a plain string, not an Error; message
            // typo "schene" is runtime text and deliberately left as-is.
            throw("WebGL schene & camera, Box2D world objects are required");
        }
        this.scene = scene;
        this.camera = camera;
        this.b2world = b2world;
        // HUD element used as a speed/distance readout.
        this.debugElem = document.getElementById('cam');
        // create car
        this.car = new Car(this, {
            posx: 30,
            posy: 16,
            w: 5,
            h: 1.7,
            wheelRadius: 0.6
        });
        this.road = new Road(this, 5);
    };
    fn.prototype.update = function(input) {
        // update camera position according to car body
        var pos = this.car.carBody.GetPosition();
        var velmodule = this.car.getSpeedPow2();
        // HUD readout: getSpeedPow2() scaled by 3.6 — presumably km/h, and
        // distance measured from the 30m spawn x — confirm units.
        this.debugElem.value = Math.floor(velmodule * 3.6) +
            'km/h, distance: ' + Math.round(pos.x - 30) + 'm';
        // update camera Z coordinate softly
        var cameraTargetZ = -10 - (velmodule * 0.5);
        var cameraTargetY = pos.y - 2;
        var cameraSpeed = Math.abs(cameraTargetZ - this.camera.position.z) * 0.1;
        if (this.camera.position.z < cameraTargetZ) {
            this.camera.position.z += cameraSpeed;
        } else {
            this.camera.position.z -= cameraSpeed;
        }
        // Vertical easing speed follows the car's vertical velocity, with
        // a small floor so the camera never stalls completely.
        var angVel = this.car.carBody.GetLinearVelocity();
        var camSpeedY = Math.abs(angVel.y) * 0.015;
        if (camSpeedY < 0.02) {
            camSpeedY = 0.02;
        }
        // Dead zone of 0.5 world units before the camera starts following.
        if (Math.abs(this.camera.position.y - cameraTargetY) > 0.5) {
            if (this.camera.position.y < cameraTargetY) {
                this.camera.position.y += camSpeedY;
            } else {
                this.camera.position.y -= camSpeedY;
            }
        }
        // Shift the camera ahead of the car as it zooms out.
        var camShiftX = (Math.abs(this.camera.position.z) - 10);
        this.camera.position.x = pos.x + 5 + camShiftX * 0.5;
        // this.camera.position.y = pos.y - 2 - camShiftX * 0.3;
        this.road.update(this.camera.position.x);
        this.car.update(input);
    };
    fn.prototype.render = function() {
        // render debug physics output
        var pixelToMeter = 30;
        var pos = this.car.carBody.GetPosition();
        var offsetx = pos.x * pixelToMeter / 2 - 100;
        var offsety = -1 * (pos.y * pixelToMeter / 2 - 250);
        this.b2world.DrawDebugData(-offsetx, offsety);
    };
    fn.prototype.remove = function() {
        // Tear down owned objects and drop all references so the level can
        // be garbage collected.
        this.car.remove();
        this.road.remove();
        this.scene = null;
        this.camera = null;
        this.b2world = null;
        this.debugElem = null;
        this.car = null;
        this.road = null;
    };
    return fn;
});
#!/bin/bash
# =====================================================================
# Environment variables.
# =====================================================================
# From the official PostgreSQL Docker image.
# https://hub.docker.com/_/postgres/
# NOTE: POSTGRES_PASSWORD has no default below and must be supplied.
POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
POSTGRES_USER=${POSTGRES_USER}
POSTGRES_DB=${POSTGRES_DB}
POSTGRES_INITDB_ARGS=${POSTGRES_INITDB_ARGS}
POSTGRES_INITDB_WALDIR=${POSTGRES_INITDB_WALDIR}
POSTGRES_HOST_AUTH_METHOD=${POSTGRES_HOST_AUTH_METHOD}
PGDATA=${PGDATA}
# The section below lists custom variables for our project.
# General purpose.
BIN_DIR=${BIN_DIR}   # Folder containing all the PostgreSQL binaries.
PGPORT=${PGPORT}     # The port to listen on.
# Replication. No defaults: these must be set when replication is used.
NP_REPLICATION_TYPE=${NP_REPLICATION_TYPE}          # Must be "primary" or "replica"
NP_REPLICATION_USER=${NP_REPLICATION_USER}          # Replication user.
NP_REPLICATION_PASSWORD=${NP_REPLICATION_PASSWORD}  # Replication password.
# =====================================================================
# Default environment variable values.
# =====================================================================
if [ -z "$POSTGRES_USER" ]; then
    POSTGRES_USER="noisepage"
fi
if [ -z "$POSTGRES_DB" ]; then
    POSTGRES_DB="noisepage"
fi
if [ -z "$POSTGRES_HOST_AUTH_METHOD" ]; then
    POSTGRES_HOST_AUTH_METHOD="md5"
fi
if [ -z "$PGPORT" ]; then
    PGPORT=15721
fi
# =====================================================================
# Helper functions.
# =====================================================================
# Start the cluster in the background and wait (-w) until it is ready.
_pgctl_start() {
    ${BIN_DIR}/pg_ctl --pgdata=${PGDATA} -w start
}
# Stop the cluster and wait (-w) for shutdown to complete.
_pg_stop() {
    ${BIN_DIR}/pg_ctl --pgdata=${PGDATA} -w stop
}
# Run postgres in the foreground on the configured port. Previously the
# port was hard-coded to 15721, silently ignoring a custom PGPORT (the
# default PGPORT is 15721, so default behavior is unchanged).
_pg_start() {
    ${BIN_DIR}/postgres "-D" "${PGDATA}" -p ${PGPORT}
}
# Initialize a new cluster in PGDATA; a separate WAL directory is used
# only when POSTGRES_INITDB_WALDIR is set.
_pg_initdb() {
    WALDIR="--waldir=${POSTGRES_INITDB_WALDIR}"
    if [ -z ${POSTGRES_INITDB_WALDIR} ]; then
        WALDIR=""
    fi
    ${BIN_DIR}/initdb --pgdata=${PGDATA} $WALDIR ${POSTGRES_INITDB_ARGS}
}
# Append connection settings so remote clients (the Docker host and
# sibling containers) can reach the server.
_pg_config() {
    AUTO_CONF=${PGDATA}/postgresql.auto.conf
    HBA_CONF=${PGDATA}/pg_hba.conf
    # pg_hba.conf
    echo "host all all 0.0.0.0/0 ${POSTGRES_HOST_AUTH_METHOD}" >> ${HBA_CONF}
    # postgresql.auto.conf
    # Allow Docker host to connect to container.
    echo "listen_addresses = '*'" >> ${AUTO_CONF}
}
# Create the application role and database, and grant monitoring rights.
_pg_create_user_and_db() {
    ${BIN_DIR}/psql -c "create user ${POSTGRES_USER} with login password '${POSTGRES_PASSWORD}'" postgres
    ${BIN_DIR}/psql -c "create database ${POSTGRES_DB} with owner = '${POSTGRES_USER}'" postgres
    # Enable monitoring for the created user.
    ${BIN_DIR}/psql -c "grant pg_monitor to ${POSTGRES_USER}" postgres
}
# Configure streaming replication. The WAL/standby parameters are written
# on every node; the replication role and physical slot are created on
# the primary only.
_pg_setup_replication() {
    AUTO_CONF=${PGDATA}/postgresql.auto.conf
    HBA_CONF=${PGDATA}/pg_hba.conf
    # See PostgreSQL docs for complete description of parameters.
    # wal_level: How much information to ship over.
    echo "wal_level = replica" >> ${AUTO_CONF}
    # hot_standby: True to enable connecting and running queries during recovery.
    echo "hot_standby = on" >> ${AUTO_CONF}
    # max_wal_senders: Maximum number of concurrent connections to standby/backup clients.
    echo "max_wal_senders = 10" >> ${AUTO_CONF}
    # max_replication_slots: Maximum number of replication slots.
    echo "max_replication_slots = 10" >> ${AUTO_CONF}
    # hot_standby_feedback: True if standby should tell primary about what queries are currently executing.
    echo "hot_standby_feedback = on" >> ${AUTO_CONF}
    if [ "${NP_REPLICATION_TYPE}" = "primary" ]; then
        # ===============================
        # Enable replication.
        # ===============================
        # Create replication user.
        ${BIN_DIR}/psql -c "create user ${NP_REPLICATION_USER} with replication encrypted password '${NP_REPLICATION_PASSWORD}'" postgres
        # Allow replication user to connect..
        echo "host replication ${NP_REPLICATION_USER} 0.0.0.0/0 md5" >> ${HBA_CONF}
        # Reload configuration.
        ${BIN_DIR}/psql -c "select pg_reload_conf()" postgres
        # Create replication slot for replica.
        ${BIN_DIR}/psql -c "select pg_create_physical_replication_slot('replication_slot_replica1')" postgres
    fi
}
# All the steps required to start up PostgreSQL.
_pg_start_all() {
    _pg_initdb             # Initialize a new PostgreSQL cluster.
    _pg_config             # Write any configuration options required.
    _pgctl_start           # Start the PostgreSQL cluster.
    _pg_create_user_and_db # Create the specified user and database.
    # Replication setup only when a replication role was requested.
    if [ ! -z "${NP_REPLICATION_TYPE}" ]; then
        _pg_setup_replication
    fi
}
# =====================================================================
# Main logic.
# =====================================================================
main() {
    # Only initdb if this is not a replica. The replica will recover from backup.
    if [ ! "${NP_REPLICATION_TYPE}" = "replica" ]; then
        # This is a single-node or the primary: bootstrap, then restart in
        # the foreground so the container stays alive.
        _pg_start_all
        _pg_stop
        _pg_start
    else
        # This is a replica: wait until the primary accepts connections.
        while true ; do
            # TODO(WAN): Issue #6 Note that there is a potential race here where the primary restarts and healthcheck succeeds.
            sleep 10
            # Use the configured port/user; these were previously
            # hard-coded to 15721/noisepage (which the defaults preserve).
            ${BIN_DIR}/pg_isready --host primary --port ${PGPORT} --username ${POSTGRES_USER}
            READY_CHECK=$?
            if [ "$READY_CHECK" = "0" ]; then
                break
            fi
        done
        rm -rf ${PGDATA}/*
        # Initialize replica backup from primary, authenticating with the
        # replication credentials configured above. These were previously
        # hard-coded as replicator/passyMcPassword, which silently broke
        # whenever NP_REPLICATION_USER/NP_REPLICATION_PASSWORD were set to
        # anything else (the primary creates the role from those vars).
        echo "${NP_REPLICATION_PASSWORD}" | ${BIN_DIR}/pg_basebackup --host primary --username "${NP_REPLICATION_USER}" --port ${PGPORT} --pgdata=${PGDATA} --format=p --wal-method=stream --progress --write-recovery-conf --slot replication_slot_replica1
        _pg_start
    fi
}
main
|
#!/bin/bash
# Dispatch on $1: "train" fine-tunes Transformer-XL on the Kiel dataset
# (restarting from the checkpoint in --restart_dir), "eval" evaluates a
# model on lm1b. Extra arguments ("${@:2}") are forwarded verbatim to the
# underlying python script.
if [[ $1 == 'train' ]]; then
    echo 'Run training...'
    python train_kiel_train_schedule_restart.py \
        --cuda \
        --data /m/triton/scratch/elec/puhe/p/jaina5/transformer-xl/FinnishXL/data/kiel_data/ \
        --dataset Ktrain \
        --n_layer 32 \
        --d_model 256 \
        --n_head 8 \
        --d_head 40 \
        --d_inner 1024 \
        --dropout 0.05 \
        --dropatt 0.05 \
        --optim adam \
        --warmup_step 0 \
        --max_step 1200000 \
        --lr 0.00025 \
        --tgt_len 32 \
        --mem_len 32 \
        --eval_tgt_len 32 \
        --batch_size 512 \
        --batch_chunk 4 \
        --restart \
        --restart_dir /m/triton/scratch/elec/puhe/p/jaina5/transformer-xl/FinnishXL/-Ktrain/20191014-151152 \
        ${@:2}
elif [[ $1 == 'eval' ]]; then
    echo 'Run evaluation...'
    python eval.py \
        --cuda \
        --data ../data/one-billion-words/ \
        --dataset lm1b \
        --batch_size 8 \
        --tgt_len 32 \
        --mem_len 128 \
        --split test \
        --same_length \
        ${@:2}
else
    echo 'unknown argment 1'
fi
sap.ui.define([
    'sap/ui/core/mvc/Controller', 'sap/ui/Device', 'sap/ui/model/Filter', 'sap/ui/model/FilterOperator'
], function(Controller, Device, Filter, FilterOperator) {
    "use strict";
    // Controller for the main menu view: registers for the "menu" route
    // and navigates to the corresponding target on each press handler.
    return Controller.extend("sap.dm.controller.Menu", {
        onInit: function() {
            var oComponent = this.getOwnerComponent();
            this._router = oComponent.getRouter();
            this._router.getRoute("menu").attachMatched(this._loadMenu, this);
        },
        // Route-matched hook; intentionally a no-op for now.
        _loadMenu: function(oEvent) {
            return;
        },
        onPressAppointment: function() {
            this._router.navTo("appointment");
        },
        onPressSitemap: function() {
            this._router.navTo("sitemap");
        },
        onPressAbout: function() {
            this._router.navTo("about");
        },
        onPressSuccessStories: function() {
            this._router.navTo("successStories");
        }
    });
});
//
// FSInventoryGoodsController.h
// myhome
//
// Created by FudonFuchina on 2018/2/3.
// Copyright © 2018年 fuhope. All rights reserved.
//
#import "FSBaseController.h"
#import "FSGoodsModel.h"

/// Controller for selecting a goods item from inventory.
@interface FSInventoryGoodsController : FSBaseController

/// Callback invoked when the user picks a goods item; delivers the
/// presenting controller and the chosen model so the caller can dismiss
/// the screen and consume the selection.
@property (nonatomic,copy) void (^selectedGoods)(FSInventoryGoodsController *bVC,FSGoodsModel *bModel);

@end
|
# Publish the CRDeviceGUID podspec to the CocoaPods trunk.
# The line-continuation backslash was missing, so `--use-libraries` was
# executed as a separate (failing) command instead of being passed to
# `pod trunk push`.
pod trunk push CRDeviceGUID.podspec --verbose --allow-warnings --use-libraries
|
#!/usr/bin/env bash
# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.
set -xeuo pipefail
export PYTHONUNBUFFERED=1
# Paths mounted into the build container; overridable for local runs.
export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/feedstock_root}"
export RECIPE_ROOT="${RECIPE_ROOT:-/home/conda/recipe_root}"
export CI_SUPPORT="${FEEDSTOCK_ROOT}/.ci_support"
export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml"
# Keep conda-build artifacts inside the feedstock checkout.
cat >~/.condarc <<CONDARC
conda-build:
  root-dir: ${FEEDSTOCK_ROOT}/build_artifacts
CONDARC
conda install --yes --quiet conda-forge-ci-setup=3 conda-build pip -c conda-forge
# set up the condarc
setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"
source run_conda_forge_build_setup
# make the build number clobber
make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"
conda build "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
    --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml"
validate_recipe_outputs "framel-feedstock"
# Upload only when this CI run is allowed to publish packages.
if [[ "${UPLOAD_PACKAGES}" != "False" ]]; then
  upload_package --validate --feedstock-name="framel-feedstock" "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"
fi
# Marker consumed by the CI wrapper to detect a completed build.
touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done-${CONFIG}"
<reponame>ohduran/ohduran.github.io
import React from "react";
import { DefaultLayout } from "../layouts";
import { InternalLink } from "../atoms";
// Landing page: a hero photo blended behind a framed grid of letter
// tiles spelling the author's name, with a short intro column alongside.
const Home = () => {
  return (
    <DefaultLayout
      title="Home"
      className="grid grid-rows-2 container mx-auto"
      style={{
        gridTemplateColumns: "1fr 1fr min-content",
      }}
    >
      {/* Background photo, blended into the page via mix-blend-mode. */}
      <figure className="col-start-1 sm:col-start-2 row-start-1 row-span-2 self-center z-0 flex">
        <img
          className="h-auto w-auto opacity-80 lg:w-1/2 float-right"
          src="./Cascais.jpg"
          alt="Writer"
          style={{
            mixBlendMode: "overlay",
          }}
        />
      </figure>
      <main className="col-start-3 col-span-1 row-start-1 row-span-2 z-10 self-center justify-self-center w-7/12 lg:w-6/12 mx-auto text-sm sm:text-base md:text-lg">
        <p className="text-lg sm:text-xl md:text-2xl font-semibold">
          Hi, I'm{" "}
          <span className="font-family-tertiary text-xl sm:text-2xl md:text-3xl">
            <NAME>
          </span>
          .
        </p>
        <p className="mt-4 md:mt-5">
          I'm a software engineer at{" "}
          <a className="text-nord-7" href="https://edgetier.com">
            EdgeTier
          </a>
          , and I'm especially interested in the{" "}
          <span className="font-semibold text-nord-13 font-family-secondary">
            uncontrollability
          </span>{" "}
          of the infrastructure of the world.
        </p>
        <p className="mt-1 md:mt-5">
          I{" "}
          <a href="https://www.youtube.com/watch?v=S3ebYJxXBRU">
            speak at conferences
          </a>{" "}
          and <InternalLink to="/essays">write essays</InternalLink>.
        </p>
      </main>
      {/* Decorative frame behind the name tiles. */}
      <div
        id="box"
        className="border-2 sm:border-4 md:border-6 lg:border-8 border-solid border-nord-8 z-0 m-1 md:m-12 col-start-1 col-span-2 row-start-1 row-span-2"
      />
      {/* Name spelled as letter tiles positioned on a 7x6 grid. */}
      <aside className="col-start-1 col-span-2 row-start-1 row-span-2 z-10 w-11/12 mx-auto text-2xl sm:text-3xl md:text-5xl font-family-secondary font-bold grid gap-2 grid-cols-7 grid-rows-6 justify-center items-center">
        <span className="col-start-2 row-start-1">Á</span>
        <span className="col-start-3 row-start-1">L</span>
        <span className="col-start-5 row-start-1">V</span>
        <span className="col-start-3 row-start-3">A</span>
        <span className="col-start-4 row-start-2">R</span>
        <span className="col-start-5 row-start-3">O</span>
        <span className="col-start-6 row-start-3">D</span>
        <span className="col-start-1 row-start-4">U</span>
        <span className="col-start-4 row-start-5">R</span>
        <span className="col-start-5 row-start-6">Á</span>
        <span className="col-start-6 row-start-6">N</span>
      </aside>
    </DefaultLayout>
  );
};
export default Home;
|
#!/bin/bash -ex
# Fetch and set up waifu2x: clone the upstream repository and install its
# Lua module dependencies (needs sudo; -ex aborts on the first failing
# command and echoes each command for the log).
git clone https://github.com/nagadomi/waifu2x.git
cd waifu2x
sudo ./install_lua_modules.sh
|
#!/bin/bash
# Rebuild ./build from ./src with Babel (stage-0 proposals enabled), then
# rename every emitted .jsx file to .js so consumers can load the output
# without a JSX-aware loader.
rm -rf ./build
./node_modules/.bin/babel --stage 0 --out-dir ./build ./src
# Quote "$0" and "${0%.jsx}.js" so file names containing spaces or glob
# characters survive the rename (they were unquoted before).
find ./build -type f -name '*.jsx' -exec sh -c 'mv -f "$0" "${0%.jsx}.js"' {} \;
<filename>packages/react-table/tests/pagination.spec.ts
import { renderHook, act, RenderResult } from '@testing-library/react-hooks';
import useTable, { Columns, Data, Options, State } from '../src';
// Shape of a single row in the pagination test fixture.
interface PaginationTableData {
  col: number;
}

// One column keyed by the row field, using default column options.
const columns: Columns<PaginationTableData> = {
  col: {},
};

// 50 rows (col = 0..49): exactly 5 pages at pageSize 10.
const data: Data<PaginationTableData> = Array.from(Array(50).keys()).map((i) => ({ col: i }));

const options: Options<PaginationTableData> = { pageSize: 10 };
/**
 * Asserts the pagination helper state exposed by the rendered hook:
 * whether prev/next navigation is available and the current page index.
 */
function expectPagination(
  result: RenderResult<State<PaginationTableData>>,
  canPrev: boolean,
  canNext: boolean,
  page: number,
): void {
  const { paginationHelpers: helpers } = result.current;
  expect(helpers.canPrev).toBe(canPrev);
  expect(helpers.canNext).toBe(canNext);
  expect(helpers.page).toBe(page);
}
describe('Pagination', () => {
  it('switches pages using utility functions', () => {
    const { result } = renderHook(() => useTable<PaginationTableData>(columns, data, options));

    // 50 rows at pageSize 10 -> 5 pages.
    expect(result.current.paginationHelpers.pageAmount).toBe(5);

    // Walk forward through every page, checking prev/next availability
    // at each step (no prev on the first page, no next on the last).
    for (let i = 0; i < 5; i++) {
      expectPagination(result, i !== 0, i !== 4, i);
      if (i !== 4) act(() => result.current.paginationHelpers.nextPage());
    }
  });

  it('cannot set page outside of boundaries', () => {
    const { result } = renderHook(() => useTable<PaginationTableData>(columns, data, options));

    // Out-of-range pages are reported via console.error rather than thrown.
    console.error = jest.fn();
    act(() => result.current.paginationHelpers.setPage(-1));
    expect(console.error).toHaveBeenCalledTimes(1);
    act(() => result.current.paginationHelpers.setPage(5));
    expect(console.error).toHaveBeenCalledTimes(2);
  });

  it('can be disabled', () => {
    // No options (no pageSize): everything fits on one page and setPage
    // warns instead of navigating.
    const { result } = renderHook(() => useTable<PaginationTableData>(columns, data));
    console.warn = jest.fn();
    expect(result.current.paginationHelpers.pageAmount).toBe(1);
    expectPagination(result, false, false, 0);
    act(() => result.current.paginationHelpers.setPage(1));
    expect(console.warn).toHaveBeenCalledTimes(1);
  });
});
|
#pragma once
#include "Assert.h"
// Default DELETER for MutexProtectedResource: destroys the resource in
// place by invoking its destructor directly. The resource lives inside a
// union (manually managed lifetime), so operator delete is never involved.
template<typename T>
inline void DefaultMutexProtectedResourceDeleter(T& res)
{
    res.~T();
}
BOOL MutexProtectedResourceCloseHandle(HANDLE);
enum ENoInit { NoInit };
// Pairs a resource of type T with a Win32 mutex (optionally named, which
// allows cross-process sharing). The resource is only reachable through
// lock(), whose returned Lock releases the mutex when it goes out of scope.
//
// The resource is stored in an anonymous union so its construction can be
// deferred (ENoInit / Initialize) and its destruction routed through the
// DELETER template parameter instead of the implicit member destructor.
template<typename T, void DELETER(T&)=DefaultMutexProtectedResourceDeleter>
class MutexProtectedResource
{
    // Anonymous union suppresses automatic construction/destruction of
    // 'resource'; lifetime is managed manually via placement new / DELETER.
    union { T resource; };
    HANDLE mutex = INVALID_HANDLE_VALUE;
public:
    // Deferred construction: neither the resource nor the mutex is created.
    // Initialize() must be called before lock() is used.
    MutexProtectedResource(ENoInit)
    {
    }

    // Default-constructs the resource and creates the mutex.
    // NOTE(review): per the Win32 API, CreateMutex signals failure with
    // NULL rather than INVALID_HANDLE_VALUE — confirm the EXPECT_NE below
    // checks the intended sentinel.
    MutexProtectedResource(const char* name = nullptr)
    {
        new (&resource) T();
        mutex = CreateMutex(nullptr, FALSE, name);
        EXPECT_NE(INVALID_HANDLE_VALUE, mutex);
    }

    // Constructs the resource from an existing value.
    MutexProtectedResource(T res, const char* name=nullptr)
    {
        Initialize(std::move(res), name);
    }

    // Two-phase initialization counterpart of the ENoInit constructor.
    void Initialize(T res, const char* name=nullptr)
    {
        new (&resource) T(res);
        mutex = CreateMutex(nullptr, FALSE, name);
        EXPECT_NE(INVALID_HANDLE_VALUE, mutex);
    }

    ~MutexProtectedResource()
    {
        release();
    }

    // Destroys the resource via DELETER and closes the mutex handle.
    // Safe to call repeatedly: 'mutex' doubles as the initialized flag.
    void release()
    {
        if (mutex != INVALID_HANDLE_VALUE)
        {
            DELETER(resource);
            EXPECT_EQ(TRUE, MutexProtectedResourceCloseHandle(mutex));
            mutex = INVALID_HANDLE_VALUE;
        }
    }

    // Movable, non-copyable scope guard: grants access to the resource
    // while the mutex is held and releases the mutex on destruction.
    class Lock
    {
        friend class MutexProtectedResource;
        T* resource = nullptr;
        HANDLE mutex = INVALID_HANDLE_VALUE;
    public:
        Lock()
        {
        }

        // Move transfers ownership; the moved-from Lock will not release.
        Lock(Lock&& rhs)
            : resource(rhs.resource)
            , mutex(rhs.mutex)
        {
            rhs.mutex = INVALID_HANDLE_VALUE;
            rhs.resource = nullptr;
        }

        ~Lock()
        {
            release();
        }

        void operator=(Lock&& rhs)
        {
            // Drop any currently held mutex, then move-construct in place.
            release();
            new (this) Lock(std::move(rhs));
        }

        // Releases the mutex early, before the Lock is destroyed.
        void release()
        {
            if (mutex != INVALID_HANDLE_VALUE)
            {
                EXPECT_NE(FALSE, ReleaseMutex(mutex));
                resource = nullptr;
                mutex = INVALID_HANDLE_VALUE;
            }
        }

        T* get() const { return resource; }
        T* operator->() const { return resource; }
        T& operator*() const { return *resource; }
    };

    // Blocks until the mutex is acquired, then returns a Lock through
    // which the protected resource can be accessed.
    Lock lock()
    {
        Lock result;
        result.resource = &resource;
        result.mutex = mutex;
        EXPECT_EQ(WAIT_OBJECT_0, WaitForSingleObject(mutex, INFINITE));
        return result;
    }
};
|
#!/usr/bin/env bash
set -ex
# Shared helpers for toolchain install, Cloudant and Blockchain handling.
source .bluemix/pipeline-COMMON.sh
source .bluemix/pipeline-CLOUDANT.sh
source .bluemix/pipeline-BLOCKCHAIN.sh
# Every sub-directory of contracts/ resp. apps/ is one deployable unit.
export CONTRACTS=$(ls contracts)
export APPS=$(ls apps)
# A package.json inside a contract directory marks a Composer contract.
if ls contracts/*/package.json > /dev/null 2>&1
then
  export HAS_COMPOSER_CONTRACTS=true
fi
# JSON object mapping business-network name -> REST server URL; extended
# incrementally by gather_composer_rest_server_url via jq.
export REST_SERVER_URLS={}
# Deploy every contract found under contracts/.
function deploy_contracts {
  for CONTRACT in ${CONTRACTS}
  do
    deploy_contract ${CONTRACT}
  done
}

# Dispatch one contract to the matching deployer:
# package.json -> Composer, *.go -> Fabric chaincode, anything else fatal.
function deploy_contract {
  CONTRACT=$1
  if [ -f contracts/${CONTRACT}/package.json ]
  then
    deploy_composer_contract ${CONTRACT}
  elif ls contracts/${CONTRACT}/*.go > /dev/null 2>&1
  then
    deploy_fabric_contract ${CONTRACT}
  else
    echo unrecognized contract type ${CONTRACT}
    exit 1
  fi
}
# Deploy one Composer contract: install each .bna archive from dist/, then
# start the business network (or upgrade it when the chaincode already
# exists), retrying on transient gateway errors.
function deploy_composer_contract {
  CONTRACT=$1
  echo deploying composer contract ${CONTRACT}
  pushd contracts/${CONTRACT}
  BUSINESS_NETWORK_NAME=$(jq --raw-output '.name' package.json)
  BUSINESS_NETWORK_VERSION=$(jq --raw-output '.version' package.json)
  BUSINESS_NETWORK_ARCHIVES=$(ls dist/*.bna)
  BUSINESS_NETWORK_CARD=admin@${BUSINESS_NETWORK_NAME}
  for BUSINESS_NETWORK_ARCHIVE in ${BUSINESS_NETWORK_ARCHIVES}
  do
    # Reset per archive: a previous iteration (or a previous contract in
    # the same shell) taking the upgrade path must not force this one
    # onto it as well.
    BUSINESS_NETWORK_UPGRADE=false
    # Install the archive currently being iterated. (This previously
    # passed the unexpanded list ${BUSINESS_NETWORK_ARCHIVES}, which
    # breaks whenever dist/ contains more than one archive.)
    if ! OUTPUT=$(composer network install -c ${BLOCKCHAIN_NETWORK_CARD} -a ${BUSINESS_NETWORK_ARCHIVE} 2>&1)
    then
      if [[ "${OUTPUT}" != *"already installed"* ]]
      then
        echo failed to install composer contract ${CONTRACT}
        exit 1
      fi
    fi
    # Start the network; retry on transient errors and fall through to
    # the upgrade path when the chaincode already exists.
    while ! OUTPUT=$(composer network start -c ${BLOCKCHAIN_NETWORK_CARD} -n ${BUSINESS_NETWORK_NAME} -V ${BUSINESS_NETWORK_VERSION} -A ${BLOCKCHAIN_NETWORK_ENROLL_ID} -S ${BLOCKCHAIN_NETWORK_ENROLL_SECRET} -f adminCard.card 2>&1)
    do
      if [[ "${OUTPUT}" = *"REQUEST_TIMEOUT"* ]]
      then
        sleep 30
      elif [[ "${OUTPUT}" = *"premature execution"* ]]
      then
        sleep 30
      elif [[ "${OUTPUT}" = *"chaincode exists"* ]]
      then
        BUSINESS_NETWORK_UPGRADE=true
        break
      else
        echo failed to start composer contract ${CONTRACT}
        exit 1
      fi
    done
    if [[ "${BUSINESS_NETWORK_UPGRADE}" = "true" ]]
    then
      # Chaincode already running: upgrade to the current version, again
      # retrying transient failures and tolerating "already exists".
      while ! OUTPUT=$(composer network upgrade -c ${BLOCKCHAIN_NETWORK_CARD} -n ${BUSINESS_NETWORK_NAME} -V ${BUSINESS_NETWORK_VERSION} 2>&1)
      do
        if [[ "${OUTPUT}" = *"REQUEST_TIMEOUT"* ]]
        then
          sleep 30
        elif [[ "${OUTPUT}" = *"premature execution"* ]]
        then
          sleep 30
        elif [[ "${OUTPUT}" = *"version already exists for chaincode"* ]]
        then
          break
        else
          echo failed to upgrade composer contract ${CONTRACT}
          exit 1
        fi
      done
    else
      # First start: replace any stale admin card with the fresh one
      # produced by 'composer network start'.
      if composer card list -c ${BUSINESS_NETWORK_CARD} > /dev/null 2>&1
      then
        composer card delete -c ${BUSINESS_NETWORK_CARD}
      fi
      composer card import -f adminCard.card -c ${BUSINESS_NETWORK_CARD}
    fi
    # Sanity check that the network is reachable with the admin card.
    composer network ping -c ${BUSINESS_NETWORK_CARD}
  done
  popd
}
# Deploy Go chaincode for one contract through the Blockchain REST API:
# upload all *.go files, then instantiate on the default channel with
# retries for transient gateway errors.
function deploy_fabric_contract {
  CONTRACT=$1
  echo deploying fabric contract ${CONTRACT}
  pushd contracts/${CONTRACT}
  # Provides CHAINCODE_ID and CHAINCODE_VERSION.
  source version.env
  CHAINCODE_FILES=$(find . -name "*.go")
  CHAINCODE_FILE_OPTS=""
  CHANNEL=defaultchannel
  # Build one '-F files[]=@<path>' upload option per source file.
  for CHAINCODE_FILE in ${CHAINCODE_FILES}
  do
    CHAINCODE_FILE_OPTS="${CHAINCODE_FILE_OPTS} -F files[]=@${CHAINCODE_FILE}"
  done
  # Install; tolerate "chaincode code ... exists" so reruns are idempotent.
  if ! OUTPUT=$(do_curl -X POST -u ${BLOCKCHAIN_KEY}:${BLOCKCHAIN_SECRET} ${CHAINCODE_FILE_OPTS} -F chaincode_id=${CHAINCODE_ID} -F chaincode_version=${CHAINCODE_VERSION} ${BLOCKCHAIN_URL}/api/v1/networks/${BLOCKCHAIN_NETWORK_ID}/chaincode/install)
  then
    if [[ "${OUTPUT}" != *"chaincode code"*"exists"* ]]
    then
      echo failed to install fabric contract ${CONTRACT}
      exit 1
    fi
  fi
  # Instantiation request body; heredoc content is sent verbatim.
  cat << EOF > request.json
{
"chaincode_id": "${CHAINCODE_ID}",
"chaincode_version": "${CHAINCODE_VERSION}",
"chaincode_arguments": "[\"12345\"]"
}
EOF
  # Instantiate; retry transient errors, accept "version already exists".
  while ! OUTPUT=$(do_curl -X POST -H 'Content-Type: application/json' -u ${BLOCKCHAIN_KEY}:${BLOCKCHAIN_SECRET} --data-binary @request.json ${BLOCKCHAIN_URL}/api/v1/networks/${BLOCKCHAIN_NETWORK_ID}/channels/${CHANNEL}/chaincode/instantiate)
  do
    if [[ "${OUTPUT}" = *"Failed to establish a backside connection"* ]]
    then
      sleep 30
    elif [[ "${OUTPUT}" = *"premature execution"* ]]
    then
      sleep 30
    elif [[ "${OUTPUT}" = *"version already exists for chaincode"* ]]
    then
      break
    else
      echo failed to start fabric contract ${CONTRACT}
      exit 1
    fi
  done
  rm -f request.json
  popd
}
# Deploy a REST server for every contract.
function deploy_rest_servers {
  for CONTRACT in ${CONTRACTS}
  do
    deploy_rest_server ${CONTRACT}
  done
}

# Only Composer contracts (package.json present) get a REST server;
# other contract types are skipped with a notice.
function deploy_rest_server {
  CONTRACT=$1
  if [ -f contracts/${CONTRACT}/package.json ]
  then
    deploy_composer_rest_server ${CONTRACT}
  else
    echo rest server not supported for contract type ${CONTRACT}
  fi
}
# Push (but do not start) a composer-rest-server container for one
# contract and configure it through environment variables.
function deploy_composer_rest_server {
  CONTRACT=$1
  echo deploying rest server for composer contract ${CONTRACT}
  pushd contracts/${CONTRACT}
  BUSINESS_NETWORK_NAME=$(jq --raw-output '.name' package.json)
  BUSINESS_NETWORK_CARD=admin@${BUSINESS_NETWORK_NAME}
  # NOTE(review): the app name is a fixed literal, so every contract
  # pushes to the same CF app (each deploy overwrites the previous one)
  # — confirm this is intended for multi-contract repositories.
  CF_APP_NAME=lowes-rest-server
  cf push \
    ${CF_APP_NAME} \
    --docker-image ibmblockchain/composer-rest-server:${COMPOSER_VERSION} \
    -i 1 \
    -m 256M \
    --no-start \
    --no-manifest
  cf set-env ${CF_APP_NAME} NODE_CONFIG "${NODE_CONFIG}"
  cf set-env ${CF_APP_NAME} COMPOSER_CARD ${BUSINESS_NETWORK_CARD}
  cf set-env ${CF_APP_NAME} COMPOSER_NAMESPACES required
  cf set-env ${CF_APP_NAME} COMPOSER_WEBSOCKETS true
  popd
}
# Deploy every app found under apps/.
function deploy_apps {
  for APP in ${APPS}
  do
    deploy_app ${APP}
  done
}

# Dispatch by app type: manifest.yml -> Cloud Foundry, Dockerfile -> Docker.
function deploy_app {
  APP=$1
  if [ -f apps/${APP}/manifest.yml ]
  then
    deploy_cf_app ${APP}
  elif [ -f apps/${APP}/Dockerfile ]
  then
    deploy_docker_app ${APP}
  else
    echo unrecognized app type ${APP}
    exit 1
  fi
}

# Push a Cloud Foundry app (stopped) and bind it read-only to the
# blockchain service instance.
function deploy_cf_app {
  APP=$1
  echo deploying cloud foundry app ${APP}
  pushd apps/${APP}
  cf push ${APP} -i 1 -m 128M --no-start
  cf bind-service ${APP} ${BLOCKCHAIN_SERVICE_INSTANCE} -c '{"permissions":"read-only"}'
  popd
}

# Placeholder: Docker app deployment is not implemented yet.
function deploy_docker_app {
  APP=$1
  echo deploying docker app ${APP}
  pushd apps/${APP}
  echo cannot deploy docker apps just yet
  popd
}
# Collect the REST server URL of every contract into REST_SERVER_URLS.
function gather_rest_server_urls {
  for CONTRACT in ${CONTRACTS}
  do
    gather_rest_server_url ${CONTRACT}
  done
}

# Only Composer contracts have a REST server to look up.
function gather_rest_server_url {
  CONTRACT=$1
  if [ -f contracts/${CONTRACT}/package.json ]
  then
    gather_composer_rest_server_url ${CONTRACT}
  else
    echo rest server not supported for contract type ${CONTRACT}
  fi
}

# Read the CF route of the REST server app and merge
# {"<network name>": "https://<route>"} into the REST_SERVER_URLS JSON.
function gather_composer_rest_server_url {
  CONTRACT=$1
  echo gathering rest server url for composer contract ${CONTRACT}
  pushd contracts/${CONTRACT}
  BUSINESS_NETWORK_NAME=$(jq --raw-output '.name' package.json)
  CF_APP_NAME=lowes-rest-server
  REST_SERVER_URL=$(cf app ${CF_APP_NAME} | grep routes: | awk '{print $2}')
  export REST_SERVER_URLS=$(echo ${REST_SERVER_URLS} | jq ". + {\"${BUSINESS_NETWORK_NAME}\":\"https://${REST_SERVER_URL}\"}")
  popd
}
# Collect the public URL of every app.
function gather_app_urls {
  for APP in ${APPS}
  do
    gather_app_url ${APP}
  done
}

# Dispatch by app type, mirroring deploy_app.
function gather_app_url {
  APP=$1
  if [ -f apps/${APP}/manifest.yml ]
  then
    gather_cf_app_url ${APP}
  elif [ -f apps/${APP}/Dockerfile ]
  then
    gather_docker_app_url ${APP}
  else
    echo unrecognized app type ${APP}
    exit 1
  fi
}

# Export BLOCKCHAIN_SAMPLE_URL with the CF route — but only for the app
# designated by BLOCKCHAIN_SAMPLE_APP; all other apps are ignored.
function gather_cf_app_url {
  APP=$1
  echo gathering url for cloud foundry app ${APP}
  pushd apps/${APP}
  if [[ "${APP}" = "${BLOCKCHAIN_SAMPLE_APP}" ]]
  then
    export BLOCKCHAIN_SAMPLE_URL=$(cf app ${APP} | grep routes: | awk '{print $2}')
  fi
  popd
}

# Placeholder: Docker app URLs cannot be gathered yet.
function gather_docker_app_url {
  APP=$1
  echo gathering url for docker app ${APP}
  pushd apps/${APP}
  echo cannot gather urls for docker apps just yet
  popd
}
# Start the REST server of every contract.
function start_rest_servers {
  for CONTRACT in ${CONTRACTS}
  do
    start_rest_server ${CONTRACT}
  done
}

# Only Composer contracts have a REST server to start.
function start_rest_server {
  CONTRACT=$1
  if [ -f contracts/${CONTRACT}/package.json ]
  then
    start_composer_rest_server ${CONTRACT}
  else
    echo rest server not supported for contract type ${CONTRACT}
  fi
}

# Start the composer-rest-server CF app that deploy_composer_rest_server
# previously pushed with --no-start.
function start_composer_rest_server {
  CONTRACT=$1
  echo starting rest server for composer contract ${CONTRACT}
  pushd contracts/${CONTRACT}
  BUSINESS_NETWORK_NAME=$(jq --raw-output '.name' package.json)
  CF_APP_NAME=lowes-rest-server
  cf start ${CF_APP_NAME}
  popd
}
# Start every app.
function start_apps {
  for APP in ${APPS}
  do
    start_app ${APP}
  done
}

# Dispatch by app type, mirroring deploy_app.
function start_app {
  APP=$1
  if [ -f apps/${APP}/manifest.yml ]
  then
    start_cf_app ${APP}
  elif [ -f apps/${APP}/Dockerfile ]
  then
    start_docker_app ${APP}
  else
    echo unrecognized app type ${APP}
    exit 1
  fi
}

# Inject the gathered REST server URL map, then start the CF app.
function start_cf_app {
  APP=$1
  echo starting cloud foundry app ${APP}
  pushd apps/${APP}
  cf set-env ${APP} REST_SERVER_URLS "${REST_SERVER_URLS}"
  cf start ${APP}
  popd
}

# Placeholder: Docker apps cannot be started yet.
function start_docker_app {
  APP=$1
  echo starting docker app ${APP}
  pushd apps/${APP}
  echo cannot start docker apps just yet
  popd
}
# ---- main pipeline sequence ----
install_nodejs
# Composer contracts additionally need the CLI and a Cloudant-backed
# card store.
if [[ "${HAS_COMPOSER_CONTRACTS}" = "true" ]]
then
  install_composer
  provision_cloudant
  create_cloudant_database
  configure_composer_wallet
fi
provision_blockchain
if [[ "${HAS_COMPOSER_CONTRACTS}" = "true" ]]
then
  create_blockchain_network_card
fi
# Status codes 1-8 report phase progress after each milestone.
update_blockchain_deploy_status 1
# Contracts, REST servers and apps are deployed concurrently, then
# awaited in order so the status counter advances deterministically.
deploy_contracts &
DEPLOY_CONTRACTS_PID=$!
deploy_rest_servers &
DEPLOY_REST_SERVERS_PID=$!
deploy_apps &
DEPLOY_APPS_PID=$!
wait ${DEPLOY_CONTRACTS_PID}
update_blockchain_deploy_status 2
wait ${DEPLOY_REST_SERVERS_PID}
update_blockchain_deploy_status 3
wait ${DEPLOY_APPS_PID}
update_blockchain_deploy_status 4
# URLs can only be gathered after everything has been pushed.
gather_rest_server_urls
update_blockchain_deploy_status 5
gather_app_urls
update_blockchain_deploy_status 6
# Start REST servers and apps in parallel once the URL map is complete.
start_rest_servers &
START_REST_SERVERS_PID=$!
start_apps &
START_APPS_PID=$!
wait ${START_REST_SERVERS_PID}
update_blockchain_deploy_status 7
wait ${START_APPS_PID}
update_blockchain_deploy_status 8
|
class ConfigVar:
    """Plain record describing one user-facing configuration variable."""

    def __init__(self, key, prompt, required_if, is_secure, is_connect_key):
        # Attach every constructor argument to the instance under the
        # same attribute name.
        fields = {
            "key": key,
            "prompt": prompt,
            "required_if": required_if,
            "is_secure": is_secure,
            "is_connect_key": is_connect_key,
        }
        for name, value in fields.items():
            setattr(self, name, value)
class TradingFeesManager:
    """In-memory cache of per-exchange trading fees and API keys."""

    def __init__(self):
        # Nested mapping: {exchange: {trading_pair: fees}}.
        self.trading_fees = {}
        # Flat mapping: {exchange: api_key}.
        self.api_keys = {}

    def get_trading_fees(self, exchange, trading_pair):
        # Two-level .get() chain returns None when either key is missing,
        # matching the original nested membership checks.
        return self.trading_fees.get(exchange, {}).get(trading_pair)

    def update_trading_fees(self, exchange, trading_pair, fees):
        # Create the per-exchange bucket on first use, then store the fees.
        self.trading_fees.setdefault(exchange, {})[trading_pair] = fees

    def set_api_key(self, exchange, api_key):
        self.api_keys[exchange] = api_key

    def get_api_key(self, exchange):
        # .get() defaults to None, same as the explicit fallback before.
        return self.api_keys.get(exchange)
# Example usage / smoke test: populate the fee cache and key store,
# then read both values back.
manager = TradingFeesManager()
manager.update_trading_fees("bittrex", "ZRX-ETH", [0.2, 0.2])
manager.set_api_key("bittrex", "your_bittrex_api_key")
print(manager.get_trading_fees("bittrex", "ZRX-ETH"))  # Output: [0.2, 0.2]
print(manager.get_api_key("bittrex"))  # Output: your_bittrex_api_key
// comment
// Opt this script into strict-mode semantics.
"use strict";
|
#!/bin/sh
# Verifies that docs/LICENSE_OF_DEPENDENCIES.md lists exactly the Go module
# dependencies of ./cmd/circonus-unified-agent across all supported target
# platforms. Prints a diff and exits non-zero when the file is out of date.
tmpdir="$(mktemp -d)"
cleanup() {
  rm -rf "$tmpdir"
}
trap cleanup EXIT
targets="$(go tool dist list)"
for target in ${targets}; do
  # only check platforms we build for
  case "${target}" in
    linux/*) ;;
    windows/*) ;;
    freebsd/*) ;;
    darwin/*) ;;
    *) continue;;
  esac
  # Resolve the module dependency set for each GOOS/GOARCH pair.
  GOOS=${target%%/*} GOARCH=${target##*/} \
  go list -deps -f '{{with .Module}}{{.Path}}{{end}}' ./cmd/circonus-unified-agent/ >> "${tmpdir}/golist"
done
# Normalize the combined list: drop known exceptions, collapse sub-modules
# that share one license, and strip version suffixes.
for dep in $(LC_ALL=C sort -u "${tmpdir}/golist"); do
  case "${dep}" in
    # ignore ourselves
    github.com/circonus-labs/circonus-unified-agent) continue;;
    # dependency is replaced in go.mod
    github.com/satori/go.uuid) continue;;
    # go-autorest has a single license for all sub modules
    github.com/Azure/go-autorest/autorest)
      dep=github.com/Azure/go-autorest;;
    github.com/Azure/go-autorest/*)
      continue;;
    # single license for all sub modules
    cloud.google.com/go/*)
      continue;;
  esac
  # Remove single and double digit version from path; these are generally not
  # actual parts of the path and instead indicate a branch or tag.
  # example: github.com/influxdata/go-syslog/v2 -> github.com/influxdata/go-syslog
  dep="${dep%%/v[0-9]}"
  dep="${dep%%/v[0-9][0-9]}"
  echo "${dep}" >> "${tmpdir}/HEAD"
done
# If there are two versions of a library that have the same base (like
# github.com/foo/bar github.com/foo/bar/v3) there will be a duplicate
# in the list. Remove duplicates again.
mv "${tmpdir}/HEAD" "${tmpdir}/HEAD-dup"
uniq "${tmpdir}/HEAD-dup" > "${tmpdir}/HEAD"
# Extract the dependency names currently recorded in the docs
# (datadog-agent is intentionally excluded from the comparison).
grep '^-' docs/LICENSE_OF_DEPENDENCIES.md | grep -v github.com/DataDog/datadog-agent | cut -f 2 -d' ' > "${tmpdir}/LICENSE_OF_DEPENDENCIES.md"
diff -U0 "${tmpdir}/LICENSE_OF_DEPENDENCIES.md" "${tmpdir}/HEAD" || {
cat - <<EOF
The docs/LICENSE_OF_DEPENDENCIES.md file does not contain the expected entries.
Lines prefixed with '+' should be added to LICENSE_OF_DEPENDENCIES.md and '-'
lines should be removed.
Include a link to the appropriate licenses for any additions.
EOF
exit 1
}
|
#!/usr/bin/env bash
# Completely removes a RipaEx Core installation: stops the forger and relay
# processes, destroys the database and its user, then deletes every on-disk
# artefact (install dir, data, config, cache, logs, temp).
core_uninstall ()
{
  ascii
  heading "Uninstalling RipaEx Core..."
  forger_delete
  relay_delete
  database_destroy
  database_drop_user
  # Ensure we are not in a directory we are going to delete
  cd "$commander_dir"
  # NOTE(review): this heading and the following one both say
  # "Deleting Data..." (CORE_DIR vs CORE_PATH_DATA); looks like a
  # copy/paste — confirm whether the first pair should say something
  # like "Deleting Installation...".
  heading "Deleting Data..."
  sudo rm -rf "$CORE_DIR"
  success "Deleted Data!"
  heading "Deleting Data..."
  sudo rm -rf "$CORE_PATH_DATA"
  success "Deleted Data!"
  heading "Deleting Configuration..."
  sudo rm -rf "$CORE_PATH_CONFIG"
  success "Deleted Configuration!"
  heading "Deleting Cache..."
  sudo rm -rf "$CORE_PATH_CACHE"
  success "Deleted Cache!"
  heading "Deleting Logs..."
  sudo rm -rf "$CORE_PATH_LOG"
  success "Deleted Logs!"
  heading "Deleting Temp..."
  sudo rm -rf "$CORE_PATH_TEMP"
  success "Deleted Temp!"
  success "Uninstalled RipaEx Core!"
  core_version
}
|
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
# Written by <NAME> <<EMAIL>>
#
# Entry-type module for a U-Boot binary with an embedded microcode pointer
#
from binman.entry import Entry
from binman.etype.blob import Entry_blob
from patman import tools
class Entry_u_boot_ucode(Entry_blob):
    """U-Boot microcode block

    Properties / Entry arguments:
        None

    The contents of this entry are filled in automatically by other entries
    which must also be in the image.

    U-Boot on x86 needs a single block of microcode. This is collected from
    the various microcode update nodes in the device tree. It is also unable
    to read the microcode from the device tree on platforms that use FSP
    (Firmware Support Package) binaries, because the API requires that the
    microcode is supplied before there is any SRAM available to use (i.e.
    the FSP sets up the SRAM / cache-as-RAM but does so in the call that
    requires the microcode!). To keep things simple, all x86 platforms handle
    microcode the same way in U-Boot (even non-FSP platforms). This is that
    a table is placed at _dt_ucode_base_size containing the base address and
    size of the microcode. This is either passed to the FSP (for FSP
    platforms), or used to set up the microcode (for non-FSP platforms).
    This all happens in the build system since it is the only way to get
    the microcode into a single blob and accessible without SRAM.

    There are two cases to handle. If there is only one microcode blob in
    the device tree, then the ucode pointer it set to point to that. This
    entry (u-boot-ucode) is empty. If there is more than one update, then
    this entry holds the concatenation of all updates, and the device tree
    entry (u-boot-dtb-with-ucode) is updated to remove the microcode. This
    last step ensures that that the microcode appears in one contiguous
    block in the image and is not unnecessarily duplicated in the device
    tree. It is referred to as 'collation' here.

    Entry types that have a part to play in handling microcode:

        Entry_u_boot_with_ucode_ptr:
            Contains u-boot-nodtb.bin (i.e. U-Boot without the device tree).
            It updates it with the address and size of the microcode so that
            U-Boot can find it early on start-up.
        Entry_u_boot_dtb_with_ucode:
            Contains u-boot.dtb. It stores the microcode in a
            'self.ucode_data' property, which is then read by this class to
            obtain the microcode if needed. If collation is performed, it
            removes the microcode from the device tree.
        Entry_u_boot_ucode:
            This class. If collation is enabled it reads the microcode from
            the Entry_u_boot_dtb_with_ucode entry, and uses it as the
            contents of this entry.
    """
    def __init__(self, section, etype, node):
        super().__init__(section, etype, node)

    def ObtainContents(self):
        """Collect this entry's data from the dtb-with-ucode entry.

        Returns:
            True if contents are known (possibly empty), False if the
            dtb entry has not processed its microcode yet (try again later).
        """
        # If the section does not need microcode, there is nothing to do
        found = False
        for suffix in ['', '-spl', '-tpl']:
            name = 'u-boot%s-with-ucode-ptr' % suffix
            entry = self.section.FindEntryType(name)
            if entry and entry.target_offset:
                found = True
        if not found:
            self.data = b''
            return True
        # Get the microcode from the device tree entry. If it is not available
        # yet, return False so we will be called later. If the section simply
        # doesn't exist, then we may as well return True, since we are going to
        # get an error anyway.
        for suffix in ['', '-spl', '-tpl']:
            name = 'u-boot%s-dtb-with-ucode' % suffix
            fdt_entry = self.section.FindEntryType(name)
            if fdt_entry:
                # Use the first matching dtb entry found.
                break
        if not fdt_entry:
            self.data = b''
            return True
        if not fdt_entry.ready:
            return False
        if not fdt_entry.collate:
            # This binary can be empty
            self.data = b''
            return True
        # Write it out to a file
        self._pathname = tools.GetOutputFilename('u-boot-ucode.bin')
        tools.WriteFile(self._pathname, fdt_entry.ucode_data)
        self.ReadBlobContents()
        return True
|
# Evaluate checkpoint 13 of the 512+512+512 (old) model on the WikiText-103
# validation set, augmenting each example (keep only nouns/verbs, fill
# pattern per the augmentation function) and scoring only the last sixth.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-old/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-old/13-512+512+512-N-VB-FILL-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_fill_first_third_sixth --eval_function last_sixth_eval
<gh_stars>0
import React from 'react'
import styled from 'styled-components'
import Avatar from './Avatar'
// Horizontal row container: avatar and username, vertically centred.
const Flexyboi = styled.div`
  display: flex;
  flex-direction: row;
  align-items: center;
`

// Username label, nudged to sit beside the avatar.
const P = styled.p`
  margin: 2px 0 0 7px;
`
export default ({ users }) => {
return (
<>
{Object.entries(users).map(([username, color]) => {
return (
<Flexyboi>
<Avatar primaryColor={color[0]} shadowColor={color[1]} />
<P>{username}</P>
</Flexyboi>
)
})}
</>
)
}
|
<filename>index.js
// Package entry point: re-export the library implementation.
module.exports = require('./lib/ReactBricks')
<reponame>Annihilator708/cpp_adventure<filename>Namespaces/src/Namespaces.cpp
#include <iostream>
#include "Cat.h"
#include "Animals.h"
using namespace std;
using namespace dog;
int main() {
    // Unqualified lookup: with `using namespace dog` in effect this
    // presumably resolves to dog::Cat — confirm against Cat.h/Animals.h.
    Cat cat;
    cat.speak();
    // Explicitly qualified instances from the two namespaces.
    dog::Cat cat2;
    cat2.speak();
    realcat::Cat cat3;
    cat3.speak();
    // Same pattern for the CATNAME constants: two qualified, one relying
    // on the using-directives above.
    cout << dog::CATNAME << endl;
    cout << realcat::CATNAME << endl;
    cout << CATNAME << endl;
    return 0;
}
|
/*
* Copyright (c) 2021 gematik GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.gematik.ti.epa.vzd.gem.command.commandExecutions;
import de.gematik.ti.epa.vzd.client.invoker.ApiException;
import de.gematik.ti.epa.vzd.client.invoker.ApiResponse;
import de.gematik.ti.epa.vzd.client.model.BaseDirectoryEntry;
import de.gematik.ti.epa.vzd.client.model.CreateDirectoryEntry;
import de.gematik.ti.epa.vzd.client.model.DistinguishedName;
import de.gematik.ti.epa.vzd.client.model.UserCertificate;
import de.gematik.ti.epa.vzd.gem.CommandNamesEnum;
import de.gematik.ti.epa.vzd.gem.api.GemCertificateAdministrationApi;
import de.gematik.ti.epa.vzd.gem.command.Transformer;
import de.gematik.ti.epa.vzd.gem.command.commandExecutions.dto.ExecutionResult;
import de.gematik.ti.epa.vzd.gem.invoker.GemApiClient;
import de.gematik.ti.epa.vzd.gem.invoker.IConnectionPool;
import generated.CommandType;
import generated.DistinguishedNameType;
import generated.UserCertificateType;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Specific execution for Command "AddDirectoryEntryCertificate"
*/
public class AddDirEntryCertExecution extends ExecutionBase {
private Logger LOG = LoggerFactory.getLogger(AddDirEntryCertExecution.class);
public AddDirEntryCertExecution(IConnectionPool connectionPool) {
super(connectionPool, CommandNamesEnum.ADD_DIR_CERT);
}
@Override
public boolean checkValidation(CommandType command) {
if (command.getUserCertificate().isEmpty()) {
LOG.error("No certificate element found");
return false;
}
String uid = null;
if (command.getDn() != null) {
uid = command.getDn().getUid();
}
for (UserCertificateType cert : command.getUserCertificate()) {
if (StringUtils.isBlank(cert.getUserCertificate())) {
LOG.error("No user certificate for element found");
return false;
}
if (cert.getDn() != null) {
DistinguishedNameType certDn = cert.getDn();
if (uid == null) {
uid = certDn.getUid();
}
if (StringUtils.isNotBlank(certDn.getUid())) {
if (!uid.equals(certDn.getUid())) {
LOG.error("Mismatching uid delivered");
return false;
}
}
}
}
if (StringUtils.isBlank(uid)) {
LOG.error("No or mismatching uid delivered");
return false;
}
return true;
}
@Override
protected ExecutionResult executeCommand(CommandType command, GemApiClient apiClient) throws ApiException {
StringBuffer sb = new StringBuffer();
ApiResponse<DistinguishedName> response = null;
boolean runSuccessful = true;
int errorCode = 0;
CreateDirectoryEntry createDirectoryEntry = Transformer.getCreateDirectoryEntry(command);
for (UserCertificate userCertificate : createDirectoryEntry.getUserCertificates()) {
try {
String uid = getUid(createDirectoryEntry.getDirectoryEntryBase(), userCertificate);
response = addSingleCertificate(uid, userCertificate, apiClient);
if (response.getStatusCode() == HttpStatus.SC_CREATED) {
sb.append("\nCertificate successful added: \n" + response.getData() + " Responce status was: " + response.getStatusCode());
}
} catch (ApiException ex) {
runSuccessful = false;
errorCode = ex.getCode();
sb.append("\nSomething went wrong while adding certificate. Response status was: " + ex.getCode()
+ " certificate: " + userCertificate.getUserCertificate());
}
}
if (!runSuccessful) {
throw new ApiException(errorCode, sb + "\n" +
"At least one certificate could not be added in:" + "\n" + Transformer
.getCreateDirectoryEntry(command));
}
return new ExecutionResult(sb.toString(), true, response.getStatusCode());
}
private String getUid(BaseDirectoryEntry directoryEntryBase, UserCertificate userCertificate) {
String uidCert = null;
String uidEntry = null;
if (userCertificate.getDn() != null) {
uidCert = userCertificate.getDn().getUid();
}
if (directoryEntryBase != null) {
DistinguishedName dn = directoryEntryBase.getDn();
if (dn != null) {
uidEntry = dn.getUid();
}
}
return uidCert == null ? uidEntry : uidCert;
}
private ApiResponse<DistinguishedName> addSingleCertificate(String uid, UserCertificate userCertificate, GemApiClient apiClient)
throws ApiException {
return new GemCertificateAdministrationApi(apiClient)
.addDirectoryEntryCertificateWithHttpInfo(uid, userCertificate);
}
@Override
public boolean postCheck() {
try {
super.postCheck();
return true;
} catch (Exception ex) {
LOG.error(ex.getMessage());
}
return false;
}
}
|
<reponame>Decipher/druxt.js<filename>packages/entity/test/components/fields/DruxtFieldEntityReferenceEntityView.test.js
import 'regenerator-runtime/runtime'
import { createLocalVue, shallowMount } from '@vue/test-utils'
import Vuex from 'vuex'
import mockAxios from 'jest-mock-axios'
import { DruxtClient, DruxtStore } from '../../../../druxt/src'
import DruxtFieldEntityReferenceEntityView from '../../../src/components/fields/DruxtFieldEntityReferenceEntityView.vue'
jest.mock('axios')

// Base URL used by the mocked HTTP layer.
const baseURL = 'https://example.com'

// Setup local vue instance.
const localVue = createLocalVue()
localVue.use(Vuex)

// Stub nested DruxtEntity rendering; these tests only assert wiring.
const stubs = ['druxt-entity']

// Recreated in beforeEach; shared by mountComponent below.
let store
// Seeds the Vuex store with one mock entity and shallow-mounts the field
// component with an entity-reference value pointing at that entity.
const mountComponent = (options) => {
  const entity = {
    type: 'pages',
    id: 'fe00c55d-0335-49d6-964e-a868c0c68f9c',
    attributes: {
      title: 'Welcome to Contenta CMS!',
      path: {
        alias: '/welcome'
      }
    }
  }
  // Pre-populate the store so the component resolves the reference
  // without issuing a request.
  store.commit('druxt/addResource', { resource: { data: entity }})
  const propsData = {
    value: [{
      type: entity.type,
      uuid: entity.id
    }],
    schema: {}
  }
  return shallowMount(DruxtFieldEntityReferenceEntityView, { ...options, localVue, propsData, store, stubs })
}
describe('Component - DruxtFieldEntityReferenceEntityView', () => {
  beforeEach(() => {
    mockAxios.reset()

    // Setup vuex store.
    store = new Vuex.Store()
    DruxtStore({ store })
    store.$druxt = new DruxtClient('https://demo-api.druxtjs.org')
    store.app = { context: { error: jest.fn() }, store }
  })

  test('default', async () => {
    const wrapper = mountComponent()
    // Two ticks before asserting — presumably one for mount and one for
    // async entity resolution; confirm against the component.
    await localVue.nextTick()
    await localVue.nextTick()

    expect(wrapper.vm.mode).toBe('default')
    expect(wrapper.html()).toMatchSnapshot()
  })
})
|
// Copyright (c) 2012 <NAME>. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the name Chromium Embedded
// Framework nor the names of its contributors may be used to endorse
// or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// ---------------------------------------------------------------------------
//
// The contents of this file must follow a specific format in order to
// support the CEF translator tool. See the translator.README.txt file in the
// tools directory for more information.
//
#ifndef CEF_INCLUDE_CEF_BROWSER_H_
#define CEF_INCLUDE_CEF_BROWSER_H_
#pragma once
#include <vector>
#include "include/cef_base.h"
#include "include/cef_devtools_message_observer.h"
#include "include/cef_drag_data.h"
#include "include/cef_frame.h"
#include "include/cef_image.h"
#include "include/cef_navigation_entry.h"
#include "include/cef_registration.h"
#include "include/cef_request_context.h"
class CefBrowserHost;
class CefClient;
///
// Class used to represent a browser. When used in the browser process the
// methods of this class may be called on any thread unless otherwise indicated
// in the comments. When used in the render process the methods of this class
// may only be called on the main thread.
// (source=library: implemented by CEF itself, not by client applications.)
///
/*--cef(source=library)--*/
class CefBrowser : public virtual CefBaseRefCounted {
 public:
  ///
  // True if this object is currently valid. This will return false after
  // CefLifeSpanHandler::OnBeforeClose is called.
  ///
  /*--cef()--*/
  virtual bool IsValid() = 0;

  ///
  // Returns the browser host object. This method can only be called in the
  // browser process.
  ///
  /*--cef()--*/
  virtual CefRefPtr<CefBrowserHost> GetHost() = 0;

  ///
  // Returns true if the browser can navigate backwards.
  ///
  /*--cef()--*/
  virtual bool CanGoBack() = 0;

  ///
  // Navigate backwards.
  ///
  /*--cef()--*/
  virtual void GoBack() = 0;

  ///
  // Returns true if the browser can navigate forwards.
  ///
  /*--cef()--*/
  virtual bool CanGoForward() = 0;

  ///
  // Navigate forwards.
  ///
  /*--cef()--*/
  virtual void GoForward() = 0;

  ///
  // Returns true if the browser is currently loading.
  ///
  /*--cef()--*/
  virtual bool IsLoading() = 0;

  ///
  // Reload the current page.
  ///
  /*--cef()--*/
  virtual void Reload() = 0;

  ///
  // Reload the current page ignoring any cached data.
  ///
  /*--cef()--*/
  virtual void ReloadIgnoreCache() = 0;

  ///
  // Stop loading the page.
  ///
  /*--cef()--*/
  virtual void StopLoad() = 0;

  ///
  // Returns the globally unique identifier for this browser. This value is also
  // used as the tabId for extension APIs.
  ///
  /*--cef()--*/
  virtual int GetIdentifier() = 0;

  ///
  // Returns true if this object is pointing to the same handle as |that|
  // object.
  ///
  /*--cef()--*/
  virtual bool IsSame(CefRefPtr<CefBrowser> that) = 0;

  ///
  // Returns true if the browser is a popup.
  ///
  /*--cef()--*/
  virtual bool IsPopup() = 0;

  ///
  // Returns true if a document has been loaded in the browser.
  ///
  /*--cef()--*/
  virtual bool HasDocument() = 0;

  ///
  // Returns the main (top-level) frame for the browser. In the browser process
  // this will return a valid object until after
  // CefLifeSpanHandler::OnBeforeClose is called. In the renderer process this
  // will return NULL if the main frame is hosted in a different renderer
  // process (e.g. for cross-origin sub-frames). The main frame object will
  // change during cross-origin navigation or re-navigation after renderer
  // process termination (due to crashes, etc).
  ///
  /*--cef()--*/
  virtual CefRefPtr<CefFrame> GetMainFrame() = 0;

  ///
  // Returns the focused frame for the browser.
  ///
  /*--cef()--*/
  virtual CefRefPtr<CefFrame> GetFocusedFrame() = 0;

  ///
  // Returns the frame with the specified identifier, or NULL if not found.
  ///
  /*--cef(capi_name=get_frame_byident)--*/
  virtual CefRefPtr<CefFrame> GetFrame(int64 identifier) = 0;

  ///
  // Returns the frame with the specified name, or NULL if not found.
  ///
  /*--cef(optional_param=name)--*/
  virtual CefRefPtr<CefFrame> GetFrame(const CefString& name) = 0;

  ///
  // Returns the number of frames that currently exist.
  ///
  /*--cef()--*/
  virtual size_t GetFrameCount() = 0;

  ///
  // Returns the identifiers of all existing frames.
  ///
  /*--cef(count_func=identifiers:GetFrameCount)--*/
  virtual void GetFrameIdentifiers(std::vector<int64>& identifiers) = 0;

  ///
  // Returns the names of all existing frames.
  ///
  /*--cef()--*/
  virtual void GetFrameNames(std::vector<CefString>& names) = 0;
};
///
// Callback interface for CefBrowserHost::RunFileDialog. The methods of this
// class will be called on the browser process UI thread.
// (source=client: implemented by the client application.)
///
/*--cef(source=client)--*/
class CefRunFileDialogCallback : public virtual CefBaseRefCounted {
 public:
  ///
  // Called asynchronously after the file dialog is dismissed.
  // |selected_accept_filter| is the 0-based index of the value selected from
  // the accept filters array passed to CefBrowserHost::RunFileDialog.
  // |file_paths| will be a single value or a list of values depending on the
  // dialog mode. If the selection was cancelled |file_paths| will be empty.
  ///
  /*--cef(index_param=selected_accept_filter,optional_param=file_paths)--*/
  virtual void OnFileDialogDismissed(
      int selected_accept_filter,
      const std::vector<CefString>& file_paths) = 0;
};
///
// Callback interface for CefBrowserHost::GetNavigationEntries. The methods of
// this class will be called on the browser process UI thread.
// (source=client: implemented by the client application.)
///
/*--cef(source=client)--*/
class CefNavigationEntryVisitor : public virtual CefBaseRefCounted {
 public:
  ///
  // Method that will be executed. Do not keep a reference to |entry| outside of
  // this callback. Return true to continue visiting entries or false to stop.
  // |current| is true if this entry is the currently loaded navigation entry.
  // |index| is the 0-based index of this entry and |total| is the total number
  // of entries.
  ///
  /*--cef()--*/
  virtual bool Visit(CefRefPtr<CefNavigationEntry> entry,
                     bool current,
                     int index,
                     int total) = 0;
};
///
// Callback interface for CefBrowserHost::PrintToPDF. The methods of this class
// will be called on the browser process UI thread.
// (source=client: implemented by the client application.)
///
/*--cef(source=client)--*/
class CefPdfPrintCallback : public virtual CefBaseRefCounted {
 public:
  ///
  // Method that will be executed when the PDF printing has completed. |path|
  // is the output path. |ok| will be true if the printing completed
  // successfully or false otherwise.
  ///
  /*--cef()--*/
  virtual void OnPdfPrintFinished(const CefString& path, bool ok) = 0;
};
///
// Callback interface for CefBrowserHost::DownloadImage. The methods of this
// class will be called on the browser process UI thread.
// (source=client: implemented by the client application.)
///
/*--cef(source=client)--*/
class CefDownloadImageCallback : public virtual CefBaseRefCounted {
 public:
  ///
  // Method that will be executed when the image download has completed.
  // |image_url| is the URL that was downloaded and |http_status_code| is the
  // resulting HTTP status code. |image| is the resulting image, possibly at
  // multiple scale factors, or empty if the download failed.
  ///
  /*--cef(optional_param=image)--*/
  virtual void OnDownloadImageFinished(const CefString& image_url,
                                       int http_status_code,
                                       CefRefPtr<CefImage> image) = 0;
};
///
// Class used to represent the browser process aspects of a browser. The methods
// of this class can only be called in the browser process. They may be called
// on any thread in that process unless otherwise indicated in the comments.
///
/*--cef(source=library)--*/
class CefBrowserHost : public virtual CefBaseRefCounted {
 public:
  typedef cef_drag_operations_mask_t DragOperationsMask;
  typedef cef_file_dialog_mode_t FileDialogMode;
  typedef cef_mouse_button_type_t MouseButtonType;
  typedef cef_paint_element_type_t PaintElementType;

  ///
  // Create a new browser using the window parameters specified by |windowInfo|.
  // All values will be copied internally and the actual window (if any) will be
  // created on the UI thread. If |request_context| is empty the global request
  // context will be used. This method can be called on any browser process
  // thread and will not block. The optional |extra_info| parameter provides an
  // opportunity to specify extra information specific to the created browser
  // that will be passed to CefRenderProcessHandler::OnBrowserCreated() in the
  // render process.
  ///
  /*--cef(optional_param=client,optional_param=url,
          optional_param=request_context,optional_param=extra_info)--*/
  static bool CreateBrowser(const CefWindowInfo& windowInfo,
                            CefRefPtr<CefClient> client,
                            const CefString& url,
                            const CefBrowserSettings& settings,
                            CefRefPtr<CefDictionaryValue> extra_info,
                            CefRefPtr<CefRequestContext> request_context);

  ///
  // Create a new browser using the window parameters specified by |windowInfo|.
  // If |request_context| is empty the global request context will be used. This
  // method can only be called on the browser process UI thread. The optional
  // |extra_info| parameter provides an opportunity to specify extra information
  // specific to the created browser that will be passed to
  // CefRenderProcessHandler::OnBrowserCreated() in the render process.
  ///
  /*--cef(optional_param=client,optional_param=url,
          optional_param=request_context,optional_param=extra_info)--*/
  static CefRefPtr<CefBrowser> CreateBrowserSync(
      const CefWindowInfo& windowInfo,
      CefRefPtr<CefClient> client,
      const CefString& url,
      const CefBrowserSettings& settings,
      CefRefPtr<CefDictionaryValue> extra_info,
      CefRefPtr<CefRequestContext> request_context);

  ///
  // Returns the hosted browser object.
  ///
  /*--cef()--*/
  virtual CefRefPtr<CefBrowser> GetBrowser() = 0;

  ///
  // Request that the browser close. The JavaScript 'onbeforeunload' event will
  // be fired. If |force_close| is false the event handler, if any, will be
  // allowed to prompt the user and the user can optionally cancel the close.
  // If |force_close| is true the prompt will not be displayed and the close
  // will proceed. Results in a call to CefLifeSpanHandler::DoClose() if the
  // event handler allows the close or if |force_close| is true. See
  // CefLifeSpanHandler::DoClose() documentation for additional usage
  // information.
  ///
  /*--cef()--*/
  virtual void CloseBrowser(bool force_close) = 0;

  ///
  // Helper for closing a browser. Call this method from the top-level window
  // close handler (if any). Internally this calls CloseBrowser(false) if the
  // close has not yet been initiated. This method returns false while the close
  // is pending and true after the close has completed. See CloseBrowser() and
  // CefLifeSpanHandler::DoClose() documentation for additional usage
  // information. This method must be called on the browser process UI thread.
  ///
  /*--cef()--*/
  virtual bool TryCloseBrowser() = 0;

  ///
  // Set whether the browser is focused.
  ///
  /*--cef()--*/
  virtual void SetFocus(bool focus) = 0;

  ///
  // Retrieve the window handle (if any) for this browser. If this browser is
  // wrapped in a CefBrowserView this method should be called on the browser
  // process UI thread and it will return the handle for the top-level native
  // window.
  ///
  /*--cef()--*/
  virtual CefWindowHandle GetWindowHandle() = 0;

  ///
  // Retrieve the window handle (if any) of the browser that opened this
  // browser. Will return NULL for non-popup browsers or if this browser is
  // wrapped in a CefBrowserView. This method can be used in combination with
  // custom handling of modal windows.
  ///
  /*--cef()--*/
  virtual CefWindowHandle GetOpenerWindowHandle() = 0;

  ///
  // Returns true if this browser is wrapped in a CefBrowserView.
  ///
  /*--cef()--*/
  virtual bool HasView() = 0;

  ///
  // Returns the client for this browser.
  ///
  /*--cef()--*/
  virtual CefRefPtr<CefClient> GetClient() = 0;

  ///
  // Returns the request context for this browser.
  ///
  /*--cef()--*/
  virtual CefRefPtr<CefRequestContext> GetRequestContext() = 0;

  ///
  // Get the current zoom level. The default zoom level is 0.0. This method can
  // only be called on the UI thread.
  ///
  /*--cef()--*/
  virtual double GetZoomLevel() = 0;

  ///
  // Change the zoom level to the specified value. Specify 0.0 to reset the
  // zoom level. If called on the UI thread the change will be applied
  // immediately. Otherwise, the change will be applied asynchronously on the
  // UI thread.
  ///
  /*--cef()--*/
  virtual void SetZoomLevel(double zoomLevel) = 0;

  ///
  // Call to run a file chooser dialog. Only a single file chooser dialog may be
  // pending at any given time. |mode| represents the type of dialog to display.
  // |title| is the title to be used for the dialog and may be empty to show the
  // default title ("Open" or "Save" depending on the mode). |default_file_path|
  // is the path with optional directory and/or file name component that will be
  // initially selected in the dialog. |accept_filters| are used to restrict the
  // selectable file types and may be any combination of (a) valid lower-cased
  // MIME types (e.g. "text/*" or "image/*"), (b) individual file extensions
  // (e.g. ".txt" or ".png"), or (c) combined description and file extension
  // delimited using "|" and ";" (e.g. "Image Types|.png;.gif;.jpg").
  // |selected_accept_filter| is the 0-based index of the filter that will be
  // selected by default. |callback| will be executed after the dialog is
  // dismissed or immediately if another dialog is already pending. The dialog
  // will be initiated asynchronously on the UI thread.
  ///
  /*--cef(optional_param=title,optional_param=default_file_path,
          optional_param=accept_filters,index_param=selected_accept_filter)--*/
  virtual void RunFileDialog(FileDialogMode mode,
                             const CefString& title,
                             const CefString& default_file_path,
                             const std::vector<CefString>& accept_filters,
                             int selected_accept_filter,
                             CefRefPtr<CefRunFileDialogCallback> callback) = 0;

  ///
  // Download the file at |url| using CefDownloadHandler.
  ///
  /*--cef()--*/
  virtual void StartDownload(const CefString& url) = 0;

  ///
  // Download |image_url| and execute |callback| on completion with the images
  // received from the renderer. If |is_favicon| is true then cookies are not
  // sent and not accepted during download. Images with density independent
  // pixel (DIP) sizes larger than |max_image_size| are filtered out from the
  // image results. Versions of the image at different scale factors may be
  // downloaded up to the maximum scale factor supported by the system. If there
  // are no image results <= |max_image_size| then the smallest image is resized
  // to |max_image_size| and is the only result. A |max_image_size| of 0 means
  // unlimited. If |bypass_cache| is true then |image_url| is requested from the
  // server even if it is present in the browser cache.
  ///
  /*--cef()--*/
  virtual void DownloadImage(const CefString& image_url,
                             bool is_favicon,
                             uint32 max_image_size,
                             bool bypass_cache,
                             CefRefPtr<CefDownloadImageCallback> callback) = 0;

  ///
  // Print the current browser contents.
  ///
  /*--cef()--*/
  virtual void Print() = 0;

  ///
  // Print the current browser contents to the PDF file specified by |path| and
  // execute |callback| on completion. The caller is responsible for deleting
  // |path| when done. For PDF printing to work on Linux you must implement the
  // CefPrintHandler::GetPdfPaperSize method.
  ///
  /*--cef(optional_param=callback)--*/
  virtual void PrintToPDF(const CefString& path,
                          const CefPdfPrintSettings& settings,
                          CefRefPtr<CefPdfPrintCallback> callback) = 0;

  ///
  // Search for |searchText|. |identifier| must be a unique ID and these IDs
  // must strictly increase so that newer requests always have greater IDs than
  // older requests. If |identifier| is zero or less than the previous ID value
  // then it will be automatically assigned a new valid ID. |forward| indicates
  // whether to search forward or backward within the page. |matchCase|
  // indicates whether the search should be case-sensitive. |findNext| indicates
  // whether this is the first request or a follow-up. The CefFindHandler
  // instance, if any, returned via CefClient::GetFindHandler will be called to
  // report find results.
  ///
  /*--cef()--*/
  virtual void Find(int identifier,
                    const CefString& searchText,
                    bool forward,
                    bool matchCase,
                    bool findNext) = 0;

  ///
  // Cancel all searches that are currently going on.
  ///
  /*--cef()--*/
  virtual void StopFinding(bool clearSelection) = 0;

  ///
  // Open developer tools (DevTools) in its own browser. The DevTools browser
  // will remain associated with this browser. If the DevTools browser is
  // already open then it will be focused, in which case the |windowInfo|,
  // |client| and |settings| parameters will be ignored. If |inspect_element_at|
  // is non-empty then the element at the specified (x,y) location will be
  // inspected. The |windowInfo| parameter will be ignored if this browser is
  // wrapped in a CefBrowserView.
  ///
  /*--cef(optional_param=windowInfo,optional_param=client,
          optional_param=settings,optional_param=inspect_element_at)--*/
  virtual void ShowDevTools(const CefWindowInfo& windowInfo,
                            CefRefPtr<CefClient> client,
                            const CefBrowserSettings& settings,
                            const CefPoint& inspect_element_at) = 0;

  ///
  // Explicitly close the associated DevTools browser, if any.
  ///
  /*--cef()--*/
  virtual void CloseDevTools() = 0;

  ///
  // Returns true if this browser currently has an associated DevTools browser.
  // Must be called on the browser process UI thread.
  ///
  /*--cef()--*/
  virtual bool HasDevTools() = 0;

  ///
  // Send a method call message over the DevTools protocol. |message| must be a
  // UTF8-encoded JSON dictionary that contains "id" (int), "method" (string)
  // and "params" (dictionary, optional) values. See the DevTools protocol
  // documentation at https://chromedevtools.github.io/devtools-protocol/ for
  // details of supported methods and the expected "params" dictionary contents.
  // |message| will be copied if necessary. This method will return true if
  // called on the UI thread and the message was successfully submitted for
  // validation, otherwise false. Validation will be applied asynchronously and
  // any messages that fail due to formatting errors or missing parameters may
  // be discarded without notification. Prefer ExecuteDevToolsMethod if a more
  // structured approach to message formatting is desired.
  //
  // Every valid method call will result in an asynchronous method result or
  // error message that references the sent message "id". Event messages are
  // received while notifications are enabled (for example, between method calls
  // for "Page.enable" and "Page.disable"). All received messages will be
  // delivered to the observer(s) registered with AddDevToolsMessageObserver.
  // See CefDevToolsMessageObserver::OnDevToolsMessage documentation for details
  // of received message contents.
  //
  // Usage of the SendDevToolsMessage, ExecuteDevToolsMethod and
  // AddDevToolsMessageObserver methods does not require an active DevTools
  // front-end or remote-debugging session. Other active DevTools sessions will
  // continue to function independently. However, any modification of global
  // browser state by one session may not be reflected in the UI of other
  // sessions.
  //
  // Communication with the DevTools front-end (when displayed) can be logged
  // for development purposes by passing the
  // `--devtools-protocol-log-file=<path>` command-line flag.
  ///
  /*--cef()--*/
  virtual bool SendDevToolsMessage(const void* message,
                                   size_t message_size) = 0;

  ///
  // Execute a method call over the DevTools protocol. This is a more structured
  // version of SendDevToolsMessage. |message_id| is an incremental number that
  // uniquely identifies the message (pass 0 to have the next number assigned
  // automatically based on previous values). |method| is the method name.
  // |params| are the method parameters, which may be empty. See the DevTools
  // protocol documentation (linked above) for details of supported methods and
  // the expected |params| dictionary contents. This method will return the
  // assigned message ID if called on the UI thread and the message was
  // successfully submitted for validation, otherwise 0. See the
  // SendDevToolsMessage documentation for additional usage information.
  ///
  /*--cef(optional_param=params)--*/
  virtual int ExecuteDevToolsMethod(int message_id,
                                    const CefString& method,
                                    CefRefPtr<CefDictionaryValue> params) = 0;

  ///
  // Add an observer for DevTools protocol messages (method results and events).
  // The observer will remain registered until the returned Registration object
  // is destroyed. See the SendDevToolsMessage documentation for additional
  // usage information.
  ///
  /*--cef()--*/
  virtual CefRefPtr<CefRegistration> AddDevToolsMessageObserver(
      CefRefPtr<CefDevToolsMessageObserver> observer) = 0;

  ///
  // Retrieve a snapshot of current navigation entries as values sent to the
  // specified visitor. If |current_only| is true only the current navigation
  // entry will be sent, otherwise all navigation entries will be sent.
  ///
  /*--cef()--*/
  virtual void GetNavigationEntries(
      CefRefPtr<CefNavigationEntryVisitor> visitor,
      bool current_only) = 0;

  ///
  // If a misspelled word is currently selected in an editable node calling
  // this method will replace it with the specified |word|.
  ///
  /*--cef()--*/
  virtual void ReplaceMisspelling(const CefString& word) = 0;

  ///
  // Add the specified |word| to the spelling dictionary.
  ///
  /*--cef()--*/
  virtual void AddWordToDictionary(const CefString& word) = 0;

  ///
  // Returns true if window rendering is disabled.
  ///
  /*--cef()--*/
  virtual bool IsWindowRenderingDisabled() = 0;

  ///
  // Notify the browser that the widget has been resized. The browser will first
  // call CefRenderHandler::GetViewRect to get the new size and then call
  // CefRenderHandler::OnPaint asynchronously with the updated regions. This
  // method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void WasResized() = 0;

  ///
  // Notify the browser that it has been hidden or shown. Layout and
  // CefRenderHandler::OnPaint notification will stop when the browser is
  // hidden. This method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void WasHidden(bool hidden) = 0;

  ///
  // Send a notification to the browser that the screen info has changed. The
  // browser will then call CefRenderHandler::GetScreenInfo to update the
  // screen information with the new values. This simulates moving the webview
  // window from one display to another, or changing the properties of the
  // current display. This method is only used when window rendering is
  // disabled.
  ///
  /*--cef()--*/
  virtual void NotifyScreenInfoChanged() = 0;

  ///
  // Invalidate the view. The browser will call CefRenderHandler::OnPaint
  // asynchronously. This method is only used when window rendering is
  // disabled.
  ///
  /*--cef()--*/
  virtual void Invalidate(PaintElementType type) = 0;

  ///
  // Issue a BeginFrame request to Chromium. Only valid when
  // CefWindowInfo::external_begin_frame_enabled is set to true.
  ///
  /*--cef()--*/
  virtual void SendExternalBeginFrame() = 0;

  ///
  // Send a key event to the browser.
  ///
  /*--cef()--*/
  virtual void SendKeyEvent(const CefKeyEvent& event) = 0;

  ///
  // Send a mouse click event to the browser. The |x| and |y| coordinates are
  // relative to the upper-left corner of the view.
  ///
  /*--cef()--*/
  virtual void SendMouseClickEvent(const CefMouseEvent& event,
                                   MouseButtonType type,
                                   bool mouseUp,
                                   int clickCount) = 0;

  ///
  // Send a mouse move event to the browser. The |x| and |y| coordinates are
  // relative to the upper-left corner of the view.
  ///
  /*--cef()--*/
  virtual void SendMouseMoveEvent(const CefMouseEvent& event,
                                  bool mouseLeave) = 0;

  ///
  // Send a mouse wheel event to the browser. The |x| and |y| coordinates are
  // relative to the upper-left corner of the view. The |deltaX| and |deltaY|
  // values represent the movement delta in the X and Y directions respectively.
  // In order to scroll inside select popups with window rendering disabled
  // CefRenderHandler::GetScreenPoint should be implemented properly.
  ///
  /*--cef()--*/
  virtual void SendMouseWheelEvent(const CefMouseEvent& event,
                                   int deltaX,
                                   int deltaY) = 0;

  ///
  // Send a touch event to the browser for a windowless browser.
  ///
  /*--cef()--*/
  virtual void SendTouchEvent(const CefTouchEvent& event) = 0;

  ///
  // Send a focus event to the browser.
  ///
  /*--cef()--*/
  virtual void SendFocusEvent(bool setFocus) = 0;

  ///
  // Send a capture lost event to the browser.
  ///
  /*--cef()--*/
  virtual void SendCaptureLostEvent() = 0;

  ///
  // Notify the browser that the window hosting it is about to be moved or
  // resized. This method is only used on Windows and Linux.
  ///
  /*--cef()--*/
  virtual void NotifyMoveOrResizeStarted() = 0;

  ///
  // Returns the maximum rate in frames per second (fps) that CefRenderHandler::
  // OnPaint will be called for a windowless browser. The actual fps may be
  // lower if the browser cannot generate frames at the requested rate. The
  // minimum value is 1 and the maximum value is 60 (default 30). This method
  // can only be called on the UI thread.
  ///
  /*--cef()--*/
  virtual int GetWindowlessFrameRate() = 0;

  ///
  // Set the maximum rate in frames per second (fps) that CefRenderHandler::
  // OnPaint will be called for a windowless browser. The actual fps may be
  // lower if the browser cannot generate frames at the requested rate. The
  // minimum value is 1 and the maximum value is 60 (default 30). Can also be
  // set at browser creation via CefBrowserSettings.windowless_frame_rate.
  ///
  /*--cef()--*/
  virtual void SetWindowlessFrameRate(int frame_rate) = 0;

  ///
  // Begins a new composition or updates the existing composition. Blink has a
  // special node (a composition node) that allows the input method to change
  // text without affecting other DOM nodes. |text| is the optional text that
  // will be inserted into the composition node. |underlines| is an optional set
  // of ranges that will be underlined in the resulting text.
  // |replacement_range| is an optional range of the existing text that will be
  // replaced. |selection_range| is an optional range of the resulting text that
  // will be selected after insertion or replacement. The |replacement_range|
  // value is only used on OS X.
  //
  // This method may be called multiple times as the composition changes. When
  // the client is done making changes the composition should either be canceled
  // or completed. To cancel the composition call ImeCancelComposition. To
  // complete the composition call either ImeCommitText or
  // ImeFinishComposingText. Completion is usually signaled when:
  // A. The client receives a WM_IME_COMPOSITION message with a GCS_RESULTSTR
  //    flag (on Windows), or;
  // B. The client receives a "commit" signal of GtkIMContext (on Linux), or;
  // C. insertText of NSTextInput is called (on Mac).
  //
  // This method is only used when window rendering is disabled.
  ///
  /*--cef(optional_param=text, optional_param=underlines)--*/
  virtual void ImeSetComposition(
      const CefString& text,
      const std::vector<CefCompositionUnderline>& underlines,
      const CefRange& replacement_range,
      const CefRange& selection_range) = 0;

  ///
  // Completes the existing composition by optionally inserting the specified
  // |text| into the composition node. |replacement_range| is an optional range
  // of the existing text that will be replaced. |relative_cursor_pos| is where
  // the cursor will be positioned relative to the current cursor position. See
  // comments on ImeSetComposition for usage. The |replacement_range| and
  // |relative_cursor_pos| values are only used on OS X.
  // This method is only used when window rendering is disabled.
  ///
  /*--cef(optional_param=text)--*/
  virtual void ImeCommitText(const CefString& text,
                             const CefRange& replacement_range,
                             int relative_cursor_pos) = 0;

  ///
  // Completes the existing composition by applying the current composition node
  // contents. If |keep_selection| is false the current selection, if any, will
  // be discarded. See comments on ImeSetComposition for usage.
  // This method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void ImeFinishComposingText(bool keep_selection) = 0;

  ///
  // Cancels the existing composition and discards the composition node
  // contents without applying them. See comments on ImeSetComposition for
  // usage.
  // This method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void ImeCancelComposition() = 0;

  ///
  // Call this method when the user drags the mouse into the web view (before
  // calling DragTargetDragOver/DragTargetLeave/DragTargetDrop).
  // |drag_data| should not contain file contents as this type of data is not
  // allowed to be dragged into the web view. File contents can be removed using
  // CefDragData::ResetFileContents (for example, if |drag_data| comes from
  // CefRenderHandler::StartDragging).
  // This method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void DragTargetDragEnter(CefRefPtr<CefDragData> drag_data,
                                   const CefMouseEvent& event,
                                   DragOperationsMask allowed_ops) = 0;

  ///
  // Call this method each time the mouse is moved across the web view during
  // a drag operation (after calling DragTargetDragEnter and before calling
  // DragTargetDragLeave/DragTargetDrop).
  // This method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void DragTargetDragOver(const CefMouseEvent& event,
                                  DragOperationsMask allowed_ops) = 0;

  ///
  // Call this method when the user drags the mouse out of the web view (after
  // calling DragTargetDragEnter).
  // This method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void DragTargetDragLeave() = 0;

  ///
  // Call this method when the user completes the drag operation by dropping
  // the object onto the web view (after calling DragTargetDragEnter).
  // The object being dropped is |drag_data|, given as an argument to
  // the previous DragTargetDragEnter call.
  // This method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void DragTargetDrop(const CefMouseEvent& event) = 0;

  ///
  // Call this method when the drag operation started by a
  // CefRenderHandler::StartDragging call has ended either in a drop or
  // by being cancelled. |x| and |y| are mouse coordinates relative to the
  // upper-left corner of the view. If the web view is both the drag source
  // and the drag target then all DragTarget* methods should be called before
  // DragSource* methods.
  // This method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void DragSourceEndedAt(int x, int y, DragOperationsMask op) = 0;

  ///
  // Call this method when the drag operation started by a
  // CefRenderHandler::StartDragging call has completed. This method may be
  // called immediately without first calling DragSourceEndedAt to cancel a
  // drag operation. If the web view is both the drag source and the drag
  // target then all DragTarget* methods should be called before DragSource*
  // methods.
  // This method is only used when window rendering is disabled.
  ///
  /*--cef()--*/
  virtual void DragSourceSystemDragEnded() = 0;

  ///
  // Returns the current visible navigation entry for this browser. This method
  // can only be called on the UI thread.
  ///
  /*--cef()--*/
  virtual CefRefPtr<CefNavigationEntry> GetVisibleNavigationEntry() = 0;

  ///
  // Set accessibility state for all frames. |accessibility_state| may be
  // default, enabled or disabled. If |accessibility_state| is STATE_DEFAULT
  // then accessibility will be disabled by default and the state may be further
  // controlled with the "force-renderer-accessibility" and
  // "disable-renderer-accessibility" command-line switches. If
  // |accessibility_state| is STATE_ENABLED then accessibility will be enabled.
  // If |accessibility_state| is STATE_DISABLED then accessibility will be
  // completely disabled.
  //
  // For windowed browsers accessibility will be enabled in Complete mode (which
  // corresponds to kAccessibilityModeComplete in Chromium). In this mode all
  // platform accessibility objects will be created and managed by Chromium's
  // internal implementation. The client needs only to detect the screen reader
  // and call this method appropriately. For example, on macOS the client can
  // handle the @"AXEnhancedUserInterface" accessibility attribute to detect
  // VoiceOver state changes and on Windows the client can handle WM_GETOBJECT
  // with OBJID_CLIENT to detect accessibility readers.
  //
  // For windowless browsers accessibility will be enabled in TreeOnly mode
  // (which corresponds to kAccessibilityModeWebContentsOnly in Chromium). In
  // this mode renderer accessibility is enabled, the full tree is computed, and
  // events are passed to CefAccessibilityHandler, but platform accessibility
  // objects are not created. The client may implement platform accessibility
  // objects using CefAccessibilityHandler callbacks if desired.
  ///
  /*--cef()--*/
  virtual void SetAccessibilityState(cef_state_t accessibility_state) = 0;

  ///
  // Enable notifications of auto resize via CefDisplayHandler::OnAutoResize.
  // Notifications are disabled by default. |min_size| and |max_size| define the
  // range of allowed sizes.
  ///
  /*--cef()--*/
  virtual void SetAutoResizeEnabled(bool enabled,
                                    const CefSize& min_size,
                                    const CefSize& max_size) = 0;

  ///
  // Returns the extension hosted in this browser or NULL if no extension is
  // hosted. See CefRequestContext::LoadExtension for details.
  ///
  /*--cef()--*/
  virtual CefRefPtr<CefExtension> GetExtension() = 0;

  ///
  // Returns true if this browser is hosting an extension background script.
  // Background hosts do not have a window and are not displayable. See
  // CefRequestContext::LoadExtension for details.
  ///
  /*--cef()--*/
  virtual bool IsBackgroundHost() = 0;

  ///
  // Set whether the browser's audio is muted.
  ///
  /*--cef()--*/
  virtual void SetAudioMuted(bool mute) = 0;

  ///
  // Returns true if the browser's audio is muted. This method can only be
  // called on the UI thread.
  ///
  /*--cef()--*/
  virtual bool IsAudioMuted() = 0;
};
#endif // CEF_INCLUDE_CEF_BROWSER_H_
|
package http
import (
"../../cc"
"../../cc/err"
"github.com/360EntSecGroup-Skylar/excelize"
"strconv"
)
func init() {
	cc.AddActionGroup( "/v1x1/practice", func( a cc.ActionGroup ) error {
		// \brief Return the question bank.
		// \type GET
		// \arg[dbname] database (workbook) file name under ./.practice/
		// \arg[stname] sheet name
		// \arg[sr]     start row; rows before it supply the header labels
		// \arg[head]   first question index to return; may be omitted
		// \arg[end]    one-past-last question index to return; may be omitted
		a.GET( "/db", func( ap cc.ActionPackage ) ( cc.HttpErrReturn, cc.StatusCode ) {
			dbname := ap.GetFormValue("dbname")
			stname := ap.GetFormValue("stname")
			startRow := ap.GetFormValue("sr" )
			sr, e := strconv.Atoi( startRow ); err.Assert( e )

			f, e := excelize.OpenFile( "./.practice/"+dbname ); err.Assert( e )
			rows, e := f.GetRows(stname); err.Assert( e )

			// Build one map per data row (rows after index sr), keyed by the
			// non-empty header cell found in the same column of the header
			// region (rows 0..sr-1; later header rows overwrite earlier ones).
			ds := make([]map[string]string, 0)
			for i, row := range rows {
				if i <= sr {
					continue // still inside the header region
				}
				d := make(map[string]string)
				for j, col := range row {
					for x := 0; x < sr; x++ {
						if j >= len(rows[x]) {
							break
						}
						if rows[x][j] != "" {
							d[rows[x][j]] = col
						}
					}
				}
				if len(d) != 0 {
					ds = append(ds, d)
				}
			}

			// Clamp a slice bound into [0, len(ds)] so out-of-range query
			// parameters cannot trigger a slice-bounds panic.
			clamp := func( v int ) int {
				if v < 0 {
					return 0
				}
				if v > len(ds) {
					return len(ds)
				}
				return v
			}

			head := ap.GetFormValue("head")
			end := ap.GetFormValue("end")
			// BUG FIX: the original branched on head == "" / end == "" and then
			// parsed the *empty* string with strconv.Atoi, so every request
			// that omitted a bound failed the assert, and the "both omitted"
			// branch was unreachable. Parse a bound only when it was supplied.
			switch {
			case head == "" && end == "":
				return cc.HerOkWithData( ds )
			case end == "":
				h, e := strconv.Atoi( head ); err.Assert( e )
				return cc.HerOkWithData( ds[clamp(h):] )
			case head == "":
				en, e := strconv.Atoi( end ); err.Assert( e )
				return cc.HerOkWithData( ds[:clamp(en)] )
			default:
				h, e := strconv.Atoi( head ); err.Assert( e )
				en, e := strconv.Atoi( end ); err.Assert( e )
				h = clamp(h)
				en = clamp(en)
				if en < h { // empty result rather than a panic for inverted bounds
					en = h
				}
				return cc.HerOkWithData( ds[h:en] )
			}
		} )

		// \brief Return the list of sheet names in the given workbook.
		// \type GET
		// \arg[dbname] database (workbook) file name under ./.practice/
		a.GET( "/db/sheets", func( ap cc.ActionPackage ) ( cc.HttpErrReturn, cc.StatusCode ) {
			dbname := ap.GetFormValue("dbname")
			f, e := excelize.OpenFile( "./.practice/"+dbname ); err.Assert( e )
			return cc.HerOkWithData( f.GetSheetList() )
		} )
		return nil
	} )
}
|
/*
* RadioManager
* RadioManager
*
* OpenAPI spec version: 2.0
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.pluxbox.radiomanager.api.models;
import java.util.Objects;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
/**
* CampaignTemplateItem
*/
public class CampaignTemplateItem {
  // Auto-generated swagger model: nullable fields map to the API's snake_case
  // JSON keys via @SerializedName; setters come in fluent and void variants.
  @SerializedName("model_type_id")
  private Long modelTypeId = null;

  @SerializedName("external_id")
  private String externalId = null;

  @SerializedName("field_values")
  private Object fieldValues = null;

  @SerializedName("title")
  private String title = null;

  @SerializedName("duration")
  private Long duration = null;

  @SerializedName("recommended")
  private Boolean recommended = null;

  @SerializedName("static_start")
  private Boolean staticStart = null;

  @SerializedName("_deltas")
  private Object deltas = null;

  // Fluent setter (returns this) for builder-style chaining.
  public CampaignTemplateItem modelTypeId(Long modelTypeId) {
    this.modelTypeId = modelTypeId;
    return this;
  }

  /**
   * Get modelTypeId
   * @return modelTypeId
   **/
  @ApiModelProperty(example = "14", value = "")
  public Long getModelTypeId() {
    return modelTypeId;
  }

  public void setModelTypeId(Long modelTypeId) {
    this.modelTypeId = modelTypeId;
  }

  public CampaignTemplateItem externalId(String externalId) {
    this.externalId = externalId;
    return this;
  }

  /**
   * Get externalId
   * @return externalId
   **/
  @ApiModelProperty(example = "0", value = "")
  public String getExternalId() {
    return externalId;
  }

  public void setExternalId(String externalId) {
    this.externalId = externalId;
  }

  public CampaignTemplateItem fieldValues(Object fieldValues) {
    this.fieldValues = fieldValues;
    return this;
  }

  /**
   * Get fieldValues
   * @return fieldValues
   **/
  @ApiModelProperty(value = "")
  public Object getFieldValues() {
    return fieldValues;
  }

  public void setFieldValues(Object fieldValues) {
    this.fieldValues = fieldValues;
  }

  public CampaignTemplateItem title(String title) {
    this.title = title;
    return this;
  }

  /**
   * Get title
   * @return title
   **/
  @ApiModelProperty(example = "FooBar Show", value = "")
  public String getTitle() {
    return title;
  }

  public void setTitle(String title) {
    this.title = title;
  }

  public CampaignTemplateItem duration(Long duration) {
    this.duration = duration;
    return this;
  }

  /**
   * Get duration
   * @return duration
   **/
  @ApiModelProperty(example = "120", value = "")
  public Long getDuration() {
    return duration;
  }

  public void setDuration(Long duration) {
    this.duration = duration;
  }

  public CampaignTemplateItem recommended(Boolean recommended) {
    this.recommended = recommended;
    return this;
  }

  /**
   * Get recommended
   * @return recommended
   **/
  @ApiModelProperty(example = "false", value = "")
  public Boolean isRecommended() {
    return recommended;
  }

  public void setRecommended(Boolean recommended) {
    this.recommended = recommended;
  }

  public CampaignTemplateItem staticStart(Boolean staticStart) {
    this.staticStart = staticStart;
    return this;
  }

  /**
   * Get staticStart
   * @return staticStart
   **/
  @ApiModelProperty(example = "false", value = "")
  public Boolean isStaticStart() {
    return staticStart;
  }

  public void setStaticStart(Boolean staticStart) {
    this.staticStart = staticStart;
  }

  public CampaignTemplateItem deltas(Object deltas) {
    this.deltas = deltas;
    return this;
  }

  /**
   * Get deltas
   * @return deltas
   **/
  @ApiModelProperty(value = "")
  public Object getDeltas() {
    return deltas;
  }

  public void setDeltas(Object deltas) {
    this.deltas = deltas;
  }

  // Value equality over all eight model fields.
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    CampaignTemplateItem campaignTemplateItem = (CampaignTemplateItem) o;
    return Objects.equals(this.modelTypeId, campaignTemplateItem.modelTypeId) &&
        Objects.equals(this.externalId, campaignTemplateItem.externalId) &&
        Objects.equals(this.fieldValues, campaignTemplateItem.fieldValues) &&
        Objects.equals(this.title, campaignTemplateItem.title) &&
        Objects.equals(this.duration, campaignTemplateItem.duration) &&
        Objects.equals(this.recommended, campaignTemplateItem.recommended) &&
        Objects.equals(this.staticStart, campaignTemplateItem.staticStart) &&
        Objects.equals(this.deltas, campaignTemplateItem.deltas);
  }

  @Override
  public int hashCode() {
    return Objects.hash(modelTypeId, externalId, fieldValues, title, duration, recommended, staticStart, deltas);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class CampaignTemplateItem {\n");
    sb.append("    modelTypeId: ").append(toIndentedString(modelTypeId)).append("\n");
    sb.append("    externalId: ").append(toIndentedString(externalId)).append("\n");
    sb.append("    fieldValues: ").append(toIndentedString(fieldValues)).append("\n");
    sb.append("    title: ").append(toIndentedString(title)).append("\n");
    sb.append("    duration: ").append(toIndentedString(duration)).append("\n");
    sb.append("    recommended: ").append(toIndentedString(recommended)).append("\n");
    sb.append("    staticStart: ").append(toIndentedString(staticStart)).append("\n");
    sb.append("    deltas: ").append(toIndentedString(deltas)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
|
"use strict";
// Babel runtime helper: forbid calling a class constructor without `new`.
function _classCallCheck(instance, Constructor) {
  if (instance instanceof Constructor) {
    return;
  }
  throw new TypeError("Cannot call a class as a function");
}
// Babel runtime helper: install property descriptors on `target`, defaulting
// enumerable=false, configurable=true, and writable=true for data properties.
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
// Babel runtime helper: attach prototype members and static members to a
// constructor function and return it.
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
// Babel runtime helper: brand `obj` as owning a private method by adding it to
// the method's WeakSet, rejecting double initialization.
function _classPrivateMethodInitSpec(obj, privateSet) { _checkPrivateRedeclaration(obj, privateSet); privateSet.add(obj); }
// Babel runtime helper: reject registering the same object twice in a
// private-member collection.
function _checkPrivateRedeclaration(obj, privateCollection) {
  if (!privateCollection.has(obj)) {
    return;
  }
  throw new TypeError("Cannot initialize the same private elements twice on an object");
}
// Babel runtime helper: hand back `fn` only when `receiver` carries the brand
// recorded in `privateSet`.
function _classPrivateMethodGet(receiver, privateSet, fn) {
  if (privateSet.has(receiver)) {
    return fn;
  }
  throw new TypeError("attempted to get private field on non-instance");
}
// Brand sets for CustomInputFile's private methods: an instance must be
// registered in the matching set before the private method may be fetched.
var _createElement = /*#__PURE__*/new WeakSet();
var _changeHandler = /*#__PURE__*/new WeakSet();
var _addPreview = /*#__PURE__*/new WeakSet();
var _removeHandler = /*#__PURE__*/new WeakSet();
var _getFileSizeMb = /*#__PURE__*/new WeakSet();
var _createMessage = /*#__PURE__*/new WeakSet();
var _filesValidation = /*#__PURE__*/new WeakSet();
// Transpiled (Babel) ES5 class: wraps a native <input type="file"> with custom
// add/clear controls, optional image previews, and client-side validation.
var CustomInputFile = /*#__PURE__*/function () {
  // options: { input (required), multiple?, accept?, max_file_count?,
  //   max_all_file_size_mb?, max_file_size_mb?, visible_btn_clear?,
  //   has_preview_image?, text_btn_add?, text_btn_clear?, text_description?,
  //   error_remove_timeout? }
  function CustomInputFile(options) {
    var _this = this;
    _classCallCheck(this, CustomInputFile);
    // Brand this instance for each private method.
    _classPrivateMethodInitSpec(this, _filesValidation);
    _classPrivateMethodInitSpec(this, _createMessage);
    _classPrivateMethodInitSpec(this, _getFileSizeMb);
    _classPrivateMethodInitSpec(this, _removeHandler);
    _classPrivateMethodInitSpec(this, _addPreview);
    _classPrivateMethodInitSpec(this, _changeHandler);
    _classPrivateMethodInitSpec(this, _createElement);
    // Selected files keyed by file name, plus running count / total size (MB).
    this.inputData = {};
    this.inputData.files = {};
    this.inputData.filesCount = 0;
    this.inputData.filesSizeMb = 0;
    this._vars = {};
    this._elements = {};
    this._error = false;
    if (!options || !options.input) {
      console.error('В опции не передан инпут или id');
      this._error = true;
    }
    // NOTE(review): when options or options.input is missing, the next line
    // still dereferences options.input and throws before _error is consulted.
    this._elements.form = options.input.closest('form');
    if (!this._elements.form) {
      console.error('Форма не найдена');
      this._error = true;
    }
    if (!this._error) {
      // Option defaults.
      var DEFAULT_ACCEPT = ['image/jpg', 'image/jpeg', 'image/png', 'image/gif', 'application/pdf'];
      var DEFAULT_MAX_FILE_COUNT = 1;
      var DEFAULT_MAX_ALL_FILE_SIZE_MB = 1;
      var DEFAULT_MAX_FILE_SIZE_MB = 1;
      var DEFAULT_VISIBLE_BTN_CLEAR = false;
      var DEFAULT_HAS_PREVIEW_IMAGE = false;
      var DEFAULT_TEXT_BTN_ADD = 'Добавить файл';
      var DEFAULT_TEXT_BTN_clear = 'Удалить все файлы';
      var DEFAULT_ERROR_REMOVE_TIMEOUT = 5000;
      // Copy validated options into _vars, falling back to the defaults.
      if (options.hasOwnProperty('error_remove_timeout') && Number.isInteger(options.error_remove_timeout)) {
        this._vars.errorRemoveTimeout = options.error_remove_timeout;
      } else {
        this._vars.errorRemoveTimeout = DEFAULT_ERROR_REMOVE_TIMEOUT;
      }
      if (options.hasOwnProperty('visible_btn_clear')) {
        this._vars.visibleBtnClear = options.visible_btn_clear;
      } else {
        this._vars.visibleBtnClear = DEFAULT_VISIBLE_BTN_CLEAR;
      }
      if (options.hasOwnProperty('accept') && Array.isArray(options.accept)) {
        this._vars.accept = options.accept;
      } else {
        this._vars.accept = DEFAULT_ACCEPT;
      }
      if (options.hasOwnProperty('max_file_count') && Number.isInteger(options.max_file_count)) {
        this._vars.maxFileCount = options.max_file_count;
      } else {
        this._vars.maxFileCount = DEFAULT_MAX_FILE_COUNT;
      }
      if (options.hasOwnProperty('max_all_file_size_mb') && Number.isInteger(options.max_all_file_size_mb)) {
        this._vars.maxAllFileSizeMb = options.max_all_file_size_mb;
      } else {
        this._vars.maxAllFileSizeMb = DEFAULT_MAX_ALL_FILE_SIZE_MB;
      }
      if (options.hasOwnProperty('max_file_size_mb') && Number.isInteger(options.max_file_size_mb)) {
        this._vars.maxFileSizeMb = options.max_file_size_mb;
      } else {
        this._vars.maxFileSizeMb = DEFAULT_MAX_FILE_SIZE_MB;
      }
      if (options.hasOwnProperty('has_preview_image')) {
        this._vars.hasPreviewImage = options.has_preview_image;
      } else {
        this._vars.hasPreviewImage = DEFAULT_HAS_PREVIEW_IMAGE;
      }
      var DEFAULT_TEXT_DESCRIPTION = "\u0412\u044B \u043C\u043E\u0436\u0435\u0442\u0435 \u043F\u0440\u0438\u043A\u0440\u0435\u043F\u0438\u0442\u044C \u0434\u043E ".concat(this._vars.maxFileCount, " \u043E\u0431\u0449\u0438\u043C \u043E\u0431\u044A\u0435\u043C\u043E\u043C ").concat(this._vars.maxAllFileSizeMb);
      if (options.hasOwnProperty('text_description') && typeof options.text_description === 'string') {
        this._vars.textDescription = options.text_description;
      } else {
        this._vars.textDescription = DEFAULT_TEXT_DESCRIPTION;
      }
      // Build the replacement controls markup around the hidden input.
      this._elements.container = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'div', 'file-upload');
      this._elements.btnAdd = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'button', 'add');
      this._elements.btnAdd.setAttribute('type', 'button');
      this._elements.btnAddContainer = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'div', 'controls__item, add-container');
      this._elements.controls = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'div', 'controls');
      if (options.hasOwnProperty('text_btn_add')) {
        this._elements.btnAdd.textContent = options.text_btn_add;
      } else {
        this._elements.btnAdd.textContent = DEFAULT_TEXT_BTN_ADD;
      }
      this._elements.btnAddContainer.insertAdjacentElement('beforeend', this._elements.btnAdd);
      this._elements.controls.insertAdjacentElement('beforeend', this._elements.btnAddContainer);
      this._elements.container.insertAdjacentElement('beforeend', this._elements.controls);
      // Optional "clear all" button.
      if (this._vars.visibleBtnClear) {
        this._elements.btnClear = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'button', 'clear');
        this._elements.btnClearContainer = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'div', 'controls__item, clear-container');
        this._elements.btnClear.setAttribute('type', 'button');
        if (options.hasOwnProperty('text_btn_clear')) {
          this._elements.btnClear.textContent = options.text_btn_clear;
        } else {
          this._elements.btnClear.textContent = DEFAULT_TEXT_BTN_clear;
        }
        this._elements.btnClear.addEventListener('click', function (e) {
          return _this.clearCustomFiles(_this._elements.form);
        });
        this._elements.btnClearContainer.insertAdjacentElement('beforeend', this._elements.btnClear);
        this._elements.controls.insertAdjacentElement('beforeend', this._elements.btnClearContainer);
      }
      this._elements.inputDescription = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'div', 'input-description');
      this._elements.inputDescription.textContent = this._vars.textDescription;
      this._elements.container.insertAdjacentElement('beforeend', this._elements.inputDescription); // Setup controls & input
      var input = options.input;
      if (options.hasOwnProperty('multiple') && options.multiple) {
        input.setAttribute('multiple', true);
      }
      input.setAttribute('name', 'filename[]');
      // NOTE(review): _vars.accept is an array; setAttribute stringifies it
      // (comma-joined), which happens to match the accept attribute format.
      input.setAttribute('accept', this._vars.accept);
      if (input.getAttribute('type') !== 'file') {
        input.setAttribute('type', 'file');
      }
      input.setAttribute('hidden', true);
      input.insertAdjacentElement('afterend', this._elements.container);
      this._elements.btnAdd.addEventListener('click', function (e) {
        return input.click();
      });
      input.addEventListener('change', function (e) {
        return _classPrivateMethodGet(_this, _changeHandler, _changeHandler2).call(_this, e);
      });
    }
  }
  _createClass(CustomInputFile, [{
    key: "checkFileType",
    // True when file.type is one of the configured accept MIME types.
    value: function checkFileType(file) {
      var fileType = file.type;
      var allowedAccept = this._vars.accept.filter(function (acceptType) {
        return acceptType === fileType;
      });
      return allowedAccept.length ? true : false;
    }
  }, {
    key: "getCustomFiles",
    // Replace any 'filename[]' entries in the given FormData with the
    // currently selected files; returns false on configuration error.
    value: function getCustomFiles(FormData) {
      if (this._error || !FormData) {
        return false;
      }
      if (FormData.get('filename[]')) {
        FormData["delete"]('filename[]');
      }
      for (var file in this.inputData.files) {
        FormData.append('filename[]', this.inputData.files[file]);
      }
      return FormData;
    }
  }, {
    key: "clearCustomFiles",
    // Forget every selected file and remove the preview list and any message.
    value: function clearCustomFiles() {
      if (this._error) {
        return false;
      }
      var fileList = this._elements.form.querySelector('.file-list');
      var fileMessage = this._elements.form.querySelector('.message-container');
      if (this.inputData.files) {
        for (var file in this.inputData.files) {
          delete this.inputData.files[file];
        }
      }
      this.inputData.filesCount = 0;
      this.inputData.filesSizeMb = 0;
      if (fileList) {
        fileList.remove();
      }
      if (fileMessage) {
        fileMessage.remove();
      }
    }
  }]);
  return CustomInputFile;
}();
// Private: create a DOM element of `type`; `elementClass` is a comma-separated
// class list (all whitespace stripped before splitting).
function _createElement2(type, elementClass) {
  var node = document.createElement(type);
  if (!elementClass) {
    return node;
  }
  elementClass.replace(/\s/g, '').split(',').forEach(function (cls) {
    node.classList.add(cls);
  });
  return node;
}
// Private: <input> "change" handler. Validates the chosen batch; on success
// stores each file keyed by name and renders its preview row, otherwise shows
// the validation message. Always clears input.value so re-choosing the same
// file fires "change" again.
function _changeHandler2(event) {
  var _this2 = this;
  var files = Array.from(event.target.files);
  if (!files.length) {
    return false;
  }
  var validationResult = _classPrivateMethodGet(this, _filesValidation, _filesValidation2).call(this, files);
  if (validationResult.status) {
    files.forEach(function (file) {
      _this2.inputData.files[file.name] = file;
      _classPrivateMethodGet(_this2, _addPreview, _addPreview2).call(_this2, file);
    });
  } else {
    _classPrivateMethodGet(this, _createMessage, _createMessage2).call(this, validationResult.message, validationResult.type);
  }
  event.target.value = '';
}
// Private: append a preview row for `file` to the form's .file-list,
// creating the list container on first use. The row (name, size, remove
// button, optional image thumbnail) is built once the FileReader finishes.
function _addPreview2(file) {
  var _this3 = this;
  if (!this._elements.form.querySelector('.file-list')) {
    var fileList = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'div', 'file-list');
    this._elements.controls.insertAdjacentElement('afterend', fileList);
  }
  var reader = new FileReader();
  reader.addEventListener('load', function (event) {
    // Row class carries the MIME type with '.'/'/' mapped to '-'.
    var fileItem = _classPrivateMethodGet(_this3, _createElement, _createElement2).call(_this3, 'div', "file-item, ".concat(file.type.replace(/[\\.\\/]/g, '-')));
    fileItem.setAttribute('data-file-name', file.name);
    if (_this3._vars.hasPreviewImage) {
      var fileItemAside = _classPrivateMethodGet(_this3, _createElement, _createElement2).call(_this3, 'div', 'file-item__aside');
      var fileItemPreview = _classPrivateMethodGet(_this3, _createElement, _createElement2).call(_this3, 'div', 'file-item__preview');
      fileItemPreview.setAttribute('style', "background-image: url(".concat(event.target.result, ")"));
      fileItemAside.insertAdjacentElement('beforeend', fileItemPreview);
      fileItem.insertAdjacentElement('beforeend', fileItemAside);
    }
    var fileItemMain = _classPrivateMethodGet(_this3, _createElement, _createElement2).call(_this3, 'div', 'file-item__main');
    var fileItemName = _classPrivateMethodGet(_this3, _createElement, _createElement2).call(_this3, 'div', 'file-item__name');
    fileItemName.textContent = file.name;
    var fileItemSize = _classPrivateMethodGet(_this3, _createElement, _createElement2).call(_this3, 'div', 'file-item__size');
    fileItemSize.textContent = "".concat(_classPrivateMethodGet(_this3, _getFileSizeMb, _getFileSizeMb2).call(_this3, file), " \u041C\u0431");
    var fileItemRemove = _classPrivateMethodGet(_this3, _createElement, _createElement2).call(_this3, 'div', 'file-item__remove');
    fileItemRemove.addEventListener('click', function (e) {
      return _classPrivateMethodGet(_this3, _removeHandler, _removeHandler2).call(_this3, e, file);
    });
    var fileList = _this3._elements.form.querySelector('.file-list');
    fileItemMain.insertAdjacentElement('beforeend', fileItemName);
    fileItemMain.insertAdjacentElement('beforeend', fileItemSize);
    fileItemMain.insertAdjacentElement('beforeend', fileItemRemove);
    fileItem.insertAdjacentElement('beforeend', fileItemMain);
    fileList.insertAdjacentElement('beforeend', fileItem);
  });
  // Count is bumped synchronously; the DOM row appears asynchronously.
  this.inputData.filesCount = ++this.inputData.filesCount;
  reader.readAsDataURL(file);
}
// Private: click handler for a preview row's remove button — forgets the file
// and updates the count/size bookkeeping.
function _removeHandler2(eventRemove) {
  var fileItem = eventRemove.target.closest('.file-item');
  var fileName = fileItem.dataset.fileName;
  var fileSizeMb = _classPrivateMethodGet(this, _getFileSizeMb, _getFileSizeMb2).call(this, this.inputData.files[fileName]);
  delete this.inputData.files[fileName];
  fileItem.remove();
  this.inputData.filesCount = --this.inputData.filesCount;
  this.inputData.filesSizeMb -= fileSizeMb;
}
// Private: size of `file` in whole megabytes, rounded up; empty files are 0.
function _getFileSizeMb2(file) {
  var byteCount = file.size;
  return byteCount === 0 ? 0 : Math.ceil(byteCount / 1048576);
}
// Private: show a transient message of the given type under the controls; it
// is removed after errorRemoveTimeout milliseconds.
function _createMessage2(text, type) {
  var messageContainer = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'div', 'message-container');
  var messageText = _classPrivateMethodGet(this, _createElement, _createElement2).call(this, 'div', "message-container__text, message-container__text--".concat(type));
  messageText.textContent = text;
  messageContainer.insertAdjacentElement('beforeend', messageText);
  this._elements.container.insertAdjacentElement('beforeend', messageContainer);
  setTimeout(function () {
    messageContainer.remove();
  }, this._vars.errorRemoveTimeout);
}
// Private: validate a candidate batch of files against count, per-file size,
// duplicate-name, MIME-type and total-size limits. Returns
// { status, type, message }. On success the batch size is committed to
// this.inputData.filesSizeMb.
function _filesValidation2(files) {
  var _this4 = this;
  var obj = {};
  obj.message = '';
  obj.type = '';
  obj.status = true;
  if (files.length > this._vars.maxFileCount - this.inputData.filesCount) {
    obj.status = false;
    obj.type = 'error';
    obj.message = 'Превышено количество файлов';
    return obj;
  }
  // BUG FIX: the batch size used to be added to inputData.filesSizeMb
  // immediately, so a rejected batch still "consumed" quota forever.
  // Accumulate locally and commit only when validation succeeds.
  var pendingSizeMb = 0;
  files.forEach(function (file) {
    var fileSizeMb = _classPrivateMethodGet(_this4, _getFileSizeMb, _getFileSizeMb2).call(_this4, file);
    pendingSizeMb += fileSizeMb;
    if (fileSizeMb >= _this4._vars.maxFileSizeMb) {
      obj.status = false;
      obj.type = 'error';
      obj.message = "\u0412\u0435\u0441 \u0444\u0430\u0439\u043B\u0430 ".concat(file.name, " \u0431\u043E\u043B\u044C\u0448\u0435 \u0440\u0430\u0437\u0440\u0435\u0448\u0435\u043D\u043D\u043E\u0433\u043E");
    }
    if (!_this4.checkFileType(file)) {
      obj.status = false;
      obj.type = 'error';
      obj.message = "\u041D\u0435\u043B\u044C\u0437\u044F \u0437\u0430\u0433\u0440\u0443\u0436\u0430\u0442\u044C \u0444\u0430\u0439\u043B\u044B \u0441 \u0440\u0430\u0441\u0448\u0438\u0440\u0435\u043D\u0438\u0435\u043C ".concat(file.type);
    }
    if (_this4.inputData.files[file.name]) {
      obj.status = false;
      obj.type = 'error';
      obj.message = "\u0424\u0430\u0439\u043B ".concat(file.name, " \u0443\u0436\u0435 \u0431\u044B\u043B \u0432\u044B\u0431\u0440\u0430\u043D");
    }
  });
  if (!obj.status) {
    return obj;
  }
  if (this.inputData.filesSizeMb + pendingSizeMb >= this._vars.maxAllFileSizeMb) {
    // BUG FIX: this branch previously left obj.status === true, so an
    // over-quota batch was still accepted by the change handler.
    obj.status = false;
    obj.type = 'error';
    obj.message = 'Превышено общий вес файлов';
    return obj;
  }
  this.inputData.filesSizeMb += pendingSizeMb;
  obj.type = 'success';
  obj.status = true;
  return obj;
}
/*
*
*/
package net.community.chest.apache.ant.winver;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import net.community.chest.util.collection.CollectionsUtils;
/**
* <P>Copyright as per GPLv2</P>
*
* <P>Represents the 4 standard Windows version numbers</P>
*
* @author <NAME>.
* @since Jul 7, 2009 10:30:58 AM
*/
public enum VersionComponent {
    // NOTE !!! order is important
    MAJOR,
    MINOR,
    RELEASE,
    BUILD;

    // Immutable cached view of values() so callers can iterate without
    // re-allocating the array on every call.
    public static final List<VersionComponent> VALUES=Collections.unmodifiableList(Arrays.asList(values()));
    // Resolve an enum constant by name via CollectionsUtils (third argument
    // presumably controls case sensitivity — confirm against its javadoc).
    public static final VersionComponent fromString (final String s)
    {
        return CollectionsUtils.fromString(VALUES, s, false);
    }

    // True when n is usable as a Windows version component.
    // NOTE(review): the upper bound is EXCLUSIVE of 0xFFFF (accepts 0..65534);
    // confirm whether 65535 (a WORD's full range) should be valid.
    public static final boolean isValidComponentNumber (final int n)
    {
        return (n >= 0) && (n < 0x0FFFF);
    }

    // Null-safe overload delegating to the int variant.
    public static final boolean isValidComponentNumber (final Number n)
    {
        return (n != null) && isValidComponentNumber(n.intValue());
    }
}
|
#!/bin/zsh
# fzf-tab
# preview directory's content with exa when completing cd
zstyle ':fzf-tab:complete:(z|cd):*' fzf-preview 'exa -1 --color=always $realpath'
# switch group using `,` and `.`
zstyle ':fzf-tab:*' switch-group ',' '.'
# fzf
# https://github.com/junegunn/fzf
# Default lister: fd, falling back to rg, then find.
# NOTE(review): the rg fallback passes `-f` (pattern-file flag) — presumably
# `--files` was intended; confirm before relying on the fallback.
export FZF_DEFAULT_COMMAND="fd -t f -H -L -E '.git' || rg -f -. -L -g '!.git' || find ."
export FZF_CTRL_T_COMMAND=${FZF_DEFAULT_COMMAND}
export FZF_ALT_C_COMMAND="fd -t d"
export FZF_CTRL_R_OPTS='--sort --exact'
# Global fzf options: hidden preview pane toggled with `?`.
export FZF_DEFAULT_OPTS="
--layout=reverse
--info=inline
--height=60%
--multi
--border
--preview-window=:hidden
--preview '([[ -f {} ]] && (bat --style=numbers --color=always {} || cat {})) || ([[ -d {} ]] && (exa -T {} | less)) || echo {} 2> /dev/null | head -200'
--prompt='∼ ' --marker='✓'
--color='dark,hl:33,hl+:37,fg+:235,bg+:136,fg+:254'
--color='info:254,prompt:37,spinner:108,pointer:235,marker:235'
--bind '?:toggle-preview'
"
# Options to fzf command
export FZF_COMPLETION_OPTS='--border --info=inline'
# Use fd (https://github.com/sharkdp/fd) instead of the default find
# command for listing path candidates.
# - The first argument to the function ($1) is the base path to start traversal
# - See the source code (completion.{bash,zsh}) for the details.
_fzf_compgen_path() {
  fd --hidden --follow --exclude ".git" . "$1"
}
# Use fd to generate the list for directory completion
_fzf_compgen_dir() {
  fd --type d --hidden --follow --exclude ".git" . "$1"
}
# (EXPERIMENTAL) Advanced customization of fzf options via _fzf_comprun function
# - The first argument to the function is the name of the command.
# - You should make sure to pass the rest of the arguments to fzf.
# Per-command preview: tree for cd/z, value for env vars, dig for ssh hosts.
_fzf_comprun() {
  local command=$1
  shift
  case "$command" in
    cd|z)         fzf "$@" --preview 'exa -T {} | head -200' ;;
    export|unset) fzf "$@" --preview "eval 'echo \$'{}" ;;
    ssh)          fzf "$@" --preview 'dig {}' ;;
    *)            fzf "$@" ;;
  esac
}
# GIT heart FZF
# @see https://junegunn.kr/2016/07/fzf-git/
# CTRL-G CTRL-F for files
# CTRL-G CTRL-B for branches
# CTRL-G CTRL-T for tags
# CTRL-G CTRL-R for remotes
# CTRL-G CTRL-H for commit hashes
# Exit status 0 iff the current directory is inside a git repository.
is_in_git_repo() {
  git rev-parse HEAD > /dev/null 2>&1
}
# Shared fzf invocation for the git helpers below; CTRL-/ toggles the preview.
fzf-down() {
  fzf --height 50% --min-height 20 --border --bind ctrl-/:toggle-preview "$@"
}
# Pick modified/untracked files from `git status`, previewing each diff;
# prints the bare path (rename arrows stripped).
_gf() {
  is_in_git_repo || return
  git -c color.status=always status --short |
  fzf-down -m --ansi --nth 2..,.. \
    --preview '(git diff --color=always -- {-1} | sed 1,4d; cat {-1})' |
  cut -c4- | sed 's/.* -> //'
}
# Pick branches (local and remote) with a log-graph preview; prints the
# branch name with the leading markers and "remotes/" prefix stripped.
_gb() {
  is_in_git_repo || return
  git branch -a --color=always | grep -v '/HEAD\s' | sort |
  fzf-down --ansi --multi --tac --preview-window right:70% \
    --preview 'git log --oneline --graph --date=short --color=always --pretty="format:%C(auto)%cd %h%d %s" $(sed s/^..// <<< {} | cut -d" " -f1)' |
  sed 's/^..//' | cut -d' ' -f1 |
  sed 's#^remotes/##'
}
# Pick tags (newest version first) with a `git show` preview.
_gt() {
  is_in_git_repo || return
  git tag --sort -version:refname |
  fzf-down --multi --preview-window right:70% \
    --preview 'git show --color=always {}'
}
# Pick commits from the log graph; prints the selected commit hashes.
_gh() {
  is_in_git_repo || return
  git log --date=short --format="%C(green)%C(bold)%cd %C(auto)%h%d %s (%an)" --graph --color=always |
  fzf-down --ansi --no-sort --reverse --multi --bind 'ctrl-s:toggle-sort' \
    --header 'Press CTRL-S to toggle sort' \
    --preview 'grep -o "[a-f0-9]\{7,\}" <<< {} | xargs git show --color=always' |
  grep -o "[a-f0-9]\{7,\}"
}
# Pick a remote with a log-graph preview; prints the remote name.
_gr() {
  is_in_git_repo || return
  git remote -v | awk '{print $1 "\t" $2}' | uniq |
  fzf-down --tac \
    --preview 'git log --oneline --graph --date=short --pretty="format:%C(auto)%cd %h%d %s" {1}' |
  cut -d$'\t' -f1
}
# Pick a stash entry with a `git show` preview; prints the stash ref.
_gs() {
  is_in_git_repo || return
  git stash list | fzf-down --reverse -d: --preview 'git show --color=always {1}' |
  cut -d: -f1
}
# Join stdin lines into one shell-quoted, space-separated string (zsh (q) flag).
join-lines() {
  local item
  while read item; do
    echo -n "${(q)item} "
  done
}
# For each suffix c, define a ZLE widget that runs _g$c and inserts the
# selection into the command line, bound to CTRL-G CTRL-$c.
bind-git-helper() {
  local c
  for c in $@; do
    eval "fzf-g$c-widget() { local result=\$(_g$c | join-lines); zle reset-prompt; LBUFFER+=\$result }"
    eval "zle -N fzf-g$c-widget"
    eval "bindkey '^g^$c' fzf-g$c-widget"
  done
}
# Install the bindings, then drop the helper from the namespace.
bind-git-helper f b t r h s
unset -f bind-git-helper
|
<reponame>atomlab/stihi-frontend-1.0<filename>src/app/posts/posts-announce.component.ts
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import {PostsService} from "../posts/posts.service";
import {HeaderService} from "../shared/services/header.service";
declare var jquery:any;
declare var $ :any;
@Component({
  selector: 'app-posts-announce',
  templateUrl: './posts-announce.component.html',
  styleUrls: []
})
export class PostsAnnounceComponent implements OnInit {
  // Target announce page code (set by selectAnnounce()).
  type: string = '';
  selectedObjects: any = [];
  // Post being announced; placeholder values shown until load() resolves.
  post: any = {
    title: 'Заголовок',
    author: 'Автор',
    id: 0,
    val_power: 0,
    user: {},
  };
  postId: number = 0;
  announces: any = [];
  // NOTE(review): holds the raw session in ngOnInit() and is later replaced
  // by the loaded user object in loadUser() — two shapes share this field.
  user: any = {};
  // Credentials used to sign the createAnnounce request.
  secret: any = {
    login: '',
    key: '',
  };
  page: string = '';
  userRole: any;
  userId: any;
  TODO: boolean = false;
  constructor(
    private postsService: PostsService,
    private route: ActivatedRoute,
    private router: Router,
    private headerService: HeaderService,
  ) {}
  ngOnInit() {
    // Reset leftover header dropdown / popup state from the previous page.
    $('.header__bottom-right-drop').hide();
    $.magnificPopup.close({
      items: {
        src: '#p-page'
      }
    });
    this.user = this.postsService.getSession();
    this.userRole = this.postsService.getUserRole();
    this.userId = this.postsService.getUserId();
    if (this.user.n) this.secret.login = this.postsService.getUserLogin(this.postsService.getSession());
    // NOTE(review): runs before the :id route param is read below, so the
    // announce page list is requested with postId still 0 — confirm intended.
    this.loadAnnounces();
    if (this.route.snapshot.params.id && !isNaN(this.route.snapshot.params.id)) {
      this.postId = this.route.snapshot.params.id;
    }
    this.page = '/posts/'+this.postId+'/announce';
    if (this.postId > 0) this.load();
    this.loadUser();
  }
  // Fetch the post being announced and decorate it with display names.
  load() {
    this.postsService.articelsGetOne({id: +this.postId, source_list: {
      list: 'list',
      sort_field: 'time',
      desc_order: false,
    }})
      .subscribe((data) => {
        data.content.metadata = JSON.parse(data.content.metadata);
        data.content.displayName = this.postsService.getUserDisplayName(data.content.user);
        data.content.displayNameBlog = this.postsService.getUserDisplayNameBlog(data.content.user);
        this.post = data.content;
        this.postsService.processJWT(data);
      });
  }
  // Open the auth popup that asks for login / active key.
  showAuth() {
    $.magnificPopup.open({
      items: {
        src: '#p-auth'
      },
      //closeOnBgClick: false,
      callbacks: {
        beforeOpen: function() { this.wrap.removeAttr('tabindex') },
        open: function() {},
        close: () => {}
      }
    });
  }
  // Load the list of pages an announce can be placed on.
  loadAnnounces() {
    this.postsService.getAnnouncePageList({id: +this.postId})
      .subscribe((data) => {
        this.announces = data.list;
        this.postsService.processJWT(data);
      });
  }
  // Called from the auth popup: re-check credentials, then submit.
  auth() {
    if (this.secret.key.length < 5 || this.secret.login.length < 3) {
      alert('Пожалуйста авторизуйтесь');
      return;
    }
    this.save();
  }
  // Validate selection and credentials, create the announce, then navigate to
  // the chosen page ('main' in the page code maps to the 'index' route).
  save() {
    if (this.post.id == 0) {
      alert('Пожалуйста выберите произведение.');
      return;
    }
    if (this.type.length < 2) {
      alert('Пожалуйста выберите, где разместить Ваш анонс.');
      return;
    }
    if (this.secret.key.length < 5 || this.secret.login.length < 3) {
      this.showAuth();
      return;
    }
    $.magnificPopup.close();
    this.postsService.createAnnounce({"article_id": this.post.id, "page_code": this.type, "active_key": this.secret.key, "login": this.secret.login})
      .subscribe((data) => {
        // console.log(data);
        if (data.error && data.error.length > 0) {
          alert(data.error);
          return;
        }
        this.router.navigate(['/posts/'+this.type.replace('main', 'index')]);
      });
  }
  // Thin delegations to PostsService (used from the template).
  getUserPower(value, type) {
    return this.postsService.getUserPower(value, type);
  }
  getUserReputation(value) {
    return this.postsService.getUserReputation(value);
  }
  loadUser() {
    this.postsService.getUser({id: +this.postsService.getUserId()})
      .subscribe((data) => {
        this.user = data.user;
        this.postsService.processJWT(data);
      });
  }
  // Mark the clicked announce page checkbox and remember its code.
  selectAnnounce(o: any) {
    $('.checkbox__input').prop('checked', false);
    $('#checkbox1'+o.code).prop('checked', true);
    this.type = o.code;
  }
}
|
# Fine-tune iter-BERT on MultiRC: one baseline run plus three seeded repeats.
# The four original invocations differed only in --output_dir suffix and
# --seed, so they are deduplicated into a single helper function.
data_dir='/home/admin/workspace/datasets/multirc-v2'
model_dir='/home/admin/workspace/bert-base-uncased'
model_name='iter_bert_sc_v3'
reader_name='multi_rc_sent'
oss_pretrain='bert_iter_sr_mlm_4/pytorch_model_20000.bin'

# run_multirc SUFFIX [EXTRA_ARGS...]
#   SUFFIX     – appended to the output directory name (may be empty)
#   EXTRA_ARGS – forwarded verbatim to main_multirc.py (e.g. --seed 33)
run_multirc() {
  local suffix=$1
  shift
  python main_multirc.py \
      --model_name $model_name --reader_name $reader_name \
      --model_name_or_path $model_dir \
      --do_train --do_eval \
      --train_file $data_dir/train.json --dev_file $data_dir/dev.json \
      --per_gpu_train_batch_size 16 \
      --per_gpu_eval_batch_size 16 \
      --gradient_accumulation_steps 2 \
      --learning_rate 3e-5 \
      --num_train_epochs 8.0 \
      --max_seq_length 512 \
      --max_query_length 512 \
      --output_dir experiments/multi_rc_iter_bert/iter_mcrc_v3_4_0_w_pt_20k${suffix} \
      --save_steps -1 \
      --logging_steps 500 \
      --save_metric em0 \
      --warmup_steps 600 --evaluate_during_training --weight_decay 0.01 --max_grad_norm 1.0 \
      --query_dropout 0.1 --cls_type 1 \
      --oss_pretrain $oss_pretrain "$@"
}

# Baseline (default seed) plus seeds 33 / 57 / 67, matching the original runs.
run_multirc ""
run_multirc "_s33" --seed 33
run_multirc "_s57" --seed 57
run_multirc "_s67" --seed 67
'use strict';
const path = require('path');
module.exports = {
meta: {
docs: {
description: 'require a license in package.json'
},
schema: [
{
'enum': ['always', 'allow-unlicensed']
}
]
},
create(context) {
let filename = context.getFilename();
if (path.basename(filename) !== 'package.json') {
return {};
}
let allowUnlicensed = context.options[0] === 'allow-unlicensed';
return {
AssignmentExpression(node) {
let json = node.right;
let property = json.properties.find(p => p.key.value === 'license');
if (!property) {
context.report({
node: json,
message: 'Missing license.'
});
return;
}
let license = property.value;
if (license.value === 'UNLICENSED' && !allowUnlicensed) {
context.report({
node: license,
message: 'Missing license.'
});
}
}
};
}
};
|
import java.util.List;
import java.util.ArrayList;
/**
 * Utility for selecting the prime numbers from a list of integers.
 */
public class FilterPrime {

    /**
     * Returns a new list containing only the prime numbers from the input,
     * in their original encounter order. The input list is not modified.
     *
     * @param numbers the integers to filter (may be empty)
     * @return the primes from {@code numbers}
     */
    public static List<Integer> filter(List<Integer> numbers) {
        List<Integer> primes = new ArrayList<>();
        for (int n : numbers) {
            if (isPrime(n)) {
                primes.add(n);
            }
        }
        return primes;
    }

    /**
     * Primality test by trial division.
     *
     * Performance fix: only odd divisors up to sqrt(number) are tried
     * (the original looped over every i < number), turning O(n) into
     * O(sqrt(n)) per call with identical results.
     */
    private static boolean isPrime(int number) {
        if (number <= 1) {
            return false;
        }
        if (number % 2 == 0) {
            return number == 2; // 2 is the only even prime
        }
        for (int i = 3; (long) i * i <= number; i += 2) {
            if (number % i == 0) {
                return false;
            }
        }
        return true;
    }
}
// Demo: build a sample list and print only its primes.
// NOTE(review): these statements sit outside any method; presumably intended
// for a main method or JShell snippet — confirm intended context.
List<Integer> numbers = new ArrayList<>();
numbers.add(2);
numbers.add(3);
numbers.add(5);
numbers.add(6);
numbers.add(7);
numbers.add(8);
numbers.add(10);
List<Integer> primes = FilterPrime.filter(numbers);
System.out.println(primes);
// Output: [2, 3, 5, 7]
<gh_stars>1-10
package cc.mrbird.febs.gateway.fallback;
import com.alibaba.csp.sentinel.adapter.gateway.zuul.fallback.BlockResponse;
import com.alibaba.csp.sentinel.adapter.gateway.zuul.fallback.ZuulBlockFallbackProvider;
import com.alibaba.csp.sentinel.slots.block.BlockException;
import org.springframework.http.HttpStatus;
/**
* 自定义限流异常
*
* @author MrBird
*/
public class FebsGatewayBlockFallbackProvider implements ZuulBlockFallbackProvider {

    @Override
    public String getRoute() {
        // This fallback applies to every gateway route.
        return "*";
    }

    @Override
    public BlockResponse fallbackResponse(String route, Throwable throwable) {
        // Sentinel flow-control rejections surface as HTTP 429; any other
        // failure is reported as an internal server error (HTTP 500).
        if (!(throwable instanceof BlockException)) {
            return new BlockResponse(HttpStatus.INTERNAL_SERVER_ERROR.value(),
                    "系统内部异常", route);
        }
        return new BlockResponse(HttpStatus.TOO_MANY_REQUESTS.value(),
                "访问频率超限", route);
    }
}
|
<reponame>nimatz151/outline-server<gh_stars>1000+
// Copyright 2021 The Outline Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import * as digitalocean from './digitalocean';
import * as gcp from './gcp';
/**
 * Registry of the cloud-provider accounts available to the application.
 * At most one account per provider is held at any time.
 */
export interface CloudAccounts {
  /**
   * Connects a DigitalOcean account.
   *
   * Only one DigitalOcean account can be connected at any given time.
   * Subsequent calls to this method will overwrite any previously connected
   * DigitalOcean account.
   *
   * @param accessToken The DigitalOcean access token.
   */
  connectDigitalOceanAccount(accessToken: string): digitalocean.Account;

  /**
   * Connects a Google Cloud Platform account.
   *
   * Only one Google Cloud Platform account can be connected at any given time.
   * Subsequent calls to this method will overwrite any previously connected
   * Google Cloud Platform account.
   *
   * @param refreshToken The GCP refresh token.
   */
  connectGcpAccount(refreshToken: string): gcp.Account;

  /**
   * Disconnects the DigitalOcean account.
   */
  disconnectDigitalOceanAccount(): void;

  /**
   * Disconnects the Google Cloud Platform account.
   */
  disconnectGcpAccount(): void;

  /**
   * @returns the connected DigitalOcean account (or null if none exists).
   */
  getDigitalOceanAccount(): digitalocean.Account;

  /**
   * @returns the connected Google Cloud Platform account (or null if none exists).
   */
  getGcpAccount(): gcp.Account;
}
|
import { constants as docConstants, DocsInfo, DocsInfoConfig, SupportedDocJson } from '@0xproject/react-docs';
import * as _ from 'lodash';
import * as React from 'react';
import { connect } from 'react-redux';
import { Dispatch } from 'redux';
import { DocPage as DocPageComponent, DocPageProps } from 'ts/pages/documentation/doc_page';
import { Dispatcher } from 'ts/redux/dispatcher';
import { State } from 'ts/redux/reducer';
import { DocPackages, Environments, WebsitePaths } from 'ts/types';
import { configs } from 'ts/utils/configs';
import { constants } from 'ts/utils/constants';
import { Translate } from 'ts/utils/translate';
/* tslint:disable:no-var-requires */
const IntroMarkdown = require('md/docs/web3_wrapper/introduction');
const InstallationMarkdown = require('md/docs/web3_wrapper/installation');
/* tslint:enable:no-var-requires */
// Section anchor ids for the Web3Wrapper docs page; these keys tie the menu,
// the markdown sections and the TypeDoc module paths together below.
const docSections = {
    introduction: 'introduction',
    installation: 'installation',
    web3Wrapper: 'web3Wrapper',
    types: docConstants.TYPES_SECTION_NAME,
};
// Static configuration driving the rendered Web3Wrapper documentation page:
// menu layout, markdown sources, TypeDoc module paths and type visibility.
const docsInfoConfig: DocsInfoConfig = {
    id: DocPackages.Web3Wrapper,
    type: SupportedDocJson.TypeDoc,
    displayName: 'Web3Wrapper',
    packageUrl: 'https://github.com/0xProject/0x-monorepo',
    menu: {
        introduction: [docSections.introduction],
        install: [docSections.installation],
        web3Wrapper: [docSections.web3Wrapper],
        types: [docSections.types],
    },
    // Hand-written markdown for the non-generated sections.
    sectionNameToMarkdown: {
        [docSections.introduction]: IntroMarkdown,
        [docSections.installation]: InstallationMarkdown,
    },
    // TypeDoc module ids (note the embedded quotes) for generated sections.
    sectionNameToModulePath: {
        [docSections.web3Wrapper]: ['"web3-wrapper/src/web3_wrapper"'],
        [docSections.types]: ['"types/src/index"'],
    },
    menuSubsectionToVersionWhenIntroduced: {},
    sections: docSections,
    visibleConstructors: [docSections.web3Wrapper],
    typeConfigs: {
        // Note: This needs to be kept in sync with the types exported in index.ts. Unfortunately there is
        // currently no way to extract the re-exported types from index.ts via TypeDoc :(
        publicTypes: [
            'TxData',
            'TransactionReceipt',
            'RawLogEntry',
            'ContractAbi',
            'BlockParam',
            'FilterObject',
            'LogEntry',
            'BlockWithoutTransactionData',
            'CallData',
            'LogEntryEvent',
            'Provider',
            'AbiDefinition',
            'LogTopic',
            'JSONRPCRequestPayload',
            'JSONRPCResponsePayload',
            'BlockParamLiteral',
            'FunctionAbi',
            'EventAbi',
            'JSONRPCErrorCallback',
            'MethodAbi',
            'ConstructorAbi',
            'FallbackAbi',
            'EventParameter',
            'DataItem',
            'StateMutability',
            'Function',
            'Fallback',
            'Constructor',
            'Event',
            'ConstructorStateMutability',
            'TransactionReceiptWithDecodedLogs',
            'DecodedLogArgs',
            'LogWithDecodedArgs',
            'ContractEventArg',
        ],
        // Types documented elsewhere link out instead of being rendered.
        typeNameToExternalLink: {
            Web3: constants.URL_WEB3_DOCS,
            BigNumber: constants.URL_BIGNUMBERJS_GITHUB,
        },
        typeNameToPrefix: {},
        typeNameToDocSection: {
            Web3Wrapper: docSections.web3Wrapper,
        },
    },
};
const docsInfo = new DocsInfo(docsInfoConfig);

// Props sourced from the redux store.
interface ConnectedState {
    docsVersion: string;
    availableDocVersions: string[];
    docsInfo: DocsInfo;
    translate: Translate;
}

// Props sourced from dispatch.
interface ConnectedDispatch {
    dispatcher: Dispatcher;
}

// NOTE(review): ownProps is unused; it is kept only to match connect's
// (state, ownProps) mapper signature — confirm before removing.
const mapStateToProps = (state: State, ownProps: DocPageProps): ConnectedState => ({
    docsVersion: state.docsVersion,
    availableDocVersions: state.availableDocVersions,
    translate: state.translate,
    docsInfo,
});

const mapDispatchToProps = (dispatch: Dispatch<State>): ConnectedDispatch => ({
    dispatcher: new Dispatcher(dispatch),
});

// The redux-connected documentation page for the Web3Wrapper package.
export const Documentation: React.ComponentClass<DocPageProps> = connect(mapStateToProps, mapDispatchToProps)(
    DocPageComponent,
);
|
import { createTestHelper } from 'test/utils';
import { setupBackupInterpretor, sync } from 'app/sop/backup';
import { config } from 'dotenv';
// Smoke test for the local backup flow: loads .env.local, wires the backup
// interpretor into the test harness, and runs sync on target 'a'.
test('Local backup setup', async () => {
    config({ path: '.env.local' });
    const interpret = createTestHelper(true, false, [setupBackupInterpretor()]).setup();
    const fm = sync('a');
    const result = await interpret(fm);
    console.log('result :>> ', result);
    // Deliberate placeholder: `result` is only logged, never asserted, so
    // this test always fails until real expectations are written.
    throw Error('incomplete test');
});
|
<filename>src/test/java/com/softicar/gradle/java/library/plugin/SofticarJavaLibraryPluginTest.java
package com.softicar.gradle.java.library.plugin;
import com.softicar.gradle.AbstractGradlePluginTest;
import java.io.File;
import org.gradle.testkit.runner.GradleRunner;
import org.junit.Before;
import org.junit.Test;
public class SofticarJavaLibraryPluginTest extends AbstractGradlePluginTest {

    @Before
    public void setup() {
        // Minimal build applying the plugin under test; the fixed
        // group/version makes the produced jar name deterministic.
        appendToFile("build.gradle", """
                plugins {
                    id 'com.softicar.gradle.java.library'
                }
                group = 'com.example'
                version = '1.2.3'
                """);
        appendToFile("settings.gradle", """
                rootProject.name = 'foo'
                """);
    }

    @Test
    public void test() {
        // One trivial source file so the build has something to compile.
        File srcDir = mkdirs("src/main/java");
        appendToFile(new File(srcDir, "Test.java"), """
                public class Test {
                    public static void main(String[] args) {}
                }
                """);

        // Run a full clean build with the plugin on the runner classpath.
        GradleRunner//
                .create()
                .withProjectDir(testProjectDir.getRoot())
                .withPluginClasspath()
                .withArguments("clean", "build")
                .build();

        // The java.library plugin must have produced <name>-<version>.jar.
        File builtJar = new File(testProjectDir.getRoot(), "build/libs/foo-1.2.3.jar");
        assertTrue(builtJar.exists());
    }
}
|
<filename>packages/client/src/components/Common/FormikTextField/FormikTextField.test.tsx
import React from 'react';
import { render, screen } from 'test-utils';
import { FormikTextField } from './FormikTextField';
import { Formik } from 'formik';
import { fireEvent } from '@testing-library/dom';
import { act } from 'react-dom/test-utils';
const mockSubmit = jest.fn(() => {});
const mockValidateFail = jest.fn(() => ({
testField: { key: 'errorMessage' },
}));
const mockValidateSuccess = jest.fn(() => ({}));
describe('<FormikTextField />', () => {
it('should correctly pass input value initially', async () => {
render(
<Formik
initialValues={{ testField: 'testValue' }}
onSubmit={mockSubmit}
validate={mockValidateFail}
>
{() => (
<form>
<FormikTextField
name="testField"
id="testField"
label="TestLabel"
helperText="helperText"
/>
</form>
)}
</Formik>
);
expect(await screen.findByDisplayValue('testValue')).toBeInTheDocument();
});
it('should call validate when touched', async () => {
render(
<Formik
initialValues={{ testField: 'testValue' }}
onSubmit={mockSubmit}
validate={mockValidateFail}
>
{() => (
<form>
<FormikTextField
name="testField"
id="testField"
label="TestLabel"
/>
</form>
)}
</Formik>
);
const input = screen.getByLabelText('TestLabel');
await act(async () => {
fireEvent.blur(input);
});
expect(mockValidateFail).toBeCalledTimes(1);
});
it('should display helperText initially', async () => {
render(
<Formik
initialValues={{ testField: 'testValue' }}
onSubmit={mockSubmit}
validate={mockValidateFail}
>
{() => (
<form>
<FormikTextField
name="testField"
id="testField"
label="TestLabel"
helperText="helperText"
/>
</form>
)}
</Formik>
);
expect(await screen.findByText('helperText')).toBeInTheDocument();
});
it('should display error after touched and validation fails', async () => {
render(
<Formik
initialValues={{ testField: 'testValue' }}
onSubmit={mockSubmit}
validate={mockValidateFail}
>
{() => (
<form>
<FormikTextField
name="testField"
id="testField"
label="TestLabel"
helperText="helperText"
/>
</form>
)}
</Formik>
);
const input = screen.getByLabelText('TestLabel');
expect(await screen.findByText('helperText')).toBeInTheDocument();
await act(async () => {
fireEvent.blur(input);
});
expect(await screen.queryByText('helperText')).toBeNull();
expect(
await screen.findByText('translation:errorMessage')
).toBeInTheDocument();
expect(await screen.findByText('TestLabel')).toHaveClass('Mui-error');
});
it('should not display error when not touched', async () => {
render(
<Formik
initialValues={{ testField: 'testValue' }}
onSubmit={mockSubmit}
validate={mockValidateFail}
>
{() => (
<form>
<FormikTextField
name="testField"
id="testField"
label="TestLabel"
helperText="helperText"
/>
</form>
)}
</Formik>
);
expect(await screen.findByText('helperText')).toBeInTheDocument();
expect(await screen.queryByText('translation:errorMessage')).toBeNull();
expect(await screen.findByText('TestLabel')).not.toHaveClass('Mui-error');
});
it('should display helperText after validation passed', async () => {
render(
<Formik
initialValues={{ testField: 'testValue' }}
onSubmit={mockSubmit}
validate={mockValidateSuccess}
>
{() => (
<form>
<FormikTextField
name="testField"
id="testField"
label="TestLabel"
helperText="helperText"
/>
</form>
)}
</Formik>
);
const input = screen.getByLabelText('TestLabel');
expect(await screen.findByText('helperText')).toBeInTheDocument();
await act(async () => {
fireEvent.blur(input);
});
expect(mockValidateSuccess).toBeCalledTimes(1);
expect(await screen.findByText('helperText')).toBeInTheDocument();
expect(await screen.queryByText('translation:errorMessage')).toBeNull();
expect(await screen.findByText('TestLabel')).not.toHaveClass('Mui-error');
});
it('should display zero width space if no helperText or errorMessage provided', async () => {
render(
<Formik
initialValues={{ testField: 'testValue' }}
onSubmit={mockSubmit}
validate={mockValidateSuccess}
>
{() => (
<form>
<FormikTextField
name="testField"
id="testField"
label="TestLabel"
/>
</form>
)}
</Formik>
);
expect(screen.getByText(/\u200B/u)).toBeInTheDocument();
});
});
|
#!/bin/bash
# Build the 'release' make target with two parallel jobs.
# NOTE(review): USE_SINGLE_BUILDDIR presumably routes all output into a single
# build directory — confirm its meaning against the project Makefile.
USE_SINGLE_BUILDDIR=1 make -j2 release
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-shuffled-N-VB/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-shuffled-N-VB/512+512+512-LPMI-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_within_sentences_low_pmi_first_third_sixth --eval_function penultimate_sixth_eval |
package org.ringingmaster.util.javafx.grid.canvas;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import javafx.application.Platform;
import javafx.scene.control.Tooltip;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.Pane;
import javafx.scene.shape.Rectangle;
import org.apache.commons.lang3.CharUtils;
import org.ringingmaster.util.javafx.grid.GridPosition;
import org.ringingmaster.util.javafx.grid.model.CharacterModel;
import org.ringingmaster.util.javafx.grid.model.GridModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Optional;
/**
 * Mouse/keyboard interaction layer for the grid: owns the blinking caret,
 * routes typed characters and navigation keys into the {@link GridModel},
 * and converts mouse coordinates into {@link GridPosition}s for caret
 * placement, selection dragging and per-character tooltips.
 *
 * @author <NAME>
 */
public class InteractionLayer extends Pane implements BlinkTimerListener {

    private final Logger log = LoggerFactory.getLogger(this.getClass());

    private GridModel model;
    private GridDimensions dimensions;

    private static final double CARET_WIDTH = 2.0;
    // caretVisible: this layer has focus; caretBlinkOn: current blink phase.
    // Both are read from the FX thread and written from listener callbacks.
    private volatile boolean caretVisible = false;
    private volatile boolean caretBlinkOn = false;
    private final Rectangle caret = new Rectangle(0, 0, 0, 0);
    private final CaretPositionMover caretPositionMover;

    // True while a mouse button is held down, so drags extend the selection.
    boolean mouseDown = false;

    private Tooltip tooltip = new Tooltip();

    public InteractionLayer() {
        caretPositionMover = new CaretPositionMover();
        BlinkTimerManager.getInstance().addListener(this);
        getChildren().add(caret);

        // Show the caret only while focused; force the blink phase so the
        // caret appears immediately on focus gain instead of mid-blink.
        focusedProperty().addListener((observable, oldValue, newValue) -> {
            //log.info("[{}] focussed [{}]", parent.getName(), newValue);
            caretVisible = newValue;
            forceCaretBlinkOnIfVisible();
        });

        setOnKeyPressed(this::handleKeyPressed);
        setOnKeyTyped(this::handleKeyTyped);
        setOnMousePressed(this::handleMousePressed);
        setOnMouseReleased(this::handleMouseReleased);
        setOnMouseDragged(this::handleMouseDragged);
        setOnMouseMoved(this::handleMouseMoved);
        setFocusTraversable(true);

        new TooltipBehavior(true).install(this, tooltip);
    }

    void setModel(GridModel model) {
        this.model = model;
        caretPositionMover.setModel(model);
    }

    // Resizes this pane to cover the whole drawn table.
    public void setDimensions(GridDimensions dimensions) {
        this.dimensions = dimensions;
        setPrefSize(dimensions.getTableRight(), dimensions.getTableBottom());
    }

    @Override
    public void blinkTimerManager_triggerBlink(boolean blinkOn) {
        // Called from the blink timer; UI mutation hops to the FX thread.
        caretBlinkOn = blinkOn;
        Platform.runLater(() -> caret.setVisible(caretVisible && caretBlinkOn));
    }

    void forceCaretBlinkOnIfVisible() {
        caretBlinkOn = false;
        Platform.runLater(() -> caret.setVisible(caretVisible));
    }

    // Inserts printable ASCII at the caret and advances the caret one place.
    private void handleKeyTyped(KeyEvent e) {
        String character = e.getCharacter();
        if (Strings.isNullOrEmpty(character)) {
            return;
        }
        if (CharUtils.isAsciiPrintable(character.charAt(0))) {
            Preconditions.checkState(character.length() == 1);
            GridPosition caretPosition = model.getCaretPosition();
            model.getCellModel(caretPosition.getRow(), caretPosition.getColumn()).insertCharacter(caretPosition.getCharacterIndex(), character);
            caretPositionMover.moveRight();
            //log.info("keyTyped:" + e);
        }
    }

    // Navigation and delete keys; plain Alt+Home/Alt+End jump within the row.
    private void handleKeyPressed(KeyEvent event) {
        switch (event.getCode()) {
            case RIGHT:
                caretPositionMover.moveRight();
                break;
            case LEFT:
                caretPositionMover.moveLeft();
                break;
            case UP:
                caretPositionMover.moveUp();
                break;
            case DOWN:
                caretPositionMover.moveDown();
                break;
            case END:
                if (event.isAltDown() && !event.isShiftDown() && !event.isControlDown()) {
                    caretPositionMover.moveToStartOfLastCellIfItHasContentsElseLastButOne();
                }
                break;
            case HOME:
                if (event.isAltDown() && !event.isShiftDown() && !event.isControlDown()) {
                    caretPositionMover.moveToStartOfRow();
                }
                break;
            case BACK_SPACE:
                caretPositionMover.deleteBack();
                break;
            case DELETE:
                caretPositionMover.deleteForward();
                break;
        }
        //log.info(event.toString());
        event.consume();
    }

    // Repositions the caret rectangle over the model's caret position.
    void draw() {
        GridPosition caretPosition = model.getCaretPosition();
        final double left = dimensions.getCell(caretPosition.getRow(), caretPosition.getColumn()).getVerticalCharacterStartPosition(caretPosition.getCharacterIndex());
        final double cellTop = dimensions.getTableHorizontalLinePosition(caretPosition.getRow());
        final double cellHeight = dimensions.getRowHeight(caretPosition.getRow());

        caret.setX(left);
        caret.setY(cellTop);
        caret.setHeight(cellHeight);
        caret.setWidth(CARET_WIDTH);
    }

    // Press: move the caret to the clicked position and start a drag.
    private void handleMousePressed(MouseEvent e) {
        //log.info("[{}] mouse pressed", parent.getName());
        Optional<GridPosition> gridPosition = mouseCoordinatesToGridPosition(e.getX(), e.getY(), Align.BOUNDARY_MID_CHARACTER);
        if (gridPosition.isPresent()) {
            model.setCaretPosition(gridPosition.get());
        }
        mouseDown = true;
        requestFocus();
        e.consume();
    }

    // Release: finish any selection drag at the release position.
    private void handleMouseReleased(MouseEvent e) {
        //log.info("Mouse Released" + e);
        mouseDown = false;
        Optional<GridPosition> gridPosition = mouseCoordinatesToGridPosition(e.getX(), e.getY(), Align.BOUNDARY_MID_CHARACTER);
        if (gridPosition.isPresent()) {
            model.setSelectionEndPosition(gridPosition.get());
        }
        e.consume();
    }

    // Drag with button held: continuously extend the selection.
    private void handleMouseDragged(MouseEvent e) {
        //log.info("mouseDragged " + e);
        if (mouseDown) {
            Optional<GridPosition> gridPosition = mouseCoordinatesToGridPosition(e.getX(), e.getY(), Align.BOUNDARY_MID_CHARACTER);
            if (gridPosition.isPresent()) {
                model.setSelectionEndPosition(gridPosition.get());
            }
        }
        e.consume();
    }

    // Hover: update the tooltip from the character under the pointer.
    private void handleMouseMoved(MouseEvent e) {
        Optional<GridPosition> gridPosition = mouseCoordinatesToGridPosition(e.getX(), e.getY(), Align.BOUNDARY_BETWEEN_CHARACTER);
        if (!gridPosition.isPresent()) {
            return;
        }
        CharacterModel characterModel = model.getCharacterModel(gridPosition.get());
        if (characterModel != null) {
            Optional<String> tooltipText = characterModel.getTooltipText();
            if (tooltipText.isPresent()) {
                tooltip.setText(tooltipText.get());
            } else {
                tooltip.setText(null);
            }
        }
    }

    // How x-coordinates snap to a character index: at the character's
    // midpoint (caret placement) or at its end (hover/tooltip lookup).
    private enum Align {
        BOUNDARY_BETWEEN_CHARACTER,
        BOUNDARY_MID_CHARACTER
    }

    // Maps pixel coordinates to a grid position, clamping coordinates that
    // fall outside the table; returns empty for a zero-sized table or for
    // clicks on the row-header column.
    public Optional<GridPosition> mouseCoordinatesToGridPosition(final double x, final double y, Align align) {
        GridDimensions dimensions = this.dimensions;
        if (dimensions.isZeroSized()) {
            return Optional.empty();
        }

        // Calculate the row index
        int rowIndex;
        if (y <= dimensions.getTableHorizontalLinePosition(0)) {
            // We are above the top of the grid, so set to the top row
            rowIndex = 0;
        } else if (y >= dimensions.getTableBottom()) {
            // We are below the bottom of the grid, so set to bottom row
            rowIndex = dimensions.getRowCount() - 1;
        } else {
            // We are inside the grid. Calculate what row.
            for (rowIndex = 0; rowIndex < dimensions.getRowCount(); rowIndex++) {
                if (y > dimensions.getTableHorizontalLinePosition(rowIndex) &&
                        y <= dimensions.getTableHorizontalLinePosition(rowIndex + 1)) {
                    break;
                }
            }
        }

        // Calculate the column index.
        int columnIndex;
        int characterIndex;
        if (x <= dimensions.getTableVerticalLinePosition(0)) {
            // We are to the left of the grid, so set to start of left column.
            columnIndex = 0;
            characterIndex = 0;
        } else if (x >= dimensions.getTableRight()) {
            // We are to the right of the grid, so set to end of right column.
            columnIndex = dimensions.getColumnCount() - 1;
            characterIndex = dimensions.getCell(rowIndex, columnIndex).getCharacterCount();
        } else {
            // We are inside the grid. Calculate what column.
            for (columnIndex = 0; columnIndex < dimensions.getColumnCount(); columnIndex++) {
                if (x < dimensions.getTableVerticalLinePosition(columnIndex + 1)) {
                    break;
                }
            }
            // Now calculate the character index.
            final CellDimension cell = dimensions.getCell(rowIndex, columnIndex);
            characterIndex = 0;
            if (align == Align.BOUNDARY_MID_CHARACTER) {
                while (characterIndex < cell.getCharacterCount() &&
                        x >= cell.getVerticalCharacterMidPosition(characterIndex)) {
                    characterIndex++;
                }
            } else if (align == Align.BOUNDARY_BETWEEN_CHARACTER) {
                while (characterIndex < cell.getCharacterCount() &&
                        x >= cell.getVerticalCharacterEndPosition(characterIndex)) {
                    characterIndex++;
                }
            }
        }

        // The row-header column (when present) is not interactive.
        if (model.hasRowHeader() && columnIndex == 0) {
            return Optional.empty();
        }

        return Optional.of(new GridPosition(rowIndex, columnIndex, characterIndex));
    }
}
|
def permutation(string):
    """Return all distinct permutations of ``string``.

    Recursively computes the permutations of string[1:], then inserts
    string[0] at every possible position of each one. The result order is
    the order in which each distinct permutation is first produced,
    identical to the original implementation.

    Args:
        string: the string to permute (may be empty).

    Returns:
        A list of unique permutation strings; [''] for the empty string.
    """
    if len(string) == 0:
        return ['']
    sub_perms = permutation(string[1:len(string)])
    results = []
    seen = set()  # Perf fix: O(1) dedupe (was an O(n!) list membership scan)
    for perm in sub_perms:
        for pos in range(len(string)):
            candidate = perm[0:pos] + string[0] + perm[pos:]
            if candidate not in seen:
                seen.add(candidate)
                results.append(candidate)
    return results
# Demo: print every distinct permutation of "abc".
result = permutation("abc")
print(result)
<gh_stars>0
import { DbProvider } from '../db.provider';
import { DbTransactionStatus } from './db.transaction.status';
/**
 * Subset of a stored transaction row used where the full record is not
 * needed. Field names mirror the snake_case database columns.
 */
export interface PartialDbTransaction {
  /** Transaction identifier. */
  id: string;
  /** Current lifecycle status of the transaction. */
  status: DbTransactionStatus;
  /** Gross amount. NOTE(review): units are not visible here — confirm
   * against the database schema. */
  value: number;
  /** Net amount, in the same units as `value`. */
  value_netto: number;
  /** Provider/operator that handled the transaction. */
  operator: DbProvider;
  created_at: Date;
  updated_at: Date;
}
|
class Polynomial:
    """Sparse polynomial.

    ``coeffs`` maps exponent -> coefficient for the non-constant terms;
    ``const`` holds the constant term separately.
    """

    def __init__(self, coeffs, const):
        self.coeffs = coeffs
        self.const = const

    def __add__(self, other):
        """Term-wise sum of two polynomials."""
        new_coeffs = {}
        for exp in set(self.coeffs.keys()) | set(other.coeffs.keys()):
            new_coeffs[exp] = self.coeffs.get(exp, 0) + other.coeffs.get(exp, 0)
        new_const = self.const + other.const
        return Polynomial(new_coeffs, new_const)

    def __sub__(self, other):
        """Term-wise difference (self - other)."""
        new_coeffs = {}
        for exp in set(self.coeffs.keys()) | set(other.coeffs.keys()):
            new_coeffs[exp] = self.coeffs.get(exp, 0) - other.coeffs.get(exp, 0)
        new_const = self.const - other.const
        return Polynomial(new_coeffs, new_const)

    def __mul__(self, other):
        """Polynomial product.

        Bug fix: the original only multiplied coeffs x coeffs and
        const x const, dropping the cross terms (each side's terms scaled
        by the other side's constant). E.g. (x + 1) * (x + 1) came out as
        x**2 + 1 instead of x**2 + 2x + 1.
        """
        new_coeffs = {}
        for exp1, coeff1 in self.coeffs.items():
            for exp2, coeff2 in other.coeffs.items():
                new_exp = exp1 + exp2
                new_coeffs[new_exp] = new_coeffs.get(new_exp, 0) + coeff1 * coeff2
        # Cross terms: self's terms scaled by other's constant, and vice versa.
        for exp, coeff in self.coeffs.items():
            new_coeffs[exp] = new_coeffs.get(exp, 0) + coeff * other.const
        for exp, coeff in other.coeffs.items():
            new_coeffs[exp] = new_coeffs.get(exp, 0) + coeff * self.const
        new_const = self.const * other.const
        return Polynomial(new_coeffs, new_const)

    def __eq__(self, other):
        """Value equality.

        Fix: the original defined __hash__ without __eq__, so equal-valued
        polynomials compared unequal (identity) while hashing equal —
        breaking the __eq__/__hash__ contract for dict/set use.
        """
        if not isinstance(other, Polynomial):
            return NotImplemented
        return self.coeffs == other.coeffs and self.const == other.const

    def __hash__(self):
        return hash((frozenset(self.coeffs.items()), self.const))
<gh_stars>1-10
package back_tracking;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 17471번: 게리맨더링
*
* @see https://www.acmicpc.net/problem/17471/
*
*/
public class Boj17471 {

    // size: how many districts go into group A for the current split.
    // part: population accumulator used by the connectivity walk in set().
    private static int size, part;
    private static int min = Integer.MAX_VALUE;
    private static int [] p;            // p[i]: population of district i
    private static int [][] linked;     // adjacency matrix (1 = adjacent)
    private static boolean [] check, visit;

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int N = Integer.parseInt(br.readLine());

        p = new int [N];
        linked = new int [N][N];

        // Populations of the N districts (0-based).
        String s = br.readLine();
        StringTokenizer st = new StringTokenizer(s);
        for(int i = 0; i < N; i++) {
            p[i] = Integer.parseInt(st.nextToken());
        }

        // Adjacency lists: each line is "count adj1 adj2 ..." (1-based input).
        for(int i = 0; i < N; i++) {
            st = new StringTokenizer(br.readLine());
            int count = Integer.parseInt(st.nextToken());

            while(count-- > 0) {
                int adj = Integer.parseInt(st.nextToken()) - 1;
                linked[i][adj] = 1; // check adj
            }
        }

        // Try every group-A size from 0..N/2 (the complement covers the rest).
        for(int i = 0; i < N / 2 + 1; i++) { // set size
            size = i;
            check = new boolean[N];
            backTracking(N, 0, 0);
        }

        System.out.println(min == Integer.MAX_VALUE ? -1: min);
    }

    // Chooses `size` districts for group A (combinations via backtracking),
    // then scores the resulting split if both groups are connected.
    private static void backTracking(int n, int current, int cnt) {
        if(cnt == size) {
            int[] tmp = new int[n];

            for(int i = 0; i < n; i++) {
                if(check[i] == true) tmp[i] = 1; // divide zero or one
            }
            min = Math.min(min, confirm(n, tmp)); // check is adjacent
            return;
        }

        for(int i = current; i < n; i++) {
            check[i]=true;
            backTracking(n, i + 1, cnt + 1);
            check[i]=false;
        }
    }

    // Returns the population difference of the two groups, or MAX_VALUE when
    // either group is disconnected (some district left unvisited).
    private static int confirm(int n, int[] arr) {
        visit = new boolean[n];

        int p1 = gerryMandering(n, arr, 1);
        int p2 = gerryMandering(n, arr, 0);

        for(int i = 0; i < n; i++) {
            if(!visit[i]) return Integer.MAX_VALUE;
        }
        return Math.abs(p2 - p1); // get diff
    }

    // Walks one connected component of the group tagged `target` starting at
    // its first unvisited member and returns that component's population.
    private static int gerryMandering(int n, int[] arr, int target) {
        int p = 0;

        for(int i = 0; i < n; i++) {
            if(arr[i] != target || visit[i]) continue;

            part = 0;
            set(n, i, arr);
            p = part;
            break;
        }
        return p;
    }

    // DFS within a single group: accumulates population into `part` and marks
    // every reachable same-group district as visited.
    private static void set(int n, int x, int[] arr) { // is set?
        visit[x] = true;
        part += p[x];

        for(int i = 0; i < n; i++) {
            if(visit[i] || arr[i] != arr[x] || linked[i][x] == 0) continue;
            set(n, i, arr);
        }
    }
}
#!/bin/bash -eu
# Record a terminal session with asciinema and render it to an SVG.
# Dependencies:
# npm install -g svg-term-cli
# pip3 install asciinema

# mktemp -u only generates a name (asciinema/svg-term want a path that does
# not exist yet and create the file themselves).
cast_filename=$(mktemp -u)
asciinema rec "${cast_filename}"

out_filename=$(mktemp -u)
# Robustness fix: quote all expansions so paths containing spaces
# (e.g. a custom TMPDIR) do not break word splitting.
svg-term --in "${cast_filename}" --out "${out_filename}.svg"
echo "saved to ${out_filename}.svg"
|
/**
 * Build the HTML body of an account-verification e-mail.
 *
 * @param string $verificationToken opaque verification token for the account
 * @param string $userEmail         recipient address embedded in the link
 * @return string HTML fragment containing the verification link
 */
function generateVerificationEmail($verificationToken, $userEmail) {
    // Bug fix: the token is now urlencode()d like the e-mail address, so
    // tokens containing URL-reserved characters no longer corrupt the query.
    $verificationLink = 'http://example.com/gestaotrocasuser/email-verification/check?email=' . urlencode($userEmail) . '&token=' . urlencode($verificationToken);
    // Escape the URL when embedding it in HTML (& -> &amp;, quotes, etc.).
    $safeLink = htmlspecialchars($verificationLink, ENT_QUOTES, 'UTF-8');
    $emailTemplate = '<p>Clique aqui para verificar sua conta <a href="' . $safeLink . '">' . $safeLink . '</a></p>';
    $emailTemplate .= '<p>Obs.: Não responda este email, ele é gerado automaticamente</p>';
    return $emailTemplate;
}
<reponame>efagerberg/PiCam<filename>send_email.py
import base64
import os
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail, Attachment, Email, Content
if __name__ == "__main__":
    # SendGrid client authenticated from the environment.
    sg = SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY'))
    # Security fix: the original `print os.environ.get('SENDGRID_API_KEY')`
    # echoed the API secret to stdout/logs; the key must never be printed.
    from_email = Email("SpyPi@no-reply")
    subject = "Motion Detected"
    to_email = Email("<EMAIL>")
    content = Content("text/plain", "Test!\n\n")
    mail = Mail(from_email, subject, to_email, content)
    # Attach two copies of the captured frame, base64-encoded as the
    # SendGrid attachment API expects.
    for i in range(2):
        with open("test.jpg", "rb") as jpg_file:
            attachment = Attachment()
            attachment.set_filename("ducky-{}.jpg".format(i))
            attachment.set_content(base64.b64encode(jpg_file.read()))
            attachment.set_type('image/jpg')
            mail.add_attachment(attachment)
    response = sg.client.mail.send.post(request_body=mail.get())
    print(response.status_code)
    print(response.body)
    print(response.headers)
|
class VolumeDevice:
    """A software volume control clamped to the range [0, 100]."""

    def __init__(self):
        # Start at the midpoint of the range.
        self.volume_level = 50

    def increase(self):
        """Raise the volume by one step (10), capped at 100."""
        raised = self.volume_level + 10
        self.volume_level = 100 if raised > 100 else raised

    def decrease(self):
        """Lower the volume by one step (10), floored at 0."""
        lowered = self.volume_level - 10
        self.volume_level = 0 if lowered < 0 else lowered
def volume_down():
    """
    Decrease the volume of a freshly created VolumeDevice by one step.

    NOTE(review): contrary to the original docstring, this does NOT send a
    fake VK_VOLUME_DOWN key event or change system volume — it constructs a
    local VolumeDevice (level 50), lowers it to 40, and discards it.
    :return: void
    """
    device = VolumeDevice()
    device.decrease()
class ItemManager {
    // Attribute names that may be mass-assigned on an item.
    protected $fillable = ['type'];
    protected $items = [];

    /**
     * Append an item whose 'type' attribute is $type.
     *
     * Bug fix: the original checked the *value* ($type, e.g. 'book')
     * against $fillable, which only lists the attribute name 'type' — so
     * every call in the usage example below would have thrown. The guard
     * now verifies that the 'type' attribute itself is fillable.
     *
     * @param string $type value for the item's 'type' attribute
     * @throws Exception when the 'type' attribute is not fillable
     */
    public function addItem($type) {
        if (in_array('type', $this->fillable)) {
            $this->items[] = ['type' => $type];
        } else {
            throw new Exception("'type' is not a fillable attribute.");
        }
    }

    /**
     * Return all stored items whose 'type' equals $type, reindexed from 0.
     *
     * @param string $type
     * @return array
     */
    public function getItemsByType($type) {
        $filteredItems = array_filter($this->items, function($item) use ($type) {
            return $item['type'] === $type;
        });
        return array_values($filteredItems);
    }
}
// Usage
// Demo: store a mix of item types, then fetch only the books.
$itemManager = new ItemManager();
$itemManager->addItem('book');
$itemManager->addItem('pen');
$itemManager->addItem('book');
$itemManager->addItem('pencil');
$itemManager->addItem('pen');
$books = $itemManager->getItemsByType('book');
print_r($books); // Output: Array ( [0] => Array ( [type] => book ) [1] => Array ( [type] => book ) )
<reponame>scala-steward/http4s-extend
package http4s.extend.test.laws.instances
import http4s.extend.ExceptionDisplay
import monix.eval.{Task => MonixTask}
import org.scalacheck.Arbitrary
import scalaz.concurrent.{Task => ScalazTask}
/** ScalaCheck `Arbitrary` instances shared by the law tests. */
private[test] trait ArbitraryInstances {

  /** Wraps arbitrary strings as `ExceptionDisplay` values. */
  implicit def throwableCompleteMessageArb(implicit A: Arbitrary[String]): Arbitrary[ExceptionDisplay] =
    Arbitrary { A.arbitrary map ExceptionDisplay.apply }

  /** Lifts an arbitrary `A` into a lazily-evaluated scalaz `Task`. */
  implicit def scalazTaskArbitrary[A](implicit A: Arbitrary[A]): Arbitrary[ScalazTask[A]] =
    Arbitrary { A.arbitrary map (ScalazTask.delay(_)) }

  /** Lifts an arbitrary `A` into a lazily-evaluated Monix `Task`. */
  implicit def monixTaskArbitrary[A](implicit A: Arbitrary[A]): Arbitrary[MonixTask[A]] =
    Arbitrary { A.arbitrary map (MonixTask.delay(_)) }
}
|
<gh_stars>0
import tensorflow as tf
class Augment:
    """
    Apply augmentation on a dataset via the .map method of the tf.data.Dataset API.

    All stateless ops share the same ``seed``, so for a given seed the results
    are deterministic across runs.

    Args:
        brightness (dict or None) - Default None. Keyword arguments for tf.image.stateless_random_brightness;
        contrast (dict or None) - Default None. Keyword arguments for tf.image.stateless_random_contrast;
        flip_horizontal (bool) - Default False. If True, randomly flip an image horizontally (left to right) deterministically;
        flip_vertical (bool) - Default False. If True, randomly flip an image vertically (up to down) deterministically;
        hue (dict or None) - Default None. Keyword arguments for tf.image.stateless_random_hue;
        rotate (dict or None) - Default None. Keyword arguments for tf.image.rot90 (rotate image counter-clockwise by 90 degrees);
        saturation (dict or None) - Default None. Keyword arguments for tf.image.stateless_random_saturation;
        seed (shape 2 Tensor or tuple of 2 ints) - Guarantees the same results given the same seed independent of how many times the function is called.
    """

    def __init__(self, brightness=None, contrast=None, flip_horizontal=False, flip_vertical=False, hue=None, rotate=None, saturation=None, seed=(0,0)):
        self.brightness = brightness
        self.contrast = contrast
        self.flip_horizontal = flip_horizontal
        self.flip_vertical = flip_vertical
        self.hue = hue
        self.rotate = rotate
        self.saturation = saturation
        self.seed = seed

    def _brightness(self, image, **kwargs):
        return tf.image.stateless_random_brightness(image, **kwargs, seed=self.seed)

    def _contrast(self, image, **kwargs):
        return tf.image.stateless_random_contrast(image, **kwargs, seed=self.seed)

    def _flip_horizontal(self, image):
        return tf.image.stateless_random_flip_left_right(image, seed=self.seed)

    def _flip_vertical(self, image):
        return tf.image.stateless_random_flip_up_down(image, seed=self.seed)

    def _hue(self, image, **kwargs):
        return tf.image.stateless_random_hue(image, **kwargs, seed=self.seed)

    def _rotate(self, image, **kwargs):
        # Deterministic rot90, unlike the stateless_random_* ops above.
        return tf.image.rot90(image, **kwargs)

    def _saturation(self, image, **kwargs):
        return tf.image.stateless_random_saturation(image, **kwargs, seed=self.seed)

    def _augment(self, image, label=None):
        # Ops are applied in a fixed order; each is skipped when its
        # configuration is falsy. The label, if present, passes through.
        image = self._brightness(image, **self.brightness) if self.brightness else image
        image = self._contrast(image, **self.contrast) if self.contrast else image
        image = self._flip_horizontal(image) if self.flip_horizontal else image
        image = self._flip_vertical(image) if self.flip_vertical else image
        image = self._hue(image, **self.hue) if self.hue else image
        image = self._rotate(image, **self.rotate) if self.rotate else image
        image = self._saturation(image, **self.saturation) if self.saturation else image
        if label is not None:
            return image, label
        return image

    def apply_on(self, dataset):
        """
        Apply the augmentation to a dataset of (image, label) or (image) elements.

        Args:
            dataset (dataset) - The tf.data.Dataset object.
        Returns:
            dataset.map object
        """
        return dataset.map(self._augment, num_parallel_calls=tf.data.AUTOTUNE)
|
<filename>src/main/java/kanye/mixin/KanyeQuoteMixin.java
package kanye.mixin;
import kanye.Quotes;
import net.minecraft.client.MinecraftClient;
import net.minecraft.client.network.ClientPlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.item.Items;
import net.minecraft.network.packet.c2s.play.BookUpdateC2SPacket;
import net.minecraft.text.LiteralText;
import net.minecraft.text.Text;
import net.minecraft.util.Hand;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Mixin(ClientPlayerEntity.class)
public abstract class KanyeQuoteMixin {
@Shadow public abstract void sendSystemMessage(Text message, UUID sender);
@Shadow @Final protected MinecraftClient client;
private Quotes kq;
@Inject(at=@At("TAIL"),method="<init>")
public void ClientPlayerEntity(CallbackInfo ci){
kq = new Quotes("http://api.kanye.rest");
}
@Inject(at=@At("HEAD"), method="useBook", cancellable = true)
public void useBook(ItemStack book, Hand hand, CallbackInfo ci){
if(kq.stopBookUse) ci.cancel();
}
@Inject( at = @At("HEAD"), method = "sendChatMessage", cancellable = true)
public void onChatMessage(String message, CallbackInfo ci){
if(message.equals(".kanye")){
kq.stopBookUse = true;
kq.getQuotes(50);
ci.cancel();
return;
} else if(message.startsWith(".kanye")){
kq.stopBookUse = true;
String m = message.substring(7);
try {
int pages = Integer.valueOf(m);
if(pages<=50 && pages>0)
kq.getQuotes(pages);
else if(pages<0){
this.sendSystemMessage(new LiteralText("Oh, so you`re a funny guy?"), null);
kq.getQuotes(Math.abs(pages));
}
else kq.getQuotes(50);
ci.cancel();
} catch(Exception e){
this.sendSystemMessage(new LiteralText("invalid arguments: .kanye <Number of Quotes max 50>"),null);
ci.cancel();
}
}
}
@Inject(at = @At("TAIL"), method = "tick", cancellable = true)
public void onTick(CallbackInfo ci) {
if (kq.readyToRead) {
ClientPlayerEntity player = (ClientPlayerEntity) (Object) this;
List<String> pages = kq.getPages();
if (player.getInventory().getMainHandStack().getItem() == Items.WRITABLE_BOOK)
player.networkHandler.sendPacket(new BookUpdateC2SPacket(player.getInventory().selectedSlot, pages, Optional.of("Book of Endless Wisdom")));
kq.readyToRead = false;
kq.stopBookUse = false;
kq.emptyPages();
}
}
} |
from django.contrib.auth.signals import user_logged_in # user_logged_out, user_login_failed
from django.db.models import signals
from django.dispatch import Signal
from django.db import transaction
from django.dispatch import receiver
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.contrib.auth import get_user_model
User = get_user_model()
from blog_api.users.models import User, VerificationCode, PasswordResetCode
@receiver(signals.post_save, sender=User)
def send_user_verification_email_signal(sender, instance, created, **kwargs):
    '''Send user a verification email on first save.'''
    if not created:
        return
    code = VerificationCode.objects.create(user_to_verify=instance)
    # Defer sending until the surrounding transaction commits, so the email
    # never references rows that could still be rolled back.
    transaction.on_commit(code.send_user_verification_email)
new_registration = Signal(providing_args=["ip_address", "user_username"])
@receiver(new_registration)
def record_ip_on_new_registration(sender, **kwargs):
    """Persist the registering client's IP address on the user record.

    Expects `user_username` and `ip_address` in the signal kwargs (the kwargs
    the `new_registration` signal documents).

    BUG FIX: the receiver previously declared a required positional `task_id`
    parameter that the signal never provides, so every dispatch raised
    TypeError; extra kwargs are now absorbed by **kwargs instead.
    """
    username = kwargs['user_username']
    ip_address = kwargs['ip_address']
    # NOTE(review): get_object_or_404 raises Http404 outside a view context —
    # confirm this handler only ever runs during request handling.
    user = get_object_or_404(User, username=username)
    user.ip_address = ip_address
    # Only write the one changed column to avoid clobbering concurrent updates.
    user.save(update_fields=['ip_address'])
|
<filename>java/dagger/internal/codegen/bindinggraphvalidation/DependsOnProductionExecutorValidator.java
/*
* Copyright (C) 2018 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.internal.codegen.bindinggraphvalidation;
import static dagger.internal.codegen.extension.DaggerStreams.instancesOf;
import static javax.tools.Diagnostic.Kind.ERROR;
import dagger.internal.codegen.binding.KeyFactory;
import dagger.internal.codegen.compileroption.CompilerOptions;
import dagger.model.BindingGraph;
import dagger.model.BindingGraph.MaybeBinding;
import dagger.model.Key;
import dagger.spi.BindingGraphPlugin;
import dagger.spi.DiagnosticReporter;
import javax.inject.Inject;
/**
 * Reports an error on all bindings that depend explicitly on the {@code @Production Executor} key.
 */
// TODO(dpb,beder): Validate this during @Inject/@Provides/@Produces validation.
final class DependsOnProductionExecutorValidator implements BindingGraphPlugin {
  private final CompilerOptions compilerOptions;
  private final KeyFactory keyFactory;

  @Inject
  DependsOnProductionExecutorValidator(CompilerOptions compilerOptions, KeyFactory keyFactory) {
    this.compilerOptions = compilerOptions;
    this.keyFactory = keyFactory;
  }

  @Override
  public String pluginName() {
    return "Dagger/DependsOnProductionExecutor";
  }

  @Override
  public void visitGraph(BindingGraph bindingGraph, DiagnosticReporter diagnosticReporter) {
    // Without producers neither executor key can exist in the graph.
    if (!compilerOptions.usesProducers()) {
      return;
    }
    // The framework-internal executor binding is the one place allowed to
    // request the production executor; everything else is an error.
    Key allowedRequesterKey = keyFactory.forProductionImplementationExecutor();
    Key executorKey = keyFactory.forProductionExecutor();
    bindingGraph.network().nodes().stream()
        .flatMap(instancesOf(MaybeBinding.class))
        .filter(executorNode -> executorNode.key().equals(executorKey))
        .flatMap(executorNode -> bindingGraph.requestingBindings(executorNode).stream())
        .filter(requester -> !requester.key().equals(allowedRequesterKey))
        .forEach(requester -> reportError(diagnosticReporter, requester));
  }

  /** Emits the "may not depend on the production executor" error for one binding. */
  private void reportError(DiagnosticReporter diagnosticReporter, dagger.model.Binding binding) {
    diagnosticReporter.reportBinding(
        ERROR, binding, "%s may not depend on the production executor", binding.key());
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.