text
stringlengths
1
1.05M
package com.acgist.snail.gui.javafx;

import java.util.BitSet;
import java.util.Random;

import org.junit.jupiter.api.Test;

import com.acgist.snail.context.SystemThreadContext;
import com.acgist.snail.gui.javafx.window.statistics.CanvasPainter;
import com.acgist.snail.utils.ThreadUtils;

import javafx.application.Application;
import javafx.scene.Group;
import javafx.scene.Scene;
import javafx.scene.control.Label;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.HBox;
import javafx.scene.paint.Color;
import javafx.scene.text.Text;
import javafx.scene.text.TextFlow;
import javafx.stage.Stage;

/**
 * Manual/visual test for {@link CanvasPainter}: renders a piece-availability
 * style bitmap on a JavaFX canvas and keeps mutating it from a background
 * thread so repainting can be observed.
 */
public class CanvasPainterTest extends Application {

	@Override
	public void start(Stage primaryStage) throws Exception {
		// Total number of cells painted on the canvas.
		final int size = 990;
		// Window of indices treated as the "selected" range.
		final int begin = 250;
		final int end = 520;
		// bitSet: randomly filled cells; selectBitSet: the contiguous selected range.
		final BitSet bitSet = new BitSet();
		final BitSet selectBitSet = new BitSet();
		final Random random = new Random();
		for (int jndex = begin; jndex < end; jndex++) {
			final int index = random.nextInt(size);
			// Only mark random hits that happen to fall inside the selected window.
			if(begin <= index && index <= end) {
				bitSet.set(index);
			}
			selectBitSet.set(jndex);
		}
		// colors[0] paints bits from bitSet, colors[1] paints bits from selectBitSet
		// (order matches the BitSet[] passed to newInstance below).
		final Color[] colors = new Color[] {
			Color.rgb(0x22, 0xAA, 0x22),
			Color.rgb(0xFF, 0xEE, 0x99)
		};
		// 12 = cell size, 50 = cells per row — assumed from CanvasPainter's API; TODO confirm.
		final CanvasPainter painter = CanvasPainter.newInstance(
			12, 50, size,
			new BitSet[] { bitSet, selectBitSet },
			colors
		);
		final Group root = new Group();
		root.getChildren().add(painter.build().draw().canvas());
		// Legend: one colored swatch + label per data series.
		final HBox hBox = new HBox();
		final String[] tabs = new String[] { "ๆœ‰ๆ•ˆๆ•ฐๆฎ", "ๆ— ๆ•ˆๆ•ฐๆฎ" };
		for (int index = 0; index < tabs.length; index++) {
			final Text text = new Text(tabs[index]);
			final Label label = new Label();
			label.setPrefSize(10, 10);
			label.setBackground(new Background(new BackgroundFill(colors[index], null, null)));
			final TextFlow textFlow = new TextFlow(label, text);
			hBox.getChildren().add(textFlow);
		}
		root.getChildren().add(hBox);
		// Background thread keeps setting random bits and repainting every 100 ms.
		// NOTE(review): painter.draw() is invoked off the FX Application Thread —
		// presumably CanvasPainter handles its own Platform.runLater; confirm.
		SystemThreadContext.submit(() -> {
			while(true) {
				final int index = random.nextInt(size);
				if(begin <= index && index <= end) {
					bitSet.set(index);
					painter.draw();
				}
				ThreadUtils.sleep(100);
			}
		});
		final Scene scene = new Scene(root);
		primaryStage.setScene(scene);
		primaryStage.setTitle("็”ปๅธƒ");
		primaryStage.show();
	}

	/**
	 * JUnit entry point — blocks until the JavaFX window is closed.
	 */
	@Test
	public void test() {
		launch();
	}

}
<reponame>HarryMarch/nodejs-project<gh_stars>0 import { Injectable, Logger } from '@nestjs/common'; import { KafkaService } from '../kafka/kafka.service'; import { KafkaPayload } from '../kafka/kafka.message'; import { KafkaHostConfig } from '../config/kafka.config'; @Injectable() export class ProducerService { private logger: Logger = new Logger(ProducerService.name); constructor(private readonly kafkaService: KafkaService) {} /** * Send message to Kafka broker * * @param {Object} body * @return {*} {Promise<Object>} * @memberof ProducerService */ async send(body: Record<string, unknown>): Promise<Record<string, unknown>> { // build Kafka message payload const payload: KafkaPayload = { messageId: '' + new Date().valueOf(), body: body, messageType: KafkaHostConfig.KAFKA_MESSAGE_TYPE, topicName: KafkaHostConfig.KAFKA_TOPIC, }; // send message to broker const value = await this.kafkaService.sendMessage( KafkaHostConfig.KAFKA_TOPIC, payload, ); // log status this.logger.log('message status: ' + JSON.stringify(value)); return body; } }
import os


def list_static_assets(assets_directory='assets'):
    """Return the names of regular files directly inside *assets_directory*.

    Generalized: the directory is now a parameter (default ``'assets'``
    preserves the original behavior). Subdirectories are not recursed into;
    only direct children that are regular files are returned.

    :param assets_directory: path of the directory to scan
    :return: list of filenames (not full paths)
    :raises FileNotFoundError: if the directory does not exist
    """
    # os.path.isfile filters out subdirectories, symlinked dirs, etc.
    return [
        filename
        for filename in os.listdir(assets_directory)
        if os.path.isfile(os.path.join(assets_directory, filename))
    ]
// Code generated by protoc-gen-go. DO NOT EDIT. // source: NotificationService.proto package pb import ( context "context" fmt "fmt" proto "github.com/golang/protobuf/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type ScheduleNotificationRequest struct { NotificationId string `protobuf:"bytes,1,opt,name=notificationId,proto3" json:"notificationId,omitempty"` SendAt string `protobuf:"bytes,2,opt,name=sendAt,proto3" json:"sendAt,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ScheduleNotificationRequest) Reset() { *m = ScheduleNotificationRequest{} } func (m *ScheduleNotificationRequest) String() string { return proto.CompactTextString(m) } func (*ScheduleNotificationRequest) ProtoMessage() {} func (*ScheduleNotificationRequest) Descriptor() ([]byte, []int) { return fileDescriptor_8a9b3db0602ab621, []int{0} } func (m *ScheduleNotificationRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ScheduleNotificationRequest.Unmarshal(m, b) } func (m *ScheduleNotificationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ScheduleNotificationRequest.Marshal(b, m, deterministic) } func (m *ScheduleNotificationRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_ScheduleNotificationRequest.Merge(m, src) } func (m *ScheduleNotificationRequest) XXX_Size() int { return xxx_messageInfo_ScheduleNotificationRequest.Size(m) } 
func (m *ScheduleNotificationRequest) XXX_DiscardUnknown() { xxx_messageInfo_ScheduleNotificationRequest.DiscardUnknown(m) } var xxx_messageInfo_ScheduleNotificationRequest proto.InternalMessageInfo func (m *ScheduleNotificationRequest) GetNotificationId() string { if m != nil { return m.NotificationId } return "" } func (m *ScheduleNotificationRequest) GetSendAt() string { if m != nil { return m.SendAt } return "" } type ScheduleNotificationResponse struct { Done bool `protobuf:"varint,1,opt,name=done,proto3" json:"done,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ScheduleNotificationResponse) Reset() { *m = ScheduleNotificationResponse{} } func (m *ScheduleNotificationResponse) String() string { return proto.CompactTextString(m) } func (*ScheduleNotificationResponse) ProtoMessage() {} func (*ScheduleNotificationResponse) Descriptor() ([]byte, []int) { return fileDescriptor_8a9b3db0602ab621, []int{1} } func (m *ScheduleNotificationResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ScheduleNotificationResponse.Unmarshal(m, b) } func (m *ScheduleNotificationResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ScheduleNotificationResponse.Marshal(b, m, deterministic) } func (m *ScheduleNotificationResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_ScheduleNotificationResponse.Merge(m, src) } func (m *ScheduleNotificationResponse) XXX_Size() int { return xxx_messageInfo_ScheduleNotificationResponse.Size(m) } func (m *ScheduleNotificationResponse) XXX_DiscardUnknown() { xxx_messageInfo_ScheduleNotificationResponse.DiscardUnknown(m) } var xxx_messageInfo_ScheduleNotificationResponse proto.InternalMessageInfo func (m *ScheduleNotificationResponse) GetDone() bool { if m != nil { return m.Done } return false } type TriggerNotificationRequest struct { NotificationId string `protobuf:"bytes,1,opt,name=notificationId,proto3" 
json:"notificationId,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *TriggerNotificationRequest) Reset() { *m = TriggerNotificationRequest{} } func (m *TriggerNotificationRequest) String() string { return proto.CompactTextString(m) } func (*TriggerNotificationRequest) ProtoMessage() {} func (*TriggerNotificationRequest) Descriptor() ([]byte, []int) { return fileDescriptor_8a9b3db0602ab621, []int{2} } func (m *TriggerNotificationRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TriggerNotificationRequest.Unmarshal(m, b) } func (m *TriggerNotificationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_TriggerNotificationRequest.Marshal(b, m, deterministic) } func (m *TriggerNotificationRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_TriggerNotificationRequest.Merge(m, src) } func (m *TriggerNotificationRequest) XXX_Size() int { return xxx_messageInfo_TriggerNotificationRequest.Size(m) } func (m *TriggerNotificationRequest) XXX_DiscardUnknown() { xxx_messageInfo_TriggerNotificationRequest.DiscardUnknown(m) } var xxx_messageInfo_TriggerNotificationRequest proto.InternalMessageInfo func (m *TriggerNotificationRequest) GetNotificationId() string { if m != nil { return m.NotificationId } return "" } type TriggerNotificationResponse struct { Done bool `protobuf:"varint,1,opt,name=done,proto3" json:"done,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *TriggerNotificationResponse) Reset() { *m = TriggerNotificationResponse{} } func (m *TriggerNotificationResponse) String() string { return proto.CompactTextString(m) } func (*TriggerNotificationResponse) ProtoMessage() {} func (*TriggerNotificationResponse) Descriptor() ([]byte, []int) { return fileDescriptor_8a9b3db0602ab621, []int{3} } func (m *TriggerNotificationResponse) XXX_Unmarshal(b []byte) error 
{ return xxx_messageInfo_TriggerNotificationResponse.Unmarshal(m, b) } func (m *TriggerNotificationResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_TriggerNotificationResponse.Marshal(b, m, deterministic) } func (m *TriggerNotificationResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_TriggerNotificationResponse.Merge(m, src) } func (m *TriggerNotificationResponse) XXX_Size() int { return xxx_messageInfo_TriggerNotificationResponse.Size(m) } func (m *TriggerNotificationResponse) XXX_DiscardUnknown() { xxx_messageInfo_TriggerNotificationResponse.DiscardUnknown(m) } var xxx_messageInfo_TriggerNotificationResponse proto.InternalMessageInfo func (m *TriggerNotificationResponse) GetDone() bool { if m != nil { return m.Done } return false } func init() { proto.RegisterType((*ScheduleNotificationRequest)(nil), "ScheduleNotificationRequest") proto.RegisterType((*ScheduleNotificationResponse)(nil), "ScheduleNotificationResponse") proto.RegisterType((*TriggerNotificationRequest)(nil), "TriggerNotificationRequest") proto.RegisterType((*TriggerNotificationResponse)(nil), "TriggerNotificationResponse") } func init() { proto.RegisterFile("NotificationService.proto", fileDescriptor_8a9b3db0602ab621) } var fileDescriptor_8a9b3db0602ab621 = []byte{ // 208 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xf4, 0xcb, 0x2f, 0xc9, 0x4c, 0xcb, 0x4c, 0x4e, 0x2c, 0xc9, 0xcc, 0xcf, 0x0b, 0x4e, 0x2d, 0x2a, 0xcb, 0x4c, 0x4e, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x57, 0x8a, 0xe5, 0x92, 0x0e, 0x4e, 0xce, 0x48, 0x4d, 0x29, 0xcd, 0x49, 0x45, 0x56, 0x14, 0x94, 0x5a, 0x58, 0x9a, 0x5a, 0x5c, 0x22, 0xa4, 0xc6, 0xc5, 0x97, 0x87, 0x24, 0xec, 0x99, 0x22, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x19, 0x84, 0x26, 0x2a, 0x24, 0xc6, 0xc5, 0x56, 0x9c, 0x9a, 0x97, 0xe2, 0x58, 0x22, 0xc1, 0x04, 0x96, 0x87, 0xf2, 0x94, 0x8c, 0xb8, 0x64, 0xb0, 0x1b, 0x5f, 0x5c, 0x90, 0x9f, 0x57, 0x9c, 0x2a, 0x24, 0xc4, 0xc5, 
0x92, 0x92, 0x9f, 0x97, 0x0a, 0x36, 0x95, 0x23, 0x08, 0xcc, 0x56, 0x72, 0xe1, 0x92, 0x0a, 0x29, 0xca, 0x4c, 0x4f, 0x4f, 0x2d, 0xa2, 0xc0, 0x45, 0x4a, 0x86, 0x5c, 0xd2, 0x58, 0x4d, 0xc1, 0x6d, 0xb1, 0xd1, 0x1e, 0x46, 0x2e, 0x61, 0x2c, 0x21, 0x25, 0x14, 0xcc, 0x25, 0x82, 0xcd, 0x13, 0x42, 0x32, 0x7a, 0x78, 0x82, 0x4e, 0x4a, 0x56, 0x0f, 0xaf, 0xcf, 0x03, 0xb8, 0x84, 0xb1, 0xb8, 0x4f, 0x48, 0x5a, 0x0f, 0xb7, 0xdf, 0xa5, 0x64, 0xf4, 0xf0, 0x78, 0xc9, 0x89, 0x25, 0x8a, 0xa9, 0x20, 0x29, 0x89, 0x0d, 0x1c, 0xaf, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2b, 0xb1, 0xc9, 0x3d, 0xf4, 0x01, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // NotificationServiceClient is the client API for NotificationService service. // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type NotificationServiceClient interface { ScheduleNotification(ctx context.Context, in *ScheduleNotificationRequest, opts ...grpc.CallOption) (*ScheduleNotificationResponse, error) TriggerNotification(ctx context.Context, in *TriggerNotificationRequest, opts ...grpc.CallOption) (*TriggerNotificationResponse, error) } type notificationServiceClient struct { cc *grpc.ClientConn } func NewNotificationServiceClient(cc *grpc.ClientConn) NotificationServiceClient { return &notificationServiceClient{cc} } func (c *notificationServiceClient) ScheduleNotification(ctx context.Context, in *ScheduleNotificationRequest, opts ...grpc.CallOption) (*ScheduleNotificationResponse, error) { out := new(ScheduleNotificationResponse) err := c.cc.Invoke(ctx, "/NotificationService/ScheduleNotification", in, out, opts...) 
if err != nil { return nil, err } return out, nil } func (c *notificationServiceClient) TriggerNotification(ctx context.Context, in *TriggerNotificationRequest, opts ...grpc.CallOption) (*TriggerNotificationResponse, error) { out := new(TriggerNotificationResponse) err := c.cc.Invoke(ctx, "/NotificationService/TriggerNotification", in, out, opts...) if err != nil { return nil, err } return out, nil } // NotificationServiceServer is the server API for NotificationService service. type NotificationServiceServer interface { ScheduleNotification(context.Context, *ScheduleNotificationRequest) (*ScheduleNotificationResponse, error) TriggerNotification(context.Context, *TriggerNotificationRequest) (*TriggerNotificationResponse, error) } // UnimplementedNotificationServiceServer can be embedded to have forward compatible implementations. type UnimplementedNotificationServiceServer struct { } func (*UnimplementedNotificationServiceServer) ScheduleNotification(ctx context.Context, req *ScheduleNotificationRequest) (*ScheduleNotificationResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method ScheduleNotification not implemented") } func (*UnimplementedNotificationServiceServer) TriggerNotification(ctx context.Context, req *TriggerNotificationRequest) (*TriggerNotificationResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method TriggerNotification not implemented") } func RegisterNotificationServiceServer(s *grpc.Server, srv NotificationServiceServer) { s.RegisterService(&_NotificationService_serviceDesc, srv) } func _NotificationService_ScheduleNotification_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(ScheduleNotificationRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(NotificationServiceServer).ScheduleNotification(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: 
"/NotificationService/ScheduleNotification", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(NotificationServiceServer).ScheduleNotification(ctx, req.(*ScheduleNotificationRequest)) } return interceptor(ctx, in, info, handler) } func _NotificationService_TriggerNotification_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(TriggerNotificationRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(NotificationServiceServer).TriggerNotification(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/NotificationService/TriggerNotification", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(NotificationServiceServer).TriggerNotification(ctx, req.(*TriggerNotificationRequest)) } return interceptor(ctx, in, info, handler) } var _NotificationService_serviceDesc = grpc.ServiceDesc{ ServiceName: "NotificationService", HandlerType: (*NotificationServiceServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "ScheduleNotification", Handler: _NotificationService_ScheduleNotification_Handler, }, { MethodName: "TriggerNotification", Handler: _NotificationService_TriggerNotification_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "NotificationService.proto", }
/**
 * Relation to the owning Manifest record (one-to-one via this table's
 * `manifest_id` foreign key).
 *
 * @return \yii\db\ActiveQuery
 */
public function getManifest()
{
    // NOTE(review): assumes the related "Manifest" model is linked through a
    // 'manifest_id' foreign key on this table — confirm against the schema.
    return $this->hasOne(Manifest::class, ['id' => 'manifest_id']);
}
<filename>src/components/Creds/CreateAccountPage/CreateForm.js import React, { Component } from "react"; import PropTypes from "prop-types"; import Email from "../Email"; import Password from "../Password"; import ErrorText from "../ErrorText"; import Submit from "../Submit"; import axios from "axios"; import API from "../../../config/api"; const createAccountAPI = `${API.users}/createAccount`; const loginAPI = `${API.users}/login`; const initialState = { email: "", password: "", confirmPassword: "", errorText: "", errorVisible: false }; const errorMap = email => { let map = { "email is required": "Missing Field(s).", "password is required": "Missing Field(s).", "Please enter a valid email address": "Invalid Email.", "Password is too short": "Password must be at least 8 characters.", "Passwords must be equal": "Passwords must match." } map[`No user with email ${email} found.`] = "Invalid Credentials."; return map; }; // Form to enter user credentials class CreateForm extends Component { constructor(props) { super(props); this.state = initialState; } // Shows error text to user showError(message) { this.setState({ errorText: errorMap(this.state.email)[message] || message, errorVisible: true }); } // Callback that fires when create account is pressed createAccount() { const creds = { email: this.state.email, password: this.state.password } if (this.state.password === this.state.confirmPassword) { axios.post(createAccountAPI, creds) .then(response => { response = response.data; if (response.ok) { this.login(creds); } else { this.showError(response.message); } }) .catch(err => { console.log(err); this.showError("Something went wrong. 
Try again?"); }); } else { this.showError("Passwords must be equal"); } } // Logs user in if account was created successfully, otherwise bounces to login page login(creds) { axios.post(loginAPI, creds) .then(response => { response = response.data; if (response.ok) { this.props.history.push("/main/"); } else { this.props.history.push("/login/"); } }) .catch(err => { console.log(err); }); } render() { return ( <div className="AuthForm"> <Email name="email" placeholder="Email Address" onChange={e => { this.setState({ errorVisible: false, email: e.target.value }); }} value={this.state.email}/> <Password name="password" placeholder="Password" onChange={e => { this.setState({ errorVisible: false, password: e.target.value }); }} value={this.state.password}/> <Password name="<PASSWORD>Password" placeholder="<PASSWORD>" onChange={e => { this.setState({ errorVisible: false, confirmPassword: e.target.value }); }} value={this.state.confirmPassword}/> {this.state.errorVisible? <ErrorText value={this.state.errorText}/> : null} <Submit value="Submit" onClick={this.createAccount.bind(this)}/> </div> ) } } CreateForm.propTypes = { history: PropTypes.shape({ push: PropTypes.func.isRequired }) }; export default CreateForm;
from django.contrib import admin
from academy_app.models import CourseLectures, Courses, Enroll, Payment, PaymentDetail, Curriculum

# Custom admin site text.
admin.site.site_header = "CyberAcademy Admin"
admin.site.site_title = "CyberAcademy Admin Panel"
admin.site.index_title = "Welcome to CyberAcademy"


# Course curriculum.
class CurriculumAdmin(admin.ModelAdmin):
    list_display = ['course', 'title', 'description']

    # Fixed: was lowercase `class meta`, inconsistent with every other admin
    # class in this module.
    class Meta:
        model = Curriculum


# Course videos, edited inline on the course page.
class CourseLectureAdmin(admin.TabularInline):
    model = CourseLectures


# Courses.
class CourseAdmin(admin.ModelAdmin):
    inlines = [CourseLectureAdmin]
    list_display = ['course_title', 'duration', 'level', 'fee', 'description',
                    'prerequisites', 'thumbnail', 'created']

    class Meta:
        model = Courses


# Payment model.
class PaymentAdmin(admin.ModelAdmin):
    list_display = ['user', 'course', 'payment_status', 'total_amount',
                    'payment_date', 'payment_detail']

    class Meta:
        model = Payment


# Payment detail.
class PaymentDetailAdmin(admin.ModelAdmin):
    list_display = ['bank', 'card_number', 'expiry', 'holder_name']

    class Meta:
        model = PaymentDetail


# Enroll model.
class EnrollAdmin(admin.ModelAdmin):
    list_display = ['user', 'course', 'payment_id', 'first_name', 'last_name',
                    'email', 'phone', 'address']

    class Meta:
        model = Enroll


# Registering models.
admin.site.register(Courses, CourseAdmin)
admin.site.register(Payment, PaymentAdmin)
admin.site.register(PaymentDetail, PaymentDetailAdmin)
admin.site.register(Enroll, EnrollAdmin)
admin.site.register(Curriculum, CurriculumAdmin)
<filename>src/main/java/au/gov/ga/geodesy/igssitelog/support/marshalling/moxy/EffectiveDatesAdapter.java package au.gov.ga.geodesy.igssitelog.support.marshalling.moxy; import javax.xml.bind.annotation.adapters.XmlAdapter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import au.gov.ga.geodesy.igssitelog.domain.model.EffectiveDates; import au.gov.ga.geodesy.igssitelog.interfaces.xml.MarshallingException; import au.gov.ga.geodesy.igssitelog.util.DateUtil; public class EffectiveDatesAdapter extends XmlAdapter<String, EffectiveDates> { private static final Logger log = LoggerFactory.getLogger(EffectiveDatesAdapter.class); private static final String formatPattern = "(CCYY-MM-DD)"; @Override public String marshal(EffectiveDates dates) throws Exception { if (dates == null) { return null; } StringBuilder buf = new StringBuilder(); if (dates.getFrom() == null) { buf.append(formatPattern); } else { buf.append(DateUtil.format(dates.getFrom())); } buf.append("/"); if (dates.getTo() == null) { buf.append(formatPattern); } else { buf.append(DateUtil.format(dates.getTo())); } return buf.toString(); } @Override public EffectiveDates unmarshal(String inputDateString) throws MarshallingException { if (inputDateString == null) { return null; } inputDateString = inputDateString.trim(); if (inputDateString.equals("")) { return null; } if (inputDateString.startsWith("(")) { inputDateString = inputDateString.substring(1, inputDateString.length()); } if (inputDateString.endsWith(")")) { inputDateString = inputDateString.substring(0, inputDateString.length() - 1); } if (inputDateString.equals("CCYY-MM-DD/CCYY-MM-DD")) { return null; } EffectiveDates dates = new EffectiveDates(); String[] splits = inputDateString.split("/"); dates.setFrom(DateUtil.parse(splits[0].trim())); if (splits.length > 1) { dates.setTo(DateUtil.parse(splits[1].trim())); } if (dates.getFrom() != null || dates.getTo() != null) { return dates; } else { return null; } } }
// Copyright 2008-2013 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry5.internal.services;

import org.apache.tapestry5.LinkSecurity;
import org.apache.tapestry5.services.ComponentEventRequestParameters;
import org.apache.tapestry5.services.PageRenderRequestParameters;

import java.io.IOException;

/**
 * Used to manage the relationship between the security of a request and the security of a page. By secure, we mean
 * whether a request uses HTTPS and whether a page demands the use of HTTPS.
 *
 * @see org.apache.tapestry5.services.Request#isSecure()
 */
public interface RequestSecurityManager
{
    /**
     * Checks the page to see if it is secure; if so, and the request is not secure, then a redirect to the page is
     * generated and sent.
     *
     * @param parameters parameters for the current request
     * @return true if a redirect was sent (request handling should stop), false if normal processing should continue
     * @throws IOException if sending the redirect fails
     */
    boolean checkForInsecurePageRenderRequest(PageRenderRequestParameters parameters) throws IOException;

    /**
     * Checks the target page of the component event request to see if it is secure; if so, and the
     * request is not secure, then a redirect to the page is generated and sent, preserving the
     * original component event request.
     *
     * @param parameters parameters for the current request
     * @return true if a redirect was sent (request handling should stop), false if normal processing should continue
     * @throws IOException if sending the redirect fails
     * @since 5.2.0.0
     */
    boolean checkForInsecureComponentEventRequest(ComponentEventRequestParameters parameters) throws IOException;

    /**
     * Determines if the page's security matches the request's security. Returns {@link LinkSecurity#SECURE}
     * or {@link LinkSecurity#INSECURE} if the request security matches the page's. Otherwise, returns
     * {@link LinkSecurity#FORCE_SECURE} or {@link LinkSecurity#FORCE_INSECURE} (which will force fully
     * qualified URLs to be generated when rendering).
     *
     * @param pageName for the security check
     * @return security for this request, as applied to the indicated page
     */
    LinkSecurity checkPageSecurity(String pageName);
}
<reponame>Clumsy-Coder/myStocksRN /* eslint-disable jest/no-commented-out-tests */ import * as actions from 'src/redux/Stocks/Actions'; import { ActionTypes, Actions, DataDomain } from 'src/redux/Stocks/Types'; import * as testdata from 'jest.testdata'; describe('Stock action creators', () => { describe('Stock quotes', () => { it('Should create an action to fetch Stock quote', () => { const expectedAction = { type: ActionTypes.FETCH_STOCK_QUOTE, stockSymbol: testdata.stockSymbol1, }; expect(actions.fetchStockQuote(testdata.stockSymbol1)).toEqual(expectedAction); }); it('Should create an action to fetch Stock quote PENDING', () => { const expectedAction = { type: ActionTypes.FETCH_STOCK_QUOTE_PENDING, stockSymbol: testdata.stockSymbol1, }; expect(actions.fetchStockQuotePending(testdata.stockSymbol1)).toEqual(expectedAction); }); it('Should create an action to fetch Stock quote FULFILLED', () => { const expectedAction = { type: ActionTypes.FETCH_STOCK_QUOTE_FULFILLED, stockSymbol: testdata.stockSymbol1, payload: testdata.stockQuoteData1, }; expect( actions.fetchStockQuoteFulfilled(testdata.stockSymbol1, testdata.stockQuoteData1), ).toEqual(expectedAction); }); it('Should create an action to fetch Stock quote REJECTED', () => { const expectedAction = { type: ActionTypes.FETCH_STOCK_QUOTE_REJECTED, stockSymbol: testdata.stockSymbol1, error: new Error(''), }; expect(actions.fetchStockQuoteRejected(testdata.stockSymbol1, new Error(''))).toEqual( expectedAction, ); }); }); describe('Stock Chart', () => { it('Should create an action to fetch Stock Chart', () => { const expectedAction: Actions.Chart.FetchAction = { type: ActionTypes.FETCH_STOCK_CHART, stockSymbol: testdata.stockSymbol1, chartRange: DataDomain.ChartRange.MAX, }; expect(actions.fetchStockChart(testdata.stockSymbol1, 'max')).toEqual(expectedAction); }); it('Should create an action to fetch Stock Chart PENDING', () => { const expectedAction: Actions.Chart.FetchPendingAction = { type: 
ActionTypes.FETCH_STOCK_CHART_PENDING, stockSymbol: testdata.stockSymbol1, }; expect(actions.fetchStockChartPending(testdata.stockSymbol1)).toEqual(expectedAction); }); it('Should create an action to fetch Stock Chart FULFILLED', () => { const expectedAction: Actions.Chart.FetchFulfilledAction = { type: ActionTypes.FETCH_STOCK_CHART_FULFILLED, stockSymbol: testdata.stockSymbol1, payload: testdata.stockChartData1, }; expect( actions.fetchStockChartFulfilled(testdata.stockSymbol1, testdata.stockChartData1), ).toEqual(expectedAction); }); it('Should create an action to fetch Stock Chart REJECTED', () => { const expectedAction: Actions.Chart.FetchRejectedAction = { type: ActionTypes.FETCH_STOCK_CHART_REJECTED, stockSymbol: testdata.stockSymbol1, error: new Error(''), }; expect(actions.fetchStockChartRejected(testdata.stockSymbol1, new Error(''))).toEqual( expectedAction, ); }); }); describe('Stock search', () => { it('Should create an action to set stock search keyword', () => { const expectedAction: Actions.Search.SetSearchKeywordAction = { type: ActionTypes.SET_SEARCH_KEYWORD, keyword: testdata.stockSymbol1, }; expect(actions.setSearchKeyword(testdata.stockSymbol1)).toEqual(expectedAction); }); it('Should create an action to clear stock search keyword', () => { const expectedAction: Actions.Search.ClearSearchKeywordAction = { type: ActionTypes.CLEAR_SEARCH_KEYWORD, }; expect(actions.clearSearchKeyword(testdata.stockSymbol1)).toEqual(expectedAction); }); // it(`Should create an action for ${ActionTypes.SET_STOCK_METADATA}`, () => { // const expectedAction: Actions.Search.SetStockMetadata = { // type: ActionTypes.SET_STOCK_METADATA, // stockSymbol: testdata.stockSymbol1, // payload: testdata.stockSearchData1.bestMatches[0], // }; // expect( // actions.setStockMetadata(testdata.stockSymbol1, testdata.stockSearchData1.bestMatches[0]), // ).toEqual(expectedAction); // }); it('Should create an action fetch Stock search', () => { const expectedAction: 
Actions.Search.FetchAction = { type: ActionTypes.SEARCH_KEYWORD, keyword: testdata.stockSymbol1, }; expect(actions.fetchStockSearch(testdata.stockSymbol1)).toEqual(expectedAction); }); it('Should create an action fetch Stock search PENDING', () => { const expectedAction: Actions.Search.FetchPendingAction = { type: ActionTypes.SEARCH_KEYWORD_PENDING, keyword: testdata.stockSymbol1, }; expect(actions.fetchStockSearchPending(testdata.stockSymbol1)).toEqual(expectedAction); }); // it('Should create an action fetch Stock search, FULFILLED', () => { // const expectedAction: Actions.Search.FetchFulfilledAction = { // type: ActionTypes.SEARCH_KEYWORD_FULFILLED, // keyword: testdata.stockSymbol1, // payload: testdata.stockSearchData1, // }; // expect( // actions.fetchStockSearchFulfilled(testdata.stockSymbol1, testdata.stockSearchData1), // ).toEqual(expectedAction); // }); it('Should create an action fetch Stock search, REJECTED', () => { const expectedAction: Actions.Search.FetchRejectedAction = { type: ActionTypes.SEARCH_KEYWORD_REJECTED, keyword: testdata.stockSymbol1, error: new Error(''), }; expect(actions.fetchStockSearchRejected(testdata.stockSymbol1, new Error(''))).toEqual( expectedAction, ); }); }); describe('Stock quote batch fetching', () => { it('Should create an action to fetch a batch of stock quotes', () => { const expectedAction: Actions.Batch.FetchQuoteAction = { type: ActionTypes.FETCH_STOCK_QUOTE_BATCH, }; expect(actions.fetchStockQuoteBatch()).toEqual(expectedAction); }); }); describe('Stock Symbols Metadata', () => { it('Should create an action to fetch Stock Symbols Metadata', () => { const expectedAction: Actions.SymbolsMetadata.FetchAction = { type: ActionTypes.FETCH_SYMBOLS_METADATA, }; expect(actions.fetchSymbolsMetadata()).toEqual(expectedAction); }); it('Should create an action to fetch Stock Symbols Metadata PENDING', () => { const expectedAction: Actions.SymbolsMetadata.FetchPendingAction = { type: ActionTypes.FETCH_SYMBOLS_METADATA_PENDING, 
}; expect(actions.fetchSymbolsMetadataPending()).toEqual(expectedAction); }); it('Should create an action to fetch Stock Symbols Metadata FULFILLED', () => { const expectedAction: Actions.SymbolsMetadata.FetchFulfilledAction = { type: ActionTypes.FETCH_SYMBOLS_METADATA_FULFILLED, payload: [testdata.symbolsMetadata1, testdata.symbolsMetadata2, testdata.symbolsMetadata3], }; expect( actions.fetchSymbolsMetadataFulfilled([ testdata.symbolsMetadata1, testdata.symbolsMetadata2, testdata.symbolsMetadata3, ]), ).toEqual(expectedAction); }); it('Should create an action to fetch Stock Symbols Metadata REJECTED', () => { const expectedAction: Actions.SymbolsMetadata.FetchRejectedAction = { type: ActionTypes.FETCH_SYMBOLS_METADATA_REJECTED, error: new Error(''), }; expect(actions.fetchSymbolsMetadataRejected(new Error(''))).toEqual(expectedAction); }); }); });
// 15667. 2018 ์—ฐ์„ธ๋Œ€ํ•™๊ต ํ”„๋กœ๊ทธ๋ž˜๋ฐ ๊ฒฝ์ง„๋Œ€ํšŒ // 2019.10.09 // ์ˆ˜ํ•™ #include<iostream> #include<vector> #include<cmath> using namespace std; int main() { int n; cin >> n; int k = 1; int sum = 0; while (1) { int sum = pow(k, 0) + pow(k, 1) + pow(k, 2); if (sum == n) { break; } k++; } cout << k << endl; return 0; }
<filename>jgrapht-master/jgrapht-core/src/test/java/org/jgrapht/generate/WindmillGraphsGeneratorTest.java<gh_stars>1-10
/*
 * (C) Copyright 2017-2018, by <NAME> and Contributors.
 *
 * JGraphT : a free Java graph-theory library
 *
 * This program and the accompanying materials are dual-licensed under
 * either
 *
 * (a) the terms of the GNU Lesser General Public License version 2.1
 * as published by the Free Software Foundation, or (at your option) any
 * later version.
 *
 * or (per the licensee's choosing)
 *
 * (b) the terms of the Eclipse Public License v1.0 as published by
 * the Eclipse Foundation.
 */
package org.jgrapht.generate;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.*;

import org.jgrapht.*;
import org.jgrapht.alg.shortestpath.*;
import org.jgrapht.alg.util.*;
import org.jgrapht.graph.*;
import org.junit.*;

/**
 * Tests for WindmillGraphsGenerator (WINDMILL and DUTCHWINDMILL modes).
 *
 * @author <NAME>
 */
public class WindmillGraphsGeneratorTest
{

    // NOTE(review): this test exercises GeneralizedPetersenGraphGenerator, not
    // WindmillGraphsGenerator — it looks carried over from the Petersen test
    // class; confirm it belongs in this file.
    @Test
    public void testCubicalGraph()
    {
        Graph<Integer, DefaultEdge> g = new SimpleGraph<>(DefaultEdge.class);
        GeneralizedPetersenGraphGenerator<Integer, DefaultEdge> gpgg =
            new GeneralizedPetersenGraphGenerator<>(4, 1);
        gpgg.generateGraph(g, new IntegerVertexFactory(), null);
        // cube graph: 8 vertices, 12 edges, radius 3, diameter 3, girth 4
        this.validateBasics(g, 8, 12, 3, 3, 4);
        assertTrue(GraphTests.isBipartite(g));
        assertTrue(GraphTests.isCubic(g));
    }

    // --------------Tests for Windmill graphs ---------------------

    // Windmill Wd(3,4): 4 copies of K_3 sharing one center vertex.
    @Test
    public void testGraph1a()
    {
        Graph<Integer, DefaultEdge> g = new SimpleGraph<>(DefaultEdge.class);
        new WindmillGraphsGenerator<Integer, DefaultEdge>(
            WindmillGraphsGenerator.Mode.WINDMILL, 3, 4)
                .generateGraph(g, new IntegerVertexFactory(), null);
        assertEquals(10, g.vertexSet().size());
        assertEquals(18, g.edgeSet().size());
        this.verifyVertexDegree(g, WindmillGraphsGenerator.Mode.WINDMILL, 3, 4);
    }

    // Windmill Wd(4,3): 4 copies of K_3 (friendship graph F_4 shape in WINDMILL mode).
    @Test
    public void testGraph2a()
    {
        Graph<Integer, DefaultEdge> g = new SimpleGraph<>(DefaultEdge.class);
        new WindmillGraphsGenerator<Integer, DefaultEdge>(
            WindmillGraphsGenerator.Mode.WINDMILL, 4, 3)
                .generateGraph(g, new IntegerVertexFactory(), null);
        assertEquals(9, g.vertexSet().size());
        assertEquals(12, g.edgeSet().size());
        this.verifyVertexDegree(g, WindmillGraphsGenerator.Mode.WINDMILL, 4, 3);
    }

    // Windmill Wd(3,5): 3 copies of K_5 sharing one center vertex.
    @Test
    public void testGraph3a()
    {
        Graph<Integer, DefaultEdge> g = new SimpleGraph<>(DefaultEdge.class);
        new WindmillGraphsGenerator<Integer, DefaultEdge>(
            WindmillGraphsGenerator.Mode.WINDMILL, 3, 5)
                .generateGraph(g, new IntegerVertexFactory(), null);
        assertEquals(13, g.vertexSet().size());
        assertEquals(30, g.edgeSet().size());
        this.verifyVertexDegree(g, WindmillGraphsGenerator.Mode.WINDMILL, 3, 5);
    }

    // --------------Tests for Dutch Windmill Graphs ---------------

    // The butterfly graph is the Dutch windmill D(2,3): two triangles sharing a vertex.
    @Test
    public void testButterflyGraph()
    {
        Graph<Integer, DefaultEdge> g = NamedGraphGenerator.butterflyGraph();
        this.validateBasics(g, 5, 6, 1, 2, 3);
        this.verifyVertexDegree(g, WindmillGraphsGenerator.Mode.DUTCHWINDMILL, 2, 3);
        assertTrue(GraphTests.isEulerian(g));
    }

    // Dutch windmill D(4,3): four triangles joined at one center vertex.
    @Test
    public void testGraph2b()
    {
        Graph<Integer, DefaultEdge> g = new SimpleGraph<>(DefaultEdge.class);
        new WindmillGraphsGenerator<Integer, DefaultEdge>(
            WindmillGraphsGenerator.Mode.DUTCHWINDMILL, 4, 3)
                .generateGraph(g, new IntegerVertexFactory(), null);
        assertEquals(9, g.vertexSet().size());
        assertEquals(12, g.edgeSet().size());
        this.verifyVertexDegree(g, WindmillGraphsGenerator.Mode.DUTCHWINDMILL, 4, 3);
    }

    // Dutch windmill D(3,5): three 5-cycles joined at one center vertex.
    @Test
    public void testGraph3b()
    {
        Graph<Integer, DefaultEdge> g = new SimpleGraph<>(DefaultEdge.class);
        new WindmillGraphsGenerator<Integer, DefaultEdge>(
            WindmillGraphsGenerator.Mode.DUTCHWINDMILL, 3, 5)
                .generateGraph(g, new IntegerVertexFactory(), null);
        assertEquals(13, g.vertexSet().size());
        assertEquals(15, g.edgeSet().size());
        this.verifyVertexDegree(g, WindmillGraphsGenerator.Mode.DUTCHWINDMILL, 3, 5);
    }

    /**
     * Asserts the basic structural invariants of a generated graph: vertex and
     * edge counts plus radius, diameter, and girth.
     */
    private <V, E> void validateBasics(
        Graph<V, E> g, int vertices, int edges, int radius, int diameter, int girt)
    {
        assertEquals(vertices, g.vertexSet().size());
        assertEquals(edges, g.edgeSet().size());
        GraphMeasurer<V, E> gm = new GraphMeasurer<>(g);
        assertEquals(radius, gm.getRadius(), 0.00000001);
        assertEquals(diameter, gm.getDiameter(), 0.00000001);
        assertEquals(girt, GraphMetrics.getGirth(g), 0.00000001);
    }

    /**
     * Checks expected vertex degrees: the center vertex (index 0 in the vertex
     * list) has degree 2*m (Dutch windmill) or m*(n-1) (windmill); every other
     * vertex has degree 2 resp. n-1.
     */
    private <V, E> void verifyVertexDegree(Graph<V, E> g, WindmillGraphsGenerator.Mode mode,
        int m, int n)
    {
        List<V> vertices = new ArrayList<>(g.vertexSet());
        if (mode == WindmillGraphsGenerator.Mode.DUTCHWINDMILL) {
            assertEquals(2 * m, g.degreeOf(vertices.get(0))); // degree of center vertex
            for (int i = 1; i < vertices.size(); i++)
                assertEquals(2, g.degreeOf(vertices.get(i))); // degree of other vertices
        } else {
            assertEquals(m * (n - 1), g.degreeOf(vertices.get(0))); // degree of center vertex
            for (int i = 1; i < vertices.size(); i++)
                assertEquals(n - 1, g.degreeOf(vertices.get(i))); // degree of other vertices
        }
    }
}
#!/usr/bin/env bash

# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE

# Builds the awkward-cuda-kernels wheel: compiles the CUDA kernels inside an
# nvidia/cuda Docker container, wraps the resulting .so in a pure-Python
# package, then retags the wheel as platform-specific (manylinux2014_x86_64).

set -e

PLATFORM="manylinux2014_x86_64"
CUDA_VERSION=9.0
# Package version, taken verbatim (whitespace stripped) from VERSION_INFO.
AWKWARD_VERSION=`cat VERSION_INFO | tr -d '[:space:]'`

# Map the requested CUDA version onto a known devel Docker image tag.
if [[ "$CUDA_VERSION" == "11.0" ]]; then
    export DOCKER_IMAGE_TAG="11.0-devel-ubuntu18.04"
elif [[ "$CUDA_VERSION" == "10.2" ]]; then
    export DOCKER_IMAGE_TAG="10.2-devel-ubuntu18.04"
elif [[ "$CUDA_VERSION" == "10.1" ]]; then
    export DOCKER_IMAGE_TAG="10.1-devel-ubuntu18.04"
elif [[ "$CUDA_VERSION" == "10.0" ]]; then
    export DOCKER_IMAGE_TAG="10.0-devel-ubuntu18.04"
elif [[ "$CUDA_VERSION" == "9.2" ]]; then
    export DOCKER_IMAGE_TAG="9.2-devel-ubuntu18.04"
elif [[ "$CUDA_VERSION" == "9.1" ]]; then
    export DOCKER_IMAGE_TAG="9.1-devel-ubuntu16.04"
elif [[ "$CUDA_VERSION" == "9.0" ]]; then
    export DOCKER_IMAGE_TAG="9.0-devel-ubuntu16.04"
elif [[ "$CUDA_VERSION" == "8.0" ]]; then
    export DOCKER_IMAGE_TAG="8.0-devel-ubuntu16.04"
else
    echo "Docker image for CUDA version" $CUDA_VERSION "is not known"
    exit 1
fi

# Start from a clean package staging area.
rm -rf build dist
mkdir build
cp -r src/awkward_cuda_kernels build
# Stamp version/CUDA/image metadata into the package's __init__.py.
echo "__version__ ='"$AWKWARD_VERSION"'" >> build/awkward_cuda_kernels/__init__.py
echo "cuda_version ='"$CUDA_VERSION"'" >> build/awkward_cuda_kernels/__init__.py
echo "docker_image ='docker.io/nvidia/cuda:"$DOCKER_IMAGE_TAG"'" >> build/awkward_cuda_kernels/__init__.py

# Compile the .cu kernels into a shared library inside the CUDA container.
# NOTE: both variables are intentionally left unquoted in the `docker run`
# invocation below so the shell word-splits them into separate arguments.
export DOCKER_ARGS="-v`pwd`:/home -w/home docker.io/nvidia/cuda:"$DOCKER_IMAGE_TAG
export BUILD_SHARED_LIBRARY="nvcc -arch=sm_35 -std=c++11 -Xcompiler -fPIC -Xcompiler -DVERSION_INFO="$AWKWARD_VERSION" -Iinclude src/cuda-kernels/*.cu --shared -o build/awkward_cuda_kernels/libawkward-cuda-kernels.so"
docker run $DOCKER_ARGS $BUILD_SHARED_LIBRARY

# Generate the setup.py used to package the compiled library.
cat > build/cuda-setup.py << EOF
import setuptools
from setuptools import setup
setup(name = "awkward-cuda-kernels",
      packages = ["awkward_cuda_kernels"],
      package_dir = {"": "build"},
      package_data = {"awkward_cuda_kernels": ["*.so"]},
      version = open("VERSION_INFO").read().strip(),
      author = "Jim Pivarski",
      author_email = "pivarski@princeton.edu",
      maintainer = "Jim Pivarski",
      maintainer_email = "pivarski@princeton.edu",
      description = "CUDA plug-in for Awkward Array, enables GPU-bound arrays and operations.",
      long_description = "This plug-in is experimental. Instructions on how to use it will be provided with its first stable release.",
      long_description_content_type = "text/markdown",
      url = "https://github.com/scikit-hep/awkward-1.0",
      download_url = "https://github.com/scikit-hep/awkward-1.0/releases",
      license = "BSD 3-clause",
      test_suite = "tests-cuda",
      python_requires = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
      install_requires = ["cupy>=7.0.0"],
      classifiers = [
          # "Development Status :: 1 - Planning",
          # "Development Status :: 2 - Pre-Alpha",
          "Development Status :: 3 - Alpha",
          # "Development Status :: 4 - Beta",
          # "Development Status :: 5 - Production/Stable",
          # "Development Status :: 6 - Mature",
          # "Development Status :: 7 - Inactive",
          "Intended Audience :: Developers",
          "Intended Audience :: Information Technology",
          "Intended Audience :: Science/Research",
          "License :: OSI Approved :: BSD License",
          "Operating System :: POSIX :: Linux",
          "Programming Language :: Python",
          "Programming Language :: Python :: 2.7",
          "Programming Language :: Python :: 3.5",
          "Programming Language :: Python :: 3.6",
          "Programming Language :: Python :: 3.7",
          "Programming Language :: Python :: 3.8",
          "Topic :: Scientific/Engineering",
          "Topic :: Scientific/Engineering :: Information Analysis",
          "Topic :: Scientific/Engineering :: Mathematics",
          "Topic :: Scientific/Engineering :: Physics",
          "Topic :: Software Development",
          "Topic :: Utilities",
          ])
EOF

# Build a generic (py3-none-any) wheel first.
python build/cuda-setup.py bdist_wheel

# Retag the wheel as platform-specific: unzip it, patch the WHEEL metadata
# (Root-Is-Purelib and Tag), and rezip under the platform-tagged filename.
cd dist
rm -f awkward_cuda_kernels-$AWKWARD_VERSION-py3-none-$PLATFORM.whl
unzip awkward_cuda_kernels-$AWKWARD_VERSION-py3-none-any.whl
cp awkward_cuda_kernels-$AWKWARD_VERSION.dist-info/WHEEL tmp_WHEEL
cat tmp_WHEEL | sed "s/Root-Is-Purelib: true/Root-Is-Purelib: false/" | sed "s/Tag: py3-none-any/Tag: py3-none-"$PLATFORM"/" > awkward_cuda_kernels-$AWKWARD_VERSION.dist-info/WHEEL
zip awkward_cuda_kernels-$AWKWARD_VERSION-py3-none-$PLATFORM.whl -r awkward_cuda_kernels awkward_cuda_kernels-$AWKWARD_VERSION.dist-info
cd ..

# Optionally install the freshly built wheel when invoked with --install.
if [ "$1" == "--install" ]; then
    pip install dist/awkward_cuda_kernels-$AWKWARD_VERSION-py3-none-$PLATFORM.whl
fi
<filename>src/Backend/peripheral_definition_generator.cpp
// Copyright 2021 <NAME>
// Author: <NAME> <<EMAIL>>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "Backend/peripheral_definition_generator.h"

// Builds JSON peripheral definitions for every bus module reachable from the
// crossbar root, then for each module's submodules (transitively).
// NOTE(review): the `file` parameter is accepted but not referenced in this
// translation unit — confirm whether it is still needed.
peripheral_definition_generator::peripheral_definition_generator(const Depfile &file, std::shared_ptr<bus_crossbar> xbar, std::shared_ptr<data_store> &d) {
    bus_root = std::move(xbar);
    ver = "1.0"; // format version written into every generated definition
    d_store = d;
    walk_bus_structure(bus_root);
}

// Depth-first walk of the bus tree: recurse into nested crossbars, generate a
// peripheral definition for each leaf bus_module. Afterwards, drain the
// submodule work list (generate_peripheral may enqueue further submodules,
// hence the repeated working-set loop until empty).
void peripheral_definition_generator::walk_bus_structure(const std::shared_ptr<bus_crossbar> &node) {
    for(auto &item: node->get_children()){
        auto& ptr = *item;
        std::string ret;
        // Runtime type dispatch: children are either nested crossbars or modules.
        if (typeid(ptr) == typeid(bus_crossbar)) {
            walk_bus_structure(std::static_pointer_cast<bus_crossbar>(item));
        } else if(typeid(ptr) == typeid(bus_module)) {
            auto mod = std::static_pointer_cast<bus_module>(item);
            HDL_Resource res = d_store->get_HDL_resource(mod->get_module_type());
            generate_peripheral(res);
        }
    }
    while(!submodules_to_generate.empty()){
        // Swap out the queue before iterating: generate_peripheral pushes new
        // entries, which must be processed in the next round.
        auto working_set = submodules_to_generate;
        submodules_to_generate.clear();
        for(auto &item:working_set){
            generate_peripheral(item);
        }
    }
}

// Emits the JSON definition for one HDL resource (name, version, registers)
// into peripheral_defs, and queues the resource's submodules for generation.
// Resources already present in peripheral_defs are skipped (deduplication).
void peripheral_definition_generator::generate_peripheral(HDL_Resource &res) {
    nlohmann::json specs;
    if(peripheral_defs.contains(res.getName())) return;
    specs["name"] = res.getName();
    specs["version"] = ver;
    std::vector<nlohmann::json> regs;
    for(auto &item:res.get_documentation().get_registers()){
        regs.push_back(generate_register(item));
    }
    specs["registers"] = regs;
    peripheral_defs[res.getName()] = specs;
    for(auto &item:res.get_submodules()){
        std::string item_test = item.get_module_type();
        auto resource = d_store->get_HDL_resource(item.get_module_type());
        submodules_to_generate.push_back(resource);
    }
}

// Converts one register's documentation into its JSON form: name/description,
// an "R"/"W"/"RW" direction string, a hex offset, and the per-bit fields.
nlohmann::json peripheral_definition_generator::generate_register(register_documentation &doc) {
    nlohmann::json ret;
    ret["ID"] = doc.get_name();
    ret["register_name"] = doc.get_name();
    ret["description"] = doc.get_description();
    std::string dir;
    if(doc.get_read_allowed()) dir += "R";
    if(doc.get_write_allowed()) dir += "W";
    ret["direction"] = dir;
    // Offset serialized as a 0x-prefixed hex string.
    std::ostringstream off;
    off<< "0x" << std::hex << doc.get_offset();
    ret["offset"] = off.str();
    ret["register_format"] = "single";
    std::vector<nlohmann::json> fields = {};
    for(auto &item:doc.get_fields()){
        fields.push_back(generate_field(item));
    }
    ret["fields"] = fields;
    return ret;
}

// Converts one bit-field's documentation into JSON (name, description,
// starting bit offset, and bit length).
nlohmann::json peripheral_definition_generator::generate_field(field_documentation &doc) {
    nlohmann::json ret;
    ret["name"] = doc.get_name();
    ret["description"] = doc.get_description();
    ret["offset"] = doc.get_starting_position();
    ret["length"] = doc.get_length();
    return ret;
}

// Serializes all accumulated peripheral definitions to the given file path.
void peripheral_definition_generator::write_definition_file(const std::string &path) {
    std::string str = peripheral_defs.dump();
    std::ofstream ss(path);
    ss<<str;
    ss.close();
}
class ContentPages:
    """Base class for all content page types.

    Carries no state or behavior of its own; concrete page types such as
    ArticleContent and VideoContent derive from it and add their own fields
    and a display_content() method.

    (The original docstring referred to "ContentSettings", a copy-paste
    error; corrected to describe this class.)
    """

    pass  # Base class with no specific attributes or methods


class ArticleContent(ContentPages):
    """A content page holding a text article: title, author, and body."""

    def __init__(self, title, author, content):
        self.title = title
        self.author = author
        self.content = content

    def display_content(self):
        """Print the article's title, author, and content, one per line."""
        print(f"Title: {self.title}")
        print(f"Author: {self.author}")
        print(f"Content: {self.content}")


class VideoContent(ContentPages):
    """A content page holding a video: title, duration (seconds), resolution."""

    def __init__(self, title, duration, resolution):
        self.title = title
        self.duration = duration
        self.resolution = resolution

    def display_content(self):
        """Print the video's title, duration, and resolution, one per line."""
        print(f"Title: {self.title}")
        print(f"Duration: {self.duration} seconds")
        print(f"Resolution: {self.resolution}")
# Load the libraries we need
import pandas as pd
import numpy as np
from sklearn.ensemble import RandomForestClassifier

# Read the dataset from disk
data = pd.read_csv('data.csv')

# Separate the feature matrix (every column except the last)
# from the class labels (the last column)
X = data.drop(data.columns[-1], axis=1)
y = data[data.columns[-1]]

# Build a 1000-tree random forest and fit it on the full dataset
model = RandomForestClassifier(n_estimators=1000)
model.fit(X, y)
#!/bin/bash
# Symlink this repo's .vscode/settings.json into the user-level VS Code
# settings directory for the current platform.

# Print the OS family: Mac, Linux, Cygwin, or Unknown.
function current_os() {
  local os='Unknown'
  if [ "$(uname)" == 'Darwin' ]; then
    os='Mac'
  elif [ "$(uname -s | cut -b 1-5)" == 'Linux' ]; then
    os='Linux'
  elif [ "$(uname -s | cut -b 1-10)" == 'MINGW32_NT' ]; then
    os='Cygwin'
  fi
  echo "$os"
}

# Print the per-user VS Code settings directory for the given OS name.
function setting_json_dir() {
  local os=$1
  local dir_name=''
  if [ "$os" == 'Mac' ]; then
    dir_name="$HOME/Library/Application Support/Code/User"
  elif [ "$os" == 'Linux' ]; then
    dir_name="$HOME/.config/Code/User"
  else
    dir_name="C:\Users\(ユーザー名)\AppData\Roaming\Code\User" # useless
  fi
  echo "$dir_name"
}

# Work from the directory containing this script. The original used
# `|| return`, which is invalid at the top level of an executed script
# (bash reports an error and keeps going in the wrong directory); abort
# instead if the cd fails.
cd "$(dirname "$0")" || exit 1

link_to="$(pwd)"/.vscode/settings.json
# Quote the command substitution so an unexpected multi-word OS name
# cannot be word-split into multiple arguments.
link_from="$(setting_json_dir "$(current_os)")"/settings.json

ln -sf "$link_to" "$link_from"
package handler

import (
	"net/http"

	"github.com/hallucino5105/<%= projectNameSnakeCase %>/pkg/urlarg"

	"github.com/labstack/echo"
)

// HandlerSample returns an echo handler that validates URL arguments via
// urlarg.ParseArg and responds with a fixed JSON sample payload.
//
// NOTE: the import path above contains a scaffolding placeholder
// (<%= projectNameSnakeCase %>); this file is a code-generation template and
// is not compilable Go until the template is rendered.
func HandlerSample() echo.HandlerFunc {
	return func(c echo.Context) error {
		// Distinguish a parse failure (err != nil) from a parse that
		// succeeded but produced no argument set (arg == nil).
		arg, err := urlarg.ParseArg(c)
		if err != nil {
			return echo.NewHTTPError(http.StatusInternalServerError, err)
		} else if arg == nil {
			return echo.NewHTTPError(http.StatusInternalServerError, "invalid arguments")
		}

		return c.JSON(http.StatusOK, map[string]interface{}{
			"res": "api sample",
		})
	}
}
/* Copyright 2009-2015 <NAME>
 *
 * This file is part of the MOEA Framework.
 *
 * The MOEA Framework is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or (at your
 * option) any later version.
 *
 * The MOEA Framework is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
 * License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
 */
package org.moeaframework.core.operator.real;

import org.moeaframework.core.PRNG;
import org.moeaframework.core.Settings;
import org.moeaframework.core.Solution;
import org.moeaframework.core.Variable;
import org.moeaframework.core.Variation;
import org.moeaframework.core.variable.RealVariable;

/**
 * Simulated binary crossover (SBX) operator. SBX attempts to simulate the
 * offspring distribution of binary-encoded single-point crossover on
 * real-valued decision variables. An example of this distribution,
 * which favors offspring nearer to the two parents, is shown below.
 * <p>
 * <img src="doc-files/SBX-1.png" alt="Example SBX operator distribution" />
 * <p>
 * The distribution index controls the shape of the offspring distribution.
 * Larger values for the distribution index generates offspring closer to the
 * parents.
 * <p>
 * This operator is type-safe.
 * <p>
 * References:
 * <ol>
 * <li><NAME>. and <NAME>. "Simulated Binary Crossover for Continuous
 * Search Space." Indian Institute of Technology, Kanpur, India.
 * Technical Report No. IITK/ME/SMD-94027, 1994.
 * </ol>
 */
public class SBX implements Variation {

	/**
	 * The probability of applying this SBX operator to each variable.
	 */
	private final double probability;

	/**
	 * The distribution index of this SBX operator.
	 */
	private final double distributionIndex;

	/**
	 * Constructs a SBX operator with the specified probability and
	 * distribution index.
	 *
	 * @param probability the probability of applying this SBX operator to each
	 *        variable
	 * @param distributionIndex the distribution index of this SBX operator
	 */
	public SBX(double probability, double distributionIndex) {
		this.probability = probability;
		this.distributionIndex = distributionIndex;
	}

	/**
	 * Returns the probability of applying this SBX operator to each variable.
	 *
	 * @return the probability of applying this SBX operator to each variable
	 */
	public double getProbability() {
		return probability;
	}

	/**
	 * Returns the distribution index of this SBX operator.
	 *
	 * @return the distribution index of this SBX operator
	 */
	public double getDistributionIndex() {
		return distributionIndex;
	}

	@Override
	public int getArity() {
		return 2;
	}

	@Override
	public Solution[] evolve(Solution[] parents) {
		// Operate on copies so the parent solutions are never mutated.
		Solution result1 = parents[0].copy();
		Solution result2 = parents[1].copy();

		if (PRNG.nextDouble() <= probability) {
			for (int i = 0; i < result1.getNumberOfVariables(); i++) {
				Variable variable1 = result1.getVariable(i);
				Variable variable2 = result2.getVariable(i);

				// Each variable pair is crossed with 50% probability, and only
				// when both variables are real-valued (type-safety).
				if (PRNG.nextBoolean() &&
						(variable1 instanceof RealVariable) &&
						(variable2 instanceof RealVariable)) {
					evolve((RealVariable)variable1,
							(RealVariable)variable2,
							distributionIndex);
				}
			}
		}

		return new Solution[] { result1, result2 };
	}

	/*
	 * The following source code is modified from the DTLZ variator module for
	 * PISA. This implementation was chosen over Kalyanmoy Deb's original SBX
	 * implementation due to license incompatibilities with the LGPL. The DTLZ
	 * variator module license is provided below.
	 *
	 * Copyright (c) 2002-2003 Swiss Federal Institute of Technology,
	 * Computer Engineering and Networks Laboratory. All rights reserved.
	 *
	 * PISA - A Platform and Programming Language Independent Interface for
	 * Search Algorithms.
	 *
	 * DTLZ - Scalable Test Functions for MOEAs - A variator module for PISA
	 *
	 * Permission to use, copy, modify, and distribute this software and its
	 * documentation for any purpose, without fee, and without written
	 * agreement is hereby granted, provided that the above copyright notice
	 * and the following two paragraphs appear in all copies of this
	 * software.
	 *
	 * IN NO EVENT SHALL THE SWISS FEDERAL INSTITUTE OF TECHNOLOGY, COMPUTER
	 * ENGINEERING AND NETWORKS LABORATORY BE LIABLE TO ANY PARTY FOR DIRECT,
	 * INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF
	 * THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE SWISS
	 * FEDERAL INSTITUTE OF TECHNOLOGY, COMPUTER ENGINEERING AND NETWORKS
	 * LABORATORY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
	 *
	 * THE SWISS FEDERAL INSTITUTE OF TECHNOLOGY, COMPUTER ENGINEERING AND
	 * NETWORKS LABORATORY, SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
	 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
	 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
	 * ON AN "AS IS" BASIS, AND THE SWISS FEDERAL INSTITUTE OF TECHNOLOGY,
	 * COMPUTER ENGINEERING AND NETWORKS LABORATORY HAS NO OBLIGATION TO
	 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
	 */

	/**
	 * Evolves the specified variables using the SBX operator.
	 *
	 * @param v1 the first variable
	 * @param v2 the second variable
	 * @param distributionIndex the distribution index of this SBX operator
	 */
	public static void evolve(RealVariable v1, RealVariable v2,
			double distributionIndex) {
		double x0 = v1.getValue();
		double x1 = v2.getValue();

		double dx = Math.abs(x1 - x0);

		// Skip crossover when the parents are (numerically) identical.
		if (dx > Settings.EPS) {
			double lb = v1.getLowerBound();
			double ub = v1.getUpperBound();

			// bl/bu: spread-factor limits derived from the distance of the
			// lower/upper parent to its nearer bound, normalized by dx.
			double bl;
			double bu;

			if (x0 < x1) {
				bl = 1 + 2 * (x0 - lb) / dx;
				bu = 1 + 2 * (ub - x1) / dx;
			} else {
				bl = 1 + 2 * (x1 - lb) / dx;
				bu = 1 + 2 * (ub - x0) / dx;
			}

			//use symmetric distributions
			if (bl < bu) {
				bu = bl;
			} else {
				bl = bu;
			}

			// Probability mass available within the bounds for each side.
			double p_bl = 1 - 1 / (2 * Math.pow(bl, distributionIndex + 1));
			double p_bu = 1 - 1 / (2 * Math.pow(bu, distributionIndex + 1));

			double u = PRNG.nextDouble();

			//prevent out-of-bounds values if PRNG draws the value 1.0
			if (u == 1.0) {
				u = Math.nextAfter(u, -1.0);
			}

			double u0 = u * p_bl;
			double u1 = u * p_bu;

			// Spread factors for the two offspring, from the inverse CDF of
			// the SBX polynomial distribution.
			double b0;
			double b1;

			if (u0 <= 0.5) {
				b0 = Math.pow(2 * u0, 1 / (distributionIndex + 1));
			} else {
				b0 = Math.pow(0.5 / (1 - u0), 1 / (distributionIndex + 1));
			}

			if (u1 <= 0.5) {
				b1 = Math.pow(2 * u1, 1 / (distributionIndex + 1));
			} else {
				b1 = Math.pow(0.5 / (1 - u1), 1 / (distributionIndex + 1));
			}

			if (x0 < x1) {
				v1.setValue(0.5 * (x0 + x1 + b0 * (x0 - x1)));
				v2.setValue(0.5 * (x0 + x1 + b1 * (x1 - x0)));
			} else {
				v1.setValue(0.5 * (x0 + x1 + b1 * (x0 - x1)));
				v2.setValue(0.5 * (x0 + x1 + b0 * (x1 - x0)));
			}

			//this makes PISA's SBX compatible with other implementations
			//which swap the values
			if (PRNG.nextBoolean()) {
				double temp = v1.getValue();
				v1.setValue(v2.getValue());
				v2.setValue(temp);
			}

			//guard against out-of-bounds values
			if (v1.getValue() < lb) {
				v1.setValue(lb);
			} else if (v1.getValue() > ub) {
				v1.setValue(ub);
			}

			if (v2.getValue() < lb) {
				v2.setValue(lb);
			} else if (v2.getValue() > ub) {
				v2.setValue(ub);
			}
		}
	}

}
<filename>extension/types/response.ts<gh_stars>10-100
import { CodeTour, EnhancedCodeTourStep } from './code-tour'

/** Response instructing the client to navigate to an external URL. */
export interface RedirectResponse {
  action: 'REDIRECT'
  url: string
}

/** Response carrying a single enhanced code-tour step. */
export interface StepResponse {
  action: 'STEP'
  step: EnhancedCodeTourStep
}

/** Response carrying a whole code tour. */
export interface CodeTourResponse {
  action: 'CODE_TOUR'
  codeTour: CodeTour
}

/** Union of all response shapes, discriminated by the `action` field. */
export type Response = RedirectResponse | StepResponse | CodeTourResponse
#!/usr/bin/env bash
# Register every existing gitlab_runner container against a GitLab instance
# by invoking 23-gitlab-runner-register.sh once per container.

URL=$1                     # e.g. https://git.test.site/
REGISTRATION_TOKEN=$2      # e.g. aKw3RAukuZgPqwRzgnCz
TAG_LIST=$3                # e.g. common
DOCKER_IMAGE=${4:-alpine}  # defaults to alpine when omitted

if [[ -z $URL || -z $REGISTRATION_TOKEN || -z $TAG_LIST || -z $DOCKER_IMAGE ]]; then
    echo "Usage: $0 {url} {token} {tags} {docker_image:alpine}"
    exit 1
fi

echo "URL: $URL"
echo "Token: $REGISTRATION_TOKEN"
echo "Tag-list: $TAG_LIST"
echo "Docker image: $DOCKER_IMAGE"

# Collect the IDs of all gitlab_runner containers (running or stopped).
# $(...) replaces the legacy backtick form.
CONTAINER_LIST=$(docker ps --all | grep gitlab_runner | awk '{printf("%s\n", $1);}')

REGISTER_SCRIPT=23-gitlab-runner-register.sh

# $CONTAINER_LIST is intentionally unquoted so it word-splits into one
# container ID per iteration; all arguments to the child script are quoted.
for i in $CONTAINER_LIST; do
    "$SHELL" "$REGISTER_SCRIPT" "$i" "$URL" "$REGISTRATION_TOKEN" "$TAG_LIST" "$DOCKER_IMAGE"
done

echo "Loop done."
#!/bin/bash
# Fine-tune and evaluate BERT on the "ssc" task via run_glue.py:
# - GPUs 2-7 only (CUDA_VISIBLE_DEVICES); repo root added to PYTHONPATH
# - bert-large-uncased, lower-cased input, 512-token sequences
# - training sequences longer than the limit are filtered out (not truncated)
# - 3 epochs, learning rate 2e-5, per-GPU batch size 32
CUDA_VISIBLE_DEVICES=2,3,4,5,6,7 PYTHONPATH=.. python3 ../examples/run_glue.py \
  --task_name ssc \
  --do_train \
  --do_eval \
  --do_lower_case \
  --data_dir ~/data-eval \
  --model_type bert \
  --model_name_or_path bert-large-uncased \
  --max_seq_length 512 \
  --filter_long_sequences_train \
  --per_gpu_train_batch_size 32 \
  --learning_rate 2e-5 \
  --num_train_epochs 3.0 \
  --output_dir ~/eval/ssc-large-512-notrunctrain/
#!/bin/bash
# author: Liang Gong
# Demonstrates a directory-traversal vulnerability: starts the vulnerable
# server, sends an attack request that reads a file outside the server's
# working directory, then kills the server.

# Pick the Node.js binary name per platform (Debian/Ubuntu ship "nodejs").
if [ "$(uname)" == "Darwin" ]; then
    # under Mac OS X platform
    NODE='node'
elif [ "$(expr substr "$(uname -s)" 1 5)" == "Linux" ]; then
    # under GNU/Linux platform
    NODE='nodejs'
fi

# Abort if the demo directory is missing instead of running from the wrong place.
cd directory-traversal/hdsdhhksjd || exit 1

RED='\033[0;31m'
BLUE='\033[0;34m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color

# start the server
echo -e "\t[${GREEN}start vulnerable server${NC}]: ${BLUE}hdsdhhksjd${NC}"
$NODE test.js >/dev/null 2>&1 &
vulnpid=$!

# wait for the server to get started
# NOTE: the original used "sleep 1.5s"; the "s" suffix is a GNU extension and
# fails on the macOS/BSD sleep this script explicitly supports. A bare
# fractional value works on both.
sleep 1.5

echo -e "\t[${GREEN}server root directory${NC}]: $(pwd)"

# utilize directory traversal to get files outside the working directory
# trigger directory traversal issues: send a request to retrieve the confidential file outside the working directory
$NODE attack.js

# kill the vulnerable npm package's process
kill -9 $vulnpid
def find_prime_factors(num):
    """Return the prime factorization of num as a list in ascending order.

    Repeated factors appear once per multiplicity, e.g. 90 -> [2, 3, 3, 5].
    Inputs < 2 yield an empty list, matching the original behavior.

    Uses trial division only up to sqrt(num) — O(sqrt(n)) instead of the
    original's O(n) scan over every candidate up to num.
    """
    factors = []
    i = 2
    # Divide out each factor completely before advancing; any i that divides
    # num here is necessarily prime, since all smaller primes were removed.
    while i * i <= num:
        while num % i == 0:
            factors.append(i)
            num //= i
        i += 1
    # Whatever remains above 1 is a single prime factor larger than sqrt(num).
    if num > 1:
        factors.append(num)
    return factors


prime_factors = find_prime_factors(90)
print(prime_factors)
def levelOrder(root):
    """Breadth-first traversal that groups node values by depth.

    Returns a list of lists: element i contains the values of all nodes at
    depth i, in left-to-right order. An empty tree yields [].
    """
    if not root:
        return []

    levels = []
    frontier = [root]
    while frontier:
        # Record this depth's values, then build the next frontier from the
        # non-None children, preserving left-to-right order.
        levels.append([node.val for node in frontier])
        frontier = [child
                    for node in frontier
                    for child in (node.left, node.right)
                    if child]
    return levels
def sum_n(n):
    """Return the sum of the integers 1 through n.

    Uses the closed-form Gauss formula n*(n+1)//2 instead of the original
    O(n) loop. For n < 1 the original loop body never ran and the function
    returned 0; the guard preserves that behavior exactly.
    """
    if n < 1:
        return 0
    return n * (n + 1) // 2


# Test
n = 8
print(sum_n(n))  # Output 36
-- Return the single row with the largest "accumulation" value.
-- NOTE(review): "table" is a reserved keyword in most SQL dialects; this
-- statement almost certainly needs the real table name (or identifier
-- quoting) before it will run — confirm against the schema.
SELECT * FROM table ORDER BY accumulation DESC LIMIT 1;
import net.runelite.mapping.Export;
import net.runelite.mapping.ObfuscatedName;
import net.runelite.mapping.ObfuscatedSignature;

// Deobfuscated game-client class; the RuneLite mapping annotations record the
// original obfuscated names/signatures and the "// L:" comments the original
// bytecode line numbers. The static members grouped here are unrelated
// utilities placed in one class by the obfuscator.
@ObfuscatedName("km")
public class class303 {
	// Socket used by the network resource cache.
	@ObfuscatedName("f")
	@ObfuscatedSignature(
		descriptor = "Lli;"
	)
	@Export("NetCache_socket")
	public static AbstractSocket NetCache_socket;

	// Stores the archive that VarcInt definitions are loaded from.
	@ObfuscatedName("f")
	@ObfuscatedSignature(
		descriptor = "(Lir;I)V",
		garbageValue = "-1939913696"
	)
	public static void method5526(AbstractArchive var0) {
		VarcInt.VarcInt_archive = var0; // L: 17
	} // L: 18

	// Returns the VarpDefinition for the given id, decoding it from archive
	// group 16 on first access and memoizing it in VarpDefinition_cached.
	@ObfuscatedName("o")
	@ObfuscatedSignature(
		descriptor = "(II)Lje;",
		garbageValue = "1697768546"
	)
	@Export("VarpDefinition_get")
	public static VarpDefinition VarpDefinition_get(int var0) {
		VarpDefinition var1 = (VarpDefinition)VarpDefinition.VarpDefinition_cached.get((long)var0); // L: 23
		if (var1 != null) { // L: 24
			return var1;
		} else {
			byte[] var2 = VarpDefinition.VarpDefinition_archive.takeFile(16, var0); // L: 25
			var1 = new VarpDefinition(); // L: 26
			if (var2 != null) { // L: 27
				var1.decode(new Buffer(var2));
			}

			// An empty (undecoded) definition is cached too, so a missing file
			// is only looked up once.
			VarpDefinition.VarpDefinition_cached.put(var1, (long)var0); // L: 28
			return var1; // L: 29
		}
	}

	// Decodes var2 bytes starting at var1 as Windows-1252 text: NUL bytes are
	// skipped, 0x80-0x9F are mapped through cp1252AsciiExtension (falling back
	// to '?' for unmapped slots), everything else passes through as Latin-1.
	@ObfuscatedName("b")
	@ObfuscatedSignature(
		descriptor = "([BIIB)Ljava/lang/String;",
		garbageValue = "-109"
	)
	@Export("decodeStringCp1252")
	public static String decodeStringCp1252(byte[] var0, int var1, int var2) {
		char[] var3 = new char[var2]; // L: 105
		int var4 = 0; // L: 106

		for (int var5 = 0; var5 < var2; ++var5) { // L: 107
			int var6 = var0[var5 + var1] & 255; // L: 108
			if (var6 != 0) { // L: 109
				if (var6 >= 128 && var6 < 160) { // L: 110
					char var7 = class300.cp1252AsciiExtension[var6 - 128]; // L: 111
					if (var7 == 0) { // L: 112
						var7 = '?';
					}

					var6 = var7; // L: 113
				}

				var3[var4++] = (char)var6; // L: 115
			}
		}

		return new String(var3, 0, var4); // L: 117
	}

	// Draws a vertical scrollbar at (var0, var1): 16px arrow sprites at top
	// and bottom, a filled track between them, and a proportional thumb
	// (minimum height 8px) positioned by scroll offset var2 within viewport
	// height var3 and content height var4.
	@ObfuscatedName("ix")
	@ObfuscatedSignature(
		descriptor = "(IIIIII)V",
		garbageValue = "-1225560249"
	)
	@Export("drawScrollBar")
	static final void drawScrollBar(int var0, int var1, int var2, int var3, int var4) {
		NPCComposition.scrollBarSprites[0].drawAt(var0, var1); // L: 10063
		NPCComposition.scrollBarSprites[1].drawAt(var0, var3 + var1 - 16); // L: 10064
		Rasterizer2D.Rasterizer2D_fillRectangle(var0, var1 + 16, 16, var3 - 32, Client.field665); // L: 10065
		int var5 = var3 * (var3 - 32) / var4; // L: 10066
		if (var5 < 8) { // L: 10067
			var5 = 8;
		}

		int var6 = (var3 - 32 - var5) * var2 / (var4 - var3); // L: 10068
		Rasterizer2D.Rasterizer2D_fillRectangle(var0, var6 + var1 + 16, 16, var5, Client.field666); // L: 10069
		Rasterizer2D.Rasterizer2D_drawVerticalLine(var0, var6 + var1 + 16, var5, Client.field668); // L: 10070
		Rasterizer2D.Rasterizer2D_drawVerticalLine(var0 + 1, var6 + var1 + 16, var5, Client.field668); // L: 10071
		Rasterizer2D.Rasterizer2D_drawHorizontalLine(var0, var6 + var1 + 16, 16, Client.field668); // L: 10072
		Rasterizer2D.Rasterizer2D_drawHorizontalLine(var0, var6 + var1 + 17, 16, Client.field668); // L: 10073
		Rasterizer2D.Rasterizer2D_drawVerticalLine(var0 + 15, var6 + var1 + 16, var5, Client.field667); // L: 10074
		Rasterizer2D.Rasterizer2D_drawVerticalLine(var0 + 14, var6 + var1 + 17, var5 - 1, Client.field667); // L: 10075
		Rasterizer2D.Rasterizer2D_drawHorizontalLine(var0, var6 + var5 + var1 + 15, 16, Client.field667); // L: 10076
		Rasterizer2D.Rasterizer2D_drawHorizontalLine(var0 + 1, var5 + var6 + var1 + 14, 15, Client.field667); // L: 10077
	} // L: 10078
}
<filename>client-js/constants.js
// Base URL of the local development API's version endpoint.
// NOTE(review): the WS_ prefix suggests WebSocket, but this is a plain
// http:// REST URL — confirm the intended naming.
const WS_REST_URL_LOCAL = 'http://localhost:3000/api/v1/version';
#!/usr/bin/env bash
# Provision PHP QA tooling for a Vagrant box: install Composer globally,
# pull in code-quality tools, and hand the composer home to the vagrant user.

# Abort on the first failing command. The original silently continued past
# failures, so e.g. a failed installer download would leave a broken
# half-provisioned box.
set -e

# Download and run the Composer installer, then put composer on the PATH.
curl -sS https://getcomposer.org/installer | php
mv composer.phar /usr/local/bin/composer

# Globally install the PHP code-quality tools.
composer global require 'fabpot/php-cs-fixer=*' 'instaclick/php-code-sniffer=*' 'sensiolabs/security-checker=*'

# The provisioner runs as root; move the composer home to the vagrant user.
mv /root/.composer /home/vagrant/.composer
""" Develop a program for comparing two datasets for discrepancies """ # The datasets dataset_1 = [[1, 2, 3, 4, 5], [2, 3, 4, 5, 6], [3, 4, 5, 6, 7], ... ] dataset_2 = [[2, 3, 4, 5, 6], [3, 4, 5, 6, 7], [4, 5, 6, 7, 8], ... ] def compare_datasets(dataset_1, dataset_2): # Initialize the differences list differences = [] # Iterate over the datasets for row_1, row_2 in zip(dataset_1, dataset_2): # Compare row in both datasets for col_1, col_2 in zip(row_1, row_2): if col_1 != col_2: # Store the difference differences.append((col_1, col_2)) # Return the differences return differences
package thread.condition.consumer;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;

/**
 * Consumer thread that repeatedly pops items from a shared {@link Queue},
 * waiting on a {@link Condition} while the queue is empty and signalling
 * after each consumption.
 */
public class Consumer extends Thread {

    private Lock lock;
    // Pause between consumption attempts, in seconds.
    private int timeToProcessWithSeconds = 1;
    private Queue queue;
    private Condition condition;

    public Consumer(Lock lock, Queue queue) {
        this.lock = lock;
        this.queue = queue;
        // NOTE(review): this Condition is created per Consumer instance. For
        // await()/signalAll() to pair up with a producer, both sides must
        // share the SAME Condition object — confirm the producer uses this
        // one rather than creating its own from the shared lock.
        this.condition = lock.newCondition();
    }

    @Override
    public void run() {
        while (true) {
            // Acquire the lock BEFORE entering the try block: in the original,
            // lock.lock() sat inside the try, so a failure to acquire would
            // still run the finally clause and call unlock() on a lock this
            // thread never held, throwing IllegalMonitorStateException.
            lock.lock();
            try {
                // Standard condition-wait loop: re-check the predicate after
                // every wakeup to guard against spurious wakeups.
                while (queue.blank()) {
                    condition.await();
                }

                System.out.println(Thread.currentThread() + " is consuming " + queue.pop());

                condition.signalAll();
            } catch (InterruptedException e) {
                // Restore the interrupt flag (the original swallowed it), so
                // the interruption remains observable to this thread.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            } finally {
                lock.unlock();
            }

            sleepInterval();
        }
    }

    /** Sleeps for the configured interval between consumption attempts. */
    private void sleepInterval() {
        try {
            Thread.sleep(TimeUnit.SECONDS.toMillis(timeToProcessWithSeconds));
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    public Lock getLock() {
        return lock;
    }

    public void setLock(Lock lock) {
        this.lock = lock;
    }
}
<filename>lib/cbor/test/test_aws_cbor_alloc.c
/*
 * Amazon FreeRTOS CBOR Library V1.0.0
 * Copyright (C) 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in
 * the Software without restriction, including without limitation the rights to
 * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 * the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * http://aws.amazon.com/freertos
 * http://www.FreeRTOS.org
 */

/* Unity tests for CBOR_ReallocImpl, the library's realloc implementation
 * built on pxCBOR_malloc/pxCBOR_free. */

#include "aws_cbor_alloc.h"
#include "unity_fixture.h"
#include <string.h>

TEST_GROUP(aws_cbor_alloc);

TEST_SETUP(aws_cbor_alloc)
{
}

TEST_TEAR_DOWN(aws_cbor_alloc)
{
}

TEST_GROUP_RUNNER(aws_cbor_alloc)
{
    RUN_TEST_CASE(aws_cbor_alloc, ReallocImpl_returns_new_pointer);
    RUN_TEST_CASE(aws_cbor_alloc, ReallocImpl_copies_content_from_old_location);
    RUN_TEST_CASE(
        aws_cbor_alloc, ReallocImpl_returns_null_when_allocation_fails);
}

/* Growing an allocation must hand back a different pointer. */
TEST(aws_cbor_alloc, ReallocImpl_returns_new_pointer)
{
    char *malloced_ptr  = pxCBOR_malloc(10);
    char *realloced_ptr = CBOR_ReallocImpl(malloced_ptr, 15);
    TEST_ASSERT_NOT_EQUAL(malloced_ptr, realloced_ptr);
    pxCBOR_free(realloced_ptr);
}

/* The bytes at the old location must be preserved in the new allocation. */
TEST(aws_cbor_alloc, ReallocImpl_copies_content_from_old_location)
{
    char  expected[]    = "Original string";
    char *malloced_copy = pxCBOR_malloc(sizeof(expected));
    strncpy(malloced_copy, expected, sizeof(expected));
    char *realloced_copy =
        CBOR_ReallocImpl(malloced_copy, sizeof(expected) * 3 / 2);
    TEST_ASSERT_EQUAL_STRING(expected, realloced_copy);
    pxCBOR_free(realloced_copy);
}

/* When the underlying malloc fails, ReallocImpl returns NULL (and the
 * original allocation is still valid, so it is freed here). */
TEST(aws_cbor_alloc, ReallocImpl_returns_null_when_allocation_fails)
{
    char *malloced_ptr = pxCBOR_malloc(10);
    UnityMalloc_MakeMallocFailAfterCount(0);
    char *realloced_ptr = CBOR_ReallocImpl(malloced_ptr, 15);
    pxCBOR_free(malloced_ptr);
    TEST_ASSERT_EQUAL_PTR(NULL, realloced_ptr);
}
// Raspberry Pi presence-aware light controller.
//
// Watches a door sensor and a motion sensor via GPIO and drives a Tuya smart
// light: opening the door after sunset turns the light on; closing the door
// arms a countdown that turns the light off unless motion is seen first.
import { Gpio } from 'onoff';
import * as SunCalc from 'suncalc';
import { clearTimeout } from 'timers';
import * as TuyaDevice from 'tuyapi';
import * as winston from 'winston';

type Binary = 0 | 1;

// Flips a GPIO level (used because the door sensor reads active-low here).
function invert(value: Binary) {
  return value ? 0 : 1;
}

const LATITUDE = 51.5; // London latitude
const LONGITUDE = -0.1; // London longitude

const TUYA_ID = process.env.TUYA_ID;
const TUYA_KEY = process.env.TUYA_KEY;
const IP = process.env.IP;

if (!TUYA_ID || !TUYA_KEY) {
  throw new Error('You need to provide Tuya auth info!');
}

// How long after the door closes to wait for motion before switching off.
const TIMER_DURATION_S = parseInt(process.env.TIMER_DURATION || '300', 10);
const TIMER_DURATION_MS = TIMER_DURATION_S * 1000;
// Grace period after arming the timer during which motion is ignored
// (the act of closing the door itself triggers the motion sensor).
const TIMER_ACTIVATION_DELAY_S = parseInt(
  process.env.TIMER_ACTIVATION_DELAY || '5',
  10,
);
const TIMER_ACTIVATION_DELAY_MS = TIMER_ACTIVATION_DELAY_S * 1000;

// GPIO numbers are overridable via the environment; defaults are BCM pins.
const envPins = [
  ['DOOR_PIN', process.env.DOOR_PIN || '17'],
  ['DOOR_LED_PIN', process.env.DOOR_LED_PIN || '27'],
  ['MOTION_PIN', process.env.MOTION_PIN || '23'],
  ['MOTION_LED_PIN', process.env.MOTION_LED_PIN || '22'],
  ['TIMER_LED_PIN', process.env.TIMER_LED_PIN || '18'],
];

const pins = envPins.map(([pinName, pin]) => {
  const parsed = parseInt(pin, 10);
  if (isNaN(parsed)) {
    throw new Error(
      `Error while parsing ${pinName} value: ${pin}.
The value needs to be a proper GPIO number`,
    );
  }
  return parsed;
});

const [
  DOOR_PIN,
  DOOR_LED_PIN,
  MOTION_PIN,
  MOTION_LED_PIN,
  TIMER_LED_PIN,
] = pins;

interface IState {
  door: Binary; // 1 = open (sensor value is inverted on read)
  motion: Binary; // raw motion sensor level
  lastTimerCreation: Date; // when the switch-off countdown was last armed
  motionTimer: NodeJS.Timer | null; // pending switch-off countdown, if armed
}

const state: IState = {
  door: 0,
  motion: 0,
  lastTimerCreation: new Date(0),
  motionTimer: null,
};

// True between this evening's sunset and tomorrow's sunrise (approximately:
// compares against today's SunCalc times for the configured coordinates).
function isAfterSunset() {
  const now = new Date();
  const times = SunCalc.getTimes(now, LATITUDE, LONGITUDE);
  return now >= times.sunsetStart || now <= times.sunriseEnd;
}

// Runs `action` up to `times` times. Resolves with the attempt index on
// success and with null after exhausting retries — it never rejects, so
// callers do not need a try/catch around it.
async function retry<T>(
  actionName: string,
  action: () => Promise<T>,
  times: number,
) {
  let retried;
  for (retried = 0; retried < times; retried++) {
    if (retried > 0) {
      winston.warn(
        `Action "${actionName}" failed! Retrying ${retried}th time...`,
      );
    }
    try {
      await action();
      winston.info(`Action "${actionName}" succeeded!`);
      return retried;
    } catch {
      // just advance
    }
  }
  winston.error(`Action "${actionName}" failed`);
  return null;
}

async function main() {
  const gpios = [
    new Gpio(DOOR_PIN, 'in', 'both'),
    new Gpio(MOTION_PIN, 'in', 'both'),
    new Gpio(DOOR_LED_PIN, 'out'),
    new Gpio(MOTION_LED_PIN, 'out'),
    new Gpio(TIMER_LED_PIN, 'out'),
  ];
  const [door, motion, doorLed, motionLed, timerLed] = gpios;
  const tuya = new TuyaDevice({
    id: TUYA_ID,
    key: TUYA_KEY,
    ip: IP,
  });
  door.watch(async (err, value) => {
    if (err) {
      winston.error('Error while reading door pin:', err);
      return;
    }
    // Door sensor is wired active-low: a low GPIO level means "open".
    state.door = invert(value);
    winston.info(state.door ? 'Door opened!' : 'Door closed!');
    doorLed.writeSync(state.door);
    // Any door transition cancels a pending switch-off countdown.
    if (state.motionTimer) {
      clearTimeout(state.motionTimer);
      state.motionTimer = null;
      timerLed.writeSync(0);
    }
    if (!state.door) {
      // Door closed: arm the countdown; if nobody moves before it fires,
      // assume the room is empty and switch the light off (after sunset).
      timerLed.writeSync(1);
      state.lastTimerCreation = new Date();
      state.motionTimer = setTimeout(async () => {
        state.motionTimer = null;
        timerLed.writeSync(0);
        winston.info('No presence detected');
        if (isAfterSunset()) {
          winston.info('Is after sunset so light likely on, turning light off');
          await retry('Set false on Tuya', () => tuya.set({ set: false }), 3);
        }
      }, TIMER_DURATION_MS);
    } else {
      if (isAfterSunset()) {
        winston.info('Is after sunset, turning light on');
        // NOTE(review): retry() resolves with null instead of rejecting, so
        // this catch branch is effectively unreachable — confirm intent.
        try {
          await retry('Set true on Tuya', () => tuya.set({ set: true }), 3);
        } catch (e) {
          winston.error('Connection error to Tuya occured');
        }
      }
    }
  });
  motion.watch((err, value) => {
    if (err) {
      winston.error('Error while reading motion pin:', err);
      return;
    }
    state.motion = value;
    winston.info(state.motion ? 'Motion appeared!' : 'Motion disappeared!');
    motionLed.writeSync(value);
    if (state.motion && state.motionTimer) {
      // Ignore motion in the first few seconds after arming — closing the
      // door itself is what tripped the sensor.
      if (
        new Date().getTime() - state.lastTimerCreation.getTime() <
        TIMER_ACTIVATION_DELAY_MS
      ) {
        winston.info('Motion probably due to door closing, disregarding');
        return;
      }
      // Someone is still in the room: keep the light on, disarm countdown.
      winston.info('Presence detected, keeping light on');
      clearTimeout(state.motionTimer);
      state.motionTimer = null;
      timerLed.writeSync(0);
    }
  });
  // Release the exported GPIOs on shutdown so the pins are reusable.
  function unexport() {
    for (const gpio of gpios) {
      gpio.unexport();
    }
  }
  process.on('SIGINT', unexport);
  process.on('SIGTERM', unexport);
}
main();
package elasta.composer.state.handlers.impl;

import elasta.authorization.Authorizer;
import elasta.commons.Utils;
import elasta.composer.Events;
import elasta.composer.Msg;
import elasta.composer.model.response.builder.AuthorizationErrorModelBuilder;
import elasta.composer.state.handlers.AuthorizeStateHandler;
import elasta.core.flow.Flow;
import elasta.core.flow.StateTrigger;
import elasta.core.promise.intfs.Promise;
import io.vertx.core.json.JsonObject;

import java.util.Objects;

/**
 * Flow state handler that authorizes an incoming message for a fixed action.
 * Fires {@code Events.next} when the user is allowed and
 * {@code Events.authorizationError} (with an error model as the body) otherwise.
 * <p>
 * Created by sohan on 6/30/2017.
 */
final public class AuthorizeStateHandlerImpl implements AuthorizeStateHandler<Object, Object> {
    final Authorizer authorizer;
    final String action;
    final AuthorizationErrorModelBuilder authorizationErrorModelBuilder;

    public AuthorizeStateHandlerImpl(Authorizer authorizer, String action, AuthorizationErrorModelBuilder authorizationErrorModelBuilder) {
        // requireNonNull returns its argument, so validation and assignment fuse.
        this.authorizer = Objects.requireNonNull(authorizer);
        this.action = Objects.requireNonNull(action);
        this.authorizationErrorModelBuilder = Objects.requireNonNull(authorizationErrorModelBuilder);
    }

    @Override
    public Promise<StateTrigger<Msg<Object>>> handle(Msg<Object> msg) throws Throwable {
        // Ask the authorizer whether this user may perform the configured action
        // on the message body, then translate the boolean into a flow trigger.
        final Authorizer.AuthorizeParams params = Authorizer.AuthorizeParams.builder()
            .action(action)
            .userId(msg.userId())
            .request(msg.body())
            .build();

        return authorizer
            .authorize(params)
            .map(isAuthorized -> Utils.not(isAuthorized)
                ? Flow.trigger(
                    Events.authorizationError,
                    msg.withBody(
                        authorizationErrorModelBuilder.build(
                            AuthorizationErrorModelBuilder.BuildParams.builder().build()
                        )
                    )
                )
                : Flow.trigger(Events.next, msg));
    }
}
package gui;

import java.awt.Color;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.Point;
import java.awt.event.KeyEvent;
import java.util.ArrayList;

import main.Game;

/**
 * Base class for in-game screens: paints an optional background (solid color
 * and/or image) plus a list of buttons, and routes input events. Subclasses
 * override the empty hooks ({@link #update()}, {@link #buttonClicked(Button)},
 * {@link #keyTyped(KeyEvent)}, {@link #addButtons()}, ...) as needed.
 */
public class Gui {
    public final Game game;
    public ArrayList<Button> buttons = new ArrayList<Button>();
    public Image backGroundImage;
    public Color backGroundColor;

    public Gui(Game theGame, Image backGround, Color backGroundCol) {
        game = theGame;
        backGroundImage = backGround;
        backGroundColor = backGroundCol;
        addButtons();
    }

    public Gui(Image backGroundImage, Color backGroundColor) {
        this(Game.game, backGroundImage, backGroundColor);
    }

    public Gui(Image backGround) {
        this(backGround, null);
    }

    public Gui(Color backGround) {
        this(null, backGround);
    }

    /** Per-frame hook; no-op in the base class. */
    public void update() {}

    /** Paints color fill first, then the image on top, then the buttons. */
    public void paint(Graphics g) {
        if (backGroundColor != null) {
            g.setColor(backGroundColor);
            g.fillRect(0, 0, game.getWidth(), game.getHeight());
        }
        if (backGroundImage != null) {
            g.drawImage(backGroundImage, 0, 0, game.getWidth(), game.getHeight(), null);
        }
        paintButtons(g);
    }

    /** Notifies every button whose bounds contain the click point. */
    public void click(Point mouse) {
        for (Button button : buttons) {
            if (button.isHit(mouse)) {
                buttonClicked(button);
            }
        }
    }

    /** Right-click hook; no-op in the base class. */
    public void rightClick(Point mouse) {}

    /** Keyboard hook; no-op in the base class. */
    public void keyTyped(KeyEvent event) {}

    /** Called by {@link #click(Point)} for each hit button; no-op here. */
    public void buttonClicked(Button button) {}

    /** Subclasses populate {@link #buttons} here; called from the constructor. */
    public void addButtons() {}

    protected void paintButtons(Graphics g) {
        for (Button button : buttons) {
            button.paint(g);
        }
    }
}
#!/bin/bash
# Prints, as JSON, how many commits are on <secondaryBranch> but not on
# <baseBranch> (both resolved against the remote-tracking branches).
#
# Usage: ./script.sh <baseBranch> <secondaryBranch>
set -eou pipefail

baseBranch=$1
secondaryBranch=$2

# Resolve each name to a remote-tracking ref. A plain `grep "$name"` would
# match every branch containing the name as a substring (e.g. "main" matches
# "origin/main-backup") and hand git a multi-word garbage ref, so anchor the
# match to the end of the ref and keep only the first hit. xargs trims the
# leading whitespace `git branch -r` prints.
baseBranchPath=$(git branch -r | grep -E "/${baseBranch}\$" | head -n 1 | xargs)
secondaryBranchPath=$(git branch -r | grep -E "/${secondaryBranch}\$" | head -n 1 | xargs)

# rev-list --count counts the commits directly instead of formatting log
# lines and piping through wc (which can emit padded output on some systems).
commitDiffCount=$(git rev-list --count "$secondaryBranchPath" ^"$baseBranchPath")

echo '{"commitDiffCount": "'"$commitDiffCount"'"}'
#!/bin/bash
# Launches the `stream` benchmark binary one directory up with a fixed
# memory-system configuration. Each variable below maps 1:1 onto a CLI flag;
# the flag semantics belong to the simulator and are inferred from the flag
# letters — confirm against the binary's own help output.

STREAM=../stream

BANKS=8          # -b: banks (presumably per vault — TODO confirm)
CAPACITY=2       # -c: capacity
LINKS=4          # -l: links
BSIZE=64         # -m: block size
QDEPTH=64        # -q: queue depth
XDEPTH=128       # -x: transaction depth
VAULTS=16        # -v: vaults
#NRQSTS=134217728
NRQSTS=33554432  # -N: number of requests (alternative sizes left commented)
#NRQSTS=4096
DRAMS=20         # -d: DRAM count
THREADS=16       # -T: threads
SIMD=8           # -s: SIMD width

# Echo the exact command line before running it, for log traceability.
echo "Executing : $STREAM -b $BANKS -c $CAPACITY -l $LINKS -m $BSIZE -n 1 -q $QDEPTH -x $XDEPTH\
 -d $DRAMS -v $VAULTS -N $NRQSTS -T $THREADS -s $SIMD"

$STREAM -b $BANKS -c $CAPACITY -l $LINKS -m $BSIZE -n 1 -q $QDEPTH -x $XDEPTH\
 -d $DRAMS -v $VAULTS -N $NRQSTS -T $THREADS -s $SIMD
from django.shortcuts import render, redirect
from .models import Post, Likes, Profile, Comment
from django.utils import timezone
from .forms import ProfileUpdateForm, CommentForm, PostForm
from django.contrib.auth.decorators import login_required
from datetime import datetime

# Create your views here.


@login_required(login_url='/accounts/login/')
def post(request):
    """Render the feed: all posts created up to now, newest first."""
    timeline = (
        Post.objects.all()
        .filter(created_date__lte=timezone.now())
        .order_by('-created_date')
    )
    return render(request, 'insta/post.html', {'posts': timeline})


def profile(request):
    """Show the current user's profile with their posts; handle edits."""
    # Called for its side effect: guarantees a Profile row exists before
    # request.user.profile is dereferenced below.
    Profile.objects.get_or_create(user=request.user)
    user = request.user
    posts = Post.objects.filter(author=request.user).order_by('-created_date')
    if request.method != 'POST':
        p_form = ProfileUpdateForm(instance=request.user.profile)
    else:
        p_form = ProfileUpdateForm(
            request.POST, request.FILES, instance=request.user.profile
        )
        if p_form.is_valid():
            p_form.save()
            return redirect('profile')
        # Invalid submission falls through and re-renders the bound form.
    context = {'p_form': p_form, "posts": posts, 'user': user}
    return render(request, 'insta/profile.html', context)


def like(request, post_id):
    """Toggle the current user's like on a post and adjust its counter."""
    user = request.user
    target = Post.objects.get(id=post_id)
    already_liked = Likes.objects.filter(user=user, post=target).count()
    if already_liked:
        Likes.objects.filter(user=user, post=target).delete()
        target.likes = target.likes - 1
    else:
        Likes.objects.create(user=user, post=target)
        target.likes = target.likes + 1
    target.save()
    return redirect('post')


def new_comment(request, pk):
    """Attach a new comment to the post identified by pk."""
    target = Post.objects.get(pk=pk)
    if request.method != 'POST':
        return render(request, 'insta/comment.html', {"form": CommentForm()})
    form = CommentForm(request.POST)
    if form.is_valid():
        entry = Comment(
            post=target,
            name=request.user.username,
            comment=form.cleaned_data['comment'],
            date=datetime.now(),
        )
        entry.save()
        return redirect('post')
    # Invalid submission: re-render with the bound form so errors show.
    return render(request, 'insta/comment.html', {"form": form})
def new_post(request):
    """Create a post authored by the current user from the submitted form."""
    if request.method != 'POST':
        return render(request, 'insta/create_post.html', {"form": PostForm()})
    # request.FILES is passed because the form uploads an image file.
    form = PostForm(request.POST, request.FILES)
    if form.is_valid():
        pending = form.save(commit=False)
        pending.author = request.user
        pending.save()
        return redirect('post')
    # Invalid submission: re-render with the bound form so errors show.
    return render(request, 'insta/create_post.html', {"form": form})


def search_results(request):
    """Search posts by name using the ?photos= query parameter."""
    search_term = request.GET.get("photos") if 'photos' in request.GET else None
    if search_term:
        matches = Post.search_by_name(search_term)
        context = {"message": f"{search_term}", "photos": matches}
        return render(request, 'insta/search.html', context)
    return render(
        request,
        'insta/search.html',
        {"message": "You haven't searched for any term"},
    )


def delete_post(request, pk):
    """Confirm (GET) and perform (POST) deletion of a post."""
    target = Post.objects.get(pk=pk)
    if request.method == 'POST':
        target.delete()
        return redirect('post')
    return render(request, 'insta/delete_post.html', {})


def update_post(request, pk):
    """Edit an existing post; the form is pre-filled with current values."""
    post = Post.objects.get(id=pk)
    form = PostForm(instance=post)
    if request.method == 'POST':
        # request.FILES is passed because the form may replace the image.
        form = PostForm(request.POST, request.FILES, instance=post)
        if form.is_valid():
            # is_valid() already copied the cleaned data onto `post`
            # (ModelForm._post_clean), so saving the instance persists it.
            post.save()
            return redirect('post')
    return render(request, 'insta/update_post.html', {'form': form})
import requests


def submit_code(url: str, problem_id: str, language: str, source: str,
                language_code: dict) -> None:
    """POST a solution to an online judge.

    Args:
        url: Submission endpoint.
        problem_id: Judge-local problem identifier (sent as ``localid``).
        language: Human-readable language name; must be a key of
            ``language_code``.
        source: The program text to submit.
        language_code: Maps language names to the judge's numeric/state codes.

    Raises:
        KeyError: If ``language`` is not present in ``language_code``.

    Note: the response is intentionally ignored, matching the original
    fire-and-forget contract.
    """
    data = {
        'localid': problem_id,
        'language': language_code[language],
        'code': source
    }
    # Use the session as a context manager so its connection pool is closed
    # deterministically instead of leaking until garbage collection.
    with requests.Session() as session:
        session.post(url, data=data)
<reponame>joeledwardson/JSON-node-editor import Rete from "rete"; let s = Rete.Socket("pls"); let b = typeof s; let a = 1;
/**
 * Copyright 2015 IBM Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.ibm.bluelistproxy.internal;

import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.logging.Logger;

import org.apache.commons.codec.binary.Hex;

import com.cloudant.client.api.Database;
import com.ibm.json.java.JSONArray;
import com.ibm.json.java.JSONObject;

/**
 * Processes all database permissions related requests.
 * This includes set permissions (create _users database, add _users database entry for user,
 * update database permissions for user) and remove permissions (update database permissions for user
 * and remove _users database entry for user).
 */
public class PermissionsHandler {
	private static final String CLASS_NAME = PermissionsHandler.class.getName();
	private static final Logger logger = Logger.getLogger(CLASS_NAME);

	/**
	 * Update permissions for user for this database:
	 * - if _users database does not exist create it and the view
	 * - if entry for user does not exist in _users database, add user
	 * - if user does not have admins permissions for database, add the permissions
	 *
	 * @param userName The user name
	 * @param databaseName The database name
	 * @throws BlueListProxyException Thrown if the permissions could not be set.
	 */
	public static void setPermissions(String userName, String databaseName)
	throws BlueListProxyException {
		final String METHOD_NAME = "setPermissions";
		logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName});

		// Start with creating the _users database and proceed to updating the permissions
		createUsersDatabase(userName, databaseName);

		logger.exiting(CLASS_NAME, METHOD_NAME);
	}

	/**
	 * Remove permissions for user for this database:
	 * - clear admins permissions for database
	 * - remove user from _users database
	 * - clear user from user credentials cache
	 *
	 * @param userName The user name
	 * @param databaseName The database name
	 * @throws BlueListProxyException Thrown if the permissions could not be removed.
	 */
	public static void removePermissions(String userName, String databaseName)
	throws BlueListProxyException {
		final String METHOD_NAME = "removePermissions";
		logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName});

		// Both cleanup steps always run; the first failure is remembered and
		// re-thrown after the cache has been cleared.
		BlueListProxyException rootException = null;

		try {
			// Remove this user's access to the database
			removeUserAccess(userName, databaseName);
		} catch(BlueListProxyException blpe) {
			logger.fine("An error occurred removing access to database("+databaseName+") for user ("+userName+"); response error = " + blpe.getMessage());
			rootException = blpe;
		}

		try {
			// Delete the user from the user's database
			deleteUserEntry(userName, databaseName);
		} catch(BlueListProxyException blpe) {
			logger.fine("An error occurred removing user ("+userName+") from the _users database; response error = " + blpe.getMessage());
			if (rootException == null) rootException = blpe;
		}

		// Clear the user credentials from the cache
		KeyPassManager.getInstance().clearCredentials(userName);

		// If there was an exception, throw the original exception now
		if (rootException != null) {
			throw rootException;
		}

		logger.exiting(CLASS_NAME, METHOD_NAME);
	}

	/**
	 * Create the _users database.
	 *
	 * @param userName The user name
	 * @param databaseName The database name
	 * @throws BlueListProxyException Thrown if the _users database could not be created.
	 */
	public static void createUsersDatabase(String userName, String databaseName)
	throws BlueListProxyException {
		final String METHOD_NAME = "createUsersDatabase";
		logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName});

		// See if the _users database currently exists
		boolean dbExists = BlueListProxyUtils.dbExists("_users");

		// If the database does not exist, create it and add the view
		if ( !dbExists ) {

			// Create the _users database
			try {
				logger.fine("Creating database: _users");
				KeyPassManager.getInstance().getAdminCloudantClient().createDB("_users");
				logger.fine("Created _users database; adding view");

				// Create the _users database view
				createUsersDatabaseView(userName, databaseName);
			} catch(BlueListProxyException blpe) {
				logger.severe("Failed to create database (_users); error = " + blpe.getMessage());
				throw blpe;
			} catch(Exception e) {
				logger.severe("Failed to create database (_users); error = " + e.getMessage());
				throw new BlueListProxyException("Failed to create database (_users)", e);
			}

		}

		// If the database does exist, add/update user
		else {

			// Add/update user
			boolean userDocExists = BlueListProxyUtils.dbDocExists("_users", "org.couchdb.user:" + userName);

			// If the _users document already exists, update permissions
			if (userDocExists) {
				logger.fine("User ("+userName+") _users database info exists; Updating access");
				addUserAccess(userName, databaseName);
			}

			// If the _users document does not exist, create it
			else {
				logger.fine("User ("+userName+") _users database info does not exist; Creating it and adding access");
				createUserEntry(userName, databaseName);
			}

		}

		logger.exiting(CLASS_NAME, METHOD_NAME);
	}

	/**
	 * Create the _users database view.
	 *
	 * @param userName The user name
	 * @param databaseName The database name
	 * @throws BlueListProxyException Thrown if the _users database view could not be created.
	 */
	public static void createUsersDatabaseView(String userName, String databaseName)
	throws BlueListProxyException {
		final String METHOD_NAME = "createUsersDatabaseView";
		logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName});

		final String VIEW_NAME = "_design/_imfdata_usersview";

		// Create the view
		try {

			// Create view with map function
			Map<String,Object> users = new HashMap<String,Object>(1);
			users.put("map", "function(doc) {\n  emit(doc._id, doc);\n}");
			Map<String,Object> views = new HashMap<String,Object>(1);
			views.put("users", users);
			Map<String,Object> viewddoc = new HashMap<String,Object>(2);
			viewddoc.put("_id", VIEW_NAME);
			viewddoc.put("views", views);

			logger.fine("Creating _users view: " + VIEW_NAME);
			Database db = KeyPassManager.getInstance().getAdminCloudantClient().database("_users", false);
			db.save(viewddoc);
			logger.fine("Created _users database view");

			// Create the user entry
			createUserEntry(userName, databaseName);

		} catch(BlueListProxyException blpe) {
			logger.severe("Failed to create users view for database (_users); error = " + blpe.getMessage());
			throw blpe;
		} catch(Exception e) {
			logger.severe("Failed to create users view for database (_users); error = " + e.getMessage());
			throw new BlueListProxyException("Failed to create users view for database (_users)", e);
		}

		logger.exiting(CLASS_NAME, METHOD_NAME);
	}

	/**
	 * Create _users database entry for specific user.
	 *
	 * @param userName The user name
	 * @param databaseName The database name
	 * @throws BlueListProxyException Thrown if the _users database entry could not be created.
	 */
	public static void createUserEntry(String userName, String databaseName)
	throws BlueListProxyException {
		final String METHOD_NAME = "createUserEntry";
		logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName});

		final String DOC_NAME = "org.couchdb.user:" + userName;

		// Generate password, salt, and encrypted password
		byte[] randomBytes = new byte[8];
		new Random().nextBytes(randomBytes);
		String password = Hex.encodeHexString(randomBytes);
		new Random().nextBytes(randomBytes);
		String salt = Hex.encodeHexString(randomBytes);
		String encryptedPass_hex;
		MessageDigest md;
		try{
			encryptedPass_hex = KeyPassManager.getInstance().getCryptoUtil().encrypt(password, salt);
			md = MessageDigest.getInstance("SHA1");
			md.update((password+salt).getBytes());
		} catch(NoSuchAlgorithmException nsae) {
			logger.severe("Exception caught generating password for user ("+userName+"); exception = " + nsae.getMessage());
			throw new BlueListProxyException("Exception caught generating password for user ("+userName+")", nsae);
		}
		byte [] password_sha = md.digest();
		String password_sha_hex = Hex.encodeHexString(password_sha);

		// Create request body
		JSONObject body_credentials = new JSONObject();
		body_credentials.put("_id", DOC_NAME);
		body_credentials.put("name", userName);
		// FIX: the stored password is the encrypted form computed above; the
		// previous source contained a redacted placeholder here, leaving
		// encryptedPass_hex unused and the file uncompilable.
		body_credentials.put("password", encryptedPass_hex);
		body_credentials.put("salt", salt);
		body_credentials.put("password_sha", password_sha_hex);
		body_credentials.put("roles", new JSONArray());
		body_credentials.put("type", "user");

		// Create the _users document
		try {

			// Create _users document
			logger.fine("Creating _users document: " + DOC_NAME);
			Database db = KeyPassManager.getInstance().getAdminCloudantClient().database("_users", false);
			db.save(body_credentials);

			// Add permissions
			logger.fine("_users database document for user ("+userName+") created; Adding access.");
			addUserAccess(userName, databaseName);

		} catch(BlueListProxyException blpe) {
			logger.severe("Failed to create database document (_users/org.couchdb.user:"+userName+"); error = " + blpe.getMessage());
			throw blpe;
		} catch(Exception e) {
			logger.severe("Failed to create database document (_users/org.couchdb.user:"+userName+"); error = " + e.getMessage());
			throw new BlueListProxyException("Failed to create database document (_users/org.couchdb.user:"+userName+")", e);
		}

		logger.exiting(CLASS_NAME, METHOD_NAME);
	}

	/**
	 * Update user permissions for database.
	 *
	 * @param userName The user name
	 * @param databaseName The database name
	 * @throws BlueListProxyException Thrown if the database permissions could not be updated.
	 */
	public static void addUserAccess(String userName, String databaseName)
	throws BlueListProxyException {
		final String METHOD_NAME = "addUserAccess";
		logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName});

		final String DOC_NAME = "_security";

		// Obtain the database security document; it should be there
		InputStream jsonStream = null;
		try {

			// Get security document
			logger.fine("Retrieving _security document for database " + databaseName);
			Database db = KeyPassManager.getInstance().getAdminCloudantClient().database(databaseName, false);
			jsonStream = db.find(DOC_NAME);
			JSONObject jsonBody = JSONObject.parse(jsonStream);

			// Determine if the security document for this database already exists
			boolean existingSecurityDoc = false;
			if (jsonBody.containsKey("couchdb_auth_only")  ||
				jsonBody.containsKey("admins")  ||
				jsonBody.containsKey("members")) {
				existingSecurityDoc = true;
			}

			// Update security admins info for this user to give admins access
			boolean existingMember = false;
			jsonBody.put("couchdb_auth_only", true);
			JSONObject admins = (JSONObject)jsonBody.get("admins");
			if (admins == null){
				admins = new JSONObject();
				JSONArray namesArray = new JSONArray();
				namesArray.add(userName);
				admins.put("names", namesArray);
				jsonBody.put("admins", admins);
			} else {
				JSONArray namesArray = (JSONArray)admins.get("names");
				if (namesArray != null) {
					existingMember = namesArray.contains(userName);
					if (existingMember == false) {
						namesArray.add(userName);
					}
				} else {
					namesArray = new JSONArray();
					namesArray.add(userName);
					admins.put("names", namesArray);
				}
			}

			// If member does not already exist, then update the permissions
			if (existingMember == false) {
				JSONObject members = (JSONObject)jsonBody.get("members");

				// Update security members info for this user to give admins access
				if (members == null  &&  existingSecurityDoc == false) {
					JSONArray namesArray = new JSONArray();
					JSONArray rolesArray = new JSONArray();
					rolesArray.add("_admin");
					members = new JSONObject();
					members.put("names", namesArray);
					members.put("roles", rolesArray);
					jsonBody.put("members", members);
				}

				Object idObj = jsonBody.get("_id");
				if (!(idObj instanceof String)) jsonBody.put("_id", DOC_NAME);

				// Store the updated document
				logger.fine("Setting permissions for database: " + databaseName);
				if (existingSecurityDoc) db.update(jsonBody);
				else db.save(jsonBody);
				logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") set successfully.");
			}

			// User already exists as member
			else {
				logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") already exist; nothing more to do");
			}

		} catch(BlueListProxyException blpe) {
			logger.severe("Failed to set permissions for database ("+databaseName+"); error = " + blpe.getMessage());
			throw blpe;
		} catch(Exception e) {
			logger.severe("Failed to set permissions for database ("+databaseName+"); error = " + e.getMessage());
			throw new BlueListProxyException("Failed to set database "+databaseName+" permissions", e);
		} finally {
			if (jsonStream != null) {
				try { jsonStream.close(); } catch(Exception e) {}
			}
		}

		logger.exiting(CLASS_NAME, METHOD_NAME);
	}

	/**
	 * Delete _users database entry for specific user.
	 *
	 * @param userName The user name
	 * @param databaseName The database name
	 * @throws BlueListProxyException Thrown if the _users database entry could not be deleted.
	 */
	public static void deleteUserEntry(String userName, String databaseName)
	throws BlueListProxyException {
		final String METHOD_NAME = "deleteUserEntry";
		logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName});

		final String DOC_NAME = "org.couchdb.user:" + userName;

		// Get user credentials
		InputStream jsonStream = null;
		try {

			// See if _users document exists
			if ( BlueListProxyUtils.dbDocExists("_users", DOC_NAME) ) {
				logger.fine("Retrieving _users document: " + DOC_NAME);
				Database db = KeyPassManager.getInstance().getAdminCloudantClient().database("_users", false);
				jsonStream = db.find(DOC_NAME);
				JSONObject jsonBody = JSONObject.parse(jsonStream);
				logger.fine("_users database document for user ("+userName+") retrieved; removing it.");
				db.remove(jsonBody);
				logger.fine("_users database entry for user ("+userName+") deleted");
			}

			else {
				logger.fine("_users database entry for user ("+userName+") does not exist; nothing more to do");
			}

		} catch(BlueListProxyException blpe) {
			logger.severe("Failed to delete _users database document ("+DOC_NAME+"); error = " + blpe.getMessage());
			throw blpe;
		} catch(Exception e) {
			logger.severe("Failed to delete _users database document ("+DOC_NAME+"); error = " + e.getMessage());
			throw new BlueListProxyException("Failed to delete _users database document ("+DOC_NAME+")", e);
		} finally {
			if (jsonStream != null) {
				try { jsonStream.close(); } catch(Exception e) {}
			}
		}

		logger.exiting(CLASS_NAME, METHOD_NAME);
	}

	/**
	 * Remove user permissions for database.
	 *
	 * @param userName The user name
	 * @param databaseName The database name
	 * @throws BlueListProxyException Thrown if the database permissions could not be removed.
	 */
	public static void removeUserAccess(String userName, String databaseName)
	throws BlueListProxyException {
		final String METHOD_NAME = "removeUserAccess";
		logger.entering(CLASS_NAME, METHOD_NAME, new Object[] {userName, databaseName});

		final String DOC_NAME = "_security";

		// Obtain the database security document; it should be there
		InputStream jsonStream = null;
		try {

			// See if _security document exists
			if ( BlueListProxyUtils.dbDocExists(databaseName, DOC_NAME) ) {

				// Get security document
				logger.fine("Retrieving _security document for database " + databaseName);
				Database db = KeyPassManager.getInstance().getAdminCloudantClient().database(databaseName, false);
				jsonStream = db.find(DOC_NAME);
				JSONObject jsonBody = JSONObject.parse(jsonStream);

				// Update security admins info for this user to give admins access
				boolean existingMember = false;
				JSONObject admins = (JSONObject)jsonBody.get("admins");
				if (admins != null){
					JSONArray namesArray = (JSONArray)admins.get("names");
					existingMember = namesArray.contains(userName);
					if (existingMember == true) {
						namesArray.remove(userName);
					}
				}

				// If member exists, then update the permissions
				if (existingMember == true) {

					Object idObj = jsonBody.get("_id");
					if (!(idObj instanceof String)) jsonBody.put("_id", DOC_NAME);
					Object revObj = jsonBody.get("_rev");

					// Store the updated document
					logger.fine("Updating _security document for database: " + databaseName);
					if (revObj instanceof String) db.update(jsonBody);
					else db.save(jsonBody);
					logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") updated successfully.");
				}

				// User already does not exist as member
				else {
					logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") does not exist; nothing more to do");
				}

			}

			// User already does not exist as member
			else {
				logger.fine("Permissions for user ("+userName+") and database ("+databaseName+") do not exist; nothing more to do");
			}

		} catch(BlueListProxyException blpe) {
			logger.severe("Failed to remove permissions for database ("+databaseName+"); error = " + blpe.getMessage());
			throw blpe;
		} catch(Exception e) {
			logger.severe("Failed to remove permissions for database ("+databaseName+"); error = " + e.getMessage());
			throw new BlueListProxyException("Failed to remove database "+databaseName+" permissions", e);
		} finally {
			if (jsonStream != null) {
				try { jsonStream.close(); } catch(Exception e) {}
			}
		}

		logger.exiting(CLASS_NAME, METHOD_NAME);
	}
}
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available

WAITFORIT_cmdname=${0##*/}

# Prints to stderr unless --quiet was given.
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }

usage()
{
    cat << USAGE >&2
Usage:
    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}

# Polls the host:port once per second until it accepts a TCP connection.
# Uses nc on busybox systems, bash's /dev/tcp elsewhere. Returns the last
# probe's exit status (0 once the port is reachable).
wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}

# Re-invokes this script under `timeout` as a --child so that a timeout can
# interrupt the polling loop while SIGINT still works.
wait_for_wrapper()
{
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}

# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        # host:port shorthand — split on the colon.
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        # Internal flag: this invocation is the timeout-wrapped child.
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        # Everything after -- is the command to exec once the port is up.
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

# Defaults: 60s timeout, non-strict, parent invocation, verbose.
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-60}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}

# Check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)

WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    # Check if busybox timeout uses -t flag
    # (recent Alpine versions don't support -t anymore)
    if timeout &>/dev/stdout | grep -q -e '-t '; then
        WAITFORIT_BUSYTIMEFLAG="-t"
    fi
else
    WAITFORIT_ISBUSY=0
fi

# Child invocations poll directly; parents go through the timeout wrapper
# (unless timeout is 0, which means "wait forever").
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi

# Run the trailing command (if any); in strict mode only on success.
if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi
#!/bin/bash
# SLURM job: train WGAN (scaled, gaussian filter, disc ratio 0.25) on Compute Canada.
#SBATCH --account=def-lombaert
#SBATCH --gres=gpu:v100l:1              # Number of GPUs (per node)
#SBATCH --cpus-per-task=8               # CPU cores/threads
#SBATCH --mem=46G                       # memory (per node)
#SBATCH --time=05-00:00                 # time (DD-HH:MM)
#SBATCH --mail-user=pierre-luc.delisle@live.com
# NOTE: --mail-type=ALL already covers BEGIN/END/FAIL/REQUEUE; the individual
# entries are kept for readability of which events matter.
#SBATCH --mail-type=BEGIN
#SBATCH --mail-type=END
#SBATCH --mail-type=FAIL
#SBATCH --mail-type=REQUEUE
#SBATCH --mail-type=ALL
# FIX: a duplicate "--output=%x-%j.out" directive was removed — SLURM only
# honors the last --output, so the earlier one was silently ignored.
#SBATCH --output=WGAN_canada_scaled_gaussian_filter_disc_ratio_0_25.out
#SBATCH --job-name=WGAN_canada_scaled_gaussian_filter_disc_ratio_0_25

# Log the GPU visible to the job, activate the project virtualenv, and train.
nvidia-smi
source /home/pld2602/venv/bin/activate
CUDA_VISIBLE_DEVICES=0 python /project/def-lombaert/pld2602/code/deepNormalizev5/main_cc.py --config=/project/def-lombaert/pld2602/code/deepNormalizev5/deepNormalize/experiments/experiments_canada/WGAN_scaled/disc_ratio_gaussian_filter/config_disc_ratio_0.25.yaml
# Use these commands to generate the LAMMPS input script and data file
# Create LAMMPS input files this way:

cd moltemplate_files

# NOTE: It would be much simpler to create the polymer in this example by
# adding some .move() and .rot() commands directly into the "system.lt" file.
# But instead, I wanted to demonstrate how to use the "genpoly_lt.py" tool
# to create a polymer that follows the shape of an arbitrary curve.
#
# I use the "genpoly_lt.py" to generate a moltemplate file (.LT file)
# describing the polymer you want to simulate. You must specify the
# name of the moltemplate object which will be used as the monomer subunit
# in the final polymer (eg. "DNAMonomer"), as well as any bonds (or angles
# or dihedrals) linking one monomer to the next monomer, as well as the
# helical twist angle (if applicable). All of the details regarding
# the behaviour of the polymer are contained in the "dnamonomer.lt" file
# which defines the "DNAMonomer" object, as well as a link to the file
# which defines "DNAForceField" (which DNAMonomer uses). For details, see:
# https://github.com/jewettaij/moltemplate/blob/master/doc/doc_genpoly_lt.md

genpoly_lt.py -circular yes \
              -helix 102.7797 \
              -bond Backbone a a \
              -bond Backbone b b \
              -dihedral MajorGroove b b a a 0 1 1 2 \
              -dihedral Torsion a a b b 1 0 0 1 \
              -polymer-name 'DNAPolymer' \
              -inherits 'DNAForceField' \
              -monomer-name 'DNAMonomer' \
              -header 'import "dna_monomer.lt"' \
              -padding 20,20,60 \
              < init_crds_polymer_backbone.raw > dna_polymer.lt

# (Note: The "-helix" parameter represents the twist-per-monomer (ฮ”ฯ†) at the
#  start of the simulation. Example "genpoly_lt.py -helix 102.857 ...")

# Add twist motors.
# If I only wanted to add a single twist motor, it would be easy to manually
# add some extra lines to the "dna_polymer.lt" file. However here I wrote
# this script to make it possible to put many, many twist motors along the
# polymer. To do that, I created a new script named "genpoly_modify_lt.py"
# which generates many modifications to a polymer at user-defined locations.
# It's overkill for what we need in this example since we only use 1 motor.

# "genpoly_modify_lt.py" needs to know the length of the polymer we created.
# Count the number of non-blank, non-comment lines in the coordinate file:
N_MONOMERS=`awk '{if ((NF>0) && (substr($1,1,1)!="#")) {n++}} END{print n}' < init_crds_polymer_backbone.raw`

echo '' >> dna_polymer.lt
echo 'import "dna_twist_motor.lt"' >> dna_polymer.lt
echo '' >> dna_polymer.lt

# Now run the script that makes (potentially)
# many modifications to the polymer.
# In our case it will modify the polymer to add a twist motor.
# The position of that motor is in the file "mod_locations.txt"
# (which currently only has one entry). For more details, see:
# https://github.com/jewettaij/moltemplate/blob/master/doc/doc_genpoly_modify_lt.md

genpoly_modify_lt.py \
    -circular yes \
    -polymer-name DNAPolymer \
    -length $N_MONOMERS \
    -locations mod_locations.txt \
    -bond Motor a a 1 2 \
    -bond Disable b b 1 2 \
    -dihedral MajorGrooveML b b a a 0 1 1 2 \
    -dihedral MajorGrooveMR a a b b 1 2 2 3 \
    -dihedral Disable a a b b 2 1 1 2 \
    -dihedral Disable b b a a 1 2 2 3 \
    -dihedral Disable b a a b 1 1 2 2 \
    -set-atoms 6 "system.in.types" "type" b b a a b b 0 1 1 2 2 3 Bm2 Bm Am Am Bm Bm2 \
    -fix-nbody 4 "fix_twist_torque_5_kcal_per_radian.in" fxTw all twist torque b a a b 1 1 2 2 "5.0" \
    -fix-nbody 4 "fix_twist_constant_rate.in" fxTw all twist constrain b a a b 1 1 2 2 "5.0 100 8640" \
    >> dna_polymer.lt

# NOTE: To force the motor to twist at a constant rate (instead of applying
# a constant torque), use this instead.
#
# -fix-nbody 4 "fix_twist_rate_5.0_100_14400.in" fxTw all twist torque b a a b 1 1 2 2 "5.0 100 14400"
# (WARNING: Simulation can become numerically unstable if twisted too far.)

# ---------- OPTIONAL -------------------------------------
# --- Delete the bond interfering with the twist motor. ---

echo '' >> dna_polymer.lt
echo 'DNAPolymer {' >> dna_polymer.lt

# Note: We already disabled this bond using "-bond Disable b b 1 2"
# (by setting its spring constant to 0). However you actually have
# to delete that bond if you want it not to appear in visualization
# software tools like VMD (which was my goal). To delete the bond, you
# have to know its $bond: name. Bonds generated by genpoly_lt.py
# have names like "genp_bondi_j", where "j" indicates the monomer (from
# mod_locations.txt) and "i" represents the bond-per-monomer (2 here).

awk -v N=$N_MONOMERS '{print "  delete genp_bond2_"1+($1+1)%N}' < mod_locations.txt >> dna_polymer.lt
awk -v N=$N_MONOMERS '{print "  delete gpm_bond2_"1+($1)%N}' < mod_locations.txt >> dna_polymer.lt

echo '}' >> dna_polymer.lt
# ---------- OPTIONAL -------------------------------------

# Then run moltemplate on "system.lt".
# (Note: "system.lt" contains a reference to the polymer file we created.)

moltemplate.sh system.lt

# This will generate various files with names ending in *.in* and *.data.
# These files are the input files directly read by LAMMPS.  Move them to
# the parent directory (or wherever you plan to run the simulation).
mv -f system.in* fix_twist*.in system.data ../

# Optional:
# The "./output_ttree/" directory is full of temporary files generated by
# moltemplate. They can be useful for debugging, but are usually thrown away.
rm -rf output_ttree/

# Optional: Delete other temporary files:
rm -f init_crds_polymer_backbone.raw
rm -f dna_polymer.lt

cd ../
#!/bin/bash
# Run a factored MAPlan search on the benchmark in $1 with private-id limit $2,
# then convert the produced traces into one JSON file per agent under traces/.
#   $1 — benchmark directory (path containing "benchmarks")
#   $2 — value for --max-private-ids-per-state (also names the output subdir)

# Clean leftovers from previous runs.
rm out*
rm *.out

echo "translate"
cd $1
pwd
../../translate-factored.sh
cd ../../..

echo "run MAPlan"
~/workspace-git/maplan-dev/bin/search --ma-factor-dir -p $1 -s astar -H lm-cut --print-heur-init --max-private-ids-per-state $2 --track-states -o plan.out > std.out

# Mirror the benchmark path under traces/, one subfolder per $2 value.
OUT_FOLDER="${1/benchmarks/traces}/${2}"
mkdir -p $OUT_FOLDER

echo "analyze"
# Extract the tagged lines ("VAR:", "OP:", "STATE:") from the solver output.
cat std.out | grep "VAR" | sed 's/VAR://g' > $OUT_FOLDER/variables.out
cat std.out | grep "OP" | sed 's/OP://g' > $OUT_FOLDER/operators.out

rm $OUT_FOLDER/states.out
for outfile in out_*
do
    cat $outfile | grep "STATE" | sed 's/STATE://g'>> $OUT_FOLDER/states.out
done

# One out_* file exists per agent; agents are numbered 0..count.
filenamelist=(out_*)
count=${#filenamelist[@]}
count=$((count-1))
echo "agents 0 - $count"

for agent in `seq 0 $count`
do
    echo "AGENT $agent"
    OUT="$OUT_FOLDER/agent${agent}.json"
    echo "output=$OUT"
    # Build the JSON by hand: each section lists this agent's records,
    # comma-terminating all but the last line (head -n -1 / tail -n 1 split).
    echo $'{\n' > $OUT
    echo $'\"variables\":[' >> $OUT
    cat $OUT_FOLDER/variables.out | grep "\"agentID\":$agent" | head -n -1 > $OUT_FOLDER/temp
    while read LINE
    do
        echo "$LINE," >> $OUT
    done < $OUT_FOLDER/temp
    cat $OUT_FOLDER/variables.out | grep "\"agentID\":$agent" | tail -n 1 >> $OUT
    echo $'],\n' >> $OUT
    echo $'\"operators\":[' >> $OUT
    cat $OUT_FOLDER/operators.out | grep "\"agentID\":$agent" | head -n -1 > $OUT_FOLDER/temp
    while read LINE
    do
        echo "$LINE," >> $OUT
    done < $OUT_FOLDER/temp
    cat $OUT_FOLDER/operators.out | grep "\"agentID\":$agent" | tail -n 1 >> $OUT
    echo $'],\n' >> $OUT
    echo $'\"states\":[' >> $OUT
    cat $OUT_FOLDER/states.out | grep "\"agentID\":$agent" | head -n -1 > $OUT_FOLDER/temp
    while read LINE
    do
        echo "$LINE," >> $OUT
    done < $OUT_FOLDER/temp
    cat $OUT_FOLDER/states.out | grep "\"agentID\":$agent" | tail -n 1 >> $OUT
    echo $'],\n' >> $OUT
    echo $'\"plan\":[' >> $OUT
    #cat plan.out
    # Plan steps are shared by all agents; parentheses become JSON quotes.
    cat plan.out | tr '()' '\"\"' | head -n -1 > $OUT_FOLDER/temp
    while read LINE
    do
        echo "$LINE," >> $OUT
    done < $OUT_FOLDER/temp
    #cat $OUT_FOLDER/temp
    cat plan.out | tr '()' '\"\"' | tail -n 1 >> $OUT
    echo $']\n}' >> $OUT
    rm $OUT_FOLDER/temp
done
import UIKit

/// Shows the result of a six-sided dice roll and animates the label on each roll.
class ViewController: UIViewController {

    @IBOutlet weak var resultLabel: UILabel!

    // NOTE: the previous viewDidLoad override only discarded the result of the
    // side-effect-free generateDiceRoll(), so it was removed as dead code.

    override func viewDidAppear(_ animated: Bool) {
        // FIX: lifecycle callbacks must be forwarded to UIKit (super call was missing).
        super.viewDidAppear(animated)
        rollDice()
    }

    // MARK: - Action

    /// Rolls again whenever the user taps the roll button.
    @IBAction func rollButtonTapped(_ sender: UIButton) {
        rollDice()
    }

    // MARK: - Helper

    /// Returns a uniformly random dice face in 1...6.
    private func generateDiceRoll() -> Int {
        return Int.random(in: 1...6)
    }

    /// Rolls the dice, displays the result, and plays a two-step rotation
    /// animation: a quarter turn out (ease-out) then back to identity (ease-in).
    private func rollDice() {
        let diceRollResult = generateDiceRoll()
        resultLabel.text = "\(diceRollResult)"

        let animationDuration = 0.3
        UIView.animate(withDuration: animationDuration, delay: 0, options: .curveEaseOut, animations: {
            self.resultLabel.transform = CGAffineTransform(rotationAngle: .pi/2)
        }, completion: { finished in
            UIView.animate(withDuration: animationDuration, delay: 0, options: .curveEaseIn, animations: {
                self.resultLabel.transform = .identity
            }, completion: nil)
        })
    }
}
import classNames from "classnames";
import {ButtonHTMLAttributes, DetailedHTMLProps, FC} from "react";

import "./TextInputAdornment.scss";

/**
 * Props for TextInputAdornment: every native <button> prop, plus a required
 * `title` used both as the accessible name (aria-label) and the tooltip.
 */
export interface TextInputAdornmentProps extends DetailedHTMLProps<ButtonHTMLAttributes<HTMLButtonElement>, HTMLButtonElement> {
    title: string;
}

/**
 * Small button rendered as an adornment of a text input.
 * Renders type="button" so it never submits an enclosing form; extra class
 * names are merged with the base "text-input-adornment" class.
 */
export const TextInputAdornment: FC<TextInputAdornmentProps> = ({title, className, children, ...other}) => (
    <button
        type="button"
        className={classNames("text-input-adornment", className)}
        aria-label={title}
        title={title}
        {...other}>
        {children}
    </button>
);
package com.xiaochen.mobilesafe.receiver; import com.xiaochen.mobilesafe.utlis.ConstantValue; import com.xiaochen.mobilesafe.utlis.SpUtils; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.telephony.SmsManager; import android.telephony.TelephonyManager; public class BootReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { //่Žทๅ–้‡ๅฏๅŽ็š„SIMๅกๅบๅˆ—ๅท TelephonyManager manager = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE); String simSerialNumber = manager.getSimSerialNumber(); //่Žทๅ–ๅญ˜ๅœจspไธญ็š„SIMๅกๅบๅˆ—ๅท String sim_number = SpUtils.getStringSp(context, ConstantValue.SIM_NUMBER, ""); //ไธค่€…่ฟ›่กŒๆฏ”่พƒ ๅฆ‚ๆžœไธไธ€ๆ ทๅฐฑๅ‘้€ๆŠฅ่ญฆ็Ÿญไฟก if(!simSerialNumber.equals(sim_number)){ //่Žทๅ–spไธญ็š„ๅฎ‰ๅ…จๅท็  String phone_number = SpUtils.getStringSp(context, ConstantValue.CONTACT_PHONE_NUMBER, null); SmsManager smsManager = SmsManager.getDefault(); smsManager.sendTextMessage(phone_number, null, "ๆฃ€ๆต‹ๅˆฐๆ‚จๅฆไธ€ไธชๆ‰‹ๆœบๆ›ดๆขSIMๅก", null, null); } } }
import tkinter as tk
from datetime import datetime

# Build the main window holding a single label that shows the wall-clock time.
root = tk.Tk()
root.title("Current Time")

# Label that displays the current time, refreshed once per second.
time_label = tk.Label(root)
time_label.pack()


def update_time():
    """Refresh the label with the current HH:MM:SS and reschedule in 1 s."""
    now_text = datetime.now().strftime("%H:%M:%S")
    time_label.config(text=now_text)
    # Re-arm the 1000 ms timer so the clock keeps ticking.
    time_label.after(1000, update_time)


# Kick off the first refresh, then hand control to Tk's event loop.
update_time()
root.mainloop()
<reponame>nikolay-is/BlogSystem-Node const Article = require('mongoose').model('Article') const Comment = require('mongoose').model('Comment') module.exports = { addPost: (req, res) => { let articleId = req.params.id let userId = req.user._id let reqComment = req.body let articleObj = { article: articleId, comment: reqComment.comment, author: userId } Comment.create(articleObj) .then(comment => { Article .findById(articleId) .then(article => { article.comments.push(comment._id) article .save() .then(() => { res.redirect(`/article/details/${articleId}`) }) }) }) }, editGet: (req, res) => { let id = req.params.id Comment.findById(id) .populate('author') .populate('article') .then(comment => { if (!req.user.isInRole('Admin') && !req.user.isAuthor(comment.article)) { res.redirect(`/article/details/${comment.article.id}`) } else { res.render('comment/edit', { comment: comment }) } }) }, editPost: (req, res) => { let id = req.params.id let reqComment = req.body Comment.findById(id) .populate('author') .then(comment => { if (!req.user.isInRole('Admin') && !req.user.isAuthor(comment.article)) { res.redirect('/users/login') } else { comment.comment = reqComment.comment comment .save() .then(() => { res.redirect(`/article/details/${comment.article}`) }) } }) }, deleteGet: (req, res) => { let id = req.params.id Comment.findById(id) .populate('author') .populate('article') .then(comment => { res.render('comment/delete', { comment: comment }) }) }, deletePost: (req, res) => { let id = req.params.id Comment.findByIdAndRemove(id) .then(comment => { Article.findByIdAndUpdate(comment.article, { $pull: {'comments': {id: comment._id}} }) .then(article => { res.redirect(`/article/details/${article._id}`) }) }) } }
#! /bin/sh --

# Copyright (c) 2013, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#   * Redistributions of source code must retain the above copyright
#     notice, this list of conditions and the following disclaimer.
#   * Redistributions in binary form must reproduce the above
#     copyright notice, this list of conditions and the following
#     disclaimer in the documentation and/or other materials provided
#     with the distribution.
#   * Neither the name of The Linux Foundation nor the names of its
#     contributors may be used to endorse or promote products derived
#     from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
# ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Wrapper: run iommutest.sh from this script's own directory, forwarding all
# arguments. Aborts on any failure (set -e).
set -e
# FIX: quote "$(dirname "$0")" and "$@" so paths and arguments containing
# whitespace survive intact (the unquoted forms underwent word splitting).
cd "$(dirname "$0")" && exec ./iommutest.sh "$@"
/* Copyright 2016-Present Couchbase, Inc. Use of this software is governed by the Business Source License included in the file licenses/BSL-Couchbase.txt. As of the Change Date specified in that file, in accordance with the Business Source License, use of this software will be governed by the Apache License, Version 2.0, included in the file licenses/APL2.txt. */ var searchData= [ ['shared_20keys',['Shared Keys',['../group___f_l_shared_keys.html',1,'']]], ['slices',['Slices',['../group___f_l_slice.html',1,'']]], ['save',['save',['../struct_c4_doc_put_request.html#ad2fc7c1ff26b20c387bdb6c7ee8f24e8',1,'C4DocPutRequest']]], ['scheme',['scheme',['../struct_c4_address.html#ae4e57a571b29486d9629b1209a8a374d',1,'C4Address']]], ['selectedrev',['selectedRev',['../struct_c4_document.html#a786f2fafe6221939119f97d91587e96d',1,'C4Document']]], ['sequence',['sequence',['../struct_c4_document_info.html#afabd4bff5f16dad26ec8ffa0889642b1',1,'C4DocumentInfo::sequence()'],['../struct_c4_document.html#ab6d7b7f75bf2b684b5bfd325f9a41037',1,'C4Document::sequence()'],['../struct_c4_revision.html#a62bb8ff181a95903dd74a99c53e476b6',1,'C4Revision::sequence()'],['../struct_c4_collection_change.html#a095ce84e4d3e22e0778bcc40f6d2b980',1,'C4CollectionChange::sequence()'],['../struct_c4_document_ended.html#a0c5a6f068cbca1186d88d26d9b662d2c',1,'C4DocumentEnded::sequence()']]], ['size',['size',['../struct_f_l_slice.html#a8fb4c8187bdd5a8d645809d5d0a72c90',1,'FLSlice::size()'],['../struct_f_l_slice_result.html#a622d35e8c1031bb88d0fe89cfaca12e1',1,'FLSliceResult::size()']]], ['socketfactory',['socketFactory',['../struct_c4_replicator_parameters.html#af962ba02b775d3bb0e2a52b6ce3345b3',1,'C4ReplicatorParameters']]], ['sqlitedomain',['SQLiteDomain',['../group___errors.html#ggabb4faf1bef1be6c39f496e0ffdf8cdd0ab14b819a5f23eedbfb517cd9ba1419d5',1,'c4Error.h']]], ['start',['start',['../struct_c4_full_text_match.html#ac2514656fd432684c916955e8385c731',1,'C4FullTextMatch']]], 
['stepover',['STEPOVER',['../_base_8h.html#aba2fee9d572b22e3ab4c2683d02bae91',1,'Base.h']]], ['stopwords',['stopWords',['../struct_c4_index_options.html#abebf72c4814f14eba754882429d2d661',1,'C4IndexOptions']]], ['storageengine',['storageEngine',['../struct_c4_database_config.html#a312a3524d12a83cf51e44b28d54a9a32',1,'C4DatabaseConfig']]] ];
// Default configuration for the Rating component.
const ratingConfig = {
  // Skip focus styling when focus was gained via touch or mouse click
  // (per the option name; keyboard focus is presumably unaffected — TODO confirm).
  preventFocusStyleForTouchAndClick: true,
};

export default ratingConfig;
package mpp.workers;

import java.awt.Image;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.ExecutionException;

import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.imageio.ImageIO;
import javax.swing.SwingWorker;

import mpp.imagenes.ImageManager;
import mpp.imagenes.db.ImageDB;

/**
 * SwingWorker that loads an image file off the Event Dispatch Thread, scales
 * it to the requested size and, on success, registers the file in the
 * {@link ImageDB} and hands the thumbnail to the {@link ImageManager} view.
 */
public class LoadScaledImageWorker extends SwingWorker<Image, Void> {
	private ImageManager view;  // UI component that receives the thumbnail (updated on the EDT)
	private ImageDB db;         // database the loaded file is registered in
	private File f;             // image file to load
	private int width;          // target thumbnail width in pixels
	private int height;         // target thumbnail height in pixels

	public LoadScaledImageWorker(ImageManager view, ImageDB db, File f, int width, int height) {
		this.view = view;
		this.db = db;
		this.f = f;
		this.width = width;
		this.height = height;
	}

	/**
	 * Background thread: load and scale the image.
	 * Returns {@code null} when the file cannot be decoded as an image.
	 */
	@Override
	protected Image doInBackground() throws Exception {
		Image image = loadScaled(f, width, height);
		if (image == null) {
			// report the failure (left as a TODO in the original: "reportar")
			return null;
		}
		return image;
	}

	/**
	 * Runs on the EDT after the background work finishes: on success, record
	 * the file in the database and display the thumbnail with a status message.
	 */
	@Override
	protected void done() {
		try {
			Image resultado = get();
			if(resultado != null) {
				db.addImage(f);
				view.addThumbnail(resultado, f);
				view.setInfo("Imagen "+f+" cargada\n", 100);
			}
		} catch (InterruptedException | ExecutionException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Reads {@code f} and returns a fast-scaled {@code w}x{@code h} instance,
	 * or {@code null} when ImageIO cannot decode the file.
	 */
	protected Image loadScaled(@Nonnull File f, @Nonnegative int w, @Nonnegative int h) throws IOException {
		System.out.println("Processing: " + f);
		BufferedImage img = ImageIO.read(f);
		if (img == null)
			return null;
		return img.getScaledInstance(w, h, Image.SCALE_FAST);
	}
}
# Build the reference documentation and publish it under both the concrete
# version (from version.txt) and the "latest" alias.
./gradlew clean asciidoctor

version=$(cat version.txt)
generated=docs/slack-spring-boot-docs/build/generated-docs/html5

# Copy the generated HTML into each destination, creating it first.
for dest in "build/docs/$version/reference" "build/docs/latest/reference"; do
    mkdir -p "$dest"
    cp -R "$generated/." "$dest"
done
import { renderHook } from '@testing-library/react-hooks';

import useDidUpdateEffect from '../index';

// Unit test for the useDidUpdateEffect hook.
describe('UseDidUpdateEffect tests:', () => {
  it('should use DidUpdateEffect', () => {
    // Mount the hook with an identity callback; result.current captures its return value.
    const { result } = renderHook(() => useDidUpdateEffect(f => f));
    // NOTE(review): this asserts the hook returns `true` on the initial render —
    // verify this matches the hook's actual contract (effects named "did update"
    // often skip the first render).
    expect(result.current).toBe(true);
  });
});
#!/bin/bash
# Container entrypoint: patch smb.conf from the environment, optionally append
# user-supplied shares, then run smbd in the foreground with its log tailed.

file=/etc/samba/smb.conf

# Workgroup comes from $WORKGROUP, defaulting to "workgroup".
workgroup="${WORKGROUP:-"workgroup"}"
sed -i 's|^\( *workgroup = \).*|\1'"$workgroup"'|' $file

# Append /shares.conf once: skip if any of its share names already exists in smb.conf.
if [ -f /shares.conf ]; then
	found=0
	for share in $(egrep -o "^\[(.*)\]" /shares.conf|sed 's/\[//;s/\]//'); do
		grep -iq "\[$share\]" $file
		if [ $? -eq 0 ]; then
			found=1
		fi
	done
	if [ $found -eq 0 ]; then
		cat /shares.conf >> $file
	fi
fi

# Ensure the log directory and a world-writable log file exist before tailing.
if [ ! -d /data/logs ]; then
	mkdir -p /data/logs
fi
LOGFILE="/data/logs/samba.log"
# NOTE(review): redundant — mkdir -p /data/logs above already created /data.
if [ ! -d /data ]; then
	mkdir -p /data
fi
if [ ! -f $LOGFILE ]; then
	touch $LOGFILE
	chmod 666 $LOGFILE
fi

# Stream the samba log to the container's stdout in the background.
exec tail -f $LOGFILE &

# Replace the shell with smbd running in the foreground.
exec smbd -F --no-process-group </dev/null

# NOTE(review): unreachable — the exec above replaces this shell, so the
# conventional `exec "$@"` entrypoint passthrough below can never run.
exec "$@"
#!/usr/bin/env bash
# Builds the native (C/C++) libraries component for the current target OS/arch.
# Shared build logic lives in eng/native/build-commons.sh; this script sets the
# defaults, adjusts CMake arguments per platform, and invokes build_native.

usage_list=("-outconfig: Configuration, typically a quadruplet such as 'netcoreapp5.0-Linux-Release-x64', used to name output directory.")
usage_list+=("-staticLibLink: Optional argument to statically link any native library.")

__scriptpath="$(cd "$(dirname "$0")"; pwd -P)"
__nativeroot="$__scriptpath"/Unix
__RepoRootDir="$(cd "$__scriptpath"/../../..; pwd -P)"
__artifactsDir="$__RepoRootDir/artifacts"

# Handles the arguments specific to this script; anything unrecognized is
# forwarded to build-commons.sh via __UnprocessedBuildArgs.
handle_arguments() {
    case "$1" in
        outconfig|-outconfig)
            __outConfig="$2"
            __ShiftArgs=1
            ;;

        staticliblink|-staticliblink)
            __StaticLibLink=1
            ;;

        *)
            __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
    esac
}

# Set the various build properties here so that CMake and MSBuild can pick them up
__BuildArch=x64
__TargetOS=Linux
__BuildType=Debug
__CMakeArgs=""
__Compiler=clang
__CompilerMajorVersion=
__CompilerMinorVersion=
__CrossBuild=0
__IsMSBuildOnNETCoreSupported=0
__PortableBuild=1
__RootBinDir="$__RepoRootDir/artifacts"
__SkipConfigure=0
__SkipGenerateVersion=0
__StaticLibLink=0
__UnprocessedBuildArgs=
__VerboseBuild=false

# build-commons.sh parses the command line (calling handle_arguments above)
# and may override the defaults set just before this point.
source "$__RepoRootDir"/eng/native/build-commons.sh

# Set cross build
if [[ "$__BuildArch" == wasm ]]; then
    if [[ -z "$EMSDK_PATH" ]]; then
        echo "Error: Should set EMSDK_PATH environment variable pointing to emsdk root."
        exit 1
    fi
    source "$EMSDK_PATH"/emsdk_env.sh
elif [[ "$__TargetOS" == iOS ]]; then
    # nothing to do here
    true
elif [[ "$__TargetOS" == Android && -z "$ROOTFS_DIR" ]]; then
    # nothing to do here
    true
else
    __CMakeArgs="-DFEATURE_DISTRO_AGNOSTIC_SSL=$__PortableBuild $__CMakeArgs"
    __CMakeArgs="-DCMAKE_STATIC_LIB_LINK=$__StaticLibLink $__CMakeArgs"

    if [[ "$__BuildArch" != x86 && "$__BuildArch" != x64 ]]; then
        __CrossBuild=1
        echo "Set CrossBuild for $__BuildArch build"
    fi
fi

if [[ "$__TargetOS" == OSX ]]; then
    # set default OSX deployment target
    __CMakeArgs="-DCMAKE_OSX_DEPLOYMENT_TARGET=10.13 $__CMakeArgs"
elif [[ "$__TargetOS" == Android && -z "$ROOTFS_DIR" ]]; then
    if [[ -z "$ANDROID_NDK_HOME" ]]; then
        echo "Error: You need to set the ANDROID_NDK_HOME environment variable pointing to the Android NDK root."
        exit 1
    fi

    # keep ANDROID_NATIVE_API_LEVEL in sync with src/mono/Directory.Build.props
    __CMakeArgs="-DCMAKE_TOOLCHAIN_FILE=$ANDROID_NDK_HOME/build/cmake/android.toolchain.cmake -DANDROID_STL=none -DANDROID_NATIVE_API_LEVEL=21 $__CMakeArgs"

    # workaround init-compiler.sh trying to detect clang, it's handled in android.toolchain.cmake already
    export CLR_CC=$(which false)
    export CLR_CXX=$(which false)

    if [[ "$__BuildArch" == x64 ]]; then
        __CMakeArgs="-DANDROID_ABI=x86_64 $__CMakeArgs"
    elif [[ "$__BuildArch" == x86 ]]; then
        __CMakeArgs="-DANDROID_ABI=x86 $__CMakeArgs"
    elif [[ "$__BuildArch" == arm64 ]]; then
        __CMakeArgs="-DANDROID_ABI=arm64-v8a $__CMakeArgs"
    elif [[ "$__BuildArch" == arm ]]; then
        __CMakeArgs="-DANDROID_ABI=armeabi-v7a $__CMakeArgs"
    else
        echo "Error: Unknown Android architecture $__BuildArch."
        exit 1
    fi
elif [[ "$__TargetOS" == iOS ]]; then
    __CMakeArgs="-DCMAKE_SYSTEM_NAME=iOS $__CMakeArgs"
    if [[ "$__BuildArch" == x64 ]]; then
        # set default iOS simulator deployment target (8.0 is the minimum supported by Xcode 11)
        # keep in sync with src/mono/Directory.Build.props
        __CMakeArgs="-DCMAKE_OSX_SYSROOT=iphonesimulator -DCMAKE_OSX_DEPLOYMENT_TARGET=8.0 -DCMAKE_OSX_ARCHITECTURES=\"x86_64\" $__CMakeArgs"
    elif [[ "$__BuildArch" == arm64 ]]; then
        # set default iOS device deployment target (7.0 is the minimum supported by Xcode 11)
        # keep in sync with src/mono/Directory.Build.props
        __CMakeArgs="-DCMAKE_OSX_SYSROOT=iphoneos -DCMAKE_OSX_DEPLOYMENT_TARGET=7.0 -DCMAKE_OSX_ARCHITECTURES=\"arm64\" $__CMakeArgs"
    elif [[ "$__BuildArch" == arm ]]; then
        # set default iOS device deployment target (7.0 is the minimum supported by Xcode 11)
        # keep in sync with src/mono/Directory.Build.props
        __CMakeArgs="-DCMAKE_OSX_SYSROOT=iphoneos -DCMAKE_OSX_DEPLOYMENT_TARGET=7.0 -DCMAKE_OSX_ARCHITECTURES=\"armv7;armv7s\" $__CMakeArgs"
    else
        echo "Error: Unknown iOS architecture $__BuildArch."
        exit 1
    fi
fi

# Set the remaining variables based upon the determined build configuration
__outConfig="${__outConfig:-"$__TargetOS-$__BuildArch-$__BuildType"}"
__IntermediatesDir="$__RootBinDir/obj/native/$__outConfig"
__BinDir="$__RootBinDir/bin/native/$__outConfig"

# Specify path to be set for CMAKE_INSTALL_PREFIX.
# This is where all built CoreClr libraries will copied to.
__CMakeBinDir="$__BinDir"
export __CMakeBinDir

# Make the directories necessary for build if they don't exist
setup_dirs

# Check prereqs.
check_prereqs

# Build the corefx native components.
build_native "$__BuildArch" "$__nativeroot" "$__nativeroot" "$__IntermediatesDir" "native libraries component"
// Abbreviations of French business legal forms (e.g. SARL, SAS, SCI),
// exported as a flat list of recognized entity-type tokens.
module.exports = [
  'EI',
  'EURL',
  'GIE',
  'SA',
  'SARL',
  'SAS',
  'SCOP',
  'SCA',
  'SCI',
  'SCS',
  'SEM',
  'SEP',
  'SNC',
  'SICAF',
  'SICAV',
];
#!/bin/sh
# Create a local kind cluster, label its workers, and install MetalLB so
# LoadBalancer services get addresses from the kind docker network.

kind create cluster --config cluster.yaml

# make worker roles
kubectl label node kind-worker node-role.kubernetes.io/worker=
kubectl label node kind-worker2 node-role.kubernetes.io/worker=
kubectl label node kind-worker3 node-role.kubernetes.io/worker=

# load balancer
kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.9.5/manifests/namespace.yaml
kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.9.5/manifests/metallb.yaml
## On first install only
kubectl create secret generic -n metallb-system memberlist --from-literal=secretkey="$(openssl rand -base64 128)"

# find cluster network
#kubectl get nodes -o wide
#NAME                 STATUS   ROLES    AGE   VERSION   INTERNAL-IP   EXTERNAL-IP   OS-IMAGE       KERNEL-VERSION      CONTAINER-RUNTIME
#kind-control-plane   Ready    master   20m   v1.18.2   172.18.0.2    <none>        Ubuntu 19.10   4.4.0-142-generic   containerd://1.3.3-14-g449e9269
#kind-worker          Ready    <none>   19m   v1.18.2   172.18.0.3    <none>        Ubuntu 19.10   4.4.0-142-generic   containerd://1.3.3-14-g449e9269
#
# cluster network is 172.18.0.0/16
# check 'addresses' in load-balancer.yaml

# MetalLB address-pool configuration (must match the cluster network above).
kubectl apply -f load-balancer.yaml

#helm install rkvd ../deploy/rkvd
import { SaveParticipant } from '@/domain/usecases'
import { DbSaveParticipant } from '@/data/usecases'
import { ParticipantsMongoRepository } from '@/infra/db/mongodb'

// Factory: builds the SaveParticipant usecase wired to its MongoDB
// repository implementation (clean-architecture composition root).
export const makeDbSaveParticipant = (): SaveParticipant => {
  const participantsMongoRepository = new ParticipantsMongoRepository()
  return new DbSaveParticipant(participantsMongoRepository)
}
// file: server.js const express = require('express'); const fs = require('fs'); const app = express(); const port = 3000; const mongoose = require('mongoose'); // connect to mongoDB mongoose.connect('mongodb://localhost:27017/books', {useNewUrlParser: true}); let db = mongoose.connection; // check connection db.once('open', () => { console.log('Connected to MongoDB'); }); // set up the database schema let bookSchema = new mongoose.Schema({ title: String, author: String, genre: String, }); let Book = mongoose.model('Book', bookSchema); // read the data from the JSON file let rawdata = fs.readFileSync('data.json'); let books = JSON.parse(rawdata); // add the books to the database Book.collection.insertMany(books, (err, docs) => { console.log(`${docs.insertedCount} books added to the database`); }); app.get('/books', (req, res) => { let query = {}; // parse the query parameters if (req.query.title) { query.title = req.query.title; } if (req.query.author) { query.author = req.query.author; } if (req.query.genre) { query.genre = req.query.genre; } // query the database Book.find(query, (err, books) => { if (err) { console.log(err); } else { res.json(books); } }); }); app.listen(port, () => { console.log(`Listening on port ${port}`); });
{
  "targets": [
    {
      "target_name": "addon",
      "dependencies": [
        "libwpa_supplicant"
      ],
      "include_dirs": [
        "./src/common",
        "<!(node -e \"require('nan')\")"
      ],
      "cflags": [
        "-O2"
      ],
      "sources": [
        "./src/WpaClientSocket.cpp"
      ]
    },
    {
      "target_name": "libwpa_supplicant",
      "type": "static_library",
      "defines": [
        "CONFIG_CTRL_IFACE",
        "CONFIG_CTRL_IFACE_UNIX"
      ],
      "include_dirs": [
        "./src/common",
        "./src/utils"
      ],
      "cflags": [
        "-Wall",
        "-Wextra"
      ],
      "sources": [
        "./src/common/wpa_ctrl.c",
        "./src/utils/os_unix.c"
      ]
    }
  ]
}
<gh_stars>0 #define HAS_VTK 1 #define DO_MEDIAN 1 #define DO_MEAN 2 #include "LaShellAtlas.h" #include <numeric> /* * Author: * Dr. <NAME> * Department of Biomedical Engineering, King's College London * Email: <EMAIL> 'dot' <EMAIL> * Copyright (c) 2017 * * This application constructs an atlas from a list of target shells, preferably with the same topology * The application requires two inputs, a source shell and a list of target shells. The target shells are used to * construct the atlas based on the toplogy of the source shell. The aggregated data (median/mean) from target is used * to compute the value at each vertex of the atlas. */ int main(int argc, char * argv[]) { char* input_f1, *output_f, *input_f2; bool foundArgs1 = false, foundArgs2 = false, foundArgs3 = false; int method = DO_MEDIAN, is_topology_equal = USE_DIRECT_COPY; if (argc >= 1) { for (int i = 1; i < argc; i++) { if (i + 1 != argc) { if (string(argv[i]) == "-i") { input_f1 = argv[i + 1]; foundArgs1 = true; } else if (string(argv[i]) == "-o") { output_f = argv[i + 1]; foundArgs2 = true; } else if (string(argv[i]) == "-t") { input_f2 = argv[i + 1]; foundArgs3 = true; } else if (string(argv[i]) == "-m") { method = atoi(argv[i + 1]); } else if (string(argv[i]) == "-e") { is_topology_equal = atoi(argv[i + 1]); } } } } if (!(foundArgs1 && foundArgs2 && foundArgs3)) { cerr << "Cheeck your parameters\n\nUsage:" "\nCalculates the atlas on source from a list of target shells, preferably same topology " "\nNote that multiple target shells can be specified with their filenames as a list within a txt file" "\nNote that by defalt the median displacement is computed\n" "\n(Mandatory)\n\t-i <source_mesh_vtk> \n\t-t <target mesh filenames as list txt>\n\t-o <output file>\n== Optional ==\n\t-m (1=mean, 2=median)" "\n\t-e (target shell topology: 1 - equal, 2 - not equal)" << endl; exit(1); } else { LaShell* source = new LaShell(input_f1); LaShell* la_out = new LaShell(); LaShellAtlas* algorithm = new LaShellAtlas(); 
algorithm->SetInputData(source); algorithm->SetInputMultipleTargets(input_f2); switch (method) { case DO_MEDIAN: algorithm->SetAggregateMethodToMedian(); break; case DO_MEAN: algorithm->SetAggregateMethodToMean(); break; } switch (is_topology_equal) { case USE_DIRECT_COPY: algorithm->SetAtlasConstructionToUseDirectCopy(); break; case USE_CLOSEST_POINT: algorithm->SetAtlasConstructionToUseClosestPoint(); break; } algorithm->Update(); la_out = algorithm->GetOutput(); la_out->ExportVTK(output_f); } }
// Custom element <prendus-research> built on Polymer.Element.
class PrendusResearch extends Polymer.Element {
    // Tag name used for registration.
    static get is() { return 'prendus-research'; }
}

// FIX: removed a stray unary "+" that preceded this call (a leftover diff
// artifact); it coerced the call's undefined result to NaN for no reason.
window.customElements.define(PrendusResearch.is, PrendusResearch);
#!/bin/sh
# Run the Gradle "bJ" task from the parent directory, then return to the
# directory we started in.

# FIX: "pwd = $PWD;" is not an assignment in sh — it ran the `pwd` command
# with arguments, left $pwd unset, and the final "cd $pwd" jumped to $HOME.
# POSIX assignments take no spaces around "=".
start_dir=$PWD

cd ..
./gradlew bJ
cd "$start_dir"
// Siesta BDD spec covering spy behavior: call tracking, execution strategies
// (callThrough/stub/callFake/throwError/returnValue), standalone spies, and
// automatic spy removal after a spec finishes.
StartTest(function(t) {
    t.testExtJS(function (t) {

        t.describe('Spy executing strategies', function (t) {
            var obj

            // Fresh target object per spec; setSomeProp mutates state and is chainable.
            t.beforeEach(function () {
                obj = {
                    someProp : null,

                    setSomeProp : function (value) { this.someProp = value; return this }
                }
            })

            t.it("Spy should track the calls to it", function (t) {
                t.spyOn(obj, 'setSomeProp')

                obj.setSomeProp()
                obj.setSomeProp(0, 1, 1)
                obj.setSomeProp(0, 1)

                t.expect(obj.setSomeProp).toHaveBeenCalled()
                t.expect(obj.setSomeProp).toHaveBeenCalledWith(0, 1, t.any(Number))

                // Default spy strategy stubs the original, so state is untouched.
                t.is(obj.someProp, null, "`someProp` hasn't change")

                t.isInstanceOf(obj.setSomeProp.and, Siesta.Test.BDD.Spy)

                t.is(obj.setSomeProp.calls.any(), true)
                t.is(obj.setSomeProp.calls.count(), 3)

                t.isDeeply(obj.setSomeProp.calls.argsFor(2), [ 0, 1 ])
                t.isDeeply(obj.setSomeProp.calls.allArgs(), [ [], [ 0, 1, 1], [ 0, 1 ] ])

                t.isDeeply(obj.setSomeProp.calls.mostRecent(), { object : obj, args : [ 0, 1 ], returnValue : undefined })
                t.isDeeply(obj.setSomeProp.calls.first(), { object : obj, args : [], returnValue : undefined })

                // reset() clears the recorded call log.
                obj.setSomeProp.calls.reset()

                t.is(obj.setSomeProp.calls.any(), false)
                t.is(obj.setSomeProp.calls.count(), 0)
            });

            t.it("Spy should be able to call through and stub", function (t) {
                t.spyOn(obj, 'setSomeProp').callThrough()

                obj.setSomeProp(1)

                t.expect(obj.setSomeProp).toHaveBeenCalled()

                t.is(obj.someProp, 1, "`someProp` has changed")

                // Switching to stub() suppresses the original implementation again.
                obj.setSomeProp.and.stub()

                obj.setSomeProp(11)

                t.is(obj.someProp, 1, "`someProp` hasn't changed")
            });

            t.it("Spy should be able to call fake", function (t) {
                t.spyOn(obj, 'setSomeProp').callFake(function () { this.someProp = 11; return 'fake' })

                t.is(obj.setSomeProp(1), 'fake', 'Return value from fake function')

                t.expect(obj.setSomeProp).toHaveBeenCalledWith(1)

                t.is(obj.someProp, 11, "`someProp` has been changed by the fake function")
            });

            t.it("Spy should be able to throw", function (t) {
                t.spyOn(obj, 'setSomeProp').throwError('wrong')

                t.expect(function () { obj.setSomeProp(1) }).toThrow()
            });

            t.it("Spy should be able to return value", function (t) {
                t.spyOn(obj, 'setSomeProp').returnValue(11)

                t.is(obj.setSomeProp(1), 11, "`someProp` has been changed by the fake function")

                t.is(obj.someProp, null, "`someProp` hasn't change")
            });
        })

        t.describe('Standalone spies', function (t) {

            t.it("Should be able to create a spy", function (t) {
                var spy = t.createSpy('007')

                spy()
                spy(0, 1)
                spy(0, 1, '1')

                t.expect(spy).toHaveBeenCalled()
                t.expect(spy).toHaveBeenCalledWith(0, t.any(Number), t.any(String))

                t.isInstanceOf(spy.and, Siesta.Test.BDD.Spy)

                t.is(spy.calls.any(), true)
                t.is(spy.calls.count(), 3)

                spy.calls.reset()

                t.is(spy.calls.any(), false)
                t.is(spy.calls.count(), 0)
            })

            t.it("Should be able to create a spy object", function (t) {
                var spyObj = t.createSpyObj('007', [ 'shoot', 'seduce'])

                t.isDeeply(spyObj, { shoot : t.any(Function), seduce : t.any(Function) })

                spyObj.shoot('gun')
                spyObj.seduce('Girl1')
                spyObj.seduce('Girl2')

                t.expect(spyObj.shoot).toHaveBeenCalledWith('gun')
                t.expect(spyObj.seduce).toHaveBeenCalledWith('Girl1')
                t.expect(spyObj.seduce).toHaveBeenCalledWith('Girl2')
            })
        })

        t.describe('Spies removal after the spec', function (t) {
            // Shared across the two specs below to observe cross-spec cleanup.
            var obj = {
                someProp : null,

                setSomeProp : function (value) { this.someProp = value; return this }
            }

            t.it("Setting up the spy", function (t) {
                var spy = t.spyOn(obj, 'setSomeProp')

                obj.setSomeProp()
                obj.setSomeProp(0, 1, '1')
                obj.setSomeProp(0, '1')

                t.is(obj.someProp, null, "`someProp` hasn't change")

                t.expect(spy).toHaveBeenCalled()
                t.expect(spy).toHaveBeenCalledWith(0, t.any(Number), t.any(String))
            })

            t.it("Spy should be removed in this spec", function (t) {
                obj.setSomeProp(0)

                t.is(obj.someProp, 0, "`someProp` has change - spy has been removed")

                t.notOk(obj.setSomeProp.__SIESTA_SPY__, "Spy has been removed from object")
            })
        })
    })
});
import { IDepthHistory, IChartItem } from 'shared/types/models';
import * as NS from '../namespace';

/** Candle history loaded for the chart. */
export const selectChartData = (state: NS.IReduxState): IChartItem[] =>
  state.data.history;

/** Order-book depth history. */
export const selectDepthHistory = (state: NS.IReduxState): IDepthHistory =>
  state.data.depthHistory;

/** The candle currently being formed. */
export const selectCurrentCandle = (state: NS.IReduxState): IChartItem =>
  state.data.currentCandle;

/** Last data-loading error, if any. */
export const selectError = (state: NS.IReduxState): string | undefined =>
  state.data.error;

/** Whether the indicators dialog is currently open. */
export const selectIndicatorsDialogState = (state: NS.IReduxState): boolean =>
  state.ui.modals.indicatorsDialog.isOpen;
from flask import Flask, request
import requests

app = Flask(__name__)

# Upstream search service; bounded so a hung upstream cannot hang a worker.
SEARCH_URL = 'http://library/search'
REQUEST_TIMEOUT_SECONDS = 5


@app.route('/search', methods=['GET'])
def search():
    """Proxy a title search to the library service.

    Returns ``{'books': [...]}`` on success, 400 when the ``query``
    parameter is missing, and 502 when the upstream service fails.
    """
    query = request.args.get('query')
    if not query:
        # Previously a missing parameter was forwarded as query=None.
        return {'error': "missing required parameter 'query'"}, 400

    try:
        # The original call had no timeout (could block forever) and never
        # checked the HTTP status before parsing the body.
        r = requests.get(SEARCH_URL, params={'query': query},
                         timeout=REQUEST_TIMEOUT_SECONDS)
        r.raise_for_status()
    except requests.RequestException:
        return {'error': 'library search service unavailable'}, 502

    # `.get('results')` may legitimately be absent/None; don't crash the
    # list comprehension in that case.
    results = r.json().get('results') or []
    return {'books': [book['title'] for book in results]}


if __name__ == '__main__':
    app.run()
from django.test import TestCase
from rest_framework.test import APIClient
from rest_framework import status

from myapp.models import Recipe


class RecipeAPITestCase(TestCase):
    """API tests for the recipe creation endpoint."""

    def setUp(self):
        self.client = APIClient()

    def test_create_recipe(self):
        """POSTing a valid payload creates a Recipe and returns 201."""
        payload = {
            'title': 'Spaghetti Carbonara',
            'description': 'Delicious pasta dish with eggs, cheese, and pancetta',
            'prep_time': 20,
            'cook_time': 15,
            'difficulty': 'Medium',
        }

        res = self.client.post('/api/recipes/', payload)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED)

        # The persisted row must mirror every field that was sent.
        created_recipe = Recipe.objects.get(id=res.data['id'])
        for field, expected in payload.items():
            self.assertEqual(expected, getattr(created_recipe, field))
// Exercises gflags_declare.h: the `message` flag is DEFINEd here in normal
// builds, but only DECLAREd in monolithic builds (where the definition
// lives in another translation unit).
#include <gflags/gflags.h>

#if defined(BUILD_MONOLITHIC)
DECLARE_string(message); // in gflags_delcare_test.cc
#else
DEFINE_string(message, "", "The message to print");
#endif

void gflags_print_message(); // in gflags_declare_flags.cc

#if defined(BUILD_MONOLITHIC)
// In monolithic builds each test's main() is renamed via this macro so that
// several tests can be linked into one binary without symbol clashes.
#define main(cnt, arr) gflags_test_declare_main(cnt, arr)
#endif

int main(int argc, const char** argv)
{
	GFLAGS_NAMESPACE::SetUsageMessage("Test compilation and use of gflags_declare.h");
	GFLAGS_NAMESPACE::ParseCommandLineFlags(&argc, &argv, true);
	gflags_print_message();
	return 0;
}
package bootcamp.mercado.usuario.autenticacao;

import bootcamp.mercado.usuario.Usuario;
import io.jsonwebtoken.JwtException;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;

import java.util.Date;

/**
 * Wrapper around a signed JWT: either freshly issued for a {@link Usuario},
 * or parsed and validated from an existing token string.
 */
public class Token {

	private final String token;
	private final Long id;

	/**
	 * Issues a new HS256-signed token whose subject is the user's id.
	 *
	 * @param usuario    user the token is issued for
	 * @param secret     HMAC signing secret
	 * @param expiration validity period in milliseconds, given as a string
	 * @param issuer     value for the JWT issuer claim
	 * @throws JwtException if the token cannot be built
	 */
	public Token(Usuario usuario, String secret, String expiration, String issuer) throws JwtException {
		long validityMillis = Long.parseLong(expiration);
		Date issuedAt = new Date();
		Date expiresAt = new Date(issuedAt.getTime() + validityMillis);

		this.id = usuario.getId();
		this.token = Jwts.builder()
				.setIssuer(issuer)
				.setSubject(this.id.toString())
				.setIssuedAt(issuedAt)
				.setExpiration(expiresAt)
				.signWith(SignatureAlgorithm.HS256, secret)
				.compact();
	}

	/**
	 * Parses and validates an existing token, extracting the user id from
	 * the subject claim.
	 *
	 * @param token  compact JWT string
	 * @param secret HMAC signing secret used for verification
	 * @throws JwtException if the token is invalid or expired
	 */
	public Token(String token, String secret) throws JwtException {
		this.token = token;
		String subject = Jwts.parser()
				.setSigningKey(secret)
				.parseClaimsJws(token)
				.getBody()
				.getSubject();
		this.id = Long.parseLong(subject);
	}

	public String getToken() {
		return token;
	}

	public Long getId() {
		return id;
	}
}
#!/bin/sh
# Regenerate per-SIG markdown pages from the openEuler community repository:
# clone the repo, then for every SIG directory render sig.temp with the SIG
# name, its maintainers (from OWNERS) and its mailing list (from README.md).

# Start from a clean checkout and remove previously generated pages.
if [ -d "./community" ]
then
    rm -fr ./community
fi
rm -fr *.md
git clone https://gitee.com/openeuler/community

# Directory containing this script (and the sig.temp template).
fd=$(dirname $0)

for dir in $(ls ./community/sig/)
do
    # sig-template is scaffolding, not a real SIG.
    if [ "$dir"x = "sig-template"x ]
    then
        continue
    fi
    if [ -d ./community/sig/$dir ]
    then
        # Normalize CRLF line endings before parsing OWNERS.
        sed -i "s/\r//g" ./community/sig/$dir/OWNERS
        # Maintainer list: skip the first OWNERS line, take column 2, join.
        # NOTE(review): `tr '\n' ', '` maps '\n' -> ',' only (the extra char
        # in SET2 is ignored) and leaves a trailing comma — confirm the
        # template tolerates this before changing it.
        mt=$(sed '1d' ./community/sig/$dir/OWNERS | awk '{print $2}' | tr '\n' ', ')
        sed -e "s/{{sig_name}}/$dir/g" $fd/sig.temp > $dir.md
        sed -i "s/{{maintainers}}/$mt/g" $dir.md
        if [ -f ./community/sig/$dir/README.md ]
        then
            # First openeuler.org mailing address found in the SIG README.
            mail=$(grep -P "[a-zA-Z|-]+@openeuler.org" -o ./community/sig/$dir/README.md | head -1)
            # Ignore the placeholder address copied from the template README.
            if [ "$mail"x = "sig-yousigname@openeuler.org"x ]
            then
                continue
            fi
            if [ -n "$mail" ]
            then
                echo "replace mail to $mail"
                sed -i "s/dev@openeuler.org/$mail/g" $dir.md
            fi
        fi
    fi
done
rm -rf ./community
const { Pool } = require('pg');

// Connection pool configured with your database credentials.
const pool = new Pool({
  user: 'username',
  host: 'hostname',
  database: 'databasename',
  password: 'password'
});

// Query the username column and print every username.
pool.query('SELECT username FROM users', (err, res) => {
  if (err) {
    console.error(err);
    // BUG FIX: `res` is undefined when the query fails; the original fell
    // through to the loop below and crashed with a TypeError.
    pool.end();
    return;
  }

  // Log all the usernames to the console (Bob, Alice, Lisa, ...).
  for (const row of res.rows) {
    console.log(row.username);
  }

  // Close the pool so a standalone script can exit cleanly.
  pool.end();
});
#ifndef _indirect_pass_registry_h
#define _indirect_pass_registry_h

#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/StringMap.h"
#include "indirect/IndirectPassHeader.h"
#include "indirect/RegisterIndirectPass.h"
#include "indirect/IndirectPassGenerator.h"

/*
 * In order to keep track of indirect passes it's necessary to define a new
 * PassRegistry that keeps track of extra information while interacting with
 * llvm::PassRegistry automatically.
 */
namespace indirect {
	class IndirectPassRegistry {
		public:
			// String-keyed maps from pass name to its metadata / factory objects.
			typedef llvm::StringMap<indirect::IndirectPassHeader*> PassHeaderMapType;
			typedef llvm::StringMap<indirect::RegisterIndirectPass*> RegisterIndirectPassMapType;
			typedef llvm::StringMap<indirect::IndirectPassGeneratorBase*> PassGeneratorMapType;
		private:
			PassHeaderMapType registeredIndirectPasses;
			RegisterIndirectPassMapType registeredIndirectPassInfos;
			PassGeneratorMapType registeredPassGenerators;
		public:
			IndirectPassRegistry();
			~IndirectPassRegistry();
			// Global accessor for the registry instance.
			static IndirectPassRegistry* getIndirectPassRegistry();
			// Factory helper: default-constructs a generator of the given type.
			template<class PassGeneratorClass>
			static IndirectPassGeneratorBase* buildPassGenerator() {
				return new PassGeneratorClass();
			}
			static const void* getUniqueId(const char* name);
			static const void* getUniqueId(llvm::StringRef name);
			static const void* getUniqueId(char* name);
			// char* / const char* overloads delegate to the StringRef version.
			template<class PassGeneratorClass>
			void registerPassGenerator(char* name) {
				registerPassGenerator<PassGeneratorClass>(
						llvm::StringRef((const char*)name));
			}
			template<class PassGeneratorClass>
			void registerPassGenerator(const char* name) {
				registerPassGenerator<PassGeneratorClass>(
						llvm::StringRef(name));
			}
			// Registers a generator under `name`; asserts on duplicate names.
			template<class PassGeneratorClass>
			void registerPassGenerator(llvm::StringRef name) {
				assert((registeredPassGenerators.find(name) ==
							registeredPassGenerators.end()) &&
						"Pass generator already registered!");
				registeredPassGenerators[name] =
					IndirectPassRegistry::buildPassGenerator<PassGeneratorClass>();
			}
			void registerIndirectPassHeader(indirect::IndirectPassHeader* header);
			void unregisterIndirectPassHeader(indirect::IndirectPassHeader* header);
			void unregisterIndirectPassHeader(char* name);
			void unregisterIndirectPassHeader(const char* name);
			void unregisterIndirectPassHeader(llvm::StringRef name);
			// Lookup accessors; implemented out of line.
			IndirectPassHeader* getIndirectPassHeader(char* name) const;
			IndirectPassHeader* getIndirectPassHeader(const char* name) const;
			IndirectPassHeader* getIndirectPassHeader(llvm::StringRef name) const;
			llvm::PassInfo* getIndirectPassInfo(char* name) const;
			llvm::PassInfo* getIndirectPassInfo(const char* name) const;
			llvm::PassInfo* getIndirectPassInfo(llvm::StringRef name) const;
			llvm::Pass* createPass(char* name);
			llvm::Pass* createPass(const char* name);
			llvm::Pass* createPass(llvm::StringRef name);
	};
}
#endif
# Monkey-patch: route Spree::Taxon root/find lookups through the
# Amazon-backed taxon implementation.
Spree::Taxon.class_eval do
  class << self
    # Top-level taxons come from the Amazon taxonomy.
    def roots
      Spree::Amazon::Taxon.roots
    end

    # Look up a single taxon by id via the Amazon taxonomy.
    def find(taxon_id)
      Spree::Amazon::Taxon.find(taxon_id)
    end
  end
end
class CustomClass:
    """Simple record with name/age/gender, constructible from a dict."""

    def __init__(self):
        # Defaults represent an "empty" record.
        self.name = ""
        self.age = 0
        self.gender = ""

    @classmethod
    def from_dict(cls, d):
        """Build an instance from ``d``, silently ignoring unknown keys."""
        instance = cls()
        for key in d:
            # Only overwrite attributes the class already defines.
            if hasattr(instance, key):
                setattr(instance, key, d[key])
        return instance

    def __str__(self):
        parts = ("Name: {0.name}", "Age: {0.age}", "Gender: {0.gender}")
        return ", ".join(parts).format(self)
class AddSuggestedIndexes < ActiveRecord::Migration
  # `algorithm: :concurrently` cannot run inside a transaction. Declaring
  # disable_ddl_transaction! is the supported way to opt out of the
  # migration transaction, replacing the previous commit_db_transaction
  # hack inside change.
  disable_ddl_transaction!

  def change
    add_index :characters, [:slug], algorithm: :concurrently
    add_index :comments, [:user_id], algorithm: :concurrently
    add_index :library_events, [:library_entry_id], algorithm: :concurrently
    add_index :manga, [:slug], algorithm: :concurrently
    add_index :post_likes, [:user_id], algorithm: :concurrently
  end
end
<filename>js/sykepengesoknad-gammel-plattform/sykepengesoknader/FremtidigSoknadTeaser.js<gh_stars>1-10 import React, { Component } from 'react'; import PropTypes from 'prop-types'; import { Knapp } from 'nav-frontend-knapper'; import { getLedetekst, tilLesbarDatoMedArstall, tilLesbarPeriodeMedArstall } from '@navikt/digisyfo-npm'; import { soknadPt, sykepengesoknad as sykepengesoknadPt } from '../../propTypes/index'; import Lightbox from '../../components/Lightbox'; import { InngangspanelHeader, InngangspanelIkon, InngangspanelInnhold, InngangspanelTekst, InngangspanelUndertekst, } from '../../components/Inngangspanel'; const SoknadLightbox = ({ soknad, onClose }) => { return (<Lightbox onClose={onClose}> <h3 className="modal__tittel">{getLedetekst('soknader.teaser.fremtidig.dato-tittel')}</h3> <p>{ getLedetekst('soknader.teaser.fremtidig.dato-info', { '%DATO%': tilLesbarDatoMedArstall(soknad.tom), }) }</p> <div className="knapperad"> <Knapp onClick={onClose}>Lukk</Knapp> </div> </Lightbox>); }; SoknadLightbox.propTypes = { soknad: sykepengesoknadPt, onClose: PropTypes.func, }; class FremtidigSoknadTeaser extends Component { constructor(props) { super(props); this.state = { vis: false, }; } render() { const { soknad } = this.props; return (<article aria-labelledby={`soknader-header-${soknad.id}`}> <button className="inngangspanel inngangspanel--inaktivt" onClick={(e) => { e.preventDefault(); this.setState({ vis: true, }); }}> <InngangspanelIkon ikon={`${process.env.REACT_APP_CONTEXT_ROOT}/img/svg/soknader.svg`} /> <InngangspanelInnhold> <InngangspanelHeader id={`soknad-header-${soknad.id}`} meta={getLedetekst('soknad.teaser.dato.fremtidig', { '%DATO%': tilLesbarDatoMedArstall(soknad.tom) })} tittel={getLedetekst('soknad.teaser.tittel')} status={getLedetekst(`soknad.teaser.status.${soknad.status}`)} /> <InngangspanelTekst Tag="p"> { getLedetekst('soknad.teaser.tekst', { '%PERIODE%': tilLesbarPeriodeMedArstall(soknad.fom, soknad.tom), }) } </InngangspanelTekst> { 
soknad.arbeidsgiver ? (<InngangspanelUndertekst> {soknad.arbeidsgiver.navn} </InngangspanelUndertekst>) : null } </InngangspanelInnhold> </button> { this.state.vis ? <SoknadLightbox soknad={soknad} onClose={() => { this.setState({ vis: false, }); }} /> : null } </article>); } } FremtidigSoknadTeaser.propTypes = { soknad: PropTypes.oneOfType([sykepengesoknadPt, soknadPt]), }; export default FremtidigSoknadTeaser;
def is_equal(list1, list2):
    """Return True if two singly linked lists hold the same values in order.

    Nodes are compared via their ``val`` attribute and traversed via
    ``next``. Two empty (None) lists are equal; lists of different lengths
    are not. (Fixes the non-idiomatic ``== None`` comparisons.)
    """
    curr1, curr2 = list1, list2
    while curr1 is not None and curr2 is not None:
        if curr1.val != curr2.val:
            return False
        curr1 = curr1.next
        curr2 = curr2.next
    # Both cursors must be exhausted; otherwise the lengths differ.
    # This also covers the both-None input case.
    return curr1 is None and curr2 is None
import React from "react";
import { IEyeButtonProps } from "../Common.type";

/** Props for the text input component (extends the eye-toggle button props). */
export interface ITextInputProps extends IEyeButtonProps {
  width?: string;
  margin?: string;
  /** Input mode; "password" renders a masked field. */
  type: "text" | "password";
  value: string;
  name?: string;
  placeholder?: string;
  disabled?: boolean;
  /**
   * Change handler. The type is left loose because a plain setState call
   * cannot be typed precisely yet. (Translated from the original Korean
   * comment.)
   */
  setValue: (e: React.ChangeEvent<HTMLInputElement>) => void | Promise<void>;
}
// 2-D integer point with mutable coordinates.
// (Declaration only; member definitions live in the corresponding .cpp —
// which is why signatures, including missing const on the getters, are
// left untouched here.)
class Point{
	private:
		int x; // horizontal coordinate
		int y; // vertical coordinate
	public:
		Point();                 // default constructor; initial values defined out of line
		Point(int x, int y);     // construct from explicit coordinates
		void set(int x, int y);  // overwrite both coordinates
		int getX();
		int getY();
};
package com.java.study.chapter7;

import java.math.BigInteger;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

/**
 * Deliberately "broken" cancellation demo (study code, chapter 7): shows why
 * a volatile cancelled flag combined with a swallowed InterruptedException
 * cannot stop a thread.
 */
public class BrokenPrimerProducerCancel extends Thread {
    private final BlockingQueue<BigInteger> queue;
    // Set by cancel(); note that run() never checks it — part of the bug
    // being demonstrated.
    private volatile boolean cancelled = false;

    public BrokenPrimerProducerCancel(BlockingQueue<BigInteger> queue){
        this.queue = queue;
    }

    public void cancel() {
        cancelled = true;
    }

    public void run() {
        BigInteger p = BigInteger.ONE;
        int i = 0;
        while (true) {
            try {
                System.out.println(i);
                i ++;
                Thread.sleep(100000000);
            } catch (InterruptedException e) {
                // Swallowing the interrupt clears the thread's interrupted
                // status, so interrupt() can never terminate this loop.
                e.printStackTrace();
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<BigInteger> queue = new ArrayBlockingQueue<>(100);
        BrokenPrimerProducerCancel brokenPrimerProducer = new BrokenPrimerProducerCancel(queue);
        brokenPrimerProducer.start();
        Thread.sleep(1000);
        System.out.println(brokenPrimerProducer.isInterrupted());
        brokenPrimerProducer.interrupt();
        // May print false: if sleep() already caught the interrupt, the
        // catch block above has cleared the interrupted status.
        System.out.println(brokenPrimerProducer.isInterrupted());
        // NOTE(review): calling start() on an already-started Thread throws
        // IllegalThreadStateException at runtime — presumably intentional
        // for the demo, but confirm.
        brokenPrimerProducer.start();
        BrokenPrimerProducerCancel.interrupted();
        // brokenPrimerProducer.cancel();
    }
}
# Reflection helpers used when binding plain Ruby objects to a schema.
class SchemaBinding
  # Names of all writable properties (setter methods ending in '='),
  # returned as symbols without the trailing '='.
  def get_properties(object)
    properties = (object.class.instance_methods - Object.methods).grep(/^\w+=$/)
    properties.map { |x| x.to_s.gsub('=', '').to_sym }
  end

  # Invoke whichever binding hooks the object happens to define.
  def call_bind(object)
    object.clear_binding if defined?(object.clear_binding)
    object.binding if defined?(object.binding)
    object.get_binding if defined?(object.get_binding)
  end

  # True when the option is present and truthy (absent defaults to false).
  def option_is_true?(opts, key)
    get_option(opts, key, false)
  end

  # True when the option is explicitly false (absent defaults to true).
  # BUG FIX: the previous implementation returned the raw option value,
  # which is inverted in a boolean context — an absent key yielded the
  # truthy default `true`, and an explicit `false` yielded falsy `false`.
  def option_is_false?(opts, key)
    !get_option(opts, key, true)
  end

  # The option's value, or +default+ when the key is absent or nil.
  def get_option(opts, key, default = nil)
    (opts.include?(key) && !opts[key].nil?) ? opts[key] : default
  end

  # A property may not be marked both :required and :ignored.
  def validate_property_options(opts)
    fail ArgumentError, 'property can\'t be required and ignored' if option_is_true?(opts, :required) && option_is_true?(opts, :ignored)
  end

  # Only arrays and hashes are treated as cloneable containers.
  def is_cloneable?(object)
    %w(array hash).include?(object.class.to_s.downcase)
  end
end
class Solution:
    def merge_two_sorted_lists(self, l1, l2):
        """Merge two sorted lists into one sorted list.

        Placeholder: the original file omitted the implementation, leaving
        a comment-only body — which is a SyntaxError in Python (a def needs
        at least one statement). Raising keeps the file importable while
        making the missing implementation explicit.
        """
        raise NotImplementedError("merge_two_sorted_lists is not implemented")

    def merge_k_sorted_lists(self, lists):
        """Merge k sorted lists by repeated pairwise merging.

        Each round merges adjacent pairs via merge_two_sorted_lists,
        halving the number of lists until one remains. Returns None for an
        empty input, and the single list unchanged for a one-element input.
        """
        if len(lists) == 0:
            return None
        if len(lists) == 1:
            return lists[0]
        while len(lists) > 1:
            merged_lists = []
            while len(lists) >= 2:
                l1 = lists.pop(0)
                l2 = lists.pop(0)
                merged_lists.append(self.merge_two_sorted_lists(l1, l2))
            # Odd list count: carry the leftover list into the next round.
            if lists:
                merged_lists.append(lists.pop(0))
            lists = merged_lists
        return lists[0]
import { DocumentDir } from './getDocumentDir' export type ScrollType = 'default' | 'reverse' | 'negative' // Source from https://github.com/alitaheri/normalize-scroll-left let cachedType: ScrollType /** * Based on the jquery plugin https://github.com/othree/jquery.rtl-scroll-type * * Types of scrollLeft, assuming scrollWidth=100 and direction is rtl. * * Type | <- Most Left | Most Right -> | Initial * ---------------- | ------------ | ------------- | ------- * default | 0 | 100 | 100 * negative (spec*) | -100 | 0 | 0 * reverse | 100 | 0 | 0 * * Edge 85: default * Safari 14: negative * Chrome 85: negative * Firefox 81: negative * IE11: reverse * * spec* https://drafts.csswg.org/cssom-view/#dom-window-scroll */ export function detectScrollType(): ScrollType { if (cachedType) { return cachedType } const dummy = document.createElement('div') const container = document.createElement('div') container.style.width = '10px' container.style.height = '1px' dummy.appendChild(container) dummy.dir = 'rtl' dummy.style.fontSize = '14px' dummy.style.width = '4px' dummy.style.height = '1px' dummy.style.position = 'absolute' dummy.style.top = '-1000px' dummy.style.overflow = 'scroll' document.body.appendChild(dummy) cachedType = 'reverse' if (dummy.scrollLeft > 0) { cachedType = 'default' } else { dummy.scrollLeft = 1 if (dummy.scrollLeft === 0) { cachedType = 'negative' } } document.body.removeChild(dummy) return cachedType } // Based on https://stackoverflow.com/a/24394376 export function getNormalizedScrollLeft( element: Element, direction: DocumentDir ): number { const scrollLeft = element.scrollLeft // Perform the calculations only when direction is rtl to avoid messing up the ltr behavior if (direction !== 'rtl') { return scrollLeft } const type = detectScrollType() switch (type) { case 'negative': return element.scrollWidth - element.clientWidth + scrollLeft case 'reverse': return element.scrollWidth - element.clientWidth - scrollLeft default: return scrollLeft } }
/// The fixed vocabulary this toy checker recognizes.
const ENGLISH_WORDS: [&str; 4] = ["programming", "language", "coding", "rust"];

/// Returns `true` when `word` exactly matches one of the known words
/// (case-sensitive, matching the original behavior).
///
/// The original rebuilt a `HashSet` from the word list on every call;
/// a linear scan of a four-element array is simpler and cheaper.
fn is_english_word(word: &str) -> bool {
    ENGLISH_WORDS.contains(&word)
}

fn main() {
    println!(
        "Is 'programming' an English word: {}",
        is_english_word("programming")
    );
}
//
// Licensed to Green Energy Corp (www.greenenergycorp.com) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Green Enery Corp licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
#ifndef __MULTIPLEXING_DATA_OBSERVER_H_
#define __MULTIPLEXING_DATA_OBSERVER_H_

#include <opendnp3/APL/DataInterfaces.h>
#include <opendnp3/APL/Lock.h>

#include <vector>

namespace apl
{

/** DataObserver for sending updates to multiple data observers. */
class MultiplexingDataObserver : public apl::IDataObserver
{
public:
	MultiplexingDataObserver();
	// Convenience constructors pre-populating one or two observers.
	MultiplexingDataObserver(IDataObserver* apObserver1);
	MultiplexingDataObserver(IDataObserver* apObserver1, IDataObserver* apObserver2);

	void AddObserver(IDataObserver* apObserver1);

private:
	std::vector<IDataObserver*> mObservers;
	SigLock mLock;

	// IDataObserver transaction hooks (declaration only — bodies live in the
	// .cpp; presumably each call is fanned out to every registered observer,
	// per the class comment).
	void _Start();
	void _End();
	void StartOrEnd(bool aStart);

	// Per-measurement-type update overloads.
	void _Update(const Binary& arPoint, size_t aIndex);
	void _Update(const Analog& arPoint, size_t aIndex);
	void _Update(const Counter& arPoint, size_t aIndex);
	void _Update(const ControlStatus& arPoint, size_t aIndex);
	void _Update(const SetpointStatus& arPoint, size_t aIndex);

	// Shared template implementing the common fan-out for all point types.
	template <typename T>
	void PassThrough(const T& arPoint, size_t aIndex);
};

}

#endif
# Training entry point: wires options, data loader and model together and
# runs a GAN-style training loop, logging generator/discriminator losses
# to TensorBoard.
import time
import sys
from tensorboardX import SummaryWriter
sys.path.append('options')
from train_options import TrainOptions
sys.path.append('data')
from data_loader import CreateDataLoader
sys.path.append('model')
from model_Loader import CreateModel
sys.path.append('util')
from utils import error as err

opt = TrainOptions().parse()
data_loader = CreateDataLoader(opt)
model = CreateModel(opt)
writer = SummaryWriter('logs')
# NOTE(review): rebinds the imported class alias `err` to an instance of
# itself — confusing shadowing, though harmless since the class alias is
# not used again afterwards.
err = err(model.save_dir)

# `count_epoch` supports resuming: epochs restart just past the last one.
for epoch in range(opt.count_epoch + 1, opt.epochs + 1):
    epoch_start_time = time.time()
    err.initialize()
    for i, data in enumerate(data_loader):
        model.forward(data)
        model.optimize_G_parameters()
        # Update the discriminator only every D_interval generator steps.
        if(i % opt.D_interval == 0):
            model.optimize_D_parameters()
        err.add(model.Loss_G.data.item(), model.Loss_D.data.item())
    LOSSG, LOSSD = err.print_errors(epoch)
    writer.add_scalar('loss_g', LOSSG, epoch)
    writer.add_scalar('loss_d', LOSSD, epoch)
    print('End of epoch {0} \t Time Taken: {1} sec\n'.format(epoch, time.time()-epoch_start_time))
    model.save_result(epoch)
    if epoch % opt.save_epoch_freq == 0:
        print('Saving the model at the end of epoch {}\n'.format(epoch))
        model.save(epoch)
package org.firstinspires.ftc.teamcode.subsystem.drive.drivecontroller.PID.hardware;

import org.firstinspires.ftc.teamcode.hardware.Directions;
import org.firstinspires.ftc.teamcode.hardware.devices.RobotMotor;
import org.firstinspires.ftc.teamcode.subsystem.drive.DriveSystem;
import org.firstinspires.ftc.teamcode.subsystem.drive.drivecontroller.PID.PID;
import org.firstinspires.ftc.teamcode.subsystem.drive.drivecontroller.PID.PIDBuilder;
import org.firstinspires.ftc.teamcode.subsystem.drive.drivecontroller.PID.consts.PIDConstants;
import org.firstinspires.ftc.teamcode.subsystem.sensors.gyro.GyroSensorSystem;

/**
 * PID loop whose process variable is the robot's gyro heading: feeds
 * GyroSensorSystem.getHeading() into a PID controller built from the
 * supplied PIDConstants.
 */
public class PIDGyro implements PID.PidInput {

    private PID drivePIDController = null;
    // NOTE(review): the motor fields below are assigned in the constructor
    // but never read in this class — confirm whether they are still needed.
    private RobotMotor leftFrontMotor, leftRearMotor, rightFrontMotor, rightRearMotor = null;
    private GyroSensorSystem gyroSensorSystem;
    private Directions direction;    // stored but not read here
    private double defaultPower;     // stored but not read here

    public PIDGyro(PIDConstants pidConstants, DriveSystem driveSystem, GyroSensorSystem gyroSensorSystem, Directions direction, double defaultPower) {
        //specify encoder or gyro
        // Copy every tuning constant into the builder.
        PIDBuilder drivePIDBuilder = new PIDBuilder().setKP(pidConstants.getKP())
                .setKI(pidConstants.getKI())
                .setKD(pidConstants.getKD())
                .setKF(pidConstants.getKF())
                .setTOLERANCE(pidConstants.getTOLERANCE())
                .setSETTLING_TIME(pidConstants.getSETTLING_TIME())
                .setTARGET_RANGE(pidConstants.getTARGET_MIN_RANGE(), pidConstants.getTARGET_MAX_RANGE())
                .setTARGET(pidConstants.getTARGET())
                .setOUTPUT_RANGE(pidConstants.getMIN_OUTPUT(), pidConstants.getMAX_OUTPUT())
                // NOTE(review): duplicate setTARGET call — harmless if the
                // builder just overwrites, but probably unintended.
                .setTARGET(pidConstants.getTARGET())
                .setINVERTED(pidConstants.isINVERTED())
                .setABSOLUTE_SETPOINT(pidConstants.isABSOLUTE_SETPOINT())
                .setNO_OSCILLATION(pidConstants.isNO_OSCILLATION());

        this.leftFrontMotor = driveSystem.getLeftFrontMotor();
        this.leftRearMotor = driveSystem.getLeftRearMotor();
        this.rightFrontMotor = driveSystem.getRightFrontMotor();
        this.rightRearMotor = driveSystem.getRightRearMotor();
        this.gyroSensorSystem = gyroSensorSystem;
        this.direction = direction;
        this.defaultPower = defaultPower;

        // `this` is the PidInput: the loop reads the gyro via getInput().
        drivePIDController = new PID(drivePIDBuilder, this);
    }

    /** True when the PID loop reports it has settled on the target. */
    public boolean isOnTarget() {
        return drivePIDController.isOnTarget();
    }

    /**
     * Returns the output of the PID loop
     *
     * @return correction value to add or subtract original power from depending on direction of rot.
     */
    public double getOutput() {
        return getOutputPower();
    }

    /** Clears the PID controller's accumulated state. */
    public void reset() {
        drivePIDController.reset();
    }

    /** PID input callback: the current gyro heading is the process variable. */
    @Override
    public double getInput(PID pid) {
        return gyroSensorSystem.getHeading();
    }

    private double getOutputPower() {
        return drivePIDController.getOutput();
    }
}
#!/bin/sh
# Build TVM conda packages inside CUDA 10.0 and CUDA 9.2 docker images,
# then fix ownership of the produced packages.
# BUG FIX: the original shebang was `#/bin/sh` (missing `!`), so the line
# was a no-op comment and the script ran under whatever shell invoked it.

# Directory of this script (absolute), and the source tree above it.
condadir=$(dirname "$0")
condadir=$(readlink -f "$condadir")
srcdir=$(dirname "$condadir")

docker build -t tvm-cuda100-forge "$condadir" -f "$condadir/Dockerfile.cuda100"
docker run --rm -v "$srcdir":/workspace tvm-cuda100-forge

docker build -t tvm-cuda92-forge "$condadir" -f "$condadir/Dockerfile.cuda92"
docker run --rm -v "$srcdir":/workspace tvm-cuda92-forge

# Containers write as root; hand the packages back to the invoking user.
sudo chown -R "$(whoami)" "$condadir/pkg"
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package com.facebook.react.modules.systeminfo;

import android.os.Build;

import com.facebook.react.bridge.BaseJavaModule;
import com.facebook.react.module.annotations.ReactModule;

import java.util.HashMap;
import java.util.Map;

import javax.annotation.Nullable;

/**
 * Module that exposes Android Constants to JS.
 */
@ReactModule(name = "AndroidConstants")
public class AndroidInfoModule extends BaseJavaModule {

  // System property name checked for the string "true" — presumably set by
  // an instrumentation/test runner (TODO confirm where it is set).
  private static final String IS_TESTING = "IS_TESTING";

  @Override
  public String getName() {
    return "AndroidConstants";
  }

  /** Constants exposed to JS: SDK version, dev-server host, testing flag. */
  @Override
  public @Nullable Map<String, Object> getConstants() {
    HashMap<String, Object> constants = new HashMap<>();
    constants.put("Version", Build.VERSION.SDK_INT);
    constants.put("ServerHost", AndroidInfoHelpers.getServerHost());
    constants.put("isTesting", "true".equals(System.getProperty(IS_TESTING)));
    return constants;
  }
}
import React from 'react'; const App = ({ countries }) => { const sortedCountries = countries.sort((a, b) => { if (a.name < b.name) { return -1; } if (a.name > b.name) { return 1; } return 0; }); return ( <div> <table> <thead> <tr> <th>Name</th> <th>Population</th> </tr> </thead> <tbody> {sortedCountries.map(country => ( <tr key={country.name}> <td>{country.name}</td> <td>{country.population}</td> </tr> ))} </tbody> </table> </div> ); }; export default App;
/*
 * Solution container: raw pointers to the arrays produced by the solver.
 * NOTE(review): only the pointer declarations are visible here. The naming
 * (`*_keep` / `*_adj`, `inv_v_*`, `inv_marg_u_*`, `inv_w`, `q*`) suggests
 * a keep/adjust discrete-choice consumption-saving model — confirm exact
 * semantics, array shapes and ownership against the solver code.
 */
typedef struct sol_struct
{
    /* "keep" problem arrays */
    double *c_keep;
    double *inv_v_keep;
    double *inv_marg_u_keep;

    /* "adjust" problem arrays */
    double *inv_v_adj;
    double *inv_marg_u_adj;
    double *c_adj;
    double *d_adj;

    /* post-decision arrays */
    double *inv_w;
    double *q;
    double *q_c;
    double *q_m;
} sol_struct;
#!/bin/bash
# Fetch current conditions from the Weather API and print the temperature.

# BUG FIX: the original curl had no failure flags, so an HTTP error or a
# dead host left a bad/empty wdata.json and jq then failed confusingly.
# -f: fail on HTTP errors; -s: no progress noise.
curl -fs -o wdata.json "api.weather.com/[...]"

# Extract the relevant field with jq; invoking it directly replaces the
# original redundant `echo "$(jq ...)"` wrapper (same output).
jq .current.temp wdata.json
# frozen_string_literal: true

# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# (proto_docs stub: these classes document the protobuf messages; the
# attribute behavior comes from Google::Protobuf::MessageExts.)

module Google
  module Cloud
    module Dialogflow
      module CX
        module V3beta1
          # Agent/flow validation message.
          # @!attribute [rw] resource_type
          #   @return [::Google::Cloud::Dialogflow::CX::V3beta1::ValidationMessage::ResourceType]
          #     The type of the resources where the message is found.
          # @!attribute [rw] resources
          #   @return [::Array<::String>]
          #     The names of the resources where the message is found.
          # @!attribute [rw] resource_names
          #   @return [::Array<::Google::Cloud::Dialogflow::CX::V3beta1::ResourceName>]
          #     The resource names of the resources where the message is found.
          # @!attribute [rw] severity
          #   @return [::Google::Cloud::Dialogflow::CX::V3beta1::ValidationMessage::Severity]
          #     Indicates the severity of the message.
          # @!attribute [rw] detail
          #   @return [::String]
          #     The message detail.
          class ValidationMessage
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods

            # Resource types.
            module ResourceType
              # Unspecified.
              RESOURCE_TYPE_UNSPECIFIED = 0

              # Agent.
              AGENT = 1

              # Intent.
              INTENT = 2

              # Intent training phrase.
              INTENT_TRAINING_PHRASE = 8

              # Intent parameter.
              INTENT_PARAMETER = 9

              # Multiple intents.
              INTENTS = 10

              # Multiple training phrases.
              INTENT_TRAINING_PHRASES = 11

              # Entity type.
              ENTITY_TYPE = 3

              # Multiple entity types.
              ENTITY_TYPES = 12

              # Webhook.
              WEBHOOK = 4

              # Flow.
              FLOW = 5

              # Page.
              PAGE = 6

              # Multiple pages.
              PAGES = 13

              # Transition route group.
              TRANSITION_ROUTE_GROUP = 7
            end

            # Severity level.
            module Severity
              # Unspecified.
              SEVERITY_UNSPECIFIED = 0

              # The agent doesn't follow Dialogflow best practices.
              INFO = 1

              # The agent may not behave as expected.
              WARNING = 2

              # The agent may experience failures.
              ERROR = 3
            end
          end

          # Resource name and display name.
          # @!attribute [rw] name
          #   @return [::String]
          #     Name.
          # @!attribute [rw] display_name
          #   @return [::String]
          #     Display name.
          class ResourceName
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end
      end
    end
  end
end
package io.opensphere.core.util;

import java.util.concurrent.Phaser;

/**
 * A listener which can provide status as to whether it is prepared to accept a
 * state change. Typically the notifier should request permission from each
 * listener to switch states. Once each listener has become ready to switch,
 * each listener will be given the opportunity to handle any pre-commit details
 * which are required. Once pre-commit has been completed the new state is
 * committed.
 *
 * @param <T> The type which contains the details of the state change.
 */
public interface ThreePhaseChangeListener<T>
{
    /**
     * Commit the pending state. A listener cannot reject a commit.
     *
     * @param state The state which is being committed.
     * @param phaser A phaser that may be used by other threads to delay the
     *            state transition.
     */
    void commit(T state, Phaser phaser);

    /**
     * Allow listeners to handle any pre-commit details. Any listener which
     * returns true from this method should guarantee that it will be able to
     * commit.
     *
     * @param pendingState The state which will be committed.
     * @param phaser A phaser that may be used by other threads to delay the
     *            state transition.
     * @return true when the listener accepts the pre-commit request.
     * @throws PropertyChangeException If there is a problem attempting the
     *             state change.
     * @throws InterruptedException If the thread is interrupted.
     */
    boolean preCommit(T pendingState, Phaser phaser) throws PropertyChangeException, InterruptedException;

    /**
     * Request that the listener signal readiness (via the supplied phaser)
     * when it is prepared to switch to the pending state. (The original text
     * said "count down the latch" — a stale reference; the coordination
     * object here is a {@link Phaser}.)
     *
     * @param pendingState The state which will be committed.
     * @param phaser A phaser that may be used by other threads to delay the
     *            state transition.
     * @return true when the listener accepts the prepare request.
     * @throws PropertyChangeException If there is a problem attempting the
     *             state change.
     * @throws InterruptedException If the thread is interrupted.
     */
    boolean prepare(T pendingState, Phaser phaser) throws PropertyChangeException, InterruptedException;
}
# Termux build recipe: SDL_mixer 1.2 (multi-channel audio mixer for SDL 1.x).
TERMUX_PKG_HOMEPAGE=https://www.libsdl.org/projects/SDL_mixer/release-1.2.html
TERMUX_PKG_DESCRIPTION="A simple multi-channel audio mixer"
TERMUX_PKG_LICENSE="ZLIB"
TERMUX_PKG_MAINTAINER="Leonid Pliushch <leonid.pliushch@gmail.com>"
TERMUX_PKG_VERSION=1.2.12
TERMUX_PKG_REVISION=15
TERMUX_PKG_SRCURL=https://www.libsdl.org/projects/SDL_mixer/release/SDL_mixer-${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=1644308279a975799049e4826af2cfc787cad2abb11aa14562e402521f86992a
TERMUX_PKG_DEPENDS="libvorbis, sdl"
/*
 * arch/arm/mach-sunxi/pm/ccmu-sun50iw1p1.h
 *
 * Copyright 2012 (c) njubietech.
 * gq.yang (<EMAIL>)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 * CCMU (clock control module) register layout used by the suspend/resume
 * (pm) code.  Bit-field widths and field order below define the hardware
 * register ABI; do not reorder or resize fields.
 */
/*
 * NOTE(review): guard macro says SUN8W11P1 although the file is
 * ccmu-sun50iw1p1.h — presumably inherited from a sun8i header this file
 * was copied from.  Confirm no other code tests this macro before renaming.
 */
#ifndef __MEM_CCMU_SUN8W11P1_H__
#define __MEM_CCMU_SUN8W11P1_H__

/* PLL1 (CPUX PLL) control register, offset 0x0000.
 * Output frequency: (24MHz * N * K) / (M * P). */
typedef union {
	__u32 dwval;
	struct {
		__u32 FactorM:2;	/* bit0,  PLL1 factor M */
		__u32 reserved0:2;	/* bit2,  reserved */
		__u32 FactorK:2;	/* bit4,  PLL1 factor K */
		__u32 reserved1:2;	/* bit6,  reserved */
		__u32 FactorN:5;	/* bit8,  PLL1 factor N */
		__u32 reserved2:3;	/* bit13, reserved */
		__u32 FactorP:2;	/* bit16, PLL1 factor P */
		__u32 reserved3:6;	/* bit18, reserved */
		__u32 SigmaEn:1;	/* bit24, sigma-delta enable */
		__u32 reserved4:3;	/* bit25, reserved */
		__u32 Lock:1;		/* bit28, PLL stable flag, 1 = PLL is stable */
		__u32 reserved5:2;	/* bit29, reserved */
		__u32 PLLEn:1;		/* bit31, 0-disable, 1-enable, (24MHz*N*K)/(M*P) */
	} bits;
} __ccmu_pll1_reg0000_t;

/* PLL2 (audio PLL) control register, offset 0x0008. */
typedef struct __CCMU_PLL2_REG0008 {
	__u32 FactorM:5;	/* bit0,  PLL2 pre-division M */
	__u32 reserved0:3;	/* bit5,  reserved */
	__u32 FactorN:7;	/* bit8,  PLL2 factor N */
	__u32 reserved1:1;	/* bit15, reserved */
	__u32 FactorP:4;	/* bit16, PLL2 factor P */
	__u32 reserved2:4;	/* bit20, reserved */
	__u32 SdmEn:1;		/* bit24, PLL SDM enable; only low 4 bits of FactorN valid when enabled */
	__u32 reserved3:3;	/* bit25, reserved */
	__u32 Lock:1;		/* bit28, PLL stable flag */
	__u32 reserved4:2;	/* bit29, reserved */
	__u32 PLLEn:1;		/* bit31, PLL2 enable */
} __ccmu_pll2_reg0008_t;

/* Generic media PLL (video/VE/DE style) control register layout. */
typedef struct __CCMU_MEDIA_PLL {
	__u32 FactorM:4;	/* bit0,  factor M */
	__u32 reserved0:4;	/* bit4,  reserved */
	__u32 FactorN:7;	/* bit8,  factor N */
	__u32 reserved1:5;	/* bit15, reserved */
	__u32 SdmEn:1;		/* bit20, SDM enable */
	__u32 reserved2:3;	/* bit21, reserved */
	__u32 ModeSel:1;	/* bit24, PLL mode select */
	__u32 FracMod:1;	/* bit25, fractional output: 0 = 270MHz, 1 = 297MHz */
	__u32 reserved3:2;	/* bit26, reserved */
	__u32 Lock:1;		/* bit28, lock flag */
	__u32 reserved4:1;	/* bit29, reserved */
	__u32 CtlMode:1;	/* bit30, control mode: 0 = controlled by CPU, 1 = controlled by DE */
	__u32 PLLEn:1;		/* bit31, PLL enable */
} __ccmu_media_pll_t;

/* PLL5 (DDR0 PLL) control register, offset 0x0020. */
typedef struct __CCMU_PLL5_REG0020 {
	__u32 FactorM:2;	/* bit0,  PLL5 factor M */
	__u32 reserved0:2;	/* bit2,  reserved */
	__u32 FactorK:2;	/* bit4,  PLL5 factor K */
	__u32 reserved1:2;	/* bit6,  reserved */
	__u32 FactorN:5;	/* bit8,  PLL5 factor N */
	__u32 reserved2:7;	/* bit13, reserved */
	__u32 PllUpdate:1;	/* bit20, config update; set to 1 to latch the new PLL settings */
	__u32 reserved3:3;	/* bit21, reserved */
	__u32 SdmEn:1;		/* bit24, PLL SDM enable; only low 4 bits of FactorN valid when enabled */
	__u32 reserved4:3;	/* bit25, reserved */
	__u32 Lock:1;		/* bit28, lock flag, PLL stable flag */
	__u32 reserved5:2;	/* bit29, reserved */
	__u32 PLLEn:1;		/* bit31, PLL5 enable */
} __ccmu_pll5_reg0020_t;

/* PLL6 (periph0 PLL) control register, offset 0x0028. */
typedef struct __CCMU_PLL6_REG0028 {
	__u32 FactorM:2;	/* bit0,  PLL6 factor M */
	__u32 reserved0:2;	/* bit2,  reserved */
	__u32 FactorK:2;	/* bit4,  PLL6 factor K */
	__u32 reserved1:2;	/* bit6,  reserved */
	__u32 FactorN:5;	/* bit8,  PLL6 factor N */
	__u32 reserved2:3;	/* bit13, reserved */
	__u32 Pll24MPdiv:2;	/* bit16, PLL 24M output clock post divider */
	__u32 Pll24MOutEn:1;	/* bit18, PLL 24M output enable */
	__u32 reserved3:5;	/* bit19, reserved */
	__u32 PllClkOutEn:1;	/* bit24, PLL clock output enable */
	__u32 PLLBypass:1;	/* bit25, PLL6 output bypass enable */
	__u32 reserved4:2;	/* bit26, reserved */
	__u32 Lock:1;		/* bit28, lock flag */
	__u32 reserved5:2;	/* bit29, reserved */
	__u32 PLLEn:1;		/* bit31, PLL6 enable */
} __ccmu_pll6_reg0028_t;

/* CPU (AC327 cluster) clock source selector values for CpuClkSrc. */
#define AC327_CLKSRC_LOSC   (0)
#define AC327_CLKSRC_HOSC   (1)
#define AC327_CLKSRC_PLL1   (2)
/* CPUX/AXI clock divide-ratio register, offset 0x0050. */
typedef union {
	__u32 dwval;
	struct {
		__u32 AXIClkDiv:2;	/* bit0,  AXI clock divide ratio: 00-1, 01-2, 10-3, 11-4 */
		__u32 reserved0:14;	/* bit2,  reserved */
		__u32 CpuClkSrc:2;	/* bit16, CPU1/2/3/4 clock source: 00-internal LOSC, 01-HOSC, 10/11-PLL1 */
		__u32 reserved2:14;	/* bit18, reserved */
	} bits;
} __ccmu_sysclk_ratio_reg0050_t;

/* AHB1 clock source selector values for Ahb1ClkSrc. */
#define AHB1_CLKSRC_LOSC    (0)
#define AHB1_CLKSRC_HOSC    (1)
#define AHB1_CLKSRC_AXI     (2)
#define AHB1_CLKSRC_PLL6    (3)
/* AHB1/APB1 clock divide-ratio register, offset 0x0054. */
typedef union {
	__u32 dwval;
	struct {
		__u32 reserved0:4;	/* bit0,  reserved */
		__u32 Ahb1Div:2;	/* bit4,  AHB1 clock divide ratio 1/2/4/8 */
		__u32 Ahb1PreDiv:2;	/* bit6,  AHB1 clock pre-divide ratio 1/2/3/4 */
		__u32 Apb1Div:2;	/* bit8,  APB1 clock divide ratio 2/2/4/8, source is AHB1 */
		__u32 reserved1:2;	/* bit10, reserved */
		__u32 Ahb1ClkSrc:2;	/* bit12, AHB1 clock source: 00-LOSC, 01-OSC24M, 10-AXI, 11-PLL6/ahb1_pre_div */
		__u32 reserved2:18;	/* bit26, reserved (NOTE(review): bit comment says 26 but field starts at bit14 — matches original; confirm against manual) */
	} bits;
} __ccmu_ahb1_ratio_reg0054_t;

/* APB2 clock source selector values for ClkSrc. */
#define APB2_CLKSRC_LOSC    (0)
#define APB2_CLKSRC_HOSC    (1)
#define APB2_CLKSRC_PLL24M  (2)
#define APB2_CLKSRC_PLL6    (3)
/* APB2 clock divide-ratio register, offset 0x0058. */
typedef union {
	__u32 dwval;
	struct {
		__u32 DivM:5;		/* bit0,  clock divide ratio m */
		__u32 reserved:11;	/* bit5,  reserved */
		__u32 DivN:2;		/* bit16, clock pre-divide ratio 1/2/4/8 */
		__u32 reserved1:6;	/* bit18, reserved */
		__u32 ClkSrc:2;		/* bit24, clock source: 00-LOSC, 01-OSC24M, 10/11-PLL6 */
		__u32 reserved2:6;	/* bit26, reserved */
	} bits;
} __ccmu_apb2_ratio_reg0058_t;

/*
 * Full CCMU register block.  Each member maps one 32-bit MMIO register;
 * the offset in each comment is the register's byte offset from the CCMU
 * base.  reservedN members/arrays pad unused offsets so the layout stays
 * contiguous.
 */
typedef struct __CCMU_REG_LIST {
	volatile __ccmu_pll1_reg0000_t Pll1Ctl;	/* 0x0000, PLL1 control, cpux */
	volatile __u32 reserved0;		/* 0x0004, reserved */
	volatile __ccmu_pll2_reg0008_t Pll2Ctl;	/* 0x0008, PLL2 control, audio */
	volatile __u32 reserved1;		/* 0x000c, reserved */
	volatile __u32 Pll3Ctl;			/* 0x0010, PLL3 control, video0 */
	volatile __u32 reserved2;		/* 0x0014, reserved */
	volatile __u32 Pll4Ctl;			/* 0x0018, PLL4 control, ve */
	volatile __u32 reserved3;		/* 0x001c, reserved */
	volatile __ccmu_pll5_reg0020_t Pll5Ctl;	/* 0x0020, PLL5 control, ddr0 ctrl */
	volatile __u32 reserved4;		/* 0x0024, reserved */
	volatile __ccmu_pll6_reg0028_t Pll6Ctl;	/* 0x0028, PLL6 control, periph0 */
	volatile __u32 Pll7Ctl;			/* 0x002c, PLL7 control, periph1 */
	volatile __u32 PllVideo1;		/* 0x0030, PLL video1 reg */
	volatile __u32 PllSata;			/* 0x0034, sata ctrl reg */
	volatile __u32 Pll8Ctl;			/* 0x0038, PLL8 control, gpu */
	volatile __u32 reserved5;		/* 0x003c, reserved */
	volatile __u32 PllMipi;			/* 0x0040, MIPI PLL control */
	volatile __u32 reserved6;		/* 0x0044, reserved */
	volatile __u32 Pll10Ctl;		/* 0x0048, PLL10 control, de */
	volatile __u32 PllDdr1Ctl;		/* 0x004c, pll ddr1 ctrl reg */
	volatile __ccmu_sysclk_ratio_reg0050_t SysClkDiv;	/* 0x0050, cpux/axi clock divide ratio */
	volatile __ccmu_ahb1_ratio_reg0054_t Ahb1Div;		/* 0x0054, ahb1/apb1 clock divide ratio */
	volatile __ccmu_apb2_ratio_reg0058_t Apb2Div;		/* 0x0058, apb2 clock divide ratio */
	volatile __u32 reserved7;		/* 0x005c, reserved */
	volatile __u32 AhbGate0;		/* 0x0060, bus gating reg0 */
	volatile __u32 AhbGate1;		/* 0x0064, bus gating reg1 */
	volatile __u32 Apb1Gate;		/* 0x0068, bus gating reg2 */
	volatile __u32 Apb2Gate0;		/* 0x006c, bus gating reg3 */
	volatile __u32 Apb2Gate1;		/* 0x0070, bus gating reg4 */
	volatile __u32 Ths;			/* 0x0074, ths clk reg */
	volatile __u32 reserved8[2];		/* 0x0078, reserved */
	volatile __u32 Nand0;			/* 0x0080, nand clock */
	volatile __u32 reserved9;		/* 0x0084, reserved */
	volatile __u32 Sd0;			/* 0x0088, sd/mmc0 clock */
	volatile __u32 Sd1;			/* 0x008c, sd/mmc1 clock */
	volatile __u32 Sd2;			/* 0x0090, sd/mmc2 clock */
	volatile __u32 Sd3;			/* 0x0094, sd/mmc3 clock */
	volatile __u32 Ts;			/* 0x0098, ts clk reg */
	volatile __u32 Ce;			/* 0x009c, ce clk reg */
	volatile __u32 Spi0;			/* 0x00a0, spi controller 0 clock */
	volatile __u32 Spi1;			/* 0x00a4, spi controller 1 clock */
	volatile __u32 Spi2;			/* 0x00a8, spi controller 2 clock */
	volatile __u32 Spi3;			/* 0x00ac, spi controller 3 clock */
	volatile __u32 I2s0;			/* 0x00b0, daudio-0 clock */
	volatile __u32 I2s1;			/* 0x00b4, daudio-1 clock */
	volatile __u32 I2s2;			/* 0x00b8, daudio-2 clock */
	volatile __u32 Ac97;			/* 0x00bc, ac97 clock */
	volatile __u32 SpdifClk;		/* 0x00c0, spdif clock */
	volatile __u32 KeyPadClk;		/* 0x00c4, keypad clock */
	volatile __u32 Sata0Clk;		/* 0x00c8, sata clock */
	volatile __u32 UsbClk;			/* 0x00cc, usb phy clock */
	volatile __u32 Ir0Clk;			/* 0x00d0, ir0 clock */
	volatile __u32 Ir1Clk;			/* 0x00d4, ir1 clock */
	volatile __u32 reserved10[6];		/* 0x00d8, reserved */
	volatile __u32 PllDdrAuxiliary;		/* 0x00f0, pll ddr auxiliary reg */
	volatile __u32 DramCfg;			/* 0x00f4, dram configuration clock */
	volatile __u32 PllDdr1Cfg;		/* 0x00f8, pll ddr1 config reg */
	volatile __u32 MbusResetReg;		/* 0x00fc, mbus reset reg */
	volatile __u32 DramGate;		/* 0x0100, dram module clock */
	volatile __u32 DeClk;			/* 0x0104, DE clock reg */
	volatile __u32 DeMpClk;			/* 0x0108, DE_MP clock reg */
	volatile __u32 reserved11;		/* 0x010c, reserved */
	volatile __u32 Lcd0Ch0;			/* 0x0110, tcon lcd0 clock */
	volatile __u32 Lcd1Ch0;			/* 0x0114, tcon lcd1 clock */
	volatile __u32 Tv0Ch0;			/* 0x0118, tcon tv0 clock */
	volatile __u32 Tv1Ch0;			/* 0x011c, tcon tv1 clock */
	volatile __u32 reserved12;		/* 0x0120, reserved */
	volatile __u32 DeinterlaceClk;		/* 0x0124, deinterlace clock */
	volatile __u32 reserved13[2];		/* 0x0128, reserved */
	volatile __u32 CsiMisc;			/* 0x0130, csi misc clock */
	volatile __u32 Csi0;			/* 0x0134, csi0 module clock */
	volatile __u32 reserved14;		/* 0x0138, reserved */
	volatile __u32 Ve;			/* 0x013c, Ve clock reg */
	volatile __u32 Adda;			/* 0x0140, ac digital clock register */
	volatile __u32 Avs;			/* 0x0144, avs module clock */
	volatile __u32 reserved15[2];		/* 0x0148, reserved */
	volatile __u32 HdmiClk;			/* 0x0150, HDMI clock reg */
	volatile __u32 HdmiSlowClk;		/* 0x0154, HDMI slow clock */
	volatile __u32 reserved16;		/* 0x0158, reserved */
	volatile __u32 MBus0;			/* 0x015C, MBUS controller 0 clock */
	volatile __u32 reserved17;		/* 0x0160, reserved */
	volatile __u32 Gmac;			/* 0x0164, gmac clock */
	volatile __u32 MipiDsiClk;		/* 0x0168, MIPI_DSI clock */
	volatile __u32 reserved18[5];		/* 0x016c-0x017c, reserved */
	volatile __u32 TvE0Clk;			/* 0x0180, TVE0 clock */
	volatile __u32 TvE1Clk;			/* 0x0184, TVE1 clock */
	volatile __u32 TvD0Clk;			/* 0x0188, TVD0 clock */
	volatile __u32 TvD1Clk;			/* 0x018c, TVD1 clock */
	volatile __u32 TvD2Clk;			/* 0x0190, TVD2 clock */
	volatile __u32 TvD3Clk;			/* 0x0194, TVD3 clock */
	volatile __u32 reserved19[2];		/* 0x0198, reserved */
	volatile __u32 GpuClk;			/* 0x01a0, GPU clock */
	volatile __u32 reserved20[19];		/* 0x01a4 ~ 0x01ec, reserved */
	volatile __u32 OutAClk;			/* 0x1f0, clock output A reg */
	volatile __u32 OutBClk;			/* 0x1f4, clock output B reg */
	volatile __u32 reserved21[8];		/* 0x01f8 ~ 0x0214, reserved */
	volatile __u32 PllSataBias;		/* 0x218, pll sata bias reg */
	volatile __u32 PllPeriph1Bias;		/* 0x21c, periph1 hsic bias reg */
	volatile __u32 PllxBias[1];		/* 0x220, pll cpux bias reg */
	volatile __u32 PllAudioBias;		/* 0x224, pll audio bias reg */
	volatile __u32 PllVideo0Bias;		/* 0x228, pll video0 bias reg */
	volatile __u32 PllVeBias;		/* 0x22c, pll ve bias reg */
	volatile __u32 PllDram0Bias;		/* 0x230, pll dram0 bias reg */
	volatile __u32 PllPeriph0Bias;		/* 0x234, pll periph0 bias reg */
	volatile __u32 PllVideo1Bias;		/* 0x238, pll video1 bias reg */
	volatile __u32 PllGpuBias;		/* 0x23c, pll gpu bias reg */
	volatile __u32 reserved22[2];		/* 0x240, reserved */
	volatile __u32 PllDeBias;		/* 0x248, pll de bias reg */
	volatile __u32 PllDram1BiasReg;		/* 0x24c, pll dram1 bias */
	volatile __u32 Pll1Tun;			/* 0x250, pll1 tun, cpux tuning reg */
	volatile __u32 reserved23[3];		/* 0x254-0x25c, reserved */
	volatile __u32 PllDdr0Tun;		/* 0x260, pll ddr0 tuning */
	volatile __u32 reserved24[3];		/* 0x264-0x26c, reserved */
	volatile __u32 pllMipiTun;		/* 0x270, mipi tuning reg */
	volatile __u32 reserved32[2];		/* 0x274-0x278, reserved */
	volatile __u32 PllPeriph1Pattern;	/* 0x27c, pll periph1 pattern control reg */
	volatile __u32 Pll1Pattern;		/* 0x280, pll cpux pattern reg */
	volatile __u32 PllAudioPattern;		/* 0x284, pll audio pattern reg */
	volatile __u32 PllVedio0Pattern;	/* 0x288, pll video0 pattern reg (field name misspelled upstream; kept for ABI) */
	volatile __u32 PllVePattern;		/* 0x28c, pll ve pattern reg */
	volatile __u32 PllDdr0Pattern;		/* 0x290, pll ddr0 pattern reg */
	volatile __u32 reserved25;		/* 0x294, reserved */
	volatile __u32 PllVedio1Pattern;	/* 0x298, pll video1 pattern reg (field name misspelled upstream; kept for ABI) */
	volatile __u32 PllGpuPattern;		/* 0x29c, pll gpu pattern reg */
	volatile __u32 reserved26[2];		/* 0x2a0, reserved */
	volatile __u32 PllDePattern;		/* 0x2a8, pll de pattern reg */
	volatile __u32 PllDram1PatternReg0;	/* 0x2ac, pll dram1 pattern reg0 */
	volatile __u32 PllDram1PatternReg1;	/* 0x2b0, pll dram1 pattern reg1 */
	volatile __u32 reserved27[3];		/* 0x2b4, reserved */
	volatile __u32 AhbReset0;		/* 0x02c0, bus soft reset register 0 */
	volatile __u32 AhbReset1;		/* 0x02c4, bus soft reset register 1 */
	volatile __u32 AhbReset2;		/* 0x02c8, bus soft reset register 2 */
	volatile __u32 reserved28;		/* 0x02cc, reserved */
	volatile __u32 Apb1Reset;		/* 0x02d0, bus soft reset register 3 */
	volatile __u32 reserved29;		/* 0x02d4, reserved */
	volatile __u32 Apb2Reset;		/* 0x02d8, bus soft reset register 4 */
	volatile __u32 reserved30[9];		/* 0x02dc-0x2fc, reserved */
	volatile __u32 PsCtrl;			/* 0x300, PS control register */
	volatile __u32 PsCnt;			/* 0x304, PS counter register */
	volatile __u32 reserved31[2];		/* 0x308, 0x30c reserved */
	volatile __u32 Sys32kClk;		/* 0x310, system 32k clk reg */
	volatile __u32 reserved33[3];		/* 0x314-0x31c, reserved */
	volatile __u32 PllLockCtrl;		/* 0x320, PLL lock ctrl register */
} __ccmu_reg_list_t;

#endif /* __MEM_CCMU_SUN8W11P1_H__ */