text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Uninstall Boost Changer: remove the binary, launcher entry, and icon.
pkgver=4.2.2
if [[ -f /usr/bin/boostchanger ]]
then
    # -f: tolerate an already-missing .desktop/icon so a partial install
    # still cleans up instead of printing rm errors before the success line
    rm -f /usr/bin/boostchanger
    rm -f /usr/share/applications/boostchanger.desktop
    rm -f /usr/share/pixmaps/boostchanger.png
    echo "Boost Changer is successfully uninstalled"
fi
// Copyright 2018 Sogou Inc. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.
package com.sogou.sogocommon.utils;
/**
* Created by jnq on 2016/11/15.
*/
/**
 * Converts between shorts and little-endian byte pairs, and between short
 * arrays and byte arrays.
 *
 * Created by jnq on 2016/11/15.
 */
public class ShortByteUtil {
    /**
     * Convert a short into a two-byte little-endian array.
     *
     * @param number the value to convert
     * @return a two-byte array: index 0 = low byte, index 1 = high byte
     */
    public static byte[] shortToByte(short number) {
        int temp = number;
        byte[] b = new byte[2];
        for (int i = 0; i < b.length; i++) {
            // Keep the lowest 8 bits. Plain cast replaces the deprecated
            // (and allocation-heavy) new Integer(...).byteValue() boxing.
            b[i] = (byte) (temp & 0xff);
            temp = temp >> 8; // shift the next byte into the low position
        }
        return b;
    }

    /**
     * Convert a two-byte little-endian array back into a short.
     *
     * @param b array with at least two bytes: b[0] = low byte, b[1] = high byte
     * @return the reassembled short
     */
    public static short byteToShort(byte[] b) {
        short s0 = (short) (b[0] & 0xff); // low byte
        short s1 = (short) (b[1] & 0xff); // high byte
        s1 <<= 8;
        return (short) (s0 | s1);
    }

    /**
     * Convert a byte array into a short array (used when parsing static data
     * packets). Bytes are consumed in little-endian pairs; a trailing odd
     * byte is now ignored — the old loop read b[i + 1] past the end and threw
     * ArrayIndexOutOfBoundsException for odd-length input.
     *
     * @param b the source bytes
     * @return an array of b.length / 2 shorts
     */
    public static short[] byteArray2ShortArray(byte[] b) {
        int len = b.length / 2;
        short[] re = new short[len];
        byte[] buf = new byte[2];
        for (int index = 0; index < len; index++) {
            buf[0] = b[index * 2];
            buf[1] = b[index * 2 + 1];
            re[index] = byteToShort(buf);
        }
        return re;
    }

    /**
     * Convert a short array back into a byte array (inverse of
     * byteArray2ShortArray; used when building static data packets).
     *
     * @param b the source shorts
     * @return an array of b.length * 2 bytes, as little-endian pairs
     */
    public static byte[] shortArray2ByteArray(short[] b) {
        byte[] rebt = new byte[b.length * 2];
        for (int i = 0; i < b.length; i++) {
            byte[] bt = shortToByte(b[i]);
            rebt[i * 2] = bt[0];
            rebt[i * 2 + 1] = bt[1];
        }
        return rebt;
    }
}
<reponame>kolaczyn/rough-edit
import re
from datetime import timedelta
import os
def escape_string(string):
    """Backslash-escape the shell-special characters used in paths/titles.

    Covers space, single/double quotes, parentheses, and ampersand so the
    string can be passed through a shell wildcard expansion safely.
    """
    special = {' ', "'", '"', '(', ')', '&'}
    return ''.join('\\' + ch if ch in special else ch for ch in string)
def str_to_timedelta(string):
    """Parse the first HH:MM:SS.mmm timestamp in `string` into a timedelta.

    Uses re.search instead of the old findall(...)[0] (resolving the TODO),
    and raises ValueError (rather than a cryptic IndexError) when no
    timestamp is present.
    """
    time_regex_format = r"(\d{2}):(\d{2}):(\d{2})\.(\d{3})"
    match = re.search(time_regex_format, string)
    if match is None:
        raise ValueError(f"no HH:MM:SS.mmm timestamp found in {string!r}")
    hours, minutes, seconds, milliseconds = (int(g) for g in match.groups())
    return timedelta(hours=hours,
                     minutes=minutes,
                     seconds=seconds,
                     milliseconds=milliseconds)
def generate_regex(prase):
    """Build a regex matching `prase` inside a <timestamp><c> word</c> stream.

    Produces "<TS><c> (word)</c>" for each word followed by one trailing
    "<TS>", where TS captures an HH:MM:SS.mmm timestamp.

    Fixes: the '.' before the milliseconds is now escaped (it previously
    matched any character), each word is re.escape()d so phrases containing
    regex metacharacters can't break the pattern, and the debug print was
    removed.
    """
    time_regex = r"(\d{2}:\d{2}:\d{2}\.\d{3})"
    parts = []
    for word in prase.split(" "):
        parts.append(f"<{time_regex}><c> ({re.escape(word)})</c>")
    parts.append(f"<{time_regex}>")
    return "".join(parts)
|
<filename>SMS/src/SMS/wwwroot/lib/jquery/src/manipulation/_evalUrl.min.js
// Vendored jQuery AJAX helper (minified build artifact — do not edit by hand).
// _evalUrl fetches a script URL synchronously (async:!1) with global AJAX
// events suppressed (global:!1) and "throws":!0 so network/parse errors
// propagate to the caller instead of being swallowed.
define(["../ajax"],function(a){return a._evalUrl=function(r){return a.ajax({url:r,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})},a._evalUrl});
//# sourceMappingURL=_evalUrl.min.js.map
|
package main
import (
"log"
"sync"
)
// tabWithContext pairs a UI tab with the connection/state context it renders.
type tabWithContext struct {
	tabContext
	tab tab
}

// tabContext carries the server/channel/privmsg state a tab operates on.
type tabContext struct {
	servConn  *serverConnection
	servState *serverState
	chanState *channelState
	pmState   *privmsgState
}

// finderFunc reports whether a given tab matches some predicate.
type finderFunc func(*tabWithContext) bool

// tabRequestCreate asks the manager goroutine to create a tab context (or
// return an existing one when finder is non-nil); the result arrives on ret.
type tabRequestCreate struct {
	ctx    *tabContext
	index  int
	finder finderFunc
	ret    chan *tabWithContext
}

// tabRequestCount asks for the current number of tracked tabs.
type tabRequestCount struct {
	ret chan int
}

// tabRequestSearch asks for all tabs matching finder.
type tabRequestSearch struct {
	finder finderFunc
	ret    chan []*tabWithContext
}

// tabRequestDelete asks the manager to remove the given tabs.
type tabRequestDelete struct {
	tabs []*tabWithContext
	ret  chan struct{}
}

// tabManager serializes tab-list mutations through a single service
// goroutine (started by newTabManager) that reads the request channels
// below until destroy is closed.
type tabManager struct {
	tabs    []*tabWithContext
	create  chan *tabRequestCreate
	count   chan *tabRequestCount
	search  chan *tabRequestSearch
	delete  chan *tabRequestDelete
	destroy chan struct{}
}
// Shutdown stops the manager's service goroutine by closing its destroy
// channel. Requests sent after Shutdown will block forever.
func (tabMan *tabManager) Shutdown() {
	close(tabMan.destroy)
}
// Create asks the manager goroutine for a new tab context built from ctx and
// blocks until it is returned. The request itself is sent from a helper
// goroutine.
// NOTE(review): CreateIfNotFound sends its request inline — confirm whether
// the extra goroutine here is intentional.
func (tabMan *tabManager) Create(ctx *tabContext, index int) *tabWithContext {
	ret := make(chan *tabWithContext)
	go func() {
		tabMan.create <- &tabRequestCreate{ctx, index, nil, ret}
	}()
	return <-ret
}
// CreateIfNotFound returns the first existing tab matching finder, or
// creates a new one from ctx when none matches. Blocks until the manager
// goroutine replies.
func (tabMan *tabManager) CreateIfNotFound(ctx *tabContext, index int, finder finderFunc) *tabWithContext {
	ret := make(chan *tabWithContext)
	tabMan.create <- &tabRequestCreate{ctx, index, finder, ret}
	return <-ret
}
// Len reports the number of tabs tracked by the manager. The count is taken
// by the manager goroutine, so it is a consistent snapshot.
func (tabMan *tabManager) Len() int {
	ret := make(chan int)
	tabMan.count <- &tabRequestCount{ret}
	return <-ret
}
// Find returns the first tab matching finder, or nil when nothing matches.
func (tabMan *tabManager) Find(finder finderFunc) *tabWithContext {
	matches := tabMan.FindAll(finder)
	if len(matches) == 0 {
		return nil
	}
	return matches[0]
}
// FindAll returns every tab (with a non-nil UI tab) matching finder, by
// delegating the scan to the manager goroutine.
// NOTE(review): the len(tabMan.tabs) fast path reads the slice outside the
// manager goroutine, which mutates it — this is an unsynchronized read;
// confirm whether it is acceptable here.
func (tabMan *tabManager) FindAll(finder finderFunc) []*tabWithContext {
	if len(tabMan.tabs) == 0 {
		return nil
	}
	ret := make(chan []*tabWithContext)
	tabMan.search <- &tabRequestSearch{finder, ret}
	return <-ret
}
//
// finder funcs
// usage: tabMan.Find(currentTabFinder)
// tabMan.Find(serverTabFinder(servState))
// tabMan.Find(channelTabFinder(chanState))
// tabMan.Find(someotherTabFinder) ...
//
// allTabsFinder matches every tab unconditionally.
func allTabsFinder(t *tabWithContext) bool {
	return true
}
// currentTabFinder matches the tab whose index equals the tab widget's
// currently selected index (tabWidget is a package-level UI handle).
func currentTabFinder(t *tabWithContext) bool {
	return t.tab != nil && t.tab.Index() == tabWidget.CurrentIndex()
}
// allServerTabsFinder returns a finder matching every tab that belongs to
// the given server, regardless of channel or privmsg state.
func allServerTabsFinder(servState *serverState) finderFunc {
	return func(t *tabWithContext) bool {
		// Collapsed from the old if/return-true/return-false chain.
		return t.servState == servState
	}
}
// currentServerTabFinder returns a finder matching tabs on the given server
// that are either the currently focused tab, or the server's own status tab
// (no channel state and no privmsg state).
func currentServerTabFinder(servState *serverState) finderFunc {
	return func(t *tabWithContext) bool {
		if t.servState != servState {
			// Both original clauses required a server match.
			return false
		}
		return currentTabFinder(t) || (t.chanState == nil && t.pmState == nil)
	}
}
// identityFinder returns a finder matching exactly the given tab instance.
func identityFinder(me tab) finderFunc {
	return func(t *tabWithContext) bool {
		// Collapsed from the old if/return-true/return-false chain.
		return t.tab == me
	}
}
// Delete asks the manager goroutine to remove the given tabs and blocks
// until the removal has been processed.
func (tabMan *tabManager) Delete(tabs ...*tabWithContext) {
	done := make(chan struct{})
	tabMan.delete <- &tabRequestDelete{tabs: tabs, ret: done}
	<-done
}
var t_mu sync.Mutex
// newTabManager builds a tabManager and starts its service goroutine. All
// tab-list operations are funneled through that single goroutine, one
// request at a time, until Shutdown closes the destroy channel.
func newTabManager() *tabManager {
	tabMan := &tabManager{
		tabs:    []*tabWithContext{},
		create:  make(chan *tabRequestCreate),
		count:   make(chan *tabRequestCount),
		search:  make(chan *tabRequestSearch),
		delete:  make(chan *tabRequestDelete),
		destroy: make(chan struct{}),
	}
	go func() {
		for {
		// "here" labels the select statement: "break here" inside the
		// create case exits the whole select (not just the inner range
		// loop) and moves on to the next request.
		here:
			select {
			case <-tabMan.destroy:
				// Shutdown was called; stop servicing requests.
				return
			case req := <-tabMan.create:
				t_mu.Lock()
				if req.finder != nil {
					// Caller wants reuse: hand back the first existing
					// matching tab instead of creating a duplicate.
					for _, t := range tabMan.tabs {
						if req.finder(t) {
							t_mu.Unlock()
							req.ret <- t
							log.Println("found ctx:", t)
							break here
						}
					}
				}
				// No match (or no finder): record a fresh context entry.
				// NOTE(review): req.index is never consulted here — confirm
				// whether it should control the insertion position.
				t := &tabWithContext{}
				t.servConn = req.ctx.servConn
				t.servState = req.ctx.servState
				t.chanState = req.ctx.chanState
				t.pmState = req.ctx.pmState
				tabMan.tabs = append(tabMan.tabs, t)
				log.Println("created ctx:", t)
				t_mu.Unlock()
				req.ret <- t
			case req := <-tabMan.count:
				req.ret <- len(tabMan.tabs)
			case req := <-tabMan.search:
				// Only entries with an attached UI tab are searchable.
				ret := []*tabWithContext{}
				for _, t := range tabMan.tabs {
					if t.tab != nil && req.finder(t) {
						ret = append(ret, t)
					}
				}
				req.ret <- ret
			case req := <-tabMan.delete:
				// Collect the UI indexes first, then drop each matching
				// entry from the slice.
				// NOTE(review): t.tab.Index() assumes every entry has a
				// non-nil tab, and tabMan.tabs is re-sliced while being
				// ranged over — confirm both are safe for the callers of
				// Delete.
				indices := []int{}
				for _, t := range req.tabs {
					indices = append(indices, t.tab.Index())
				}
				for _, index := range indices {
					for i, t := range tabMan.tabs {
						if t.tab.Index() == index {
							tabMan.tabs = append(tabMan.tabs[0:i], tabMan.tabs[i+1:]...)
						}
					}
				}
				req.ret <- struct{}{}
				// t.Close()
			}
		}
	}()
	return tabMan
}
|
import React from "react"
import { Badge, Card } from "react-bootstrap"
import {
FaUserMd,
FaStethoscope,
FaAddressBook,
FaHospitalSymbol,
FaHourglass,
} from "react-icons/fa"
import PropTypes from "prop-types"
import { useIntl, Link } from "gatsby-plugin-intl"
import Flag from "../flag"
import Avatar from "../avatar"
// Card presenting a single doctor: avatar, name, speciality and contact,
// plus — outside the compact index view — location, availability and a
// report flag. Stored text fields use "*" as an internal separator, which
// is rendered as ",".
const Doctor = ({
  name,
  speciality,
  contact,
  location,
  time,
  link,
  uid,
  index,
}) => {
  // Making useIntl available in the code
  const intl = useIntl()
  // Use language iso for the routes
  //const locale = intl.locale !== "en" ? `/${intl.locale}` : ""
  return (
    <Card style={{ width: "24rem" }}>
      <Card.Body>
        <Card.Title className="d-flex align-items-center">
          {/* <FaUserMd /> */}
          <Avatar img={uid} />
          {/* "*" separators in the stored name become commas for display */}
          <span>{name.trim().replace(/\*/g, ",")}</span>
        </Card.Title>
        <hr />
        <Card.Subtitle className="d-flex align-items-center text-muted pl-1">
          <FaStethoscope />
          <Badge variant="light" className="doctor-item-content">
            {speciality.trim().replace(/\*/g, ",")}
          </Badge>
        </Card.Subtitle>
        <div className="pl-1">
          <div className="d-flex align-items-center">
            <FaAddressBook />
            <Badge className="doctor-item-content">
              {contact.trim().replace(/\*/g, ",")}
            </Badge>
          </div>
          {/* Detail-only fields: hidden when `index` (index-listing mode)
              is truthy */}
          {!index && (
            <>
              <div className="d-flex align-items-center">
                <FaHospitalSymbol />
                <Badge className="doctor-item-content">
                  {location.trim().replace(/\*/g, ",")}
                </Badge>
              </div>
              <div className="d-flex align-items-center ">
                <FaHourglass />
                <Badge className="doctor-item-content">
                  {time.trim().replace(/\*/g, ",")}
                </Badge>
              </div>
            </>
          )}
        </div>
        <br />
        <div className="d-flex justify-content-between">
          {/* Localized "more" link to the doctor's detail page */}
          <Link to={`/doctor/${link}/`}>
            {intl.formatMessage({ id: "more" })}
          </Link>
          {!index && <Flag uid={uid} />}
        </div>
      </Card.Body>
    </Card>
  )
}
// Runtime prop validation. `index` switches the card into its compact
// index-listing mode (location/time/flag are hidden by the component).
Doctor.propTypes = {
  name: PropTypes.string,
  speciality: PropTypes.string,
  contact: PropTypes.string,
  location: PropTypes.string,
  time: PropTypes.string,
  link: PropTypes.string,
  uid: PropTypes.string,
  index: PropTypes.bool,
}
export default Doctor
|
# Upload dSYMs to Firebase Crashlytics, picking the plist that matches the
# current build configuration (Release vs. everything else).
PATH_TO_GOOGLE_PLISTS="${PROJECT_DIR}/RoyaleApp"

case "${CONFIGURATION}" in
  Release)
    "${PROJECT_DIR}/scripts/FirebaseCrashlytics/run" -gsp "$PATH_TO_GOOGLE_PLISTS/GoogleService-Info.plist"
    ;;
  *)
    "${PROJECT_DIR}/scripts/FirebaseCrashlytics/run" -gsp "$PATH_TO_GOOGLE_PLISTS/GoogleService-Info-Dev.plist"
    ;;
esac
|
<reponame>wjimenez5271/migration-tools
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package docker
import "testing"
// TestEnsureThinLsKernelVersion verifies the kernel-version gating for
// thin_ls: generic kernels need 4.4.0+, RHEL/Centos 7.x kernels need release
// 366+, and unparsable versions produce descriptive errors.
func TestEnsureThinLsKernelVersion(t *testing.T) {
	tests := []struct {
		version       string
		expectedError string
	}{
		{"4.4.0-31-generic", ""},
		{"4.4.1", ""},
		{"4.6.4-301.fc24.x86_64", ""},
		{"3.10.0-327.22.2.el7.x86_64", `RHEL/Centos 7.x kernel version 3.10.0-366 or later is required to use thin_ls - you have "3.10.0-327.22.2.el7.x86_64"`},
		{"3.10.0-366.el7.x86_64", ""},
		{"3.10.0-366.el7_3.x86_64", ""},
		{"3.10.0.el7.abc", `unable to determine RHEL/Centos 7.x kernel release from "3.10.0.el7.abc"`},
		{"3.10.0-abc.el7.blarg", `unable to determine RHEL/Centos 7.x kernel release from "3.10.0-abc.el7.blarg"`},
		{"3.10.0-367.el7.x86_64", ""},
		{"3.10.0-366.x86_64", `kernel version 4.4.0 or later is required to use thin_ls - you have "3.10.0-366.x86_64"`},
		{"3.10.1-1.el7.x86_64", ""},
		{"2.0.36", `kernel version 4.4.0 or later is required to use thin_ls - you have "2.0.36"`},
		{"2.1", `error parsing kernel version: "2.1" is not a semver`},
	}
	for _, test := range tests {
		err := ensureThinLsKernelVersion(test.version)
		// A switch replaces the old if/else chain, whose final branch
		// re-checked err == nil redundantly.
		switch {
		case err == nil && test.expectedError != "":
			t.Errorf("%s: expected error %v", test.version, test.expectedError)
		case err != nil && test.expectedError == "":
			t.Errorf("%s: expected no error, got %v", test.version, err)
		case err != nil && err.Error() != test.expectedError:
			t.Errorf("%s: expected error %v, got %v", test.version, test.expectedError, err)
		}
	}
}
|
-- phpMyAdmin SQL Dump
-- version 4.8.3
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Oct 23, 2018 at 01:12 PM
-- Server version: 10.1.36-MariaDB
-- PHP Version: 5.6.38
-- NOTE(review): passwords (`apw`, `upw`) are stored in plaintext and the
-- tables below have no primary keys or indexes — flag for remediation.
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `task3`
--
-- --------------------------------------------------------
--
-- Table structure for table `aregister` (admin accounts)
--
CREATE TABLE `aregister` (
`aname` varchar(100) NOT NULL,
`aemail` varchar(100) NOT NULL,
`acno` varchar(10) NOT NULL,
`aadd` varchar(100) NOT NULL,
`apw` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `aregister`
--
INSERT INTO `aregister` (`aname`, `aemail`, `acno`, `aadd`, `apw`) VALUES
('Admin', '<EMAIL>', '0934375964', '#64 Main road cubbenpet bangalore', '123');
-- --------------------------------------------------------
--
-- Table structure for table `caluculatebill` (bill calculations)
-- NOTE(review): table name keeps the original "caluculatebill" misspelling
-- for compatibility with existing application queries.
--
CREATE TABLE `caluculatebill` (
`cname` varchar(100) NOT NULL,
`bno` varchar(100) NOT NULL,
`month` varchar(100) NOT NULL,
`edate` varchar(100) NOT NULL,
`money` varchar(100) NOT NULL,
`status` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `caluculatebill`
-- NOTE(review): `edate` mixes date formats (10-10-2018 vs 2018-10-11);
-- dates and amounts are stored as varchar.
--
INSERT INTO `caluculatebill` (`cname`, `bno`, `month`, `edate`, `money`, `status`) VALUES
('Megha', '444', 'January', '10-10-2018', '6700', 'Paid'),
('Megha', '34567', 'January', '2018-10-11', '3400', 'Paid'),
('shivam', '1', 'January', '2018-10-13', '50', 'Not Paid'),
('Nisha', '12', 'January', '2018-10-20', '20', 'Not Paid');
-- --------------------------------------------------------
--
-- Table structure for table `contactus` (contact-form submissions)
--
CREATE TABLE `contactus` (
`contactname` varchar(100) NOT NULL,
`contactemail` varchar(100) NOT NULL,
`contactsubject` varchar(1000) NOT NULL,
`contactmessage` varchar(1000) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `feedback` (customer feedback, keyed by name)
--
CREATE TABLE `feedback` (
`cid` varchar(100) NOT NULL,
`feedback` mediumtext NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `feedback`
--
INSERT INTO `feedback` (`cid`, `feedback`) VALUES
('Megha', 'GOOD'),
('shivam', 'good'),
('Megha', 'bad');
-- --------------------------------------------------------
--
-- Table structure for table `payment` (payment records per bill)
--
CREATE TABLE `payment` (
`cid` varchar(100) NOT NULL,
`sno` varchar(100) NOT NULL,
`month` varchar(100) NOT NULL,
`payvia` varchar(100) NOT NULL,
`status` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `payment`
--
INSERT INTO `payment` (`cid`, `sno`, `month`, `payvia`, `status`) VALUES
('Megha', '444', 'January', '', ''),
('Megha', '34567', 'January', '', '');
-- --------------------------------------------------------
--
-- Table structure for table `update1` (single-value setting)
--
CREATE TABLE `update1` (
`update1` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `update1`
--
INSERT INTO `update1` (`update1`) VALUES
('10');
-- --------------------------------------------------------
--
-- Table structure for table `uregister` (user accounts)
--
CREATE TABLE `uregister` (
`uname` varchar(100) NOT NULL,
`uemail` varchar(100) NOT NULL,
`ucno` varchar(10) NOT NULL,
`uadd` varchar(100) NOT NULL,
`upw` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `uregister`
--
INSERT INTO `uregister` (`uname`, `uemail`, `ucno`, `uadd`, `upw`) VALUES
('Megha', '<EMAIL>', '123456789', 'bangalore', '123'),
('Nisha', '<EMAIL>', '8123456789', 'Bangalore', '123'),
('shivam', '<EMAIL>', '123456789', 'Bangalore', '123');
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
from typing import List, Tuple, Any
def sort_results(results: List[Tuple[Any, Any, Any]]) -> List[Tuple[Any, Any, Any]]:
    """Return `results` sorted by each tuple's third element, descending.

    Uses reverse=True rather than negating the key, so it also works for
    non-numeric (but orderable) third elements; the sort stays stable either
    way.
    """
    return sorted(results, key=lambda r: r[2], reverse=True)
// Each of these functions contructs a path for an endpoint.
const { Params } = require('../params.js')
// Endpoint-path constructors. Each function validates its inputs through
// Params (required vs. defaulted) and returns the API path as a string.
// remoteName defaults to 'local' and branchName to 'main' throughout.
module.exports = {
  // Branch collection for a database.
  branch (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    return `/api/branch/${orgName}/${dbName}`
  },
  // A specific branch under the /api prefix.
  branchTarget (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    const remoteName = params.string('remoteName', 'local')
    const branchName = params.string('branchName', 'main')
    return `/api/branch/${orgName}/${dbName}/${remoteName}/branch/${branchName}`
  },
  // Same shape as branchTarget but WITHOUT the /api prefix.
  // NOTE(review): presumably intentional (origin descriptor rather than a
  // request path) — confirm against the callers.
  branchOrigin (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    const remoteName = params.string('remoteName', 'local')
    const branchName = params.string('branchName', 'main')
    return `/${orgName}/${dbName}/${remoteName}/branch/${branchName}`
  },
  // Database resource.
  db (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    return `/api/db/${orgName}/${dbName}`
  },
  // Document collection for a database.
  document (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    return `/api/document/${orgName}/${dbName}`
  },
  // Documents as of a specific commit.
  documentCommit (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    const commitId = params.stringRequired('commitId')
    const remoteName = params.string('remoteName', 'local')
    return `/api/document/${orgName}/${dbName}/${remoteName}/commit/${commitId}`
  },
  // Commit-log documents for a branch remote.
  documentCommits (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    const remoteName = params.string('remoteName', 'local')
    return `/api/document/${orgName}/${dbName}/${remoteName}/_commits`
  },
  // Meta documents for a database.
  documentMeta (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    return `/api/document/${orgName}/${dbName}/_meta`
  },
  // System document graph (no parameters).
  documentSystem () {
    return '/api/document/_system'
  },
  // System schema frames (no parameters).
  frameSystem () {
    return '/api/schema/_system'
  },
  // Triples for an arbitrary resource descriptor.
  triples (resource) {
    return `/api/triples/${resource}`
  },
  // Triples for a branch graph; graph defaults to 'instance'.
  triplesBranch (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    const remoteName = params.string('remoteName', 'local')
    const branchName = params.string('branchName', 'main')
    const graph = params.string('graph', 'instance')
    return `/api/triples/${orgName}/${dbName}/${remoteName}/branch/${branchName}/${graph}`
  },
  // System schema triples (no parameters).
  triplesSystem () {
    return '/api/triples/_system/schema'
  },
  // Remote configuration for a database.
  remote (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    return `/api/remote/${orgName}/${dbName}`
  },
  // Generic diff endpoint (no parameters).
  diff () {
    return '/api/diff'
  },
  // Diff between versions of a database.
  versionDiff (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    return `/api/diff/${orgName}/${dbName}`
  },
  // Organization management (no parameters).
  organization () {
    return '/api/organization'
  },
  // Patch application endpoint (no parameters).
  patch () {
    return '/api/patch'
  },
  // Prefixes for a database.
  prefixes (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    return `/api/prefixes/${orgName}/${dbName}`
  },
  // WOQL query endpoint for a database.
  woqlResource (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    return `/api/woql/${orgName}/${dbName}`
  },
  // Apply a diff to a branch.
  apply (params, extra) {
    params = new Params({ ...params, ...extra })
    const orgName = params.stringRequired('orgName')
    const dbName = params.stringRequired('dbName')
    const branchName = params.string('branchName', 'main')
    const remoteName = params.string('remoteName', 'local')
    return `/api/apply/${orgName}/${dbName}/${remoteName}/branch/${branchName}`
  },
}
|
<reponame>JakeSidSmith/watfish<gh_stars>0
import * as childProcess from 'child_process';
import * as colors from 'colors/safe';
import { Tree } from 'jargs';
import * as path from 'path';
import { DEFAULT_ENV, DISAPPROVAL, WAT } from './constants';
import * as logger from './logger';
import {
getConfigPath,
getEnvVariables,
getIn,
getProjectName,
handleShebang,
injectEnvVars,
loadWtfJson,
onClose,
} from './utils';
// runCommand spawns `command` with the project's environment applied.
// Env precedence (lowest to highest): the etc/environments/<env>/env file,
// the project's wtf.json config, then the current process.env.
export const runCommand = (
  commandAndOptions: undefined | ReadonlyArray<string | undefined> = [],
  env: string,
  rest: undefined | ReadonlyArray<string | undefined> = []
) => {
  const [command, ...commandOptions] = commandAndOptions;
  if (typeof command === 'undefined') {
    logger.log('No command supplied');
    return process.exit(1);
  }
  // Easter egg: refuse to run "wtf" through itself.
  if (command === 'wtf') {
    logger.log(colors.red(WAT + 'Wat are you doing? ' + DISAPPROVAL));
    return process.exit(1);
  }
  const configPath = getConfigPath();
  const projectName = getProjectName();
  const wtfJson = loadWtfJson(configPath, projectName, env);
  const configEnvVariables = getIn(wtfJson, [projectName, 'env', env]) || {};
  const envPath = path.join(process.cwd(), 'etc/environments', env, 'env');
  const envVariables = getEnvVariables(envPath);
  const environment: {[i: string]: string} = {
    ...envVariables,
    ...configEnvVariables,
    ...process.env, // existing process env wins over file/config values
    PYTHONUNBUFFERED: 'true', // always set, regardless of the above
  };
  // handleShebang / injectEnvVars come from ./utils.
  const resolvedCommand = handleShebang(command);
  const resolvedCommandOptions = injectEnvVars([...commandOptions, ...rest], environment);
  const subProcess = childProcess.spawn(
    resolvedCommand,
    resolvedCommandOptions,
    {
      cwd: process.cwd(),
      shell: true,
      env: environment,
      stdio: 'inherit', // stream child output straight to this terminal
    }
  );
  logger.log(colors.green(`Running ${resolvedCommand} ${resolvedCommandOptions.join(' ')}`));
  logger.log(colors.green(`PID: ${subProcess.pid}, Parent PID: ${process.pid}\n`));
  subProcess.on('close', (code) => onClose('', code));
};
const run = (tree: Tree) => {
let { command } = tree.args;
let { env } = tree.kwargs;
let { rest } = tree;
command = Array.isArray(command) ? command : [];
rest = Array.isArray(rest) ? rest : [];
env = typeof env === 'string' ? env : DEFAULT_ENV;
runCommand(command, env, rest);
};
export default run;
|
package com.example.mypc.esports2.main.news.newsinner;
import android.content.Intent;
import android.support.v4.view.PagerAdapter;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import com.bumptech.glide.Glide;
import com.example.mypc.esports2.bean.AdBean;
import com.example.mypc.esports2.main.news.newsdetail.NewsDetailActivity;
import java.util.List;
/**
* Created by peter on 2016/8/4.
*/
/**
 * Banner-ad adapter for the news page ViewPager.
 * Reports Integer.MAX_VALUE pages so the pager can scroll "endlessly";
 * positions are mapped back onto the real ad list with position % size.
 *
 * Created by peter on 2016/8/4.
 */
public class NewsInnerViewpagerAdapter extends PagerAdapter {

    // Backing list of ads; never resized here, only read.
    private List<AdBean> list;

    public NewsInnerViewpagerAdapter(List<AdBean> list) {
        this.list = list;
    }

    /**
     * Huge count enables the infinite-scroll illusion; real pages are
     * position % list.size().
     */
    @Override
    public int getCount() {
        return Integer.MAX_VALUE;
    }

    @Override
    public boolean isViewFromObject(View view, Object object) {
        return view == object;
    }

    /**
     * Create the page view: an ImageView loaded via Glide with the ad's
     * cover image. Tapping it opens NewsDetailActivity with the AdBean as
     * the "adbean" extra.
     */
    @Override
    public Object instantiateItem(final ViewGroup container, final int position) {
        ImageView iv = new ImageView(container.getContext());
        Glide.with(container.getContext()).load(list.get(position % list.size()).getCoverLink()).into(iv);
        container.addView(iv);
        iv.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                AdBean adBean = list.get(position % list.size());
                Intent intent = new Intent(container.getContext(), NewsDetailActivity.class);
                intent.putExtra("adbean",adBean);
                container.getContext().startActivity(intent);
            }
        });
        return iv;
    }

    @Override
    public void destroyItem(ViewGroup container, int position, Object object) {
        // `object` is the ImageView returned by instantiateItem above.
        container.removeView((ImageView) object);
    }
}
|
#!/bin/sh -f
# Vivado 2015.2 elaboration driver: runs xelab to elaborate the lab3_2_tb
# testbench into the lab3_2_tb_behav simulation snapshot.
xv_path="/home/huchao/vivado/Vivado/2015.2"
# ExecStep runs its arguments and aborts the whole script with the failing
# command's exit status.
ExecStep()
{
    "$@"
    RETVAL=$?
    if [ $RETVAL -ne 0 ]
    then
        exit $RETVAL
    fi
}
# -mt 8: multi-threaded; --debug typical + --relax: standard debug settings;
# output logged to elaborate.log.
ExecStep $xv_path/bin/xelab -wto 677a86e31c004618b5999f494e300815 -m64 --debug typical --relax --mt 8 -L xil_defaultlib -L unisims_ver -L unimacro_ver -L secureip --snapshot lab3_2_tb_behav xil_defaultlib.lab3_2_tb xil_defaultlib.glbl -log elaborate.log
|
package io.miti.jarman.data;
import io.miti.jarman.gui.JarPage;
import io.miti.jarman.gui.ListingsPage;
import io.miti.jarman.gui.ManifestPage;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.Map.Entry;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import io.miti.jarman.data.FileData;
import io.miti.jarman.data.JarData;
import io.miti.jarman.data.JarStatus;
/**
* The data used by the app.
*
* @author mwallace
* @version 1.0
*/
public final class JarData
{
  /**
   * The one instance of this class (lazily created by getInstance()).
   */
  private static JarData data = null;

  /**
   * The parent jar file; null until setJarFile() is called.
   */
  private File jar = null;

  /**
   * The manifest entries (key/value pairs) of the parent jar.
   */
  private Map<String, String> manifest = new HashMap<String, String>(20);

  /**
   * The list of data on each file in each jar.
   */
  private List<FileData> filedata = new ArrayList<FileData>(20);

  /**
   * Indexes into filedata of entries that match another entry on both
   * path/name and CRC (populated by checkForDuplicates()).
   */
  private List<Integer> fileMatchIndexes = new ArrayList<Integer>(20);

  /**
   * Indexes into filedata of entries that match another entry on
   * file path/name.
   */
  private List<Integer> nameMatchIndexes = new ArrayList<Integer>(20);

  /**
   * The jars in the class path, and the status of each (found or not).
   */
  private List<JarStatus> jarlist = new ArrayList<JarStatus>(20);
  /**
   * Default constructor; private to enforce the singleton pattern.
   */
  private JarData()
  {
    super();
  }
  /**
   * Get the one instance of this class, creating it lazily on first use.
   * NOTE(review): not synchronized — confirm all calls happen on a single
   * (presumably the Swing EDT) thread.
   *
   * @return the one instance of this class
   */
  public static JarData getInstance()
  {
    if (data == null)
    {
      data = new JarData();
    }
    return data;
  }
/**
* Return whether there is data in the cache.
*
* @return whether there is data in the cache
*/
public static boolean hasData()
{
return ((data != null) && (data.jar != null));
}
  /**
   * Reset the data singleton and empty the three GUI tables
   * (listings, manifest, and jar pages).
   */
  public static void reset()
  {
    data = null;
    ListingsPage.getInstance().emptyTable();
    ManifestPage.getInstance().emptyTable();
    JarPage.getInstance().emptyTable();
  }
  /**
   * Set the parent jar file whose contents this cache describes.
   *
   * @param pJar the parent jar file
   */
  public void setJarFile(final File pJar)
  {
    jar = pJar;
  }
/**
* Return the name of the parent jar file.
*
* @return the name of the parent jar file
*/
public String getJarFileName()
{
if (jar == null)
{
return null;
}
return jar.getAbsolutePath();
}
  /**
   * Add a manifest entry; an existing entry with the same key is replaced.
   *
   * @param key the key
   * @param value the value
   */
  public void addManifestEntry(final String key, final String value)
  {
    manifest.put(key, value);
  }
  /**
   * Return the name of the main class from the manifest's "Main-Class"
   * entry. Can be null when the manifest has no such entry.
   *
   * @return the name of the main class, or null
   */
  public String getMainClass()
  {
    return manifest.get("Main-Class");
  }
  /**
   * Return a file iterator over the jars in the manifest's "Class-Path",
   * each resolved relative to the parent jar's directory, or null when the
   * manifest has no Class-Path entry. Entries are split on spaces, so paths
   * containing spaces are not supported.
   *
   * @return a file iterator over the jars in the class path, or null
   */
  public Iterator<File> getPath()
  {
    String path = manifest.get("Class-Path");
    if (path == null)
    {
      return null;
    }
    // Class-Path entries are space-separated and relative to the jar's dir
    StringTokenizer st = new StringTokenizer(path, " ");
    List<File> dirs = new ArrayList<File>(20);
    while (st.hasMoreTokens())
    {
      String token = st.nextToken().trim();
      File child = new File(jar.getParentFile(), token);
      dirs.add(child);
    }
    return dirs.iterator();
  }
  /**
   * Add the jar entry to the file-data list. Directories are not saved.
   *
   * @param file the parent jar file
   * @param entry the jar entry
   * @return whether the entry was saved (directories are skipped)
   */
  public boolean addJarEntry(final File file, final JarEntry entry)
  {
    // Skip directories
    if (entry.isDirectory())
    {
      return false;
    }
    // Make sure the name doesn't end with a slash (also a directory marker)
    final String name = entry.getName();
    final int slashIndex = getLastSlashIndex(name);
    if (slashIndex == (name.length() - 1))
    {
      return false;
    }
    // Parse the file name and full path from the full name
    final String fname = ((slashIndex >= 0) ? name.substring(slashIndex + 1) : name);
    final String fpath = (slashIndex <= 0) ? "" : name.substring(0, slashIndex);
    // Add the entry to the list
    filedata.add(new FileData(file, entry, fname, fpath, false, false));
    return true;
  }
  /**
   * Add a plain file to the file-data list (overload for files that are not
   * inside a jar). Directories are not saved.
   *
   * @param file the file to add
   * @return whether the entry was saved (directories are skipped)
   */
  public boolean addJarEntry(final File file)
  {
    // Make sure the name doesn't end with a slash (directory marker)
    final String name = file.getAbsolutePath();
    final int slashIndex = getLastSlashIndex(name);
    if (slashIndex == (name.length() - 1))
    {
      return false;
    }
    // Parse the file name and full path from the full name
    final String fname = ((slashIndex >= 0) ? name.substring(slashIndex + 1) : name);
    final String fpath = (slashIndex <= 0) ? "" : name.substring(0, slashIndex);
    // Add the entry to the list
    filedata.add(new FileData(file, fname, fpath, false, false));
    return true;
  }
/**
* Return the index of the last slash (forward or backward) in the filename.
*
* @param name the name of the file (full path)
* @return the index of the last slash, or -1 if not found
*/
public static int getLastSlashIndex(final String name)
{
int forwardIndex = name.lastIndexOf('/');
int backIndex = name.lastIndexOf('\\');
int index = -1;
if ((forwardIndex < 0) && (backIndex < 0))
{
index = -1;
}
else if (forwardIndex > 0)
{
index = forwardIndex;
}
else if (backIndex > 0)
{
index = backIndex;
}
else
{
index = Math.max(backIndex, forwardIndex);
}
return index;
}
  /**
   * Return the number of rows to show in the jar contents table, based on
   * the Listings page display mode: 0 = all files, 1 = path/name matches,
   * 2 = name+CRC matches.
   *
   * @return the number of records to show in the jar contents table
   */
  public int getJarDataCount()
  {
    if (ListingsPage.isNull())
    {
      return 0;
    }
    switch (ListingsPage.getInstance().getDisplayIndex())
    {
      case 0: return filedata.size();
      case 1: return nameMatchIndexes.size();
      case 2: return fileMatchIndexes.size();
      default: break;
    }
    return 0;
  }
  /**
   * Return an iterator to all the file data, ignoring the current display
   * mode. Returns null when the Listings page does not exist yet.
   *
   * @return an iterator to all the file data, or null
   */
  public Iterator<FileData> getAllFileData()
  {
    if (ListingsPage.isNull())
    {
      return null;
    }
    return filedata.iterator();
  }
  /**
   * Get the data for the specified row in the Listings table. In display
   * modes 1 and 2 the row is translated through the name/CRC match index
   * lists; mode 0 indexes filedata directly.
   *
   * @param rowIndex the row we need data for
   * @return the data for that row, or null when the page does not exist
   */
  public FileData getJarFileData(final int rowIndex)
  {
    if (ListingsPage.isNull())
    {
      return null;
    }
    int index = rowIndex;
    switch (ListingsPage.getInstance().getDisplayIndex())
    {
      case 1: index = nameMatchIndexes.get(rowIndex).intValue();
              break;
      case 2: index = fileMatchIndexes.get(rowIndex).intValue();
              break;
      default: break;
    }
    return filedata.get(index);
  }
  /**
   * Return the number of entries in the manifest (rows for the manifest
   * table).
   *
   * @return the number of entries in the manifest
   */
  public int getManifestCount()
  {
    return manifest.size();
  }
  /**
   * Force an update of the JTables in the application.
   *
   * @param updateListingOnly whether to only update the listing table
   *        (true skips the manifest and jar tables)
   */
  public void resetTables(final boolean updateListingOnly)
  {
    ListingsPage.getInstance().updateTable();
    if (!updateListingOnly)
    {
      ManifestPage.getInstance().updateTable();
      JarPage.getInstance().updateTable();
    }
  }
  /**
   * Return the manifest value for the JTable. The column index determines
   * if the returned string is the key (column 0) or the value.
   * Rows follow the HashMap's iteration order, which is arbitrary but
   * stable while the map is unmodified.
   *
   * @param rowIndex the index of the table row
   * @param columnIndex the index of the table column
   * @return the string for the specified row and column, or null when
   *         rowIndex is out of range
   */
  public String getManifestValue(final int rowIndex, final int columnIndex)
  {
    int index = 0;
    String value = null;
    // Walk the entry set until the requested row is reached
    for (Entry<String, String> entry : manifest.entrySet())
    {
      if (index == rowIndex)
      {
        value = (columnIndex == 0) ? entry.getKey() : entry.getValue();
        break;
      }
      ++index;
    }
    return value;
  }
  /**
   * Add a jar file entry to the data cache (a row of the jar table).
   *
   * @param jarName the name of the jar
   * @param jarFound whether the jar is in the manifest's classpath
   * @param fileCount the number of files in the jar
   * @param size the size of the jar file
   * @param date the last-modified date of the jar file
   */
  public void addJarFileEntry(final String jarName, final boolean jarFound,
                              final int fileCount, final long size,
                              final long date)
  {
    jarlist.add(new JarStatus(jarName, jarFound, fileCount, size, date));
  }
/**
 * Report how many jars are currently held in the jar cache.
 *
 * @return the jar cache size
 */
public int getJarFileCount()
{
    return jarlist.size();
}
/**
 * Produce the Jar-table cell value for the given row and column.
 * Columns map to: 0=found flag, 1=name, 2=date, 3=file count, 4=size.
 *
 * @param rowIndex the index of the table row
 * @param columnIndex the index of the table column
 * @return the boxed value for that cell, or null for an unknown column
 */
public Object getJarFileValue(final int rowIndex, final int columnIndex)
{
    final JarStatus status = jarlist.get(rowIndex);
    if (columnIndex == 0)
    {
        return Boolean.valueOf(status.isFound());
    }
    if (columnIndex == 1)
    {
        return status.getName();
    }
    if (columnIndex == 2)
    {
        return Long.valueOf(status.getDate());
    }
    if (columnIndex == 3)
    {
        return Integer.valueOf(status.getFileCount());
    }
    if (columnIndex == 4)
    {
        return Long.valueOf(status.getSize());
    }
    return null;
}
/**
 * Check the FileData list for duplicate file entries.
 *
 * Scans every pair (i, j) with i &lt; j and records two kinds of match:
 * - name match: identical full path; indexes stored in nameMatchIndexes
 * - CRC match: identical full path AND identical CRC; indexes stored in
 *   fileMatchIndexes
 * Matched entries are also flagged on the FileData objects themselves so
 * each index is recorded at most once.
 */
public void checkForDuplicates()
{
    // Get the size and check if the list is empty
    final int size = filedata.size();
    if (size == 0)
    {
        return;
    }
    // Iterate over each item in the list, except for the last item
    for (int i = 0; i < (size - 1); ++i)
    {
        // Get the current item and initialize the two match variables
        FileData fdSrc = filedata.get(i);
        boolean nameMatch = false;
        boolean crcMatch = false;
        // Iterate over each item in the list after fdSrc
        for (int j = i + 1; j < size; ++j)
        {
            // Get the current target
            FileData fdTarget = filedata.get(j);
            // Check for a CRC match and name match
            final boolean sameName = fdTarget.getFullPath().equals(fdSrc.getFullPath());
            if (sameName && (fdTarget.getCrc() == fdSrc.getCrc()))
            {
                // We have a match. If this was not already tagged as a match, then
                // set it now and save its index.
                if (!fdTarget.isCRCMatch())
                {
                    fdTarget.setCRCMatch(true);
                    fileMatchIndexes.add(Integer.valueOf(j));
                    crcMatch = true;
                }
            }
            // Check for a match on full file name (including path)
            if (sameName)
            {
                // We have a match. If this was not already tagged as a match, then
                // set it now and save its index.
                if (!fdTarget.isNameMatch())
                {
                    fdTarget.setNameMatch(true);
                    nameMatchIndexes.add(Integer.valueOf(j));
                    nameMatch = true;
                }
            }
        }
        // If we found a match on CRC, then update the src object and save its index
        if (crcMatch && !fdSrc.isCRCMatch())
        {
            fdSrc.setCRCMatch(true);
            fileMatchIndexes.add(Integer.valueOf(i));
        }
        // If we found a match on path/name, then update the src object and save its index
        if (nameMatch && !fdSrc.isNameMatch())
        {
            fdSrc.setNameMatch(true);
            nameMatchIndexes.add(Integer.valueOf(i));
        }
    }
}
/**
 * Locate the entry in the target's jar file whose name and CRC both
 * match the target.
 *
 * @param target the file data to match against
 * @return the matching JarEntry, or null when no entry matches or the
 *         jar file cannot be read
 */
public JarEntry getJarEntry(final FileData target)
{
    // try-with-resources guarantees the jar file is closed exactly once on
    // every path; the previous version closed it both in the try body and
    // again in the finally block.
    try (JarFile jarfile = new JarFile(target.getJar()))
    {
        final Enumeration<JarEntry> entries = jarfile.entries();
        while (entries.hasMoreElements())
        {
            final JarEntry entry = entries.nextElement();
            // A match requires both the full path and the CRC to agree
            if (entry.getName().equals(target.getFullPath()) &&
                (entry.getCrc() == target.getCrc()))
            {
                return entry;
            }
        }
    }
    catch (IOException e)
    {
        e.printStackTrace();
    }
    return null;
}
}
|
<filename>src/test/java/com/github/jinahya/datagokr/api/b090041_/lunphinfoservice/client/LunPhInfoServiceClientTest.java
package com.github.jinahya.datagokr.api.b090041_.lunphinfoservice.client;
import lombok.extern.slf4j.Slf4j;
/**
 * Concrete test class for {@code LunPhInfoServiceClient}; all test cases are
 * inherited from {@code AbstractLunPhInfoServiceClientTest}.
 */
@Slf4j
class LunPhInfoServiceClientTest
        extends AbstractLunPhInfoServiceClientTest<LunPhInfoServiceClient> {
    /**
     * Creates a new instance.
     */
    LunPhInfoServiceClientTest() {
        super(LunPhInfoServiceClient.class);
    }
}
|
// Forward declarations for the Lodtalk stack interpreter. The interpreter
// entry points themselves are currently commented out below.
#ifndef LODTALK_STACK_INTERPRETER_HPP
#define LODTALK_STACK_INTERPRETER_HPP
#include "Lodtalk/InterpreterProxy.hpp"
#include "Method.hpp"
namespace Lodtalk
{
// Forward declarations only; definitions live in the implementation files.
class StackInterpreter;
class VMContext;
class StackMemory;
//Oop interpretCompiledMethod(VMContext *context, CompiledMethod *method, Oop receiver, int argumentCount, Oop *arguments);
//Oop interpretBlockClosure(VMContext *context, BlockClosure *closure, int argumentCount, Oop *arguments);
} // End of namespace Lodtalk
#endif //LODTALK_STACK_INTERPRETER_HPP
|
<filename>src/rsa.ts
import { modPow } from "https://deno.land/x/modpow@v0.1.1/modpow.ts";
/**
 * Textbook RSA encryption: computes text^pubKey mod modules and returns the
 * result as a zero-padded, 256-digit hexadecimal string wrapped in a Promise.
 */
export const rsaEncrypt = (
  text: bigint,
  pubKey: bigint,
  modules: bigint,
): Promise<string> => {
  const cipher = modPow(text, pubKey, modules);
  // Left-pad so the hex string always spans 256 digits (1024 bits).
  const hex = cipher.toString(16).padStart(256, "0");
  return Promise.resolve(hex);
};
|
package com.meida.model;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.ICallback;
import com.meida.model.base.BaseOriginalLogistic;
import com.meida.service.OrderService;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
 * Original logistics record (原始物流) — the raw inbound shipment data.
 * Created by admin on 15/12/20.
 * Backing table DDL:
 * `id` bigint(20) NOT NULL auto_increment,
 `name` varchar(50) NOT NULL COMMENT '物流名称',
 `number` varchar(50) NOT NULL COMMENT '物流号',
 `weight` decimal(6,2) NOT NULL,
 `status` INT DEFAULT 0 COMMENT '处理状态 0待入库 1已入库 2已出仓',
 `receiver` varchar(20) NOT NULL,
 `mobile` varchar(20) NOT NULL,
 `address` varchar(100) NOT NULL,
 `remark` varchar(200),
 `orderId` bigint(20) NOT NULL,
 `deleteFlag` BIT DEFAULT 0,
 `createTime` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
 `creater` bigint(20) NOT NULL ,
 `updateTime` TIMESTAMP NOT NULL,
 `updater` bigint(20) NOT NULL ,
 */
public class OriginalLogistic extends BaseOriginalLogistic<OriginalLogistic> {
    public final static String TABLE_NAME = "originalLogistic";
    public final static OriginalLogistic dao = new OriginalLogistic();
    // Column-name constants used when composing SQL.
    public final static String name = "name",
            number = "number",
            weight = "weight",
            status = "status",
            receiver = "receiver",
            mobile = "mobile",
            address = "address",
            remark = "remark",
            orderNumber = "orderNumber",
            senderInfo = "senderInfo",
            orderId = "orderId";
    public final static String sql_findAll = "select * from " + TABLE_NAME;
    // Prebuilt queries: by logistics number, by order id, and the set of
    // records for an order that have no matching transit-logistics row yet.
    public final static String sql_findByNumber = sql_findAll + " where " + number + "=?",
            sql_findByOrderId = sql_findAll + " where " + orderId + "=?",
            sql_findUnSendOriginalLogistic = new StringBuilder(sql_findAll).append(" where ").append(orderId).append("=? and id").append(" not in(select ").append(TransitLogistic.originalId).append(" from ").append(TransitLogistic.TABLE_NAME).append(" where ").append(orderId).append("=?)").toString();

    /**
     * Resolve the order this logistics record belongs to.
     *
     * @return the owning Order, looked up via OrderService
     */
    public Order getOrder() {
        return OrderService.get(getLong(orderId));
    }

    /**
     * Delete every original-logistics row created by the given user.
     *
     * @param userId the creator whose rows are removed
     */
    public void deleteAll(final long userId) {
        Db.execute(new ICallback() {
            @Override
            public Object call(Connection conn) throws SQLException {
                // try-with-resources closes the statement even on failure;
                // the previous version leaked it.
                try (PreparedStatement st =
                        conn.prepareStatement("delete from originalLogistic where creater=?")) {
                    st.setObject(1, userId);
                    return st.executeUpdate();
                }
            }
        });
    }

    // Transient display row number; not persisted.
    private int line;

    public int getLine() {
        return line;
    }

    public void setLine(int line) {
        this.line = line;
    }
}
|
<gh_stars>10-100
/**
* MIT License
*
* Copyright (c) 2018 Infineon Technologies AG
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE
*
*
* \file DtlsTransportLayer.c
*
* \brief This file provides APIs for the transport layer functionalities.
*
* \addtogroup grOCP
* @{
*
*/
#include "optiga/dtls/DtlsTransportLayer.h"
#include "optiga/common/MemoryMgmt.h"
#ifdef MODULE_ENABLE_DTLS_MUTUAL_AUTH
/// @cond hidden
/// @endcond
/**
 * This API initialises transport layer communication structure.
 *
 * \param[in,out] PpsTL Pointer to the transport layer communication structure
 *
 * \return #OCP_TL_OK on successful execution
 * \return #OCP_TL_ERROR on failure
 * \return #OCP_TL_NULL_PARAM on parameter received is NULL
 * \return #OCP_TL_MALLOC_FAILURE on failure to allocate memory
 */
int32_t DtlsTL_Init(sTL_d* PpsTL)
{
    int32_t i4Status = (int32_t)OCP_TL_ERROR;
    // do/while(FALSE) provides a single exit point via break on any error
    do
    {
        //NULL check
        if((NULL == PpsTL) || (NULL == PpsTL->pzIpAddress))
        {
            i4Status = (int32_t)OCP_TL_NULL_PARAM;
            break;
        }
        //Allocate the memory for the ethernet communication structure
        PpsTL->phTLHdl = (pal_socket_t*)OCP_MALLOC(sizeof(pal_socket_t));
        if(NULL == PpsTL->phTLHdl)
        {
            i4Status = (int32_t)OCP_TL_MALLOC_FAILURE;
            break;
        }
/// @cond hidden
#define PS_COMMS_HANDLE ((pal_socket_t*)PpsTL->phTLHdl)
/// @endcond
        PS_COMMS_HANDLE->wPort = PpsTL->wPort;
        //Converting IP address from string format to hex format
        i4Status = pal_socket_assign_ip_address(PpsTL->pzIpAddress,&(PS_COMMS_HANDLE->sIPAddress));
        if(i4Status != E_COMMS_SUCCESS)
        {
            break;
        }
        //Assigning the timeout value
        PS_COMMS_HANDLE->wTimeout = PpsTL->wTimeout ;
        //Non Blockage receive mode
        PS_COMMS_HANDLE->bMode = (uint8_t)PpsTL->eCallType;
        //Add logging
        LOG_TRANSPORTMSG("Initializing UDP Connection",eInfo);
        //Initialize the communication handle with the parameters
        i4Status = pal_socket_init(PS_COMMS_HANDLE);
        if(E_COMMS_SUCCESS != i4Status)
        {
            break;
        }
        i4Status = (int32_t)OCP_TL_OK;
    }while(FALSE);
    // On any failure release the handle so the caller is not left with a
    // half-initialised structure
    if(OCP_TL_OK != i4Status)
    {
        if((NULL != PpsTL)&& (NULL != PpsTL->phTLHdl))
        {
            OCP_FREE(PpsTL->phTLHdl);
            PpsTL->phTLHdl = NULL;
        }
    }
/// @cond hidden
#undef PS_COMMS_HANDLE
/// @endcond
    return i4Status;
}
/**
 * This API creates client port
 *
 * \param[in,out] PpsTL Pointer to the transport layer communication structure
 *
 * \return #OCP_TL_OK on successful execution
 * \return #OCP_TL_NULL_PARAM on parameter received is NULL
 * \return #E_COMMS_UDP_BINDING_FAILURE on port binding failure
 * \return #OCP_TL_ERROR on failure
 */
int32_t DtlsTL_Connect(sTL_d* PpsTL)
{
    int32_t i4Status = (int32_t)OCP_TL_ERROR;
    do
    {
        //NULL check
        if((NULL == PpsTL) || (NULL == PpsTL->phTLHdl))
        {
            i4Status = (int32_t)OCP_TL_NULL_PARAM;
            break;
        }
/// @cond hidden
#define PS_COMMS_HANDLE ((pal_socket_t*)PpsTL->phTLHdl)
/// @endcond
        //Logging
        LOG_TRANSPORTMSG("Connecting to UDP",eInfo);
        //Open the client port with the port number initialised
        i4Status = pal_socket_connect(PS_COMMS_HANDLE, PS_COMMS_HANDLE->wPort);
        if(E_COMMS_SUCCESS != i4Status)
        {
            LOG_TRANSPORTMSG("Error connecting to UDP",eError);
            break;
        }
        // Mark the transport connected only after the connect succeeded
        PpsTL->eIsConnected = eConnected;
        i4Status = (int32_t)OCP_TL_OK;
    }while(FALSE);
/// @cond hidden
#undef PS_COMMS_HANDLE
/// @endcond
    return i4Status;
}
/**
 * This API transmits the data to the server.
 *
 * \param[in,out] PpsTL Pointer to the transport layer communication structure
 * \param[in] PpbBuffer Pointer to buffer containing data to be transmitted
 * \param[in] PdwLen Length of the data to be transmitted
 *
 * \return #OCP_TL_OK on successful execution
 * \return #OCP_TL_NULL_PARAM on parameter received is NULL
 * \return #E_COMMS_UDP_NO_DATA_TO_SEND on no date present to send
 * \return #E_COMMS_INSUFFICIENT_MEMORY on out of memory failure
 * \return #E_COMMS_UDP_ROUTING_FAILURE on failure to route the UDP packet
 * \return #E_COMMS_UDP_DEALLOCATION_FAILURE on failure to deallocate
 * \return #OCP_TL_ERROR on failure
 */
int32_t DtlsTL_Send(const sTL_d* PpsTL,uint8_t* PpbBuffer,uint16_t PdwLen)
{
    int32_t i4Status = (int32_t)OCP_TL_ERROR;
    do
    {
        //NULL check
        if((NULL == PpsTL) || (NULL == PpsTL->phTLHdl) ||(NULL == PpbBuffer))
        {
            i4Status = (int32_t)OCP_TL_NULL_PARAM;
            break;
        }
        LOG_TRANSPORTDBARY("Sending Data over UDP", PpbBuffer, PdwLen, eInfo);
        //Send the data over IP address and Port initialized
/// @cond hidden
#define PS_COMMS_HANDLE ((pal_socket_t*)PpsTL->phTLHdl)
/// @endcond
        // Error codes from pal_socket_send are passed through to the caller
        i4Status = pal_socket_send(PS_COMMS_HANDLE, PpbBuffer, PdwLen);
        if (E_COMMS_SUCCESS != i4Status)
        {
            LOG_TRANSPORTMSG("Error while sending data",eError);
            break;
        }
        i4Status = (int32_t)OCP_TL_OK;
    }while(FALSE);
/// @cond hidden
#undef PS_COMMS_HANDLE
/// @endcond
    return i4Status;
}
/**
 * This API receives the data from the server
 *
 * \param[in] PpsTL Pointer to the transport layer communication structure
 * \param[in,out] PpbBuffer Pointer to buffer where data is to be received
 * \param[in,out] PpdwLen Length of the buffer/Length of the received data
 *
 * \return #OCP_TL_OK on successful execution
 * \return #OCP_TL_NULL_PARAM on parameter received is NULL
 * \return #OCP_TL_NO_DATA on no data received from the target
 * \return #E_COMMS_INSUFFICIENT_BUF_SIZE on insufficient buffer size
 * \return #OCP_TL_ERROR on failure
 */
int32_t DtlsTL_Recv(const sTL_d* PpsTL,uint8_t* PpbBuffer,uint16_t* PpdwLen)
{
    int32_t i4Status = (int32_t)OCP_TL_ERROR;
    uint32_t dwRecvLen;
    do
    {
        //NULL check
        if((NULL == PpsTL) || (NULL == PpsTL->phTLHdl) || (NULL == PpbBuffer))
        {
            i4Status = (int32_t)OCP_TL_NULL_PARAM;
            break;
        }
        //logging
        LOG_TRANSPORTMSG("Receiving over UDP",eInfo);
/// @cond hidden
#define PS_COMMS_HANDLE ((pal_socket_t*)PpsTL->phTLHdl)
/// @endcond
        // Refresh the timeout on every receive so caller-side changes apply
        PS_COMMS_HANDLE->wTimeout = PpsTL->wTimeout;
        // PpdwLen carries the buffer capacity in; widened to 32 bits for the PAL
        dwRecvLen = *PpdwLen;
        //Listen the server port and receive the data
        i4Status = pal_socket_listen(PS_COMMS_HANDLE, PpbBuffer, &dwRecvLen);
        if ((int32_t)E_COMMS_UDP_NO_DATA_RECEIVED == i4Status)
        {
            // Timeout/no data is reported distinctly from hard failures
            i4Status = (int32_t)OCP_TL_NO_DATA;
            LOG_TRANSPORTMSG("No data received over UDP",eError);
            break;
        }
        if (E_COMMS_SUCCESS != i4Status)
        {
            LOG_TRANSPORTMSG("Error while receiving data over UDP",eError);
            break;
        }
        LOG_TRANSPORTMSG("Received Data",eInfo);
        LOG_TRANSPORTDBARY("Received Data over UDP", PpbBuffer, dwRecvLen, eInfo);
        // PpdwLen carries the actual received length back out
        *PpdwLen = (uint16_t)dwRecvLen;
        i4Status = (int32_t)OCP_TL_OK;
    }while(FALSE);
/// @cond hidden
#undef PS_COMMS_HANDLE
/// @endcond
    return i4Status;
}
/**
 * This API closes the UDP communication and releases all the resources.
 *
 * \param[in,out] PpsTL Pointer to the transport layer communication structure
 *
 * \return None
 */
Void DtlsTL_Disconnect(sTL_d* PpsTL)
{
    // Nothing to do when the structure or its handle is absent
    if((NULL == PpsTL) || (NULL == PpsTL->phTLHdl))
    {
        return;
    }
    //logging
    LOG_TRANSPORTMSG("Closing UDP Connection",eInfo);
    {
        // Local alias replaces the old PS_COMMS_HANDLE macro
        pal_socket_t* psHandle = (pal_socket_t*)PpsTL->phTLHdl;
        //Close the UDP connection
        pal_socket_close(psHandle);
        //Free the allocated memory for ethernet structure
        OCP_FREE(psHandle);
    }
    PpsTL->phTLHdl = NULL;
    PpsTL->eIsConnected = eDisconnected;
}
/**
* @}
*/
#endif /*MODULE_ENABLE_DTLS_MUTUAL_AUTH*/
|
#!/usr/bin/env bash
# Tag the locally built monolithic-shop image under the Docker Hub account in
# $DOCKER_LOGIN and push it. Requires DOCKER_LOGIN and DOCKER_PASSWORD.
set -e
docker login -u="$DOCKER_LOGIN" -p="$DOCKER_PASSWORD"
# Quote the expansion so a login with unusual characters cannot word-split.
docker tag zutherb/monolithic-shop:latest "$DOCKER_LOGIN/monolithic-shop:latest"
docker push "$DOCKER_LOGIN/monolithic-shop:latest"
import { expect } from 'chai';
import Cascade, { observable } from 'cascade';
import { CrudConnection } from '../scripts/CascadeManager';
describe('CrudConnection', () => {
it('should Get from the server', () => {
var connection = new CrudConnection('https://jsonplaceholder.typicode.com/posts/');
return expect(connection.get(1)).to.eventually.have.property('id');
});
}); |
#!/bin/sh
# Prepare the Lemon sysroot under ~/.local/share/lemon and run the configure
# step inside the published toolchain container.
SPATH=$(dirname "$(readlink -f "$0")")
# Abort instead of running docker from the wrong directory if cd fails.
cd "$SPATH/.." || exit 1
LEMONDIR=$(pwd)
export LEMONDIR
# Create the sysroot skeleton in one call; -p makes this idempotent.
mkdir -p "$HOME/.local/share/lemon/sysroot/system" \
         "$HOME/.local/share/lemon/sysroot/system/include" \
         "$HOME/.local/share/lemon/sysroot/system/lib" \
         "$HOME/.local/share/lemon/sysroot/system/bin"
docker pull computerfido/lemontoolchain:latest
docker run -v "$(pwd):$(pwd)" -v "$HOME/.local/share/lemon/sysroot":"/root/.local/share/lemon/sysroot" -w "$(pwd)" --user "$(id -u):$(id -g)" -it computerfido/lemontoolchain:latest sh -c "HOME=/root LEMON_SYSROOT=/root/.local/share/lemon/sysroot $LEMONDIR/Scripts/configure.sh"
#!/bin/bash
## ------------------------------------------------
## PubNub 3.3 Real-time Cloud Push API - JAVASCRIPT
## ------------------------------------------------
## ----------------------------------------------------
##
## TESTLING - PubNub JavaScript API for Web Browsers
## uses Testling Cloud Service
## for QA and Deployment.
##
## http://www.testling.com/
## You need this to run './test.sh' unit test.
##
## ----------------------------------------------------

# Testling credentials (USER:PASSWD) are mandatory.
if [ -z "$1" ]
then
    echo -e "\n\tUSER:PASSWD Required: http://testling.com/\n"
    exit
fi

# Comma-separated browser/version targets submitted to Testling.
browsers='firefox/3.6,firefox/9.0,firefox/10.0,chrome/16.0,chrome/17.0,iexplore/9.0,safari/5.1'
echo -e "Testing: $browsers"

# The library itself is uploaded but excluded from instrumentation.
noinstrument='pubnub-3.3.js'
tar -cf- test.js ../pubnub-3.3.js | \
curl -u $1 -sSNT- \
"testling.com/?noinstrument=$noinstrument&browsers=$browsers"
<reponame>wujia28762/Tmate<gh_stars>0
package com.honyum.elevatorMan.activity.maintenance;
import android.app.AlertDialog;
import android.app.DatePickerDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.Window;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.TimePicker;
import com.honyum.elevatorMan.R;
import com.honyum.elevatorMan.activity.common.ContractInnerAdapter;
import com.honyum.elevatorMan.activity.common.ToDoDetailDealActivity;
import com.honyum.elevatorMan.activity.common.ToDoListActivity;
import com.honyum.elevatorMan.activity.worker.WorkerBaseActivity;
import com.honyum.elevatorMan.base.Config;
import com.honyum.elevatorMan.data.ContactDataGrideInfo;
import com.honyum.elevatorMan.data.LiftInfo;
import com.honyum.elevatorMan.data.MaintenanceInfo;
import com.honyum.elevatorMan.net.ContactResponse;
import com.honyum.elevatorMan.net.ContractInfoDetailRequest;
import com.honyum.elevatorMan.net.ContractInfoDetailResponse;
import com.honyum.elevatorMan.net.LiftInfoRequest;
import com.honyum.elevatorMan.net.MaintenancePlanResponse;
import com.honyum.elevatorMan.net.NewRequestHead;
import com.honyum.elevatorMan.net.PlanResponse;
import com.honyum.elevatorMan.net.RepairSelectedWorkerRequest;
import com.honyum.elevatorMan.net.ReportPlanRequest;
import com.honyum.elevatorMan.net.UploadProcessRequest;
import com.honyum.elevatorMan.net.base.NetConstant;
import com.honyum.elevatorMan.net.base.NetTask;
import com.honyum.elevatorMan.net.base.RequestBean;
import com.honyum.elevatorMan.net.base.RequestHead;
import com.honyum.elevatorMan.utils.Utils;
import com.honyum.elevatorMan.utils.ViewUtils;
import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class PlanActivity extends WorkerBaseActivity {
    private DatePicker datePicker;     // date picker inside the plan-time dialog
    private TimePicker timePicker;     // time picker inside the plan-time dialog
    Date date;                         // "now" reference used to pre-fill the time picker
    String s1;                         // selected date part, e.g. "2016-1-5"
    String s2;                         // selected time part, e.g. " 9:05" (leading space intended)
    private boolean isTimePass;        // true when the chosen plan time lies in the future
    private View dialogLayout;         // inflated layout for the date/time dialog
    private AlertDialog alertDialog;   // the combined date/time selection dialog
    private Dialog selectedWorkDialog; // dialog used to pick an assignee
    private int currIndex = -1;        // index of the selected assignee, -1 = none
    private String mainPlanId;         // id of the maintenance plan being viewed
    /**
     * Activity entry point: inflates the plan layout, then builds the title
     * bar and body from the launching Intent.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_plan);
        initTitleBar(getIntent());
        initView(getIntent());
    }
/**
* 初始化标题栏
*/
private void initTitleBar(Intent intent) {
getSlidingMenu().setTouchModeAbove(SlidingMenu.TOUCHMODE_NONE);
setExitFlag(false);
String type = intent.getStringExtra("enter_type");
String title = "";
if (type.equals("add")) {
title = getString(R.string.make_plan);
} else if (type.equals("modify")) {
title = getString(R.string.modify_plan);
} else {
title = "计划详情";
}
initTitleBar(title, R.id.title_plan, R.drawable.back_normal,
backClickListener);
}
    /**
     * Populate the screen from the launching intent: a "lift" extra means the
     * editable plan form (fillData); otherwise a "mainPlanId" extra triggers
     * a server fetch of the plan details (requestPlanById).
     *
     * @param intent the launching intent; a null intent is ignored
     */
    private void initView(final Intent intent) {
        if (null == intent) {
            return;
        }
        if (intent.hasExtra("lift")) {
            final LiftInfo liftInfo = (LiftInfo) intent.getSerializableExtra("lift");
            fillData(intent, liftInfo);
        } else {
            if (intent.hasExtra("mainPlanId")) {
                mainPlanId = intent.getStringExtra("mainPlanId");
                requestPlanById(mainPlanId);
            }
        }
    }
    /**
     * Fetch a maintenance plan by id from the maintenance server and, on
     * success, cache the URL-decoded workflow task parameters in Config and
     * render the read-only detail view via fillData1.
     *
     * @param mainPlanId id of the maintenance plan to load
     */
    private void requestPlanById(String mainPlanId) {
        ContractInfoDetailRequest contractInfodetailRequest = new ContractInfoDetailRequest();
        ContractInfoDetailRequest.ContractInfoBody body = contractInfodetailRequest.new ContractInfoBody();
        body.setId(mainPlanId);
        body.setBranchId(getConfig().getBranchId());
        body.set_process_isLastNode(ToDoListActivity.Companion.getCurrLastNode());
        body.set_process_task_param(ToDoListActivity.Companion.getCurrTask());
        contractInfodetailRequest.setBody(body);
        contractInfodetailRequest.setHead(new NewRequestHead().setaccessToken(getConfig().getToken()).setuserId(getConfig().getUserId()));
        // request.setBody(request.new RequestLiftInfoBody(mainPlanId));
        // request.setHead(new NewRequestHead().setuserId(getConfig().getUserId()).setaccessToken(getConfig().getToken()));
        String server = getConfig().getMaintenanceServer() + NetConstant.GETMAINPLAN;
        NetTask netTask = new NetTask(server, contractInfodetailRequest) {
            @Override
            protected void onResponse(NetTask task, String result) {
                MaintenancePlanResponse response = MaintenancePlanResponse.getResponse(result, MaintenancePlanResponse.class);
                String resMap = response.getBody().get_process_resultMap().get_process_task_param();
                try {
                    // Task parameters arrive URL-encoded; decode before caching
                    Config.currTask = URLDecoder.decode(resMap, "UTF-8");
                    Config.currLastNode = response.getBody().get_process_resultMap().get_process_isLastNode();
                } catch (UnsupportedEncodingException e) {
                    e.printStackTrace();
                }
                fillData1(response.getBody().getMaintenance());
            }
        };
        addTask(netTask);
    }
    /**
     * Render the read-only plan-detail view from a loaded MaintenanceInfo:
     * plan time, maintenance type, lift number and address. The editable
     * date/type rows are hidden and the submit button is repurposed as a
     * "handle now" shortcut that opens ToDoDetailDealActivity.
     *
     * @param response the maintenance plan returned by the server
     */
    private void fillData1(MaintenanceInfo response) {
        // Maintenance date handling
        final TextView tvPlanDate = (TextView) findViewById(R.id.tv_plan_date);
        final TextView tvPlanType = (TextView) findViewById(R.id.tv_plan_type);
        tvPlanDate.setText(response.getPlanTime());
        tvPlanType.setText(response.getMainType());
        ((TextView) findViewById(R.id.tv_lift_code)).setText(response.getElevatorInfo().getLiftNum());
        ((TextView) findViewById(R.id.tv_lift_add)).setText(response.getElevatorInfo().getAddress());
        // Read-only mode: hide the calendar and type selection rows
        findViewById(R.id.ll_calendar).setVisibility(View.GONE);
        findViewById(R.id.ll_type).setVisibility(View.GONE);
        TextView textView = ( (TextView)findViewById(R.id.btn_submit));
        textView.setText("立刻处理");
        textView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(PlanActivity.this,ToDoDetailDealActivity.class);
                intent.putExtra("currId",mainPlanId);
                intent.putExtra("url",getConfig().getMaintenanceUrl());
                finish();
                startActivity(intent);
            }
        });
    }
    /**
     * Build the editable plan form for the given lift: pre-fill the plan
     * date/type (from the lift's existing plan when present, otherwise "now"
     * and the semi-monthly type), and wire up the combined date+time picker
     * dialog, the type selector, the help screen and the submit action.
     *
     * @param intent   the launching intent (its "enter_type" extra is passed
     *                 through on submit)
     * @param liftInfo the lift the maintenance plan applies to
     */
    private void fillData(Intent intent, LiftInfo liftInfo) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date d = new Date(System.currentTimeMillis());
        String s = sdf.format(d).toString();
        ((TextView) findViewById(R.id.tv_lift_code)).setText(liftInfo.getNum());
        ((TextView) findViewById(R.id.tv_lift_add)).setText(liftInfo.getAddress());
        dialogLayout = LayoutInflater.from(this).inflate(R.layout.dia_datetime_layout, null);
        datePicker = (DatePicker) dialogLayout.findViewById(R.id.datePicker);
        timePicker = (TimePicker) dialogLayout.findViewById(R.id.timePicker);
        ViewUtils.resizePikcer(datePicker);
        ViewUtils.resizePikcer(timePicker);
        // Maintenance date handling: prefer the lift's existing plan values
        final TextView tvPlanDate = (TextView) findViewById(R.id.tv_plan_date);
        final TextView tvPlanType = (TextView) findViewById(R.id.tv_plan_type);
        if (liftInfo.hasPlan()) {
            tvPlanDate.setText(liftInfo.getPlanMainTime());
            tvPlanType.setText(liftInfo.getPlanType());
        } else {
            tvPlanDate.setText(s);
            tvPlanType.setText("半月保");
        }
        date = new Date();
        // One dialog combines the date and time picker widgets.
        findViewById(R.id.ll_date).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                timePicker.setIs24HourView(true);
                // Pre-select the next full hour
                timePicker.setCurrentHour(date.getHours() + 1);
                timePicker.setCurrentMinute(0);
                int minute = timePicker.getCurrentMinute();
                s2 = " " + (timePicker.getCurrentHour()) + ":" + (minute < 10 ? "0" + minute : minute);
                timePicker.setOnTimeChangedListener(new TimePicker.OnTimeChangedListener() {
                    @Override
                    public void onTimeChanged(TimePicker view, int hourOfDay, int minute) {
                        s2 = (" " + hourOfDay + ":" + (minute < 10 ? "0" + minute : minute));
                    }
                });
                alertDialog.show();
            }
        });
        alertDialog = new AlertDialog.Builder(this,R.style.dialogStyle).setTitle("选择时间").setView(dialogLayout).setPositiveButton("确定",
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int arg1) {
                        // DatePicker months are 0-based, hence the +1
                        s1 = (datePicker.getYear() + "-" + (datePicker.getMonth() + 1) + "-" + datePicker.getDayOfMonth());
                        String dateString = s1 + s2;
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
                        Date d = new Date();
                        try {
                            isTimePass = false;
                            d = sdf.parse(dateString);
                            long t = d.getTime();
                            long cl = System.currentTimeMillis();
                            // isTimePass records whether the time is in the future
                            if (cl > t) {
                                isTimePass = false;
                            } else {
                                isTimePass = true;
                            }
                            tvPlanDate.setText(dateString);
                            dialog.dismiss();
                        } catch (ParseException e) {
                            e.printStackTrace();
                        }
                    }
                }).setNegativeButton("取消", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int arg1) {
                dialog.dismiss();
            }
        }).create();
        // end of combined date/time widgets
        // Callback fired after a date has been set
        final DatePickerDialog.OnDateSetListener dateSetListener = new DatePickerDialog.OnDateSetListener() {
            @Override
            public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) {
                Date date = new Date(year - 1900, monthOfYear, dayOfMonth);
                tvPlanDate.setText(Utils.dateToString(date));
            }
        };
        // //点击日期时修改
        // findViewById(R.id.ll_date).setOnClickListener(new View.OnClickListener() {
        //     @Override
        //     public void onClick(View v) {
        //         Calendar calendar = Calendar.getInstance();
        //         calendar.setTime(Utils.stringToDate(tvPlanDate.getText().toString()));
        //         DatePickerDialog datePickerDialog = new DatePickerDialog(PlanActivity.this, dateSetListener,
        //                 calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH),
        //                 calendar.get(Calendar.DATE)) {
        //             @Override
        //             protected void onStop() {
        //             }
        //         };
        //         DatePicker datePicker = datePickerDialog.getDatePicker();
        //         datePicker.setSpinnersShown(false);
        //
        //         datePicker.setCalendarViewShown(true);
        //         datePickerDialog.show();
        //     }
        // });
        // Maintenance plan type selection
        findViewById(R.id.tv_plan_type).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                popTypeSelector(tvPlanType);
            }
        });
        // Help screen for the maintenance type and its contents
        findViewById(R.id.tv_type_help).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String type = LiftInfo.stringToType(tvPlanType.getText().toString());
                Intent intent = new Intent(PlanActivity.this, MainHelpActivity.class);
                intent.putExtra("type", type);
                startActivity(intent);
            }
        });
        // Maintenance plan submission
        findViewById(R.id.btn_submit).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String date = tvPlanDate.getText().toString();
                String type = LiftInfo.stringToType(tvPlanType.getText().toString());
                reportPlan(getConfig().getUserId(), getConfig().getToken(), liftInfo.getId(), date,
                        type, intent.getStringExtra("enter_type"));
            }
        });
    }
    /**
     * Query the server for assignable workers whose name matches the filter
     * and bind the results to the dialog's ListView. Tapping a row marks it
     * as the single selection and remembers its index in currIndex.
     *
     * @param name  name filter; an empty string returns all workers
     * @param param workflow task parameter forwarded with the query
     * @param list  the ListView inside the assignee-selection dialog
     */
    private void requestRepairWork(String name, String param, ListView list) {
        RepairSelectedWorkerRequest request = new RepairSelectedWorkerRequest();
        RepairSelectedWorkerRequest.RepairSelectedWorkerBody body = request.new RepairSelectedWorkerBody();
        body.setName(name);
        body.set_process_task_param(param);
        body.setBranchId(getConfig().getBranchId());
        // body.remark = et_remark.text.toString()
        request.setBody(body);
        request.setHead(new NewRequestHead().setaccessToken(getConfig().getToken()).setuserId(getConfig().getUserId()));
        String server = getConfig().getNewServer() + NetConstant.SELECTUSERDATAGRID;
        NetTask netTask = new NetTask(server, request) {
            @Override
            protected void onResponse(NetTask task, String result) {
                ContactResponse response = ContactResponse.getContactResponse(result);
                contracts = response.getBody().getDataGrid();
                list.setAdapter(new ContractInnerAdapter(contracts, PlanActivity.this, R.layout.item_contract_inner));
                list.setOnItemClickListener((parent, view, position, id) -> {
                    // Clear all flags first so only one row stays selected
                    transDefault();
                    contracts.get(position).setSelected(true);
                    currIndex = position;
                    ((ContractInnerAdapter) list.getAdapter()).notifyDataSetChanged();
                });
            }
        };
        addTask(netTask);
    }
private void transDefault() {
for (ContactDataGrideInfo info : contracts) {
info.setSelected(false);
}
}
    // Assignee candidates returned by the most recent worker query.
    List<ContactDataGrideInfo> contracts;
    /**
     * Build and show the assignee-selection dialog: a search box, a result
     * list populated via requestRepairWork, and a submit button that commits
     * the plan to the chosen worker.
     *
     * @param decode URL-decoded workflow task parameter for this node
     * @param id     id of the plan being assigned
     */
    private void initDialog(String decode, String id) {
        selectedWorkDialog = new Dialog(this);
        selectedWorkDialog.setCanceledOnTouchOutside(true);
        selectedWorkDialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
        selectedWorkDialog.setContentView(R.layout.dialog_select_pointer);
        ListView list = (ListView) selectedWorkDialog.findViewById(R.id.list_data);
        EditText numText = (EditText) selectedWorkDialog.findViewById(R.id.name);
        // Initial load: empty filter returns every assignable worker
        requestRepairWork("", decode, list);
        selectedWorkDialog.findViewById(R.id.search).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                requestRepairWork(numText.getText().toString(), decode, list);
            }
        });
        selectedWorkDialog.findViewById(R.id.submit).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Refuse to submit until a worker row has been selected
                if (currIndex == -1) {
                    showToast("请选择指派人!");
                    return;
                }
                requestPointWorker(id, decode, contracts.get(currIndex).getId());
            }
        });
        selectedWorkDialog.show();
    }
private void requestPointWorker(String id, String process_task_param, String s) {
UploadProcessRequest uploadProcessRequest = new UploadProcessRequest();
UploadProcessRequest.UploadProcessRequestBody body = uploadProcessRequest.new UploadProcessRequestBody();
uploadProcessRequest.setHead(new NewRequestHead().setaccessToken(getConfig().getToken()).setuserId(getConfig().getUserId()));
body.set_process_isLastNode("0");
body.set_process_task_param(process_task_param);
body.set_process_path("同意");
body.set_processSelectUserId(s);
//body._process_approve_source
body.set_processSelectUserId(s);
body.setId(id);
body.setBranchId(getConfig().getBranchId());
uploadProcessRequest.setBody(body);
String server = getConfig().getServer()+getConfig().getMaintenanceUrl() + NetConstant.COMMITMAINPLAN;
NetTask netTask = new NetTask(server, uploadProcessRequest) {
@Override
protected void onResponse(NetTask task, String result) {
if (selectedWorkDialog.isShowing())
selectedWorkDialog.dismiss();
showToast("提交成功!");
finish();
}
};
addTask(netTask);
}
    /**
     * Click handler for the maintenance-type popup: writes the label of the
     * tapped row into the target TextView and dismisses the popup.
     */
    class TypeSelector implements View.OnClickListener {
        private AlertDialog mDialog;   // the popup to dismiss on selection
        private TextView mTextView;    // the view receiving the chosen type label
        public TypeSelector(AlertDialog dialog, TextView textView) {
            mDialog = dialog;
            mTextView = textView;
        }
        @Override
        public void onClick(View v) {
            mDialog.dismiss();
            // Map the tapped row to its maintenance-type label
            switch (v.getId()) {
                case R.id.ll_semi_month:
                    mTextView.setText("半月保");
                    break;
                case R.id.ll_month:
                    mTextView.setText("月保");
                    break;
                case R.id.ll_season:
                    mTextView.setText("季度保");
                    break;
                case R.id.ll_semi_year:
                    mTextView.setText("半年保");
                    break;
                case R.id.ll_year:
                    mTextView.setText("年保");
                    break;
            }
        }
    }
/**
 * Shows the maintenance-type selection dialog and highlights the row whose
 * label matches the current text of the given view.
 *
 * @param textView view whose current text determines the highlighted row,
 *                 and which receives the chosen label (via TypeSelector)
 */
private void popTypeSelector(TextView textView) {
    View view = View.inflate(this, R.layout.layout_plan_type, null);
    AlertDialog.Builder builder = new AlertDialog.Builder(this,R.style.dialogStyle);
    builder.setView(view);
    AlertDialog dialog = builder.create();
    dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
    // WindowManager.LayoutParams params = dialog.getWindow().getAttributes();
    // params.width = WindowManager.LayoutParams.MATCH_PARENT;
    // One shared listener handles all five type rows.
    View.OnClickListener listener = new TypeSelector(dialog, textView);
    view.findViewById(R.id.ll_semi_month).setOnClickListener(listener);
    view.findViewById(R.id.ll_month).setOnClickListener(listener);
    view.findViewById(R.id.ll_season).setOnClickListener(listener);
    view.findViewById(R.id.ll_semi_year).setOnClickListener(listener);
    view.findViewById(R.id.ll_year).setOnClickListener(listener);
    // Label and star-icon views, index-aligned with the five rows above.
    List<TextView> textViews = new ArrayList<>(5);
    textViews.add((TextView) view.findViewById(R.id.tv_half_month));
    textViews.add((TextView) view.findViewById(R.id.tv_star_month));
    textViews.add((TextView) view.findViewById(R.id.tv_star_quarter));
    textViews.add((TextView) view.findViewById(R.id.tv_star_half_year));
    textViews.add((TextView) view.findViewById(R.id.tv_star_year));
    List<ImageView> imageViews = new ArrayList<>(5);
    imageViews.add((ImageView) view.findViewById(R.id.iv_half_month));
    imageViews.add((ImageView) view.findViewById(R.id.iv_star_month));
    imageViews.add((ImageView) view.findViewById(R.id.iv_star_quarter));
    imageViews.add((ImageView) view.findViewById(R.id.iv_star_half_year));
    imageViews.add((ImageView) view.findViewById(R.id.iv_star_year));
    String selectedString = textView.getText() + "";
    int selectedIndex = 0;
    if (!TextUtils.isEmpty(selectedString)) {
        for (int i = 0; i < textViews.size(); i++) {
            if (selectedString.equals(textViews.get(i).getText() + "")) {
                selectedIndex = i;
                break;
            }
        }
        // NOTE(review): if the current text matches none of the labels, the
        // first row is highlighted by default — confirm this is intended.
        imageViews.get(selectedIndex).setBackgroundResource(R.drawable.star_selected);
        textViews.get(selectedIndex).setTextColor(getResources().getColor(R.color.title_bg_color));
    }
    dialog.show();
}
/**
 * Builds the request bean used to submit a maintenance plan.
 *
 * @param userId   id of the logged-in user
 * @param token    session access token
 * @param id       id of the plan record
 * @param planDate planned maintenance date
 * @param planType maintenance type label
 * @param branchId branch the plan belongs to
 * @return fully-populated {@link ReportPlanRequest} (as a RequestBean)
 */
public static RequestBean getReportPlanRequest(String userId, String token, String id, String planDate,
                                               String planType, String branchId) {
    ReportPlanRequest planRequest = new ReportPlanRequest();

    // Header: caller identity.
    RequestHead requestHead = new RequestHead();
    requestHead.setUserId(userId);
    requestHead.setAccessToken(token);
    planRequest.setHead(requestHead);

    // Body: the plan payload.
    ReportPlanRequest.ReportPlanReqBody requestBody = planRequest.new ReportPlanReqBody();
    requestBody.setId(id);
    requestBody.setPlanTime(planDate);
    requestBody.setMainType(planType);
    requestBody.setBranchId(branchId);
    planRequest.setBody(requestBody);

    return planRequest;
}
/**
 * Submits (creates or updates) a maintenance plan to the server, then passes
 * the URL-decoded workflow parameter from the response to initDialog.
 *
 * @param userId   id of the logged-in user
 * @param token    session access token
 * @param id       id of the plan record
 * @param planDate planned maintenance date
 * @param planType maintenance type label
 * @param type     "add" to create a new plan, "modify" to update one
 */
private void reportPlan(String userId, String token, String id, String planDate,
                        String planType, String type) {
    // NOTE(review): if type is neither "add" nor "modify", server stays
    // empty and the request below targets an empty URL — confirm callers
    // only ever pass these two values.
    String server = "";
    if (type.equals("add")) {
        server = getConfig().getServer()+getConfig().getMaintenanceUrl() + NetConstant.URL_REPORT_PLAN;
    } else if (type.equals("modify")) {
        server = getConfig().getServer()+getConfig().getMaintenanceUrl() + NetConstant.URL_MODIFY_PLAN;
    }
    NetTask netTask = new NetTask(server, getReportPlanRequest(userId, token, id, planDate,
            planType, getConfig().getBranchId())) {
        @Override
        protected void onResponse(NetTask task, String result) {
            PlanResponse response = PlanResponse.getResponse(result, PlanResponse.class);
            try {
                // The workflow task parameter comes back URL-encoded.
                initDialog(URLDecoder.decode(response.getBody().get_process_task_param(), "UTF-8"), response.getBody().getId());
            } catch (UnsupportedEncodingException e) {
                e.printStackTrace();
            }
            // initDialog()
            // showToast("维保计划提交成功,请及时到记录上传里面完成您的维保计划");
            // finish();
            //Intent liftIntent = new Intent(PlanActivity.this, MyLiftActivity.class);
            //liftIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
            //startActivity(liftIntent);
        }
    };
    addTask(netTask);
}
}
|
//-----------------------------------------------------------------------------
// File: Autorun.cpp
//
// The program entry point for the Autorun sample.
// Implementation of the base class for the Autorun functionality.
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
#include "Common.h"
#include "Autorun.h"
#include "AutorunMsi.h"
#include "resource.h"
#include <new>
using std::nothrow;
// Older SDKs do not define this flag.
#ifndef SEE_MASK_NOASYNC
#define SEE_MASK_NOASYNC 0x00000100
#endif
//-----------------------------------------------------------------------------
// Forward declarations.
//-----------------------------------------------------------------------------
INT_PTR CALLBACK DialogProc( HWND hDlg, UINT Msg, WPARAM wParam, LPARAM lParam );
//-----------------------------------------------------------------------------
// Program entry point.
//-----------------------------------------------------------------------------
INT WINAPI wWinMain( HINSTANCE hInstance, HINSTANCE, LPWSTR, int )
{
    Settings settings;

    // Use a named mutex to force a single instance of the autorun UI.
    HANDLE hMutex = CreateMutex( NULL, TRUE, settings.AutorunMutex() );
    DWORD dwError = GetLastError();
    if( ERROR_ACCESS_DENIED == dwError || ERROR_ALREADY_EXISTS == dwError )
    {
        // Another instance is running (ACCESS_DENIED can occur when the
        // mutex already exists but is not accessible to this process).
        if( hMutex )
        {
            CloseHandle( hMutex );
        }
        return 0;
    }

    // Select the correct autorun functionality: MSI-based or plain,
    // depending on the settings.
    Autorun* pAutorun = NULL;
    if( settings.UseMsi() )
        pAutorun = new(nothrow) AutorunMsi( settings );
    else
        pAutorun = new(nothrow) Autorun( settings );
    if( !pAutorun )
    {
        // Out of memory.
        return 0;
    }

    // Show the modal autorun dialog; the Autorun object reaches DialogProc
    // through the lParam of WM_INITDIALOG.
    DialogBoxParam( hInstance,
                    MAKEINTRESOURCE( IDD_AUTORUN ),
                    NULL,
                    DialogProc,
                    ( LPARAM )pAutorun );

    // Cleanup.
    delete pAutorun;
    if( hMutex )
    {
        ReleaseMutex( hMutex );
        CloseHandle( hMutex );
    }
    return 0;
}
//-----------------------------------------------------------------------------
// Message handler for the dialog.
//-----------------------------------------------------------------------------
// Dialog procedure for the autorun dialog: wires up the Install/Play
// buttons, tracks installation state, and polls the installer on a timer.
INT_PTR CALLBACK DialogProc( HWND hDlg, UINT Msg, WPARAM wParam, LPARAM lParam )
{
    // Static data used for the dialog (safe: a single instance of the
    // program — and hence of this dialog — is enforced in wWinMain).
    static Autorun* pAutorun = NULL;
    static HWND hInstall = NULL;
    static HWND hPlay = NULL;
    static BOOL bInstalled = FALSE;
    switch( Msg )
    {
        case WM_INITDIALOG:
        {
            // Get handles to the dialog buttons.
            hInstall = GetDlgItem( hDlg, IDC_INSTALL );
            hPlay = GetDlgItem( hDlg, IDC_PLAY );
            // Set the icon for the dialog.
            HICON hIcon = LoadIcon( GetModuleHandle( NULL ), MAKEINTRESOURCE( IDI_AUTORUN ) );
            SendMessage( hDlg, WM_SETICON, ICON_BIG, ( LPARAM )hIcon );
            // Get the autorun functionality (passed via DialogBoxParam).
            pAutorun = ( Autorun* )lParam;
            if( pAutorun )
            {
                bInstalled = pAutorun->IsGameInstalled();
            }
            // Force an update of the UI.
            SendMessage( hDlg, WM_UPDATEUISTATE, 0, 0 );
        }
        return FALSE;
        case WM_UPDATEUISTATE:
        {
            // Enable/disable buttons and adjust focus based on whether the game is installed.
            EnableWindow( hInstall, !bInstalled );
            EnableWindow( hPlay, bInstalled );
            if( bInstalled )
            {
                Button_SetStyle( hInstall, 0, TRUE );
                Button_SetStyle( hPlay, BS_DEFPUSHBUTTON, TRUE );
                SetFocus( hPlay );
            }
            else
            {
                Button_SetStyle( hInstall, BS_DEFPUSHBUTTON, TRUE );
                Button_SetStyle( hPlay, 0, TRUE );
                SetFocus( hInstall );
            }
        }
        return TRUE;
        case WM_COMMAND:
        {
            switch( LOWORD( wParam ) )
            {
                case IDC_INSTALL:
                {
                    if( !pAutorun )
                        return FALSE;
                    if( pAutorun->InstallGame() )
                    {
                        // Periodically check if game is installed
                        // (handled below under WM_TIMER, timer id 0).
                        SetTimer( hDlg, 0, 1000, NULL );
                    }
                    else
                    {
                        // Error launching installer... user may have
                        // the wrong disc in the drive.
                        MessageBox( hDlg,
                                    L"Installer could not be launched.\n"
                                    L"Please ensure that the installation disc is in the drive.",
                                    L"Autorun",
                                    MB_ICONWARNING | MB_OK );
                    }
                }
                return TRUE;
                case IDC_PLAY:
                {
                    if( !pAutorun )
                        return FALSE;
                    if( pAutorun->LaunchGame() )
                    {
                        // Done.
                        EndDialog( hDlg, 0 );
                    }
                    else
                    {
                        // Error launching game... installation
                        // may have been corrupted.
                        MessageBox( hDlg,
                                    L"Game could not be launched.\n"
                                    L"Please try reinstalling the game.",
                                    L"Autorun",
                                    MB_ICONWARNING | MB_OK );
                        // Enable the install button.
                        EnableWindow( GetDlgItem( hDlg, IDC_INSTALL ), TRUE );
                    }
                }
                return TRUE;
                case IDCANCEL:
                {
                    EndDialog( hDlg, 0 );
                }
                return TRUE;
            }
        }
        break;
        case WM_TIMER:
        {
            if( !pAutorun )
                return FALSE;
            // Check on installation status.
            bInstalled = pAutorun->IsGameInstalled();
            if( bInstalled )
            {
                // Update the UI now that the game is installed.
                SendMessage( hDlg, WM_UPDATEUISTATE, 0, 0 );
                // Stop checking (timer id 0 was set under IDC_INSTALL).
                KillTimer( hDlg, 0 );
                if( pAutorun->GetSettings().AutorunAfterInstall() )
                {
                    // Automatically run the game.
                    SendMessage( hDlg, WM_COMMAND, IDC_PLAY, 0 );
                }
            }
        }
        return TRUE;
    }
    return FALSE;
}
//-----------------------------------------------------------------------------
// Captures a copy of the settings; the game starts out marked not installed
// (the flag is only used in WaitForInstaller mode — see IsGameInstalled).
Autorun::Autorun( const Settings& settings ) : m_Settings( settings ), m_bIsInstalled( FALSE )
{
}
//-----------------------------------------------------------------------------
// Determine if the game is installed.
//-----------------------------------------------------------------------------
BOOL Autorun::IsGameInstalled()
{
    // In WaitForInstaller mode, installation state is tracked by the flag
    // set in InstallGame after the installer exits successfully.
    if( m_Settings.WaitForInstaller() )
        return m_bIsInstalled;

    // Otherwise the game counts as installed when its executable exists
    // at the registered install location.
    WCHAR szExePath[MAX_PATH];
    if( !GetGameExecutablePath( szExePath, ARRAYSIZE( szExePath ) ) )
        return FALSE;
    return PathFileExists( szExePath );
}
//-----------------------------------------------------------------------------
// Obtain the path to the game executable.
//-----------------------------------------------------------------------------
// Builds the full path to the installed game executable from the registry
// "InstallFolder" value under the configured key. Returns FALSE when the
// key/value is missing, of the wrong type, or too long for the buffers.
BOOL Autorun::GetGameExecutablePath( WCHAR* szExePath, DWORD cchExePath )
{
    // Get the installation folder from the registry.
    WCHAR szInstallFolder[MAX_PATH] = { 0, };
    DWORD dwSize = sizeof( szInstallFolder );
    HKEY hKey = NULL;
    LONG lResult;
    DWORD dwType = 0;
    lResult = RegOpenKeyEx( HKEY_LOCAL_MACHINE, m_Settings.RegistryKey(), 0, KEY_READ, &hKey );
    if( ERROR_SUCCESS != lResult )
        return FALSE;
    // Read the value, but ensure the string is null-terminated.
    lResult = RegQueryValueEx( hKey, L"InstallFolder", 0, &dwType, ( BYTE* )szInstallFolder, &dwSize );
    szInstallFolder[MAX_PATH - 1] = L'\0';
    RegCloseKey( hKey );
    if( ERROR_SUCCESS != lResult || ( REG_SZ != dwType && REG_EXPAND_SZ != dwType ) )
        return FALSE;
    // Expand environment-variable references (e.g. %ProgramFiles%), if needed.
    if( REG_EXPAND_SZ == dwType )
    {
        DWORD cchTemp = ExpandEnvironmentStrings( szInstallFolder, NULL, 0 );
        if( 0 == cchTemp )
        {
            // Size query failed.
            return FALSE;
        }
        WCHAR *szTemp = new(nothrow) WCHAR[cchTemp];
        if( !szTemp )
        {
            // Out of memory.
            return FALSE;
        }
        DWORD cchExpanded = ExpandEnvironmentStrings( szInstallFolder, szTemp, cchTemp );
        // Fix: fail cleanly when expansion fails or the result does not fit
        // szInstallFolder. Previously an oversized expansion would trip the
        // wcscpy_s invalid-parameter handler and abort the process.
        if( 0 == cchExpanded || cchExpanded > ARRAYSIZE( szInstallFolder ) )
        {
            delete [] szTemp;
            return FALSE;
        }
        wcscpy_s( szInstallFolder, szTemp );
        delete [] szTemp;
    }
    // Construct the complete path to the game executable.
    swprintf_s( szExePath, cchExePath, L"%s\\%s", szInstallFolder, m_Settings.GameExe() );
    return TRUE;
}
//-----------------------------------------------------------------------------
// Launch the installer for the game.
//-----------------------------------------------------------------------------
// Locates and launches the installer. Returns FALSE if the installer
// executable cannot be found or fails to launch.
BOOL Autorun::InstallGame()
{
    WCHAR szPath[MAX_PATH];
    if( !FindFile( m_Settings.InstallerExe(), szPath, ARRAYSIZE( szPath ) ) )
        return FALSE;
    if( m_Settings.WaitForInstaller() )
    {
        // Spawn installer and wait for it to finish.
        DWORD dwExitCode = 0;
        BOOL bResult = SpawnProcessAndWait( szPath, m_Settings.InstallerArgs(), &dwExitCode );
        // Check the exit code of the installer for indication of successful
        // installation; a zero exit code marks the game installed
        // (IsGameInstalled reads this flag in WaitForInstaller mode).
        if( bResult && ( dwExitCode == 0 ) )
            m_bIsInstalled = TRUE;
        return bResult;
    }
    else
    {
        // Spawn installer and check for successful installation later
        // (the dialog polls IsGameInstalled on a timer).
        return SpawnProcess( szPath, m_Settings.InstallerArgs() );
    }
}
//-----------------------------------------------------------------------------
// Launch the game.
//-----------------------------------------------------------------------------
BOOL Autorun::LaunchGame()
{
    // Resolve the installed executable, then hand it to the shell.
    WCHAR szExe[MAX_PATH];
    if( GetGameExecutablePath( szExe, ARRAYSIZE( szExe ) ) )
        return SpawnProcess( szExe, m_Settings.GameArgs() );
    return FALSE;
}
//-----------------------------------------------------------------------------
// Finds the specified file.
//-----------------------------------------------------------------------------
BOOL Autorun::FindFile( const WCHAR* szFile, WCHAR* szPath, DWORD cchPath )
{
    if( !szFile || !szPath )
        return FALSE;

    // Probe two base directories in order: the current directory first,
    // then the directory containing the running executable.
    WCHAR szCandidate[MAX_PATH];
    for( int nAttempt = 0; nAttempt < 2; ++nAttempt )
    {
        if( 0 == nAttempt )
        {
            GetCurrentDirectory( ARRAYSIZE( szCandidate ), szCandidate );
        }
        else
        {
            GetModuleFileName( NULL, szCandidate, ARRAYSIZE( szCandidate ) );
            PathRemoveFileSpec( szCandidate );
        }
        wcscat_s( szCandidate, L"\\" );
        wcscat_s( szCandidate, szFile );
        if( PathFileExists( szCandidate ) )
        {
            wcscpy_s( szPath, cchPath, szCandidate );
            return TRUE;
        }
    }
    return FALSE;
}
//-----------------------------------------------------------------------------
// Runs the specified program.
//-----------------------------------------------------------------------------
// Fire-and-forget launch of szExePath via the shell; the working directory
// is derived from the executable's own path. Returns FALSE on launch failure.
BOOL Autorun::SpawnProcess( const WCHAR* szExePath, const WCHAR* szExeArgs )
{
    if( !szExePath )
        return FALSE;
    // NOTE: szExeArgs can be NULL.
    // Get working directory from executable path.
    WCHAR szDirectory[MAX_PATH];
    wcscpy_s( szDirectory, szExePath );
    PathRemoveFileSpec( szDirectory );
    // ShellExecute or ShellExecuteEx must be used instead of CreateProcess
    // to permit the shell to display a UAC prompt asking for consent to
    // elevate when the target executable's manifest specifies a run level
    // of "requireAdministrator".
    //
    // You can only use CreateProcess if you know that the application you
    // are spawning will be at the same run level as the current process.
    // Otherwise, you will receive ERROR_ACCESS_DENIED if the elevation
    // consent could not be obtained.
    SHELLEXECUTEINFO info;
    ZeroMemory( &info, sizeof( info ) );
    info.cbSize = sizeof( info );
    info.lpVerb = L"open";
    // Suppress shell error UI; callers show their own message boxes.
    info.fMask = SEE_MASK_FLAG_NO_UI;
    info.lpFile = szExePath;
    info.lpParameters = szExeArgs;
    info.lpDirectory = szDirectory;
    info.nShow = SW_SHOW;
    if( !ShellExecuteEx( &info ) )
        return FALSE;
    return TRUE;
}
//-----------------------------------------------------------------------------
// Runs the specified program and waits for it to exit.
//-----------------------------------------------------------------------------
// Launches szExePath via the shell, blocks until the process exits, and
// optionally returns its exit code through pdwExitCode.
BOOL Autorun::SpawnProcessAndWait( const WCHAR *szExePath, const WCHAR *szExeArgs, DWORD *pdwExitCode )
{
    if( !szExePath )
        return FALSE;
    // NOTE: szExeArgs can be NULL.
    // NOTE: pdwExitCode can be NULL.
    // Get working directory from executable path.
    WCHAR szDirectory[MAX_PATH];
    wcscpy_s( szDirectory, szExePath );
    PathRemoveFileSpec( szDirectory );
    // See SpawnProcess for information why ShellExecute or ShellExecuteEx
    // must be used instead of CreateProcess.
    SHELLEXECUTEINFO info;
    ZeroMemory( &info, sizeof( info ) );
    info.cbSize = sizeof( info );
    info.lpVerb = L"open";
    // SEE_MASK_NOCLOSEPROCESS requests a process handle we can wait on.
    info.fMask = SEE_MASK_FLAG_NO_UI | SEE_MASK_NOASYNC | SEE_MASK_NOCLOSEPROCESS;
    info.lpFile = szExePath;
    info.lpParameters = szExeArgs;
    info.lpDirectory = szDirectory;
    info.nShow = SW_SHOW;
    if( !ShellExecuteEx( &info ) )
        return FALSE;
    // Fix: ShellExecuteEx can succeed without returning a process handle.
    // We cannot wait or obtain an exit code in that case, so report
    // failure rather than call WaitForSingleObject on a NULL handle.
    if( !info.hProcess )
        return FALSE;
    // Wait for process to finish.
    WaitForSingleObject( info.hProcess, INFINITE );
    // Return exit code from process, if requested by caller.
    if( pdwExitCode )
        GetExitCodeProcess( info.hProcess, pdwExitCode );
    CloseHandle( info.hProcess );
    return TRUE;
}
<filename>vec2.h
#ifndef __VEC2_H__
#define __VEC2_H__
#include <cmath>
#include <memory>
// Pull in the widest available SSE intrinsics header.
#if defined(__SSE4_1__)
#include <smmintrin.h>
#elif defined(__SSE2__)
#include <emmintrin.h>
#endif
#ifdef __SSE2__
// Lets SIMD code view the pair of doubles either as one __m128d register
// or as a plain two-element array.
typedef union {
    __m128d v;
    double xy[2];
} vector;
#endif //__SSE2__
// 2D double-precision vector. When SSE2 is available the storage is a
// vector union and the public x/y members are references aliasing it;
// otherwise x and y are plain doubles.
//
// NOTE(review): with SSE2 enabled, x and y are reference members, so the
// compiler-generated copy constructor would copy the references (leaving
// the copy aliasing the source's storage) — verify the out-of-line
// definitions handle copying correctly.
class Vec2 {
#ifdef __SSE2__
    vector vec;
#endif //__SSE2__
public:
#ifdef __SSE2__
    double& x;
    double& y;
#else
    double x;
    double y;
#endif //__SSE2__
    Vec2();                     // default construction (defined out of line)
    Vec2(double xy);            // both components set from a single value
    Vec2(double x, double y);   // component-wise construction
    ~Vec2();
    void operator=(const Vec2& other);
    void operator+=(const Vec2& other);
    void operator-=(const Vec2& other);
    // NOTE(review): operator* on two vectors returns a double (presumably a
    // dot product) while operator*= mutates in place — confirm the intended
    // semantics of *= against the implementation file.
    void operator*=(const Vec2& other);
    Vec2 operator+(const Vec2& other) const;
    Vec2 operator-() const;                      // negation
    Vec2 operator-(const Vec2& other) const;
    double operator*(const Vec2& other) const;   // scalar result
    Vec2 operator*(const double& other) const;   // uniform scale
    Vec2 operator/(const double& other) const;   // uniform inverse scale
    double length() const;
    Vec2 normal() const;        // NOTE(review): likely the perpendicular — confirm
    Vec2 normalize() const;     // presumably a unit-length copy — confirm
    double project_onto(const Vec2& other) const;
};
#endif //__VEC2_H__
|
<gh_stars>10-100
#ifndef _BLE_PUB_H_
#define _BLE_PUB_H_
#include "rtos_pub.h"
#if defined(SUPPORT_MIDEA_BLE)
#include "msm_ble_adapter.h"
#endif
/* Generic driver return codes. */
#define BLE_SUCCESS (0)
#define BLE_FAILURE (1)
/* Registered device name of the BLE driver. */
#define BLE_DEV_NAME "ble"
/* Magic prefix marking BLE control commands (see the command enum below). */
#define BLE_CMD_MAGIC (0xe2a0000)
/* Maximum advertising / scan-response payload size in bytes (0x1F = 31). */
#define MAX_ADV_DATA_LEN (0x1F)
/* Driver control commands, offset from BLE_CMD_MAGIC. */
enum
{
    CMD_BLE_REG_INIT = BLE_CMD_MAGIC + 1,
    CMD_BLE_REG_DEINIT,
    CMD_BLE_SET_CHANNEL,
    CMD_BLE_AUTO_CHANNEL_ENABLE,
    CMD_BLE_AUTO_CHANNEL_DISABLE,
    CMD_BLE_AUTO_SYNCWD_ENABLE,
    CMD_BLE_AUTO_SYNCWD_DISABLE,
    CMD_BLE_SET_PN9_TRX,
    CMD_BLE_SET_GFSK_SYNCWD,
    CMD_BLE_HOLD_PN9_ESTIMATE,
    CMD_BLE_STOP_COUNTING,
    CMD_BLE_START_COUNTING
};
/* Direction selector for PN9 test mode. */
enum
{
    PN9_RX = 0,
    PN9_TX
};
/* Messages posted to the BLE task (see ble_send_msg). */
enum
{
    BLE_MSG_POLL = 0,
    BLE_MSG_SLEEP,
    BLE_MSG_EXIT,
    BLE_MSG_NULL,
};
/* Single-bit mask helper. */
#define ABIT(n) (1 << n)
/* Error codes returned by the BLE API. */
typedef enum
{
    ERR_SUCCESS = 0,
    ERR_STACK_FAIL,
    ERR_MEM_FAIL,
    ERR_INVALID_ADV_DATA,
    ERR_ADV_FAIL,
    ERR_STOP_ADV_FAIL,
    ERR_GATT_INDICATE_FAIL,
    ERR_GATT_NOTIFY_FAIL,
    ERR_SCAN_FAIL,
    ERR_STOP_SCAN_FAIL,
    ERR_CONN_FAIL,
    ERR_STOP_CONN_FAIL,
    ERR_DISCONN_FAIL,
    ERR_READ_FAIL,
    ERR_WRITE_FAIL,
    ERR_REQ_RF,
    /* Add more BLE error code hereafter */
} ble_err_t;
/* How the device name appears in advertising data. */
typedef enum
{
    ADV_NAME_SHORT = 0x8,
    ADV_NAME_FULL
} adv_name_type_t;
/* Advertising flag bits. */
typedef enum
{
    AD_LIMITED = ABIT(0), /* Limited Discoverable */
    AD_GENERAL = ABIT(1), /* General Discoverable */
    AD_NO_BREDR = ABIT(2) /* BR/EDR not supported */
} adv_flag_t;
/* Role the local device plays on the BLE link. */
typedef enum
{
    BLE_ROLE_NONE = 0,
    BLE_ROLE_MASTER,
    BLE_ROLE_SLAVE,
    BLE_ROLE_HYBRID, /* reserved */
} ble_role_t;
/* Advertising configuration: payloads, channel map and interval bounds. */
typedef struct
{
    uint8 advData[MAX_ADV_DATA_LEN];
    uint8 advDataLen;
    uint8 respData[MAX_ADV_DATA_LEN];
    uint8 respDataLen;
    uint8 channel_map;
    uint16 interval_min;
    uint16 interval_max;
    /* Subject to add more hereafter in the future */
} adv_info_t;
/* Scan configuration. */
typedef struct
{
    uint8 filter_en;
    uint8 channel_map;
    uint16 interval;
    uint16 window;
} scan_info_t;
/* RF time bookkeeping shared between the BLE and WiFi stacks. */
typedef struct rf_time_s
{
    UINT32 ble_rf_time;
    UINT32 wifi_rf_time;
} rf_time_t;
#if defined(SUPPORT_MIDEA_BLE)
extern msm_ble_adv_content_t ms_adv_init_info;
#endif
/* Power-save debug logging switch. */
#define BLE_PS_DEBUG
#ifdef BLE_PS_DEBUG
#define BLE_PS_PRT os_printf
#else
#define BLE_PS_PRT os_null_printf
#endif
/* Power-save forbid reason codes. */
typedef enum {
    BLE_PS_FORBID_KRL = 1,
    BLE_PS_FORBID_ENABLED = 2,
    BLE_PS_FORBID_PREVENT = 3,
    BLE_PS_FORBID_TM = 4,
    BLE_PS_FORBID_RWBT = 5,
    BLE_PS_FORBID_RWBLE = 6,
    BLE_PS_FORBID_EA = 7,
    BLE_PS_FORBID_H4TL = 8,
} BLE_PS_FORBID_STATUS;
/* Global BLE driver state (defined in the driver implementation). */
extern uint8_t ble_flag;
extern beken_queue_t ble_msg_que;
extern adv_info_t adv_info;
extern scan_info_t scan_info;
extern uint8_t ble_init_over;
extern uint8_t ble_deep_sleep;
extern ble_role_t ble_role_mode;
/* Events delivered through the registered ble_event_cb_t. */
typedef enum
{
    BLE_STACK_OK,
    BLE_STACK_FAIL,
    BLE_CONNECT,
    BLE_DISCONNECT,
    BLE_MTU_CHANGE,
    BLE_CFG_NOTIFY,
    BLE_CFG_INDICATE,
} ble_event_t;
/* Application callback types. */
typedef void (*ble_write_cb_t)(uint16_t char_id, uint8_t *buf, uint8_t len);
typedef void (*ble_event_cb_t)(ble_event_t event, void *param);
typedef void (*ble_recv_adv_cb_t)(uint8_t *buf, uint8_t len);
typedef void (*ble_read_cb_t)(uint16_t char_id, uint8_t *buf, uint8_t len);
/* Currently registered callbacks (installed via the ble_set_*_cb setters). */
extern ble_write_cb_t ble_write_cb;
extern ble_event_cb_t ble_event_cb;
extern ble_recv_adv_cb_t ble_recv_adv_cb;
extern ble_read_cb_t ble_read_cb;
/* Scan parameters supplied by the application. */
typedef struct
{
    uint8_t channel_map;
    uint16_t interval;
    uint16_t window;
} ble_scan_param_t;
/* Advertising parameters supplied by the application. */
typedef struct
{
    uint8_t channel_map;
    uint16_t interval_min;
    uint16_t interval_max;
} ble_adv_param_t;
/* Lifecycle / control API. */
void ble_init(void);
void ble_exit(void);
void ble_activate(char *ble_name);
void ble_intc_set(uint32_t enable);
void ble_clk_power_up(void);
void ble_clk_power_down(void);
/* RF arbitration between the BLE and WiFi stacks. */
void ble_switch_rf_to_wifi(void);
void ble_switch_rf_to_ble(void);
void ble_set_power_up(uint32 up);
void ble_dut_start(void);
UINT8 ble_is_start(void);
UINT8* ble_get_mac_addr(void);
UINT8* ble_get_name(void);
void ble_send_msg(UINT32 msg);
uint8_t if_ble_sleep(void);
void rf_wifi_used_clr(void);
void rf_wifi_used_set(void);
UINT32 if_rf_wifi_used(void );
void rf_not_share_for_ble(void);
void rf_can_share_for_ble(void);
/* Callback registration and role selection. */
void ble_set_write_cb(ble_write_cb_t func);
void ble_set_event_cb(ble_event_cb_t func);
void ble_set_read_cb(ble_read_cb_t func);
void ble_set_recv_adv_cb(ble_recv_adv_cb_t func);
void ble_set_role_mode(ble_role_t role);
ble_role_t ble_get_role_mode();
/* Message wrapper (presumably carried on ble_msg_que — confirm). */
typedef struct temp_message
{
    uint32_t data;
}BLE_MSG_T;
#define APP_DEVICE_NAME_LENGTH_MAX (18)
/// Default Device Name if no value can be found in NVDS
#define APP_DFLT_DEVICE_NAME ("BK7231BT-01")
#define APP_DFLT_DEVICE_NAME_LEN (sizeof(APP_DFLT_DEVICE_NAME))
extern char app_dflt_dev_name[APP_DEVICE_NAME_LENGTH_MAX];
#endif /* _BLE_PUB_H_ */
|
<gh_stars>0
import React, {Component} from "react"
// import PropTypes from "prop-types"
// import {Link} from "react-router"
// import FlatButton from "material-ui/FlatButton"
// import Popover from "material-ui/Popover"
// import Menu from "material-ui/Menu"
// import MenuItem from "material-ui/MenuItem"
class LandingPage extends Component {
constructor(props) {
super(props)
}
componentDidMount() {
}
render() {
return (
<div>
Get them here.
</div>
)
}
}
LandingPage.propTypes = {
}
export default LandingPage
|
// Asset manifest mapping logical character ids to bundler-resolved URLs.
// NOTE(review): the URL arguments must remain literal strings — bundlers
// detect `new URL("./x", import.meta.url)` statically, so this table cannot
// be generated with a loop. Confirm against the build tooling before
// restructuring.
export const assets = {};
// Images (png / jpeg).
assets["character1"] = new URL("./1.jpeg", import.meta.url);
assets["character35"] = new URL("./35.png", import.meta.url);
assets["character83"] = new URL("./83.png", import.meta.url);
assets["character96"] = new URL("./96.png", import.meta.url);
assets["character316"] = new URL("./316.png", import.meta.url);
assets["character334"] = new URL("./334.jpeg", import.meta.url);
assets["character560"] = new URL("./560.png", import.meta.url);
assets["character570"] = new URL("./570.png", import.meta.url);
assets["character622"] = new URL("./622.png", import.meta.url);
assets["character627"] = new URL("./627.png", import.meta.url);
assets["character630"] = new URL("./630.png", import.meta.url);
assets["character632"] = new URL("./632.png", import.meta.url);
assets["character663"] = new URL("./663.png", import.meta.url);
assets["character761"] = new URL("./761.jpeg", import.meta.url);
assets["character765"] = new URL("./765.png", import.meta.url);
assets["character796"] = new URL("./796.png", import.meta.url);
assets["character806"] = new URL("./806.png", import.meta.url);
assets["character810"] = new URL("./810.png", import.meta.url);
assets["character813"] = new URL("./813.png", import.meta.url);
assets["character819"] = new URL("./819.png", import.meta.url);
assets["character863"] = new URL("./863.png", import.meta.url);
assets["character871"] = new URL("./871.png", import.meta.url);
assets["character1020"] = new URL("./1020.jpeg", import.meta.url);
assets["character1028"] = new URL("./1028.jpeg", import.meta.url);
assets["character1030"] = new URL("./1030.jpeg", import.meta.url);
assets["character1084"] = new URL("./1084.png", import.meta.url);
assets["character1086"] = new URL("./1086.png", import.meta.url);
assets["character1088"] = new URL("./1088.png", import.meta.url);
assets["character1090"] = new URL("./1090.png", import.meta.url);
assets["character1092"] = new URL("./1092.png", import.meta.url);
// Audio clips (mp3).
assets["character47"] = new URL("./47.mp3", import.meta.url);
assets["character48"] = new URL("./48.mp3", import.meta.url);
assets["character172"] = new URL("./172.mp3", import.meta.url);
assets["character178"] = new URL("./178.mp3", import.meta.url);
assets["character179"] = new URL("./179.mp3", import.meta.url);
assets["character180"] = new URL("./180.mp3", import.meta.url);
assets["character181"] = new URL("./181.mp3", import.meta.url);
assets["character182"] = new URL("./182.mp3", import.meta.url);
assets["character183"] = new URL("./183.mp3", import.meta.url);
assets["character184"] = new URL("./184.mp3", import.meta.url);
assets["character185"] = new URL("./185.mp3", import.meta.url);
assets["character186"] = new URL("./186.mp3", import.meta.url);
assets["character187"] = new URL("./187.mp3", import.meta.url);
assets["character188"] = new URL("./188.mp3", import.meta.url);
assets["character189"] = new URL("./189.mp3", import.meta.url);
assets["character190"] = new URL("./190.mp3", import.meta.url);
assets["character191"] = new URL("./191.mp3", import.meta.url);
assets["character192"] = new URL("./192.mp3", import.meta.url);
assets["character193"] = new URL("./193.mp3", import.meta.url);
assets["character194"] = new URL("./194.mp3", import.meta.url);
assets["character195"] = new URL("./195.mp3", import.meta.url);
assets["character196"] = new URL("./196.mp3", import.meta.url);
assets["character197"] = new URL("./197.mp3", import.meta.url);
assets["character198"] = new URL("./198.mp3", import.meta.url);
assets["character199"] = new URL("./199.mp3", import.meta.url);
assets["character200"] = new URL("./200.mp3", import.meta.url);
assets["character201"] = new URL("./201.mp3", import.meta.url);
assets["character202"] = new URL("./202.mp3", import.meta.url);
assets["character203"] = new URL("./203.mp3", import.meta.url);
assets["character204"] = new URL("./204.mp3", import.meta.url);
assets["character205"] = new URL("./205.mp3", import.meta.url);
assets["character206"] = new URL("./206.mp3", import.meta.url);
assets["character207"] = new URL("./207.mp3", import.meta.url);
assets["character208"] = new URL("./208.mp3", import.meta.url);
assets["character209"] = new URL("./209.mp3", import.meta.url);
assets["character210"] = new URL("./210.mp3", import.meta.url);
assets["character256"] = new URL("./256.mp3", import.meta.url);
|
#!/bin/bash
# Provision an Ubuntu/Debian host and launch the OCR task inside a
# detached screen session named "ocr".

apt -y update
apt -y install git screen

git clone https://github.com/Envinorma/data-tasks.git
cd data-tasks

# Python environment.
apt -y install python3-pip
pip3 install virtualenv
virtualenv venv
source venv/bin/activate

# OCRmyPDF runtime dependencies (remove any distro-packaged ocrmypdf first).
apt-get -y remove ocrmypdf
apt-get -y update
apt-get -y install \
    ghostscript \
    icc-profiles-free \
    liblept5 \
    libxml2 \
    pngquant \
    python3-pip \
    tesseract-ocr \
    tesseract-ocr-fra \
    zlib1g

pip3 install -r requirements.txt
pip3 install ipython==7.19.0
cp default_config.ini config.ini

# OpenStack credentials are placeholders (XXXXXXXX) — fill in before running.
X='OS_AUTH_URL="https://auth.cloud.ovh.net/v3/" OS_IDENTITY_API_VERSION=3 OS_USER_DOMAIN_NAME=Default OS_PROJECT_DOMAIN_NAME=Default OS_TENANT_ID=XXXXXXXX OS_TENANT_NAME=XXXXXXXX OS_USERNAME=XXXXXXXX OS_PASSWORD=XXXXXXXX OS_REGION_NAME=SBG python3 -m tasks.ocr_ap.ocr_ap'

# Fix: screen options must precede the command. Previously "-S ocr" was
# placed after "bash -c ..." and was therefore passed to bash as positional
# arguments, so the session was never named.
screen -S ocr -d -m bash -c "$X"
|
// cc29 robotPaths
[
[ true, false, false, false, false ],
[ false, false, false, false, false ],
[ false, false, false, false, false ],
[ false, false, false, false, false ],
[ false, false, false, false, "destination" ]
]
/*
*
* A robot located at the top left corner of a 5x5 grid is trying to reach the
* bottom right corner. The robot can move either up, down, left, or right,
* but cannot visit the same spot twice. How many possible unique paths are
* there to the bottom right corner?
*
* EC: Make your solution work for a grid of any size.
*
*/
// Builds an n-by-n board of `false` cells, with helpers attached for
// marking and querying visited squares.
const makeBoard = ( n ) => {
    const board = [];
    for ( let row = 0; row < n; row++ ) {
        const cells = [];
        for ( let col = 0; col < n; col++ ) {
            cells.push( false );
        }
        board.push( cells );
    }
    // Flip the visited flag for square (i, j).
    board.toggle = ( i, j ) => {
        board[ i ][ j ] = !board[ i ][ j ];
    };
    // Has square (i, j) already been visited on the current path?
    board.hasBeenVisited = ( i, j ) => board[ i ][ j ];
    return board;
};

// Counts self-avoiding paths (moving up/down/left/right, never revisiting
// a square) from the top-left to the bottom-right corner of an n-by-n grid.
const robotPaths = ( n ) => {
    const board = makeBoard( n );
    let pathTotal = 0;

    const walk = ( row, col ) => {
        // Destination reached: record one complete path.
        if ( row === n - 1 && col === n - 1 ) {
            pathTotal++;
            return;
        }
        // Prune: off the board, or already on the current path.
        if ( row < 0 || col < 0 || row >= n || col >= n ) return;
        if ( board.hasBeenVisited( row, col ) ) return;

        // Mark, explore the four neighbors, then unmark (backtrack).
        board.toggle( row, col );
        walk( row, col + 1 );
        walk( row + 1, col );
        walk( row, col - 1 );
        walk( row - 1, col );
        board.toggle( row, col );
    };

    walk( 0, 0 );
    return pathTotal;
};

console.log( robotPaths( 5 ) );
|
<reponame>msaveleva/RSS-Reader
//
//  SourcesManager.h
//  RSS-Reader
//
//  Created by MariaSaveleva on 08/02/2017.
//  Copyright © 2017 MariaSaveleva. All rights reserved.
//
#import <Foundation/Foundation.h>
@class FeedItem;
@class FeedSource;
@class Feed;
NS_ASSUME_NONNULL_BEGIN
/// Singleton that handles all model changes (feed sources and the feeds
/// loaded from them). Posts kNotificationRSSDataUpdated when data changes.
@interface SourcesManager : NSObject
/// Shared singleton instance.
+ (instancetype)sharedInstance;
/// Returns the fetched and parsed feeds.
- (NSArray <Feed *> *)feeds;
/// Returns all feed sources.
- (NSArray <FeedSource *> *)feedSources;
/// Adds a new feed source and loads feeds for it.
- (void)addFeedSource:(FeedSource *)feedSource;
/// Removes a feed source and the feeds loaded from it.
- (void)removeFeedSource:(FeedSource *)feedSource;
@end
NS_ASSUME_NONNULL_END
|
<gh_stars>0
import { Component, ElementRef, EventEmitter, forwardRef, Input, OnDestroy, OnInit, Output, ViewChild } from '@angular/core';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { base64ToFile, ImageCroppedEvent, ImageTransform } from 'ngx-image-cropper';
import { HttpEventType, HttpResponse } from '@angular/common/http';
import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';
import { Subject, Subscription } from 'rxjs';
import { takeUntil } from 'rxjs/operators';
import { FileDetails, FileType, FileUploaderService } from '../model/file.model';
export interface ImageCropperOptions {
headerText: string;
cancelText: string;
confirmText: string;
}
/**
* File upload component. Represents template and logic for upload and download files.
*
* @example <oc-file-upload [(ngModel)]="fileModel"
* fileType="singleImage"
* [isMultiFile]="false"
* fileUploadText="Throw file here"
* fileUploadButtonText="Browse file"
* imageUploadButtonText="Browse file"
* defaultFileIcon="/fIcon.png"
* uploadIconUrl="/uIcon.png"
* closeIconUrl="/close.png"
* zoomInIconUrl="/zoomIn.png"
* zoomOutIconUrl="/zoomOut.png"
* imageWidth="1024"
* imageHeight="768"
* [hash]="['a87sh098a7shd098ahs0d97has09dha09sdh9a07shd09ahs90dhas09d7h9a0s7hd09ahsd097has9d7ha9sd7ha09s7dh']"
* acceptType="image/*"
* (customMsgChange)="onMsgChange()"
* >
*/
@Component({
selector: 'oc-file-upload',
templateUrl: './oc-file-upload.component.html',
styleUrls: ['./oc-file-upload.component.css'],
providers: [
{
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => OcFileUploadComponent),
multi: true,
},
],
})
export class OcFileUploadComponent implements OnInit, OnDestroy, ControlValueAccessor {
/**
* File input template reference
*/
@ViewChild('fileDropRef', { static: false }) fileInputVar: ElementRef;
/**
* Set model value
*/
@Input() set value(val: string) {
this.initValues(val);
}
/**
* Text for file upload block
*/
@Input() fileUploadText: string = 'Drag & drop file here or';
/**
* Text for file upload button
*/
@Input() fileUploadButtonText: string = 'Browse File';
/**
* Text for image upload button
*/
@Input() imageUploadButtonText: string = 'Browse File';
/**
* Options for image cropper modal.
* You can change text of the buttons, for example.
*/
@Input() imageCropperOptions: ImageCropperOptions = {
headerText: 'Edit Image',
cancelText: 'Cancel',
confirmText: 'Confirm',
};
/**
* Flag for download multiple files allowed or not
*/
@Input() isMultiFile: boolean = false;
/**
* URL for default file icon.
*/
@Input() defaultFileIcon: string = 'assets/angular-common-components/file_icon.svg';
/**
* Supported file type ( "singleFile", "singleImage", "privateSingleFile", "multiFile", "multiImage", "multiPrivateFile" )
*/
@Input() fileType: FileType;
/**
* Icon for upload button
*/
@Input() uploadIconUrl: string = 'assets/angular-common-components/upload_icon.svg';
/**
* Icon URL value for buttons that close container window and stop uploading file
*/
@Input() closeIconUrl: string = 'assets/angular-common-components/close-icon.svg';
/**
* Icon URL value for button that active zoomIn feature
*/
@Input() zoomInIconUrl: string = 'assets/angular-common-components/zoom-in.svg';
/**
* Icon URL value for button that active zoomOut feature
*/
@Input() zoomOutIconUrl: string = 'assets/angular-common-components/zoom-out.svg';
/**
* Variable for width of image
*/
@Input() imageWidth: number;
/**
* Variable for height of image
*/
@Input() imageHeight: number;
/**
* File hash
*/
@Input() hash: string[] = [];
/**
* File type (MIME) allowed to use
*/
@Input() acceptType: string;
/**
* Output emits after change custom message
*/
@Output() readonly customMsgChange = new EventEmitter<boolean>();
/**
* Subscription to upload file from server
*/
uploadFileReq: Subscription = null;
/**
* Flag to know is upload in process or not
*/
isUploadInProcess: boolean = false;
/**
* Array of objects with file data
*/
fileDetailArr: FileDetails[] = [];
/**
* Text that shows up when image load throw error
*/
imageLoadErrorMessage: string = 'Please provide valid image';
/**
* Flag that shows existence of image load error
*/
hasImageLoadError: boolean = false;
/**
* Object of cropped file
*/
croppedFileObj: any;
/**
* Image transform data
*/
transform: ImageTransform = {};
/**
* Flag that shows that upload image in process
*/
uploadImageInProcess: boolean = false;
/**
* Event that triggers when file browsed
*/
browsedFileEvent: any;
/**
* Name of valid file
*/
fileName: string = '';
/**
* Name of invalid file
*/
invalidFileName: string;
/**
* Flag that shows existence of invalid file
*/
containsInvalidFile = false;
/**
* Flag that allow maintain aspect ratio logic or not
*/
maintainAspectRatio = false;
/**
* Aspect ratio value
*/
aspectRatio: number;
/**
* Scale value
*/
scale = 1;
/**
* Percent progress showed up in loader
*/
loaderValue = 0;
/**
* Width of cropped image value
*/
croppedImageWidth: number;
/**
* Height of cropped image value
*/
croppedImageHeight: number;
/**
* Width value to resize
*/
resizeToWidth = 0;
/**
* Height value to resize
*/
resizeToHeight = 0;
/**
* Upload button text
*/
uploadButtonText: string = 'Browse file';
/**
* @private Subject to clear all subscriptions
*/
private destroy$ = new Subject<void>();
constructor(private modalService: NgbModal, private fileUploaderService: FileUploaderService) {}
ngOnInit(): void {
this.setUploadButtonText();
if (this.isFileTypeImage) {
this.calculateAspectRatio();
}
}
ngOnDestroy(): void {
this.resetSelection();
this.destroy$.next();
this.destroy$.complete();
if (this.uploadFileReq) {
this.uploadFileReq.unsubscribe();
}
}
/**
* Return allowed default or provided MIME type for file input
*/
getAcceptedMIMEType(): string {
const setTypeIfImage = this.isFileTypeImage() ? 'image/*' : '*/*';
return this.acceptType ? this.acceptType : setTypeIfImage;
}
    /**
     * Drag-and-drop handler. Validates the MIME type of the first dropped
     * file and, when multi-file mode is on or nothing has been uploaded yet,
     * forwards the dropped FileList to the hidden file input and re-dispatches
     * a 'change' event so the normal browse flow takes over.
     * NOTE(review): only the first file's type is validated even though the
     * whole FileList is forwarded — confirm this is intended for multi-drop.
     */
    onFileDropped($event: any): void {
        if (this.validMimeTypeCheck($event.dataTransfer.files[0].type) && (this.isMultiFileSupport() || this.fileDetailArr.length === 0)) {
            this.fileInputVar.nativeElement.files = $event.dataTransfer.files;
            this.fileInputVar.nativeElement.dispatchEvent(new Event('change', { bubbles: true }));
        }
    }
/**
* Compare file type with allowed type list
* @param fileType - string MIME type ex.: 'image/jpg'
* @return boolean - result of validation
*/
validMimeTypeCheck(fileType: string): boolean {
const typeArr: string[] = this.getAcceptedMIMEType().split(',');
for (const validType of typeArr) {
const validTypeArr: string[] = validType.split('/');
const fileTypeSplitArr: string[] = fileType.split('/');
const acceptedWildCardType = validTypeArr[1] === '*' && validTypeArr[0] === fileTypeSplitArr[0];
return validTypeArr[0] === '*' || fileType === validType || acceptedWildCardType;
}
return false;
}
    /**
     * Uploads a single file through the host-provided fileUploadRequest.
     * A placeholder FileDetails entry is pushed first so the template can
     * render progress; it is replaced by the server response on completion.
     * @param {File} file - browser File object to send
     */
    uploadFile(file: File): void {
        if (!this.fileUploaderService.fileUploadRequest || this.hasImageLoadError) {
            // tslint:disable-next-line:no-console
            console.error('Please, set the fileUploadRequest function');
            this.resetSelection();
        } else {
            this.isUploadInProcess = true;
            let lastFileDetail = new FileDetails();
            lastFileDetail.name = this.fileName;
            if (!this.fileDetailArr) {
                this.fileDetailArr = [];
            }
            this.fileDetailArr.push(lastFileDetail);
            const formData: FormData = new FormData();
            formData.append('file', file, this.fileName);
            this.uploadFileReq = this.fileUploaderService.fileUploadRequest(formData, this.isFileTypePrivate(), this.hash).subscribe(
                (event: any) => {
                    if (event.type === HttpEventType.UploadProgress) {
                        // Keep visible progress slightly below the real value so
                        // the bar does not hit 100% before the response arrives.
                        lastFileDetail.fileUploadProgress = Math.round((event.loaded * 100) / event.total) - 5;
                    } else if (event.type === HttpEventType.ResponseHeader) {
                        lastFileDetail.fileUploadProgress = 97;
                    } else if (event.type === HttpEventType.DownloadProgress) {
                        lastFileDetail.fileUploadProgress = 99;
                    } else if (event instanceof HttpResponse) {
                        // Final response: replace the placeholder entry with the
                        // server-provided details and emit the new model value.
                        lastFileDetail = this.convertFileUploadResToFileDetails(event);
                        lastFileDetail.fileUploadProgress = 100;
                        lastFileDetail.fileIconUrl = this.defaultFileIcon;
                        this.fileDetailArr[this.fileDetailArr.length - 1] = lastFileDetail;
                        this.isUploadInProcess = false;
                        this.uploadFileReq = null;
                        this.emitChanges();
                        this.resetSelection();
                    }
                },
                () => {
                    // error path: clear the in-process flag and reset the input
                    this.isUploadInProcess = false;
                    this.resetSelection();
                },
                () => {
                    // complete path
                    this.isUploadInProcess = false;
                    this.resetSelection();
                },
            );
        }
    }
/**
* This method is used to convert uploaded file response to fileDetails.
*/
convertFileUploadResToFileDetails(fileUploadRes: HttpResponse<FileDetails>): FileDetails {
const fileDetails = new FileDetails();
fileDetails.uploadDate = fileUploadRes.body.uploadDate;
fileDetails.fileId = fileUploadRes.body.fileId;
fileDetails.name = fileUploadRes.body.name;
fileDetails.contentType = fileUploadRes.body.contentType;
fileDetails.size = fileUploadRes.body.size;
fileDetails.isPrivate = fileUploadRes.body.isPrivate;
fileDetails.mimeCheck = fileUploadRes.body.mimeCheck;
fileDetails.fileUrl = fileUploadRes.body.fileUrl;
fileDetails.isError = fileUploadRes.body.isError;
return fileDetails;
}
    /**
     * Handles a file picked via the browse button (or forwarded from the
     * drop zone). Images open the cropper modal first; other file types are
     * uploaded immediately.
     * @param event - native 'change' event from the file input
     * @param content - template reference for the cropper modal (images only)
     */
    fileBrowseHandler(event: any, content?: any): void {
        this.onTouched();
        if (!event?.target?.files[0]?.name) {
            return;
        }
        if (this.isFileTypeImage()) {
            // keep the event so the cropper can read the original file later
            this.browsedFileEvent = event;
            this.fileName = event?.target?.files[0]?.name;
            this.fileName = this.fileName ? this.fileName : event?.dataTransfer?.files[0]?.name;
            this.customMsgChange.emit(false);
            this.modalService
                .open(content, {
                    centered: true,
                    backdrop: 'static',
                    keyboard: false,
                    size: 'lg',
                })
                .result.then(
                    () => {
                        // Do Nothing
                    },
                    () => {
                        // modal dismissed: drop the picked file
                        this.resetSelection();
                    },
                );
        } else {
            this.fileName = event?.target?.files[0]?.name;
            this.fileName = this.fileName ? this.fileName : event?.dataTransfer?.files[0]?.name;
            this.uploadFile(event.target.files[0]);
        }
    }
    /**
     * Clears the native input value (so the same file can be re-picked) and
     * any image-load error state; emits the empty-state message when no
     * files remain in the list.
     */
    resetSelection(): void {
        if (this.fileInputVar) {
            this.fileInputVar.nativeElement.value = '';
        }
        this.imageLoadErrorMessage = '';
        this.hasImageLoadError = false;
        if (this.fileDetailArr && this.fileDetailArr.length < 1) {
            this.customMsgChange.emit(true);
        }
    }
/**
* Function check if file type related to image types
* @returns `boolean`
*/
isFileTypeImage(): boolean {
return this.fileType === 'singleImage' || this.fileType === 'multiImage';
}
/**
* Function check if file type related to private types
* @returns `boolean`
*/
isFileTypePrivate(): boolean {
return this.fileType === 'multiPrivateFile' || this.fileType === 'privateSingleFile';
}
/**
* Function check if file type related to types with multiple files support
* @returns `boolean`
*/
isMultiFileSupport(): boolean {
return this.fileType === 'multiPrivateFile' || this.fileType === 'multiFile' || this.fileType === 'multiImage';
}
/**
* Function check if file type NOT related to image types
* @returns `boolean`
*/
isFileTypeNotImage(): boolean {
return (
this.fileType === 'singleFile' ||
this.fileType === 'privateSingleFile' ||
this.fileType === 'multiFile' ||
this.fileType === 'multiPrivateFile'
);
}
/**
* Function that executes after image cropping
* @param {ImageCroppedEvent} event - Crop event object
*/
imageCropped(event: ImageCroppedEvent): void {
this.croppedImageWidth = event.width;
this.croppedImageHeight = event.height;
this.croppedFileObj = base64ToFile(event.base64);
}
    /**
     * Cropper callback invoked when the selected image cannot be loaded;
     * flips the error flag so the template can show imageLoadErrorMessage.
     */
    loadImageFailed(): void {
        this.hasImageLoadError = true;
    }
/**
* Function that subtract from scale 0.1 and save it
*/
zoomOut(): void {
this.scale -= 0.1;
this.transform = {
...this.transform,
scale: this.scale,
};
}
/**
* Function that add to scale 0.1 and save it
*/
zoomIn(): void {
this.scale += 0.1;
this.transform = {
...this.transform,
scale: this.scale,
};
}
/**
* Set resize width and height, also aspect ratio
*/
calculateAspectRatio(): void {
if (this.imageWidth) {
this.resizeToWidth = this.imageWidth;
}
if (this.imageHeight) {
this.resizeToHeight = this.imageHeight;
}
if (this.imageWidth && this.imageHeight) {
this.aspectRatio = this.imageWidth / this.imageHeight;
this.maintainAspectRatio = true;
} else {
this.aspectRatio = 1;
}
}
    /**
     * Cancels an in-flight upload (or removes an already-uploaded entry) at
     * the given index: closes the active request subscription if one is
     * running, removes the entry, re-emits the model and signals the empty
     * state when no files remain.
     * @param {number} idx - Index of file in details
     */
    cancelUploading(idx: number): void {
        this.onTouched();
        if (this.isUploadInProcess && this.uploadFileReq) {
            this.uploadFileReq.unsubscribe();
        }
        this.uploadFileReq = null;
        this.fileDetailArr.splice(idx, 1);
        this.emitChanges();
        if (this.fileDetailArr.length < 1) {
            this.customMsgChange.emit(true);
        }
    }
/**
* Function get file details and returns file url
* @param {FileDetails} file
* @returns `string`
*/
getUrl(file: FileDetails): string {
// NOTE: for non image file upload always show default file upload icon
if (this.isFileTypeNotImage()) {
return this.defaultFileIcon;
}
if (file.fileUploadProgress === 100) {
return file.fileUrl;
} else {
return this.defaultFileIcon;
}
}
/**
* Function get file details and returns CSS class for file icon
* @param {FileDetails} file
* @returns `string`
*/
getFileIconClass(file: FileDetails): string {
if (this.isFileTypeNotImage()) {
return 'default-icon';
}
return file?.fileUploadProgress === 100 ? 'app-icon' : 'default-icon';
}
    /**
     * Opens/downloads an uploaded file. Private files first fetch fresh
     * details (a usable URL) via fileDetailsRequest and open that in a new
     * tab; public files open their stored fileUrl directly. Only files that
     * have finished uploading (progress === 100) react.
     * @param {FileDetails} file
     */
    // prettier-ignore
    downloadFile(file: FileDetails): void { // NOSONAR
        if (file && file.fileUploadProgress && file.fileUploadProgress === 100) {
            if (this.isFileTypePrivate()) {
                if (!this.fileUploaderService.fileDetailsRequest) {
                    // tslint:disable-next-line:no-console
                    console.error('Please, set the FileDetailsRequest function');
                } else {
                    this.fileUploaderService
                        .fileDetailsRequest(file.fileId)
                        .pipe(takeUntil(this.destroy$))
                        .subscribe(res => {
                            if (res && res.fileUrl) {
                                window.open(res.fileUrl, '_blank');
                            }
                        });
                }
            } else {
                if (file.fileUrl) {
                    window.open(file.fileUrl, '_blank');
                }
            }
        }
    }
/**
* Function that called on main model change and emits value
*/
emitChanges(): void {
if (this.isMultiFileSupport()) {
this.onChange(this.getFileUrlOrFileId(this.fileDetailArr));
} else {
this.onChange(this.fileDetailArr?.length > 0 ? this.getFileUrlOrFileId(this.fileDetailArr)[0] : null);
}
}
    // ControlValueAccessor plumbing. The two callbacks below are no-ops until
    // Angular's forms runtime replaces them via registerOnChange/registerOnTouched.
    onTouched = () => {
        // nothing to do
    };
    onChange: (value: any) => void = () => {
        // nothing to do
    };
    // Receives a new model value from the forms API and initializes the component.
    writeValue(obj: any): void {
        this.initValues(obj);
    }
    registerOnChange(onChange: (value: any) => void): void {
        this.onChange = onChange;
    }
    registerOnTouched(onTouched: () => void): void {
        this.onTouched = onTouched;
    }
    // Disabled state is intentionally not handled by this component.
    // prettier-ignore
    setDisabledState?(isDisabled: boolean): void { // NOSONAR
    }
/**
* @private Sets the text for the upload button based on the file type
*/
private setUploadButtonText(): void {
this.uploadButtonText = this.isFileTypeImage() ? this.imageUploadButtonText : this.fileUploadButtonText;
}
    /**
     * @private Initializes the component model from the value written by the
     * forms API: an array of URLs/ids in multi-file mode, a single string
     * otherwise. Each entry is resolved to FileDetails via getFileDetails.
     * @param {string | string[]} urlData
     */
    private initValues(urlData: string | string[]): void {
        if (!this.fileUploaderService.fileDetailsRequest) {
            console.error('Please, set the FileDetailsRequest function');
        } else if (urlData) {
            // start from a clean list; details are pushed back asynchronously
            this.fileDetailArr = [];
            if (this.isMultiFileSupport() && typeof urlData !== 'string') {
                urlData.forEach(fileUrl => {
                    this.getFileDetails(fileUrl);
                });
            } else if (typeof urlData === 'string') {
                this.getFileDetails(urlData);
            } else {
                console.error('initValues function error: something wrong with provided data');
            }
        }
    }
    /**
     * @private Resolves one URL/id to FileDetails via the uploader service
     * and appends it to the list. A 404 means the file is hosted outside the
     * file service, so a synthetic entry is created instead.
     * @param {string} urlData
     */
    private getFileDetails(urlData: string): void {
        this.fileUploaderService
            .fileDetailsRequest(urlData)
            .pipe(takeUntil(this.destroy$))
            .subscribe(
                res => {
                    // mark as fully uploaded so the template renders it as ready
                    this.fileDetailArr.push({ ...res, fileUploadProgress: 100 });
                    this.emitChanges();
                },
                error => {
                    if (error.error.code === 404) {
                        // not managed by the file service: treat as external
                        this.fileDetailArr.push(this.externallyHostedImageHandler(urlData));
                        this.emitChanges();
                    }
                },
            );
    }
/**
* @private Creates an object when the image is externally hosted
* @returns {FileDetails}
*/
private externallyHostedImageHandler(urlData: string): FileDetails {
const fileDetails = new FileDetails();
fileDetails.name = urlData;
fileDetails.fileUrl = urlData;
return { ...fileDetails, fileUploadProgress: 100 };
}
/**
* @private Returns array with file ids and URLs
* @param {FileDetails[]} files
* @returns {string[]} `string[]`
*/
private getFileUrlOrFileId(files: FileDetails[]): string[] {
if (files?.length > 0) {
return files.map(file => (file?.isPrivate ? file.fileId : file.fileUrl));
}
return null;
}
}
|
<reponame>AbdifatahZamiir/TaskListApp<filename>web/src/utils/fetchData.js<gh_stars>0
import axios from "axios";
import config from "../config.json";
/**
 * Fetches a single todo by id from the API.
 *
 * BUG FIX: the original called `this.props.history.replace(...)` when the
 * todo was missing, but `this` is undefined inside a plain exported module
 * function, so a missing todo crashed with a TypeError instead of
 * redirecting. Navigation belongs to the calling component; return null and
 * let the caller redirect to its not-found route.
 *
 * @param {string} todoId - id of the todo to fetch
 * @returns {Promise<object|null>} the todo, or null when not found
 */
export async function getTodo(todoId) {
  const { data: todo } = await axios.get(`${config.apiUrl}/${todoId}`);
  return todo || null;
}
/**
 * Creates or updates a todo. An existing todo (matched by _id in the current
 * API list) is updated via PUT; otherwise a new one is POSTed.
 *
 * NOTE(review): the original also mutated the locally-found list item, but
 * that mutated copy was never sent — the request body was always rebuilt
 * from `todo`. The dead mutation has been removed; behavior is unchanged.
 *
 * @param {object} todo - todo with name, category, completed and optional _id
 */
export async function saveTodo(todo) {
  const payload = {
    name: todo.name,
    completed: todo.completed,
    type: todo.category
  };
  const { data: todos } = await axios.get(config.apiUrl);
  const todoInDb = todos.find(t => t._id === todo._id);
  if (todoInDb && todoInDb._id) {
    await axios.put(`${config.apiUrl}/${todo._id}`, payload);
  } else {
    await axios.post(config.apiUrl, payload);
  }
}
|
<gh_stars>0
package org.linlinjava.litemall.core;
import org.apache.ibatis.reflection.ArrayUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.linlinjava.litemall.core.notify.NotifyService;
import org.linlinjava.litemall.core.notify.NotifyType;
import org.linlinjava.litemall.core.util.DateTimeUtil;
import org.linlinjava.litemall.core.util.StringConstants;
import org.linlinjava.litemall.db.domain.LitemallOrder;
import org.linlinjava.litemall.db.domain.LitemallOrderGoods;
import org.linlinjava.litemall.db.service.LitemallOrderGoodsService;
import org.linlinjava.litemall.db.service.LitemallOrderService;
import org.linlinjava.litemall.db.util.OrderUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.core.task.SyncTaskExecutor;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import java.math.BigDecimal;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Executor;
/**
 * Tests for the WeChat (Weixin) template-message notification service.
 * <p>
 * NotifyService dispatches notifications on an async executor; the nested
 * ContextConfiguration below swaps in a SyncTaskExecutor so every task
 * completes inline before a test method returns (no sleeps required).
 * <p>
 * The payment/shipping/refund tests previously duplicated identical order
 * and parameter construction; that setup now lives in two private helpers.
 */
@WebAppConfiguration
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest
public class WeixinNotificationTest {
    @Autowired
    private NotifyService notifyService;
    @Autowired
    private LitemallOrderService orderService;
    @Autowired
    private LitemallOrderGoodsService orderGoodsService;

    /**
     * Builds the dummy paid order shared by the template tests.
     */
    private LitemallOrder buildTestOrder() {
        LitemallOrder order = new LitemallOrder();
        order.setOrderSn("000000001");
        order.setPayTime(LocalDateTime.now());
        order.setActualPrice(BigDecimal.TEN);
        order.setOrderStatus(OrderUtil.orderStatus(2).get(0));
        return order;
    }

    /**
     * Builds the template parameters for the given order.
     * Adjust these to match your own WeChat template configuration.
     */
    private String[] buildTemplateParams(LitemallOrder order) {
        List<String> orderGoods = new ArrayList<>();
        orderGoods.add("商品1");
        orderGoods.add("商品2");
        return new String[]{
                order.getOrderSn(),
                DateTimeUtil.getDateTimeDisplayString(order.getPayTime()),
                order.getActualPrice().toString(),
                OrderUtil.orderStatusText(order),
                orderGoods.toString()
        };
    }

    @Test
    public void testCaptcha() {
        notifyService.notifyWxTemplate(StringConstants.OPENID_ME, NotifyType.PAY_SUCCEED, buildTemplateParams(buildTestOrder()));
    }

    @Test
    public void testPaySucceed() {
        notifyService.notifyWxTemplate(StringConstants.OPENID_KF, NotifyType.PAY_SUCCEED, buildTemplateParams(buildTestOrder()));
    }

    @Test
    public void testShip() {
        notifyService.notifyWxTemplate(StringConstants.OPENID_KF, NotifyType.SHIP, buildTemplateParams(buildTestOrder()));
    }

    @Test
    public void testRefund() {
        notifyService.notifyWxTemplate(StringConstants.OPENID_KF, NotifyType.REFUND, buildTemplateParams(buildTestOrder()));
    }

    /**
     * Sends a delivery notification built from a persisted order (id 33) and
     * its goods. Adjust the parameters to match your template configuration.
     */
    @Test
    public void testDelivery() {
        LitemallOrder order = orderService.findById(33);
        List<LitemallOrderGoods> goods = orderGoodsService.queryByOid(33);
        List<String> orderGoods = new ArrayList<>();
        for (LitemallOrderGoods good : goods) {
            orderGoods.add(good.getGoodsName() + " " + good.getNumber() + "件\n");
        }
        String[] parms = new String[]{
                order.getOrderSn(),
                "未知",
                OrderUtil.orderStatusText(order),
                order.getConsignee(),
                orderGoods.toString(),
                order.getMobile(),
                order.getAddress(),
                "无",
                DateTimeUtil.getDateTimeDisplayString(order.getAddTime())
        };
        System.out.println(Arrays.toString(parms));
        notifyService.notifyWxTemplate(StringConstants.OPENID_KF, NotifyType.DELIVERY, parms);
    }

    /**
     * Replaces the default async executor with a synchronous one so the
     * notification tasks run inline within each test.
     */
    @Configuration
    @Import(Application.class)
    static class ContextConfiguration {
        @Bean
        @Primary
        public Executor executor() {
            return new SyncTaskExecutor();
        }
    }
}
|
<gh_stars>0
import socket
import time
import threading
from queue import Queue
# Short per-connection timeout so closed/filtered ports fail fast.
socket.setdefaulttimeout(0.25)
# Serializes print() calls from the scanning worker threads.
print_lock = threading.Lock()
target = input('Host to be scanned: ')
# Resolve once up front; all workers share the resolved IP.
t_IP = socket.gethostbyname(target)
print ('Starting scan on host: ', t_IP)
def portscan(port):
    """Attempt a TCP connect to t_IP on the given port; print open ports.

    Fixes in this revision: the socket is managed by a context manager so it
    is always closed (the original leaked it on failure and called .close()
    on the None returned by connect(), an error hidden by a bare except),
    and the except clause names the expected network errors instead of
    swallowing everything.
    """
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.connect((t_IP, port))
            with print_lock:
                print(port, 'is open')
    except (socket.timeout, OSError):
        # closed/filtered port or unreachable host: skip silently,
        # matching the original best-effort behaviour
        pass
def threader():
    """Daemon worker loop: pull ports off the shared queue and scan each one.

    Runs forever; the thread is started with daemon=True so it dies with
    the main program once q.join() returns.
    """
    while True:
        port = q.get()
        portscan(port)
        q.task_done()
q = Queue()
startTime = time.time()
# Multithread scan
# Spin up 100 daemon worker threads; daemon=True lets the process exit
# even though each worker loops forever in threader().
for x in range(100):
    t = threading.Thread(target = threader)
    t.daemon = True
    t.start()
# Port range 1 - 500
# Enqueue ports 1..499 (range upper bound is exclusive) and block until
# every queued port has been scanned.
for worker in range(1, 500):
    q.put(worker)
q.join()
print('Time taken:', time.time() - startTime)
|
#!/usr/bin/env bash
set -e
export SENTRY_TEST_HOST="${SENTRY_TEST_HOST:-http://localhost:9000}"
TEST_USER='test@example.com'
TEST_PASS='test123TEST'
COOKIE_FILE=$(mktemp)
# Courtesy of https://stackoverflow.com/a/2183063/90297
# Registers $1 (a function name) as the trap handler for every signal listed
# in the remaining arguments, passing the handler the signal name and the
# line number at which the trap fired.
trap_with_arg() {
func="$1" ; shift
for sig ; do
trap "$func $sig "'$LINENO' "$sig"
done
}
# Guard so cleanup only runs once even if several signals fire in sequence.
DID_CLEAN_UP=0
# the cleanup function will be the exit point: it reports the signal (unless
# this is a normal EXIT) and removes the temporary cookie jar.
cleanup () {
if [ "$DID_CLEAN_UP" -eq 1 ]; then
return 0;
fi
DID_CLEAN_UP=1
# $1 is the signal name, $2 the line number (both supplied by trap_with_arg)
if [ "$1" != "EXIT" ]; then
echo "An error occurred, caught SIG$1 on line $2";
fi
echo "Cleaning up..."
rm $COOKIE_FILE
echo "Done."
}
trap_with_arg cleanup ERR INT TERM EXIT
# Disable beacon for e2e tests
echo 'SENTRY_BEACON=False' >> sentry/sentry.conf.py
docker-compose run --rm web createuser --superuser --email $TEST_USER --password $TEST_PASS || true
docker-compose up -d
printf "Waiting for Sentry to be up"; timeout 60 bash -c 'until $(curl -Isf -o /dev/null $SENTRY_TEST_HOST); do printf '.'; sleep 0.5; done'
get_csrf_token () { awk '$6 == "sc" { print $7 }' $COOKIE_FILE; }
sentry_api_request () { curl -s -H 'Accept: application/json; charset=utf-8' -H "Referer: $SENTRY_TEST_HOST" -H 'Content-Type: application/json' -H "X-CSRFToken: $(get_csrf_token)" -b "$COOKIE_FILE" -c "$COOKIE_FILE" "$SENTRY_TEST_HOST/api/0/$1" ${@:2}; }
# Logs the test superuser into Sentry: verifies the initial auth redirect,
# scrapes the CSRF middleware token from the login page (storing cookies in
# $COOKIE_FILE), then posts the credentials. Prints the login JSON response.
login () {
# Sanity check: the root URL must redirect to the sentry login page.
INITIAL_AUTH_REDIRECT=$(curl -sL -o /dev/null $SENTRY_TEST_HOST -w %{url_effective})
if [ "$INITIAL_AUTH_REDIRECT" != "$SENTRY_TEST_HOST/auth/login/sentry/" ]; then
echo "Initial /auth/login/ redirect failed, exiting..."
echo "$INITIAL_AUTH_REDIRECT"
exit -1
fi
# Extract "csrfmiddlewaretoken=<value>" from the login form markup.
CSRF_TOKEN_FOR_LOGIN=$(curl $SENTRY_TEST_HOST -sL -c "$COOKIE_FILE" | awk -F "'" '
/csrfmiddlewaretoken/ {
print $4 "=" $6;
exit;
}')
curl -sL --data-urlencode 'op=login' --data-urlencode "username=$TEST_USER" --data-urlencode "password=$TEST_PASS" --data-urlencode "$CSRF_TOKEN_FOR_LOGIN" "$SENTRY_TEST_HOST/auth/login/sentry/" -H "Referer: $SENTRY_TEST_HOST/auth/login/sentry/" -b "$COOKIE_FILE" -c "$COOKIE_FILE";
}
LOGIN_RESPONSE=$(login);
declare -a LOGIN_TEST_STRINGS=(
'"isAuthenticated":true'
'"username":"test@example.com"'
'"isSuperuser":true'
)
for i in "${LOGIN_TEST_STRINGS[@]}"
do
echo "Testing '$i'..."
echo "$LOGIN_RESPONSE" | grep "$i[,}]" >& /dev/null
echo "Pass."
done
# Set up initial/required settings (InstallWizard request)
sentry_api_request "internal/options/?query=is:required" -X PUT --data '{"mail.use-tls":false,"mail.username":"","mail.port":25,"system.admin-email":"ben@byk.im","mail.password":"","mail.from":"root@localhost","system.url-prefix":"'"$SENTRY_TEST_HOST"'","auth.allow-registration":false,"beacon.anonymous":true}' > /dev/null
SENTRY_DSN=$(sentry_api_request "projects/sentry/internal/keys/" | awk 'BEGIN { RS=",|:{\n"; FS="\""; } $2 == "public" && $4 ~ "^http" { print $4; exit; }')
# We ignore the protocol and the host as we already know those
DSN_PIECES=(`echo $SENTRY_DSN | sed -ne 's|^https\?://\([0-9a-z]\+\)@[^/]\+/\([0-9]\+\)$|\1\n\2|p'`)
SENTRY_KEY=${DSN_PIECES[0]}
PROJECT_ID=${DSN_PIECES[1]}
TEST_EVENT_ID=$(export LC_ALL=C; head /dev/urandom | tr -dc "a-f0-9" | head -c 32)
# Thanks @untitaker - https://forum.sentry.io/t/how-can-i-post-with-curl-a-sentry-event-which-authentication-credentials/4759/2?u=byk
echo "Creating test event..."
curl -sf --data '{"event_id": "'"$TEST_EVENT_ID"'","level":"error","message":"a failure","extra":{"object":"42"}}' -H 'Content-Type: application/json' -H "X-Sentry-Auth: Sentry sentry_version=7, sentry_key=$SENTRY_KEY, sentry_client=test-bash/0.1" "$SENTRY_TEST_HOST/api/$PROJECT_ID/store/" -o /dev/null
EVENT_PATH="projects/sentry/internal/events/$TEST_EVENT_ID/"
export -f sentry_api_request get_csrf_token
export SENTRY_TEST_HOST COOKIE_FILE EVENT_PATH
printf "Getting the test event back"
timeout 30 bash -c 'until $(sentry_api_request "$EVENT_PATH" -Isf -X GET -o /dev/null); do printf '.'; sleep 0.5; done'
echo "";
EVENT_RESPONSE=$(sentry_api_request "$EVENT_PATH")
declare -a EVENT_TEST_STRINGS=(
'"eventID":"'"$TEST_EVENT_ID"'"'
'"message":"a failure"'
'"title":"a failure"'
'"object":"42"'
)
for i in "${EVENT_TEST_STRINGS[@]}"
do
echo "Testing '$i'..."
echo "$EVENT_RESPONSE" | grep "$i[,}]" >& /dev/null
echo "Pass."
done
|
#!/bin/bash
# Builds memcached_exporter inside the official golang Docker image and
# packages the binary as a versioned tarball under ./dist.
#
# Fixes in this revision: fail fast on errors (set -e), require GOPATH,
# modern $(...) command substitution, quoted expansions, and `go env GOARCH`
# instead of scraping the full `go env` output.
set -e
: "${GOPATH:?GOPATH must be set}"

VERSION=$(date "+%Y%m%d%H")
APP_NAME="memcached_exporter"
GO_ARCH=$(go env GOARCH)

cd "$(dirname "$0")"
# Map the current working directory into the container's GOPATH layout.
WORKDIR=$(pwd | sed 's#.*/src#/go/src#g')
docker run --rm -it -v "${GOPATH}:/go" -w "${WORKDIR}" golang:latest go build -o "${APP_NAME}-${VERSION}.linux.${GO_ARCH}" -v

echo "Linux Packaging Binaries..."
mkdir -p "tmp/${APP_NAME}"
mv "${APP_NAME}-${VERSION}.linux.${GO_ARCH}" "tmp/${APP_NAME}/"
#cp -rp config/config.yml tmp/${APP_NAME}/
mkdir -p ./dist/
tar -cvzf "${APP_NAME}-${VERSION}.linux.${GO_ARCH}.tar.gz" -C tmp .
mv "${APP_NAME}-${VERSION}.linux.${GO_ARCH}.tar.gz" ./dist/
rm -rf tmp
echo
echo "Package ${APP_NAME}-${VERSION}.linux.${GO_ARCH}.tar.gz saved in dist directory"
<filename>ramanpy/process_results_raman.py
import string
import numpy as np
import pandas as pd
from configobj import ConfigObj
from ramanpy import RamanFit
class ReadResultParamsFit:
    """
    Reads a single "_params.txt" result file produced by a raman/xrd fit.

    Used by ResultsDataFrames to read the parameters, but can also be used
    independently.

    Attributes
    ----------
    dict_results : dict
        results from the fit, parsed from the _params.txt file (ConfigObj)
    number_of_lorentzians : int
        number of lorentzian peaks (derived from the file or from peaks_names)
    peaks_names : list
        names for the peaks (typically D, G, G')
    params_of_interest : list
        parameters to be studied: fwhm, center, height
    lorentzians : dict
        {peak_name: {param: float}} values taken from dict_results
    lorentzians_stderr : dict
        same structure intended for the stderr; NOTE(review): it is created
        but never populated by _better_structure_params — confirm whether
        stderr parsing was dropped intentionally.
    """
    def __init__(self, params_file, peaks_names=None):
        """
        :param params_file: filename with the results of the fit
        :param peaks_names: names for the peaks (D, G, etc); when None,
            letters a, b, c, ... are generated to match the number of
            peaks found in the file
        """
        # file_error=True makes ConfigObj raise if the file is missing
        self.dict_results = ConfigObj(params_file, file_error=True)
        keys = self.dict_results.keys()
        if peaks_names is None:  # no names given: derive the count and generate letter names
            # NOTE(review): get_num is defined elsewhere in this module; it
            # presumably extracts the peak index from keys like 'lz2fwhm'.
            self.number_of_lorentzians = max(set([get_num(key) for key in keys]))
            self.peaks_names = list(string.ascii_lowercase)[0:self.number_of_lorentzians]
        else:
            self.number_of_lorentzians = len(peaks_names)
            self.peaks_names = peaks_names
        self.params_of_interest = ['fwhm', 'center', 'height']
        self.lorentzians, self.lorentzians_stderr = self._better_structure_params()
    def _better_structure_params(self):
        """
        Restructures the flat 'lz<N><param>' keys of dict_results into a
        per-peak dict of float parameter values.
        :return: (lorentzians, lorentzians_stderr); the stderr dict is
            created empty and not filled here.
        """
        lorentzians = dict((key, {}) for key in self.peaks_names)
        lorentzians_stderr = dict((key, {}) for key in self.peaks_names)
        # keys in the params file are built as 'lz' + peak number + parameter
        for parameter in self.params_of_interest:
            for name, num in zip(self.peaks_names, range(1, self.number_of_lorentzians + 1)):
                key_dict = 'lz' + str(num) + parameter
                value_in_dict_results = self.dict_results[key_dict]
                lorentzians[name][parameter] = float(value_in_dict_results)
        return lorentzians, lorentzians_stderr
class ResultsDataFrames:
"""
A class to read multiple results from fitting of raman/xrd spectra
Performs also typical calculations to obtain the equivalent La, intensity ratios, etc.
...
Attributes
----------
file_names_params : list
names of the _params.txt files
file_names_experiment: list
name of the base files (experimental raw data)
sample_names: list
same as file_names_experiment but without extension
data_dict: dict of obj ReadResultsRamanFit
data from all the params files read
data_pandas: pandas
same as data_dict, but in a pandas dataframe
peak_names: list
list of names for the peaks
"""
def __init__(self, file_names, peaks_names=None, sample_names=None):
"""
:param file_names: file names without "_params.txt" this will be added inside
:param peaks_names: for the lorentzians, if not given, it will be letters
:param sample_names: for the dataframe, if not given, it will equal to file_names
"""
self.file_names_params = [file_to_process + '_params.txt' for file_to_process in file_names]
self.file_names_experiment = [file_to_process + '.txt' for file_to_process in file_names]
if sample_names is None:
self.sample_names = file_names
else:
self.sample_names = sample_names
self.peak_names = peaks_names
self.data_dict = {}
for file_name_param, sample_name in zip(self.file_names_params, self.sample_names):
print(sample_name)
self.data_dict[sample_name] = ReadResultParamsFit(file_name_param, peaks_names=self.peak_names).lorentzians
self.data_pandas = pd.concat({k: pd.DataFrame(v).T for k, v in self.data_dict.items()}, axis=0)
# ensure we get the right peak_names back from the ReadResultParamsFit.
# if they are provided it is a repetition, otherwise it will set them here, because they were set to None
self.peak_names = self.data_pandas.index.levels[1].to_list()
# Get the column names ie. variables to be analyzed
self.cols_dataframe = self.data_pandas.columns.to_list()
self.unstacked = False # dirty trick to unstack only once
    def to_csv(self, filename):
        """
        Writes data_pandas to a CSV file. When add_xypositions has already
        unstacked the frame (self.unstacked is True) it is written as-is;
        otherwise it is unstacked first so each sample occupies one row.
        :param filename: file to dump the data. typically table_results.csv
        """
        # TODO: re-implement save in json instead of csv, it's just a headache.
        if self.unstacked:
            self.data_pandas.to_csv(filename)
        else:
            self.data_pandas.unstack().to_csv(filename)
def compute_statistics(self, filename=None):
"""
computes the statistics defined in another function to each
column and each peak for the different samples
it will print them to a file and return them here as a dictionary
:param filename: file to be saved
:return: dict of data
"""
# create the idx variable for better indexing in multiindex pandas
idx = pd.IndexSlice # this helps for the multiindexing
self.dict_stats = {}
# compute for each peak
for peak in self.peak_names:
# compute for each variable of interest (center, height, fwhm)
dict_stats_data = {}
for column in self.cols_dataframe:
# grab the data
data = self.data_pandas.loc[idx[:, peak], column].values
# compute statistics and put them in a dict
dict_stats_data[column] = self._apply_statistics(data)
# combine all the stats into one
self.dict_stats[peak] = dict_stats_data
# To dump in a ConfigObj
if filename is not None:
dump_file = ConfigObj(indent_type='\t')
dump_file.filename = filename
for peak in self.peak_names:
# compute for each variable of interest (center, height, fwhm)
dump_file[peak] = {}
for column in self.cols_dataframe:
dump_file[peak][column] = self.dict_stats[peak][column]
dump_file.write()
return self.dict_stats
@staticmethod
def _apply_statistics(data):
"""
Apply any stadistics to the data.
:param data: dataframe with intensities for example.
:return: dictionary with the results.
"""
average = np.average(data)
std = np.std(data)
dict_stats_data = {'average': average, 'std': std}
return dict_stats_data
def compute_intensity_ratio_each_sample(self, file_to_save=None):
"""
computes the intensity ratio as D/G
:param file_to_save: filename to save the dataframe (table_results.csv)
:return: self.intensity ratios. A set with the ratio values.
"""
# create the idx variable for better indexing in multiindex pandas
idx = pd.IndexSlice # this helps for the multiindexing
self.intensity_ratios = {}
for sample in self.sample_names:
# grab the data
D_band = self.data_pandas.loc[idx[sample, 'D'], 'height']
G_band = self.data_pandas.loc[idx[sample, 'G'], 'height']
self.intensity_ratios[sample] = D_band / G_band
if file_to_save is not None:
# TODO: move this somewhere else
dump_file = ConfigObj(indent_type='\t')
dump_file.filename = file_to_save
for sample in self.sample_names:
# compute for each variable of interest (center, height, fwhm)
dump_file[sample] = self.intensity_ratios[sample]
dump_file.write()
return self.intensity_ratios
def compute_equivalent_La(self, Lambda=532, file_to_save=None):
"""
Computes the equivalent La from the formula of Cancado 2006
La = (2.4*10**(-10))*Lambda**4*(I_D/I_G)**-1
the laser wavelength is typically 532 nm
The result is also in nm.
*WARNING*: this method considers that this one compute_intensity_ratio_each_sample has been run before.
:param Lambda: wavelength of the laser
:param file_to_save: filename of to save to
:return: La: the set of computed equivalent La
"""
La = {}
for sample in self.sample_names:
La[sample] = (2.4 * 10 ** (-10)) * Lambda ** 4 * (self.intensity_ratios[sample]) ** -1
if file_to_save is not None:
dump_file = ConfigObj(indent_type='\t')
dump_file.filename = file_to_save
for sample in self.sample_names:
# compute for each variable of interest (center, height, fwhm)
dump_file[sample] = La[sample]
dump_file.write()
return La
def add_xypositions(self):
"""
adds the x, y position of the raman measurement to the dataframe in order to be output in table_results
"""
x_positions = []
y_positions = []
self.data_pandas = self.data_pandas.unstack()
self.unstacked = True
for filename in self.file_names_experiment:
positions = self._read_xyz(filename=filename)
x_positions.append(positions[0])
y_positions.append(positions[1])
self.data_pandas['x(um)'] = x_positions
self.data_pandas['y(um)'] = y_positions
@staticmethod
def _read_xyz(filename):
'''
Function to read the xyz positions from the header.
:param filename: str name of file
:return: list
'''
header = RamanFit.read_header(filename=filename)
axis = ['X', 'Y', 'Z']
positions = []
for element in axis:
position = float(
header.get(f'{element}(µm)', 'nan')) # get the position in micrometers, otherwise put a nan.
positions.append(position)
return positions
def get_num(string_with_number):
    """Extract every digit character from a string and return them,
    concatenated in order, as a single int.

    :param string_with_number: arbitrary string, possibly containing digits
    :return: the concatenated digits as an int, or -1 if the string has none
    """
    digits = ''.join(char for char in string_with_number if char.isdigit())
    try:
        return int(digits)
    except ValueError:  # int('') raises when there were no digits at all
        return -1
|
<reponame>saya-ac-cn/go<filename>unit5/start6.go
package main
import "fmt"
func main() {
defer func() {
if err := recover(); err != nil{
fmt.Println(err)
}
}()
defer func() {
panic("first defer panic")
}()
defer func() {
panic("second defer panic")
}()
panic("main body panic")
}
|
import os
import shutil
def performBuildActions():
    """Build the ARM32 Linux variant of dss_capi and stage release artifacts.

    Steps: wipe the per-target build directory, compile with the Free Pascal
    Compiler (fpc), then copy the produced libraries and public headers into
    the release/dss_capi/ layout.

    :raises RuntimeError: if the fpc compilation exits with a non-zero status.
    """
    # Start from a clean build directory for this target.
    if os.path.exists("build/units_arm32"):
        shutil.rmtree("build/units_arm32")
    os.makedirs("build/units_arm32")
    # Compile the project using the Free Pascal Compiler (fpc).
    # os.system returns a non-zero status on failure; aborting here prevents
    # staging artifacts from a failed (or stale) build, which the original
    # code silently did.
    status = os.system("fpc @src/linux-arm32-dbg.cfg -B src/dss_capid.lpr")
    if status != 0:
        raise RuntimeError("fpc compilation failed with status %d" % status)
    # Stage the build outputs into the release layout.
    os.makedirs("release/dss_capi/lib", exist_ok=True)
    # Copy the built libraries to release/dss_capi/lib/linux_arm32
    shutil.copytree("lib/linux_arm32", "release/dss_capi/lib/linux_arm32")
    # Copy the public headers to release/dss_capi/include
    shutil.copytree("include", "release/dss_capi/include")


# Perform the build when the script is executed.
performBuildActions()
<filename>jeecg-boot-module-system/src/main/java/org/jeecg/modules/activiti/service/impl/ActivitiServiceImpl.java
package org.jeecg.modules.activiti.service.impl;
import org.activiti.bpmn.model.BpmnModel;
import org.activiti.engine.HistoryService;
import org.activiti.engine.RepositoryService;
import org.activiti.engine.RuntimeService;
import org.activiti.engine.TaskService;
import org.activiti.engine.delegate.DelegateExecution;
import org.activiti.engine.history.HistoricActivityInstance;
import org.activiti.engine.history.HistoricProcessInstance;
import org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.activiti.engine.impl.persistence.entity.ProcessDefinitionEntity;
import org.activiti.engine.impl.pvm.PvmTransition;
import org.activiti.engine.impl.pvm.process.ActivityImpl;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.task.Task;
import org.activiti.image.ProcessDiagramGenerator;
import org.jeecg.modules.activiti.service.ActivitiService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import lombok.extern.slf4j.Slf4j;
import sun.misc.BASE64Encoder;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.*;
@Service
@Slf4j
public class ActivitiServiceImpl implements ActivitiService {

    @Autowired
    private RuntimeService runtimeService;
    @Autowired
    private TaskService taskService;
    @Autowired
    private HistoryService historyService;
    @Autowired
    private RepositoryService repositoryService;
    @Autowired
    private ProcessEngineConfigurationImpl processEngineConfiguration;

    /**
     * Starts a new process instance of the given process definition.
     *
     * @param id           process definition key
     * @param business_key business key linking the instance to a business record
     */
    @Override
    public void startProcesses(String id, String business_key) {
        ProcessInstance pi = runtimeService.startProcessInstanceByKey(id, business_key);
        System.out.println("流程启动成功,流程id:" + pi.getId());
    }

    /**
     * Lists the open tasks of the "process" definition that the given user
     * is a candidate for or assigned to.
     *
     * @param userId user to query tasks for
     * @return list of matching tasks (possibly empty)
     */
    @Override
    public List<Task> findTasksByUserId(String userId) {
        List<Task> resultTask = taskService.createTaskQuery().processDefinitionKey("process")
                .taskCandidateOrAssigned(userId).list();
        return resultTask;
    }

    /**
     * Looks a task up by its id.
     *
     * @param taskId task id
     * @return the task, or null when no task with that id exists
     */
    @Override
    public Task findTaskById(String taskId) {
        List<Task> resultTask = taskService.createTaskQuery().taskId(taskId).list();
        // list() never returns null but may be empty; the original
        // null-check allowed an IndexOutOfBoundsException on get(0).
        if (resultTask != null && !resultTask.isEmpty()) {
            return resultTask.get(0);
        }
        return null;
    }

    /**
     * Claims and completes a task on behalf of a user.
     *
     * @param taskId task to complete
     * @param userId user claiming the task
     * @param result completion result (currently unused, reserved for business handling)
     */
    @Override
    public void completeTask(String taskId, String userId, String result) {
        // Claim the task for this user.
        taskService.claim(taskId, userId);
        // Fetch the task.
        Task task = taskService.createTaskQuery().taskId(taskId).singleResult();
        // Resolve the owning process instance.
        String proInsId = task.getProcessInstanceId();
        ProcessInstance process = runtimeService.createProcessInstanceQuery().processInstanceId(proInsId)
                .singleResult();
        // Extract the business record id from the business key ("type:id").
        String business_key = process.getBusinessKey();
        String[] array = business_key.split(":");
        String business_Id = array[1];
        // TODO 业务处理 (business handling using business_Id / result)
        taskService.complete(taskId);
    }

    /**
     * Updates the status of the business record bound to an execution.
     *
     * @param execution the delegate execution carrying the business key
     * @param status    new status value
     */
    @Override
    public void updateBizStatus(DelegateExecution execution, String status) {
        String bizId = execution.getProcessBusinessKey();
        // Update the business table for bizId as appropriate.
        System.out.println("业务表[" + bizId + "]状态更改成功,状态更改为:" + status);
    }

    /**
     * Renders the diagram of a (possibly historic) process instance and
     * writes it to demo2.png in the working directory.
     *
     * @param processInstanceId process instance to render
     * @throws Exception on lookup or I/O failure
     */
    @Override
    public void queryProImg(String processInstanceId) throws Exception {
        // Look the instance up in the history service.
        HistoricProcessInstance processInstance = historyService.createHistoricProcessInstanceQuery()
                .processInstanceId(processInstanceId).singleResult();
        // Stream the diagram for the instance's process definition.
        InputStream is = repositoryService.getProcessDiagram(processInstance.getProcessDefinitionId());
        try {
            BufferedImage bi = ImageIO.read(is);
            File file = new File("demo2.png");
            if (!file.exists()) {
                file.createNewFile();
            }
            // try-with-resources closes the stream even when write() throws.
            try (FileOutputStream fos = new FileOutputStream(file)) {
                ImageIO.write(bi, "png", fos);
            }
        } finally {
            is.close();
        }
        System.out.println("图片生成成功");
        List<Task> tasks = taskService.createTaskQuery().taskCandidateUser("userId").list();
        for (Task t : tasks) {
            System.out.println(t.getName());
        }
    }

    /**
     * 流程图高亮显示 首先启动流程,获取processInstanceId,替换即可生成
     * Renders the process diagram with completed activities and traversed
     * flows highlighted, returned as a base64-encoded PNG.
     *
     * @param processInstanceId process instance to render
     * @return base64 string of the PNG image
     * @throws Exception on lookup or I/O failure
     */
    @Override
    public String queryProHighLighted(String processInstanceId) throws Exception {
        // Look the instance up in the history service.
        HistoricProcessInstance processInstance = historyService.createHistoricProcessInstanceQuery()
                .processInstanceId(processInstanceId).singleResult();
        // Fetch the BPMN model and the diagram generator.
        BpmnModel bpmnModel = repositoryService.getBpmnModel(processInstance.getProcessDefinitionId());
        ProcessDiagramGenerator diagramGenerator = processEngineConfiguration.getProcessDiagramGenerator();
        ProcessDefinitionEntity definitionEntity = (ProcessDefinitionEntity) repositoryService
                .getProcessDefinition(processInstance.getProcessDefinitionId());
        List<HistoricActivityInstance> highLightedActivitList = historyService.createHistoricActivityInstanceQuery()
                .processInstanceId(processInstanceId).list();
        // Ids of activities to highlight.
        List<String> highLightedActivitis = new ArrayList<String>();
        // Ids of sequence flows to highlight.
        List<String> highLightedFlows = getHighLightedFlows(definitionEntity, highLightedActivitList);
        for (HistoricActivityInstance tempActivity : highLightedActivitList) {
            String activityId = tempActivity.getActivityId();
            highLightedActivitis.add(activityId);
        }
        // Configure fonts and generate the PNG.
        InputStream imageStream = diagramGenerator.generateDiagram(bpmnModel, "png", highLightedActivitis,
                highLightedFlows, "宋体", "微软雅黑", "黑体", null, 2.0);
        BufferedImage bi = ImageIO.read(imageStream);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ImageIO.write(bi, "png", bos);
        byte[] bytes = bos.toByteArray();
        // java.util.Base64 replaces the JDK-internal sun.misc.BASE64Encoder,
        // which is unavailable on Java 9+. The basic encoder emits no line
        // breaks, so the previous \r\n stripping is no longer needed.
        String png_base64 = Base64.getEncoder().encodeToString(bytes);
        bos.close();
        imageStream.close();
        return png_base64;
    }

    /**
     * 获取需要高亮的线 — computes which sequence flows were traversed,
     * by matching each historic activity's outgoing transitions against
     * the subsequent activities that share the same start time.
     *
     * @param processDefinitionEntity   process definition with activity layout
     * @param historicActivityInstances historic activities in execution order
     * @return ids of flows to highlight
     */
    private List<String> getHighLightedFlows(ProcessDefinitionEntity processDefinitionEntity,
            List<HistoricActivityInstance> historicActivityInstances) {
        List<String> highFlows = new ArrayList<String>(); // flow ids to highlight
        for (int i = 0; i < historicActivityInstances.size() - 1; i++) { // walk historic activities
            ActivityImpl activityImpl = processDefinitionEntity
                    .findActivity(historicActivityInstances.get(i).getActivityId()); // activity definition details
            List<ActivityImpl> sameStartTimeNodes = new ArrayList<ActivityImpl>(); // later nodes sharing a start time
            ActivityImpl sameActivityImpl1 = processDefinitionEntity
                    .findActivity(historicActivityInstances.get(i + 1).getActivityId());
            // Seed with the immediately following node.
            sameStartTimeNodes.add(sameActivityImpl1);
            for (int j = i + 1; j < historicActivityInstances.size() - 1; j++) {
                HistoricActivityInstance activityImpl1 = historicActivityInstances.get(j); // next node
                HistoricActivityInstance activityImpl2 = historicActivityInstances.get(j + 1); // node after it
                if (activityImpl1.getStartTime().equals(activityImpl2.getStartTime())) {
                    // Same start time: collect it too (parallel branch).
                    ActivityImpl sameActivityImpl2 = processDefinitionEntity
                            .findActivity(activityImpl2.getActivityId());
                    sameStartTimeNodes.add(sameActivityImpl2);
                } else {
                    // Start times diverge: stop collecting.
                    break;
                }
            }
            List<PvmTransition> pvmTransitions = activityImpl.getOutgoingTransitions(); // all outgoing flows
            for (PvmTransition pvmTransition : pvmTransitions) {
                // Highlight a flow when its destination is one of the
                // same-start-time successors.
                ActivityImpl pvmActivityImpl = (ActivityImpl) pvmTransition.getDestination();
                if (sameStartTimeNodes.contains(pvmActivityImpl)) {
                    highFlows.add(pvmTransition.getId());
                }
            }
        }
        return highFlows;
    }
}
|
def get_unique_name(vdir, names):
base_name = vdir.split('/')[-1] # Extract the base name of the virtual directory
unique_name = base_name
counter = 1
while unique_name in names:
unique_name = f"{base_name}_{counter}" # Append a counter to the base name if it's not unique
counter += 1
names.append(unique_name) # Add the unique name to the list of existing names
return unique_name |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
from spack import *
class Hydrogen(CMakePackage):
    """Hydrogen: Distributed-memory dense and sparse-direct linear algebra
    and optimization library. Based on the Elemental library."""

    homepage = "http://libelemental.org"
    url = "https://github.com/LLNL/Elemental/archive/0.99.tar.gz"

    version('develop', git='https://github.com/LLNL/Elemental.git', branch='hydrogen')
    version('0.99', 'b678433ab1d498da47acf3dc5e056c23')

    variant('shared', default=True,
            description='Enables the build of shared libraries')
    variant('hybrid', default=True,
            description='Make use of OpenMP within MPI packing/unpacking')
    variant('openmp_blas', default=False,
            description='Use OpenMP for threading in the BLAS library')
    variant('quad', default=False,
            description='Enable quad precision')
    variant('int64', default=False,
            description='Use 64bit integers')
    variant('int64_blas', default=False,
            description='Use 64bit integers for BLAS.')
    variant('scalapack', default=False,
            description='Build with ScaLAPACK library')
    variant('build_type', default='Release',
            description='The build type to build',
            values=('Debug', 'Release'))
    variant('blas', default='openblas', values=('openblas', 'mkl', 'accelerate', 'essl'),
            description='Enable the use of OpenBlas/MKL/Accelerate/ESSL')
    variant('mpfr', default=False,
            description='Support GNU MPFR\'s'
            'arbitrary-precision floating-point arithmetic')
    variant('cuda', default=False,
            description='Builds with support for GPUs via CUDA and cuDNN')
    variant('test', default=False,
            description='Builds test suite')

    # Note that #1712 forces us to enumerate the different blas variants
    depends_on('openblas', when='blas=openblas ~openmp_blas ~int64_blas')
    depends_on('openblas +ilp64', when='blas=openblas ~openmp_blas +int64_blas')
    depends_on('openblas threads=openmp', when='blas=openblas +openmp_blas ~int64_blas')
    # BUGFIX: was '+lip64', a typo for the openblas '+ilp64' variant.
    depends_on('openblas threads=openmp +ilp64', when='blas=openblas +openmp_blas +int64_blas')

    depends_on('intel-mkl', when="blas=mkl ~openmp_blas ~int64_blas")
    depends_on('intel-mkl +ilp64', when="blas=mkl ~openmp_blas +int64_blas")
    depends_on('intel-mkl threads=openmp', when='blas=mkl +openmp_blas ~int64_blas')
    depends_on('intel-mkl@2017.1 +openmp +ilp64', when='blas=mkl +openmp_blas +int64_blas')

    depends_on('veclibfort', when='blas=accelerate')
    conflicts('blas=accelerate +openmp_blas')

    depends_on('essl -cuda', when='blas=essl -openmp_blas ~int64_blas')
    depends_on('essl -cuda +ilp64', when='blas=essl -openmp_blas +int64_blas')
    depends_on('essl threads=openmp', when='blas=essl +openmp_blas ~int64_blas')
    depends_on('essl threads=openmp +ilp64', when='blas=essl +openmp_blas +int64_blas')
    depends_on('netlib-lapack +external-blas', when='blas=essl')

    # Note that this forces us to use OpenBLAS until #1712 is fixed
    depends_on('lapack', when='blas=openblas ~openmp_blas')

    depends_on('mpi', when='~cuda')
    depends_on('mpi +cuda', when='+cuda')
    depends_on('scalapack', when='+scalapack')
    depends_on('gmp', when='+mpfr')
    depends_on('mpc', when='+mpfr')
    depends_on('mpfr', when='+mpfr')
    depends_on('cuda', when='+cuda')
    depends_on('cudnn', when='+cuda')
    depends_on('cub', when='+cuda')

    conflicts('@0:0.98', msg="Hydrogen did not exist before v0.99. " +
              "Did you mean to use Elemental instead?")

    @property
    def libs(self):
        """Locate the installed Elemental/Hydrogen libraries (libEl)."""
        shared = True if '+shared' in self.spec else False
        return find_libraries(
            'libEl', root=self.prefix, shared=shared, recursive=True
        )

    def cmake_args(self):
        """Translate the spec's variants into Hydrogen's CMake options."""
        spec = self.spec
        args = [
            '-DCMAKE_INSTALL_MESSAGE:STRING=LAZY',
            '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
            '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
            '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc,
            '-DBUILD_SHARED_LIBS:BOOL=%s' % ('+shared' in spec),
            '-DHydrogen_ENABLE_OPENMP:BOOL=%s' % ('+hybrid' in spec),
            '-DHydrogen_ENABLE_QUADMATH:BOOL=%s' % ('+quad' in spec),
            '-DHydrogen_USE_64BIT_INTS:BOOL=%s' % ('+int64' in spec),
            '-DHydrogen_USE_64BIT_BLAS_INTS:BOOL=%s' % ('+int64_blas' in spec),
            '-DHydrogen_ENABLE_MPC:BOOL=%s' % ('+mpfr' in spec),
            '-DHydrogen_GENERAL_LAPACK_FALLBACK=ON',
            '-DHydrogen_ENABLE_CUDA=%s' % ('+cuda' in spec),
            '-DHydrogen_ENABLE_TESTING=%s' % ('+test' in spec),
        ]

        # Add support for OS X to find OpenMP (point CMake at clang's root).
        if (self.spec.satisfies('%clang')):
            if (sys.platform == 'darwin'):
                clang = self.compiler.cc
                clang_bin = os.path.dirname(clang)
                clang_root = os.path.dirname(clang_bin)
                args.extend([
                    '-DOpenMP_DIR={0}'.format(clang_root)])

        if 'blas=openblas' in spec:
            args.extend([
                '-DHydrogen_USE_OpenBLAS:BOOL=%s' % ('blas=openblas' in spec),
                # BUGFIX: OpenBLAS_DIR must point at the openblas install,
                # not at hydrogen's own prefix as it previously did.
                '-DOpenBLAS_DIR:STRING={0}'.format(
                    spec['openblas'].prefix)])
        elif 'blas=mkl' in spec:
            args.extend([
                '-DHydrogen_USE_MKL:BOOL=%s' % ('blas=mkl' in spec)])
        elif 'blas=accelerate' in spec:
            args.extend(['-DHydrogen_USE_ACCELERATE:BOOL=TRUE'])
        elif 'blas=essl' in spec:
            args.extend([
                '-DHydrogen_USE_ESSL:BOOL=%s' % ('blas=essl' in spec)])

        return args
|
<reponame>jinshiyi11/AndroidPerformanceMonitor
package com.tencent.wstt.gt.collector.util;
import android.app.ActivityManager;
import android.app.ActivityManager.RunningAppProcessInfo;
import android.content.Context;

import java.util.List;
/**
* Created by elvis on 2017/2/24.
*/
public class ProcessUtil {
    /**
     * 判断进程是否包含Application主线程 — checks whether the process with the
     * given pid is the application's main (UI) process, i.e. whether its
     * process name equals the application's package name.
     *
     * @param context any context, used to reach ActivityManager and the package name
     * @param pid     the process id to check
     * @return true when pid belongs to the process named after the package
     */
    public static boolean isUIProcess(Context context, int pid) {
        String processName = null;
        ActivityManager mActivityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        // getRunningAppProcesses() is documented to possibly return null;
        // the original code iterated it unconditionally and could NPE.
        List<RunningAppProcessInfo> processes = mActivityManager.getRunningAppProcesses();
        if (processes == null) {
            return false;
        }
        for (RunningAppProcessInfo appProcess : processes) {
            if (appProcess.pid == pid) {
                processName = appProcess.processName;
                break;
            }
        }
        String packageName = context.getPackageName();
        return processName != null && processName.equals(packageName);
    }
}
|
# Note: Script must be run as root (sudo OK) because it writes file in LIB directory

# Write the startup shell script that launches the python program.
# For debugging, append "> log.txt 2>&1" to the python line to capture all
# output in a log file in the home dir.
cat > /home/bruce/startup.bash << 'STARTUP_EOF'
/usr/bin/python /home/bruce/pitoys/raspberrypi/buttonpressed.py
STARTUP_EOF
chmod 775 /home/bruce/startup.bash

# Could be worth testing the startup script now - optional
# /bin/bash /home/bruce/startup.bash

# Write the systemd unit file that runs the startup script at boot.
cat > /lib/systemd/system/startup.service << 'UNIT_EOF'
[Unit]
Description=Service Description
After=multi-user.target

[Service]
WorkingDirectory=/home/bruce/
User=bruce
ExecStart=/bin/bash /home/bruce/startup.bash

[Install]
WantedBy=multi-user.target
UNIT_EOF

# Unit files must be world-readable but not executable.
sudo chmod 644 /lib/systemd/system/startup.service

# Reload systemd so it sees the new unit, then enable it.
sudo systemctl daemon-reload
sudo systemctl enable startup.service

# Now just reboot and the service should start up automatically
class UsersModel {
    constructor(psql) {
        this.psql = psql;
    }

    // Resolve to every logtime row with no end date, i.e. users still logged in.
    getConnectedUsers() {
        const sql = "SELECT * FROM logtimes_logtimes AS logs WHERE logs.end_at IS NULL";
        return this.psql
            .query(sql)
            .then((result) => result.rows);
    }
}

module.exports = UsersModel;
|
/// Returns `true` when the input's length is odd.
///
/// Note: `str::len` counts UTF-8 bytes, not characters, so multi-byte
/// characters contribute more than one to the length.
fn is_length_odd(input: &str) -> bool {
    // The comparison already yields the bool; `if cond { true } else { false }`
    // was redundant.
    input.len() % 2 == 1
}
fn main() {
    println!("{}", is_length_odd("hello")); // Output: true (5 bytes)
    println!("{}", is_length_odd("world")); // Output: true (5 bytes; the original comment claiming "false" was wrong)
}
#!/usr/bin/env python
"""Plot cattle population by country/region as a bar chart saved to cattle.pdf."""
import matplotlib.pyplot as plt

# (bar position, label, head count) per country or region.
data = [
    (0.5, 'India', 3.03e8),
    (1.5, 'Brazil', 2.44e8),
    (2.5, 'USA', 9.44e7),
    (3.5, 'China', 9.14e7),
    (4.5, 'EU', 8.70e7),
    (5.5, 'Argentina', 5.37e7),
]
positions = [row[0] for row in data]
countries = [row[1] for row in data]
cattle_numbers = [row[2] for row in data]

plt.bar(positions, cattle_numbers, tick_label=countries)
plt.xlabel('Country/Region')
plt.ylabel('Population')
plt.title('Cattle population by country or region')
plt.savefig('cattle.pdf')
plt.close()
|
#!/bin/bash
# set the number of nodes and processes per node: 32 nodes, one MPI process
# per node (the original comment claiming a single node contradicted the
# directives below)
#SBATCH --nodes=32
#SBATCH --ntasks-per-node=1
#SBATCH --mem=50Gb
# uncomment if NUM_QUBITS - log2(NUM_NODES) > 30
####SBATCH --mem=100Gb
# set max wallclock time
#SBATCH --time=05:00:00
# set name of job
#SBATCH --job-name QuEST-network
# set queue
# Remove this line for runs that are not part of the nqit reservation
##SBATCH --reservation=nqit
# Load the MPI and CMake toolchain for this cluster.
module purge
module load mvapich2/2.1.0__intel-2016
module load cmake/3.8.0
# 16 OpenMP threads per MPI rank; thread pinning left to the runtime.
export KMP_AFFINITY=disabled
export OMP_NUM_THREADS=16
# Cluster-provided helper that sets up $MPI_HOSTS for mpirun.
. enable_arcus-b_mpi.sh
# Build the distributed QuEST timing demo out-of-tree in ./build.
CMAKE_OPTIONS="-DUSER_SOURCE='compactUnitaryTimer.c' -DQuEST_DIR=QuEST_v2.1.0 -DDISTRIBUTED=1"
rm -r build
mkdir build; cd build
cmake $CMAKE_OPTIONS ../../..
make
# Run the benchmark and copy its TIMING output next to the build dir.
NUM_QUBITS=35
NUM_TRIALS=50
EXE=demo
time mpirun $MPI_HOSTS ./$EXE $NUM_QUBITS $NUM_TRIALS
cp TIMING* ..
|
<reponame>alemesa1991/School-Projects
#include <iostream>
#include <string>
using namespace std;

// Demonstrates C++ object lifetime: globals in this translation unit are
// constructed before main() runs (in definition order) and destroyed after
// main() returns (in reverse order); locals die at the end of their scope,
// also in reverse declaration order.
// NOTE(review): relies on <cstdint> being pulled in transitively for
// uint8_t — confirm, or include it explicitly.
class X {
    string name;
    uint8_t memBlock[1024*1024]; // 1 MByte, makes each instance deliberately large
public:
    X(const std::string& n) { name = n; cout << "X ctor " << name << "\n"; }
    ~X() { cout << "X dtor " << name << "\n"; }
    void hello() const { cout << "hello from " << name << "\n\n"; }
};

// Constructed before main() starts, destroyed after main() returns.
X xgbefore("global xgbefore before main");

int main(int argc, char**argv)
{
    cout << "main starting...\n";
    X x(std::string("main x"));  // destroyed at end of main, after xx
    X xx("main xx");             // destroyed first (reverse declaration order)
    cout << "main terminating...\n";
}

// Also constructed before main() — being written after main() only affects
// its order relative to xgbefore, not relative to main().
X xgafter("global xgafter after main");
|
#include <stdlib.h>
#include <stdio.h>
#include "palindrome.h"
int main(int argc, char** argv) {
    /*
     * Largest palindrome made from the product of two 3-digit numbers
     * (Project Euler #4). isPalindrome() is declared in palindrome.h.
     */
    int best = 0;
    int upper = 999;
    int x;
    int y;
    int product;

    /* y starts at x so each unordered pair {x, y} is visited exactly once. */
    for (x = upper; x >= 100; x--) {
        for (y = upper; y >= x; y--) {
            product = x * y;
            if (isPalindrome(product) && product > best) {
                best = product;
            }
        }
    }

    /* Print the result. */
    fprintf(stdout, "%d\n", best);

    return 0;
}
|
// WPT-specific test checking that WebGPU is available iff isSecureContext.
import { assert } from '../../common/util/util.js';
// TODO: Test all WebGPU interfaces.
const items = [
  globalThis.navigator.gpu,
  globalThis.GPU,
  globalThis.GPUAdapter,
  globalThis.GPUDevice,
  globalThis.GPUBuffer,
  globalThis.GPUBufferUsage,
  globalThis.GPUCommandEncoder,
  globalThis.GPUCommandBuffer,
  globalThis.GPUComputePassEncoder,
  globalThis.GPURenderPipeline,
  globalThis.GPUDeviceLostInfo,
  globalThis.GPUValidationError,
];

// WebGPU must be exposed exactly when the context is secure.
const secure = globalThis.isSecureContext;
items.forEach((item) => {
  if (secure) {
    assert(item !== undefined, 'Item/interface should be exposed on secure context');
  } else {
    assert(item === undefined, 'Item/interface should not be exposed on insecure context');
  }
});
|
package malte0811.controlengineering.logic.schematic;
import it.unimi.dsi.fastutil.ints.IntSet;
import malte0811.controlengineering.ControlEngineering;
import malte0811.controlengineering.logic.schematic.symbol.PlacedSymbol;
import net.minecraft.network.chat.Component;
import net.minecraft.network.chat.TranslatableComponent;
import net.minecraft.world.level.Level;
import java.util.*;
import static malte0811.controlengineering.logic.schematic.Schematic.BOUNDARY;
/**
 * Validates edits to a {@link Schematic}: wires and symbols may only be added
 * when they stay inside the schematic boundary, do not merge nets into an
 * inconsistent state (multiple signal sources, analog source feeding a
 * digital sink), do not overlap other symbols, and do not introduce a
 * combinational cycle.
 */
public record SchematicChecker(Schematic schematic, Level level) {
    // Translation keys for the individual validation errors.
    public static final String WIRE_OUTSIDE_BOUNDARY = ControlEngineering.MODID + ".gui.wireOutsideBoundary";
    public static final String SYMBOL_OUTSIDE_BOUNDARY = ControlEngineering.MODID + ".gui.symbolOutsideBoundary";
    public static final String MULTIPLE_SOURCES = ControlEngineering.MODID + ".gui.multipleSources";
    public static final String CYCLE = ControlEngineering.MODID + ".gui.cycle";
    public static final String ANALOG_DIGITAL_MIX = ControlEngineering.MODID + ".gui.analogVsDigital";
    public static final String SYMBOL_INTERSECTION = ControlEngineering.MODID + ".gui.symbolIntersection";

    /**
     * Checks whether a wire segment may be added.
     *
     * @param segment the wire segment to validate
     * @return the violation as a translatable component, or empty if legal
     */
    public Optional<Component> getErrorForAdding(WireSegment segment) {
        // Both endpoints must lie within the schematic boundary.
        if (!BOUNDARY.containsClosed(segment.start()) || !BOUNDARY.containsClosed(segment.end())) {
            return error(WIRE_OUTSIDE_BOUNDARY);
        }
        // Collect the pins of all nets this segment would merge, plus the
        // pins the segment itself touches.
        IntSet netsToCheck = schematic.getConnectedNetIndices(segment);
        Set<ConnectedPin> wirePins = new SchematicNet(segment).computeConnectedPins(schematic.getSymbols());
        Set<ConnectedPin> allPins = new HashSet<>(wirePins);
        for (int netId : netsToCheck) {
            SchematicNet net = schematic.getNets().get(netId);
            allPins.addAll(net.getOrComputePins(schematic.getSymbols()));
        }
        // The merged net must still have at most one source and no
        // analog-source/digital-sink mix.
        Optional<Component> consistency = getConsistencyError(allPins);
        if (consistency.isPresent()) {
            return consistency;
        }
        // Only when the segment actually merges nets or attaches pins can it
        // create a cycle; rebuild the net list as-if-merged and check that a
        // topological cell order still exists.
        if (netsToCheck.size() + wirePins.size() > 1) {
            List<Collection<ConnectedPin>> nets = new ArrayList<>();
            nets.add(new ArrayList<>(allPins));
            for (int i = 0; i < schematic.getNets().size(); ++i) {
                if (!netsToCheck.contains(i)) {
                    nets.add(schematic.getNets().get(i).getOrComputePins(schematic.getSymbols()));
                }
            }
            if (SchematicCircuitConverter.getCellOrder(
                    schematic.getSymbols(), SchematicCircuitConverter.getNetsBySource(nets)
            ).isEmpty()) {
                return error(CYCLE);
            }
        }
        return Optional.empty();
    }

    /** Convenience wrapper: true when {@link #getErrorForAdding(WireSegment)} finds no violation. */
    public boolean canAdd(WireSegment segment) {
        return getErrorForAdding(segment).isEmpty();
    }

    /**
     * Validates a single net's pin set: at most one output (source) pin, and
     * no analog source driving a digital sink.
     *
     * @param netPins all pins on the (hypothetical) net
     * @return the violation, or empty if the net is consistent
     */
    public static Optional<Component> getConsistencyError(Set<ConnectedPin> netPins) {
        ConnectedPin sourcePin = null;
        boolean hasAnalogSource = false;
        boolean hasDigitalSink = false;
        for (ConnectedPin pin : netPins) {
            if (pin.pin().isOutput()) {
                if (sourcePin != null) {
                    // Only allow one signal source
                    return error(MULTIPLE_SOURCES);
                }
                sourcePin = pin;
                if (pin.isAnalog()) {
                    hasAnalogSource = true;
                }
            } else if (!pin.isAnalog()) {
                hasDigitalSink = true;
            }
        }
        // Do not allow analog source with digital sink
        if (hasAnalogSource && hasDigitalSink) {
            return error(ANALOG_DIGITAL_MIX);
        } else {
            return Optional.empty();
        }
    }

    /**
     * Checks whether a symbol may be placed.
     *
     * @param candidate the symbol (with position) to validate
     * @return the violation as a translatable component, or empty if legal
     */
    public Optional<Component> getErrorForAdding(PlacedSymbol candidate) {
        // The symbol must fit inside the boundary and not overlap others.
        if (!BOUNDARY.contains(candidate.getShape(level))) {
            return error(SYMBOL_OUTSIDE_BOUNDARY);
        }
        if (!schematic.getSymbols().stream().allMatch(other -> candidate.canCoexist(other, level))) {
            return error(SYMBOL_INTERSECTION);
        }
        // Re-check every net with the candidate's pins attached.
        List<Collection<ConnectedPin>> nets = new ArrayList<>();
        for (SchematicNet net : schematic.getNets()) {
            Set<ConnectedPin> pinsInNet = new HashSet<>(net.getOrComputePins(schematic.getSymbols()));
            pinsInNet.addAll(net.computeConnectedPins(Collections.singletonList(candidate)));
            Optional<Component> netConsistency = getConsistencyError(pinsInNet);
            if (netConsistency.isPresent()) {
                return netConsistency;
            }
            nets.add(pinsInNet);
        }
        // Finally verify the circuit including the candidate is still acyclic.
        List<PlacedSymbol> allSymbols = new ArrayList<>(schematic.getSymbols());
        allSymbols.add(candidate);
        if (SchematicCircuitConverter.getCellOrder(
                allSymbols, SchematicCircuitConverter.getNetsBySource(nets)
        ).isEmpty()) {
            return error(CYCLE);
        }
        return Optional.empty();
    }

    /** Convenience wrapper: true when {@link #getErrorForAdding(PlacedSymbol)} finds no violation. */
    public boolean canAdd(PlacedSymbol candidate) {
        return getErrorForAdding(candidate).isEmpty();
    }

    /** Wraps a translation key in a non-empty Optional error component. */
    private static Optional<Component> error(String translationKey) {
        return Optional.of(new TranslatableComponent(translationKey));
    }
}
|
#!/bin/bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# Bash unit tests for the TensorFlow Lite Micro project generator.

set -e

# assert_include FILE PATTERN NAME
# Fails the test (exit 1) unless PATTERN occurs in FILE; NAME is the
# human-readable include name used in the error message.
assert_include() {
  if ! grep -q "$2" "$1"; then
    echo "ERROR: No $3 include found in output '$1'"
    exit 1
  fi
}

INPUT_EXAMPLE_FILE=${TEST_TMPDIR}/input_example.cc
cat << EOF > ${INPUT_EXAMPLE_FILE}
#include <stdio.h>
#include "baz.h"
#include "tensorflow/lite/experimental/micro/examples/something/foo/fish.h"

main() {
  fprintf(stderr, "Hello World!\n");
  return 0;
}
EOF

OUTPUT_EXAMPLE_FILE=${TEST_TMPDIR}/output_example.cc

${TEST_SRCDIR}/tensorflow/lite/experimental/micro/tools/make/transform_source \
  --platform=esp \
  --is_example_source \
  --source_path="tensorflow/lite/experimental/micro/examples/something/input_example.cc" \
  < ${INPUT_EXAMPLE_FILE} \
  > ${OUTPUT_EXAMPLE_FILE}

# System and local includes must survive; deep example paths are flattened.
assert_include "${OUTPUT_EXAMPLE_FILE}" '#include <stdio.h>' 'stdio.h'
assert_include "${OUTPUT_EXAMPLE_FILE}" '#include "baz.h"' 'baz.h'
assert_include "${OUTPUT_EXAMPLE_FILE}" '#include "foo/fish.h"' 'foo/fish.h'

#
# Example file in a sub directory.
#
mkdir -p "${TEST_TMPDIR}/subdir"
INPUT_EXAMPLE_SUBDIR_FILE=${TEST_TMPDIR}/subdir/input_example.cc
cat << EOF > ${INPUT_EXAMPLE_SUBDIR_FILE}
#include <stdio.h>
#include "baz.h"
#include "tensorflow/lite/experimental/micro/examples/something/subdir/input_example.h"
#include "tensorflow/lite/experimental/micro/examples/something/bleh.h"
#include "tensorflow/lite/experimental/micro/examples/something/foo/fish.h"
EOF

OUTPUT_EXAMPLE_SUBDIR_FILE=${TEST_TMPDIR}/output_example.cc

${TEST_SRCDIR}/tensorflow/lite/experimental/micro/tools/make/transform_source \
  --platform=esp \
  --is_example_source \
  --source_path="tensorflow/lite/experimental/micro/examples/something/subdir/input_example.cc" \
  < ${INPUT_EXAMPLE_SUBDIR_FILE} \
  > ${OUTPUT_EXAMPLE_SUBDIR_FILE}

assert_include "${OUTPUT_EXAMPLE_SUBDIR_FILE}" '#include <stdio.h>' 'stdio.h'
assert_include "${OUTPUT_EXAMPLE_SUBDIR_FILE}" '#include "baz.h"' 'baz.h'

# This check additionally dumps the output file on failure for debugging,
# so it is kept inline rather than going through assert_include.
if ! grep -q '#include "input_example.h"' ${OUTPUT_EXAMPLE_SUBDIR_FILE}; then
  echo "ERROR: No input_example.h include found in output '${OUTPUT_EXAMPLE_SUBDIR_FILE}'"
  cat ${OUTPUT_EXAMPLE_SUBDIR_FILE}
  exit 1
fi

assert_include "${OUTPUT_EXAMPLE_SUBDIR_FILE}" '#include "../bleh.h"' '../bleh.h'
assert_include "${OUTPUT_EXAMPLE_SUBDIR_FILE}" '#include "../foo/fish.h"' '../foo/fish.h'

echo
echo "SUCCESS: transform_esp_source test PASSED"
|
<reponame>powerumc/runcode
export * from "./application-logger-service";
|
<reponame>lshqqytiger/JJWAK
// Codegen helper: registers a new XHR endpoint NAME with METHOD and PATH by
// rewriting the generated .d.ts files and endpoints.json in place.
// Relies on iterateLine/orderByKey/withComma from ./common; iterateLine
// appears to map each line of a buffer to a replacement line (or an array of
// lines) — TODO confirm against ./common.
const FS = require("fs");
const { iterateLine, orderByKey, withComma } = require("./common");
// CLI arguments: node xhr.js NAME METHOD PATH
const NAME = process.argv[2];
const METHOD = process.argv[3];
const PATH = process.argv[4];

if(!NAME || !METHOD || !PATH){
  console.info("Usage: node xhr.js NAME METHOD PATH");
  process.exit();
}
// Update xhr.req.d.ts (original comment: "dds.xhr.req.d.ts 수정")
append("./src/common/xhr.req.d.ts");
// Update xhr.res.d.ts (original comment: "dds.xhr.res.d.ts 수정")
append("./src/common/xhr.res.d.ts");
// Update endpoints.json (original comment: "endpoints.json 수정"):
// insert the new endpoint into the "$items" object, keeping entries sorted
// and comma-terminated except for the last one.
FS.readFile("./data/endpoints.json", (_, buffer) => {
  const lines = [];
  let inItems = false;
  let indent;
  FS.writeFileSync("./data/endpoints.json", iterateLine(buffer, v => {
    // Entering the "$items" block: remember its indentation level.
    if(v.includes("\"$items\"")){
      inItems = true;
      indent = v.length - v.trimLeft().length;
      return v;
    }
    if(!inItems){
      return v;
    }
    // Closing brace of "$items": emit the collected entries (plus the new
    // one), sorted, with commas fixed so only the last entry lacks one.
    if(v.trim() === "},"){
      inItems = false;
      lines.push(" ".repeat(indent + 2) + `"${NAME}": [ "${METHOD}", "${PATH}" ]`);
      return [
        ...lines.sort().map((v, i, my) => {
          if(i === my.length - 1){
            if(v.endsWith(',')){
              return v.slice(0, v.length - 1);
            }
          }else{
            if(!v.endsWith(',')){
              return v + ',';
            }
          }
          return v;
        }),
        v
      ];
    }
    // Buffer the existing entry lines until the block closes.
    lines.push(v);
  }));
});

// Inserts NAME into the auto-generated endpoint list of a .d.ts file,
// between the "//@jjwak-auto LIST {" / "//@jjwak-auto LIST }" markers,
// keeping entries sorted by key.
// NOTE(review): lines appearing before the first "'key':" match would hit
// table[undefined].push and throw — presumably the generated files never
// contain such lines; verify.
function append(path){
  const table = {};
  let current;
  let indent;
  let inList = false;
  return FS.writeFileSync(path, iterateLine(FS.readFileSync(path), v => {
    switch(v.trim()){
      case "//@jjwak-auto LIST {":
        inList = true;
        indent = v.length - v.trimLeft().length;
        return v;
      case "//@jjwak-auto LIST }":
        inList = false;
        // Register the new endpoint, then re-emit all entries sorted.
        table[NAME] = [
          " ".repeat(indent) + `'${NAME}': never`
        ];
        return [
          ...Object.entries(table).sort(orderByKey).map(withComma),
          v
        ];
    }
    if(!inList){
      return v;
    }
    // A line starting a new "'key':" entry switches the current bucket;
    // continuation lines are appended to the same bucket.
    const chunk = v.match(new RegExp(`^\\s{${indent}}'([\\w-]+)':`));
    if(chunk){
      current = chunk[1];
      table[current] = [];
    }
    table[current].push(v);
  }));
}
<reponame>ideacrew/pa_edidb
class ShopQualifyingLifeEvent < QualifyingLifeEvent
SHOP_QLES = %W[
adoption
birth
contract_violation
death
divorce
exceptional_circumstances
location_change
lost_access_to_mec
marriage
termination_of_benefits
]
embedded_in :family
end |
<filename>analysis/plotting.py
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Plotting functions."""
import matplotlib.pyplot as plt
import numpy as np
import Orange
import scikit_posthocs as sp
import seaborn as sns
from analysis import data_utils
from common import experiment_utils
_DEFAULT_TICKS_COUNT = 12
_DEFAULT_LABEL_ROTATION = 30
def _formatted_hour_min(seconds):
"""Turns |seconds| seconds into %H:%m format.
We don't use to_datetime() or to_timedelta(), because we want to
show hours larger than 23, e.g.: 24h:00m.
"""
time_string = ''
hours = int(seconds / 60 / 60)
minutes = int(seconds / 60) % 60
if hours:
time_string += '%dh' % hours
if minutes:
if hours:
time_string += ':'
time_string += '%dm' % minutes
return time_string
def _formatted_title(benchmark_snapshot_df):
"""Return a formatted title with time and trial count."""
benchmark_name = benchmark_snapshot_df.benchmark.unique()[0]
stats_string = benchmark_name
stats_string += ' ('
snapshot_time = benchmark_snapshot_df.time.unique()[0]
stats_string += _formatted_hour_min(snapshot_time)
trial_count = benchmark_snapshot_df.fuzzer.value_counts().min()
stats_string += ', %d trials/fuzzer' % trial_count
stats_string += ')'
return stats_string
class Plotter:
"""Plotter that uses the same color for the same fuzzer."""
# Tableau 20 colors.
_COLOR_PALETTE = [
'#1f77b4',
'#98df8a',
'#d62728',
'#c7c7c7',
'#ff7f0e',
'#ff9896',
'#e377c2',
'#dbdb8d',
'#2ca02c',
'#c5b0d5',
'#7f7f7f',
'#9edae5',
'#aec7e8',
'#8c564b',
'#c49c94',
'#bcbd22',
'#ffbb78',
'#9467bd',
'#f7b6d2',
'#17becf',
]
def __init__(self, fuzzers, quick=False, logscale=False):
"""Instantiates plotter with list of |fuzzers|. If |quick| is True,
creates plots faster but, with less detail.
"""
self._fuzzer_colors = {
fuzzer: self._COLOR_PALETTE[idx % len(self._COLOR_PALETTE)]
for idx, fuzzer in enumerate(sorted(fuzzers))
}
self._quick = quick
self._logscale = logscale
# pylint: disable=no-self-use
def _write_plot_to_image(self,
plot_function,
data,
image_path,
wide=False,
**kwargs):
"""Writes the result of |plot_function(data)| to |image_path|.
If |wide|, then the image size will be twice as wide as normal.
"""
width = 6.4
height = 4.8
figsize = (2 * width, height) if wide else (width, height)
fig, axes = plt.subplots(figsize=figsize)
try:
plot_function(data, axes=axes, **kwargs)
fig.savefig(image_path, bbox_inches="tight")
finally:
plt.close(fig)
def coverage_growth_plot(self, benchmark_df, axes=None):
"""Draws coverage growth plot on given |axes|.
The fuzzer labels will be in the order of their mean coverage at the
snapshot time (typically, the end of experiment).
"""
benchmark_names = benchmark_df.benchmark.unique()
assert len(benchmark_names) == 1, 'Not a single benchmark data!'
benchmark_snapshot_df = data_utils.get_benchmark_snapshot(benchmark_df)
snapshot_time = benchmark_snapshot_df.time.unique()[0]
fuzzer_order = data_utils.benchmark_rank_by_mean(
benchmark_snapshot_df).index
axes = sns.lineplot(
y='edges_covered',
x='time',
hue='fuzzer',
hue_order=fuzzer_order,
data=benchmark_df[benchmark_df.time <= snapshot_time],
ci=None if self._quick else 95,
palette=self._fuzzer_colors,
ax=axes)
axes.set_title(_formatted_title(benchmark_snapshot_df))
# Indicate the snapshot time with a big red vertical line.
axes.axvline(x=snapshot_time, color='r')
# Move legend outside of the plot.
axes.legend(bbox_to_anchor=(1.00, 1),
borderaxespad=0,
loc='upper left',
frameon=False)
axes.set(ylabel='Edge coverage')
axes.set(xlabel='Time (hour:minute)')
if self._logscale:
axes.set_xscale('log')
ticks = np.logspace(
# Start from the time of the first measurement.
np.log10(experiment_utils.DEFAULT_SNAPSHOT_SECONDS),
np.log10(snapshot_time + 1), # Include tick at end time.
_DEFAULT_TICKS_COUNT)
else:
ticks = np.arange(
experiment_utils.DEFAULT_SNAPSHOT_SECONDS,
snapshot_time + 1, # Include tick at end time.
snapshot_time / _DEFAULT_TICKS_COUNT)
axes.set_xticks(ticks)
axes.set_xticklabels([_formatted_hour_min(t) for t in ticks])
sns.despine(ax=axes, trim=True)
def write_coverage_growth_plot(self, benchmark_df, image_path, wide=False):
"""Writes coverage growth plot."""
self._write_plot_to_image(self.coverage_growth_plot,
benchmark_df,
image_path,
wide=wide)
def violin_plot(self, benchmark_snapshot_df, axes=None):
"""Draws violin plot.
The fuzzer labels will be in the order of their median coverage.
"""
benchmark_names = benchmark_snapshot_df.benchmark.unique()
assert len(benchmark_names) == 1, 'Not a single benchmark data!'
assert benchmark_snapshot_df.time.nunique() == 1, 'Not a snapshot!'
fuzzer_order = data_utils.benchmark_rank_by_median(
benchmark_snapshot_df).index
# Another options is to use |boxplot| instead of |violinplot|. With
# boxplot the median/min/max/etc is more visible than on the violin,
# especially with distributions with high variance. It does not have
# however violinplot's kernel density estimation.
sns.violinplot(y='edges_covered',
x='fuzzer',
data=benchmark_snapshot_df,
order=fuzzer_order,
palette=self._fuzzer_colors,
ax=axes)
axes.set_title(_formatted_title(benchmark_snapshot_df))
axes.set(ylabel='Reached region coverage')
axes.set(xlabel='Fuzzer (highest median coverage on the left)')
axes.set_xticklabels(axes.get_xticklabels(),
rotation=_DEFAULT_LABEL_ROTATION,
horizontalalignment='right')
sns.despine(ax=axes, trim=True)
def write_violin_plot(self, benchmark_snapshot_df, image_path):
"""Writes violin plot."""
self._write_plot_to_image(self.violin_plot, benchmark_snapshot_df,
image_path)
def distribution_plot(self, benchmark_snapshot_df, axes=None):
"""Draws distribution plot.
The fuzzer labels will be in the order of their median coverage.
"""
benchmark_names = benchmark_snapshot_df.benchmark.unique()
assert len(benchmark_names) == 1, 'Not a single benchmark data!'
assert benchmark_snapshot_df.time.nunique() == 1, 'Not a snapshot!'
fuzzers_in_order = data_utils.benchmark_rank_by_median(
benchmark_snapshot_df).index
for fuzzer in fuzzers_in_order:
measurements_for_fuzzer = benchmark_snapshot_df[
benchmark_snapshot_df.fuzzer == fuzzer]
sns.distplot(measurements_for_fuzzer['edges_covered'],
hist=False,
label=fuzzer,
color=self._fuzzer_colors[fuzzer],
ax=axes)
axes.set_title(_formatted_title(benchmark_snapshot_df))
axes.legend(loc='upper right', frameon=False)
axes.set(xlabel='Edge coverage')
axes.set(ylabel='Density')
axes.set_xticklabels(axes.get_xticklabels(),
rotation=_DEFAULT_LABEL_ROTATION,
horizontalalignment='right')
def write_distribution_plot(self, benchmark_snapshot_df, image_path):
"""Writes distribution plot."""
self._write_plot_to_image(self.distribution_plot, benchmark_snapshot_df,
image_path)
def ranking_plot(self, benchmark_snapshot_df, axes=None):
"""Draws ranking plot.
The fuzzer labels will be in the order of their median coverage.
"""
benchmark_names = benchmark_snapshot_df.benchmark.unique()
assert len(benchmark_names) == 1, 'Not a single benchmark data!'
assert benchmark_snapshot_df.time.nunique() == 1, 'Not a snapshot!'
fuzzer_order = data_utils.benchmark_rank_by_median(
benchmark_snapshot_df).index
axes = sns.barplot(y='edges_covered',
x='fuzzer',
data=benchmark_snapshot_df,
order=fuzzer_order,
estimator=np.median,
palette=self._fuzzer_colors,
ax=axes)
axes.set_title(_formatted_title(benchmark_snapshot_df))
axes.set(ylabel='Reached region coverage')
axes.set(xlabel='Fuzzer (highest median coverage on the left)')
axes.set_xticklabels(axes.get_xticklabels(),
rotation=_DEFAULT_LABEL_ROTATION,
horizontalalignment='right')
sns.despine(ax=axes, trim=True)
def write_ranking_plot(self, benchmark_snapshot_df, image_path):
"""Writes ranking plot."""
self._write_plot_to_image(self.ranking_plot, benchmark_snapshot_df,
image_path)
def better_than_plot(self, better_than_table, axes=None):
"""Draws better than plot."""
cmap = ['white', '#005a32']
sns.heatmap(better_than_table,
vmin=0,
vmax=1,
cmap=cmap,
linewidths=0.5,
linecolor='0.5',
cbar=False,
ax=axes)
axes.set_title('One-tailed statistical test result')
axes.set(ylabel='If green, then fuzzer in the row')
xlabel = 'is statistically significantly better than fuzzer in column.'
axes.set(xlabel=xlabel)
axes.set_xticklabels(axes.get_xticklabels(),
rotation=_DEFAULT_LABEL_ROTATION,
horizontalalignment='right')
def write_better_than_plot(self, better_than_table, image_path):
"""Writes better than plot."""
self._write_plot_to_image(self.better_than_plot, better_than_table,
image_path)
def heatmap_plot(self, p_values, axes=None, symmetric=False):
"""Draws heatmap plot for visualizing statistical test results.
If |symmetric| is enabled, it masks out the upper triangle of the
p-value table (as it is redundant with the lower triangle).
"""
if symmetric:
mask = np.zeros_like(p_values)
mask[np.triu_indices_from(p_values)] = True
heatmap_args = {
'linewidths': 0.5,
'linecolor': '0.5',
'clip_on': False,
'square': True,
'cbar_ax_bbox': [0.85, 0.35, 0.04, 0.3],
'mask': mask if symmetric else None
}
sp.sign_plot(p_values, ax=axes, **heatmap_args)
def write_heatmap_plot(self, p_values, image_path, symmetric=False):
"""Writes heatmap plot."""
self._write_plot_to_image(self.heatmap_plot,
p_values,
image_path,
symmetric=symmetric)
def write_critical_difference_plot(self, average_ranks, num_of_benchmarks,
image_path):
"""Writes critical difference diagram."""
critical_difference = Orange.evaluation.compute_CD(
average_ranks.values, num_of_benchmarks)
Orange.evaluation.graph_ranks(average_ranks.values, average_ranks.index,
critical_difference)
fig = plt.gcf()
try:
fig.savefig(image_path, bbox_inches="tight")
finally:
plt.close(fig)
def unique_coverage_ranking_plot(self,
unique_region_cov_df_combined,
axes=None):
"""Draws unique_coverage_ranking plot. The fuzzer labels will be in
the order of their coverage."""
fuzzer_order = unique_region_cov_df_combined.sort_values(
by='unique_regions_covered', ascending=False).fuzzer
axes = sns.barplot(y='unique_regions_covered',
x='fuzzer',
data=unique_region_cov_df_combined,
order=fuzzer_order,
palette=self._fuzzer_colors,
ax=axes)
for patch in axes.patches:
axes.annotate(
format(patch.get_height(), '.2f'),
(patch.get_x() + patch.get_width() / 2., patch.get_height()),
ha='center',
va='center',
xytext=(0, 10),
textcoords='offset points')
sns.barplot(y='aggregated_edges_covered',
x='fuzzer',
data=unique_region_cov_df_combined,
order=fuzzer_order,
facecolor=(1, 1, 1, 0),
edgecolor='0.2',
ax=axes)
axes.set(ylabel='Reached unique edge coverage')
axes.set(xlabel='Fuzzer (highest coverage on the left)')
axes.set_xticklabels(axes.get_xticklabels(),
rotation=_DEFAULT_LABEL_ROTATION,
horizontalalignment='right')
sns.despine(ax=axes, trim=True)
def write_unique_coverage_ranking_plot(self, unique_region_cov_df_combined,
image_path):
"""Writes ranking plot for unique coverage."""
self._write_plot_to_image(self.unique_coverage_ranking_plot,
unique_region_cov_df_combined, image_path)
def pairwise_unique_coverage_heatmap_plot(self,
pairwise_unique_coverage_table,
axes=None):
"""Draws the heatmap to visualize the unique coverage between
each pair of fuzzers."""
heatmap_args = {
'annot': True,
'fmt': 'd',
'cmap': 'Blues',
'linewidths': 0.5
}
axes = sns.heatmap(pairwise_unique_coverage_table,
ax=axes,
**heatmap_args)
axes.set(ylabel='Not covered by')
axes.set(xlabel='Covered by')
def write_pairwise_unique_coverage_heatmap_plot(
self, pairwise_unique_coverage_table, image_path):
"""Writes pairwise unique coverage heatmap plot."""
self._write_plot_to_image(self.pairwise_unique_coverage_heatmap_plot,
pairwise_unique_coverage_table, image_path)
|
#!/bin/sh
cd src || exit
nim c --out:../ng2bounce --multimethods:on bounce.nim
cd ..
|
def getCombinations(word):
combinations = []
for i in range(len(word)):
for j in range(i+1, len(word)+1):
combinations.append(word[i:j])
return combinations |
//инициализация библиотек
#include <windows.h>
#include <conio.h>
#include <iostream>
//устанавливаем пространство имён
using namespace std;
//класс Sokoban, в котором описана вся игра
class Sokoban
{
private:
//выбранный уровень
unsigned short int lvl;
public:
//конструктор
Sokoban()
{
setLVL(1);
menu();
}
//выбор уровня
void setLVL(int level)
{
lvl = level;
}
//получение уровня
int getLVL()
{
return lvl;
}
//изменение уровня (тут оно херовое, надо научить его двигаться в обе стороны)
void changeLVL()
{
/*TODO: прописать if для нажатых клавиш влево/вправо*/
/*Также нужно сделать так, чтобы счётчик уровней не опускался ниже 1 и не поднимался выше количества уровней*/
unsigned short int newLVL = getLVL() + 1;
setLVL(newLVL);
}
//кол-во коробок на уровне
void setBox(int boxes)
{
boxNum = boxes;
}
//меню (тут надо будет ебаться)
void menu()
{
}
class tile
{
private:
bool box;
bool playerPos;
bool wall;
bool winPlace;
public:
//конструктор
tile()
{
/*
box = false;
playerPos = false;
wall = false;
winPlace = false;
*/
//НАДО ПОЛУЧИТЬ НАЗВАНИЕ УРОВНЯ
//А ДЛЯ ЭТОГО СДЕЛАТЬ ОТДЕЛЬНЫЙ ПРИВАТНЫЙ МЕТОД
ifstream fin(lvlName);
lvlGen();
}
//геттеры
bool getBox()
{
return box;
}
bool getPlayerPos()
{
return playerPos;
}
bool getWall()
{
return wall;
}
bool getWinPlace()
{
return winPlace;
}
//сеттеры
void setBox(bool boxState)
{
box = boxState;
}
void setPlayerPos(bool playerPosState)
{
playerPos = playerPosState
}
void setWall(bool wallState)
{
wall = wallState;
}
void setWinPlace(bool winPlaceState)
{
winPlace = winPlaceState;
}
void lvlGen()
{
//НАДО ПОЛУЧИТЬ НАЗВАНИЕ УРОВНЯ
//А ДЛЯ ЭТОГО СДЕЛАТЬ ОТДЕЛЬНЫЙ ПРИВАТНЫЙ МЕТОД
string lvlName = getLVL() + ".txt";
ifstream fin(lvlName);
//в начало уровней надо поместить кол-во строк/столбцов
fin >> rows >> columns;
for (int i = 0; i < rows; i++)
for (int j = 0; j < columns + 1; j++)
{
//считываем один символ
tileSpec = getline(fin, 1);
switch (tilespec)
{
case '#':
{
array[i, j].setWall(true);
}
case '@':
{
array[i, j].setBox(true);
}
case '+':
{
array[i, j].setPlayerPos(true);
}
case '$':
{
array[i, j].setWinPlace(true);
}
}
}
fin.close();
//СЮДА ЖЕ ВСУНУТЬ ОТРИСОВКУ УРОВНЯ
}
};
class player
{
private:
int PlayerCoordinateX, PlayerCoordinateY;//Координаты, где стоит игрок
bool CanMovePlayer(int MoveInX, int MoveInY)/*Метод, который проверяет, может ли игрок двигаться влево или вправо,
MoveInX и MoveInY - смещение от координат героя, ожидаемый диапазон значений от -1 до +1*/
{
if (tile[PlayerCoordinateX + MoveInX][PlayerCoordinateY + MoveInY].getWall != true)//Если там не стена
{
if (tile[PlayerCoordinateX + MoveInX][PlayerCoordinateY + MoveInY].getBox == true &&
(title[CoordinateX + 2 * MoveInX][CoordinateY + 2 * MoveInY].getBox != true) or (title[CoordinateX + 2 * MoveInX][CoordinateY + 2 * MoveInY].getWall != true))//Если там не две коробки или коробка+стена
return true; //можно
else return false;//иначе нельзя
}
else return false; //инача нельзя
};
public:
void MovePlayer(int MoveInX, int MoveInY)/*Метод, который двигает игрока по плоскости, сдвигает коробки,
MoveInX и MoveInY - смещение от координат героя, ожидаемый диапазон значений от -1 до +1*/
{
if (player.CanMovePlayer(MoveInX, MoveInY) == true)
{
if (tile[PlayerCoordinateX + MoveInX][PlayerCoordinateY + MoveInY].getBox() == true)
{
tile[PlayerCoordinateX + MoveInX][PlayerCoordinateY + MoveInY].setBox(false);//убрать коробку
tile[PlayerCoordinateX + 2 * MoveInX][PlayerCoordinateY + 2 * MoveInY].setBox(true);//поставить коробку
}
tile[PlayerCoordinateX][PlayerCoordinateY].setPlayerPos(false);//Убрать игрока с клетки
tile[PlayerCoordinateX + MoveInX][PlayerCoordinateY + MoveInY].setPlayerPos(true);//Поставить игрока на новую клетку
PlayerCoordinateX = PlayerCoordinateX + MoveInX;//Записать новые координаты игрока
PlayerCoordinateY = PlayerCoordinateY + MoveInY;
}/*Сама коробка двигаться не может, поэтому нет нужны в написании метода её отдельного движения*/
};
};
};
/*
Легенда:
+ тип public
- тип private
*/
//Итак, что нам нужно
/*
данные:
-счётчик уровней: lvl
-количестов ящиков на уровне: boxNum
-количество установленных на нужное место ящиков: boxInPlace
методы:
конструктор: Sokoban (тут надо реализовать запуск меню, меню - отдельный подкласс)
+выбор уровня: setLVL
+меню: menu
+получение уровня: getLVL (для последующего счётчика уровней)
+смена уровня: changeLVL
+кол-во коробок на уровне: setBox
*/
/*
Итак, появляется новый класс: tile
данные:
-наличие коробки: box
-позиция игрока: playerPos
-наличие стены: wall
-место для коробки: winPlace
методы:
+4 геттера
+4 сеттера
//это для каждых данных
*/
|
def age(month, day):
# Get the current date
today = datetime.date.today()
# Get the birth date
birth_date = datetime.date(today.year, month, day)
# If the birth date has not occurred yet this year, subtract 1 from the age
if birth_date > today:
age = today.year - birth_date.year - 1
else:
age = today.year - birth_date.year
return age |
class DefaultSubscription:
pass
class Observable:
def __init__(self):
self.observers = []
def add_observer(self, observer):
self.observers.append(observer)
def remove_observer(self, observer):
self.observers.remove(observer)
def notify_observers(self, value, is_complete=False):
for observer in self.observers:
observer.on_next(value, is_complete)
class Observer:
def on_subscribe(self, subscription: DefaultSubscription):
self.subscription = subscription
def on_next(self, value, is_complete=False):
raise NotImplementedError("Subclasses must implement this method") |
import nltk
#tokenize the text
text = "John and Mary went to the park. They had a great time."
tokenized_text = nltk.word_tokenize(text)
# tag the tokens with their part-of-speech
tagged_words = nltk.pos_tag(tokenized_text)
# filter for proper nouns
names = [word for word,pos in tagged_words if (pos == 'NNP' or pos == 'NNPS')]
print(names)
# Output: ['John', 'Mary'] |
#!/bin/bash
sudo chmod 600 ~/.ssh/id_rsa
sudo chmod 600 ~/.ssh/id_rsa.pub
|
// Shape of the view-related UI state slice.
export type ViewState = {
  downloadsIsOpen: boolean
  timeZone: string
}

// Action: set the active time zone.
export type TIME_ZONE_SET = {
  type: "TIME_ZONE_SET"
  timeZone: string
}

// Action: open the downloads panel.
export type DOWNLOADS_SHOW = {
  type: "DOWNLOADS_SHOW"
}

// Action: close the downloads panel.
export type DOWNLOADS_HIDE = {
  type: "DOWNLOADS_HIDE"
}

// Union of every action the view reducer accepts.
export type ViewAction = TIME_ZONE_SET | DOWNLOADS_SHOW | DOWNLOADS_HIDE
|
/*
Create the database based on this schema if it doesn't already exist.
If it does exist, drop it and create it from scratch.
This will set up a single table with three fields:
"id", "title", and "text"
*/
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  title TEXT NOT NULL,
  text TEXT NOT NULL
);
-- phpMyAdmin SQL Dump
-- version 4.0.10deb1
-- http://www.phpmyadmin.net
--
-- Host: localhost
-- Generation Time: Jan 08, 2018 at 04:35 PM
-- Server version: 5.6.33-0ubuntu0.14.04.1
-- PHP Version: 5.5.9-1ubuntu4.22
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
--
-- Database: `pariwisata`
--
-- --------------------------------------------------------
--
-- Table structure for table `about`
--
-- "About" page content (logo, rich-text description) plus its SEO meta fields.
-- NOTE(review): id_admin presumably references admin.admin_id, but no foreign
-- key is declared -- verify against application code.
CREATE TABLE IF NOT EXISTS `about` (
`id_about` int(11) NOT NULL AUTO_INCREMENT,
`about_logo` varchar(30) CHARACTER SET latin1 COLLATE latin1_general_ci DEFAULT NULL,
`about_deskripsi` text CHARACTER SET latin1 COLLATE latin1_general_ci,
`id_admin` int(11) DEFAULT NULL,
`about_title_meta` varchar(200) CHARACTER SET latin1 COLLATE latin1_general_ci DEFAULT NULL,
`about_deskripsi_meta` varchar(200) CHARACTER SET latin1 COLLATE latin1_general_ci DEFAULT NULL,
`about_keyword_meta` varchar(200) CHARACTER SET latin1 COLLATE latin1_general_ci DEFAULT NULL,
PRIMARY KEY (`id_about`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=2 ;
--
-- Dumping data for table `about`
--
INSERT INTO `about` (`id_about`, `about_logo`, `about_deskripsi`, `id_admin`, `about_title_meta`, `about_deskripsi_meta`, `about_keyword_meta`) VALUES
(1, 'logo.png', '<h2 style="text-align: justify;">Modul Destinasi Pariwisata adalah sarana informasi Objek Wisata terpadu yang dikembangkan oleh Dinas Komunikasi dan Informatika Pemerintahan Kabupaten Malang menuju Smart Regency. Dengan penyajian informasi pariwisata yang komprehensif diharapkan Modul Destinasi Pariwisata dapat menjadi smart guidance bagi wisatawan baik lokal maupun mancanegara.</h2>', 0, '', '', '');
-- --------------------------------------------------------
--
-- Table structure for table `admin`
--
-- Application administrator accounts.
-- NOTE(review): the dump's sample rows show admin_view_password holding the
-- password in readable form alongside admin_password -- storing a viewable
-- password is a security risk; confirm intent and consider dropping the column.
CREATE TABLE IF NOT EXISTS `admin` (
`admin_id` int(11) NOT NULL AUTO_INCREMENT,
`admin_user` text NOT NULL,
`admin_password` varchar(100) NOT NULL,
`admin_view_password` varchar(100) DEFAULT NULL,
`admin_hak_akses` int(11) NOT NULL,
PRIMARY KEY (`admin_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=3 ;
--
-- Dumping data for table `admin`
--
INSERT INTO `admin` (`admin_id`, `admin_user`, `admin_password`, `admin_view_password`, `admin_hak_akses`) VALUES
(1, 'admin', '<PASSWORD>', 'admin1', 1),
(2, 'dea', '<PASSWORD>', '<PASSWORD>', 1);
-- --------------------------------------------------------
--
-- Table structure for table `berita`
--
-- News articles ("berita"): title, HTML body, publish date, author,
-- cover photo filename, and a free-text tag.
CREATE TABLE IF NOT EXISTS `berita` (
`berita_id` int(11) NOT NULL AUTO_INCREMENT,
`berita_judul` text NOT NULL,
`berita_deskripsi` text,
`berita_tgl` date NOT NULL,
`berita_autor` text NOT NULL,
`berita_foto` varchar(100) NOT NULL,
`berita_tag` text NOT NULL,
PRIMARY KEY (`berita_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=11 ;
--
-- Dumping data for table `berita`
--
INSERT INTO `berita` (`berita_id`, `berita_judul`, `berita_deskripsi`, `berita_tgl`, `berita_autor`, `berita_foto`, `berita_tag`) VALUES
(5, 'Dua Jempol, Dua Penghargaan Terbaik Pariwisata Diborong Kabupaten Malang', '<p>Komitmen membangun dunia pariwisata di Kabupaten Malang bukan isapan jempol belaka. Terutama kiprah, dedikasi, dan keteguhan kepala daerahnya dalam menerapkan kebijakan yang terarah, integral, dan kontinyu mengawal derap pembangunan kepariwisataan.</p>\r\n<p>Hal ini dibuktikan dengan suksesnya dunia pariwisata Kabupaten Malang yang dipimpin Bupati Dr H Rendra Kresna sebagai inisiator tumbuh kuatnya sektor yang kini jadi unggulan nasional. Melalui program optimalisasi pariwisata dengan branding The Heart of East Java, kekayaan alam serta dedikasi dan komitmen menjaga program yang bisa meningkatkan taraf kesejahteraan masyarakat diapresiasi oleh Pemerintahan Provinsi Jawa Timur (Jatim) dalam Malam Anugerah Wisata di Hotel Mercure Surabaya, Senin (20/11/2017) malam.</p>\r\n<p>Tidak tanggung-tanggung, Pemerintahan Kabupaten Malang menyabet dua penghargaan terbaik di bidang pariwisata yang diserahkan Gubernur Jatim Soekarwo kepada Rendra. Penghargaan bergengsi tersebut meliputi apresiasi terhadap komitmen dan kepedulian tinggi Pemerintah Kabupaten Malang dalam mengembangkan sektor pariwisata. </p>\r\n<p>Komitmen dan kepedulian Pemerintah Kabupaten Malang dalam pengembangan sektor pariwisata terlihat dengan geliat di hampir seluruh desa dalam berlomba membangun kepariwisataan desa yang tidak terjebak dalam eksploitasi lingkungan hidup yang berlebihan.</p>\r\n<p>Bupati Malang Rendra Kresna menyatakan penghargaan pariwisata dari Pemprov Jatim adalah berkat kerja keras bersama</p>\r\n<p>Konsep pariwisata berbasis masyarakat dan ramah lingkungan menjadi nyata dengan komitmen bupati Malang dalam meluncurkan tiga program besarnya di periode kedua kepemimpinannya. 
Yaitu pengentasan kemiskinan, optimalisasi pariwisata, dan lingkungan hidup.</p>\r\n<p>Sekretaris Daerah Provinsi Jatim <NAME> yang mendampingi Gubernur menyampaikan bahwa apresiasi Malam Anugerah Wisata ditujukan sebagai motivasi bagi seluruh daerah untuk menjaga komitmennya dalam mengembangkan dan memajukan wisata setempat.</p>\r\n<p>"Sektor pariwisata adalah program nasional yang kini terus digalakkan. Berbagai langkah, strategi dan anggaran pusat pun terus digelontorkan ke daerah untuk pariwisata. Maka, patutlah kita untuk menyambutnya," kata Sukardi.</p>\r\n<p>Salah satu cara menyambutnya adalah dengan menggali potensi pariwisata dan mempromosikannya. "Di sinilah ranah kepala daerah dan pemerintah daerah harus siap jadi public relation untuk mempromosikan potensi wisatanya masing-masing," ujar Sukardi. </p>\r\n<p>Komitmen dan kepedulian kepala daerah menjadi harga mati dalam mengoptimalkan dunia pariwisatanya. Pasalnya, lanjut Sukardi, tanpa adanya hal tersebut, sekaya apa pun potensi pariwisata yang dimiliki daerah, maka tidak akan berefek pada kesejahteraan masyarakatnya. "Kepala daerah jadinya tidak hanya merencanakan, tapi wajib juga mengawalnya," tegas Sukardi yang mengucapkan apresiasi tertingginya kepada kepala daerah yang mendapat penghargaan.</p>\r\n<p>Penghargaan kedua yang didapat Pemerintah Kabupaten Malang dalam Malam Anugerah Wisata Jatim adalah terpilihnya Wisata Pantai Clungup Mangrove Conservation Tiga Warna Desa Tambakrejo, Kecamatan Sumbermanjing Wetan, dengan predikat terbaik I kategori wisata alam.</p>\r\n<p>Terpisah, <NAME> menyatakan, bahwa perolehan dua penghargaan dari Pemprov Jatim tersebut sebagai bentuk bukti nyata peran serta seluruh masyarakat Kabupaten Malang. 
"Karena tanpa adanya kesadaran dari masyarakat, maka sebaik apa pun kebijakan kita, tentunya tidak optimal," kata Rendra, Selasa (21/11) setelah membuka acara lomba Senam Madep Manteb Manetep di Pendapa Kabupaten Malang, Kepanjen.</p>\r\n<p>Menurur Rendra, pemerintah hanya menguatkan kesadaran masyarakat serta mendukung penuh dengan berbagai kebijakan dan anggaran yang ada. "Kebijakan kita jelas dalam pariwisata, bahwa seluruh OPD bahu-membahu mengoptimalkan potensi pariwisata sesuai tupoksinya," ucap Rendra yang menerima langsung penghargaan tersebut.</p>\r\n<p>Diganjarnya pariwisata Kabupaten Malang dengan dua kategori terbaik tersebut, lanjut Rendra, bukan akhir dalam memperjuangkan potensi pariwisata sampai go international. "Ini kami anggap awal. Sebagai motivasi bagi kami semua. Tentunya kami akan terus memaksimalkan potensi wisata yang ada di wilayah masing-masing," imbuh ketua DPW Partai NasDem Jatim itu. </p>\r\n<p>Selain fokus pada optimalisasi wisata, Pemerintahan Kabupaten Malang juga terus bersinergi bersama wilayah Malang Raya dalam memajukan pariwisata. Jika di Kabupaten Malang dengan andalan wisata alam, Kota Batu dikenal wisata buatan, sedangkan Kota Malang dikenal wisata kuliner dan pendidikan. "Sinergitas ini menjadi penting dalam konteks kepaduan serta saling melengkapi potensi wisata yang ada di masing-masing wilayah Malang Raya," pungkas Rendra. </p>', '2017-10-06', 'Nana', 'Dua_Jempol,_Dua_Penghargaan_Terbaik_Pariwisata_Diborong_Kabupaten_Malang.jpg', 'Penghargaan'),
(6, 'Dinas Perhubungan Kabupaten Malang Siapkan Petugas Bermotor Pemandu Jalur Wisata', '<p> Dinas Perhubungan (Dishub) Kabupaten Malang siapkan 10 sepeda motor untuk panduan wisata. Hal itu sebagai langkah Dishub dalam melayani wisatawan yang ingin berkunjung ke objek wisata di Kabupaten Malang namun belum mengetahui jalur dan lokasi yang dituju.</p>\r\n<p>Kepala Dishub Kabupaten Malang, Hafi Lutfi menjelaskan, armada sepeda motor untuk panduan jalan bagi wisatawan ditempatkan di 10 unit pelaksana teknis Dishub Kabupaten Malang. Yakni di UPT Dishub Singosari, Tumpang, Pujon, Dampit, Pagak, Gondanglegi, Turen, Kepanjen, Talangagung, dan kantor Dishub.</p>\r\n<p>"Di masing-masing kantor UPT Dishub itu ada petugas yang siap memandu wisatawan yang ingin mengunjungi objek wisata yang belum tahu jalurnya," kata Hafi Lutfi, Selasa (14/11/2017).</p>\r\n<p>Dijelaskan Hafi Lutfi, disiapkannya petugas Dishub bermotor yang dilengkapi dengan peralatan sirine dan rotator lebih sebagai upaya memberi pelayanan terbaik bagi wisatawan yang berkunjung ke Kabupaten Malang. Karena bagaimanapun, luasnya wilayah Kabupaten Malang tidak jarang menyulitkan wisatawan untuk bisa sampai ke lokasi objek wisata yang akan dikunjungi.</p>\r\n<p>Di samping itu, dikatakan Hafi Lutfi, petugas UPT Dishub yang memandu wisatawan nantinya juga akan dibekali dan wajib mengetahui lokasi wisata di wilayah kerjanya masing-masing. Dengan demikian, petugas pemandu Dishub akan lebih cepat menentukan jalur mana yang bisa ditempuh oleh wisatawan untuk bisa mencapai lokasi wisata tersebut.</p>\r\n<p>"Maka dari itu, kecakapan dalam menguasai kondisi lapangan dan keterampilan mengendarai sepeda motor sebagai pemandu jalur menuju obyek wisata harus dimiliki petugas Dishub UPT Wisata nantinya," ujar Hafi Lutfi.</p>\r\n<p>Untuk pengadaan 10 unit sepeda motor UPT Dishub Pemandu Wisata, tambah Hafi Lutfi, dianggarkan dalam PAPBD 2017 mencapai sekitar Rp 350 juta. 
Dan diharapkan, besarnya alokasi pengadaan sepeda motor pemandu jalur wisata tersebut sebanding dengan manfaat bagi perkembangan wisata di Kabupaten Malang.</p>\r\n<p>"Untuk itulah, kami rasa Dishub dengan semboyan BISA (Brilian, Inovasi, Speed, dan Akuntable) akan mampu memberikan yang terbaik bagi sektor pariwisata di Kabupaten Malang," ucap Hafi Lutfi.</p>', '2017-11-06', '<NAME>', 'Dinas_Perhubungan_Kabupaten_Malang_Siapkan_Petugas_Bermotor_Pemandu_Jalur_Wisata.jpg', 'Dinas Perhubungan');
-- --------------------------------------------------------
--
-- Table structure for table `event`
--
-- Tourism events: title, photo, organizer, date, HTML description,
-- schedule-file URL, and a free-text tag.
CREATE TABLE IF NOT EXISTS `event` (
`event_id` int(11) NOT NULL AUTO_INCREMENT,
`event_judul` text NOT NULL,
`event_foto` varchar(100) NOT NULL,
`event_penyelenggara` text NOT NULL,
`event_tgl_pelaksanaan` date NOT NULL,
`event_deskripsi` text NOT NULL,
`event_url_file_jadwal` varchar(100) NOT NULL,
`event_tag` text NOT NULL,
PRIMARY KEY (`event_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=24 ;
--
-- Dumping data for table `event`
--
INSERT INTO `event` (`event_id`, `event_judul`, `event_foto`, `event_penyelenggara`, `event_tgl_pelaksanaan`, `event_deskripsi`, `event_url_file_jadwal`, `event_tag`) VALUES
(12, 'Gulat Pantai', 'Gulat_Pantai.jpg', 'Dinas Pariwisata dan Kebudayaan Kab Malang', '2017-12-03', '<p>KONI Kabupaten Malang akan menggelar beberapa kegiatan untuk memperingati hari jadi Kabupaten Malang ke 1257. Di antaranya adalah gulat pantai.</p>\r\n<p>Kejuaraan tersebut akan digelar di Pantai Ungapan pada 9 Desember 2017. Ketua KONI Kabupaten Malang, Asyari menjelaskan gulat pantai itu diadopsi dari olahraga tradisional.</p>\r\n<p>Tetapi, nantinya olahraga itu akan dikemas lebih menarik. “Hal itu agar gulat pantai ini bisa menjadi hiburan bagi wisatawan, dan menjadi kejuaraan yang menarik bagi para peserta,” beber Asyari kepada SURYAMALANG.COM, Senin (20/11/2017).</p>\r\n<p>Asyari menambahkan peserta yang akan ikut gulat pantai adalah para atlet gulat. Jika bukan atlet, dikhawatirkan akan menyulitkan penilaian.</p>\r\n<p>“Dalam kejuaraan itu pasti ada teknis dan hal lain, termasuk persiapan fisik.”</p>\r\n<p>“Kalau bukan atlet akan kesulitan bisa ikut kejuaraan tersebut,” terangnya.</p>', 'Gulat_Pantai.', 'ok s'),
(13, 'Lomba marathon beach running 10K', 'Lomba_marathon_beach_running_10K.jpg', 'Dinas Pariwisata dan Kebudayaan Kab Malang', '2017-11-17', '<p>Lomba marathon beach running 10K Malang Beach Festival diselenggarakan dalam rangka perayaan HUT Kabupaten Malang ke 1257.</p>', 'Lomba_marathon_beach_running_10K.', 'ok s');
-- --------------------------------------------------------
--
-- Table structure for table `fasilitas_pendukung`
--
-- Lookup table of supporting facilities (name + icon filename),
-- e.g. the sample row "Penginapan" / lodging.
CREATE TABLE IF NOT EXISTS `fasilitas_pendukung` (
`faspen_id` int(11) NOT NULL AUTO_INCREMENT,
`faspen_nama` text NOT NULL,
`faspen_icon` varchar(100) NOT NULL,
PRIMARY KEY (`faspen_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=5 ;
--
-- Dumping data for table `fasilitas_pendukung`
--
INSERT INTO `fasilitas_pendukung` (`faspen_id`, `faspen_nama`, `faspen_icon`) VALUES
(1, 'Penginapan', 'COTTAGE.png');
-- --------------------------------------------------------
--
-- Table structure for table `fasilitas_wisata`
--
-- Lookup table of on-site tourist facilities (name + icon filename),
-- e.g. security, parking, toilets per the sample rows below.
CREATE TABLE IF NOT EXISTS `fasilitas_wisata` (
`faswis_id` int(11) NOT NULL AUTO_INCREMENT,
`faswis_nama` text NOT NULL,
`faswis_icon` varchar(100) NOT NULL,
PRIMARY KEY (`faswis_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=17 ;
--
-- Dumping data for table `fasilitas_wisata`
--
INSERT INTO `fasilitas_wisata` (`faswis_id`, `faswis_nama`, `faswis_icon`) VALUES
(1, 'KEAMANAN', 'keamanan.png'),
(2, 'KLINIK KESEHATAN', 'klinik.png'),
(3, 'LOKER/PENITIPAN BARANG', 'safebox.png'),
(4, 'PARKIR', 'parkir.png'),
(5, 'PENGINAPAN', 'PENGINAPAN.png'),
(6, 'SOUVENIR SHOP', 'souvenir.png'),
(7, 'TEMPAT IBADAH', 'tempat_ibadah.png'),
(8, 'TEMPAT MAKAN', 'tempat_makan.png'),
(9, 'TOILET', 'toilet.png'),
(10, 'TOUR GUIDE', 'tourist_information.png'),
(11, 'TOURIST INFORMATION', 'tourist_information.png');
-- --------------------------------------------------------
--
-- Table structure for table `foto`
--
-- Photo gallery: one image filename per row, linked to a tourist site.
-- NOTE(review): wisata_id presumably references a `wisata` table (not visible
-- in this chunk); no foreign key is declared -- verify against the full dump.
CREATE TABLE IF NOT EXISTS `foto` (
`foto_id` int(11) NOT NULL AUTO_INCREMENT,
`url_file_foto` varchar(100) NOT NULL,
`wisata_id` int(11) NOT NULL,
PRIMARY KEY (`foto_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=646 ;
--
-- Dumping data for table `foto`
--
INSERT INTO `foto` (`foto_id`, `url_file_foto`, `wisata_id`) VALUES
(1, '_DSC2653 (FILEminimizer).JPG', 48),
(2, '_DSC2659 (FILEminimizer).JPG', 48),
(3, '_DSC2660 (FILEminimizer).JPG', 48),
(4, 'DSC03068 (FILEminimizer).JPG', 48),
(5, 'DSC03083 (FILEminimizer).JPG', 48),
(6, '_DSC2653 (FILEminimizer).JPG', 49),
(7, '_DSC2659 (FILEminimizer).JPG', 49),
(8, '_DSC2660 (FILEminimizer).JPG', 49),
(9, 'DSC03068 (FILEminimizer).JPG', 49),
(10, 'DSC03083 (FILEminimizer).JPG', 49),
(11, '_DSC2808 (FILEminimizer).JPG', 64),
(12, 'DSC03393 (FILEminimizer).JPG', 64),
(13, 'DSC03403 (FILEminimizer).JPG', 64),
(14, 'DSC03405 (FILEminimizer).JPG', 64),
(15, 'DSC03420 (FILEminimizer).JPG', 64),
(16, 'DSC02923 (FILEminimizer).JPG', 31),
(17, 'DSC02928 (FILEminimizer).JPG', 31),
(18, 'DSC02932 (FILEminimizer).JPG', 31),
(19, 'DSC02937 (FILEminimizer).JPG', 31),
(20, 'DSC02952 (FILEminimizer).JPG', 31),
(21, 'DSC02923 (FILEminimizer).JPG', 32),
(22, 'DSC02928 (FILEminimizer).JPG', 32),
(23, 'DSC02932 (FILEminimizer).JPG', 32),
(24, 'DSC02937 (FILEminimizer).JPG', 32),
(25, 'DSC02952 (FILEminimizer).JPG', 32),
(26, 'DSC02690 (FILEminimizer).JPG', 14),
(27, 'DSC02696 (FILEminimizer).JPG', 14),
(28, 'DSC02698 (FILEminimizer).JPG', 14),
(29, 'DSC02702 (FILEminimizer).JPG', 14),
(30, 'DSC02709 (FILEminimizer).JPG', 14),
(36, 'DSC02690 (FILEminimizer).JPG', 16),
(37, 'DSC02696 (FILEminimizer).JPG', 16),
(38, 'DSC02698 (FILEminimizer).JPG', 16),
(39, 'DSC02702 (FILEminimizer).JPG', 16),
(40, 'DSC02709 (FILEminimizer).JPG', 16),
(41, 'DSC02670 (FILEminimizer).JPG', 11),
(42, 'DSC02674 (FILEminimizer).JPG', 11),
(43, 'DSC02677 (FILEminimizer).JPG', 11),
(44, 'DSC02687 (FILEminimizer).JPG', 11),
(45, 'DSC02688 (FILEminimizer).JPG', 11),
(46, 'DSC02670 (FILEminimizer).JPG', 12),
(47, 'CANDI_KIDAL2_DSC02672.jpg', 12),
(48, 'CANDI_KIDAL3_DSC02680.jpg', 12),
(49, 'CANDI_KIDAL4_DSC02683.jpg', 12),
(50, 'CANDI_KIDAL5_DSC02685.jpg', 12),
(51, 'DSC02670 (FILEminimizer).JPG', 13),
(52, 'DSC02674 (FILEminimizer).JPG', 13),
(53, 'DSC02677 (FILEminimizer).JPG', 13),
(54, 'DSC02687 (FILEminimizer).JPG', 13),
(55, 'DSC02688 (FILEminimizer).JPG', 13),
(56, 'DSC03027 (FILEminimizer).JPG', 42),
(57, 'DSC03030 (FILEminimizer).JPG', 42),
(58, 'DSC03033 (FILEminimizer).JPG', 42),
(59, 'DSC03036 (FILEminimizer).JPG', 42),
(60, 'DSC03044 (FILEminimizer).JPG', 42),
(61, 'DSC03027 (FILEminimizer).JPG', 43),
(62, 'DSC03030 (FILEminimizer).JPG', 43),
(63, 'DSC03033 (FILEminimizer).JPG', 43),
(64, 'DSC03036 (FILEminimizer).JPG', 43),
(65, 'DSC03044 (FILEminimizer).JPG', 43),
(66, 'DSC03027 (FILEminimizer).JPG', 44),
(67, 'DSC03030 (FILEminimizer).JPG', 44),
(68, 'DSC03033 (FILEminimizer).JPG', 44),
(69, 'DSC03036 (FILEminimizer).JPG', 44),
(70, 'DSC03044 (FILEminimizer).JPG', 44),
(71, '_DSC2777 (FILEminimizer).JPG', 60),
(72, 'a.JPG', 60),
(73, 'b.JPG', 60),
(74, 'c.JPG', 60),
(75, 'd.JPG', 60),
(76, '_DSC2664 (FILEminimizer).JPG', 53),
(77, '_DSC2672 (FILEminimizer).JPG', 53),
(78, 'DJI_0112 (FILEminimizer).JPG', 53),
(79, 'DSC03134 (FILEminimizer).JPG', 53),
(80, 'DSC03178 (FILEminimizer).JPG', 53),
(81, 'DSC02636 (FILEminimizer).JPG', 10),
(82, 'DSC02639 (FILEminimizer).JPG', 10),
(83, 'DSC02641 (FILEminimizer).JPG', 10),
(84, 'DSC02650 (FILEminimizer).JPG', 10),
(85, 'DSC02660 (FILEminimizer).JPG', 10),
(86, '_DSC2619 (FILEminimizer).JPG', 33),
(87, 'DSC02968 (FILEminimizer).JPG', 33),
(88, 'DSC02975 (FILEminimizer).JPG', 33),
(89, 'DSC02992 (FILEminimizer).JPG', 33),
(90, 'DSC02999 (FILEminimizer).JPG', 33),
(91, '_DSC2619 (FILEminimizer).JPG', 34),
(92, 'DSC02968 (FILEminimizer).JPG', 34),
(93, 'DSC02975 (FILEminimizer).JPG', 34),
(94, 'DSC02992 (FILEminimizer).JPG', 34),
(95, 'DSC02999 (FILEminimizer).JPG', 34),
(101, '_DSC2619 (FILEminimizer).JPG', 36),
(102, 'DSC02968 (FILEminimizer).JPG', 36),
(103, 'DSC02975 (FILEminimizer).JPG', 36),
(104, 'DSC02992 (FILEminimizer).JPG', 36),
(105, 'DSC02999 (FILEminimizer).JPG', 36),
(106, '_DSC2619 (FILEminimizer).JPG', 37),
(107, 'DSC02968 (FILEminimizer).JPG', 37),
(108, 'DSC02975 (FILEminimizer).JPG', 37),
(109, 'DSC02992 (FILEminimizer).JPG', 37),
(110, 'DSC02999 (FILEminimizer).JPG', 37),
(111, '_DSC2619 (FILEminimizer).JPG', 38),
(112, 'DSC02968 (FILEminimizer).JPG', 38),
(113, 'DSC02975 (FILEminimizer).JPG', 38),
(114, 'DSC02992 (FILEminimizer).JPG', 38),
(115, 'DSC02999 (FILEminimizer).JPG', 38),
(121, '_DSC2619 (FILEminimizer).JPG', 40),
(122, 'DSC02968 (FILEminimizer).JPG', 40),
(123, 'DSC02975 (FILEminimizer).JPG', 40),
(124, 'DSC02992 (FILEminimizer).JPG', 40),
(125, 'DSC02999 (FILEminimizer).JPG', 40),
(126, 'DSC03197 (FILEminimizer).JPG', 54),
(127, 'DSC03210 (FILEminimizer).JPG', 54),
(128, 'DSC03219 (FILEminimizer).JPG', 54),
(129, 'DSC03223 (FILEminimizer).JPG', 54),
(130, 'DSC03225 (FILEminimizer).JPG', 54),
(131, 'DSC02615 (FILEminimizer).JPG', 7),
(132, 'DSC02622 (FILEminimizer).JPG', 7),
(133, 'DSC02625 (FILEminimizer).JPG', 7),
(134, 'DSC02626 (FILEminimizer).JPG', 7),
(135, 'DSC02631 (FILEminimizer).JPG', 7),
(136, 'DSC02615 (FILEminimizer).JPG', 8),
(139, 'DSC02626 (FILEminimizer).JPG', 8),
(146, 'DSC03284 (FILEminimizer).JPG', 59),
(147, 'DSC03285 (FILEminimizer).JPG', 59),
(148, 'DSC03286 (FILEminimizer).JPG', 59),
(149, 'DSC03289 (FILEminimizer).JPG', 59),
(150, 'DSC03291 (FILEminimizer).JPG', 59),
(156, '_DSC2823 (FILEminimizer).JPG', 66),
(157, '_DSC2828 (FILEminimizer).JPG', 66),
(158, 'DSC03426 (FILEminimizer).JPG', 66),
(159, 'DSC03428 (FILEminimizer).JPG', 66),
(160, 'DSC03436 (FILEminimizer).JPG', 66),
(161, '_DSC2823 (FILEminimizer).JPG', 67),
(162, '_DSC2828 (FILEminimizer).JPG', 67),
(163, 'DSC03426 (FILEminimizer).JPG', 67),
(164, 'DSC03428 (FILEminimizer).JPG', 67),
(165, 'DSC03436 (FILEminimizer).JPG', 67),
(166, 'DSC02845 (FILEminimizer).JPG', 24),
(167, 'DSC02856 (FILEminimizer).JPG', 24),
(168, 'DSC02859 (FILEminimizer).JPG', 24),
(169, 'DSC02860 (FILEminimizer).JPG', 24),
(170, 'DSC02863 (FILEminimizer).JPG', 24),
(176, 'DSC02876 (FILEminimizer).JPG', 26),
(177, 'DSC02881 (FILEminimizer).JPG', 26),
(178, 'DSC02893 (FILEminimizer).JPG', 26),
(179, 'DSC02896 (FILEminimizer).JPG', 26),
(180, 'DSC02917 (FILEminimizer).JPG', 26),
(181, 'DSC02876 (FILEminimizer).JPG', 27),
(182, 'DSC02881 (FILEminimizer).JPG', 27),
(183, 'DSC02893 (FILEminimizer).JPG', 27),
(184, 'DSC02896 (FILEminimizer).JPG', 27),
(185, 'DSC02917 (FILEminimizer).JPG', 27),
(186, 'DSC02876 (FILEminimizer).JPG', 28),
(187, 'DSC02881 (FILEminimizer).JPG', 28),
(188, 'DSC02893 (FILEminimizer).JPG', 28),
(189, 'DSC02896 (FILEminimizer).JPG', 28),
(190, 'DSC02917 (FILEminimizer).JPG', 28),
(201, 'DSC02781 (FILEminimizer).JPG', 22),
(202, 'DSC02783 (FILEminimizer).JPG', 22),
(203, 'DSC02792 (FILEminimizer).JPG', 22),
(204, 'DSC02825 (FILEminimizer).JPG', 22),
(205, 'DSC02866 (FILEminimizer).JPG', 22),
(211, 'DSC02718 (FILEminimizer).JPG', 17),
(212, 'DSC02720 (FILEminimizer).JPG', 17),
(213, 'DSC02728 (FILEminimizer).JPG', 17),
(214, 'DSC02730 (FILEminimizer).JPG', 17),
(215, 'DSC02733 (FILEminimizer).JPG', 17),
(221, 'DSC02745 (FILEminimizer).JPG', 19),
(222, 'DSC02748 (FILEminimizer).JPG', 19),
(223, 'DSC02750 (FILEminimizer).JPG', 19),
(224, 'DSC02751 (FILEminimizer).JPG', 19),
(225, 'DSC02756 (FILEminimizer).JPG', 19),
(226, 'DSC02745 (FILEminimizer).JPG', 20),
(227, 'DSC02748 (FILEminimizer).JPG', 20),
(228, 'DSC02750 (FILEminimizer).JPG', 20),
(229, 'DSC02751 (FILEminimizer).JPG', 20),
(230, 'DSC02756 (FILEminimizer).JPG', 20),
(231, 'DSC02745 (FILEminimizer).JPG', 21),
(232, 'DSC02748 (FILEminimizer).JPG', 21),
(233, 'DSC02750 (FILEminimizer).JPG', 21),
(234, 'DSC02751 (FILEminimizer).JPG', 21),
(235, 'DSC02756 (FILEminimizer).JPG', 21),
(236, '_DSC2626 (FILEminimizer).JPG', 41),
(237, '_DSC2628 (FILEminimizer).JPG', 41),
(238, '_DSC2635 (FILEminimizer).JPG', 41),
(239, 'DSC03015 (FILEminimizer).JPG', 41),
(240, 'DSC03020 (FILEminimizer).JPG', 41),
(241, '_DSC2713 (FILEminimizer).JPG', 55),
(242, '_DSC2718 (FILEminimizer).JPG', 55),
(243, 'DSC03227 (FILEminimizer).JPG', 55),
(244, 'DSC03228 (FILEminimizer).JPG', 55),
(245, 'DSC03238 (FILEminimizer).JPG', 55),
(246, '_DSC2713 (FILEminimizer).JPG', 56),
(247, '_DSC2718 (FILEminimizer).JPG', 56),
(248, 'DSC03227 (FILEminimizer).JPG', 56),
(249, 'DSC03228 (FILEminimizer).JPG', 56),
(250, 'DSC03238 (FILEminimizer).JPG', 56),
(251, 'DSC02584 (FILEminimizer).JPG', 6),
(252, 'DSC02588 (FILEminimizer).JPG', 6),
(253, 'DSC02589 (FILEminimizer).JPG', 6),
(254, 'DSC02591 (FILEminimizer).JPG', 6),
(255, 'DSC02597 (FILEminimizer).JPG', 6),
(256, '_DSC2792 (FILEminimizer).JPG', 63),
(257, 'DSC03355 (FILEminimizer).JPG', 63),
(258, 'DSC03360 (FILEminimizer).JPG', 63),
(259, 'DSC03388 (FILEminimizer).JPG', 63),
(260, 'DSC03390 (FILEminimizer).JPG', 63),
(261, '_DSC2739 (FILEminimizer).JPG', 57),
(262, '_DSC2740 (FILEminimizer).JPG', 57),
(263, '_DSC2744 (FILEminimizer).JPG', 57),
(264, 'DSC03268 (FILEminimizer).JPG', 57),
(265, 'DSC03276 (FILEminimizer).JPG', 57),
(266, '_DSC2739 (FILEminimizer).JPG', 58),
(267, '_DSC2740 (FILEminimizer).JPG', 58),
(268, '_DSC2744 (FILEminimizer).JPG', 58),
(269, 'DSC03268 (FILEminimizer).JPG', 58),
(270, 'DSC03276 (FILEminimizer).JPG', 58),
(271, '_DSC2786 (FILEminimizer).JPG', 61),
(272, 'DSC03320 (FILEminimizer).JPG', 61),
(273, 'DSC03326 (FILEminimizer).JPG', 61),
(274, 'DSC03332 (FILEminimizer).JPG', 61),
(275, 'DSC03335 (FILEminimizer).JPG', 61),
(276, '_DSC2786 (FILEminimizer).JPG', 62),
(277, 'DSC03320 (FILEminimizer).JPG', 62),
(278, 'DSC03326 (FILEminimizer).JPG', 62),
(279, 'DSC03332 (FILEminimizer).JPG', 62),
(280, 'DSC03335 (FILEminimizer).JPG', 62),
(281, 'DSC03089 (FILEminimizer).JPG', 51),
(282, 'DSC03096 (FILEminimizer).JPG', 51),
(283, 'DSC03104 (FILEminimizer).JPG', 51),
(284, 'DSC03112 (FILEminimizer).JPG', 51),
(285, 'DSC03120 (FILEminimizer).JPG', 51),
(286, 'DSC03089 (FILEminimizer).JPG', 52),
(287, 'DSC03096 (FILEminimizer).JPG', 52),
(288, 'DSC03104 (FILEminimizer).JPG', 52),
(289, 'DSC03112 (FILEminimizer).JPG', 52),
(290, 'DSC03120 (FILEminimizer).JPG', 52),
(291, '_DSC2637 (FILEminimizer).JPG', 45),
(292, '_DSC2649 (FILEminimizer).JPG', 45),
(293, 'DSC03048 (FILEminimizer).JPG', 45),
(294, 'DSC03049 (FILEminimizer).JPG', 45),
(295, 'DSC03053 (FILEminimizer).JPG', 45),
(296, '_DSC2637 (FILEminimizer).JPG', 46),
(297, '_DSC2649 (FILEminimizer).JPG', 46),
(298, 'DSC03048 (FILEminimizer).JPG', 46),
(299, 'DSC03049 (FILEminimizer).JPG', 46),
(300, 'DSC03053 (FILEminimizer).JPG', 46),
(301, '_DSC2637 (FILEminimizer).JPG', 47),
(302, '_DSC2649 (FILEminimizer).JPG', 47),
(303, 'DSC03048 (FILEminimizer).JPG', 47),
(304, 'DSC03049 (FILEminimizer).JPG', 47),
(305, 'DSC03053 (FILEminimizer).JPG', 47),
(306, 'loyal.jpg', 70),
(308, '5.jpg', 73),
(309, '520264_b1b8_4.jpg', 74),
(317, 'COBAN_SUMBERPITU1__DSC2717.jpg', 101),
(318, 'COBAN_SUMBERPITU2__DSC2724.jpg', 101),
(319, 'COBAN_SUMBERPITU3_DSC03237.jpg', 101),
(320, 'COBAN_SUMBERPITU4_DSC03247.jpg', 101),
(322, 'SUMBER_MARON1_DSC03635.jpg', 102),
(323, 'SUMBER_MARON2_DSC03636.jpg', 102),
(324, 'SUMBER_MARON3_DSC03641.jpg', 102),
(327, 'WADUK_SELOREJO1_DSC02432.jpg', 103),
(328, 'WADUK_SELOREJO3_DSC02507.jpg', 103),
(329, 'WADUK_SELOREJO4_DSC02513.jpg', 103),
(330, 'WADUK_SELOREJO5_DSC02519.jpg', 103),
(345, 'SUMBER_MARON4_DSC03699.jpg', 102),
(348, 'KEBUN_TEH_WONOSARI1__DSC2651.jpg', 50),
(349, 'KEBUN_TEH_WONOSARI2_DSC03064.jpg', 50),
(350, 'KEBUN_TEH_WONOSARI3_DSC03067.jpg', 50),
(351, 'AGRO_SALAK1_DSC03604.jpg', 92),
(352, 'KEBUN_TEH_WONOSARI4_DSC03077.jpg', 50),
(353, 'AGRO_SALAK2_DSC03605.jpg', 92),
(354, 'KEBUN_TEH_WONOSARI5_DSC03079.jpg', 50),
(355, 'AGRO_SALAK3_DSC03617.jpg', 92),
(356, 'BEDENGAN1__DSC2814.jpg', 104),
(357, 'BEDENGAN3_DSC03403.jpg', 104),
(358, 'BEDENGAN4_DSC03404.jpg', 104),
(360, 'BONDERLAND1_DSC03713.jpg', 93),
(361, 'BONDERLAND2_DSC03714.jpg', 93),
(362, 'BONDERLAND3_DSC03718.jpg', 93),
(363, 'BONDERLAND4_DSC03724.jpg', 93),
(364, 'BONDERLAND5_DSC03727.jpg', 93),
(365, 'CANDI_BADUT1_DSC03547.jpg', 97),
(366, 'CANDI_BADUT2_DSC03549.jpg', 97),
(367, 'PANTAI_BAJULMATI1_DSC02844.jpg', 25),
(368, 'CANDI_BADUT3_DSC03550.jpg', 97),
(369, 'PANTAI_BAJULMATI2_DSC02847.jpg', 25),
(370, 'CANDI_BADUT4_DSC03551.jpg', 97),
(371, 'CANDI_BADUT5_DSC03555.jpg', 97),
(372, 'PANTAI_BAJULMATI3_DSC02856.jpg', 25),
(373, 'PANTAI_BAJULMATI5_DSC02860.jpg', 25),
(374, 'CANDI_SUMBERAWAN1_DSC04156.jpg', 96),
(375, 'CANDI_SUMBERAWAN2_DSC04157.jpg', 96),
(377, 'CANDI_SUMBERAWAN3_DSC04160.jpg', 96),
(379, 'CANDI_SUMBERAWAN4_DSC04162.jpg', 96),
(380, 'CANDI_SUMBERAWAN5_DSC04168.jpg', 96),
(381, 'PANTAI_TAMBAN1_DSC03977.jpg', 87),
(382, 'PANTAI_TAMBAN2_DSC03978.jpg', 87),
(383, 'COBAN_BIDADARI1_DSC04201.jpg', 107),
(384, 'PANTAI_TAMBAN3_DSC03984.jpg', 87),
(386, 'COBAN_BIDADARI2_DSC04202.jpg', 107),
(387, 'PANTAI_TAMBAN4_DSC03986.jpg', 87),
(389, 'COBAN_BIDADARI3_DSC04214.jpg', 107),
(391, 'COBAN_BIDADARI4_DSC04224.jpg', 107),
(392, 'COBAN_BIDADARI5_DSC04226.jpg', 107),
(393, 'PANTAI_GATRA1_DSC04054.jpg', 78),
(394, 'PANTAI_GATRA2_DSC04055.jpg', 78),
(395, 'PANTAI_TIGA_WARNA1_DSC04016.jpg', 88),
(396, 'PANTAI_GATRA3_DSC04061.jpg', 78),
(397, 'PANTAI_GATRA4_DSC04064.jpg', 78),
(398, 'PANTAI_TIGA_WARNA2_DSC04030.jpg', 88),
(399, 'PANTAI_GATRA5_DSC04073.jpg', 78),
(400, 'PANTAI_TIGA_WARNA3_DSC04031.jpg', 88),
(401, 'COBAN_JAHE1_DSC04893.jpg', 108),
(402, 'PANTAI_TIGA_WARNA4_DSC04040.jpg', 88),
(403, 'COBAN_JAHE2_DSC04895.jpg', 108),
(404, 'PANTAI_TIGA_WARNA5_DSC04043.jpg', 88),
(405, 'COBAN_JAHE3_DSC04898.jpg', 108),
(406, 'PANTAI_GOA_CINA1_DSC03908.jpg', 79),
(407, 'COBAN_JAHE4_DSC04910.jpg', 108),
(408, 'PANTAI_GOA_CINA2_DSC03909.jpg', 79),
(409, 'COBAN_JAHE5_DSC04915.jpg', 108),
(410, 'PANTAI_GOA_CINA3_DSC03912.jpg', 79),
(411, 'PANTAI_GOA_CINA4_DSC03923.jpg', 79),
(412, 'PANTAI_GOA_CINA5_DSC03924.jpg', 79),
(413, 'PANTAI_UNGAPAN1_DSC03884.jpg', 89),
(414, 'PANTAI_UNGAPAN2_DSC03886.jpg', 89),
(415, 'PANTAI_UNGAPAN3_DSC03887.jpg', 89),
(416, 'PANTAI_UNGAPAN4_DSC03890.jpg', 89),
(417, 'COBAN_PELANGI1__DSC2765.jpg', 109),
(418, 'PANTAI_UNGAPAN5_DSC03891.jpg', 89),
(419, 'PANTAI_JEMBATAN_PANJANG1_P1360115.jpg', 80),
(420, 'COBAN_PELANGI2__DSC2768.jpg', 109),
(421, 'COBAN_PELANGI3_DSC03298.jpg', 109),
(422, 'PANTAI_LENGGOKSONO1_DSC04124.jpg', 81),
(423, 'COBAN_PELANGI4_DSC03303.jpg', 109),
(424, 'PANTAI_LENGGOKSONO2_DSC04133.jpg', 81),
(425, 'COBAN_PELANGI5_DSC03304.jpg', 109),
(426, 'PANTAI_LENGGOKSONO3_DSC04141.jpg', 81),
(427, 'PANTAI_LENGGOKSONO4_DSC04143.jpg', 81),
(428, 'PANTAI_LENGGOKSONO5_DSC04149.jpg', 81),
(429, 'PANTAI_WEDI_AWU1_DJI_0272.jpg', 90),
(430, 'COBAN_RONDO1__DSC2664.jpg', 110),
(431, 'PANTAI_WEDI_AWU2_DSC04105.jpg', 90),
(432, 'COBAN_RONDO2__DSC2673.jpg', 110),
(433, 'PANTAI_WEDI_AWU3_DSC04106.jpg', 90),
(434, 'COBAN_RONDO4_DJI_0112.jpg', 110),
(435, 'PANTAI_WEDI_AWU5_DSC04112.jpg', 90),
(436, 'COBAN_RONDO5_DSC03177.jpg', 110),
(437, 'PANTAI_NGANTEB1_DSC03829.jpg', 77),
(438, 'PANTAI_NGANTEB2_DSC03831.jpg', 77),
(439, 'PANTAI_NGANTEB3_DSC03835.jpg', 77),
(440, 'PANTAI_NGANTEB4_DSC03838.jpg', 77),
(441, 'PANTAI_NGANTEB5_DSC03845.jpg', 77),
(442, 'BOONPRING1_DSC02636.jpg', 111),
(443, 'BOONPRING2_DSC02648.jpg', 111),
(444, 'BOONPRING3_DSC02650.jpg', 111),
(445, 'BOONPRING4_DSC02659.jpg', 111),
(447, 'BOONPRING5_DSC02660.jpg', 111),
(448, 'PEMANDIAN_KENDEDES1__DSC2632.jpg', 117),
(449, 'PEMANDIAN_KENDEDES2__DSC2633.jpg', 117),
(450, 'PANTAI_NGUDEL1_DSC02748.jpg', 82),
(451, 'PEMANDIAN_KENDEDES3__DSC2636.jpg', 117),
(452, 'PANTAI_NGUDEL2_DSC02751.jpg', 82),
(453, 'PEMANDIAN_KENDEDES4_DSC03019.jpg', 117),
(454, 'PANTAI_NGUDEL3_DSC02759.jpg', 82),
(455, 'PANTAI_NGUDEL4_DSC02768.jpg', 82),
(456, 'PEMANDIAN_KENDEDES5_DSC03020.jpg', 117),
(457, 'PANTAI_NGUDEL5_DSC02772.jpg', 82),
(458, 'COBAN_SUMBERPITU5_DSC03251.jpg', 101),
(459, 'PANTAI_PARANGDOWO1_DSC03867.jpg', 83),
(460, 'PANTAI_PARANGDOWO2_DSC03869.jpg', 83),
(461, 'PANTAI_PARANGDOWO3_DSC03876.jpg', 83),
(462, 'PANTAI_PARANGDOWO4_DSC03880.jpg', 83),
(463, 'PANTAI_PARANGDOWO5_DSC03883.jpg', 83),
(464, 'DESA_WISATA_PUJON_KIDUL1_DSC02363.jpg', 9),
(465, 'DESA_WISATA_PUJON_KIDUL2_DSC02366.jpg', 9),
(467, 'DESA_WISATA_PUJON_KIDUL3_DSC02369.jpg', 9),
(469, 'WISATA_RELIGI_SUMUR_PITU1_DSC03807.jpg', 99),
(470, 'DESA_WISATA_PUJON_KIDUL4_DSC02399.jpg', 9),
(472, 'WISATA_RELIGI_SUMUR_PITU2_DSC03809.jpg', 99),
(473, 'DESA_WISATA_PUJON_KIDUL5_DSC02400.jpg', 9),
(476, 'WISATA_RELIGI_SUMUR_PITU3_DSC03810.jpg', 99),
(477, 'WISATA_RELIGI_SUMUR_PITU4_DSC03816.jpg', 99),
(478, 'WISATA_RELIGI_SUMUR_PITU5_DSC03819.jpg', 99),
(479, 'WENDIT1_DSC03325.jpg', 98),
(480, 'WENDIT2_DSC03328.jpg', 98),
(481, 'WENDIT3_DSC03332.jpg', 98),
(482, 'PANTAI_SENDIKI1_DSC03936.jpg', 85),
(483, 'WENDIT4_DSC03337.jpg', 98),
(484, 'PANTAI_SENDIKI2_DSC03938.jpg', 85),
(485, 'WENDIT5_DSC03346.jpg', 98),
(486, 'PANTAI_SENDIKI3_DSC03950.jpg', 85),
(487, 'PANTAI_SENDIKI4_DSC03955.jpg', 85),
(488, 'PANTAI_SENDIKI5_DSC03958.jpg', 85),
(489, 'HUTAN_PINUS_SEMERU1_DSC03748.jpg', 113),
(490, 'PANTAI_SENDANGBIRU1_DSC04081.jpg', 84),
(491, 'PANTAI_SENDANGBIRU2_DSC04082.jpg', 84),
(492, 'PANTAI_SENDANGBIRU3_DSC04083.jpg', 84),
(493, 'HUTAN_PINUS_SEMERU2_DSC03749.jpg', 113),
(494, 'PANTAI_SENDANGBIRU4_DSC04085.jpg', 84),
(495, 'PANTAI_SENDANGBIRU5_DSC04090.jpg', 84),
(496, 'WADUK_SELOREJO1_DSC02432.jpg', 105),
(497, 'HUTAN_PINUS_SEMERU3_DSC03755.jpg', 113),
(498, 'WADUK_SELOREJO3_DSC02507.jpg', 105),
(499, 'HUTAN_PINUS_SEMERU4_DSC03757.jpg', 113),
(500, 'WADUK_SELOREJO4_DSC02513.jpg', 105),
(501, 'HUTAN_PINUS_SEMERU5_DSC03768.jpg', 113),
(502, 'WADUK_SELOREJO5_DSC02519.jpg', 105),
(503, 'PANTAI_SIPELOT1_DSC04095.jpg', 86),
(504, 'PANTAI_SIPELOT2_DSC04096.jpg', 86),
(505, 'PANTAI_SIPELOT3_DSC04097.jpg', 86),
(506, 'PANTAI_SIPELOT4_DSC04101.jpg', 86),
(507, 'PANTAI_SIPELOT5_DSC04103.jpg', 86),
(508, 'KAMPUNG_RIVER_TUBING_PAMOTAN1_DSC03791.jpg', 114),
(509, 'SUMBER_JENON1_DSC03595.jpg', 120),
(510, 'KAMPUNG_RIVER_TUBING_PAMOTAN2_DSC03792.jpg', 114),
(511, 'SUMBER_JENON2_DSC03599.jpg', 120),
(512, 'KAMPUNG_RIVER_TUBING_PAMOTAN3_IMG_20170826_154229_659.jpg', 114),
(513, 'SUMBER_JENON3_DSC03600.jpg', 120),
(514, 'SUMBER_JENON4_DSC03601.jpg', 120),
(515, 'SUMBER_JENON5_DSC03603.jpg', 120),
(516, 'KARANGKATES1_DSC02928.jpg', 106),
(517, 'KARANGKATES2_DSC02932.jpg', 106),
(518, 'KARANGKATES3_DSC02939.jpg', 106),
(519, 'KARANGKATES4_DSC02941.jpg', 106),
(520, 'KARANGKATES5_DSC02952.jpg', 106),
(521, 'SUMBER_SIRAH1_DSC02585.jpg', 121),
(522, 'SUMBER_SIRAH2_DSC02590.jpg', 121),
(523, 'SUMBER_SIRAH3_DSC02591.jpg', 121),
(524, 'MASJID_TIBAN3_DSC02627.jpg', 8),
(525, 'SUMBER_SIRAH4_DSC02593.jpg', 121),
(526, 'MASJID_TIBAN4_DSC02628.jpg', 8),
(527, 'SUMBER_SIRAH5_DSC02596.jpg', 121),
(528, 'MASJID_TIBAN5_DSC02635.jpg', 8),
(529, 'HAWAI_WATERPARK1_DSC03586.jpg', 94),
(530, 'HAWAI_WATERPARK2_DSC03588.jpg', 94),
(531, 'HAWAI_WATERPARK3_DSC03590.jpg', 94),
(532, 'HAWAI_WATERPARK4_DSC03593.jpg', 94),
(533, 'HAWAI_WATERPARK5_DSC03594.jpg', 94),
(534, 'SUMBER_TAMAN1_DSC03621.jpg', 122),
(535, 'SUMBER_TAMAN2_DSC03623.jpg', 122),
(536, 'SUMBER_TAMAN3_DSC03625.jpg', 122),
(537, 'SUMBER_TAMAN4_DSC03628.jpg', 122),
(538, 'SUMBER_TAMAN5_DSC03629.jpg', 122),
(539, 'SUMBER_AIR_KRABYAKAN1_DSC03089.jpg', 125),
(540, 'PADEPOKAN_ASMARA_BANGUN1__DSC2829.jpg', 65),
(541, 'SUMBER_AIR_KRABYAKAN2_DSC03096.jpg', 125),
(542, 'PADEPOKAN_ASMARA_BANGUN2_DSC03426.jpg', 65),
(543, 'PADEPOKAN_ASMARA_BANGUN3_DSC03429.jpg', 65),
(544, 'SUMBER_AIR_KRABYAKAN3_DSC03098.jpg', 125),
(545, 'PADEPOKAN_ASMARA_BANGUN4_DSC03436.jpg', 65),
(546, 'SUMBER_AIR_KRABYAKAN4_DSC03110.jpg', 125),
(547, 'PADEPOKAN_ASMARA_BANGUN5_DSC03464.jpg', 65),
(548, 'SUMBER_AIR_KRABYAKAN5_DSC03112.jpg', 125),
(549, 'NDAYUNG_RAFTING1_DSC03284.jpg', 116),
(550, 'NDAYUNG_RAFTING2_DSC03286.jpg', 116),
(551, 'NDAYUNG_RAFTING3_DSC03287.jpg', 116),
(552, 'P-WEC1_DSC03557.jpg', 95),
(553, 'NDAYUNG_RAFTING4_DSC03291.jpg', 116),
(554, 'NDAYUNG_RAFTING5_DSC03293.jpg', 116),
(555, 'P-WEC2_DSC03559.jpg', 95),
(556, 'P-WEC3_DSC03561.jpg', 95),
(557, 'P-WEC4_DSC03563.jpg', 95),
(558, 'P-WEC5_DSC03566.jpg', 95),
(559, 'LEMBAH_TUMPANG1_DSC03198.jpg', 115),
(560, 'DESA_WISATA_NGADAS1_DSC04175.jpg', 100),
(561, 'PETERNAKAN_LEBAH_RIMBA_RAYA1__DSC2641.jpg', 118),
(562, 'LEMBAH_TUMPANG2_DSC03200.jpg', 115),
(563, 'DESA_WISATA_NGADAS2_DSC04178.jpg', 100),
(564, 'LEMBAH_TUMPANG3_DSC03208.jpg', 115),
(565, 'DESA_WISATA_NGADAS3_DSC04179.jpg', 100),
(566, 'PETERNAKAN_LEBAH_RIMBA_RAYA3_DSC03048.jpg', 118),
(567, 'LEMBAH_TUMPANG4_DSC03218.jpg', 115),
(568, 'PETERNAKAN_LEBAH_RIMBA_RAYA4_DSC03049.jpg', 118),
(569, 'DESA_WISATA_NGADAS4_DSC04182.jpg', 100),
(570, 'LEMBAH_TUMPANG5_DSC03223.jpg', 115),
(571, 'DESA_WISATA_NGADAS5_DSC04187.jpg', 100),
(572, 'PETERNAKAN_LEBAH_RIMBA_RAYA5_DSC03061.jpg', 118),
(573, 'SENGKALING1__DSC2801.jpg', 30),
(574, 'SENGKALING2_DSC03366.jpg', 30),
(575, 'SENGKALING3_DSC03379.jpg', 30),
(576, 'SENGKALING4_DSC03388.jpg', 30),
(577, 'SENGKALING5_DSC03390.jpg', 30),
(578, 'WISATA_PETIK_JERUK_DAU1_DSC03567.jpg', 91),
(579, 'WISATA_PETIK_JERUK_DAU2_DSC03568.jpg', 91),
(581, 'WISATA_PETIK_JERUK_DAU3_DSC03569.jpg', 91),
(582, 'LEDOK_AMPRONG_TUBING1__DSC2739.jpg', 124),
(583, 'LEDOK_AMPRONG_TUBING2__DSC2743.jpg', 124),
(584, 'LEDOK_AMPRONG_TUBING3_DSC03268.jpg', 124),
(585, 'WISATA_PETIK_JERUK_DAU4_DSC03571.jpg', 91),
(586, 'LEDOK_AMPRONG_TUBING4_DSC03275.jpg', 124),
(587, 'WISATA_PETIK_JERUK_DAU5_DSC03572.jpg', 91),
(588, 'LEDOK_AMPRONG_TUBING5_DSC03277.jpg', 124),
(589, 'LEMBAH_DIENG1_DSC03535.jpg', 123),
(590, 'LEMBAH_DIENG2_DSC03540.jpg', 123),
(591, 'LEMBAH_DIENG3_DSC03541.jpg', 123),
(592, 'LEMBAH_DIENG5_DSC03546.jpg', 123),
(593, 'TNBTS1_DJI_0042.jpg', 112),
(594, 'TNBTS2_DJI_0069.jpg', 112),
(595, 'TNBTS3_DSC04382.jpg', 112),
(596, 'TNBTS4_DSC04391.jpg', 112),
(597, 'TNBTS5_DSC04415.jpg', 112),
(598, 'SALAK_4.PNG', 92),
(599, 'SALAK_5.PNG', 92),
(600, 'bedengan1.jpg', 104),
(601, 'bedengan2.jpg', 104),
(602, 'LEMBAH_DIENG_1.PNG', 123),
(603, 'PAMOTAN_1.PNG', 114),
(604, 'coban_rondo.jpg', 110),
(605, 'PAMOTAN_2.PNG', 114),
(606, 'PANTAI_TAMBAN.PNG', 87),
(607, 'jembatan_panjang1.jpg', 80),
(608, 'jembatan_panjang2.jpg', 80),
(609, 'jembatan_panjang3.jpg', 80),
(610, 'jembatan_panjang4.jpg', 80),
(611, 'RIMBA_RAYA.PNG', 118),
(612, 'SELOREJO.PNG', 105),
(613, 'SUMBER_MARON.PNG', 102),
(614, 'WEDI_AWU.PNG', 90),
(615, 'CANDI_JAGO1_DSC02691.jpg', 15),
(616, 'CANDI_JAGO2_DSC02697.jpg', 15),
(617, 'CANDI_JAGO3_DSC02704.jpg', 15),
(618, 'CANDI_JAGO4_DSC02710.jpg', 15),
(619, 'CANDI_JAGO5_DSC02715.jpg', 15),
(620, 'GUNUNG_KAWI1_DSC02978.jpg', 35),
(621, 'GUNUNG_KAWI2_DSC02993.jpg', 35),
(622, 'GUNUNG_KAWI3_DSC02994.jpg', 35),
(623, 'GUNUNG_KAWI4_DSC02999.jpg', 35),
(624, 'GUNUNG_KAWI5_DSC03001.jpg', 35),
(625, 'GUNUNG_KAWI1_DSC02978.jpg', 39),
(626, 'GUNUNG_KAWI2_DSC02993.jpg', 39),
(627, 'GUNUNG_KAWI3_DSC02994.jpg', 39),
(628, 'GUNUNG_KAWI4_DSC02999.jpg', 39),
(629, 'GUNUNG_KAWI5_DSC03001.jpg', 39),
(630, 'PANTAI_BAJULMATI4_DSC02859.jpg', 25),
(631, 'PANTAI_BALEKAMBANG1_DSC02882.jpg', 29),
(632, 'PANTAI_BALEKAMBANG2_DSC02892.jpg', 29),
(633, 'PANTAI_BALEKAMBANG3_DSC02897.jpg', 29),
(634, 'PANTAI_BALEKAMBANG4_DSC02907.jpg', 29),
(635, 'PANTAI_BALEKAMBANG5_DSC02919.jpg', 29),
(636, 'PANTAI_BATU_BENGKUNG1_DSC02777.jpg', 23),
(637, 'PANTAI_BATU_BENGKUNG2_DSC02782.jpg', 23),
(638, 'PANTAI_BATU_BENGKUNG3_DSC02824.jpg', 23),
(639, 'PANTAI_BATU_BENGKUNG4_DSC02825.jpg', 23),
(640, 'PANTAI_BATU_BENGKUNG5_DSC02843.jpg', 23),
(641, 'PANTAI_NGLIYEP1_DSC02718.jpg', 18),
(642, 'PANTAI_NGLIYEP2_DSC02720.jpg', 18),
(643, 'PANTAI_NGLIYEP3_DSC02728.jpg', 18),
(644, 'PANTAI_NGLIYEP4_DSC02730.jpg', 18),
(645, 'PANTAI_NGLIYEP5_DSC02736.jpg', 18);
-- --------------------------------------------------------
--
-- Table structure for table `grafik`
--
-- Monthly visitor-statistics series backing the admin "Grafik" dashboard
-- charts. `kode` appears to select between two plotted series (values 0
-- and 1 in the seed data below) -- NOTE(review): confirm what each series
-- means (possibly domestic vs. foreign visitors) against the Grafik
-- controller; it is not determinable from this dump alone.
CREATE TABLE IF NOT EXISTS `grafik` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`bulan` varchar(20) DEFAULT NULL,
`nilai` int(5) DEFAULT NULL,
`kode` int(2) DEFAULT NULL,
`tahun` int(11) NOT NULL,
`pendapatan` double NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=25 ;
--
-- Dumping data for table `grafik`
--
-- Seed rows: 12 months per series. NOTE(review): rows 14 and 20 carry
-- tahun=2017 while the rest of series 1 is 2018 -- verify whether this is
-- intentional data or an entry error before relying on year filters.
INSERT INTO `grafik` (`id`, `bulan`, `nilai`, `kode`, `tahun`, `pendapatan`) VALUES
(1, 'Januari', 23, 0, 2018, 10),
(2, 'Februari', 55, 0, 2018, 0),
(3, 'Maret', 67, 0, 2018, 0),
(4, 'April', 80, 0, 2018, 0),
(5, 'Mei', 82, 0, 2018, 0),
(6, 'Juni', 87, 0, 2018, 0),
(7, 'Juli', 89, 0, 2018, 0),
(8, 'Agustus', 92, 0, 2018, 0),
(9, 'September', 65, 0, 2018, 0),
(10, 'Oktober', 78, 0, 2018, 0),
(11, 'November', 90, 0, 2018, 0),
(12, 'Desember', 99, 0, 2018, 0),
(13, 'Januari', 15, 1, 2018, 10),
(14, 'Februari', 35, 1, 2017, 0),
(15, 'Maret', 47, 1, 2018, 0),
(16, 'April', 50, 1, 2018, 0),
(17, 'Mei', 62, 1, 2018, 0),
(18, 'Juni', 77, 1, 2018, 0),
(19, 'Juli', 49, 1, 2018, 0),
(20, 'Agustus', 52, 1, 2017, 0),
(21, 'September', 65, 1, 2018, 0),
(22, 'Oktober', 78, 1, 2018, 0),
(23, 'November', 80, 1, 2018, 0),
(24, 'Desember', 89, 1, 2018, 0);
-- --------------------------------------------------------
--
-- Table structure for table `info`
--
-- Yearly visitor/revenue totals shown on the public site. Unusual design:
-- the year itself is the AUTO_INCREMENT primary key, so an INSERT that
-- omits `info_tahun` receives the next year (2018 after the seed row).
CREATE TABLE IF NOT EXISTS `info` (
`info_tahun` int(11) NOT NULL AUTO_INCREMENT,
`info_jum_lokal` int(11) NOT NULL,
`info_jum_manca` int(11) NOT NULL,
`info_pendapatan` int(11) NOT NULL,
PRIMARY KEY (`info_tahun`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=2018 ;
--
-- Dumping data for table `info`
--
-- Columns: year, domestic visitors, foreign visitors, revenue
-- (presumably in IDR -- confirm against the application).
INSERT INTO `info` (`info_tahun`, `info_jum_lokal`, `info_jum_manca`, `info_pendapatan`) VALUES
(2017, 2000000, 790000, 1000000000);
-- --------------------------------------------------------
--
-- Table structure for table `kategori`
--
-- Tourism categories (nature, culture, history, ...) with their menu icons.
-- AUTO_INCREMENT=14 with only 8 seed rows indicates earlier rows were
-- deleted; ids are therefore not guaranteed contiguous.
CREATE TABLE IF NOT EXISTS `kategori` (
`kategori_id` int(11) NOT NULL AUTO_INCREMENT,
`kategori_nama` text NOT NULL,
`kategori_icon` text NOT NULL,
PRIMARY KEY (`kategori_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=14 ;
--
-- Dumping data for table `kategori`
--
INSERT INTO `kategori` (`kategori_id`, `kategori_nama`, `kategori_icon`) VALUES
(1, 'Alam', 'alam.png'),
(2, 'Budaya', 'budaya.png'),
(3, 'Sejarah', 'sejarah.png'),
(4, 'Pendidikan', 'education.png'),
(5, 'Pertanian', 'pertanian.png'),
(6, 'Religi', 'religi.png'),
(7, 'Bahari', 'bahari.png'),
(8, 'Kuliner', 'kuliner.png');
-- --------------------------------------------------------
--
-- Table structure for table `kategori_wisata`
--
-- Many-to-many join table linking destinations (`wisata`) to categories
-- (`kategori`). Composite primary key prevents duplicate pairs; the extra
-- KEY on `kategori_id` supports lookups by category. No FOREIGN KEY
-- constraints are declared, so referential integrity is presumably
-- enforced by the application -- verify before relying on it.
CREATE TABLE IF NOT EXISTS `kategori_wisata` (
`wisata_id` int(11) NOT NULL DEFAULT '0',
`kategori_id` int(11) NOT NULL DEFAULT '0',
PRIMARY KEY (`wisata_id`,`kategori_id`),
KEY `kategori_id` (`kategori_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `komentar`
--
-- Visitor comments and star ratings for a destination (`wisata_id`).
-- `komentar_ip` stores the commenter's IP as text (some seed rows are
-- empty); `komentar_nilai_rating` is a 0-5 value stored as double.
-- No FK to the wisata table -- integrity is left to the application.
CREATE TABLE IF NOT EXISTS `komentar` (
`komentar_id` int(11) NOT NULL AUTO_INCREMENT,
`komentar_ip` text NOT NULL,
`komentar_deskripsi` text NOT NULL,
`komentar_tgl` date NOT NULL,
`komentar_nilai_rating` double NOT NULL,
`wisata_id` int(11) NOT NULL,
PRIMARY KEY (`komentar_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=44 ;
--
-- Dumping data for table `komentar`
--
-- Test/seed comments; ids have gaps (deleted rows) and several rows have
-- empty IPs or empty descriptions, which the UI apparently tolerates.
INSERT INTO `komentar` (`komentar_id`, `komentar_ip`, `komentar_deskripsi`, `komentar_tgl`, `komentar_nilai_rating`, `wisata_id`) VALUES
(10, '192.168.3.11', 'mantap', '2017-11-21', 5, 30),
(11, '192.168.3.11', '', '2017-11-24', 5, 9),
(12, '192.168.3.11', '', '2017-11-26', 5, 47),
(13, '192.168.3.11', 'Ok', '2017-11-26', 5, 47),
(14, '192.168.3.11', 'Sip', '2017-11-26', 5, 18),
(16, '192.168.3.11', 'Keren', '2017-11-26', 5, 9),
(21, '172.16.31.10', 'Tes', '2017-12-31', 5, 23),
(22, '172.16.31.10', 'Tes', '2017-12-31', 5, 23),
(25, '', '', '2018-01-02', 5, 47),
(26, '192.168.127.12', 'tes', '2018-01-02', 5, 25),
(27, '', '', '2018-01-04', 5, 47),
(29, '172.16.17.32', '', '2018-01-04', 4, 9),
(30, '172.16.58.3', 'text', '2018-01-05', 0, 29),
(31, '172.16.31.10', '', '2018-01-05', 5, 9),
(32, '192.168.3.11', 'ttte', '2018-01-05', 0, 9),
(33, '172.16.17.32', 'cozy places', '2018-01-06', 5, 49),
(34, '172.16.17.32', 'cozy', '2018-01-06', 5, 49),
(35, '172.16.17.32', 'ff', '2018-01-06', 5, 9),
(36, '172.16.17.32', 'fd', '2018-01-06', 4, 18),
(37, '172.16.17.32', '', '2018-01-06', 1, 25),
(38, '172.16.17.32', 'fff', '2018-01-06', 3, 30),
(39, '172.16.17.32', 'tes', '2018-01-06', 3, 97),
(40, '172.16.17.32', 'ess', '2018-01-06', 3, 15),
(41, '172.16.17.32', 'essd', '2018-01-06', 3, 15),
(42, '172.16.17.32', 'essd', '2018-01-06', 5, 15),
(43, '172.16.17.32', 'tes', '2018-01-06', 4, 8);
-- --------------------------------------------------------
--
-- Table structure for table `kontak`
--
-- Single-row contact-page content: map coordinates, address/phone HTML,
-- and SEO meta fields. NOTE(review): `kontak_judul` is varchar(30) and the
-- seeded value 'Dinas Komunikasi dan Informati' is exactly 30 characters,
-- i.e. it was truncated on insert -- consider widening the column.
-- The seed row contains anonymization placeholders (<NAME>, <EMAIL>)
-- left by dataset scrubbing; replace them before production use.
CREATE TABLE IF NOT EXISTS `kontak` (
`id_kontak` int(11) NOT NULL AUTO_INCREMENT,
`kontak_lat` varchar(100) DEFAULT NULL,
`kontak_long` varchar(100) DEFAULT NULL,
`kontak_deskripsi` text,
`kontak_judul` varchar(30) DEFAULT NULL,
`kontak_title_meta` varchar(200) DEFAULT NULL,
`kontak_deskripsi_meta` text,
`kontak_keyword_meta` varchar(200) DEFAULT NULL,
PRIMARY KEY (`id_kontak`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=2 ;
--
-- Dumping data for table `kontak`
--
INSERT INTO `kontak` (`id_kontak`, `kontak_lat`, `kontak_long`, `kontak_deskripsi`, `kontak_judul`, `kontak_title_meta`, `kontak_deskripsi_meta`, `kontak_keyword_meta`) VALUES
(1, '-7.893785', '112.666007', '<p><span style="color: #222222; font-family: arial, sans-serif; font-size: 13px;">Jl. <NAME>a No.348b, Purwodadi, Blimbing, Malang City, East Java 65126</span></p>\r\n<p>Telepon : (0341) 408788</p>\r\n<p><span class="contact-info">Email : <EMAIL></span></p>', 'Dinas Komunikasi dan Informati', 'Hotel Viride', '<p><span class="contact-info">Address: <em>322 Moon St, Venice Italy, 1231</em></span><br /><span class="contact-info">Phone: <em>0039 12345 6789</em></span><br /><span class="contact-info">Email: <a href="#"><em><EMAIL></em></a></span></p>', 'Hotel Viride');
-- --------------------------------------------------------
--
-- Table structure for table `language`
--
-- i18n phrase dictionary: `phrase` is the lookup key used in templates,
-- with English and Indonesian translations. Stored in utf8/unicode_ci
-- (unlike the latin1 tables above) because it carries translated text.
-- NOTE(review): several English values start with a literal '\r\n' from
-- copy-paste, and ids 33/37 duplicate the same phrase -- harmless for
-- display but worth cleaning up.
CREATE TABLE IF NOT EXISTS `language` (
`phrase_id` int(11) NOT NULL AUTO_INCREMENT,
`phrase` longtext COLLATE utf8_unicode_ci NOT NULL,
`english` longtext COLLATE utf8_unicode_ci NOT NULL,
`indonesian` longtext COLLATE utf8_unicode_ci NOT NULL,
PRIMARY KEY (`phrase_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci AUTO_INCREMENT=247 ;
--
-- Dumping data for table `language`
--
INSERT INTO `language` (`phrase_id`, `phrase`, `english`, `indonesian`) VALUES
(1, 'Beranda', 'Home', 'Beranda'),
(2, 'Destinasi Wisata', 'Travel Destinations', 'Destinasi Wisata'),
(3, 'Event Wisata', 'Tour Event', 'Event Wisata'),
(4, 'Berita', 'News', 'Berita'),
(5, 'Tentang Kami', 'About Us', 'Tentang Kami'),
(6, 'Kontak Kami', 'Contact Us', 'Kontak Kami'),
(7, 'Terdapat Lebih dari <a href="#">238 Tempat Wisata</a> Menunggu Anda', 'More than <a href="#"> 238 Places of Interest </a> Waiting for you', 'Terdapat Lebih dari <a href="#">238 Tempat Wisata</a> Menunggu Anda'),
(8, 'Destinasi Wisata Lainnya', '\r\nOther Travel Destinations', 'Destinasi Wisata Lainnya'),
(31, 'Pilih Bahasa', 'Select Language', 'Pilih Bahasa'),
(9, 'Baca Selengkapnya', '\r\nRead more', 'Baca Selengkapnya'),
(10, 'Sekilas Pandang', '\r\nOverview', 'Sekilas Pandang'),
(11, 'Kabupaten Malang adalah kabupaten terluas kedua di Jawa Timur setelah Kabupaten Banyuwangi dan merupakan kabupaten dengan populasi terbesar di Jawa Timur. Ibu kota Kabupaten Malang adalah Kepanjen.', '\r\nMalang Regency is the second largest regency in East Java after Banyuwangi Regency and is the largest population districts in East Java. The capital of Malang Regency is Kepanjen.', 'Kabupaten Malang adalah kabupaten terluas kedua di Jawa Timur setelah Kabupaten Banyuwangi dan merupakan kabupaten dengan populasi terbesar di Jawa Timur. Ibu kota Kabupaten Malang adalah Kepanjen.'),
(12, 'Jenis Wisata', '\r\nTour Type', 'Jenis Wisata'),
(13, 'Alam', 'Natural', 'Alam'),
(14, 'Budaya', 'Culture', 'Budaya'),
(15, 'Sejarah', 'History', 'Sejarah'),
(16, 'Pendidikan', 'Education', 'Pendidikan'),
(17, 'Pertanian', '\r\nAgriculture', 'Pertanian'),
(18, 'Religi', '\r\nReligion', 'Religi'),
(19, 'Bahari', 'Nautical', 'Bahari'),
(20, 'Kuliner', '\r\nCulinary', 'Kuliner'),
(21, 'Wahana', 'Rides', 'Wahana'),
(22, 'Cari Berita', 'Search News', 'Cari Berita'),
(23, 'Direktori Berita', 'News Directory', 'Direktori Berita'),
(24, 'Tags Berita', 'News Tags', 'Tags Berita'),
(25, 'Selengkapnya', 'Learn more\r\n', 'Selengkapnya'),
(26, 'Semua', 'All', 'Semua'),
(27, 'Cari Event', '\r\nSearch Event', 'Cari Event'),
(28, 'Direktori Event', '\r\nEvent Directory', 'Direktori Event'),
(29, 'Tags Event', '\r\nEvent Tags', 'Tags Event'),
(30, 'Masukkan Kata', '\r\nEnter a word', 'Masukkan Kata'),
(32, 'Masukkan Nama Event', 'Enter Event Name', 'Masukkan Nama Event'),
(33, 'Masukkan Destinasi Wisata', '\r\nEnter Tour Destinations', 'Masukkan Destinasi Wisata'),
(34, 'Tags Destinasi Wisata', '\r\nTags Travel Destinations', 'Tags Destinasi Wisata'),
(35, 'Fasilitas Wisata', 'Tour Facility', 'Fasilitas Wisata'),
(36, 'Fasilitas Pendukung', '\r\nSupporting Facilities', 'Fasilitas Pendukung'),
(37, 'Masukkan Destinasi Wisata', '\r\nEnter Tour Destinations', 'Masukkan Destinasi Wisata'),
(38, 'Kami ingin mengenal Anda', 'We want to know you', 'Kami ingin mengenal Anda'),
(39, 'Pariwisata Kabupaten Malang', '\r\nTourism Malang Regency', 'Pariwisata Kabupaten Malang'),
(245, 'Terdapat Lebih dari', 'More than', 'Terdapat Lebih dari'),
(246, 'Tempat Wisata Menunggu Anda', 'Places to Stay Awaiting You', 'Tempat Wisata Menunggu Anda');
-- --------------------------------------------------------
--
-- Table structure for table `mainmenu`
--
-- Top-level admin sidebar menu. `seq` is the surrogate key while `idmenu`
-- is the id referenced by `submenu.mainmenu_idmenu` and the access tables
-- (in the seed data the two happen to coincide). `link_menu` of '#' marks
-- a parent entry whose children live in `submenu`. `entry_date` auto-updates
-- on every row modification (ON UPDATE CURRENT_TIMESTAMP).
CREATE TABLE IF NOT EXISTS `mainmenu` (
`seq` int(11) NOT NULL AUTO_INCREMENT,
`idmenu` int(11) NOT NULL,
`nama_menu` varchar(50) NOT NULL,
`active_menu` varchar(50) NOT NULL,
`icon_class` varchar(50) NOT NULL,
`link_menu` varchar(50) NOT NULL,
`menu_akses` varchar(12) NOT NULL,
`entry_date` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`entry_user` varchar(50) DEFAULT NULL,
PRIMARY KEY (`seq`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=19 ;
--
-- Dumping data for table `mainmenu`
--
INSERT INTO `mainmenu` (`seq`, `idmenu`, `nama_menu`, `active_menu`, `icon_class`, `link_menu`, `menu_akses`, `entry_date`, `entry_user`) VALUES
(1, 1, 'Dashboard', '', 'menu-icon fa fa-dashboard', 'Dashboard', '', '2017-10-19 15:48:01', NULL),
(2, 2, 'Berita', '', 'menu-icon fa fa-credit-card', 'C_berita', '', '2017-10-25 03:10:59', NULL),
(3, 3, 'Event', '', 'menu-icon fa fa-calendar', 'C_event', '', '2017-10-25 03:11:02', NULL),
(4, 4, 'Wisata', '', 'menu-icon fa fa-binoculars', '#', '', '2017-10-25 03:10:55', NULL),
(6, 6, 'About', '', 'menu-icon fa fa-newspaper-o', 'About', '', '2017-11-01 18:30:17', NULL),
(5, 5, 'Wahana', '', 'menu-icon fa fa-automobile', 'Wahana', '', '2017-10-29 18:28:17', NULL),
(7, 7, 'Kontak', '', 'menu-icon fa fa-book', 'Kontak', '', '2017-11-01 19:03:49', NULL),
(8, 8, 'User', '', 'menu-icon fa fa-user', 'User', '', '2017-12-06 05:53:46', NULL),
(9, 9, 'Setup Web', '', 'menu-icon fa fa-cogs', '#', '', '2017-12-11 09:58:45', NULL),
(10, 10, 'Grafik', '', 'menu-icon fa fa-bar-chart', '#', '', '2017-12-11 12:07:33', NULL),
(11, 11, 'Pendapatan', '', 'menu-icon fa fa-money', 'Pendapatan', '', '2017-12-11 23:26:43', NULL);
-- --------------------------------------------------------
--
-- Table structure for table `submenu`
--
-- Second-level admin menu entries; `mainmenu_idmenu` points at
-- `mainmenu.idmenu` (no FK declared -- enforced by the application).
-- In the seed data, id 4 = 'Wisata' and id 9/10 = 'Setup Web'/'Grafik'
-- parents, matching the '#' link_menu rows above.
CREATE TABLE IF NOT EXISTS `submenu` (
`id_sub` int(11) NOT NULL AUTO_INCREMENT,
`nama_sub` varchar(50) NOT NULL,
`mainmenu_idmenu` int(11) NOT NULL,
`active_sub` varchar(20) NOT NULL,
`icon_class` varchar(100) NOT NULL,
`link_sub` varchar(70) NOT NULL,
`sub_akses` varchar(12) NOT NULL,
`entry_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`entry_user` varchar(20) DEFAULT NULL,
PRIMARY KEY (`id_sub`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=7 ;
--
-- Dumping data for table `submenu`
--
INSERT INTO `submenu` (`id_sub`, `nama_sub`, `mainmenu_idmenu`, `active_sub`, `icon_class`, `link_sub`, `sub_akses`, `entry_date`, `entry_user`) VALUES
(1, 'Kategori Wisata', 4, '', '', 'Kategori', '', '2017-10-19 20:10:19', NULL),
(2, 'Fasilitas Wisata', 4, '', '', 'Fasilitas_wisata', '', '2017-10-29 15:08:01', NULL),
(3, 'Fasilitas Pendukung', 4, '', '', 'Fasilitas_pendukung', '', '2017-10-29 15:08:10', NULL),
(4, 'Set Up Content', 9, '', '', 'Setup', '', '2017-12-11 10:00:08', NULL),
(5, 'User Privillages', 9, '', '', 'User/user_maintenance', '', '2017-12-11 10:01:14', NULL),
(6, 'Grafik Wisatawan', 10, '', '', 'Grafik', '', '2017-12-11 12:08:35', NULL);
-- --------------------------------------------------------
--
-- Table structure for table `tab_akses_mainmenu`
--
-- Per-role access flags for top-level menu entries. `c`/`r`/`u`/`d` are
-- 0/1 permission flags, presumably create/read/update/delete -- confirm
-- against the User privileges controller. `id_level` references the
-- user-role level; in the seed data only level 1 (admin) is granted read.
CREATE TABLE IF NOT EXISTS `tab_akses_mainmenu` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`id_menu` int(11) NOT NULL,
`id_level` int(11) NOT NULL,
`c` int(11) DEFAULT '0',
`r` int(11) DEFAULT '0',
`u` int(11) DEFAULT '0',
`d` int(11) DEFAULT '0',
`entry_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`entry_user` varchar(50) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=23 ;
--
-- Dumping data for table `tab_akses_mainmenu`
--
-- NOTE(review): row 1 stores NULL for c/u/d while later rows use 0 --
-- application code must treat NULL and 0 the same for these flags.
INSERT INTO `tab_akses_mainmenu` (`id`, `id_menu`, `id_level`, `c`, `r`, `u`, `d`, `entry_date`, `entry_user`) VALUES
(1, 1, 1, NULL, 1, NULL, NULL, '2017-09-25 16:49:01', 'direktur'),
(2, 2, 1, 0, 1, 0, 0, '2017-10-19 15:29:29', ''),
(3, 3, 1, 0, 1, 0, 0, '2017-10-19 16:39:59', ''),
(4, 4, 1, 0, 1, 0, 0, '2017-10-19 19:41:35', ''),
(5, 5, 1, 0, 1, 0, 0, '2017-10-29 18:21:31', ''),
(6, 6, 1, 0, 1, 0, 0, '2017-11-01 18:28:08', ''),
(7, 7, 1, 0, 1, 0, 0, '2017-11-01 19:03:55', ''),
(8, 8, 1, 0, 1, 0, 0, '2017-12-06 05:54:37', ''),
(9, 9, 1, 0, 1, 0, 0, '2017-12-11 09:59:05', ''),
(10, 10, 1, 0, 1, 0, 0, '2017-12-11 12:07:49', ''),
(11, 11, 1, 0, 1, 0, 0, '2017-12-11 23:27:11', '');
-- --------------------------------------------------------
--
-- Table structure for table `tab_akses_submenu`
--
-- Per-role access flags for submenu entries; same c/r/u/d flag layout as
-- `tab_akses_mainmenu` above, keyed by `id_sub_menu` + `id_level`.
CREATE TABLE IF NOT EXISTS `tab_akses_submenu` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`id_sub_menu` int(11) NOT NULL,
`id_level` int(11) NOT NULL,
`c` int(11) DEFAULT '0',
`r` int(11) DEFAULT '0',
`u` int(11) DEFAULT '0',
`d` int(11) DEFAULT '0',
`entry_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`entry_user` varchar(30) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=7 ;
--
-- Dumping data for table `tab_akses_submenu`
--
INSERT INTO `tab_akses_submenu` (`id`, `id_sub_menu`, `id_level`, `c`, `r`, `u`, `d`, `entry_date`, `entry_user`) VALUES
(1, 1, 1, 0, 1, 0, 0, '2017-12-11 10:44:02', ''),
(2, 2, 1, 0, 1, 0, 0, '2017-10-19 20:11:54', ''),
(3, 3, 1, 0, 1, 0, 0, '2017-10-19 20:11:55', ''),
(4, 4, 1, 0, 1, 0, 0, '2017-12-11 10:02:00', ''),
(5, 5, 1, 0, 1, 0, 0, '2017-12-11 10:02:00', ''),
(6, 6, 1, 0, 1, 0, 0, '2017-12-11 12:08:59', '');
-- --------------------------------------------------------
--
-- Table structure for table `tag`
--
-- Content tags (names only). Empty in this dump (AUTO_INCREMENT=1,
-- no seed rows).
CREATE TABLE IF NOT EXISTS `tag` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`nama` varchar(64) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Table structure for table `t_setup`
--
-- Generic key/value site configuration: `fc_param` is the setting group
-- (WISATA featured images, social-media URLs, HEADER slides, SEKILAS
-- overview HTML, ...), `fc_kode` a sub-key within the group, and `fc_isi`
-- the value (filename, URL, or HTML fragment).
CREATE TABLE IF NOT EXISTS `t_setup` (
`ID` int(11) NOT NULL AUTO_INCREMENT,
`fc_param` char(20) CHARACTER SET latin1 COLLATE latin1_general_ci NOT NULL DEFAULT '',
`fc_kode` char(4) CHARACTER SET latin1 COLLATE latin1_general_ci NOT NULL DEFAULT '',
`fc_isi` text CHARACTER SET latin1 COLLATE latin1_general_ci,
`fc_link` char(200) DEFAULT NULL,
PRIMARY KEY (`ID`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 ROW_FORMAT=FIXED AUTO_INCREMENT=42 ;
--
-- Dumping data for table `t_setup`
--
-- For WISATA rows, `fc_kode` holds a wisata id and `fc_isi` the featured
-- image filename for that destination.
INSERT INTO `t_setup` (`ID`, `fc_param`, `fc_kode`, `fc_isi`, `fc_link`) VALUES
(1, 'WISATA', '8', '_DSC2653 (FILEminimizer).JPG', NULL),
(2, 'WISATA', '23', '_DSC2653 (FILEminimizer).JPG', NULL),
(27, 'WISATA', '18', '_DSC2808 (FILEminimizer).JPG', NULL),
(5, 'WISATA', '29', 'DSC02923 (FILEminimizer).JPG', NULL),
(7, 'WISATA', '35', 'DSC02923 (FILEminimizer).JPG', NULL),
(11, 'FACEBOOK', '1', 'facebook', NULL),
(12, 'TWITTER', '1', 'twitter.com', NULL),
(13, 'INSTAGRAM', '1', 'instagram.com', NULL),
(14, 'YOUTUBE', '1', 'youtube.com', NULL),
(15, 'SEKILAS', '1', '<p>Kabupaten Malang adalah kabupaten terluas kedua di Jawa Timur setelah Kabupaten Banyuwangi dan merupakan kabupaten dengan populasi terbesar di Jawa Timur. Ibu kota Kabupaten Malang adalah Kepanjen</p>', NULL),
(36, 'HEADER', '1', '16.jpg', NULL),
(37, 'HEADER', '2', '16.jpg', NULL),
(38, 'HEADER', '3', '16.jpg', NULL),
(39, 'HEADER', '4', '16.jpg', NULL),
(40, 'HEADER', '5', '16.jpg', NULL),
(41, 'JUDUL', '1', '238', NULL);
-- --------------------------------------------------------
--
-- Table structure for table `user_type`
--
-- User role lookup; only the 'Administrator' role is seeded.
CREATE TABLE IF NOT EXISTS `user_type` (
`user_type_id` int(11) NOT NULL AUTO_INCREMENT,
`user_type_name` varchar(200) NOT NULL,
PRIMARY KEY (`user_type_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=2 ;
--
-- Dumping data for table `user_type`
--
INSERT INTO `user_type` (`user_type_id`, `user_type_name`) VALUES
(1, 'Administrator');
-- --------------------------------------------------------
--
-- Table structure for table `wahana`
--
-- Catalog of rides/activities (wahana) available at destinations: display
-- name, icon filename, and an Indonesian description (some values contain
-- HTML fragments). NOTE(review): some seed rows contain <NAME>
-- anonymization placeholders from dataset scrubbing -- restore real names
-- before production use.
CREATE TABLE IF NOT EXISTS `wahana` (
`wahana_id` int(11) NOT NULL AUTO_INCREMENT,
`wahana_nama` text NOT NULL,
`wahana_icon` varchar(100) NOT NULL,
`wahana_deskripsi` text NOT NULL,
PRIMARY KEY (`wahana_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=166 ;
--
-- Dumping data for table `wahana`
--
INSERT INTO `wahana` (`wahana_id`, `wahana_nama`, `wahana_icon`, `wahana_deskripsi`) VALUES
(1, 'BAN', 'BAN.png', 'Ban merupakan alat yang digunakan untuk memudahkan pengunjung dalam menikmati tempat wisata air.'),
(2, 'DOKUMENTASI FOTO', 'DOKUMENTASI_FOTO.png', 'Dokumentasi foto merupakan fasilitas yang dapat dimanfaatkan oleh pengunjung untuk mendapatkan dokumentasi foto dari kamera profesional. Dengan fasilitas ini, pengunjung tidak perlu takut kehilangan momen ketika mengunjungi tempat wisata.'),
(4, 'KUDA', 'KUDA.png', 'Kuda merupakan hewan yang disewakan kepada pengunjung untuk berkeliling di tempat wisata.'),
(5, 'KOLAM ANAK', 'KOLAM_ANAK.png', 'Kolam anak merupakan kolam yang digunakan untuk bermain anak-anak kecil. Di kolam ini biasanya banyak diisi permainan anak seperti seluncur kecil, dll.'),
(9, 'ATV', 'ATV.png', 'ATV merupakan sebuah kendaraan bermotor roda 4 yang dapat digunakan di segala medan seperti motor trail. Permainan ATV cocok untuk pengunjung yang senang memacu adrenalin.'),
(10, 'HAND BOAT', 'HAND_BOAT.png', 'Hand boat merupakan sebuah perahu kecil yang untuk memainkannya dengan mengayuh pedalnya menggunakan tangan. Hand boat bisa dimainkan 1 - 2 orang.'),
(13, 'PERAHU', 'PERAHU.png', 'Perahu merupakan sebuah fasilitas yang dapat digunakan pengunjung untuk mengelilingi tempat wisata. Perahu ini berada di objek wisata yang terdapat waduk atau danau.'),
(23, 'CAMPING GROUND', 'CAMPING_GROUND.png', 'Camping Ground merupakan area yang biasanya dapat digunakan untuk mendirikan tenda di sebuah tempat wisata. Kebanyakan camping ground tidak menyediakan tenda sehingga pengunjung bisa membawa tenda sendiri.'),
(24, 'K<NAME>ENANG', 'KOLAM_RENANG.png', 'Kolam renang merupakan kolam yang berfungsi untuk berolahraga renang. Kolam ini biasanya tidak terdapat banyak wahana permainan.'),
(35, 'BANANA BOAT', 'BANANA_BOAT.png', 'Banana boat merupakan permainan untuk pengunjung yang mempunyai nyali besar. Dalam permainan ini, banana boat ditarik sebuah speed boat berkecepatan tinggi, seringkali pengendara speed boat akan mempercepat laju dan belok secara mendadak untuk menggulingkan pengunjung yang menaikinya tetapi dengan syarat semua penumpang harus mengenakan safety vest.'),
(37, 'MINI PET ZOO', 'PET_ZOO.png', 'Mini pet zoo merupakan fasilitas kebun binatang mini yang disediakan di objek wisata. Di kebun binatang mini umumnya terdapat hewan seperti rusa, kelinci, dll.'),
(39, 'PLAYGROUND', 'PLAYGROUND.png', 'Playground merupakan area bermain yang ada di objek wisata. Di playground terdapat bermacam-macam permainan untuk anak kecil.'),
(44, 'KOLAM IKAN', 'KOLAM_IKAN.png', 'Kolam ikan merupakan sebuah kolam yang berisi ikan-ikan hias seeperti ikan mas, ikan koi, dll. Untuk kolam ikan biasanya hanya sebagai penghias saja.'),
(46, 'PANGGUNG PERTUNJUKAN', 'PANGGUNG_PERTUNJUKAN.png', 'Panggung pertunjukan merupakan sebuah tempat yang digunakan untuk mengadakan pentas atau pertunjukan.'),
(47, 'PELAMPUNG ', 'PELAMPUNG.png', 'Pelampung merupakan sebuah alat yang digunakan untuk melindungi badan agar dapat mangapung ketika berada di air. Pelampung ini digunakan ketika snorkeling, rafting, tubing, dll.'),
(52, 'WISATA PETIK', 'WISATA _PETIK.png', 'Biasanya wisata ini dilakukan pada perkebunan buah dan sayur.'),
(54, 'SEPEDA DRIFT ', 'DRIFT.png', 'Drift merupakan sebuah sepeda beroda 3 yang dapat digunakan untuk drift oleh penggunanya. Sepeda drift ini cocok untuk pengunjung yang menyukai permainan ekstrem.'),
(56, 'TERAPI HEWAN', 'TERAPI_HEWAN.png', 'Terapi yang ditujukan untuk pengunjung yang dimana ingin mencoba pengobatan dengan hewan, misalnya terapi ikan, terapi lebah, dll.'),
(57, '<NAME>', 'RUMAH_KELINCI.png', 'R<NAME> merupakan fasilitas yang ada di objek wisata yang berisi banyak kelinci. Pengunjung biasanya dapat langsung memberi makan kelinci.'),
(61, 'AULA', 'AULA.png', 'Aula merupakan sebuah gedung besar yang biasanya digunakan untuk mengadakan pertemuan atau acara. '),
(62, 'SEPEDA AIR ', 'SEPEDA_AIR.png', 'Sepeda air merupakan salah satu wahana permainan yang dimana pemainnya menaiki dan mengayuh sepeda. Sama dengan bersepeda pada umumnya, hanya saja sepeda ini dikayuh khusus di air.'),
(65, 'KERETA WISATA ', 'KERETA_WISATA.png', 'Kereta wisata merupakan sebuah permainan yang berbentuk kereta mini. Kereta wisata ini muat untuk anak kecil maupun orang dewasa. Kereta wisata biasanya digunakan untuk mengelilingi tempat wisata.'),
(66, 'TRAIL ', 'TRAIL.png', '<p style="text-align: justify;">Trail merupakan kendaraan bermotor roda dua yang kebanyakan digunakan untuk menjelajah daerah-daerah selain jalan beraspal seperti jalan berbatu,berlumpur, hutan, dll.</p>'),
(67, 'SNORKELING', 'SNORKELING.png', 'Snorkeling merupakan salah satu wisata air, dimana para pengunjung dapat menikmati keindahan bawah laut dengan menggunakan pelampung sehingga pengunjung tidak akan tenggelam.'),
(71, '<NAME> ', 'MANDI_BOLA.png', 'Mandi bola merupakan wahana permainan yang dikhususkan untuk anak kecil.'),
(72, 'WATERBOOM', 'WATERBOOM.png', 'Lokasi wisata air yang di dalamnya banyak terdapat berbagai macam permainan dan bentuk seluncuran air.'),
(76, 'SHOOTING TARGET ', 'SHOOTING_TARGET.png', 'Shooting target merupakan permainan menembak. Cara penggunaannya sama dengan panahan, hanya saja dipermainan ini pengunjung menggunakan replika pistol.'),
(77, 'LABIRIN ', 'LABIRIN.png', 'Labirin merupakan sebuah wahana yang mengharuskan pengunjungnya untuk mengatur strategi agar dapat menemukan jalan keluar.'),
(78, 'PANAHAN', 'PANAHAN.png', 'Panahan merupakan permainan yang menggunakan busur penuh sebagai alat untuk memanah.'),
(79, 'SEGWAY ', 'SEGWAY.png', 'Segway merupakan sebuah kendaraan listrik beroda dua. Pengunjung dapat menggunakan alat ini untuk mengelilingi objek wisata.'),
(81, 'SEPEDA ', 'SEPEDA.png', '<p style="text-align: justify;">Sepeda merupakan sebuah kendaraan yang disewakan di objek wisata. Sepeda dapat digunakan untuk mengelilingi objek wisata. Biasanya disewakan sepeda single maupun sepeda tandem.</p>'),
(92, 'TUBING ', 'TUBING.png', '<p style="text-align: justify;">Lokasi wisata arus air dengan menggunakan ban besar yang diisi satu orang sebagai media mengarungi sungai. Untuk wisata tubing ini sudah disediakan guide untuk memandu pengunjung.</p>'),
(94, 'OUTBOUND', 'OUTBOUND.png', '<p style="text-align: justify;">Outbound merupakan kegiatan yang ditujukan untuk melatih kebersamaan dalam sebuah tim. Di dalam outond biasanya terdapat banyak permainan.</p>'),
(97, 'OFF ROAD ', 'OFF_ROAD.png', '<p style="text-align: justify;">Off road merupakan jenis permainan yang menggunakan mobil yang mengajak pengunjung merasakan sensasi adventure. Mobil off road akan melewati jalan di luar jalan beraspal seperti jalan tanah, lumpur, pasir, sungai, atau batuan.</p>'),
(99, 'RAFTING ', 'RAFTING.png', '<p style="text-align: justify;">Rafting merupakan aktivitas yang memadukan unsur petualangan, edukasi, olahraga, dan rekreasi dengan mengarungi sungai yang berjeram. Rafting biasanya menggunakan perahu karet sebagai alat untuk mengarungi derasnya sungai.</p>'),
(102, 'SPOT FOTO', 'SPOT_FOTO.png', '<p style="text-align: justify;">Suatu tempat dimana lokasi tersebut sangat cocok sebagai pengabadian momen wisata.</p>'),
(104, 'KOLAM SPA ', 'KOLAM_SPA.png', '<p style="text-align: justify;">Kolam spa merupakan kolam yang berfungsi untuk relaksasi. DI kolam spa pengunjung bisa berendam dengan menggunakan air hangat.</p>'),
(105, 'KOLAM ARUS ', 'KOLAM_ARUS.png', '<p style="text-align: justify;">Kolam arus merupakan sebuah wahana kolam yang terdapat arus yang sangat deras. Di kolam arus, pengunjung biasanya menggunakan ban dan akan merasakan sensasi arus yang deras layaknya di sungai.</p>'),
(113, 'BIOSKOP 4D', 'BIOSKOP_4D.png', '<p style="text-align: justify;">Bioskop 4D merupakan wahana bioskop yang membuat pengunjungnya merasakan sensasi menonton film dengan kualitas 4D.</p>'),
(114, 'BOOM-BOOM CAR', 'BOOM-BOOM_CAR.png', '<p style="text-align: justify;">Boom-boom car merupakan wahana permainan mobil bertenaga listrik. Permainan ini akan membuat pengunjungnya saling menabrakkan mobil satu sama lain.</p>'),
(115, 'BUMPER BOAT', 'BUMPER_BOAT.png', '<p style="text-align: justify;">Bumber boat merupakan wahana permainan yang hampir sama dengan boom-boom car. Permainan bumper boat merupakan sebuah perahu karet berbentuk bulat dan akan membuat pengunjungnya saling menabrakkan perahunya satu sama lain.</p>'),
(116, 'MISTERI BAJAK LAUT', 'MISTERI_BAJAK_LAUT.png', '<p style="text-align: justify;">Misteri bajak laut merupakan wahana permainan yang berbentuk menyerupai kapal. Dalam permainan ini, pengunjung dapat merasakan sensasi seakan-akan berada di kapal sungguhan yang sedang berlayar.</p>'),
(120, 'HAMMOCK', 'HAMMOCK.png', '<p style="text-align: justify;">Hammock merupakan sebuah ayunan gantung yang dapat digunakan untuk bersantai ketika melakukan kegiatan outdoor.</p>'),
(135, 'AIR SOFT GUN', 'AIR_SOFT_GUN.png', '<p style="text-align: justify;">Air soft gun merupakan permainan yang menggunakan replika senjata api sebagai alat permainan. Dalam permainan air soft gun biasanya dilakukan secara berkelompok.</p>'),
(136, 'AYUNAN', 'AYUNAN.png', '<p style="text-align: justify;">Ayunan merupakan permainan yang dapat memacu adrenalin. Permainan ayunan ini akan membuat pengunjung merasakan sensasi berayun-berayun di atas lembah.</p>'),
(137, 'BELI IKAN', 'BELI IKAN.png', '<p style="text-align: justify;">Beli ikan merupakan aktivitas membeli ikan segar langsung dari nelayan di tepi pantai. Biasanya, selain membeli ikan segar, ikan bisa langsung dimasak dan disantap di tempat dengan menikmati pemandangan pantai yang indah.</p>'),
(138, 'COTTAGE', 'COTTAGE.png', '<p style="text-align: justify;">Cottage merupakan sebuah tempat peristirahatan yang disewakan kepada pengunjung. Pengunjung biasanya dapat menyewa cottage 1 hari penuh ataupun hanya per jam.</p>'),
(139, 'DIVING COURSE', 'DIVING_COURSE.png', '<p style="text-align: justify;">Diving course merupakan sebuah fasilitas untuk pelatihan menyelam. Pelatihan ini nantinya akan mendapat sertifikat yang dapat digunakan untuk mendapatkan izin agar dapat menyelam di laut lepas.</p>'),
(140, 'DOKUMENTASI VIDEO', 'DOKUMENTASI_VIDEO.png', '<p style="text-align: justify;">Dokumentasi video merupakan fasilitas yang dapat dimanfaatkan oleh pengunjung untuk mendapatkan dokumentasi video dari kamera profesional dan drone. Dengan fasilitas ini, pengunjung bisa mendapatkan hasil dokumentasi yang berbeda dengan menggunakan drone.</p>'),
(141, 'GUIDE', 'GUIDE.png', '<p style="text-align: justify;">Guide merupakan fasilitas yang disediakan oleh tempat wisata untuk mendampingi pengunjung. Guide akan memudahkan pengunjung untuk mendapatkan informasi dari sebuah tempat wisata.</p>'),
(142, 'HANDUK', 'HANDUK.png', '<p style="text-align: justify;">Handuk merupakan sebuah fasilitas penyewaan untuk pengunjung yang disediakan oleh tempat wisata. Penyewaan handuk ini biasanya ada di tempat wisata seperti kolam renang.</p>'),
(143, 'HOMESTAY', 'HOMESTAY.png', '<p style="text-align: justify;">Homestay merupakan fasilitas penginapan yang ada di tempat wisata. Homestay biasanya bekerja sama dengan penduduk sekitar tempat wisata agar rumahnya dapat digunakan sebagai fasilitas penginapan.</p>'),
(144, 'KANO', 'KANO.png', '<p style="text-align: justify;">Kano merupakan sebuah perahu kecil yang hanya dapat memuat 1 orang. Untuk menggerakkan kano biasanya menggunakan sebuah dayung.</p>'),
(145, 'KOLAM PANCING', 'KOLAM_PANCING.png', '<p style="text-align: justify;">Kolam pancing merupakan kolam yang berisi ikan-ikan air tawar seperti ikan mujaer, ikan gurame, ikan tombro, ikan nila, dll. Untuk kolam pancing ini biasanya pengunjung membawa alat pancing sendiri.</p>'),
(146, 'LOKER', 'LOKER.png', '<p style="text-align: justify;">Loker merupakan sebuah fasilitas penyimpanan barang yang ada di objek wisata. Dengan adanya loker, pengunjung tidak perlu takut kehilangan barang ketika sedang menikmati objek wisata.</p>'),
(147, 'PAPAN SURFING', 'PAPAN_SURFING.png', '<p style="text-align: justify;">Papan surfing merupakan sebuah alat yang digunakan untuk pengunjung yang ingin melakukan olahraga surfing.</p>'),
(148, 'TENDA', 'TENDA.png', '<p style="text-align: justify;">Tenda merupakan tempat istirahat bagi pelancong yang ingin berlibur di lokasi wisata yang bisa dikatakan tidak ada penginapan sama sekali.</p>'),
(149, 'TIKAR', 'TIKAR.png', '<p style="text-align: justify;">Alat yang digunakan untuk duduk lesehan. Tikar sangat cocok dikunakan ketika berwisata bersama keluarga maupun bersama teman-teman.</p>'),
(153, 'GAZEBO', 'GAZEBO.png', '<p style="text-align: justify;">Gazebo merupakan sebuah fasilitas seperti gubuk kecil yang dapat disewa oleh pengunjung. Gazebo ini dapat dimanfaatkan sebagai tempat istirahat untuk melepas lelah di tempat wisata.</p>'),
(154, 'FLYING FOX', 'FLYING_FOX.png', '<p style="text-align: justify;">Flying fox merupakan permainan yang dapat memacu adrenalin. Pengunjung akan merasakan sensasi terbang seperti burung dan menikmati pemandangan dari atas ketika menaiki permainan ini.</p>');
-- --------------------------------------------------------
--
-- Table structure for table `wahana_wisata`
--
--
-- Junction table linking an attraction (`wahana`) to a tourism site (`wisata`),
-- carrying the ticket price ("harga tiket masuk") for that attraction at that site.
--
CREATE TABLE IF NOT EXISTS `wahana_wisata` (
`wahwis_id` int(11) NOT NULL AUTO_INCREMENT,
`wahana_id` int(11) NOT NULL, -- references wahana.wahana_id (no FK constraint declared in this dump)
`wisata_id` int(11) NOT NULL, -- references wisata.wisata_id (no FK constraint declared in this dump)
-- Widened from varchar(20): several dumped rows were visibly cut off at exactly
-- 20 characters (e.g. 'Rp 750.000,- s.d. Rp'), proving 20 truncates real data.
-- Widening is backward-compatible; already-truncated rows cannot be recovered here.
`wahwis_htm` varchar(64) NOT NULL,
PRIMARY KEY (`wahwis_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=438 ;
--
-- Dumping data for table `wahana_wisata`
--
INSERT INTO `wahana_wisata` (`wahwis_id`, `wahana_id`, `wisata_id`, `wahwis_htm`) VALUES
(1, 1, 6, 'Rp 5.000,-'),
(2, 2, 7, 'Rp 15.000,-'),
(3, 3, 7, ''),
(4, 2, 8, 'Rp 15.000,-'),
(6, 81, 9, 'Rp 35.000,-'),
(7, 4, 9, 'Rp 5.000,-'),
(8, 4, 10, 'Rp 5.000,-'),
(9, 5, 10, 'N/A'),
(11, 62, 10, 'Rp 20.000,-'),
(12, 13, 10, 'Rp 3.000,-'),
(13, 9, 10, 'Rp 15.000,-'),
(14, 10, 10, 'Rp 10.000,-'),
(15, 11, 14, ''),
(16, 12, 14, ''),
(19, 11, 16, ''),
(20, 12, 16, ''),
(21, 1, 17, 'Rp 16.000,-'),
(22, 102, 17, 'FREE'),
(31, 13, 18, 'Rp 16.000,-'),
(33, 102, 18, 'FREE'),
(35, 9, 19, 'Rp 60.000,-'),
(36, 23, 19, 'FREE'),
(37, 21, 20, ''),
(38, 22, 20, ''),
(39, 23, 20, ''),
(40, 24, 24, ''),
(41, 25, 24, ''),
(42, 24, 25, 'N/A'),
(43, 23, 25, 'N/A'),
(44, 6, 26, ''),
(45, 22, 26, ''),
(46, 28, 26, ''),
(47, 29, 26, ''),
(48, 30, 26, ''),
(49, 31, 26, ''),
(50, 6, 27, ''),
(51, 22, 27, ''),
(52, 28, 27, ''),
(53, 29, 27, ''),
(54, 30, 27, ''),
(55, 31, 27, ''),
(56, 6, 28, ''),
(57, 22, 28, ''),
(58, 28, 28, ''),
(59, 29, 28, ''),
(60, 30, 28, ''),
(61, 31, 28, ''),
(62, 154, 29, 'Rp 20.000,-'),
(63, 9, 29, 'Rp 30.000,-'),
(68, 113, 30, 'Rp 12.000,-'),
(69, 114, 30, 'Rp 20.000,-'),
(70, 115, 30, 'Rp 20.000,-'),
(71, 116, 30, 'Rp 8.000,-'),
(72, 102, 30, 'Rp 10.000,-'),
(73, 13, 30, 'Rp 25.000,-'),
(74, 32, 31, 'Rp 5.000,-'),
(75, 33, 31, ''),
(76, 34, 31, ''),
(77, 35, 31, 'Rp 20.000,-'),
(78, 36, 31, 'Rp 5.000,- s.d. Rp 1'),
(79, 37, 31, ''),
(80, 38, 31, ''),
(81, 39, 31, ''),
(82, 33, 32, ''),
(83, 34, 32, ''),
(84, 35, 32, ''),
(85, 36, 32, ''),
(86, 37, 32, ''),
(87, 38, 32, ''),
(88, 39, 32, ''),
(89, 40, 33, ''),
(90, 41, 33, ''),
(91, 42, 33, ''),
(92, 43, 33, ''),
(93, 126, 33, ''),
(94, 40, 34, ''),
(95, 41, 34, ''),
(96, 42, 34, ''),
(97, 43, 34, ''),
(98, 126, 34, ''),
(104, 40, 36, ''),
(105, 41, 36, ''),
(106, 42, 36, ''),
(107, 43, 36, ''),
(108, 126, 36, ''),
(109, 40, 37, ''),
(110, 41, 37, ''),
(111, 42, 37, ''),
(112, 43, 37, ''),
(113, 126, 37, ''),
(114, 40, 38, ''),
(115, 41, 38, ''),
(116, 42, 38, ''),
(117, 43, 38, ''),
(118, 126, 38, ''),
(124, 40, 40, ''),
(125, 41, 40, ''),
(126, 42, 40, ''),
(127, 43, 40, ''),
(128, 126, 40, ''),
(129, 44, 41, ''),
(130, 39, 41, ''),
(131, 46, 41, ''),
(132, 47, 41, 'Rp 3.000,-'),
(133, 48, 41, 'Rp 5.000,-'),
(134, 49, 42, ''),
(135, 50, 42, ''),
(138, 49, 44, ''),
(139, 50, 44, ''),
(140, 51, 45, 'Rp 25.000,-/15 MENIT'),
(141, 52, 45, 'Rp 15.000,-/KG'),
(142, 53, 45, 'Rp 15.000,-/KG'),
(143, 54, 45, 'Rp 25.000,-'),
(144, 55, 45, ''),
(145, 56, 45, 'Rp 100.000,-'),
(146, 57, 45, ''),
(147, 58, 45, ''),
(148, 51, 46, 'Rp 25.000,-/15 MENIT'),
(149, 52, 46, 'Rp 15.000,-/KG'),
(150, 53, 46, 'Rp 15.000,-/KG'),
(151, 54, 46, 'Rp 25.000,-'),
(152, 55, 46, ''),
(153, 56, 46, 'Rp 100.000,-'),
(154, 57, 46, ''),
(155, 58, 46, ''),
(156, 51, 47, 'Rp 25.000,-/15 MENIT'),
(157, 52, 47, 'Rp 15.000,-/KG'),
(158, 53, 47, 'Rp 15.000,-/KG'),
(159, 54, 47, 'Rp 25.000,-'),
(160, 55, 47, ''),
(161, 56, 47, 'Rp 100.000,-'),
(162, 57, 47, ''),
(163, 58, 47, ''),
(164, 24, 48, 'Rp 10.000,-'),
(165, 23, 48, 'Rp 500.000,-/HARI'),
(166, 127, 48, 'Rp 750.000,- s.d. Rp'),
(167, 62, 48, 'Rp 15.000,-'),
(168, 39, 48, ''),
(169, 6, 48, 'Rp 25.000,-'),
(170, 65, 48, 'Rp 5.000,-/ORANG'),
(171, 66, 48, 'Rp 50.000,- s.d. Rp '),
(172, 24, 49, 'Rp 10.000,-'),
(173, 23, 49, 'Rp 500.000,-/HARI'),
(174, 127, 49, 'Rp 750.000,- s.d. Rp'),
(175, 62, 49, 'Rp 15.000,-'),
(176, 39, 49, ''),
(177, 6, 49, 'Rp 25.000,-'),
(178, 65, 49, 'Rp 5.000,-/ORANG'),
(179, 66, 49, 'Rp 50.000,- s.d. Rp '),
(180, 24, 50, 'Rp 10.000,-'),
(181, 23, 50, 'Rp 500.000,-'),
(182, 61, 50, 'Rp 1.000.000,-'),
(183, 62, 50, 'Rp 15.000,-'),
(184, 39, 50, 'FREE'),
(185, 154, 50, 'Rp 25.000,-'),
(186, 65, 50, 'Rp 5.000,-'),
(187, 66, 50, 'Rp 50.000,-'),
(188, 32, 51, ''),
(189, 68, 51, 'Rp 3.000,-'),
(190, 62, 51, 'Rp 5.000,-'),
(191, 70, 51, 'Rp 2.000,-'),
(192, 71, 51, 'Rp 5.000,-'),
(193, 72, 51, 'RP 50.000,- s.d. Rp '),
(194, 48, 51, 'Rp 5.000,-'),
(195, 47, 51, 'Rp 3.000,-'),
(196, 32, 52, ''),
(197, 68, 52, 'Rp 3.000,-'),
(198, 62, 52, 'Rp 5.000,-'),
(199, 70, 52, 'Rp 2.000,-'),
(200, 71, 52, 'Rp 5.000,-'),
(201, 72, 52, 'RP 50.000,- s.d. Rp '),
(202, 48, 52, 'Rp 5.000,-'),
(203, 47, 52, 'Rp 3.000,-'),
(204, 75, 53, 'Rp 15.000,-'),
(205, 76, 53, 'Rp 10.000,-/5 PELURU'),
(206, 77, 53, 'Rp 10.000,-'),
(207, 78, 53, 'Rp 5.000,-'),
(208, 79, 53, 'Rp 30.000,-'),
(209, 80, 53, 'Rp 10.000,-'),
(210, 81, 53, 'Rp 5.000,-'),
(211, 27, 53, 'Rp 30.000,-'),
(212, 128, 54, 'Rp 10.000,-'),
(213, 84, 54, 'Rp 10.000,-'),
(214, 24, 54, ''),
(215, 86, 54, ''),
(216, 87, 54, ''),
(217, 88, 54, ''),
(218, 89, 55, ''),
(219, 90, 55, ''),
(220, 91, 55, ''),
(221, 89, 56, ''),
(222, 90, 56, ''),
(223, 91, 56, ''),
(224, 92, 57, 'Rp 70.000,- s.d. Rp '),
(225, 23, 57, 'Rp 25.000,-'),
(226, 94, 57, 'Rp 15.000,-'),
(227, 92, 58, 'Rp 70.000,- s.d. Rp '),
(228, 23, 58, 'Rp 25.000,-'),
(229, 94, 58, 'Rp 15.000,-'),
(230, 23, 59, 'Rp 150.000,-'),
(231, 94, 59, 'Rp 110.000,-'),
(232, 97, 59, 'Rp 250.000,-'),
(233, 98, 59, 'Rp 350.000,-'),
(234, 99, 59, 'Rp 200.000,- s.d. Rp'),
(235, 100, 59, 'Rp 399.000,-'),
(236, 23, 1, 'Rp 150.000,-'),
(237, 102, 1, 'Rp 10.000,-'),
(238, 4, 1, 'Rp 50.000,-'),
(239, 23, 60, 'Rp 150.000,-'),
(240, 102, 60, 'Rp 10.000,- FREE AIR'),
(241, 75, 60, 'Rp 50.000,- PP'),
(242, 104, 61, 'Rp 10.000,-'),
(243, 105, 61, 'Rp 10.000,-'),
(244, 46, 61, ''),
(245, 107, 61, ''),
(246, 108, 61, 'Rp 20.000,-'),
(247, 109, 61, ''),
(248, 36, 61, 'Rp 5.000,- s.d. Rp 3'),
(249, 111, 61, 'Rp 5.000,-'),
(250, 46, 62, ''),
(251, 107, 62, ''),
(252, 108, 62, 'Rp 20.000,-'),
(253, 109, 62, ''),
(254, 36, 62, 'Rp 5.000,- s.d. Rp 3'),
(255, 111, 62, 'Rp 5.000,-'),
(256, 129, 63, 'Rp 13.000,-'),
(257, 130, 63, 'Rp 12.000,-'),
(258, 114, 63, 'Rp 20.000,-'),
(259, 115, 63, 'Rp 20.000,-'),
(260, 116, 63, 'Rp 8.000,-'),
(261, 117, 63, 'Rp 10.000,-'),
(262, 131, 63, 'Rp 10.000,-'),
(263, 62, 63, 'Rp 25.000,-'),
(264, 120, 64, 'Rp 15.000,-'),
(265, 23, 64, 'Rp 25.000,-'),
(270, 2, 70, '150000'),
(271, 3, 70, '120001'),
(275, 151, 10, 'Rp 10.000,-'),
(277, 81, 2, 'Rp.35.000,00'),
(278, 4, 2, 'Rp.5.000,00'),
(279, 76, 2, 'Rp.10.000,00'),
(280, 71, 2, 'Rp.3.000,00'),
(281, 81, 75, 'Rp.35.000,00'),
(282, 4, 75, 'Rp.5.000,00'),
(283, 76, 75, 'Rp.10.000,00'),
(284, 71, 75, 'Rp.3.000,00'),
(285, 113, 76, 'Rp 12.000,-'),
(286, 114, 76, 'Rp 20.000,-'),
(287, 115, 76, 'Rp 20.000,-'),
(288, 116, 76, 'Rp 8.000,-'),
(289, 102, 76, 'Rp 10.000,-'),
(290, 13, 76, 'Rp 10.000,-'),
(291, 62, 76, 'Rp 25.000,-'),
(292, 23, 77, 'N/A'),
(293, 148, 78, 'Rp 100.000,-'),
(294, 23, 78, 'Rp 25.000,-'),
(295, 144, 78, 'Rp 25.000,-'),
(296, 47, 78, 'Rp 5.000,-'),
(297, 148, 79, 'N/A'),
(298, 23, 79, 'N/A'),
(299, 23, 80, 'N/A'),
(300, 13, 81, 'Rp 50.000,-'),
(301, 147, 81, '(N/A)'),
(302, 67, 81, '(N/A)'),
(303, 23, 81, '(N/A)'),
(304, 9, 82, 'Rp 60.000,-'),
(305, 23, 82, '(N/A)'),
(306, 153, 83, '(N/A)'),
(307, 23, 83, '(N/A)'),
(308, 13, 84, 'Rp 160.000,-'),
(309, 23, 84, '(N/A)'),
(310, 149, 85, 'Rp 20.000,-'),
(311, 138, 85, 'Rp 150.000,-'),
(312, 23, 85, 'Rp 20.000,-'),
(315, 13, 87, 'Rp.150.000,-'),
(316, 137, 87, '(N/A)'),
(317, 23, 87, '(N/A)'),
(318, 47, 88, 'Rp 10.000,-'),
(319, 67, 88, 'Rp 20.000,-'),
(320, 153, 88, 'FREE'),
(321, 141, 88, 'Rp 100.000,-'),
(322, 153, 89, 'N/A'),
(323, 23, 89, 'N/A'),
(324, 13, 90, 'Rp 50.000,-'),
(325, 147, 90, '(N/A)'),
(326, 67, 90, '(N/A)'),
(327, 23, 90, '(N/A)'),
(328, 137, 90, '(N/A)'),
(329, 52, 91, 'Rp 20.000,-'),
(330, 81, 93, 'FREE'),
(331, 39, 93, 'FREE'),
(332, 142, 93, 'Rp 10.000,-'),
(333, 146, 93, 'Rp 5.000,-'),
(334, 46, 93, 'N/A'),
(335, 47, 93, 'Rp 15.000,-'),
(336, 153, 94, 'Rp 130.000,-'),
(337, 146, 94, 'Rp 30.000,-'),
(338, 1, 94, 'Rp 30.000,-'),
(339, 94, 94, 'N/A'),
(340, 139, 94, 'N/A'),
(341, 94, 95, 'N/A'),
(342, 135, 95, 'N/A'),
(343, 66, 95, 'N/A'),
(344, 120, 96, 'Rp 5.000,-'),
(345, 149, 96, 'Rp 5.000,-'),
(346, 23, 96, 'Rp 10.000,-'),
(347, 104, 98, 'Rp 10.000,-'),
(348, 105, 98, 'Rp 10.000,-'),
(349, 46, 98, '(N/A)'),
(350, 39, 98, 'FREE'),
(351, 72, 98, 'Rp 20.000,-'),
(352, 13, 98, 'Rp 30.000,-'),
(353, 143, 100, 'N/A'),
(354, 146, 102, 'Rp 5.000,-'),
(355, 1, 102, 'Rp 5.000,-'),
(356, 120, 104, 'Rp 15.000,-'),
(357, 23, 104, 'Rp 25.000,-'),
(358, 13, 105, 'Rp 50.000,-'),
(359, 94, 105, 'N/A'),
(360, 24, 106, 'Rp 5.000,-'),
(361, 35, 106, 'Rp 20.000,-'),
(362, 13, 106, 'Rp 10.000,-'),
(363, 37, 106, 'FREE'),
(364, 39, 106, 'FREE'),
(365, 102, 107, 'FREE'),
(366, 136, 107, 'N/A'),
(367, 154, 107, 'N/A'),
(368, 120, 108, 'N/A'),
(369, 153, 108, 'N/A'),
(370, 99, 108, 'N/A'),
(371, 154, 108, 'N/A'),
(372, 23, 109, 'Rp 150.000,-'),
(373, 102, 109, 'Rp 10.000,-'),
(374, 4, 109, 'Rp 50.000,-'),
(375, 4, 110, 'Rp 15.000,-'),
(376, 76, 110, 'Rp 10.000,-'),
(377, 77, 110, 'Rp 10.000,-'),
(378, 78, 110, 'Rp 5.000,-'),
(379, 79, 110, 'Rp 30.000,-'),
(380, 81, 110, 'Rp 10.000,-'),
(381, 9, 110, 'Rp 30.000,-'),
(382, 4, 111, 'Rp 5.000,-'),
(383, 5, 111, 'N/A'),
(384, 154, 111, 'Rp 15.000,-'),
(385, 62, 111, 'Rp 20.000,-'),
(386, 13, 111, 'Rp 3.000,-'),
(387, 9, 111, 'Rp 15.000,-'),
(388, 10, 111, 'Rp 10.000,-'),
(389, 4, 112, 'Rp 50.000,-'),
(390, 120, 113, 'Rp 7.000,-'),
(391, 78, 113, 'Rp 10.000,-'),
(392, 23, 113, 'N/A'),
(393, 71, 113, 'Rp 5.000,-'),
(394, 92, 114, 'Rp 50.000,-'),
(395, 2, 114, 'Rp 100.000,-'),
(396, 140, 114, 'Rp 750.000,-'),
(397, 23, 114, 'Rp 75.000,-'),
(398, 143, 114, 'Rp 200.000,-'),
(399, 81, 115, 'Rp 10.000,-'),
(400, 4, 115, 'Rp 10.000,-'),
(401, 24, 115, 'FREE'),
(402, 153, 115, 'N/A'),
(403, 23, 116, 'Rp 150.000,-'),
(404, 94, 116, 'Rp 110.000,-'),
(405, 97, 116, 'Rp 250.000,-'),
(406, 99, 116, 'Rp 300.000,-'),
(407, 66, 116, 'Rp 399.000,-'),
(408, 5, 117, 'FREE'),
(409, 37, 117, 'FREE'),
(410, 46, 117, 'N/A'),
(411, 47, 117, 'Rp 3.000,-'),
(412, 1, 117, 'Rp 5.000,-'),
(413, 9, 118, 'Rp 25.000,-'),
(414, 52, 118, 'Rp 15.000,-'),
(415, 54, 118, 'Rp 25.000,-'),
(416, 102, 118, 'Rp 100.000,-'),
(417, 57, 118, 'FREE'),
(418, 56, 118, 'FREE'),
(419, 76, 9, 'Rp 10.000,-'),
(420, 71, 9, 'Rp 3.000,-'),
(421, 62, 30, 'Rp 25.000,-'),
(422, 1, 121, 'Rp 6.000,-'),
(423, 24, 123, 'Rp 15.000,-'),
(424, 145, 123, 'Rp 15.000,-'),
(425, 92, 124, 'Rp 120.000,-'),
(426, 23, 124, 'Rp 25.000,-'),
(427, 94, 124, 'Rp 15.000,-'),
(428, 24, 125, 'FREE'),
(429, 56, 125, 'Rp 3.000,-'),
(430, 62, 125, 'Rp 5.000,-'),
(431, 13, 125, 'Rp 2.000,-'),
(432, 71, 125, 'Rp 5.000,-'),
(433, 153, 125, 'Rp 100.000,-'),
(434, 13, 25, 'N/A'),
(435, 13, 86, 'Rp 50.000,-'),
(436, 137, 86, 'N/A'),
(437, 23, 86, 'N/A');
-- --------------------------------------------------------
--
-- Table structure for table `wisata`
--
--
-- Master table of tourism sites: category, promo video id, name, HTML description,
-- search tags, local/international ticket prices, map coordinates, and a
-- display/visibility counter (`wisata_tampil`).
--
CREATE TABLE IF NOT EXISTS `wisata` (
`wisata_id` int(11) NOT NULL AUTO_INCREMENT,
`kategori_id` int(11) DEFAULT NULL, -- references a category table (no FK constraint declared in this dump)
`wisata_url_video` text NOT NULL, -- YouTube video id (may be empty string)
`wisata_nama` text NOT NULL,
`wisata_deskripsi` text NOT NULL, -- stored as HTML fragments
`wisata_tag` text NOT NULL, -- space-separated uppercase tags
-- Widened from varchar(20) to match `wahana_wisata`.`wahwis_htm`: the same
-- 'Rp ... s.d. Rp ...' price format overflowed 20 chars in the sibling table,
-- so 20 silently truncates longer price ranges here too.
`wisata_htm_lokal` varchar(64) NOT NULL,
`wisata_htm_intl` varchar(64) NOT NULL,
-- NOTE(review): coordinates are stored as strings, not DECIMAL — presumably for
-- easy pass-through to a map widget; confirm before doing numeric queries on them.
`wisata_latitude` varchar(50) NOT NULL,
`wisata_longitude` varchar(50) NOT NULL,
`wisata_tampil` int(11) NOT NULL,
PRIMARY KEY (`wisata_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=126 ;
--
-- Dumping data for table `wisata`
--
INSERT INTO `wisata` (`wisata_id`, `kategori_id`, `wisata_url_video`, `wisata_nama`, `wisata_deskripsi`, `wisata_tag`, `wisata_htm_lokal`, `wisata_htm_intl`, `wisata_latitude`, `wisata_longitude`, `wisata_tampil`) VALUES
(8, 6, '3CUZL6hk6Js', '<NAME>', '<p style="text-align: justify;">Masjid Tiban merupakan Masjid yang berlokasi di Desa Sananrejo, Kecamatan Turen, Kabupaten Malang. Masjid Tiban ini memiliki arsitektur bangunan masjid yang sangat megah. Sebenarnya, Masjid Tiban masuk ke dalam kompleks Pondok Pesantren Salafiah Bihaaru Bahri Asali Fadlaailir Rahmah.</p>', 'IBADAH ARSITEKTUR SELFIE KULINER PENDIDIKAN', 'FREE', 'FREE', '-8.15102', '112.71309', 8),
(9, 8, '0i2_7wL1jNw', 'DESA WISATA PUJON KIDUL', '<p style="text-align: justify;">Kabupaten Malang mempunyai berbagai macam destinasi wisata dari yang alami maupun buatan. Kebanyakan pengunjung akan mencari tempat yang enak, sejuk untuk digunakan sebagai tempat berkumpul bersama teman, sahabat dan keluarga. Salah satu lokasi yang banyak diminati oleh pengunjung adalah Desa Wisata Pujon Kidul. Suasana khas pedesaan mengelilingi tempat wisata ini dengan hamparan persawahan yang luas dan hijau. Cocok bagi pengunjung yang penat dengan suasana perkotaan. Disini juga ada beberapa spot untuk foto. Disediakan pula tempat makan dengan suasana ditengah sawah. Desa Wisata Pujon Kidul terletak di Desa Pujon Kidul, Kecamatan Pujon, Kabupaten Malang. Jarak tempuh dari Kota Malang sekitar 29 Km atau sekitar 1,5 jam perjalanan. Akses menuju ke Desa Pujon Kidul sangat mudah dijangkau meskipun melewati pemukiman penduduk.</p>', 'SELFIE RILEKS NONGKRONG DESA WISATA KULINER', 'Rp 8.000,-', 'Rp 8.000,-', '-7.85594', '112.4549', 27),
(12, 3, 'fWeE5foWWdo', 'CANDI KIDAL', '<p style="text-align: justify;">Candi Kidal merupakan Candi yang berlokasi di Desa Rejokidal, Kecamatan Tumpang, Kabupaten Malang. <NAME> merupakan salah satu candi warisan dari Kerajaan Singosari sebagai bentuk penghormatan atas jasa besar Anusapati, Raja kedua dari Kerajaan Singosari yang memerintah selama 20 tahun (1227-1248).</p>', 'SELFIE PENDIDIKAN SEJARAH CAGAR BUDAYA', 'N/A', 'N/A', '-8.02577', '112.70905', 3),
(15, 3, 'HO1i1Q8fF04', '<NAME>', '<p style="text-align: justify;"><NAME> merupakan Candi yang berlokasi di Dusun Jago, Desa Tumpang, Kecamatan Tumpang, Kabupaten Tumpang. Menurut Kitab Negarakertagama dan Pararaton, nama <NAME> sebenarnya berasal dari kata “Jajaghu” yang didirikan pada masa Kerajaan Singosari Pada abad ke-13. Jajaghu yang artinya adalah keagungan merupakan istilah yang digunakan untuk menyebut tempat suci.</p>', 'SELFIE PENDIDIKAN SEJARAH CAGAR BUDAYA', 'N/A', 'N/A', '-8.00586', '112.7641', 6),
(18, 7, 'ZUF7yB1ePqw', '<NAME>', '<p style="text-align: justify;">Pantai Ngliyep merupakan Objek Wisata Pantai yang berlokasi di Desa Kedungsalam, Kecamatan Donomulyo, Kabupaten Malang. Pantai Ngliyep berada di tepi Samudera Indonesia. Luas Area Wisata Pantai Ngliyep 10 Ha yang terdiri dari hutan lindung, areal wisata, penginapan, dan lahan parkir.</p>', 'SELFIE RILEKS PANORAMA CAMPING NONGKRONG', 'Rp 16.000,-', 'Rp 16.000,-', '-8.38365', '112.4243', 10),
(23, 7, 'LlIMe6cAI20', 'PANTAI BATU BENGKUNG', '<p style="text-align: justify;">Pantai Batu Bengkung merupakan Objek WIsata Pantai yang berlokasi di Desa Gajahrejo, Kecamatan Gedangan, Kabupaten Malang. Pantai Batu Bengkung merupakan salah satu pantai dengan batuan karang yang membentang membentuk barisan. Batuan karang ini akan memecah ombak besar di tepian pantai. Tidak jarang air yang terbawa ombak terjebak di dalam cekungan karang sehingga menghasilkan kolam air asin alami yang aman untuk berenang.</p>', 'SELFIE RILEKS PANORAMA CAMPING', 'Rp 10.000,-', 'Rp 10.000,-', '-8.43017', '112.6151', 10),
(25, 7, 'S6o9o44bPxY', 'PANTAI BAJUL MATI', '<p style="text-align: justify;">Pantai Bajul Mati merupakan Objek Wisata Pantai yang berlokasi di Desa Gajahrejo, Kecamatan Gedangan, Kabupaten Malang. Pantai Bajul Mati ini memiliki teluk-teluk yang indah.</p>', 'SELFIE RILEKS PANORAMA CAMPING', 'Rp 10.000,-', 'Rp 10.000,-', '-8.43091', '112.63542', 7),
(29, 7, 'aSzZMEqHzaU', 'PANTAI BALEKAMBANG', '<p style="text-align: justify;">Pantai Balekambang merupakan Objek Wisata Pantai yang tereltak di Dusun Sumber Jambe, Desa Srigonco, Kecamatan Bantur, Kabupaten Malang. Suasana yang ditawarkan Pantai Balekambang sangat menarik sekali dengan ditumbuhi aneka pepohonan yang rindang. Pantai Balekambang memiliki 3 pulau kecil yaitu Pulau Ismoyo, Pulau Anoman, dan Pulau Wisanggeni. Di tiap-tiap pulau disambungkan dengan jembatan. Di Pulau Ismoyo terdapat Pura yang bernama Pura Amarta Jati.</p>', 'SELFIE IBADAH PANORAMA CAMPING KULINER', 'Rp 16.000,-', 'Rp 16.000,-', '-8.40276', '112.53365', 14),
(30, 8, 'HSdIP6zx77s', 'TAMAN REKREASI SENGKALING UMM', '<p style="text-align: justify;">Taman Rekreasi Sengkaling UMM merupakan Objek Wisata yang berlokasi di Jalan Raya Mulyoagung No. 188, Kecamatan Dau, Kabupaten Malang. Taman Rekreasi Sengkaling mempunyai berbagai fasilitas yang memiliki keunggulan dengan adanya wisata air yang berasal dari sumber alami yang salah satunya adalah Kolam Tirta Alam. Tempat ini dikenakan biaya masuk Rp 25.000,- dan biaya parkir Rp 5.000,-. </p>', 'SELFIE RENANG PIKNIK RILEKS PLAYGROUND', 'Rp 25.000,-', 'Rp 25.000,-', '-7.91538', '112.58890', 504),
(35, 6, 'gnpqC9Lw938', 'PESAREAN GUNUNG KAWI', '<p style="text-align: justify;">Terdapat banyak bangunan dengan arsitektur khas Tiongkok seperti kuil atau klenteng sebagai tempat sembahyang atau melakukan ritual bagi orang-orang Kong Hu Cu. Selain itu, terdapat pasar dagangan seperti aneka lampion, baju khas Tionghoa, lilin-lilin besar, dan beragam cenderamata lainnya. Uniknya, yang menjajahkan dagangan tersebut kebanyakan orang-orang berdarah Tionghoa, membuat suasana khas Tiongkok pun semakin kental terasa. Tak sedikit pula orang Tionghoa dari luar negeri datang ke tempat ini pada hari-hari tertentu untuk melakukan ritual keagamaan seperti memohon keselamatan, giam si, ci suak, dan sebagainya.</p>', 'PESAREAN IBADAH BELANJA RAMAL NASIB HOKI', 'Rp 3.000,- ', 'Rp 3.000,- ', '-8.02288', '112.49354', 8),
(39, 6, 'gnpqC9Lw938', '<NAME>', '<p style="text-align: justify;">Gunung Kawi merupakan salah satu gunung berapi yang masih aktif dan berlokasi di Kecamatan Wonosari, Kabupaten Malang. Saat berkunjung ke Gunung Kawi suasana magis akan terasa kental. Terdapat beberapa tempat atau petilasan untuk beberapa orang yang berdoa dan memohon berkat untuk kesuksesan usaha, jodoh, dan banyak hal lainnya.</p>', 'PESAREAN IBADAH BELANJA RAMAL NASIB HOKI', 'Rp 10.000,-', 'Rp 10.000,-', '-8.00317', '112.48574', 5),
(43, 3, 'dRWNRxZcIc8', '<NAME>', '<p style="text-align: justify;"><NAME> merupakan Objek Wisata Candi yang berlokasi di Desa Candirenggo, Kecamatan Singosari, Kabupaten Malang. Menurut para ahli, Candi ini diperkirakan dibangun sekitar tahun 1300 M sebagai persembahan untuk menghormati Raja Kertanegara dari Kerajaan Singosari.</p>', 'SELFIE PENDIDIKAN SEJARAH CAGAR BUDAYA', 'N/A', 'N/A', '-7.88779', '112.66386', 5),
(50, 5, 'XNzHFvvT_JM', 'AGROWISATA KEBUN TEH WONOSARI', '<p style="text-align: justify;">Agrowisata Kebun Teh Wonosari merupakan Objek Wisata yang berlokasi di Kecamatan Lawang dan berada di lereng Gunung Arjuno. Perkebunan ini berada di ketinggian 950-1250 mdpl dan menawarkan hamparan hijau kebun teh dengan suasana yang sejuk dan damai.</p>', 'SELFIE PENDIDIKAN PIKNIK RILEKS ADVENTURE', 'Rp 10.000,-', 'Rp 10.000,-', '-7.82167', '112.6426', 6),
(65, 2, 'H08S-g-EAK8', 'PADEPOKAN SENI TOPENG MALANGAN "ASMORO BANGUN"', '<p style="text-align: justify;">Padepokan Seni Topeng Malangan "Asmoro Bangun" merupakan padepokan topeng malangan yang berlokasi di Dusun Kedungmonggo, Desa Karangpandan, Kecamatan Pakisaji, Kabupaten Malang. Padepokan Topeng Malangan ini merupakan satu-satunya padepokan yang masih sangat giat untuk mempertahankan dan mengembangkan kesenian dan kebudayaan asli Malang. Padepokan Seni Topeng Malangan "Asm<NAME>" saat ini dikelola oleh Tri Handoyo.</p>', 'TARI SENI TOPENG MALANGAN PERTUNJUKAN UKIRAN', 'FREE', 'FREE', '-8.07325', '112.58925', 3),
(75, 13, '', 'DESA WISATA PUJON KIDUL', '<p>Kabupaten Malang mempunyai berbagai macam destinasi wisata dari yang alami maupun buatan. Kebanyakan pengunjung akan mencari tempat yang enak, sejuk untuk digunakan sebagai tempat berkumpul bersama teman, sahabat dan keluarga. Salah satu lokasi yang banyak diminati oleh pengunjung adalah Desa Wisata Pujon Kidul. Suasana khas pedesaan mengelilingi tempat wisata ini dengan hamparan persawahan yang luas dan hijau. Cocok bagi pengunjung yang penat dengan suasana perkotaan. Disini juga ada beberapa spot untuk foto. Disediakan pula tempat makan dengan suasana ditengah sawah. Desa Wisata Pujon Kidul terletak di Desa Pujon Kidul, Kecamatan Pujon, Kabupaten Malang. Jarak tempuh dari Kota Malang sekitar 29 Km atau sekitar 1,5 jam perjalanan. Akses menuju ke Desa Pujon Kidul sangat mudah dijangkau meskipun melewati pemukiman penduduk.</p>', 'SELFIE RILEKS NONGKRONG DESA WISATA KULINER', 'Rp.8.000,00', 'Rp.8.000,00', '-7.855945', '112.45492', 8),
(76, 13, '', 'TAMAN REKREASI SENGKALING UMM', '<p>Taman Rekreasi Sengkaling UMM merupakan Objek Wisata yang berlokasi di Jalan Raya Mulyoagung No. 188, Kecamatan Dau, Kabupaten Malang. Taman Rekreasi Sengkaling mempunyai berbagai fasilitas yang memiliki keunggulan dengan adanya wisata air yang berasal dari sumber alami yang salah satunya adalah Kolam Tirta Alam. Tempat ini dikenakan biaya masuk Rp 25.000,- dan biaya parkir Rp 5.000,-.</p>', 'SELFIE RENANG PIKNIK RILEKS PLAYGROUND', 'Rp 25.000,-', 'Rp 25.000,-', '-7.19538', '112.58892', 2),
(77, 7, 'Ak9OWnjXvq8', '<NAME>', '<p style="text-align: justify;">Pantai Nganteb berlokasi di Desa Tumpakrejo, Kecamatan Gedangan, Kabupaten Malang. Pantai ini termasuk dibawah pengelolaan Perhutani Malang. Pantai ini sering digunakan sebagai lokasi surfing, karena ombaknya yang besar dan perairannya yang hampir tanpa batuan karang. Perlu diingat, tidak disarankan untuk berenang dipantai ini, sebagai gantinya jika ingin berenang bisa ke muara sungai yang berada di sebelah timur Pantai Nganteb. Di Pantai Nganteb ini terdapat wisata religi berupa Pendopo Agung di Gunung Batok. Rute yang ditempuh untuk mencapai Pantai Nganteb ini bisa menuju arah Terminal Gadang lanjut ke Bululawang. Di Bululawang akan bertemu dengan jalan yang bercabang, ambillah jalur kiri untuk menuju Gondanglegi. Terus menuju arah ke Pantai Balekambang. Jarak menuju Pantai Nganteb dari pintu masuk berjarak sekitar 4 km. Hati-hati melewati jalan ini karena dominasi jalan tanah berbatu dan lebih berhati-hati lagi jika hujan.</p>', 'SELFIE RILEKS CAMPING SURFING', 'Rp 10.000,-', 'Rp 10.000,-', '-8.40994', '112.57677', 2),
(78, 7, 'BEKs3LDwZ7A', '<NAME>', '<p style="text-align: justify;">Pantai Gatra berlokasi di Desa Tambakrejo, Kecamatan Sumbermanjing Wetan, Kabupaten Malang. Pantai Gatra termasuk pantai yang tersembunyi dan bebas dari berbagai macam sampah (kecuali sampah bawaan dari arus laut itu sendiri). Bebas dari berbagai macam sampah karena, di pantai ini mempunyai aturan, setiap pengunjung harap melapor ketika akan mengunjungi pantai ini. Maksud dari melapor adalah petugas akan mengecek setiap bawaan dari pengunjung dan akan dicatat. Hal ini dimaksudkan agar para pengunjung tertib dalam menjaga kebersihan pantai. Jika pengunjung lupa akan sampah bawaannya ketika dicek kembali, pengunjung akan diminta untuk kembali ke lokasi untuk mencari dan mengumpulkan sampahnya tapi jika tidak pengunjung akan dikenakan denda sebesar Rp 100.000,-/item yang dibawa. Aturan ini diberlakukan sangat ketat di pantai ini. Untuk menuju ke Pantai Gatra ini melewati Turen dan mengikuti arah ke Sendang Biru. Jalur setapak permanan telah dibangun tapi belum seluruhnya, sebagian besar jalan masih melewati tanah. Saat musim kering dan tanah keras, hanya kendaraan roda dua yang bisa masuk hingga pintu masuk, selebihnya harus jalan kaki sepanjang kurang lebih 500 m menuju Pantai Gatra. Pantai Gatra masuk dalam kawasan Clungup Mangrove Conservation (CMC).</p>', 'SELFIE CAMPING PIKNIK RILEKS KANO TRACKING ADVENTURE PANORAMA', 'Rp 10.000,-', 'Rp 10.000,-', '-8.44263', '112.66966', 2),
(79, 7, 'oHSulYh5L5s', 'PANTAI GOA CINA', '<p>Pantai Goa Cina terletak di Desa Sitiarjo, Kecamatan Sumbermanjing Wetan, Kabupaten Malang. Membutuhkan waktu kira-kira 3 jam untuk mencapai Pantai ini dari Malang Kota.</p>', 'SELFIE RILEKS CAMPING PANORAMA', 'Rp 10.000,-', 'Rp 10.000,-', '-8.44712', '112.65145', 3),
(80, 7, 'jofnfWBK1M4', 'PANTAI JEMBATAN PANJANG', '<p style="text-align: justify;">Tiket memasuki pantai ini biayanya Rp 5.000,-. Pantai Jembatan Panjang ini bersebelahan dengan Pantai Balekambang tepatnya di sebelah barat, di Desa Sumberbening, Kecamatan Bantur, Kabupaten Malang. Tempat ini cocok sekali jika digunakan untuk spot pemotretan. Akses menuju Pantai Jembatan Panjang ini rutenya sama saat kita mau menuju ke Pantai Balekambang. Pantai ini dapat dicapai melalui Kecamatan Gondanglegi dan Bantur dilanjutkan ke Desa Sumberbening. Sekitar ± 65 km di Kecamatan Bantur di sebelah selatan Kota Malang.</p>', 'SELFIE RILEKS CAMPING PANORAMA', 'Rp 5.000,-', 'Rp 5.000,-', '-8.40226', '112.53018', 3),
(81, 7, 'RsNLsFHpEZs', '<NAME>', '<p style="text-align: justify;"><NAME>sono terletak di Dusun Lenggoksono, Desa Purwodadi, Kecamatan Tirtoyudo, Kabupaten Malang. Perjalanan menuju pantai ini jika dari Malang Kota ± 3 jam ke arah Dampit dan jalannya berliku-liku, naik turun dan cukup sempit. Pantai ini cukup panjang, berpasir putih dan rindang. Banyak sekali padagang-pedagang makanan disekitar pantai. Adanya fasilitas toilet dan kamar bilas. Tersedia persewaan alat-alat snorkeling. Dan jika ingin bermalam disini, bisa mendirikan tenda. Dari Pantai Lenggoksono ini, pengunjung bisa menyebrang menggunakan kapal-kapal yang disediakan menuju ke Teluk Kletakan, Pantai Bolu-Bolu dan Banyu Anjlok.</p>', 'SELFIE RILEKS CAMPING SURFING SNORKELING PANORAMA', 'Rp 10.000,-', 'Rp 10.000,-', '-8.37237', '112.83893', 2),
(82, 7, 'oRq0Xb-P2yQ', '<NAME>', '<p style="text-align: justify;">Pantai Ngudel berlokasi di Desa Sindurejo, Kecamatan Gedangan, Kabupaten Malang, Jawa Timur. Tiket masuk untuk masuk pantai ini Rp 10.000 dan biaya parkir dikenakan biaya Rp 15.000 Beberapa hal yang dapat dilakukan dipantai ini adalah bermain air, bermain pasir pantai, minum es kelapa muda dan berfoto, bisa juga pengunjung ikut berpartisipasi dalam pelepasan tukik/penyu kecil. Tapi perlu diingat bahwa ombak di pantai ini tergolong cukup tinggi, maka dianjurkan untuk tidak berenang hingga ke tengah pantai. Pantai ini berjarak kurang lebih 60 km dari Malang dan bisa menghabiskan waktu kira-kira 2 jam perjalanan dengan kendaraan pribadi melalui rute Gadang menuju ke Turen lalu menuju arah Sumbermanjing Wetan dan ke arah Jalur Lintas Selatan.</p>', 'SELFIE RILEKS PANORAMA CAMPING', 'Rp 10.000,-', 'Rp 10.000,-', '-8.41675', '112.58593', 7),
(83, 7, 'GUuMt46pW6w', '<NAME>', '<p style="text-align: justify;">Pantai Parang Dowo berlokasi di Des<NAME>, Kecamatan Gedangan Kabupaten Malang. Di pantai ini pengunjung dapat melakukan aktifitas bermain air, bermain pasir, dan lain-lain. Untuk mencapai pantai ini harus menggunakan kendaraan pribadi, karena masih termasuk kawasan yang tidak dilalui angkutan umum.</p>', 'SELFIE RILEKS CAMPING PANORAMA', 'Rp 10.000,-', 'Rp 10.000,-', '-8.42838', '112.62536', 2),
(84, 7, 'wmgmxJoYNfQ', '<NAME>', '<p style="text-align: justify;">Pantai <NAME> terletak di Dusun <NAME>, Desa Tambakrejo, Kecamatan Sumbermanjing Wetan, Kabupaten Malang. Pantai ini berjarak sekitar 30 Km sebelah selatan Kota Malang. Pantai ini berdekatan dengan Pulau Sempu yang dapat ditempuh hanya beberapa menit menggunakan perahu dari Pantai <NAME>. Pantai ini menjadi tempat berlabuhnya perahu-perahu nelayan dan pelelangan ikan.</p>', 'SELFIE PIKNIK RILEKS CAMPING ADVENTURE PANORAMA', 'Rp 10.000,-', 'Rp 10.000,-', '-8.43183', '112.68647', 4),
(85, 7, 'RwJI7TpeVvU', '<NAME>', '<p style="text-align: justify;">Pantai Sendiki terletak di Desa Tambakrejo, Sumbermanjing Wetan, Kabupaten Malang. Pantai ini masih terletak pada satu garis dengan Pantai Sendang Biru. Pengunjung dimanjakan oleh hamparan pasir putih dan panorama yang sangat indah. Pantai ini juga terdapat area untuk camping. Jalan menuju pantai ini mudah diakses untuk kendaraan roda dua maupun roda empat. Jarak dari pusat Kota Malang sekitar 60 km dan waktu yang ditempuh sekitar 3 jam perjalanan.</p>', 'SELFIE RILEKS CAMPING PIKNIK TRACKING ADVENTURE PANORAMA', 'Rp 10.000,-', 'Rp 10.000,-', '-8.41681', '112.72594', 3),
(86, 7, 'ADWQfRWVXz4', 'PANTAI SIPELOT', '<p style="text-align: justify;">Pantai Sipelot terletak di Desa Pujiharjo, Kecamatan Tirtoyudo, Kabupaten Malang. Pantai ini mempunyai pemandangan yang mempesona, mempunyai garis pantai yang panjang, pasir yang kecoklatan. Jika ingin berkemah, pantai ini cocok untuk spot berkemah. Disebelah timur pantai bisa menikmati air payau yang kehijauan yang indah yang dikelilingi perbukitan kapur dan tebing-tebing tinggi. Disini pengunjung bisa berfoto, camping, snorkeling. Akses menuju pantai ini jalannya agak sempit, maka disarankan bagi para pengunjung untuk menggunakan kendaraan pribadi terutama motor karena lebih praktis. Jalannya berkelok-kelok dan naik turun. Rutenya dari Malang Kota ke arah selatan menuju Gadang - Kecamatan Bululawang - Kecamatan Turen - Kecamatan Dampit - Kecamatan Tirtoyudo ikuti petunjuk menuju Desa Pujiharjo. Papan petunjuk Pantai Sipelot sudah tersedia, jadi pengunjung dapat dengan mudah sampai ke lokasi.</p>', 'SELFIE RILEKS CAMPING PANORAMA', 'N/A', 'N/A', '-8.38061', '112.89923', 2),
(87, 7, '_DC9KSCgioU', 'PANTAI TAMBAN', '<p style="text-align: justify;">Pantai Tamban ini berlokasi di Desa Tambakrejo, Kecamatan Sumbermanjing, Kabupaten Malang. Banyak penunjuk jalan menuju ke pantai ini bagi para pengunjung yang belum pernah mengunjungi pantai ini sebelumnya. Pertama menuju ke arah selatan Malang Gadang - Bululawang - Turen - Sumbermanjing Wetan atau mengikuti petunjuk jalan Pantai Sendang Biru. Jalan ini cukup baik untuk dilalui kendaraan roda 2 maupun roda 4. Para pengunjung bisa melakukan aktifitas seperti memancing.</p>', 'SELFIE RILEKS KULINER CAMPING PANORAMA', 'Rp 10.000,-', 'Rp 10.000,-', '-8.41729', '112.70987', 3),
(88, 7, 'PfpKI-dJbho', 'PANTAI TIGA WARNA', '<p style="text-align: justify;">Pantai Tiga Warna terletak di Desa Tambakrejo, Kecamatan Sumbermanjing Wetan, Kabupaten Malang. Pantai yang mempunyai pasir putih yang lembut dan juga air laut yang jernih dan banyak terumbu karang menjadi suatu hal yang dicari oleh para pengunjung. Bermain air di sini cukup aman karena garis pantainya yang cukup panjang. Rute untuk menuju Pantai Tiga Warna melalui Gadang - Bululawang - Turen - ikuti arah ke Pantai <NAME>. Setelah perjalanan selama 2 jam, ada persimpangan menuju Pantai <NAME> atau Pantai <NAME>. Lurus sekitar 2 km sampai menemukan persimpangan jalan. Ambil arah kiri menuju TPI (Tempat Pelelangan Ikan). Nanti akan masuk ke perkampungan warna dan ambillah arah ke Clungup Mangrove Conservation (CMC). Mangrove Conversation dan Pantai Clungup adalah pintu masuk pertama sebelum memasuki area PantaI Tiga Warna. Jika menggunakan motor, sangat bisa melalui jalan sempit. Tapi jika menggunakan mobil harus parkir sekitar 1 Km sebelum masuk ke Clungup Mangrove Conservation lalu dilanjutkan dengan jalan kaki. Jika ingin mengunjungi Pantai Tiga Warna ini pengunjung diwajibkan untuk memakai jasa guide. Pantai Tiga Warna termasuk pantai yang tersembunyi dan bebas dari berbagai macam sampah (kecuali sampah bawaan dari arus laut itu sendiri). Bebas dari berbagai macam sampah karena, di pantai ini mempunyai aturan, setiap pengunjung harap melapor ketika akan mengunjungi pantai ini. Maksud dari melapor adalah petugas akan mengecek setiap bawaan dari pengunjung dan akan dicatat. Hal ini dimaksudkan agar para pengunjung tertib dalam menjaga kebersihan pantai. Jika pengunjung lupa akan sampah bawaannya ketika dicek kembali, pengunjung akan diminta untuk kembali ke lokasi untuk mencari dan mengumpulkan sampahnya tapi jika tidak pengunjung akan dikenakan denda sebesar Rp 100.000,-/item yang dibawa. Aturan ini diberlakukan sangat ketat di pantai ini. 
Untuk berkunjung ke pantai ini, pengunjung dibatasi waktu selama 2 jam.</p>', 'SELFIE SNORKELING PIKNIK RILEKS TRACKING ADVENTURE PANORAMA', 'Rp 10.000,-', 'Rp 10.000,-', '-8.43914', '112.67779', 3),
(89, 7, 'GlljLJYXeAk', 'PANTAI UNGAPAN', '<p style="text-align: justify;">Pantai Ungapan berlokasi di Desa Gajahrejo, Kecamatan Gedangan, Kabupaten Malang. Jika dari Kota Malang langsung saja ke selatan melewati Gadang, lalu ikuti petunjuk jalan yang mengarah ke Bululawang dan Krebet. Sampai di pertigaan Krebet, ikuti petunjuk jalan Turen dan Sumbermanjing Wetan. Sampai di pertigaan Turen belok ke arah selatan menuju Pantai Sendang Biru. Ikuti sampai di Jalur Lintas Selatan (JLS), setelah itu ikuti petunjuk ke Pantai Ungapan.</p>', 'SELFIE RILEKS CAMPING PANORAMA', 'Rp 10.000,-', 'Rp 10.000,-', '-8.43648', '112.64149', 5),
(90, 7, '1oReZ2miPLU', 'PANTAI WEDI AWU', '<p style="text-align: justify;">Pantai Wedi Awu ini termasuk pantai yang masi belum banyak diketahui masyarakat tetapi bagi para traveller, pantai ini mampu memikat para pengunjung. Pantai yang terletak di Dusun Balearjo, Desa Purwodadi, Kecamatan Tirtoyudo, Kabupaten Malang ini mempunyai gelombang yang cukup baik untuk olahraga selancar. Lokasi pantai ini bersebelahan dengan Pantai Lenggoksono. Jika pengunjung menemukan jalan bercabang dua, jika lurus ke Pantai Lenggoksono, jika Pantai Wedi Awu ambil jalan cabang sebelah kirinya. Namun hati- hati jalan menuju Pantai Wedi Awu ini agak sempit, bahaya bila tidak berhati-hati ketika setelah hujan. Pantai Wedi Awu berjarak sekitar 74 km dari pusat Kota Malang dan memakan waktu ± 2,5 - 3 jam perjalanan.</p>', 'SELFIE RILEKS CAMPING PANORAMA', 'N/A', 'N/A', '-8.37620', '112.84647', 2),
(91, 5, 'XumfM6Poavw', 'AGRO WISATA PETIK JERUK SELOREJO DAU', '<p style="text-align: justify;"><NAME> terletak di Desa Selorejo, Kecamatan Dau, Kabupaten Malang. Disini pengunjung bisa memetik kemudian menikmati buah jeruk langsung dari pohonnya dan juga akan disuguhi dengan pemandangan alam perkebunan jeruk yang cukup luas, asri dan udara yang sejuk.</p>', 'SELFIE PENDIDIKAN WISATA PETIK ADVENTURE PERTANIAN', 'FREE', 'FREE', '-7.93911', '112.55392', 12),
(92, 5, 'G-h3GEfUQDQ', 'AGRO SALAK SWARU (PUSAT OLEH-OLEH)', '<p style="text-align: justify;">Terletak di Desa Swaru, Desa Kademangan, Kecamatan Pagelaran, Kabupaten Malang dengan luas lahan sekitar 300 Ha dan produksi kira-kira sekitar 38 ton/Ha. Salak merupakan buah yang mempunyai banyak kandungan gizi yang cukup lengkap. Selain dapat menikmati buah salak sepuasnya, pengunjung berkesempatan melihat proses pembuatan kripik dan jenang/dodol buah yang dibuat oleh salah satu pabrik olahan salak. Di Agro Salak Swaru ini pengunjung dapat membeli oleh-oleh olahan salak seperti kopi biji salak, dodol salak, kurma salak, manisan salak, teh salak, sirup salak, sari salak, dan selai salak.</p>', 'EDUKASI PENDIDIKAN PENGOLAHAN SALAK', 'FREE', 'FREE', '-8.21243', '112.60898', 6),
(93, 4, 'JyBPrqV3NKw', 'BONDERLAND', '<p style="text-align: justify;">Taman wisata air ini terletak di Jalan Embong Turi, Dusun Bunder, Desa Genengan, Kecamatan Pakisaji, Kabupaten Malang. Tempat ini mudah dijangkau, hanya berjarak kurang lebih 8 km dari pusat Kota Malang dan akses jalan sangat mudah. tiket masuk untuk hari-hari biasa Rp 15.000,- sedangkan untuk waktu weekend Rp 20.000,- dan untuk biaya parkir gratis. Pada hari libur, pengunjung dapat menikmati “Special Show Theater” yang akan ditampilkan oleh team kreatif yang bersangkutan. Di Bonderland ini terdapat Kampoeng Seni yang digunakan sebagai tempat untuk memamerkan hasil seni. Tempat wisata ini buka mulai pukul 07.00 – 17.00 WIB.</p>', 'SELFIE RENANG RILEKS', 'Rp 15.000,-', 'Rp 15.000,-', '-8.04895', '112.60855', 4),
(94, 4, 'vh1Co7N2kgo', 'HAWAI WATERPARK', '<p style="text-align: justify;">Hawai Waterpark merupakan salah satu permainan air terbesar yang berlokasi di Jalan Graha Kencana Raya, Desa Banjararum, Kecamatan Singosari, Kabupaten Malang, Jawa Timur. Harga tiket untuk masuk ke lokasi ini, untuk Weekday RP 75.000,- dan Weekend Rp 100.000,-. Jam operasional Hawai Waterpark ini Senin - Jumat 10.00 - 16.00 WIB, Sabtu- Minggu 08.00 - 17.00 WIB dan hari libur nasional 08.00 - 17.00 WIB. Banyak sekali jenis-jenis wahana air yang disediakan disini antara lain Hawai Water House, Waikiki Beach, Ekolu Slide, Hula-Hula Slide, Water Plaza, dan lain-lain.</p>', 'SELFIE RILEKS PIKNIK ADVENTURE RENANG', 'Rp 75.000,-', 'Rp 75.000,-', '-7.92346', '112.65825', 3),
(95, 4, 'FLBdGAGNHqw', 'PETUNGSEWU WILDLIFE EDUCATION CENTER (P-WEC)', '<p style="text-align: justify;">P-WEC (Petungsewu Wildlife Education Center) adalah pusat pendidikan informal tentang konservasi alam, outbound dan green adventure yang sudah berdiri sejak 2003 di Desa Petungsewu, Kecamatan Dau, Kabupaten Malang. P-WEC merupakan sebuah program kerjasama antara Petungsewu Adventure dan lembaga Protection of Forest and Fauna (PROFAUNA). Akses untuk mencapai ke lokasi cukup mudah bisa ditempuh dengan mobil, motor bahkan angkutan umum. Letaknya agak tersembunyi di dalam desa yang melewati hutan, kebun dan sawah. Tapi jangan khawatir, terdapat papan petunjuk bertuliskan P-WEC sebagai penanda arah untuk menuju ke lokasi.</p>', 'SELFIE EDUKASI OUTBOND ADVENTURE TRACKING', 'FREE', 'FREE', '-7.95214', '112.54322', 6),
(96, 3, 'sX8WFWw-bQ4', '<NAME>', '<p style="text-align: justify;">Candi Sumberawan terletak di Desa Toyomarto, Kecamatan Singosari, Kabupaten Malang. Candi ini hanya berbentuk seperti stupa Buddha peninggalan Kerajaan Singosari. Suasana di area Candi Sumberawan ini sangat teduh dan tenang cocok sebagai tempat meditasi. Di lokasi candi ini ada sumber air yang disebut oleh warga sekitar adalah sumber air bertuah dan airnya sangat bening. Candi Sumberawan digunakan sebagai tempat ibadah umat Buddha dan Hindu dan juga sebagai tempat untuk ritual kejawen. Candi ini diperkenalkan kembali dengan nama “Kampoeng Wisata Sumberawan” oleh tim mahasiswa yang menggandeng beberapa pihak lain, yakni Perhutani dan Lembaga Kemitraan Desa Pengelola Hutan (LKDPH) Desa Toyomarto. Letak candi ini sangat unik, yakni terletak ditengah-tengah hutan, dan hutan tersebut terletak di tengah padatnya pemukiman penduduk. Rute untuk mencapai candi ini ikuti saja arah Malang - Pandaan sampai melewati Pasar Singosari. Setelah Pasar Singosari akan ada pertigaan dengan arah Polsek Singosari, lalu belok di pertigaan tersebut dan tiba di Jl. Kartanegara. Tepat di perempatan dekat Candi Singosari akan ada papan petunjuk arah menuju Candi Sumberawan. Jika dari Candi Singosari hanya berjarak sekitar 6 Km menuju Candi Sumberawan dan lebih baik bertanya kepada penduduk sekitar. Untuk sampai ke kompleks candi, pengunjung harus memarkir kendaraannya ditempat yang disediakan dan lanjut dengan berjalan kaki dengan melewati jembatan kecil. Jangan lupa untuk mengisi buku tamu dan membayar biaya retribusi sukarela.</p>', 'SELFIE PENDIDIKAN SEJARAH CAGAR BUDAYA OUTBOND PIKNIK CAMPING RILEKS', 'Rp 5.000,-', 'Rp 5.000,-', '-7.85534', '112.64483', 6),
(97, 3, 'xCsk73MPBCQ', '<NAME>', '<p style="text-align: justify;">Candi Badut terletak di Desa Karangbesuki, Kecamatan Dau, Kabupaten Malang. Jika berwisata di Candi ini, kita akan dapat melihat bahwa Candi ini menghadap ke arah barat dan dikelilingi oleh gunung- gunung antara lain Gunung Kawi di sebelah selatan, Gunung Arjuna di barat, Gunung Tengger di sebelah utara dan Gunung Semeru di sebelah timur. Candi Badut digunakan untuk kegiatan rutin ibadah sebelum Hari Raya Nyepi. Cagar budaya ini buka setiap hari mulai pukul 08.00 – 16.00 WIB dan pengunjung tidak dikenakan tarif khusus. Rute yang harus ditempuh jika menggunakan kendaraan pribadi dari Kota Malang yakni menuju ke jalan Retawu tepat di sebelah utara Museum Brawijaya. Ikuti jalan ke arah barat melewati Jalan Bondowoso, Jalan Raya Tidar, Jalan Puncak Mandala, Jalan Puncak Yamin, Jalan Esberg lalu belok kanan melewati ujung Jalan Himalaya. Dari sini kita dapat menemui Jalan Candi VD, dimana lokasi candi berada di kiri jalan, tepat di depan TK Dharma Wanita II Karangbesuki. Jika menggunakan kendaraan umum dari Kota Malang kita bisa naik mikrolet jurusan Arjosari - Tidar (AT) dan turun di pojok Jalan Himalaya. Dari Jalan Himalaya harus berjalan ke arah utara kurang lebih 300 meter hingga sampai di lokasi Candi.</p>', 'SELFIE PENDIDIKAN SEJARAH CAGAR BUDAYA', 'FREE', 'FREE', '-7.9578', '112.59852', 2),
(98, 2, 'spsXAZ0QzqI', 'WENDIT WATERPARK', '<p style="text-align: justify;">Wendit Waterpark merupakan Objek Wisata yang berlokasi di Desa Mangliawan, Kecamatan Pakis, Kabupaten Malang. Air di Wendit Water Park ini berasal dari sumber alami yang bernama Sendang Widodaren. Di Wendit Water Park banyak kera berekor panjang. Untuk mencapai ke lokasi ini menempuh jarak kurang kebih 8 km dari pusat kota Malang. Wendit waterpark ini dibuat dengan biaya masuk sebesar Rp 18.200,- dan biaya parkir dikenakan Rp 5.000,-.</p>', 'PIKNIK PEMANDIAN RENANG RILEKS BERENDAM SELFIE', 'Rp 18.200,-', 'Rp 18.200,-', '-7.95216', '112.6741', 2),
(99, 2, 'FfAnn3e25NY', 'WISATA RELIGI SUMUR PITU', '<p style="text-align: justify;">Wisata Religi Sumur Pitu terletak di Desa Sumberbening, Kecamatan Bantur, Kabupaten Malang dan berada di dalam kompleks Pantai Jembatan Panjang. Wisata Religi Sumur Pitu terkenal dengan hutan bambunya yang unik. Di Sumur Pitu ini biasanya pengunjung melakukan aktivitas berdoa maupun bertapa.</p>', 'BERDOA BERTAPA SUMUR AIR SUCI', 'FREE', 'FREE', '-8.40167', '112.53159', 4),
(100, 2, 'gaGJDW-DPUo', 'DESA WISATA NGADAS', '<p style="text-align: justify;">Desa WIsata Ngadas berada di dalam wilayah teritori Taman Nasional Bromo Tengger Semeru, maka dari itu desa ini selalu diselimuti kabut tipis. Desa ini merupakan desa tertinggi di Jawa karena topografi Desa Ngadas sendiri adalah pegunungan dengan iklim Montana. Suhu disini sekitar 0 – 20 derajat celcius. Desa ini satu-satunya desa yang didiami oleh Suku Tengger. Tersedia banyak rumah-rumah penduduk yang dijadikan homestay bagi pengunjung yang ingin bermalam, dapat juga menikmati matahari terbit di Penanjakan atau Bromo, dll. Untuk mencapai tempat wisata ini, dari Kota Malang setidaknya pengunjung harus menempuh jarak sekitar 30 kilometer dan harus melewati jalanan yang terbilang curam sejauh kurang lebih dua jam perjalanan.</p>', 'SELFIE DESA WISATA DI ATAS AWAN RILEKS PERTANIAN', 'N/A', 'N/A', '-7.98286', '112.90881', 3),
(101, 1, 'KNtEDUS51sg', 'SUMBER PITU', '<p style="text-align: justify;">Sumber Pitu merupakan Objek Wisata yang berlokasi di Desa Duwet Krajan, Kecamatan Tumpang, Kabupaten Malang. Dinamakan sumber pitu karena di lokasi tersebut ada tujuh sumber mata air. Selain terdapat sumber, di Sumber Pitu juga terdapat goa dan air terjun. Menikmati objek wisata ini tidak dikenakan biaya.</p>', 'SELFIE SUMBER ADVENTURE TRACKING BERENDAM UNDERWATER AIR TERJUN', 'FREE', 'FREE', '-8.0136', '112.82169', 20),
(102, 1, 'ruTAGSwp11g', '<NAME>', '<p style="text-align: justify;">Sumber Maron merupakan salah satu destinasi wisata yang banyak di datangi pengunjung dari warga lokal maupun internasional yang ingin merasakan sensasi kesegaran air yang sangat bening dan merasakan sensasi air terjun kecil yang ada dan pengunjung dapat menikmati sensasi river tubing disini dengan tarif yang murah hanya Rp 5.000,- untuk ban sepuasnya. Jangan khawatir untuk bermain river tubing, karena disini telah ada penjaga untuk membantu jika diperlukan. Di lokasi ini juga banyak pedagang-pedagang yang menjajakan jualannya seperti makanan, minuman bahkan keperluan yang dibutuhkan untuk bermain air. Untuk tarif masuk ke Sumber Maron ini sangat murah, hanya Rp 3.000,-. Singkat sejarah Sumber Maron ini merupakan sumber air jernih yang berada diantara sawah dan hutan-hutan disekitarnya. Lokasinya terletak di Dusun Adiluwih, Desa Karangsuko, Kecamatan Pagelaran, Kabupaten Malang.</p>', 'SELFIE RILEKS PIKNIK TRACKING ADVENTURE TUBING', 'Rp 3.000,-', 'Rp 3.000,-', '-8.1655', '112.59374', 20),
(104, 1, 'U2-K6iTHQSY', 'BEDENGAN ADVENTURE PARK', '<p style="text-align: justify;">Bedengan Adventure Park merupakan Bumi Perkemahan yang berlokasi di Dusun Selokerto, Desa Selorejo, Kecamatan Dau, Kabupaten Malang. Di tempat ini dapat digunakan untuk camping maupun hanya sekedar ingin menikmati hutan pinus dengan menggunakan hammock. Menikmati keindahan alam ini tidak dibanderol biaya.</p>', 'SELFIE CAMPING ADVENTURE TRACKING HAMMOCK', 'FREE', 'FREE', '-7.93954', '112.53057', 9),
(105, 1, '_0RL25VLQgQ', 'WADUK SELOREJO', '<p style="text-align: justify;">Waduk dengan ketinggian 600 mdpl ini terletak di Kecamatan Ngantang, Kabupaten Malang, Jawa Timur. Waduk ini dikelilingi oleh Gunung Kelud, Gunung Anjasmoro dan Gunung Kawi. Untuk menuju ke lokasi ini ditempuh dengan rute yang mudah. Dengan menggunakan kendaraan pribadi bisa melewati jalur Kabupaten Malang - Batu - Selorejo ± 60 km. Dengan menggunakan Bus, bisa menggunakan jalur Kediri - Malang atau Jombang - Malang, begitupun sebaliknya. Disini juga tersedia Food Center dan berbagai kios-kios yang memperjualbelikan berbagai souvenir.</p>', 'SELFIE RILEKS MANCING KULINER', 'Rp 13.000,-', 'Rp 13.000,-', '-7.87739', '112.36103', 7),
(106, 1, 'kZVlcWh35GI', 'BENDUNGAN KARANGKATES', '<p style="text-align: justify;">Bendungan Karangkates merupakan Objek Wisata yang berlokasi di Kecamatan Sumberpucung, Kabupaten Malang. Bendungan ini juga bisa disebut Waduk Ir. Sutami, Waduk Karangkates, atau Bendungan Sutami. Air bendungan ini berasal dari mata air di Gunung Arjuno dan ditambah air hujan. Untuk memasuki kawasan wisata ini dikenakan biaya masuk sebesar Rp 7.000,- dan biaya parkir sebesar Rp 4.000,-.</p>\r\n<p style="text-align: justify;"> </p>', 'SELFIE MANCING PIKNIK BELAJAR RENANG', 'Rp 7.000,-', 'Rp 7.000,-', '-8.15589', '112.4494', 9),
(107, 1, 'okTNVnvK5Ao', 'COBAN BIDADARI', '<p style="text-align: justify;">Coban Bidadari merupakan destinasi wisata air terjun yang berada di kawasan pintu gerbang Taman Nasional Bromo Tengger Semeru. Wisata air terjun ini terletak di Desa Gubuk Klakah, Kecamatan Poncokusumo, Kabupaten Malang. Di Coban Bidadari juga terdapat tempat makan bernama "Cafe in Puncak" yang dapat digunakan untuk menikmati pemandangan Coban Bidadari dari atas.</p>', 'SELFIE ADVENTURE TRACKING SPOT FOTO RILEKS', 'Rp 10.000,-', 'Rp 10.000,-', '-8.0061', '112.86482', 4),
(108, 1, '_Mokaf5qK2Y', 'COBAN JAHE', '<p style="text-align: justify;">Coban Jahe, sekilas pasti anda akan bertanya-tanya kenapa dinamakan Coban Jahe? Kebanyakan orang pasti akan mengira nama air terjun diambil dari salah satu nama rempah-rempah namun ternyata nama Jahe dimaksudkan nama yang dibuat untuk mengenang perjuangan pahlawan kemerdekaan RI sekitar tahun 1947- 1948. Nama Jahe diambil dari bahasa jawa ‘Pejahe’ yang berarti meninggal dunia. Coban ini merupakan salah satu wisata air terjun yang terdapat di Kabupaten Malang, tepatnya di Dusun Begawan , Desa Pandansari Lor, Kecamatan Jabung dan masih satu kawasan dengan kawasan Perhutani Unit II RPH Sukopuro - Jabung Kabupaten Malang. <NAME> menyuguhkan kealamian alam dengan banyaknya dedaunan dan rerumputan yang dibiarkan tumbuh bebas. Dilokasi ini juga banyak tumbuh pohon mahoni yang menambah suasana kesejukan alam <NAME>he. Objek wisata ini berjarak sekitar 23 Km dari Kota Malang dan dapat ditempuh dengan kendaraan roda 2 atau roda 4 dengan mengambil arah ke Tumpang. Sesampainya di Tumpang belok ke kiri ke Desa Pandansari Lor, Dusun Begawan. Sampai di pintu masuk Coban Jahe, perjalanan ditempuh cukup sulit ditempuh dengan kendaraan bermotor karena berupa jalan setapak tanah dan ada beberapa bagian jalan yang makadam.</p>', 'SELFIE TRACKING ADVENTURE COBAN HAMMOCK RAFTING', 'N/A', 'N/A', '-7.96946', '112.80299', 6),
(109, 1, '2abq457lJRM', '<NAME>', '<p style="text-align: justify;"><NAME> merupakan Objek Wisata yang berlokasi di Desa Gubuk Klakah, Kecamatan Poncokusumo, Kabupaten Malang. Coban Pelangi mengalir dari sebuah tebing dengan ketinggian 30 m. Bila beruntung, para pengunjung juga bisa menyaksikan pelangi yang terbias dari pucuk-pucuk tebing. Biaya yang dikeluarkan untuk wisatawan domestik sebesar Rp 10.000,- dan untuk wisatawan internasional sebesar Rp 15.000,-. Untuk biaya parkir dikenakan biaya hanya Rp 5.000,- saja. Jika berangkat dari arah Malang, letak coban searah dengan Bromo. Jadi sebelum mencapai Bromo, akan dijumpai sebuah papan nama dimana letak air Coban Pelangi berada. Sesampai dilokasi, wisatawan diharuskan untuk berjalan kaki sejauh kurang lebih 1 km menuju lokasi Coban Pelangi.</p>', 'SELFIE ADVENTURE TRACKING CAMPING AIR TERJUN', 'Rp 10.000,-', 'Rp 15.000,-', '-8.01149', '112.86535', 5),
(110, 1, '2xc8JsFMAnc', '<NAME>', '<p style="text-align: justify;">C<NAME> merupakan Objek Wisata yang berlokasi di Desa Pandesari, Kecamatan Pujon, Kabupaten Malang. Air Terjun di Coban Rondo memiliki ketinggian 84 m dan merupakan wisata air terjun yang paling mudah di tempuh. Biaya masuk disini dibedakan antara wisatawan domestik dan internasional. Untuk pengunjung domestik dikenakan tarif masuk Rp 20.000,- sedangkan untuk wisatawan internasional dikenakan tarif Rp 35.000,-. Untuk parkir dikenakan biaya Rp 10.000,-.</p>', 'SELFIE RILEKS ADVENTURE TRACKING CAMPING PIKNIK GATHERING AIR TERJUN', 'Rp 20.000,-', 'Rp 35.000,-', '-7.88503', '112.47729', 5),
(111, 1, 'xOiavL7On48', 'EKO WISATA BOON PRING ANDEMAN', '<p style="text-align: justify;">Eko Wisata Boon Pring Andeman merupakan salah satu tempat rekreasi alam dan edukasi yang terletak di Desa Sanankerto, Kecamatan Turen, Kabupaten Malang. Awalnya taman wisata ini adalah sumber mata air besar dan masih alami dan banyak dimanfaatkan penduduk sekitar untuk pengairan sawah. Akses menuju Taman Wisata ini jalannya sudah cukup mulus. Jika dari Kota Malang, dapat ditempuh dengan menggunakan kendaraan pribadi (roda 4/roda 2/bus pariwisata) sekitar 40 km. Apabila menggunakan angkutan umum (mikrolet/minibus) dengan arah tujuan Kecamatan Turen dari Terminal Pasar Gadang dengan waktu tempuh kurang lebih 35 menit. Tiket masuk dan tiket parkir lokasi wisata ini cukup murah.</p>', 'SELFIE RILEKS PIKNIK ADVENTURE', 'Rp 5.000,-', 'Rp 5.000,-', '-8.15577', '112.76196', 3),
(112, 1, '8ghj4SyTB1M', '<NAME>', '<p style="text-align: justify;">Gunung Bromo merupakan gunung yang paling banyak dikunjungi wisatawan di Provinsi Jawa Timur untuk menikmati sunrise. Gunung Bromo mempunyai ketinggian 2.392 mdpl. Di Gunung Bromo pengunjung bisa menikmati objek wisata lain seperti Puncak Seruni, Bukit Kingkong, dan Bukit Cinta untuk menikmati sunrise, Kawah Bromo, Pura Luhur Poten, Pasir Berbisik, dan Bukit Teletubbies. Gunung Bromo termasuk gunung berapi yang masih aktif. Letusan terbesar terjadi pada tahun 1974 dan kembali meletus pada tahun 2010. Pengunjung dapat melihat pemandangan gunung dengan adanya sebuah kawah. Untuk mengunjungi kawasan wisata Gunung Bromo ini, pengunjung diwajibkan untuk menyewa Jeep karena melihat kondisi alam yang akan susah jika dilewati kendaraan pribadi. Oleh karena itu, di Gunung Bromo banyak sekali persewaan Jeep untuk membantu para wisatawan mengelilingi kawasan wisata ini. Ada banyak rute yang ditempuh untuk dapat sampai ke kawasan wisata Gunung Bromo ini, bisa ditempuh dari Kabupaten Malang, Kabupaten Probolinggo dan Kabupaten Pasuruan. Sepanjang jalan mendekati lokasi Gunung, akan ada banyak sekali penginapan bagi para wisatawan, mulai dari homestay, villa hingga hotel. Untuk masuk ke kawasan wisata ini, wisatawan akan dikenakan tarif masuk Rp 27.500,-/orang.</p>', 'SELFIE PANORAMA SUNRISE PENANJAKAN ADVENTURE LAUTAN PASIR JEEP TRACKING', 'Rp 27.500,-', 'Rp 217.500,-', '-7.94249', '112.95301', 2),
(113, 1, 'apCw8s6yRKw', '<NAME>', '<p style="text-align: justify;">Hutan Pinus Semeru terletak di Desa Sumberputih, Kecamatan Wajak, Kabupaten Malang, Jawa Timur. Masuk area wisata ini tidak dikenakan biaya alias gratis. Udara disini segar dan cukup nyaman untuk berlama-lama di tempat ini. Hutan Pinus Semeru juga cocok dijadikan spot foto bagi para penggila foto untuk di upload di sosial media serta didukung juga adanya banyak dan bermacam-macam spot untuk berfoto misalnya ayunan, rumah pohon dan lain-lain. Daya tarik dari kejauhan adalah terlihat banyaknya payung- payung yang bergantungan di antara pepohonan. Pengunjung juga bisa membawa ataupun menyewa hammock untuk digantung diantara pohon pinus. Hammock disini dikenakan biaya Rp 7.000,-. Jangan khawatir akan datangnya lapar dan haus, karena disini banyak sekali warung-warung di sepanjang jalan menuju area wisata. Jalan menuju ke Hutan Pinus Semeru ini cocok sekali bisa berkunjung kesini mengendarai mobil Off Road ataupun motor Trail. Lokasi Hutan Pinus Semeru ini sekitar 40 km dari kota Malang. Rute menuju Hutan Pinus Semeru bisa mengikuti petunjuk arah menuju Bululawang. Jika sampai pertigaan Krebet belok ke kiri arah Jalan Raya Bakalan. Sampai pertigaan Jalan Raya Kedok, akan menemukan Masjid Tibar. Setelah melewati masjid itu, lalu menuju arah Desa Sumberputih. Di sana akan ada petunjuk arah ke arah Hutan Pinus Semeru.</p>', 'SELFIE RILEKS HAMMOCK CAMPING', 'FREE', 'FREE', '-8.14647', '112.82597', 5),
(114, 1, 'M29Sv7KN1yw', 'KAMPOENG RIVER TUBING PAMOTAN', '<p style="text-align: justify;">Kampoeng River Tubing Pamotan terletak di Desa Pamotan, Kecamatan Dampit, Kabupaten Malang. Di Kampoeng River Tubing Pamotan terdapat wisata river tubing dengan jalur sepanjang 3,8 km. Selain itu di kampung ini juga terdapat wisata edukasi dengan menginap di rumah warga sekitar Kampoeng River Tubing </p>', 'SELFIE TRACKING ADVENTURE TUBING EDUKASI CAMPING', 'FREE', 'FREE', '-8.18838', '112.75468', 4),
(115, 1, '-5ct2YUoLXo', 'LEMBAH TUMPANG RESORT', '<p style="text-align: justify;">Lembah Tumpang Resort merupakan Objek Wisata yang berlokasi di Dusun Nglanggang, Desa Slamet, Kecamatan Tumpang, Kabupaten Malang. Lembah Tumpang Resort ini masih dalam proses pembangunan tetapi sudah banyak tempat yang bisa digunakan untuk berwisata. Kecamatan Tumpang merupakan pintu akses menuju kawasan wana wisata Taman Nasional Bromo Tengger. Datangnya ribuan wisatawan dan traveller dari mancanegara maupun dalam negeri, menarik investor untuk mendirikan sebuah Resort yang bernama Lembah Tumpang. Resort ini dibangun mulai tahun 2014 dan ditargetkan akan rampung tahun 2018. Di resort ini, mata pengunjung akan disuguhkan dengan nuansa pedesaan jaman kerajaan Hindu Jawa karena ornamen-ornamennya dan arsitekturnya kental dengan ornamen arca kerajaan. Di dalam Lambah Tumpang Resort, terdapat candi dan air terjun yang masih belum ada namanya. Meskipun Lembah Tempang ini sebuah resort, ketika memasuki kawasan ini tetap akan dikenakan biaya sebesar Rp 30.000,- dan biaya parkir Rp 5.000,-.</p>', 'SELFIE PIKNIK RENANG GATHERING RILEKS ADVENTURE', 'Rp 30.000,-', 'Rp 30.000,-', '-7.99458', '112.73803', 3),
(116, 1, 'P0l6DAT25Cc', 'NDAYUNG RAFTING', '<p style="text-align: justify;">Ndayung Rafting merupakan Objek Wisata yang berlokasi di Desa Gubuk Klakah, Kecamatan Poncokusumo, Kabupaten Malang. Letak Ndayung Rafting sendiri berada di aliran Sungai Amprong yang merupakan kawasan wisata alam Coban Pelangi yang berada di Taman Nasional Bromo Tengger Semeru. Selain untuk melakukan kegiatan rafting, di Ndayung Rafting terdapat fasilitas paket Bromo Sunrise untuk pengunjung yang menginap di Ndayung Rafting. Untuk sampai ke lokasi ini tidak dibutuhkan waktu yang cukup lama, hanya 45 menit saja dari bandara Abdurrahman Saleh atau 90 menit dari Kota Malang dengan berkendara. Kondisi jalan naik turun tapi jalannya halus dan beraspal. Mengunjungi lokasi ini hanya akan dikenakan biaya parkir saja sebesar Rp 5.000,-.</p>', 'RAFTING ADVENTURE SELFIE TRACKING CAMPING GATHERING', 'FREE', 'FREE', '-8.01406', '112.85337', 3),
(117, 1, 'DhqcjV2o234', 'PEMANDIAN KENDEDES', '<p style="text-align: justify;">Pemandian Kendedes merupakan Objek Wisata yang berlokasi di Jalan Kendedes, Desa Candirenggo, Kecamatan Singosari, Kabupaten Malang. Konon, pemandian ini dipercaya sebagai tempat mandi dari Putri Ken Dedes yang terkenal akan kecantikannya tersebut. Hal ini menimbulkan kepercayaan di masyarakat bahwa dengan mandi di pemandian ini maka akan membuat awet muda. Biaya yang dikeluarkan untuk menikmati suasana di Pemandian Kendedes sebesar Rp 10.000,-.</p>', 'RENANG SELFIE RILEKS PIKNIK', 'Rp 10.000,-', 'Rp 10.000,-', '-7.88154', '112.66017', 18),
(118, 4, 'ZzSTyEcfIcM', 'WISATA PETIK MADU AGRO TAWON RIMBA RAYA', '<p style="text-align: justify;">Wisata Petik Madu Agro Tawon Rimba Raya merupakan Objek Wisata yang berlokasi di Puri Kencana, Desa Bedali, Kecamatan Lawang, Kabupaten Malang. Wisata Agro ini dapat dijadikan sebagai tempat wisata edukasi tentang lebah. Selain edukasi tentang lebah, di tempat ini pengunjung bisa melakukan wisata petik jamur dan sayur. Kawasan wisata ini tidak dipungut biaya apapun.</p>', 'EDUKASI TERAPI WISATA PETIK LEBAH SELFIE ADVENTURE PERTANIAN', 'FREE', 'FREE', '-7.84885', '112.69511', 6),
(120, 1, 'yjQ5gRQnclA', '<NAME>', '<p style="text-align: justify;">Sumber Jenon terletak di Desa Gunung Ronggo, Kecamatan Tajinan, Kabupaten Malang. Bagi masyarakat sekitar tempat ini merupakan tempat berendam sekaligus terapi untuk berbagai macam penyakit karena memang warga sekitar mempercayai sumber ini sebagai media penyembuhan yang mujarab. Selain itu Sumber Jenon merupakan destinasi untuk hunting foto maupun berlibur. Sumber ini mempunyai air berwarna biru muda yang tertampung pada kolam dengan kedalaman sekitar 5 m dan spot yang paling dicari untuk para pengunjung adalah adanya batang pohon jenu di dalam kolam yang posisinya membujur dari posisi barat ke arah timur.</p>', 'RENANG RILEKS UNDERWATER SELFIE PIKNIK', 'FREE', 'FREE', '-8.04966', '112.71646', 2),
(121, 1, 'vhw_wfaeGcM', '<NAME>', '<p style="text-align: justify;">Wisata Sumber Sira merupakan Objek Wisata yang berlokasi di Desa Sumberjaya, Kecamatan Gondanglegi, Kabupaten Malang. Sumber Sira merupakan salah satu tempat wisata air yang dapat digunakan untuk berenang maupun snorkeling karena airnya yang sangat jernih. Tapi sangat dianjurkan untuk berhati-hati ketika bersnorkeling dikarenakan ganggang di dasar air itu mudah patah. Rute untuk menuju kawasan wisata ini jika dari Kota Malang, bisa mengambil arah Bululawang, setelah itu ambil jalan lurus menuju Pabrik Gula Krebet sampai bertemu Pabrik Pioneer. Setelah itu belok ke sebelah kanan jalan, lalu bertemu dengan pertigaan pertama, ambil jalan lurus. Selanjutnya bertemu pertigaan kedua, ambil jalan belok kanan hingga memasuki sebuah gerbang kecil kemudian berjalan lurus hingga bertemu dengan jalan yang menyempit.</p>', 'SELFIE UNDERWATER PIKNIK RILEKS RENANG SNORKELING', 'RP 3.000,-', 'RP 3.000,-', '-8.12287', '112.6206', 2),
(122, 1, '6UZ8cWG-n_I', 'SUMBER TAMAN', '<p style="text-align: justify;">Sumber Taman terletak di Jalan Sumber Taman, Desa Karangsuko, Kecamatan Pagelaran, Kabupaten Malang. Tempat wisata ini terletak di antara dua desa yaitu Desa Karangsuko dan Desa Brongkal. Hanya membutuhkan waktu sekitar 45 menit dari Kota Malang dengan kendaraan pribadi tapi jika dari Gondanglegi membutuhkan waktu sekitar 15 menit ke arah barat. Sumber mata air dari Sumber Taman berasal dari dua batang pohon besar yang mengalirkan air melimpah. Area pemandian Sumber Taman cukup luas dengan kedalaman sekitar 1-1.5 m.</p>', 'SELFIE RILEKS RENANG UNDERWATER PIKNIK', 'FREE', 'FREE', '-8.17453', '112.59981', 2),
(123, 1, 'RbYHs2_lMN8', 'TAMAN WISATA LEMBAH DIENG', '<p style="text-align: justify;">Taman Wisata Lembah Dieng ini terletak di Jalan Wisata No. 99 Desa Kalisongo, Kecamatan Dau, Kabupaten Malang, Jawa Timur. Kawasan wisata ini menawarkan Kolam Renang bertaraf Internasional dan harga untuk memasuki kolam renang ini sangat terjangkau, hanya Rp 10.000,- - Rp 15.000,-/orang, dan parkir sebesar Rp 2.000,-. Air kolam disini berasal dari sumber mata air, jadi pengunjung akan merasakan kesegaran ketika berenang disini. Selain kolam renang, di Taman Wisata Lembah DIeng terdapat kolam pancing yang setiap hari minggu dapat memancing ikan tombro. Area wisata ini buka mulai pukul 06.00 - 18.00. Akses untuk menuju kawasan wisata ini hanya berjarak 3 Km dari pusat kota Malang.</p>', 'RENANG RILEKS MANCING SELFIE', 'FREE', 'FREE', '-7.96499', '112.59869', 3),
(124, 1, 'xRTRcYzYTZg', '<NAME>', '<p style="text-align: justify;">Tubing Led<NAME> merupakan Objek Wisata yang berlokasi di Desa Gubuk Klakah, Kecamatan Poncokusumo, Kabupaten Malang. River Tubing ini berada satu aliran dengan Coban Pelangi yaitu di Sungai Amprong. Tubing Ledok Amprong ini termasuk wisata adrenalin. Ledok Amprong adalah nama tempat di hulu Sungai Amprong. Istilah ledok dalam bahasa Jawa adalah tanah yang rendah atau cekungan. Derasnya aliran hulu Sungai Amprong berkisar 10-30 km/jam. Wisata ini beroperasi mulai pagi hingga pukul 4 sore. Hanya mempersiapkan uang Rp 5.000,- bisa langsung masuk dan menikmati keindahan sungai dan alam sekitar yang menyejukkan mata disini.</p>', 'TRACKING SELFIE ADVENTURE CAMPING TUBING', 'Rp 5.000,-', 'Rp 5.000,-', '-8.03208', '112.82791', 2);
INSERT INTO `wisata` (`wisata_id`, `kategori_id`, `wisata_url_video`, `wisata_nama`, `wisata_deskripsi`, `wisata_tag`, `wisata_htm_lokal`, `wisata_htm_intl`, `wisata_latitude`, `wisata_longitude`, `wisata_tampil`) VALUES
(125, 1, 'uc2h04P9FVY', 'WISATA AIR SUMBER KRABYAKAN', '<p style="text-align: justify;">Wisata Air Sumber Krabyakan merupakan Objek Wisata yang berlokasi di Desa Sumber Ngepoh, Kecamatan Lawang, Kabupaten Malang. Wisata Sumber Krabyakan yang berlokasi di kaki pegunungan menjadikan tempat ini dapat menghadirkan kenyamanan dan keharmonisan bagi pengunjungnya. Biaya masuk ke sumber air ini dikenakan Rp 5.000,- dan biaya parkir Rp 5.000,-.</p>', 'SELFIE BERENDAM RILEKS TERAPI IKAN SUMBER', 'Rp 5.000,-', 'Rp 5.000,-', '-7.84278', '112.72037', 2);
-- --------------------------------------------------------
--
-- Table structure for table `wisata_berfasilitas`
--
-- Junction table mapping tourism sites (`wisata`) to facilities (`faswis`).
-- `wistas_status` flags availability: 'Y'/'N' in most rows, '' in some dumped rows.
CREATE TABLE IF NOT EXISTS `wisata_berfasilitas` (
`wistas_id` int(11) NOT NULL AUTO_INCREMENT, -- surrogate key
`wisata_id` int(11) NOT NULL, -- id of the tourism site (no FK declared in this dump)
`faswis_id` int(11) NOT NULL, -- id of the facility entry (no FK declared in this dump)
`wistas_status` text NOT NULL, -- 'Y' = available, 'N' = not available, '' appears in some rows
PRIMARY KEY (`wistas_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1158 ;
--
-- Dumping data for table `wisata_berfasilitas`
--
INSERT INTO `wisata_berfasilitas` (`wistas_id`, `wisata_id`, `faswis_id`, `wistas_status`) VALUES
(1, 1, 1, 'Y'),
(2, 1, 2, 'N'),
(3, 1, 3, 'N'),
(4, 1, 4, 'Y'),
(5, 1, 5, 'N'),
(6, 1, 6, 'N'),
(7, 1, 7, 'Y'),
(8, 1, 8, 'Y'),
(9, 1, 9, 'Y'),
(10, 1, 10, 'N'),
(12, 2, 1, 'Y'),
(13, 2, 2, 'N'),
(14, 2, 3, 'N'),
(15, 2, 4, 'Y'),
(16, 2, 5, 'N'),
(17, 2, 6, 'Y'),
(18, 2, 7, 'Y'),
(19, 2, 8, 'Y'),
(20, 2, 9, 'Y'),
(21, 2, 10, 'N'),
(22, 2, 11, ''),
(23, 3, 1, ''),
(24, 3, 2, ''),
(25, 3, 3, ''),
(26, 3, 4, ''),
(27, 3, 5, ''),
(28, 3, 6, ''),
(29, 3, 7, ''),
(30, 3, 8, ''),
(31, 3, 9, ''),
(32, 3, 10, ''),
(33, 3, 11, ''),
(34, 4, 1, ''),
(35, 4, 2, ''),
(36, 4, 3, ''),
(37, 4, 4, ''),
(38, 4, 5, ''),
(39, 4, 6, ''),
(40, 4, 7, ''),
(41, 4, 8, ''),
(42, 4, 9, ''),
(43, 4, 10, ''),
(44, 4, 11, ''),
(45, 5, 1, ''),
(46, 5, 2, ''),
(47, 5, 3, ''),
(48, 5, 4, ''),
(49, 5, 5, ''),
(50, 5, 6, ''),
(51, 5, 7, ''),
(52, 5, 8, ''),
(53, 5, 9, ''),
(54, 5, 10, ''),
(55, 5, 11, ''),
(56, 6, 1, 'Y'),
(57, 6, 2, 'N'),
(58, 6, 3, 'Y'),
(59, 6, 4, 'Y'),
(60, 6, 5, 'N'),
(61, 6, 6, 'Y'),
(62, 6, 7, 'Y'),
(63, 6, 8, 'Y'),
(64, 6, 9, 'Y'),
(65, 6, 10, 'N'),
(66, 6, 11, 'Y'),
(67, 7, 1, 'Y'),
(68, 7, 2, 'N'),
(69, 7, 3, 'N'),
(70, 7, 4, 'Y'),
(71, 7, 5, 'Y'),
(72, 7, 6, 'Y'),
(73, 7, 7, 'Y'),
(74, 7, 8, 'Y'),
(75, 7, 9, 'Y'),
(76, 7, 10, 'Y'),
(77, 7, 11, 'Y'),
(78, 8, 1, 'Y'),
(79, 8, 2, 'N'),
(80, 8, 3, 'N'),
(81, 8, 4, 'Y'),
(82, 8, 5, 'Y'),
(83, 8, 6, 'Y'),
(84, 8, 7, 'Y'),
(85, 8, 8, 'Y'),
(86, 8, 9, 'Y'),
(87, 8, 10, 'Y'),
(88, 8, 11, 'Y'),
(89, 9, 1, 'Y'),
(100, 10, 1, 'Y'),
(101, 10, 2, 'N'),
(102, 10, 3, 'N'),
(103, 10, 4, 'Y'),
(104, 10, 5, 'N'),
(105, 10, 6, 'Y'),
(106, 10, 7, 'Y'),
(107, 10, 8, 'Y'),
(108, 10, 9, 'Y'),
(109, 10, 10, 'N'),
(110, 10, 11, 'Y'),
(111, 11, 1, 'N'),
(112, 11, 2, 'N'),
(113, 11, 3, 'N'),
(114, 11, 4, 'Y'),
(115, 11, 5, 'N'),
(116, 11, 6, 'Y'),
(117, 11, 7, 'N'),
(118, 11, 8, 'N'),
(119, 11, 9, 'Y'),
(120, 11, 10, 'Y'),
(121, 11, 11, 'Y'),
(122, 12, 1, 'N'),
(123, 12, 2, 'N'),
(124, 12, 3, 'N'),
(125, 12, 4, 'Y'),
(126, 12, 5, 'N'),
(127, 12, 6, 'Y'),
(128, 12, 7, 'N'),
(129, 12, 8, 'N'),
(130, 12, 9, 'Y'),
(131, 12, 10, 'Y'),
(132, 12, 11, 'Y'),
(133, 13, 1, 'N'),
(134, 13, 2, 'N'),
(135, 13, 3, 'N'),
(136, 13, 4, 'Y'),
(137, 13, 5, 'N'),
(138, 13, 6, 'Y'),
(139, 13, 7, 'N'),
(140, 13, 8, 'N'),
(141, 13, 9, 'Y'),
(142, 13, 10, 'Y'),
(143, 13, 11, 'Y'),
(144, 14, 1, 'N'),
(145, 14, 2, 'N'),
(146, 14, 3, 'N'),
(147, 14, 4, 'Y'),
(148, 14, 5, 'N'),
(149, 14, 6, 'Y'),
(150, 14, 7, 'N'),
(151, 14, 8, 'N'),
(152, 14, 9, 'Y'),
(153, 14, 10, 'Y'),
(154, 14, 11, 'Y'),
(155, 15, 1, 'N'),
(156, 15, 2, 'N'),
(157, 15, 3, 'N'),
(158, 15, 4, 'Y'),
(159, 15, 5, 'N'),
(160, 15, 6, 'Y'),
(161, 15, 7, 'N'),
(162, 15, 8, 'N'),
(163, 15, 9, 'Y'),
(164, 15, 10, 'Y'),
(165, 15, 11, 'Y'),
(166, 16, 1, 'N'),
(167, 16, 2, 'N'),
(168, 16, 3, 'N'),
(169, 16, 4, 'Y'),
(170, 16, 5, 'N'),
(171, 16, 6, 'Y'),
(172, 16, 7, 'N'),
(173, 16, 8, 'N'),
(174, 16, 9, 'Y'),
(175, 16, 10, 'Y'),
(176, 16, 11, 'Y'),
(177, 17, 1, 'Y'),
(178, 17, 2, 'Y'),
(179, 17, 3, 'N'),
(180, 17, 4, 'Y'),
(181, 17, 5, 'Y'),
(182, 17, 6, 'Y'),
(183, 17, 7, 'Y'),
(184, 17, 8, 'Y'),
(185, 17, 9, 'Y'),
(186, 17, 10, 'N'),
(187, 17, 11, 'Y'),
(188, 18, 1, 'Y'),
(189, 18, 2, 'Y'),
(190, 18, 3, 'N'),
(191, 18, 4, 'Y'),
(192, 18, 5, 'Y'),
(193, 18, 6, 'Y'),
(194, 18, 7, 'Y'),
(195, 18, 8, 'Y'),
(196, 18, 9, 'Y'),
(197, 18, 10, 'N'),
(198, 18, 11, 'Y'),
(199, 19, 1, 'Y'),
(200, 19, 2, 'N'),
(201, 19, 3, 'N'),
(202, 19, 4, 'Y'),
(203, 19, 5, 'N'),
(204, 19, 6, 'N'),
(205, 19, 7, 'N'),
(206, 19, 8, 'Y'),
(207, 19, 9, 'Y'),
(208, 19, 10, 'N'),
(209, 19, 11, 'Y'),
(210, 20, 1, 'N'),
(211, 20, 2, 'N'),
(212, 20, 3, 'N'),
(213, 20, 4, 'Y'),
(214, 20, 5, 'N'),
(215, 20, 6, 'N'),
(216, 20, 7, 'N'),
(217, 20, 8, 'Y'),
(218, 20, 9, 'Y'),
(219, 20, 10, 'N'),
(220, 20, 11, 'Y'),
(221, 21, 1, 'N'),
(222, 21, 2, 'N'),
(223, 21, 3, 'N'),
(224, 21, 4, 'Y'),
(225, 21, 5, 'N'),
(226, 21, 6, 'N'),
(227, 21, 7, 'N'),
(228, 21, 8, 'Y'),
(229, 21, 9, 'Y'),
(230, 21, 10, 'N'),
(231, 21, 11, 'Y'),
(232, 22, 1, 'Y'),
(233, 22, 2, 'N'),
(234, 22, 3, 'N'),
(235, 22, 4, 'Y'),
(236, 22, 5, 'Y'),
(237, 22, 6, 'N'),
(238, 22, 7, 'Y'),
(239, 22, 8, 'Y'),
(240, 22, 9, 'Y'),
(241, 22, 10, 'N'),
(242, 22, 11, 'Y'),
(243, 23, 1, 'Y'),
(244, 23, 2, 'N'),
(245, 23, 3, 'N'),
(246, 23, 4, 'Y'),
(247, 23, 5, 'Y'),
(248, 23, 6, 'N'),
(249, 23, 7, 'Y'),
(250, 23, 8, 'Y'),
(251, 23, 9, 'Y'),
(252, 23, 10, 'N'),
(253, 23, 11, 'Y'),
(254, 24, 1, 'Y'),
(255, 24, 2, 'N'),
(256, 24, 3, 'N'),
(257, 24, 4, 'Y'),
(258, 24, 5, 'N'),
(259, 24, 6, 'N'),
(260, 24, 7, 'Y'),
(261, 24, 8, 'Y'),
(262, 24, 9, 'Y'),
(263, 24, 10, 'N'),
(264, 24, 11, 'Y'),
(265, 25, 1, 'Y'),
(266, 25, 2, 'N'),
(267, 25, 3, 'N'),
(268, 25, 4, 'Y'),
(269, 25, 5, 'Y'),
(270, 25, 6, 'N'),
(271, 25, 7, 'Y'),
(272, 25, 8, 'Y'),
(273, 25, 9, 'Y'),
(274, 25, 10, 'N'),
(275, 25, 11, 'Y'),
(276, 26, 1, 'Y'),
(277, 26, 2, 'N'),
(278, 26, 3, 'N'),
(279, 26, 4, 'Y'),
(280, 26, 5, 'Y'),
(281, 26, 6, 'Y'),
(282, 26, 7, 'Y'),
(283, 26, 8, 'Y'),
(284, 26, 9, 'Y'),
(285, 26, 10, 'Y'),
(286, 26, 11, 'Y'),
(287, 27, 1, 'Y'),
(288, 27, 2, 'N'),
(289, 27, 3, 'N'),
(290, 27, 4, 'Y'),
(291, 27, 5, 'Y'),
(292, 27, 6, 'Y'),
(293, 27, 7, 'Y'),
(294, 27, 8, 'Y'),
(295, 27, 9, 'Y'),
(296, 27, 10, 'Y'),
(297, 27, 11, 'Y'),
(298, 28, 1, 'Y'),
(299, 28, 2, 'N'),
(300, 28, 3, 'N'),
(301, 28, 4, 'Y'),
(302, 28, 5, 'Y'),
(303, 28, 6, 'Y'),
(304, 28, 7, 'Y'),
(305, 28, 8, 'Y'),
(306, 28, 9, 'Y'),
(307, 28, 10, 'Y'),
(308, 28, 11, 'Y'),
(309, 29, 1, 'Y'),
(310, 29, 2, 'N'),
(311, 29, 3, 'N'),
(312, 29, 4, 'Y'),
(313, 29, 5, 'Y'),
(314, 29, 6, 'Y'),
(315, 29, 7, 'Y'),
(316, 29, 8, 'Y'),
(317, 29, 9, 'Y'),
(318, 29, 10, 'Y'),
(319, 29, 11, 'Y'),
(320, 30, 1, 'Y'),
(321, 30, 2, 'Y'),
(322, 30, 3, 'Y'),
(323, 30, 4, 'Y'),
(324, 30, 5, 'Y'),
(325, 30, 6, 'Y'),
(326, 30, 7, 'Y'),
(327, 30, 8, 'Y'),
(328, 30, 9, 'Y'),
(329, 30, 10, 'N'),
(330, 30, 11, 'Y'),
(331, 31, 1, 'Y'),
(332, 31, 2, 'N'),
(333, 31, 3, 'Y'),
(334, 31, 4, 'Y'),
(335, 31, 5, 'Y'),
(336, 31, 6, 'N'),
(337, 31, 7, 'Y'),
(338, 31, 8, 'Y'),
(339, 31, 9, 'Y'),
(340, 31, 10, 'N'),
(341, 31, 11, 'Y'),
(342, 32, 1, 'Y'),
(343, 32, 2, 'N'),
(344, 32, 3, 'Y'),
(345, 32, 4, 'Y'),
(346, 32, 5, 'Y'),
(347, 32, 6, 'N'),
(348, 32, 7, 'Y'),
(349, 32, 8, 'Y'),
(350, 32, 9, 'Y'),
(351, 32, 10, 'N'),
(352, 32, 11, 'Y'),
(353, 33, 1, 'Y'),
(354, 33, 2, 'N'),
(355, 33, 3, 'N'),
(356, 33, 4, 'Y'),
(357, 33, 5, 'Y'),
(358, 33, 6, 'Y'),
(359, 33, 7, 'Y'),
(360, 33, 8, 'Y'),
(361, 33, 9, 'Y'),
(362, 33, 10, 'Y'),
(363, 33, 11, 'Y'),
(364, 34, 1, 'Y'),
(365, 34, 2, 'N'),
(366, 34, 3, 'N'),
(367, 34, 4, 'Y'),
(368, 34, 5, 'Y'),
(369, 34, 6, 'Y'),
(370, 34, 7, 'Y'),
(371, 34, 8, 'Y'),
(372, 34, 9, 'Y'),
(373, 34, 10, 'Y'),
(374, 34, 11, 'Y'),
(375, 35, 1, 'Y'),
(376, 35, 2, 'N'),
(377, 35, 3, 'N'),
(378, 35, 4, 'Y'),
(379, 35, 5, 'Y'),
(380, 35, 6, 'Y'),
(381, 35, 7, 'Y'),
(382, 35, 8, 'Y'),
(383, 35, 9, 'Y'),
(384, 35, 10, 'Y'),
(385, 35, 11, 'Y'),
(386, 36, 1, 'Y'),
(387, 36, 2, 'N'),
(388, 36, 3, 'N'),
(389, 36, 4, 'Y'),
(390, 36, 5, 'Y'),
(391, 36, 6, 'Y'),
(392, 36, 7, 'Y'),
(393, 36, 8, 'Y'),
(394, 36, 9, 'Y'),
(395, 36, 10, 'Y'),
(396, 36, 11, 'Y'),
(397, 37, 1, 'Y'),
(398, 37, 2, 'N'),
(399, 37, 3, 'N'),
(400, 37, 4, 'Y'),
(401, 37, 5, 'Y'),
(402, 37, 6, 'Y'),
(403, 37, 7, 'Y'),
(404, 37, 8, 'Y'),
(405, 37, 9, 'Y'),
(406, 37, 10, 'Y'),
(407, 37, 11, 'Y'),
(408, 38, 1, 'Y'),
(409, 38, 2, 'N'),
(410, 38, 3, 'N'),
(411, 38, 4, 'Y'),
(412, 38, 5, 'Y'),
(413, 38, 6, 'Y'),
(414, 38, 7, 'Y'),
(415, 38, 8, 'Y'),
(416, 38, 9, 'Y'),
(417, 38, 10, 'Y'),
(418, 38, 11, 'Y'),
(419, 39, 1, 'Y'),
(420, 39, 2, 'N'),
(421, 39, 3, 'N'),
(422, 39, 4, 'Y'),
(423, 39, 5, 'Y'),
(424, 39, 6, 'Y'),
(425, 39, 7, 'Y'),
(426, 39, 8, 'Y'),
(427, 39, 9, 'Y'),
(428, 39, 10, 'Y'),
(429, 39, 11, 'Y'),
(430, 40, 1, 'Y'),
(431, 40, 2, 'N'),
(432, 40, 3, 'N'),
(433, 40, 4, 'Y'),
(434, 40, 5, 'Y'),
(435, 40, 6, 'Y'),
(436, 40, 7, 'Y'),
(437, 40, 8, 'Y'),
(438, 40, 9, 'Y'),
(439, 40, 10, 'Y'),
(440, 40, 11, 'Y'),
(441, 41, 1, 'N'),
(442, 41, 2, 'N'),
(443, 41, 3, 'Y'),
(444, 41, 4, 'Y'),
(445, 41, 5, 'N'),
(446, 41, 6, 'Y'),
(447, 41, 7, 'Y'),
(448, 41, 8, 'Y'),
(449, 41, 9, 'Y'),
(450, 41, 10, 'N'),
(451, 41, 11, 'Y'),
(452, 42, 1, 'Y'),
(453, 42, 2, 'N'),
(454, 42, 3, 'N'),
(455, 42, 4, 'Y'),
(456, 42, 5, 'N'),
(457, 42, 6, 'N'),
(458, 42, 7, 'Y'),
(459, 42, 8, 'N'),
(460, 42, 9, 'Y'),
(461, 42, 10, 'Y'),
(462, 42, 11, 'Y'),
(463, 43, 1, 'Y'),
(464, 43, 2, 'N'),
(465, 43, 3, 'N'),
(466, 43, 4, 'Y'),
(467, 43, 5, 'N'),
(468, 43, 6, 'N'),
(469, 43, 7, 'Y'),
(470, 43, 8, 'N'),
(471, 43, 9, 'Y'),
(472, 43, 10, 'Y'),
(473, 43, 11, 'Y'),
(474, 44, 1, 'Y'),
(475, 44, 2, 'N'),
(476, 44, 3, 'N'),
(477, 44, 4, 'Y'),
(478, 44, 5, 'N'),
(479, 44, 6, 'N'),
(480, 44, 7, 'Y'),
(481, 44, 8, 'N'),
(482, 44, 9, 'Y'),
(483, 44, 10, 'Y'),
(484, 44, 11, 'Y'),
(485, 45, 1, 'Y'),
(486, 45, 2, 'Y'),
(487, 45, 3, 'N'),
(488, 45, 4, 'Y'),
(489, 45, 5, 'N'),
(490, 45, 6, 'Y'),
(491, 45, 7, 'Y'),
(492, 45, 8, 'Y'),
(493, 45, 9, 'Y'),
(494, 45, 10, 'Y'),
(495, 45, 11, 'Y'),
(496, 46, 1, 'Y'),
(497, 46, 2, 'Y'),
(498, 46, 3, 'N'),
(499, 46, 4, 'Y'),
(500, 46, 5, 'N'),
(501, 46, 6, 'Y'),
(502, 46, 7, 'Y'),
(503, 46, 8, 'Y'),
(504, 46, 9, 'Y'),
(505, 46, 10, 'Y'),
(506, 46, 11, 'Y'),
(507, 47, 1, 'Y'),
(508, 47, 2, 'Y'),
(509, 47, 3, 'N'),
(510, 47, 4, 'Y'),
(511, 47, 5, 'N'),
(512, 47, 6, 'Y'),
(513, 47, 7, 'Y'),
(514, 47, 8, 'Y'),
(515, 47, 9, 'Y'),
(516, 47, 10, 'Y'),
(517, 47, 11, 'Y'),
(518, 48, 1, 'Y'),
(519, 48, 2, 'N'),
(520, 48, 3, 'N'),
(521, 48, 4, 'Y'),
(522, 48, 5, 'Y'),
(523, 48, 6, 'Y'),
(524, 48, 7, 'Y'),
(525, 48, 8, 'Y'),
(526, 48, 9, 'Y'),
(527, 48, 10, 'Y'),
(528, 48, 11, 'Y'),
(529, 49, 1, 'Y'),
(530, 49, 2, 'N'),
(531, 49, 3, 'N'),
(532, 49, 4, 'Y'),
(533, 49, 5, 'Y'),
(534, 49, 6, 'Y'),
(535, 49, 7, 'Y'),
(536, 49, 8, 'Y'),
(537, 49, 9, 'Y'),
(538, 49, 10, 'Y'),
(539, 49, 11, 'Y'),
(540, 50, 1, 'Y'),
(541, 50, 2, 'N'),
(542, 50, 3, 'N'),
(543, 50, 4, 'Y'),
(544, 50, 5, 'Y'),
(545, 50, 6, 'Y'),
(546, 50, 7, 'Y'),
(547, 50, 8, 'Y'),
(548, 50, 9, 'Y'),
(549, 50, 10, 'Y'),
(550, 50, 11, 'Y'),
(551, 51, 1, 'Y'),
(552, 51, 2, 'N'),
(553, 51, 3, 'N'),
(554, 51, 4, 'Y'),
(555, 51, 5, 'N'),
(556, 51, 6, 'N'),
(557, 51, 7, 'Y'),
(558, 51, 8, 'Y'),
(559, 51, 9, 'Y'),
(560, 51, 10, 'N'),
(561, 51, 11, 'Y'),
(562, 52, 1, 'Y'),
(563, 52, 2, 'N'),
(564, 52, 3, 'N'),
(565, 52, 4, 'Y'),
(566, 52, 5, 'N'),
(567, 52, 6, 'N'),
(568, 52, 7, 'Y'),
(569, 52, 8, 'Y'),
(570, 52, 9, 'Y'),
(571, 52, 10, 'N'),
(572, 52, 11, 'Y'),
(573, 53, 1, 'Y'),
(574, 53, 2, 'N'),
(575, 53, 3, 'N'),
(576, 53, 4, 'Y'),
(577, 53, 5, 'Y'),
(578, 53, 6, 'Y'),
(579, 53, 7, 'Y'),
(580, 53, 8, 'Y'),
(581, 53, 9, 'Y'),
(582, 53, 10, 'Y'),
(583, 53, 11, 'Y'),
(584, 54, 1, 'Y'),
(585, 54, 2, 'N'),
(586, 54, 3, 'N'),
(587, 54, 4, 'Y'),
(588, 54, 5, 'Y'),
(589, 54, 6, 'N'),
(590, 54, 7, 'Y'),
(591, 54, 8, 'Y'),
(592, 54, 9, 'Y'),
(593, 54, 10, 'N'),
(594, 54, 11, 'Y'),
(595, 55, 1, 'N'),
(596, 55, 2, 'N'),
(597, 55, 3, 'N'),
(598, 55, 4, 'Y'),
(599, 55, 5, 'N'),
(600, 55, 6, 'N'),
(601, 55, 7, 'N'),
(602, 55, 8, 'Y'),
(603, 55, 9, 'Y'),
(604, 55, 10, 'N'),
(605, 55, 11, 'N'),
(606, 56, 1, 'N'),
(607, 56, 2, 'N'),
(608, 56, 3, 'N'),
(609, 56, 4, 'Y'),
(610, 56, 5, 'N'),
(611, 56, 6, 'N'),
(612, 56, 7, 'N'),
(613, 56, 8, 'Y'),
(614, 56, 9, 'Y'),
(615, 56, 10, 'N'),
(616, 56, 11, 'N'),
(617, 57, 1, 'Y'),
(618, 57, 2, 'Y'),
(619, 57, 3, 'N'),
(620, 57, 4, 'Y'),
(621, 57, 5, 'N'),
(622, 57, 6, 'N'),
(623, 57, 7, 'Y'),
(624, 57, 8, 'Y'),
(625, 57, 9, 'Y'),
(626, 57, 10, 'Y'),
(627, 57, 11, 'Y'),
(628, 58, 1, 'Y'),
(629, 58, 2, 'Y'),
(630, 58, 3, 'N'),
(631, 58, 4, 'Y'),
(632, 58, 5, 'N'),
(633, 58, 6, 'N'),
(634, 58, 7, 'Y'),
(635, 58, 8, 'Y'),
(636, 58, 9, 'Y'),
(637, 58, 10, 'Y'),
(638, 58, 11, 'Y'),
(639, 59, 1, 'Y'),
(640, 59, 2, 'Y'),
(641, 59, 3, 'Y'),
(642, 59, 4, 'Y'),
(643, 59, 5, 'N'),
(644, 59, 6, 'N'),
(645, 59, 7, 'Y'),
(646, 59, 8, 'Y'),
(647, 59, 9, 'Y'),
(648, 59, 10, 'Y'),
(649, 59, 11, 'Y'),
(650, 60, 1, 'Y'),
(651, 60, 2, 'N'),
(652, 60, 3, 'N'),
(653, 60, 4, 'Y'),
(654, 60, 5, 'N'),
(655, 60, 6, 'N'),
(656, 60, 7, 'Y'),
(657, 60, 8, 'Y'),
(658, 60, 9, 'Y'),
(659, 60, 10, 'N'),
(660, 60, 11, 'Y'),
(661, 61, 1, 'Y'),
(662, 61, 2, 'N'),
(663, 61, 3, 'N'),
(664, 61, 4, 'Y'),
(665, 61, 5, 'N'),
(666, 61, 6, 'N'),
(667, 61, 7, 'Y'),
(668, 61, 8, 'Y'),
(669, 61, 9, 'Y'),
(670, 61, 10, 'Y'),
(671, 61, 11, 'Y'),
(672, 62, 1, 'Y'),
(673, 62, 2, 'N'),
(674, 62, 3, 'N'),
(675, 62, 4, 'Y'),
(676, 62, 5, 'N'),
(677, 62, 6, 'N'),
(678, 62, 7, 'Y'),
(679, 62, 8, 'Y'),
(680, 62, 9, 'Y'),
(681, 62, 10, 'Y'),
(682, 62, 11, 'Y'),
(683, 63, 1, 'Y'),
(684, 63, 2, 'Y'),
(685, 63, 3, 'Y'),
(686, 63, 4, 'Y'),
(687, 63, 5, 'Y'),
(688, 63, 6, 'Y'),
(689, 63, 7, 'Y'),
(690, 63, 8, 'Y'),
(691, 63, 9, 'Y'),
(692, 63, 10, 'N'),
(693, 63, 11, 'Y'),
(694, 64, 1, 'Y'),
(695, 64, 2, 'N'),
(696, 64, 3, 'N'),
(697, 64, 4, 'Y'),
(698, 64, 5, 'N'),
(699, 64, 6, 'N'),
(700, 64, 7, 'Y'),
(701, 64, 8, 'Y'),
(702, 64, 9, 'Y'),
(703, 64, 10, 'N'),
(704, 64, 11, 'Y'),
(705, 65, 1, 'N'),
(706, 65, 2, 'N'),
(707, 65, 3, 'N'),
(708, 65, 4, 'Y'),
(709, 65, 5, 'N'),
(710, 65, 6, 'N'),
(711, 65, 7, 'N'),
(712, 65, 8, 'N'),
(713, 65, 9, 'Y'),
(714, 65, 10, 'Y'),
(715, 65, 11, 'Y'),
(716, 66, 1, 'N'),
(717, 66, 2, 'N'),
(718, 66, 3, 'N'),
(719, 66, 4, 'Y'),
(720, 66, 5, 'N'),
(721, 66, 6, 'N'),
(722, 66, 7, 'N'),
(723, 66, 8, 'N'),
(724, 66, 9, 'Y'),
(725, 66, 10, 'Y'),
(726, 66, 11, 'Y'),
(727, 67, 1, 'N'),
(728, 67, 2, 'N'),
(729, 67, 3, 'N'),
(730, 67, 4, 'Y'),
(731, 67, 5, 'N'),
(732, 67, 6, 'N'),
(733, 67, 7, 'N'),
(734, 67, 8, 'N'),
(735, 67, 9, 'Y'),
(736, 67, 10, 'Y'),
(737, 67, 11, 'Y'),
(738, 70, 1, 'Y'),
(739, 70, 2, 'N'),
(740, 1, 11, 'Y'),
(742, 75, 1, 'Y'),
(743, 75, 2, 'N'),
(744, 75, 3, 'N'),
(745, 75, 4, 'Y'),
(746, 75, 5, 'N'),
(747, 75, 6, 'Y'),
(748, 75, 7, 'Y'),
(749, 75, 8, 'Y'),
(750, 75, 9, 'Y'),
(751, 75, 10, 'N'),
(752, 75, 11, 'Y'),
(753, 76, 1, 'Y'),
(754, 76, 2, 'Y'),
(755, 76, 3, 'Y'),
(756, 76, 4, 'Y'),
(757, 76, 5, 'Y'),
(758, 76, 6, 'Y'),
(759, 76, 7, 'Y'),
(760, 76, 8, 'Y'),
(761, 76, 9, 'Y'),
(762, 76, 10, 'N'),
(763, 76, 11, 'Y'),
(764, 77, 1, 'Y'),
(765, 77, 2, 'N'),
(766, 77, 3, 'N'),
(767, 77, 4, 'Y'),
(768, 77, 5, 'Y'),
(769, 77, 6, 'N'),
(770, 77, 7, 'Y'),
(771, 77, 8, 'Y'),
(772, 77, 9, 'Y'),
(773, 77, 10, 'N'),
(774, 77, 11, 'Y'),
(775, 78, 1, 'Y'),
(776, 78, 2, 'N'),
(777, 78, 3, 'N'),
(778, 78, 4, 'Y'),
(779, 78, 5, 'N'),
(780, 78, 6, 'N'),
(781, 78, 7, 'Y'),
(782, 78, 8, 'Y'),
(783, 78, 9, 'Y'),
(784, 78, 10, 'Y'),
(785, 78, 11, 'Y'),
(786, 79, 1, 'Y'),
(787, 79, 2, 'N'),
(788, 79, 3, 'N'),
(789, 79, 4, 'Y'),
(790, 79, 6, 'Y'),
(791, 79, 7, 'Y'),
(792, 79, 8, 'Y'),
(793, 79, 9, 'Y'),
(794, 79, 10, 'N'),
(795, 79, 11, 'Y'),
(796, 80, 1, 'Y'),
(797, 80, 7, 'Y'),
(798, 80, 8, 'Y'),
(799, 80, 9, 'Y'),
(800, 80, 11, 'Y'),
(801, 81, 1, 'Y'),
(802, 81, 4, 'Y'),
(803, 81, 7, 'Y'),
(804, 81, 8, 'Y'),
(805, 81, 9, 'Y'),
(806, 81, 10, 'Y'),
(807, 81, 11, 'Y'),
(808, 82, 1, 'Y'),
(809, 82, 4, 'Y'),
(810, 82, 8, 'Y'),
(811, 82, 9, 'Y'),
(812, 82, 11, 'Y'),
(813, 83, 1, 'Y'),
(814, 83, 4, 'Y'),
(815, 83, 9, 'Y'),
(816, 83, 11, 'Y'),
(817, 84, 1, 'Y'),
(818, 84, 4, 'Y'),
(819, 84, 6, 'Y'),
(820, 84, 7, 'Y'),
(821, 84, 8, 'Y'),
(822, 84, 9, 'Y'),
(823, 84, 11, 'Y'),
(824, 85, 1, 'Y'),
(825, 85, 4, 'Y'),
(826, 85, 5, 'Y'),
(827, 85, 7, 'Y'),
(828, 85, 8, 'Y'),
(829, 85, 9, 'Y'),
(830, 85, 11, 'Y'),
(831, 86, 1, 'Y'),
(832, 86, 4, 'Y'),
(833, 86, 7, 'Y'),
(834, 87, 1, 'Y'),
(835, 87, 4, 'Y'),
(836, 87, 6, 'Y'),
(837, 87, 7, 'Y'),
(838, 87, 8, 'Y'),
(839, 87, 9, 'Y'),
(840, 87, 11, 'Y'),
(841, 88, 1, 'Y'),
(842, 88, 4, 'Y'),
(843, 88, 6, 'Y'),
(844, 88, 7, 'Y'),
(845, 89, 1, 'Y'),
(846, 89, 4, 'Y'),
(847, 89, 5, 'Y'),
(848, 89, 7, 'Y'),
(849, 89, 8, 'Y'),
(850, 89, 9, 'Y'),
(851, 89, 11, 'Y'),
(852, 90, 1, 'Y'),
(853, 90, 4, 'Y'),
(854, 90, 8, 'Y'),
(855, 90, 9, 'Y'),
(856, 90, 10, 'Y'),
(857, 90, 11, 'Y'),
(858, 91, 4, 'Y'),
(859, 91, 6, 'Y'),
(860, 91, 9, 'Y'),
(861, 91, 10, 'Y'),
(862, 91, 11, 'Y'),
(863, 92, 4, 'Y'),
(864, 92, 6, 'Y'),
(865, 92, 9, 'Y'),
(866, 92, 10, 'Y'),
(867, 92, 11, 'Y'),
(868, 93, 1, 'Y'),
(869, 93, 3, 'Y'),
(870, 93, 4, 'Y'),
(871, 93, 6, 'Y'),
(872, 93, 7, 'Y'),
(873, 93, 9, 'Y'),
(874, 93, 11, 'Y'),
(875, 94, 1, 'Y'),
(876, 94, 2, 'Y'),
(877, 94, 3, 'Y'),
(878, 94, 4, 'Y'),
(879, 94, 6, 'Y'),
(880, 94, 7, 'Y'),
(881, 94, 8, 'Y'),
(882, 94, 9, 'Y'),
(884, 94, 11, 'Y'),
(885, 95, 1, 'Y'),
(886, 95, 4, 'Y'),
(887, 95, 5, 'Y'),
(888, 95, 6, 'Y'),
(889, 95, 7, 'Y'),
(890, 95, 8, 'Y'),
(891, 95, 9, 'Y'),
(892, 95, 10, 'Y'),
(893, 95, 11, 'Y'),
(894, 96, 1, 'Y'),
(895, 96, 4, 'Y'),
(896, 96, 6, 'Y'),
(897, 96, 7, 'Y'),
(898, 96, 8, 'Y'),
(899, 96, 9, 'Y'),
(900, 96, 10, 'Y'),
(901, 96, 11, 'Y'),
(902, 97, 1, 'Y'),
(903, 97, 4, 'Y'),
(904, 97, 9, 'Y'),
(905, 97, 10, 'Y'),
(906, 97, 11, 'Y'),
(907, 98, 1, 'Y'),
(908, 98, 4, 'Y'),
(909, 98, 7, 'Y'),
(910, 98, 8, 'Y'),
(911, 98, 9, 'Y'),
(912, 98, 10, 'Y'),
(913, 98, 11, 'Y'),
(915, 99, 10, 'Y'),
(916, 99, 11, 'Y'),
(917, 100, 4, 'Y'),
(918, 100, 5, 'Y'),
(919, 100, 8, 'Y'),
(922, 102, 1, 'Y'),
(923, 102, 3, 'Y'),
(924, 102, 4, 'Y'),
(925, 102, 6, 'Y'),
(926, 102, 7, 'Y'),
(927, 102, 9, 'Y'),
(928, 102, 11, 'Y'),
(929, 104, 1, 'Y'),
(930, 104, 2, 'N'),
(931, 104, 3, 'N'),
(932, 104, 4, 'Y'),
(933, 104, 5, 'N'),
(934, 104, 6, 'N'),
(935, 104, 7, 'Y'),
(936, 104, 8, 'Y'),
(937, 104, 9, 'Y'),
(938, 104, 10, 'N'),
(939, 104, 11, 'Y'),
(940, 101, 1, 'N'),
(941, 101, 2, 'N'),
(942, 101, 3, 'N'),
(943, 101, 4, 'Y'),
(944, 101, 5, 'N'),
(946, 101, 6, 'N'),
(947, 101, 7, 'N'),
(948, 101, 8, 'Y'),
(949, 101, 9, 'Y'),
(950, 101, 10, 'N'),
(951, 101, 11, 'N'),
(952, 105, 1, 'Y'),
(953, 105, 2, 'N'),
(954, 105, 3, 'N'),
(955, 105, 4, 'Y'),
(956, 105, 5, 'Y'),
(957, 105, 6, 'Y'),
(958, 105, 7, 'Y'),
(959, 105, 8, 'Y'),
(960, 105, 9, 'Y'),
(961, 105, 10, 'N'),
(962, 105, 11, 'Y'),
(963, 106, 1, 'Y'),
(964, 106, 2, 'N'),
(965, 106, 3, 'Y'),
(966, 106, 4, 'Y'),
(967, 106, 5, 'Y'),
(968, 106, 6, 'N'),
(969, 106, 7, 'Y'),
(970, 106, 8, 'Y'),
(971, 106, 9, 'Y'),
(972, 106, 10, 'N'),
(973, 106, 11, 'Y'),
(974, 107, 1, 'Y'),
(975, 107, 2, 'N'),
(976, 107, 3, 'N'),
(977, 107, 4, 'Y'),
(978, 107, 5, 'N'),
(979, 107, 6, 'N'),
(980, 107, 7, 'Y'),
(981, 107, 8, 'Y'),
(982, 107, 9, 'Y'),
(983, 107, 10, 'N'),
(984, 107, 11, 'Y'),
(985, 108, 1, 'Y'),
(986, 108, 2, 'N'),
(987, 108, 3, 'N'),
(988, 108, 4, 'Y'),
(989, 108, 5, 'N'),
(990, 108, 6, 'Y'),
(991, 108, 7, 'Y'),
(992, 108, 8, 'Y'),
(993, 108, 9, 'Y'),
(994, 108, 10, 'N'),
(995, 108, 11, 'Y'),
(996, 109, 1, 'Y'),
(997, 109, 2, 'N'),
(998, 109, 3, 'N'),
(999, 109, 4, 'Y'),
(1000, 109, 5, 'N'),
(1001, 109, 6, 'N'),
(1002, 109, 7, 'Y'),
(1003, 109, 8, 'Y'),
(1004, 109, 9, 'Y'),
(1005, 109, 10, 'N'),
(1007, 109, 11, 'Y'),
(1008, 110, 1, 'Y'),
(1009, 110, 2, 'N'),
(1010, 110, 3, 'N'),
(1011, 110, 4, 'Y'),
(1012, 110, 5, 'Y'),
(1013, 110, 6, 'Y'),
(1014, 110, 7, 'Y'),
(1015, 110, 8, 'Y'),
(1016, 110, 9, 'Y'),
(1017, 110, 10, 'N'),
(1018, 110, 11, 'Y'),
(1019, 111, 1, 'Y'),
(1020, 111, 2, 'N'),
(1021, 111, 3, 'N'),
(1022, 111, 4, 'Y'),
(1023, 111, 5, 'N'),
(1024, 111, 6, 'Y'),
(1025, 111, 7, 'Y'),
(1026, 111, 8, 'Y'),
(1027, 111, 9, 'Y'),
(1028, 111, 10, 'N'),
(1029, 111, 11, 'Y'),
(1030, 112, 1, 'Y'),
(1031, 112, 2, 'N'),
(1032, 112, 3, 'N'),
(1033, 112, 4, 'Y'),
(1034, 112, 5, 'Y'),
(1035, 112, 6, 'Y'),
(1036, 112, 7, 'Y'),
(1037, 112, 8, 'Y'),
(1038, 112, 9, 'Y'),
(1039, 112, 10, 'Y'),
(1040, 112, 11, 'Y'),
(1041, 113, 1, 'Y'),
(1042, 113, 2, 'N'),
(1043, 113, 3, 'N'),
(1044, 113, 4, 'Y'),
(1045, 113, 5, 'N'),
(1046, 113, 6, 'N'),
(1047, 113, 7, 'Y'),
(1048, 113, 8, 'Y'),
(1049, 113, 9, 'Y'),
(1050, 113, 10, 'N'),
(1051, 113, 11, 'Y'),
(1052, 114, 1, 'Y'),
(1053, 114, 2, 'N'),
(1054, 114, 3, 'Y'),
(1055, 114, 4, 'Y'),
(1056, 114, 5, 'Y'),
(1057, 114, 6, 'N'),
(1058, 114, 7, 'Y'),
(1059, 114, 8, 'Y'),
(1060, 114, 9, 'Y'),
(1061, 114, 10, 'Y'),
(1062, 114, 11, 'Y'),
(1063, 115, 1, 'Y'),
(1064, 115, 2, 'N'),
(1065, 115, 3, 'N'),
(1066, 115, 4, 'Y'),
(1067, 115, 5, 'Y'),
(1068, 115, 6, 'N'),
(1069, 115, 7, 'Y'),
(1070, 115, 8, 'Y'),
(1071, 115, 9, 'Y'),
(1072, 115, 10, 'N'),
(1073, 115, 11, 'Y'),
(1074, 116, 1, 'Y'),
(1075, 116, 2, 'Y'),
(1076, 116, 3, 'Y'),
(1077, 116, 4, 'Y'),
(1078, 116, 5, 'N'),
(1079, 116, 6, 'N'),
(1080, 116, 7, 'Y'),
(1081, 116, 8, 'Y'),
(1082, 116, 9, 'Y'),
(1083, 116, 10, 'Y'),
(1084, 116, 11, 'Y'),
(1085, 117, 1, 'Y'),
(1086, 117, 2, 'N'),
(1087, 117, 3, 'Y'),
(1088, 117, 4, 'Y'),
(1089, 117, 5, 'N'),
(1090, 117, 6, 'Y'),
(1091, 117, 7, 'Y'),
(1092, 117, 8, 'Y'),
(1093, 117, 9, 'Y'),
(1094, 117, 10, 'N'),
(1095, 117, 11, 'Y'),
(1096, 118, 1, 'Y'),
(1097, 118, 2, 'Y'),
(1098, 118, 3, 'N'),
(1099, 118, 4, 'Y'),
(1100, 118, 5, 'N'),
(1101, 118, 6, 'Y'),
(1102, 118, 7, 'Y'),
(1103, 118, 8, 'Y'),
(1104, 118, 9, 'Y'),
(1105, 118, 10, 'Y'),
(1106, 118, 11, 'Y'),
(1108, 9, 4, 'Y'),
(1109, 9, 6, 'Y'),
(1110, 9, 7, 'Y'),
(1111, 9, 8, 'Y'),
(1112, 9, 9, 'Y'),
(1113, 9, 11, 'Y'),
(1114, 120, 4, 'Y'),
(1115, 120, 7, 'Y'),
(1116, 120, 8, 'Y'),
(1117, 120, 9, 'Y'),
(1118, 121, 1, 'Y'),
(1119, 121, 3, 'Y'),
(1120, 121, 4, 'Y'),
(1121, 121, 6, 'Y'),
(1122, 121, 7, 'Y'),
(1123, 121, 8, 'Y'),
(1124, 121, 9, 'Y'),
(1125, 121, 11, 'Y'),
(1126, 122, 4, 'Y'),
(1127, 122, 7, 'Y'),
(1128, 122, 8, 'Y'),
(1129, 122, 9, 'Y'),
(1130, 123, 1, 'Y'),
(1131, 123, 4, 'Y'),
(1132, 123, 7, 'Y'),
(1133, 123, 8, 'Y'),
(1134, 123, 9, 'Y'),
(1135, 123, 11, 'Y'),
(1136, 124, 1, 'Y'),
(1137, 124, 2, 'Y'),
(1138, 124, 4, 'Y'),
(1139, 124, 7, 'Y'),
(1140, 124, 8, 'Y'),
(1141, 124, 9, 'Y'),
(1142, 124, 10, 'Y'),
(1143, 124, 11, 'Y'),
(1144, 125, 1, 'Y'),
(1145, 125, 4, 'Y'),
(1146, 125, 7, 'Y'),
(1147, 125, 8, 'Y'),
(1148, 125, 9, 'Y'),
(1149, 125, 11, 'Y'),
(1150, 86, 8, 'Y'),
(1151, 86, 9, 'Y'),
(1152, 86, 11, 'Y'),
(1153, 86, 10, 'Y'),
(1154, 88, 8, 'Y'),
(1155, 88, 9, 'Y'),
(1156, 88, 10, 'Y'),
(1157, 88, 11, 'Y');
-- --------------------------------------------------------
--
-- Table structure for table `wisata_berpendukung`
--
-- Supporting facilities (e.g. hotels, per the sample row) attached to a
-- tourism site; coordinates are stored as text, like in the `wisata` inserts.
CREATE TABLE IF NOT EXISTS `wisata_berpendukung` (
`wiskung_id` int(11) NOT NULL AUTO_INCREMENT, -- surrogate key
`wisata_id` int(11) NOT NULL, -- tourism site this entry supports (no FK declared)
`faspen_id` int(11) NOT NULL, -- supporting-facility category id (no FK declared)
`wiskung_nama` varchar(100) NOT NULL, -- facility name ("nama")
`wiskung_alamat` text NOT NULL, -- street address ("alamat")
`wiskung_telp` varchar(50) NOT NULL, -- phone number ("telp")
`wiskung_website` varchar(50) NOT NULL, -- website URL
`wiskung_latitude` varchar(50) NOT NULL, -- latitude, stored as text
`wiskung_longitude` varchar(50) NOT NULL, -- longitude, stored as text
`wiskung_url_foto` text NOT NULL, -- photo file name (e.g. '2.jpg' in the sample row)
PRIMARY KEY (`wiskung_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=4 ;
--
-- Dumping data for table `wisata_berpendukung`
--
INSERT INTO `wisata_berpendukung` (`wiskung_id`, `wisata_id`, `faspen_id`, `wiskung_nama`, `wiskung_alamat`, `wiskung_telp`, `wiskung_website`, `wiskung_latitude`, `wiskung_longitude`, `wiskung_url_foto`) VALUES
(1, 29, 1, 'Hotel Ali', 'Jalan Baru No 15', '085678908765', 'www.example.com', '-7.85605', '112.45487', '2.jpg');
-- --------------------------------------------------------
--
-- Table structure for table `wisata_tags`
--
-- Many-to-many link between tourism sites and tags; both FK constraints are
-- added later in this dump via ALTER TABLE.
CREATE TABLE IF NOT EXISTS `wisata_tags` (
`wisata_id` int(11) NOT NULL DEFAULT '0',
`tag_id` int(11) NOT NULL DEFAULT '0',
PRIMARY KEY (`wisata_id`,`tag_id`), -- composite key prevents duplicate pairs
KEY `tag_id` (`tag_id`) -- secondary index backing the tag_id foreign key
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Constraints for dumped tables
--
--
-- Constraints for table `kategori_wisata`
--
-- Cascade on both delete and update so removing or renumbering a site or a
-- category automatically cleans up its rows in the `kategori_wisata` join table.
ALTER TABLE `kategori_wisata`
ADD CONSTRAINT `kategori_wisata_ibfk_1` FOREIGN KEY (`wisata_id`) REFERENCES `wisata` (`wisata_id`) ON DELETE CASCADE ON UPDATE CASCADE,
ADD CONSTRAINT `kategori_wisata_ibfk_2` FOREIGN KEY (`kategori_id`) REFERENCES `kategori` (`kategori_id`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `wisata_tags`
--
-- Same cascading policy for the site<->tag join table: deleting a site or a
-- tag removes its `wisata_tags` rows automatically.
ALTER TABLE `wisata_tags`
ADD CONSTRAINT `wisata_tags_ibfk_1` FOREIGN KEY (`wisata_id`) REFERENCES `wisata` (`wisata_id`) ON DELETE CASCADE ON UPDATE CASCADE,
ADD CONSTRAINT `wisata_tags_ibfk_2` FOREIGN KEY (`tag_id`) REFERENCES `tag` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
import React, { FC, useContext } from 'react';
import Link from 'next/link';
import Image from 'next/image';
import moment from 'moment';
import { Routes } from 'lib-client/constants';
import { withBem } from 'utils/bem';
import { getIsAdmin, getIsPostOwner } from 'lib-client/permissions';
import Button from 'components/Button';
import { useUpdatePost } from 'lib-client/react-query/posts/useUpdatePost';
import { useDeletePost } from 'lib-client/react-query/posts/useDeletePost';
import Alert from 'components/Alert';
import { getAvatarPath, uploadsImageLoader } from 'lib-client/imageLoaders';
import { MeContext } from 'lib-client/providers/Me';
import { PostWithAuthor } from 'types/models/Post';
// Props for PostItem: the post to render, with its author relation included.
type Props = {
post: PostWithAuthor;
};
/**
 * Single post card used in post lists.
 *
 * Renders the author's avatar, name and @username, the post title (linking to
 * the post page), a relative "updated x ago" timestamp and the post content.
 * When the viewer owns the post or is an admin it additionally renders
 * Publish (for unpublished posts), Edit and Delete controls.
 */
const PostItem: FC<Props> = ({ post }) => {
  const { me } = useContext(MeContext);
  const b = withBem('post-item');

  const { mutate: updatePost, ...restUpdate } = useUpdatePost();
  const { mutate: deletePost, ...restDelete } = useDeletePost();

  const { author } = post;

  // Next.js dynamic-route hrefs; `query` fills the [username] and [id] segments.
  const postHref = {
    pathname: `/[username]${Routes.SITE.POST}[id]`,
    query: { username: author.username, id: post.id },
  };
  const editPostHref = `${Routes.SITE.CREATE}${post.id}/`;
  const authorHref = {
    pathname: '/[username]',
    query: { username: author.username },
  };

  // Mutation controls are shown only to the post owner or an admin.
  const isOwnerOrAdmin = me && (getIsPostOwner(me, post) || getIsAdmin(me));

  // Author meta block; rendered twice (desktop header and mobile layout).
  const userInfo = (
    <>
      <Link href={authorHref}>
        <a className={b('name')}>{author.name}</a>
      </Link>
      <Link href={authorHref}>
        <a className={b('username')}>{`@${author.username}`}</a>
      </Link>
      <Link href={postHref}>
        <a className={b('time')}>{moment(post.updatedAt).fromNow()}</a>
      </Link>
    </>
  );

  return (
    <article className={b()}>
      {/* surface mutation failures inline, one alert per mutation */}
      {restUpdate.isError && <Alert variant="error" message={restUpdate.error.message} />}
      {restDelete.isError && <Alert variant="error" message={restDelete.error.message} />}

      <div className={b('header')}>
        {/* avatar */}
        <div className={b('left')}>
          <Link href={authorHref}>
            <a>
              <Image
                loader={uploadsImageLoader}
                src={getAvatarPath(author)}
                width={96}
                height={96}
                alt={author.name ?? 'avatar'}
                objectFit="cover"
              />
            </a>
          </Link>
        </div>

        {/* title */}
        <div className={b('right')}>
          <Link href={postHref}>
            <a className={b('post-title')}>
              <h2>{post.title}</h2>
            </a>
          </Link>
          <div className={b('user-title-desktop')}>{userInfo}</div>
        </div>
      </div>

      <div className={b('user-title-mobile')}>{userInfo}</div>

      {/* content */}
      <div className={b('content')}>{post.content}</div>

      {isOwnerOrAdmin && (
        <div className={b('publish-delete')}>
          {!post.published && (
            <Button
              onClick={(e) => {
                e.stopPropagation();
                updatePost({ id: post.id, post: { published: true } });
              }}
            >
              {/* fix: label was misspelled "Submiting..." */}
              {!restUpdate.isLoading ? 'Publish' : 'Submitting...'}
            </Button>
          )}
          <Link href={editPostHref}>
            <a>
              <Button tagName="span">Edit</Button>
            </a>
          </Link>
          <Button
            variant="secondary"
            onClick={(e) => {
              e.stopPropagation();
              deletePost(post.id);
            }}
          >
            {!restDelete.isLoading ? 'Delete' : 'Deleting...'}
          </Button>
        </div>
      )}
    </article>
  );
};

export default PostItem;
|
#!/bin/bash
source inc_vars.sh

# Script for multiple shallow water equation tests.
# For each model configuration, install it as the active parameter file
# (par/swm.par) and run the full grid suite (HR95 and SCVT, 8 levels).
for model in swmnew swmtrsk; do
  cp "par/${model}.par" par/swm.par
  sh/runngridsHR95_8.sh
  sh/runngridsSCVT_8.sh
done
|
"use strict";
// Babel-transpiled CommonJS module: mark it as an ES-module interop target
// and reserve the default-export slot (assigned at the bottom of the file).
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
// The three inheritance-detection strategies the Inheritance facade delegates to.
var _UtilInherits = _interopRequireDefault(require("./UtilInherits"));
var _Prototypal = _interopRequireDefault(require("./Prototypal"));
var _GlobalExtends = _interopRequireDefault(require("./GlobalExtends"));
// Babel interop helper: a require()d ES module (marked __esModule) passes
// through unchanged; a plain CommonJS export is wrapped as { "default": obj }.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { "default": obj };
}
// Babel class guard: throws when a transpiled class constructor is invoked
// without `new` (i.e. `instance` is not an instance of `Constructor`).
function _classCallCheck(instance, Constructor) {
  if (instance instanceof Constructor) {
    return;
  }
  throw new TypeError("Cannot call a class as a function");
}
// Babel helper: installs each property descriptor from `props` onto `target`.
// Descriptors are normalized in place: configurable always true, enumerable
// defaulted to false, and data descriptors (those with "value") made writable.
function _defineProperties(target, props) {
  for (var idx = 0; idx < props.length; idx += 1) {
    var desc = props[idx];
    desc.enumerable = desc.enumerable || false;
    desc.configurable = true;
    if ("value" in desc) {
      desc.writable = true;
    }
    Object.defineProperty(target, desc.key, desc);
  }
}
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
/**
* Processes nodes to detect super classes and return information for later
* transformation.
*/
var Inheritance = /*#__PURE__*/function () {
/**
* @param {Object} cfg
* @param {PotentialClass[]} cfg.potentialClasses Class name
*/
function Inheritance() {
_classCallCheck(this, Inheritance);
this.utilInherits = new _UtilInherits["default"]();
this.prototypal = new _Prototypal["default"]();
this.globalExtends = new _GlobalExtends["default"]();
}
/**
* Process a node and return inheritance details if found.
* @param {Object} node
* @param {Object} parent
* @returns {Object}
* {String} className
* {Node} superClass
* {Object[]} relatedExpressions
*/
_createClass(Inheritance, [{
key: "process",
value: function process(node, parent) {
return this.utilInherits.process(node, parent) || this.prototypal.process(node, parent) || this.globalExtends.process(node, parent);
}
}]);
return Inheritance;
}();
exports["default"] = Inheritance; |
import React, { Component } from 'react';
import profile from '../../images/profile-pictures.jpg';
import Index from '../Common/index';
import Alerts from './alerts';
import { NavLink } from 'react-router-dom';
class Profile extends Component {
constructor(props){
super(props);
this.state={
alert:true,
summary:false
}
}
componentWillMount(){
// document.getElementById("header").style.display = "none";
}
render() {
return (
<div className="blue">
<div className="container">
<div className="row fixed-top">
<div className="top-bar tbbs blue " id="responsive-menu">
<div className="top-bar-left ">
<ul className="dropdown menu blue" data-dropdown-menu>
<li className="menu-text white"><NAME></li>
</ul>
</div>
<div className="top-bar-right white">
<NavLink className="home-anc white" to="/"><i className="zmdi zmdi-menu zmdi-hc-2x"></i></NavLink>
</div>
</div>
</div>
</div>
<div className="container mt55">
<div className="row">
<div className="columns small-12">
<div className="container-4 ">
<div className="pr-box">
<div className="pr-box-left">
<img src={profile} alt="profile" />
</div>
<div className="pr-box-right">
<p>Date of Birth
<strong>08/28-1984</strong>
</p>
<p>Sex at Birth
<strong>08/28-1984</strong>
</p>
<p>Phone
<strong>444-555-5555 <i className="zmdi zmdi-phone"></i></strong>
</p>
</div>
</div>
<div className="sne">
<a className="sne-anc">+ Start new encounter</a>
</div>
</div>
<div className="container-5">
<ul className="tabs" data-tabs id="example-tabs">
<li className="tabs-title is-active"><a data-aria-selected="true">Alert</a></li>
<li className="tabs-title"><a data-tabs-target="panel2" >Summary</a></li>
</ul>
<Alerts />
</div>
</div>
</div>
</div>
</div>
);
}
}
export default Profile;
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.entity.basic;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import brooklyn.basic.BrooklynDynamicType;
import brooklyn.config.ConfigKey.HasConfigKey;
import brooklyn.entity.Effector;
import brooklyn.entity.Entity;
import brooklyn.entity.EntityType;
import brooklyn.entity.effector.EffectorAndBody;
import brooklyn.entity.effector.EffectorBody;
import brooklyn.entity.effector.EffectorTasks.EffectorBodyTaskFactory;
import brooklyn.entity.effector.EffectorTasks.EffectorTaskFactory;
import brooklyn.entity.effector.EffectorWithBody;
import brooklyn.entity.effector.Effectors;
import brooklyn.event.Sensor;
import brooklyn.util.javalang.Reflections;
import com.google.common.annotations.Beta;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.collect.Maps;
/** This is the actual type of an entity instance at runtime,
 * which can change from the static {@link EntityType}, and can change over time;
 * for this reason it does *not* implement EntityType, but
 * callers can call {@link #getSnapshot()} to get a snapshot such instance
 */
public class EntityDynamicType extends BrooklynDynamicType<Entity, AbstractEntity> {

    private static final Logger LOG = LoggerFactory.getLogger(EntityDynamicType.class);

    /**
     * Effectors on this entity, by name.
     */
    // TODO support overloading; requires not using a map keyed off method name.
    private final Map<String, Effector<?>> effectors = new ConcurrentHashMap<String, Effector<?>>();

    /**
     * Map of sensors on this entity, by name.
     */
    private final ConcurrentMap<String,Sensor<?>> sensors = new ConcurrentHashMap<String, Sensor<?>>();

    public EntityDynamicType(AbstractEntity entity) {
        this(entity.getClass(), entity);
    }

    public EntityDynamicType(Class<? extends Entity> clazz) {
        this(clazz, null);
    }

    private EntityDynamicType(Class<? extends Entity> clazz, AbstractEntity entity) {
        super(clazz, entity);
        String id = entity==null ? clazz.getName() : entity.getId();
        effectors.putAll(findEffectors(clazz, null));
        if (LOG.isTraceEnabled())
            LOG.trace("Entity {} effectors: {}", id, Joiner.on(", ").join(effectors.keySet()));
        sensors.putAll(findSensors(clazz, null));
        if (LOG.isTraceEnabled())
            LOG.trace("Entity {} sensors: {}", id, Joiner.on(", ").join(sensors.keySet()));
        refreshSnapshot();
    }

    /**
     * @deprecated since 0.7; unused code; instead use {@link #getBrooklynClass()}
     */
    @Deprecated
    public Class<? extends Entity> getEntityClass() {
        return super.getBrooklynClass();
    }

    public EntityType getSnapshot() {
        return (EntityType) super.getSnapshot();
    }

    // --------------------------------------------------

    /**
     * @return the effector with the given name, or null if not found
     */
    public Effector<?> getEffector(String name) {
        return effectors.get(name);
    }

    /**
     * Effectors available on this entity.
     */
    public Map<String,Effector<?>> getEffectors() {
        return Collections.unmodifiableMap(effectors);
    }

    /**
     * Adds the given {@link Effector} to this entity.
     */
    @Beta
    public void addEffector(Effector<?> newEffector) {
        Effector<?> oldEffector = effectors.put(newEffector.getName(), newEffector);
        invalidateSnapshot();
        if (oldEffector!=null)
            instance.emit(AbstractEntity.EFFECTOR_CHANGED, newEffector.getName());
        else
            instance.emit(AbstractEntity.EFFECTOR_ADDED, newEffector.getName());
    }

    /** Adds an effector with an explicit body */
    @Beta
    public <T> void addEffector(Effector<T> effector, EffectorTaskFactory<T> body) {
        addEffector(new EffectorAndBody<T>(effector, body));
    }

    /** Adds an effector with an explicit body */
    @Beta
    public <T> void addEffector(Effector<T> effector, EffectorBody<T> body) {
        addEffector(effector, new EffectorBodyTaskFactory<T>(body));
    }

    // --------------------------------------------------

    /**
     * Sensors available on this entity.
     */
    public Map<String,Sensor<?>> getSensors() {
        return Collections.unmodifiableMap(sensors);
    }

    /**
     * Convenience for finding named sensor.
     */
    public Sensor<?> getSensor(String sensorName) {
        return sensors.get(sensorName);
    }

    /**
     * Adds the given {@link Sensor} to this entity.
     */
    public void addSensor(Sensor<?> newSensor) {
        sensors.put(newSensor.getName(), newSensor);
        invalidateSnapshot();
        instance.emit(AbstractEntity.SENSOR_ADDED, newSensor);
    }

    /**
     * Adds the given {@link Sensor}s to this entity.
     */
    public void addSensors(Iterable<? extends Sensor<?>> newSensors) {
        for (Sensor<?> sensor : newSensors) {
            addSensor(sensor);
        }
    }

    public void addSensorIfAbsent(Sensor<?> newSensor) {
        Sensor<?> prev = addSensorIfAbsentWithoutPublishing(newSensor);
        if (prev == null) {
            instance.emit(AbstractEntity.SENSOR_ADDED, newSensor);
        }
    }

    public Sensor<?> addSensorIfAbsentWithoutPublishing(Sensor<?> newSensor) {
        Sensor<?> prev = sensors.putIfAbsent(newSensor.getName(), newSensor);
        if (prev == null) {
            invalidateSnapshot();
        }
        return prev;
    }

    /**
     * Removes the named {@link Sensor} from this entity.
     */
    public Sensor<?> removeSensor(String sensorName) {
        Sensor<?> result = sensors.remove(sensorName);
        if (result != null) {
            invalidateSnapshot();
            instance.emit(AbstractEntity.SENSOR_REMOVED, result);
        }
        return result;
    }

    /**
     * Removes the named {@link Sensor} from this entity.
     */
    public boolean removeSensor(Sensor<?> sensor) {
        return (removeSensor(sensor.getName()) != null);
    }

    // --------------------------------------------------

    @Override
    protected EntityTypeSnapshot newSnapshot() {
        return new EntityTypeSnapshot(name, value(configKeys), sensors, effectors.values());
    }

    /**
     * Finds the effectors defined on the entity's class, statics and optionally any non-static (discouraged).
     */
    public static Map<String,Effector<?>> findEffectors(Class<? extends Entity> clazz, Entity optionalEntity) {
        try {
            Map<String,Effector<?>> result = Maps.newLinkedHashMap();
            Map<String,Field> fieldSources = Maps.newLinkedHashMap();
            Map<String,Method> methodSources = Maps.newLinkedHashMap();

            for (Field f : Reflections.findPublicFieldsOrderedBySuper(clazz)) {
                if (Effector.class.isAssignableFrom(f.getType())) {
                    if (!Modifier.isStatic(f.getModifiers())) {
                        // require it to be static or we have an instance
                        LOG.warn("Discouraged/deprecated use of non-static effector field "+f+" defined in " + (optionalEntity!=null ? optionalEntity : clazz));
                        if (optionalEntity==null) continue;
                    }
                    Effector<?> eff = (Effector<?>) f.get(optionalEntity);
                    if (eff==null) {
                        LOG.warn("Effector "+f+" undefined for "+clazz+" ("+optionalEntity+")");
                        continue;
                    }
                    Effector<?> overwritten = result.put(eff.getName(), eff);
                    Field overwrittenFieldSource = fieldSources.put(eff.getName(), f);
                    if (overwritten!=null && !Effectors.sameInstance(overwritten, eff)) {
                        LOG.trace("multiple definitions for effector {} on {}; preferring {} from {} to {} from {}", new Object[] {
                                eff.getName(), (optionalEntity != null ? optionalEntity : clazz), eff, f, overwritten,
                                overwrittenFieldSource});
                    }
                }
            }

            for (Method m : Reflections.findPublicMethodsOrderedBySuper(clazz)) {
                brooklyn.entity.annotation.Effector effectorAnnotation = m.getAnnotation(brooklyn.entity.annotation.Effector.class);
                if (effectorAnnotation != null) {
                    if (Modifier.isStatic(m.getModifiers())) {
                        // NOTE(review): unlike the field branch above, this warns on *static*
                        // annotated methods yet still skips when no instance is available;
                        // looks inconsistent with the comment's intent — confirm upstream.
                        LOG.warn("Discouraged/deprecated use of static annotated effector method "+m+" defined in " + (optionalEntity!=null ? optionalEntity : clazz));
                        if (optionalEntity==null) continue;
                    }
                    Effector<?> eff = MethodEffector.create(m);
                    Effector<?> overwritten = result.get(eff.getName());
                    if ((overwritten instanceof EffectorWithBody) && !(overwritten instanceof MethodEffector<?>)) {
                        // don't let annotations on methods override a static, unless that static is a MethodEffector
                        // TODO not perfect, but approx right; we should clarify whether we prefer statics or methods
                    } else {
                        result.put(eff.getName(), eff);
                        Method overwrittenMethodSource = methodSources.put(eff.getName(), m);
                        Field overwrittenFieldSource = fieldSources.remove(eff.getName());
                        // BUG FIX: the "multiple definitions" trace used to fire on every
                        // annotated method, logging a bogus "preferring ... to null from null"
                        // when nothing was overwritten. Guard it like the field branch above.
                        if (overwritten != null && !Effectors.sameInstance(overwritten, eff)) {
                            LOG.trace("multiple definitions for effector {} on {}; preferring {} from {} to {} from {}", new Object[] {
                                    eff.getName(), (optionalEntity != null ? optionalEntity : clazz), eff, m, overwritten,
                                    (overwrittenMethodSource != null ? overwrittenMethodSource : overwrittenFieldSource)});
                        }
                    }
                }
            }

            return result;
        } catch (IllegalAccessException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Finds the sensors defined on the entity's class, statics and optionally any non-static (discouraged).
     */
    public static Map<String,Sensor<?>> findSensors(Class<? extends Entity> clazz, Entity optionalEntity) {
        try {
            Map<String,Sensor<?>> result = Maps.newLinkedHashMap();
            Map<String,Field> sources = Maps.newLinkedHashMap();

            for (Field f : Reflections.findPublicFieldsOrderedBySuper((clazz))) {
                if (Sensor.class.isAssignableFrom(f.getType())) {
                    if (!Modifier.isStatic(f.getModifiers())) {
                        // require it to be static or we have an instance
                        LOG.warn("Discouraged use of non-static sensor "+f+" defined in " + (optionalEntity!=null ? optionalEntity : clazz));
                        if (optionalEntity==null) continue;
                    }
                    Sensor<?> sens = (Sensor<?>) f.get(optionalEntity);
                    Sensor<?> overwritten = result.put(sens.getName(), sens);
                    Field source = sources.put(sens.getName(), f);
                    if (overwritten!=null && overwritten != sens) {
                        if (sens instanceof HasConfigKey) {
                            // probably overriding defaults, just log low level (there will be add'l logging in config key section)
                            LOG.trace("multiple definitions for config sensor {} on {}; preferring {} from {} to {} from {}", new Object[] {
                                    sens.getName(), optionalEntity!=null ? optionalEntity : clazz, sens, f, overwritten, source});
                        } else {
                            LOG.warn("multiple definitions for sensor {} on {}; preferring {} from {} to {} from {}", new Object[] {
                                    sens.getName(), optionalEntity!=null ? optionalEntity : clazz, sens, f, overwritten, source});
                        }
                    }
                }
            }

            return result;
        } catch (IllegalAccessException e) {
            throw Throwables.propagate(e);
        }
    }
}
|
/*!
* Copyright (c) 2015-present, Okta, Inc. and/or its affiliates. All rights reserved.
* The Okta software accompanied by this notice is provided pursuant to the Apache License, Version 2.0 (the "License.")
*
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and limitations under the License.
*
*/
import { AuthSdkError } from '../errors';
import { OktaAuth, TokenParams, TokenResponse } from '../types';
import { clone } from '../util';
import { getToken } from './getToken';
// Performs a silent (no user interaction) token request. The caller's
// options are cloned — never mutated — and forced into the silent-flow
// configuration before delegating to getToken.
export function getWithoutPrompt(sdk: OktaAuth, options: TokenParams): Promise<TokenResponse> {
  // Reject the pre-3.0 calling convention (multiple option sets).
  if (arguments.length > 2) {
    return Promise.reject(new AuthSdkError('As of version 3.0, "getWithoutPrompt" takes only a single set of options'));
  }
  const tokenParams: TokenParams = {
    ...(clone(options) || {}),
    prompt: 'none',
    responseMode: 'okta_post_message',
    display: null
  };
  return getToken(sdk, tokenParams);
}
|
from feature_judge import is_monotone_increase
from util import generate_random_list
def test_monotone_increase():
    """Randomized check of is_monotone_increase against a naive reference."""
    for _ in range(10):  # ten independent random trials
        test_list = generate_random_list()
        # Reference predicate: every consecutive pair is non-decreasing.
        expected_result = all(a <= b for a, b in zip(test_list, test_list[1:]))
        result = is_monotone_increase(test_list)
        assert result == expected_result, f"Test failed for list {test_list}"
        print(f"Test passed for list {test_list}")
/* mksignames.c -- Create and write `signames.h', which contains an array of
signal names. */
/* Copyright (C) 1992-2006 Free Software Foundation, Inc.
This file is part of GNU Bash, the Bourne Again SHell.
Bash is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Bash is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Bash. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>
#include <sys/types.h>
#include <signal.h>
#include <stdio.h>
#if defined (HAVE_STDLIB_H)
# include <stdlib.h>
#else
# include "ansi_stdlib.h"
#endif /* HAVE_STDLIB_H */
/* Duplicated from signames.c */
#if !defined (NSIG)
# define NSIG 64
#endif
#define LASTSIG NSIG+2
/* Imported from signames.c */
extern void initialize_signames ();
extern char *signal_names[];
char *progname;
/* Write the generated signames.h header to STREAM.
   In the normal (native) build, the signal_names[] table — already filled
   in by initialize_signames() — is dumped as a static C initializer and
   initialize_signames() is defined away as a no-op macro.  When
   cross-compiling, only extern declarations are emitted so the table is
   built at run time on the target instead. */
void
write_signames (stream)
     FILE *stream;
{
  register int i;

  fprintf (stream, "/* This file was automatically created by %s.\n",
	   progname);
  fprintf (stream, "   Do not edit.  Edit support/mksignames.c instead. */\n\n");
  fprintf (stream,
	   "/* A translation list so we can be polite to our users. */\n");
#if defined (CROSS_COMPILING)
  fprintf (stream, "extern char *signal_names[];\n\n");
  fprintf (stream, "extern void initialize_signames __P((void));\n\n");
#else
  fprintf (stream, "char *signal_names[NSIG + 4] = {\n");
  /* LASTSIG is NSIG+2, so entries 0..NSIG+2 plus the NULL terminator
     fit in the NSIG + 4 slots declared above. */
  for (i = 0; i <= LASTSIG; i++)
    fprintf (stream, "    \"%s\",\n", signal_names[i]);
  fprintf (stream, "    (char *)0x0\n");
  fprintf (stream, "};\n\n");
  fprintf (stream, "#define initialize_signames()\n\n");
#endif
}
/* Entry point: write signames.h to stdout, or to the file named by the
   single optional argument.  Exit status: 0 on success, 1 on usage error,
   2 when the output file cannot be opened. */
int
main (argc, argv)
     int argc;
     char **argv;
{
  char *stream_name;
  FILE *stream;

  progname = argv[0];

  if (argc == 1)
    {
      stream_name = "stdout";
      stream = stdout;
    }
  else if (argc == 2)
    {
      stream_name = argv[1];
      stream = fopen (stream_name, "w");
    }
  else
    {
      fprintf (stderr, "Usage: %s [output-file]\n", progname);
      exit (1);
    }

  if (!stream)
    {
      fprintf (stderr, "%s: %s: cannot open for writing\n",
	       progname, stream_name);
      exit (2);
    }

  /* Native builds fill the table before dumping it; cross builds only
     emit extern declarations, so no initialization is needed here. */
#if !defined (CROSS_COMPILING)
  initialize_signames ();
#endif

  write_signames (stream);
  exit (0);
}
|
<gh_stars>1-10
import { createSlice, createAsyncThunk } from '@reduxjs/toolkit';
import { EdiumSummary, LoadedState } from "./slices.types";
/**
 * Async thunk that fetches the list of edium summaries from the local API
 * and projects each record down to the fields the UI needs.
 *
 * Fix: the response status is now checked — previously a non-2xx response
 * with a JSON body would have been treated as a successful load.
 */
const loadSummary = createAsyncThunk(
  "summary/load",
  async function () {
    // NOTE(review): hard-coded dev endpoint — presumably should come from
    // configuration; confirm before deploying.
    const res = await fetch("http://localhost:59130/edia");
    if (!res.ok) {
      // Reject explicitly so the slice's `rejected` handler runs.
      throw new Error(`summary/load failed: HTTP ${res.status}`);
    }
    const json = await res.json();
    // Keep only the summary fields; drop anything else the API returns.
    const edia = json.map((raw: EdiumSummary) => ({
      id: raw.id,
      kind: raw.kind,
      name: raw.name
    }));
    return edia;
  }
);
const summarySlice = createSlice({
name: 'summary',
initialState: {
loadedState: "no" as LoadedState,
edia: [] as EdiumSummary[]
},
reducers: {},
extraReducers: builder => {
builder.addCase(loadSummary.pending, (state) => {
state.loadedState = "loading";
});
builder.addCase(loadSummary.fulfilled, (state, action) => {
state.edia = action.payload;
state.loadedState = "yes";
});
builder.addCase(loadSummary.rejected, (state) => {
console.error("Error on summary/load");
state.loadedState = "no";
});
},
});
const summarySliceThunks = { loadSummary };
export { summarySlice, summarySliceThunks }; |
<reponame>jtmccormick18/electron-react-bp
/* jshint indent: 1 */
// Sequelize model for the DATA_EDITS table: an audit log of record edits,
// keyed by RECID, tracking the edited record (PKEY/FKEY + their column
// names), the parcel, the changed item and its definition, plus who edited
// it and when.
// NOTE(review): the string defaults '((0))' look like raw MSSQL default
// expressions captured by a model generator (e.g. sequelize-auto) rather
// than literal values — presumably harmless, but confirm against the DB.
module.exports = function(sequelize, DataTypes) {
	return sequelize.define('dataEdits', {
		// Surrogate primary key.
		recid: {
			type: DataTypes.INTEGER,
			allowNull: false,
			primaryKey: true,
			autoIncrement: true,
			field: 'RECID'
		},
		// Primary-key value of the edited record, plus the column it lives in.
		pkey: {
			type: DataTypes.INTEGER,
			allowNull: true,
			defaultValue: '((0))',
			field: 'PKEY'
		},
		pkeyName: {
			type: DataTypes.CHAR,
			allowNull: true,
			field: 'PKEY_NAME'
		},
		// Foreign-key value of the edited record, plus the column it lives in.
		fkey: {
			type: DataTypes.INTEGER,
			allowNull: true,
			defaultValue: '((0))',
			field: 'FKEY'
		},
		fkeyName: {
			type: DataTypes.CHAR,
			allowNull: true,
			field: 'FKEY_NAME'
		},
		parcelNo: {
			type: DataTypes.STRING,
			allowNull: true,
			field: 'PARCEL_NO'
		},
		// The item that was changed and its definition/description.
		item: {
			type: DataTypes.CHAR,
			allowNull: true,
			field: 'ITEM'
		},
		def: {
			type: DataTypes.CHAR,
			allowNull: true,
			field: 'DEF'
		},
		// Who made the edit, and when.
		userid: {
			type: DataTypes.STRING,
			allowNull: true,
			field: 'USERID'
		},
		editdate: {
			type: DataTypes.DATE,
			allowNull: true,
			field: 'EDITDATE'
		}
	}, {
		tableName: 'DATA_EDITS',
		timestamps: false
	});
};
|
import LoginScreen from "./LoginScreen"
import strings from "./strings"
export { strings }
export default LoginScreen |
#!/bin/bash

# stigadm check for STIG V0029502 (GEN009180, AIX 6.1): "The system must not
# have the tool-talk database server (ttdbserver) service active."
# NOTE(review): validation/remediation is not yet implemented — ${errors[@]}
# is pre-seeded with the STIG id below, so this module always reports
# "Not yet implemented!".

# Module specific variables go here
# Files: file=/path/to/file
# Arrays: declare -a array_name
# Strings: foo="bar"
# Integers: x=9

###############################################
# Bootstrapping environment setup
###############################################

# Get our working directory
cwd="$(pwd)"

# Define our bootstrapper location
bootstrap="${cwd}/tools/bootstrap.sh"

# Bail if it cannot be found
if [ ! -f ${bootstrap} ]; then
  echo "Unable to locate bootstrap; ${bootstrap}" && exit 1
fi

# Load our bootstrap
# NOTE(review): bootstrap.sh presumably defines ${restore}, ${change},
# ${verbose}, ${stigid}, ${log} and the report/percent/print_array/module_*
# helpers used below — confirm against tools/bootstrap.sh.
source ${bootstrap}

###############################################
# Metrics start
###############################################

# Get EPOCH
s_epoch="$(gen_epoch)"

# Create a timestamp
timestamp="$(gen_date)"

# Whos is calling? 0 = singular, 1 is as group
caller=$(ps $PPID | grep -c stigadm)

###############################################
# Perform restoration
###############################################

# If ${restore} = 1 go to restoration mode
if [ ${restore} -eq 1 ]; then
  report "Not yet implemented" && exit 1
fi

###############################################
# STIG validation/remediation
###############################################

# Module specific validation code should go here
# Errors should go in ${errors[@]} array (which on remediation get handled)
# All inspected items should go in ${inspected[@]} array

# Pre-seeded with the STIG id so the module reports failure until implemented.
errors=("${stigid}")

# If ${change} = 1
#if [ ${change} -eq 1 ]; then

  # Create the backup env
  #backup_setup_env "${backup_path}"

  # Create a backup (configuration output, file/folde permissions output etc
  #bu_configuration "${backup_path}" "${author}" "${stigid}" "$(echo "${array_values[@]}" | tr ' ' '\n')"
  #bu_file "${backup_path}" "${author}" "${stigid}" "${file}"
  #if [ $? -ne 0 ]; then

    # Stop, we require a backup
    #report "Unable to create backup" && exit 1
  #fi

  # Iterate ${errors[@]}
  #for error in ${errors[@]}; do

    # Work to remediate ${error} should go here
  #done
#fi

# Remove dupes
#inspected=( $(remove_duplicates "${inspected[@]}") )

###############################################
# Results for printable report
###############################################

# If ${#errors[@]} > 0
if [ ${#errors[@]} -gt 0 ]; then

  # Set ${results} error message
  #results="Failed validation" UNCOMMENT ONCE WORK COMPLETE!
  results="Not yet implemented!"
fi

# Set ${results} passed message
[ ${#errors[@]} -eq 0 ] && results="Passed validation"

###############################################
# Report generation specifics
###############################################

# Apply some values expected for report footer
[ ${#errors[@]} -eq 0 ] && passed=1 || passed=0
[ ${#errors[@]} -gt 0 ] && failed=1 || failed=0

# Calculate a percentage from applied modules & errors incurred
percentage=$(percent ${passed} ${failed})

# If the caller was only independant
if [ ${caller} -eq 0 ]; then

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
  fi

  # Generate the report
  report "${results}"

  # Display the report
  cat ${log}
else

  # Since we were called from stigadm
  module_header "${results}"

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
  fi

  # Finish up the module specific report
  module_footer
fi

###############################################
# Return code for larger report
###############################################

# Return an error/success code (0/1)
exit ${#errors[@]}

# Date: 2018-09-18
#
# Severity: CAT-II
# Classification: UNCLASSIFIED
# STIG_ID: V0029502
# STIG_Version: SV-38706r1
# Rule_ID: GEN009180
#
# OS: AIX
# Version: 6.1
# Architecture:
#
# Title: The system must not have the tool-talk database server (ttdbserver) service active.
# Description: The ttdbserver service for CDE is an unnecessary service that runs as root and might be compromised.
/*
* Benchmark.java
*
* Created on May 30, 2007, 9:45 PM
*
* From "The Art of Multiprocessor Programming",
* by <NAME> and <NAME>.
*
* This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.
* http://i.creativecommons.org/l/by-sa/3.0/us/88x31.png
*/
package tamp.ch18.TinyTM;
import java.util.Iterator;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * This abstract class is the superclass for the integer set benchmarks.
 *
 * @author <NAME>
 * @date April 2004
 */
public abstract class Benchmark implements Iterable<Integer> {
    /**
     * How large to initialize the integer set.
     */
    protected final int INITIAL_SIZE = 1;
    /**
     * After the run is over, synchronize merging statistics with other threads.
     */
    static final private Object lock = new Object();
    /**
     * local variable
     */
    int element;
    /**
     * Number of calls to insert()
     */
    int insertCalls = 0;
    /**
     * number of calls to contains()
     */
    int containsCalls = 0;
    /**
     * number of calls to remove()
     */
    int removeCalls = 0;
    /**
     * amount by which the set size has changed
     */
    protected int delta = 0;

    /**
     * Give subclass a chance to intialize private fields.
     */
    protected abstract void init();

    /**
     * Give subclass a chance to intialize thread-local fields.
     */
    protected void threadLocalInit() {
    }

    /**
     * Iterate through set. Not necessarily thread-safe.
     */
    public abstract Iterator<Integer> iterator();

    /**
     * Add an element to the integer set, if it is not already there.
     *
     * @param v the integer value to add from the set
     * @return true iff value was added.
     */
    public abstract boolean insert(int v);

    /**
     * Tests wheter a value is in an the integer set.
     *
     * @param v the integer value to insert into the set
     * @return true iff presence was confirmed.
     */
    public abstract boolean contains(int v);

    /**
     * Removes an element from the integer set, if it is there.
     *
     * @param v the integer value to delete from the set
     * @return true iff v was removed
     */
    public abstract boolean remove(int v);

    /**
     * Creates a new test thread.
     *
     * @param percent Mix of mutators and observers.
     * @return Thread to run.
     */
    public TThread createThread(int percent) {
        try {
            TestThread testThread = new TestThread(this, percent);
            return testThread;
        } catch (Exception e) {
            e.printStackTrace(System.out);
            return null;
        }
    }

    /**
     * Prints an error message to <code>System.out</code>, including a
     * standard header to identify the message as an error message.
     *
     * @param s String describing error
     */
    protected static void reportError(String s) {
        System.out.println(" ERROR: " + s);
        System.out.flush();
    }

    public void report() {
        System.out.println("Insert/Remove calls:\t" + (insertCalls + removeCalls));
        System.out.println("Contains calls:\t" + containsCalls);
    }

    private class TestThread extends TThread {
        Benchmark intSet;
        public int percent = 0; // percent inserts

        TestThread(Benchmark intSet, int percent) {
            this.intSet = intSet;
            this.percent = percent;
        }

        @Override
        public void run() {
            intSet.threadLocalInit();
            Random random = new Random(this.hashCode());
            random.setSeed(System.currentTimeMillis()); // comment out for determinstic
            boolean toggle = true;
            final int value = ThreadID.get();
            try {
                while (!Thread.currentThread().isInterrupted()) {
                    boolean result = true;
                    element = random.nextInt();
                    if (Math.abs(element) % 100 < percent) {
                        if (toggle) { // insert on even turns
                            result = TThread.doIt(new Callable<java.lang.Boolean>() {
                                public java.lang.Boolean call() {
                                    return intSet.insert(value);
                                }
                            });
                        } else { // remove on odd turns
                            result = TThread.doIt(new Callable<Boolean>() {
                                public Boolean call() {
                                    return intSet.remove(value);
                                }
                            });
                        }
                        // BUG FIX: the toggle was previously flipped only in the
                        // remove branch, so a thread that started with an insert
                        // (toggle == true) never alternated to remove. Flip after
                        // every mutator call so turns truly alternate.
                        toggle = !toggle;
                    } else {
                        TThread.doIt(new Callable<Void>() {
                            public Void call() {
                                intSet.contains(value);
                                return null;
                            }
                        });
                    }
                }
            } catch (InterruptedException ex) {
            } catch (Exception ex) {
                Logger.getLogger("global").log(Level.SEVERE, null, ex);
            }
        }
    }

    /**
     * Walks the set checking that it is sorted, duplicate-free, and of the
     * expected size; exits the JVM with a diagnostic on the first violation.
     */
    public void sanityCheck() {
        long expected = INITIAL_SIZE + delta;
        int length = 0; // BUG FIX: previously started at 1, overcounting by one
        int prevValue = Integer.MIN_VALUE;
        // BUG FIX: guard the first iteration so a legitimate first element equal
        // to Integer.MIN_VALUE is not misreported as a duplicate.
        boolean first = true;
        for (int value : this) {
            length++;
            if (!first) {
                if (value < prevValue) {
                    System.out.println("ERROR: set not sorted");
                    System.exit(0);
                }
                if (value == prevValue) {
                    System.out.println("ERROR: set has duplicates!");
                    System.exit(0);
                }
            }
            first = false;
            prevValue = value;
        }
        // BUG FIX: the comparison was inverted (== reported an error on a
        // matching length); only a mismatch is an error.
        if (length != expected) {
            System.out.println("ERROR: set has bad length!");
            System.exit(0);
        }
        System.out.println("Integer Set OK");
    }

    /**
     * Creates a new Benchmark, filling the set with INITIAL_SIZE random values.
     */
    public Benchmark() {
        int size = 0;
        init();
        Random random = new Random(this.hashCode());
        while (size < INITIAL_SIZE) {
            if (insert(random.nextInt())) {
                size++;
            }
        }
    }
}
|
#!/bin/bash
#
# Copyright (C) 2013-2014 Red Hat, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of the FreeBSD Project.
#
# Authors: Jan Safranek <jsafrane@redhat.com>
# 'lmi mount' test.
# - create a filesystem
# - mount & unmount it
# 'lmi mount' test body: create an ext3 filesystem on the first test
# partition, mount it, verify the mount via /proc/mounts and the lmi CLI,
# then unmount, verify removal, and delete the filesystem.
#
# Fix: "rlAssertNptGrep" was a typo for rlAssertNotGrep — the unmount
# verification silently did nothing (unknown command inside rlRun-less
# context), so a stale mount listing would not have failed the test.
. ./base.sh

rlJournalStart

part=${PARTITIONS[0]}
testdir=$( mktemp -d /var/tmp/XXXXXXXXX )

rlPhaseStartTest "Create filesystem"
    rlLogInfo "Create filesystem"
    rlRun "$LMI storage fs create ext3 $part"
rlPhaseEnd

rlPhaseStartTest "Mount without any options"
    rlLogInfo "Mount the fs"
    rlRun "$LMI storage mount create $part $testdir"

    rlLogInfo "Check it is mounted"
    rlAssertGrep "$part.*$testdir" /proc/mounts

    rlLogInfo "Check lmi mount list shows it"
    rlRun -s "$LMI -NHL csv storage mount list"
    rlAssertGrep "\"$part\",\"ext3\",\"$testdir\"" $rlRun_LOG
    rm $rlRun_LOG

    rlLogInfo "Check lmi mount show shows it"
    rlRun -s "$LMI -NHL csv storage mount show"
    rlAssertGrep "\"Mountpoint\",\"$testdir\"" $rlRun_LOG
    rlAssertGrep "\"Filesystem\",\"$part" $rlRun_LOG
    rm $rlRun_LOG
rlPhaseEnd

rlPhaseStartTest "Unmount"
    rlLogInfo "Unmount the fs"
    rlRun "$LMI storage mount delete $part"

    rlLogInfo "Check it is unmounted"
    rlAssertNotGrep "$part.*$testdir" /proc/mounts

    rlLogInfo "Check lmi storage mount list doesn't show it"
    rlRun -s "$LMI -NHL csv storage mount list"
    # BUG FIX: was "rlAssertNptGrep" (typo), which is not a beakerlib function.
    rlAssertNotGrep "\"$part\",\"ext3\",\"$testdir\"" $rlRun_LOG
    rm $rlRun_LOG
rlPhaseEnd

rlPhaseStartTest "Delete filesystem"
    rlLogInfo "Delete filesystem"
    rlRun "$LMI storage fs delete $part"
rlPhaseEnd

rlJournalPrintText
rlJournalEnd
|
#!/usr/bin/env bats
load helpers
# Runs after every test case in this file: removes containers, pods and
# temporary state created by the test.
teardown() {
	cleanup_test
}
# 1. test running with ctr unconfined
# test that we can run with a syscall which would be otherwise blocked
@test "ctr seccomp profiles unconfined" {
	# this test requires seccomp, so skip this test if seccomp is not enabled.
	enabled=$(is_seccomp_enabled)
	if [[ "$enabled" -eq 0 ]]; then
		skip "skip this test since seccomp is not enabled."
	fi

	# Build a seccomp profile that removes the chmod-family syscalls from the
	# allow list, so chmod would fail under the default profile.
	sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
	sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
	sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json

	start_crio "$TESTDIR"/seccomp_profile1.json

	# Annotate the sandbox so this container runs "unconfined" (no seccomp).
	sed -e 's/%VALUE%/,"security\.alpha\.kubernetes\.io\/seccomp\/container\/k8s_testname_seccomp_1_redhat\.test\.crio_redhat-test-crio_0": "unconfined"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp1.json
	run crioctl pod run --name seccomp1 --config "$TESTDIR"/seccomp1.json
	echo "$output"
	[ "$status" -eq 0 ]
	pod_id="$output"
	run crioctl ctr create --name testname --config "$TESTDATA"/container_redis.json --pod "$pod_id"
	echo "$output"
	[ "$status" -eq 0 ]
	ctr_id="$output"
	run crioctl ctr start --id "$ctr_id"
	echo "$output"
	[ "$status" -eq 0 ]

	# chmod must succeed because the container is unconfined despite the
	# restricted profile loaded into crio.
	run crioctl ctr execsync --id "$ctr_id" chmod 777 .
	echo "$output"
	[ "$status" -eq 0 ]

	cleanup_ctrs
	cleanup_pods
	stop_crio
}
# 2. test running with ctr runtime/default
# test that we cannot run with a syscall blocked by the default seccomp profile
@test "ctr seccomp profiles runtime/default" {
# this test requires seccomp, so skip this test if seccomp is not enabled.
enabled=$(is_seccomp_enabled)
if [[ "$enabled" -eq 0 ]]; then
skip "skip this test since seccomp is not enabled."
fi
# Build a restrictive profile with chmod/fchmod/fchmodat removed from the
# allowed syscalls, and make it crio's default.
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
start_crio "$TESTDIR"/seccomp_profile1.json
# Annotate this specific container with the 'runtime/default' profile.
sed -e 's/%VALUE%/,"security\.alpha\.kubernetes\.io\/seccomp\/container\/k8s_testname2_seccomp2_redhat\.test\.crio_redhat-test-crio_0": "runtime\/default"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp2.json
run crioctl pod run --name seccomp2 --config "$TESTDIR"/seccomp2.json
echo "$output"
[ "$status" -eq 0 ]
pod_id="$output"
run crioctl ctr create --name testname2 --config "$TESTDATA"/container_redis.json --pod "$pod_id"
echo "$output"
[ "$status" -eq 0 ]
ctr_id="$output"
run crioctl ctr start --id "$ctr_id"
echo "$output"
[ "$status" -eq 0 ]
# execsync itself succeeds (status 0) but chmod inside the container must be
# denied by the profile; the failure shows up in the captured output.
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
echo "$output"
[ "$status" -eq 0 ]
[[ "$output" =~ "Exit code: 1" ]]
[[ "$output" =~ "Operation not permitted" ]]
cleanup_ctrs
cleanup_pods
stop_crio
}
# 3. test running with ctr wrong profile name
@test "ctr seccomp profiles wrong profile name" {
# this test requires seccomp, so skip this test if seccomp is not enabled.
enabled=$(is_seccomp_enabled)
if [[ "$enabled" -eq 0 ]]; then
skip "skip this test since seccomp is not enabled."
fi
# Profile content is irrelevant here; the test exercises annotation
# validation only.
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
start_crio "$TESTDIR"/seccomp_profile1.json
# Annotate the container with the invalid profile name "notgood".
sed -e 's/%VALUE%/,"security\.alpha\.kubernetes\.io\/seccomp\/container\/k8s_testname3_seccomp3_redhat\.test\.crio_redhat-test-crio_1": "notgood"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp3.json
run crioctl pod run --name seccomp3 --config "$TESTDIR"/seccomp3.json
echo "$output"
[ "$status" -eq 0 ]
pod_id="$output"
# Container creation must be rejected with a clear error message.
run crioctl ctr create --name testname3 --config "$TESTDATA"/container_config.json --pod "$pod_id"
echo "$output"
[ "$status" -ne 0 ]
[[ "$output" =~ "unknown seccomp profile option:" ]]
[[ "$output" =~ "notgood" ]]
cleanup_ctrs
cleanup_pods
stop_crio
}
# TODO(runcom): need https://issues.k8s.io/36997
# 4. test running with ctr localhost/profile_name
@test "ctr seccomp profiles localhost/profile_name" {
# this test requires seccomp, so skip this test if seccomp is not enabled.
enabled=$(is_seccomp_enabled)
if [[ "$enabled" -eq 0 ]]; then
skip "skip this test since seccomp is not enabled."
fi
# Setup kept (commented out) for when the upstream issue is resolved and
# localhost/ profiles become testable.
#sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
#sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
#sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
#start_crio "$TESTDIR"/seccomp_profile1.json
skip "need https://issues.k8s.io/36997"
}
# 5. test running with unkwown ctr profile falls back to pod profile
# unknown ctr -> unconfined
# pod -> runtime/default
# result: fail chmod
@test "ctr seccomp profiles falls back to pod profile" {
# this test requires seccomp, so skip this test if seccomp is not enabled.
enabled=$(is_seccomp_enabled)
if [[ "$enabled" -eq 0 ]]; then
skip "skip this test since seccomp is not enabled."
fi
# Build a restrictive default profile with chmod/fchmod/fchmodat removed.
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
start_crio "$TESTDIR"/seccomp_profile1.json
# The container annotation key deliberately does not match any container
# ("...-not-exists"), so only the pod-level 'runtime/default' applies.
sed -e 's/%VALUE%/,"security\.alpha\.kubernetes\.io\/seccomp\/container\/redhat\.test\.crio-seccomp2-1-testname2-0-not-exists": "unconfined", "security\.alpha\.kubernetes\.io\/seccomp\/pod": "runtime\/default"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp5.json
run crioctl pod run --name seccomp5 --config "$TESTDIR"/seccomp5.json
echo "$output"
[ "$status" -eq 0 ]
pod_id="$output"
run crioctl ctr create --config "$TESTDATA"/container_redis.json --pod "$pod_id"
echo "$output"
[ "$status" -eq 0 ]
ctr_id="$output"
run crioctl ctr start --id "$ctr_id"
echo "$output"
[ "$status" -eq 0 ]
# chmod must be denied: the pod profile (runtime/default) is in effect.
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
echo "$output"
[ "$status" -eq 0 ]
[[ "$output" =~ "Exit code: 1" ]]
[[ "$output" =~ "Operation not permitted" ]]
cleanup_ctrs
cleanup_pods
stop_crio
}
# 6. test running with unkwown ctr profile and no pod, falls back to unconfined
# unknown ctr -> runtime/default
# pod -> NO
# result: success, running unconfined
@test "ctr seccomp profiles falls back to unconfined" {
# this test requires seccomp, so skip this test if seccomp is not enabled.
enabled=$(is_seccomp_enabled)
if [[ "$enabled" -eq 0 ]]; then
skip "skip this test since seccomp is not enabled."
fi
# Build a restrictive default profile with chmod/fchmod/fchmodat removed.
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
start_crio "$TESTDIR"/seccomp_profile1.json
# Note: the annotation key targets a non-existent container AND the value
# "runtime-default" (hyphen) is deliberately malformed; with no pod-level
# annotation either, the container must run unconfined.
sed -e 's/%VALUE%/,"security\.alpha\.kubernetes\.io\/seccomp\/container\/redhat\.test\.crio-seccomp6-1-testname6-0-not-exists": "runtime-default"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp6.json
run crioctl pod run --name seccomp6 --config "$TESTDIR"/seccomp6.json
echo "$output"
[ "$status" -eq 0 ]
pod_id="$output"
run crioctl ctr create --name testname6 --config "$TESTDATA"/container_redis.json --pod "$pod_id"
echo "$output"
[ "$status" -eq 0 ]
ctr_id="$output"
run crioctl ctr start --id "$ctr_id"
echo "$output"
[ "$status" -eq 0 ]
# chmod must succeed because no profile applied.
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
echo "$output"
[ "$status" -eq 0 ]
cleanup_ctrs
cleanup_pods
stop_crio
}
# 1. test running with pod unconfined
# test that we can run with a syscall which would be otherwise blocked
@test "pod seccomp profiles unconfined" {
# this test requires seccomp, so skip this test if seccomp is not enabled.
enabled=$(is_seccomp_enabled)
if [[ "$enabled" -eq 0 ]]; then
skip "skip this test since seccomp is not enabled."
fi
# Build a restrictive default profile with chmod/fchmod/fchmodat removed.
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
start_crio "$TESTDIR"/seccomp_profile1.json
# Annotate the whole pod with the 'unconfined' seccomp profile.
sed -e 's/%VALUE%/,"security\.alpha\.kubernetes\.io\/seccomp\/pod": "unconfined"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp1.json
run crioctl pod run --name seccomp1 --config "$TESTDIR"/seccomp1.json
echo "$output"
[ "$status" -eq 0 ]
pod_id="$output"
run crioctl ctr create --config "$TESTDATA"/container_redis.json --pod "$pod_id"
echo "$output"
[ "$status" -eq 0 ]
ctr_id="$output"
run crioctl ctr start --id "$ctr_id"
echo "$output"
[ "$status" -eq 0 ]
# chmod must succeed because the pod runs unconfined.
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
echo "$output"
[ "$status" -eq 0 ]
cleanup_ctrs
cleanup_pods
stop_crio
}
# 2. test running with pod runtime/default
# test that we cannot run with a syscall blocked by the default seccomp profile
@test "pod seccomp profiles runtime/default" {
# this test requires seccomp, so skip this test if seccomp is not enabled.
enabled=$(is_seccomp_enabled)
if [[ "$enabled" -eq 0 ]]; then
skip "skip this test since seccomp is not enabled."
fi
# Build a restrictive default profile with chmod/fchmod/fchmodat removed.
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
start_crio "$TESTDIR"/seccomp_profile1.json
# Annotate the whole pod with the 'runtime/default' profile.
sed -e 's/%VALUE%/,"security\.alpha\.kubernetes\.io\/seccomp\/pod": "runtime\/default"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp2.json
run crioctl pod run --name seccomp2 --config "$TESTDIR"/seccomp2.json
echo "$output"
[ "$status" -eq 0 ]
pod_id="$output"
run crioctl ctr create --config "$TESTDATA"/container_redis.json --pod "$pod_id"
echo "$output"
[ "$status" -eq 0 ]
ctr_id="$output"
run crioctl ctr start --id "$ctr_id"
echo "$output"
[ "$status" -eq 0 ]
# execsync succeeds (status 0) but chmod inside the container must be denied
# by the pod profile; the failure shows up in the captured output.
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
echo "$output"
[ "$status" -eq 0 ]
[[ "$output" =~ "Exit code: 1" ]]
[[ "$output" =~ "Operation not permitted" ]]
cleanup_ctrs
cleanup_pods
stop_crio
}
# 3. test running with pod wrong profile name
@test "pod seccomp profiles wrong profile name" {
# this test requires seccomp, so skip this test if seccomp is not enabled.
enabled=$(is_seccomp_enabled)
if [[ "$enabled" -eq 0 ]]; then
skip "skip this test since seccomp is not enabled."
fi
# Profile content is irrelevant here; the test exercises annotation
# validation only.
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
start_crio "$TESTDIR"/seccomp_profile1.json
# Annotate the pod with the invalid profile name "notgood".
sed -e 's/%VALUE%/,"security\.alpha\.kubernetes\.io\/seccomp\/pod": "notgood"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp3.json
run crioctl pod run --name seccomp3 --config "$TESTDIR"/seccomp3.json
echo "$output"
[ "$status" -eq 0 ]
pod_id="$output"
# Container creation must be rejected with a clear error message.
run crioctl ctr create --config "$TESTDATA"/container_config.json --pod "$pod_id"
echo "$output"
[ "$status" -ne 0 ]
[[ "$output" =~ "unknown seccomp profile option:" ]]
[[ "$output" =~ "notgood" ]]
cleanup_ctrs
cleanup_pods
stop_crio
}
# TODO(runcom): need https://issues.k8s.io/36997
# 4. test running with pod localhost/profile_name
@test "pod seccomp profiles localhost/profile_name" {
# this test requires seccomp, so skip this test if seccomp is not enabled.
enabled=$(is_seccomp_enabled)
if [[ "$enabled" -eq 0 ]]; then
skip "skip this test since seccomp is not enabled."
fi
# Setup kept (commented out) for when the upstream issue is resolved and
# localhost/ profiles become testable.
#sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
#sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
#sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
#start_crio "$TESTDIR"/seccomp_profile1.json
skip "need https://issues.k8s.io/36997"
}
|
#!/usr/bin/env bash
# Acquire a MongoDB-backed distributed lock, run Datamaintain DB migrations,
# then release the lock. Required environment: MONGO_URI,
# LOCK_EXPIRE_IN_MINUTES, SLEEP_BEFORE_RETRY_MILLI.
set -eEuox pipefail
# Lock
is_locked=0
lock_id=datamaintain
lock_collection=datamaintainLock
# Run a one-off JavaScript snippet against the target database.
mongo_eval() {
mongo "$MONGO_URI" --quiet --eval "$1"
}
echo "Start datamaintain"
# loop while not locked
while [ "$is_locked" -eq 0 ]
do
echo "Remove expired lock"
# Stale locks left by a crashed run expire and are cleaned up here.
mongo_eval "db.getCollection(\"$lock_collection\").remove({_id: \"$lock_id\", expire : {\$lt : ISODate()}})"
echo "Trying to acquire lock"
# Insert a lock that will expire in $LOCK_EXPIRE_IN_MINUTES minutes.
# The unique _id makes this insert an atomic try-lock.
try_lock_response=$(mongo_eval "db.getCollection(\"$lock_collection\").insert({_id: \"$lock_id\", expire : new Date(ISODate().getTime() + 1000 * 60 * $LOCK_EXPIRE_IN_MINUTES)})")
if [[ $try_lock_response == *'"nInserted" : 1'* ]]
then
# "nInserted" : 1 = insert successfully
is_locked=1;
echo "Acquired lock successfully"
elif [[ $try_lock_response == *'duplicate key error collection'* ]]
then
# Mongo error: a document with the same id already exists, so another
# Datamaintain instance holds the lock.
echo "Another Datamaintain has the lock, retry in $SLEEP_BEFORE_RETRY_MILLI"
# NOTE(review): 'sleep' takes seconds, but the variable name suggests
# milliseconds — confirm the intended unit.
sleep "$SLEEP_BEFORE_RETRY_MILLI";
continue
else
echo "Do not know what do to with mongo message. Message : $try_lock_response"
exit 1
fi;
done
# Execute datamaintain
# On failure, release the lock before exiting so other runners are not
# blocked until the lock expires.
{
"/code/datamaintain/bin/cli" \
--db-type mongo \
--db-uri "$MONGO_URI" \
update-db \
--path /workspace/scripts \
--identifier-regex "(.*)" \
--mongo-print-output
} || {
echo "Error while executing Datamaintain !"
echo "Remove datamaintain lock"
mongo_eval "db.getCollection(\"$lock_collection\").remove({_id: \"$lock_id\"})"
exit 1
}
echo
echo "Remove datamaintain lock"
mongo_eval "db.getCollection(\"$lock_collection\").remove({_id: \"$lock_id\"})"
|
/* **** Notes
Concatenate
Remarks:
Release at the *di.
*/
# define CAR
# include <stdlib.h>
# include "../../../incl/config.h"
/* Concatenate the first `offset` strings of the argv-style vector `si`
 * into one freshly malloc()ed buffer returned through `*di`.
 * Returns the allocated size in bytes on success, 0 on any failure.
 * Fix: the buffer is now released (and *di reset) if the copy pass
 * fails, instead of being leaked. */
signed(__cdecl concat_argv(signed(offset),signed char(**di),signed char(**si))) {
	/* **** DATA, BSS and STACK */
	auto signed i,r;
	/* **** CODE/TEXT */
	/* Reject invalid argument combinations up front. */
	if(offset<(0x00)) return(0x00);
	if(!di) return(0x00);
	if(!si) return(0x00);
	if(!(*si)) return(0x00);
	/* Every one of the first `offset` entries must be non-NULL. */
	i = (offset);
	while(i) if(!(*(--i+(si)))) return(0x00);
	/* First pass: measure the required length into `i` (the helpers take a
	 * past-the-end pointer — presumably they walk backwards; defined
	 * elsewhere, see config.h). */
	r = concat_argv_r(&i,offset+(si));
	if(!r) return(0x00);
	i = (i*(sizeof(**di)));
	*di = (signed char(*)) malloc(i);
	if(!(*di)) return(0x00);
	/* Second pass: copy the strings into the buffer. */
	r = concat_argv_rr(*di,offset+(si));
	if(!r) {
		/* Fix: previously this path leaked the buffer allocated above. */
		free(*di);
		*di = (signed char(*)) 0x00;
		return(0x00);
	}
	return(i);
}
|
package org.hisp.dhis.reporttable;
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.BaseNameableObject;
import org.hisp.dhis.common.CombinationGenerator;
import org.hisp.dhis.common.Dxf2Namespace;
import org.hisp.dhis.common.NameableObject;
import org.hisp.dhis.common.adapter.CategoryComboXmlAdapter;
import org.hisp.dhis.common.adapter.DataElementXmlAdapter;
import org.hisp.dhis.common.adapter.DataSetXmlAdapter;
import org.hisp.dhis.common.adapter.IndicatorXmlAdapter;
import org.hisp.dhis.common.adapter.OrganisationUnitGroupXmlAdapter;
import org.hisp.dhis.common.adapter.OrganisationUnitXmlAdapter;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryCombo;
import org.hisp.dhis.dataelement.DataElementCategoryOption;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.i18n.I18nFormat;
import org.hisp.dhis.indicator.Indicator;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitGroup;
import org.hisp.dhis.organisationunit.OrganisationUnitGroupSet;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.RelativePeriods;
import org.hisp.dhis.period.comparator.AscendingPeriodComparator;
/**
* The ReportTable object represents a customizable database table. It has
* features like crosstabulation, relative periods, parameters, and display
* columns.
*
* @author <NAME>
* @version $Id$
*/
@XmlRootElement( name = "reportTable", namespace = Dxf2Namespace.NAMESPACE )
@XmlAccessorType( value = XmlAccessType.NONE )
public class ReportTable extends BaseIdentifiableObject
{
/**
* Determines if a de-serialized file is compatible with this class.
*/
private static final long serialVersionUID = 5618655666320890565L;
public static final String DATAELEMENT_ID = "dataelementid";
public static final String CATEGORYCOMBO_ID = "categoryoptioncomboid";
public static final String CATEGORYOPTION_ID = "categoryoptionid";
public static final String INDICATOR_ID = "indicatorid";
public static final String INDICATOR_NAME = "indicatorname";
public static final String INDICATOR_CODE = "indicatorcode";
public static final String DATASET_ID = "datasetid";
public static final String PERIOD_ID = "periodid";
public static final String PERIOD_NAME = "periodname";
public static final String PERIOD_CODE = "periodcode";
public static final String ORGANISATIONUNIT_ID = "organisationunitid";
public static final String ORGANISATIONUNIT_NAME = "organisationunitname";
public static final String ORGANISATIONUNIT_CODE = "organisationunitcode";
public static final String ORGANISATIONUNITGROUP_ID = "organisationunitgroupid";
public static final String ORGANISATIONUNITGROUP_NAME = "organisationunitgroupname";
public static final String ORGANISATIONUNITGROUP_CODE = "organisationunitgroupcode";
public static final String REPORTING_MONTH_COLUMN_NAME = "reporting_month_name";
public static final String PARAM_ORGANISATIONUNIT_COLUMN_NAME = "param_organisationunit_name";
public static final String ORGANISATION_UNIT_IS_PARENT_COLUMN_NAME = "organisation_unit_is_parent";
public static final String SEPARATOR = "_";
public static final String SPACE = " ";
public static final String KEY_ORGUNIT_GROUPSET = "orgunit_groupset_";
public static final String TOTAL_COLUMN_NAME = "total";
public static final String TOTAL_COLUMN_PRETTY_NAME = "Total";
public static final int ASC = -1;
public static final int DESC = 1;
public static final int NONE = 0;
public static final Map<String, String> PRETTY_COLUMNS = new HashMap<String, String>()
{
{
put( CATEGORYCOMBO_ID, "Category combination ID" );
put( INDICATOR_ID, "Indicator ID" );
put( INDICATOR_NAME, "Indicator" );
put( INDICATOR_CODE, "Indicator code" );
put( PERIOD_ID, "Period ID" );
put( PERIOD_NAME, "Period" );
put( PERIOD_CODE, "Period code" );
put( ORGANISATIONUNIT_ID, "Organisation unit ID" );
put( ORGANISATIONUNIT_NAME, "Organisation unit" );
put( ORGANISATIONUNIT_CODE, "Organisation unit code" );
put( ORGANISATIONUNITGROUP_ID, "Organisation unit group ID" );
put( ORGANISATIONUNITGROUP_NAME, "Organisation unit group" );
put( ORGANISATIONUNITGROUP_CODE, "Organisation unit group code" );
put( REPORTING_MONTH_COLUMN_NAME, "Reporting month" );
put( PARAM_ORGANISATIONUNIT_COLUMN_NAME, "Organisation unit parameter" );
put( ORGANISATION_UNIT_IS_PARENT_COLUMN_NAME, "Organisation unit is parent" );
}
};
public static final Map<Class<? extends NameableObject>, String> CLASS_ID_MAP = new HashMap<Class<? extends NameableObject>, String>()
{
{
put( Indicator.class, INDICATOR_ID );
put( DataElement.class, DATAELEMENT_ID );
put( DataElementCategoryOptionCombo.class, CATEGORYCOMBO_ID );
put( DataElementCategoryOption.class, CATEGORYOPTION_ID );
put( DataSet.class, DATASET_ID );
put( Period.class, PERIOD_ID );
put( OrganisationUnit.class, ORGANISATIONUNIT_ID );
put( OrganisationUnitGroup.class, ORGANISATIONUNITGROUP_ID );
}
};
// Empty string appended for null entries when building column names.
private static final String EMPTY = "";
// Typed empty arrays, presumably for Collection.toArray( T[] ) calls — SRT is
// used by the getIdentifier overloads below.
private static final NameableObject[] IRT = new NameableObject[0];
private static final String[] SRT = new String[0];
// Characters disallowed when deriving file names from report table names.
private static final String ILLEGAL_FILENAME_CHARS_REGEX = "[/\\?%*:|\"'<>.]";
// -------------------------------------------------------------------------
// Persisted properties
// -------------------------------------------------------------------------
/**
* Whether the ReportTable contains regression columns.
*/
private boolean regression;
/**
* The list of DataElements the ReportTable contains.
*/
private List<DataElement> dataElements = new ArrayList<DataElement>();
/**
* The list of Indicators the ReportTable contains.
*/
private List<Indicator> indicators = new ArrayList<Indicator>();
/**
* The list of DataSets the ReportTable contains.
*/
private List<DataSet> dataSets = new ArrayList<DataSet>();
/**
* The list of Periods the ReportTable contains.
*/
private List<Period> periods = new ArrayList<Period>();
/**
* The list of OrganisationUnits the ReportTable contains.
*/
private List<OrganisationUnit> units = new ArrayList<OrganisationUnit>();
/**
* The list of OrganisationUnitGroups the ReportTable contains.
*/
private List<OrganisationUnitGroup> organisationUnitGroups = new ArrayList<OrganisationUnitGroup>();
/**
* The DataElementCategoryCombo for the ReportTable.
*/
private DataElementCategoryCombo categoryCombo;
/**
* Whether to crosstabulate on the Indicator dimension, which also
* represents DataElements and DataSets.
*/
private boolean doIndicators;
/**
* Whether to crosstabulate on the Period dimension.
*/
private boolean doPeriods;
/**
* Whether to crosstabulate on the OrganisationUnit dimension.
*/
private boolean doUnits;
/**
* The RelativePeriods of the ReportTable.
*/
private RelativePeriods relatives;
/**
* The ReportParams of the ReportTable.
*/
private ReportParams reportParams;
/**
* The sort order if any applied to the last column of the table.
*/
private Integer sortOrder;
/**
* Indicates whether the table should be limited from the top by this value.
*/
private Integer topLimit;
// -------------------------------------------------------------------------
// Transient properties
// -------------------------------------------------------------------------
/**
* Periods relative to the reporting month.
*/
private List<Period> relativePeriods = new ArrayList<Period>();
/**
* Static Periods and relative Periods.
*/
private List<Period> allPeriods = new ArrayList<Period>();
/**
* OrganisationUnits relative to a parent unit or current unit.
*/
private List<OrganisationUnit> relativeUnits = new ArrayList<OrganisationUnit>();
/**
* Static OrganisationUnits and relative OrganisationUnits.
*/
private List<NameableObject> allUnits = new ArrayList<NameableObject>();
/**
* All indicator-dimension items, including DataElements, Indicators and DataSets.
*/
private List<NameableObject> allIndicators = new ArrayList<NameableObject>();
/**
* All crosstabulated columns.
*/
private List<List<NameableObject>> columns = new ArrayList<List<NameableObject>>();
/**
* All rows.
*/
private List<List<NameableObject>> rows = new ArrayList<List<NameableObject>>();
/**
* Names of the columns used to query the datavalue table and as index
* columns in the report table.
*/
private List<String> indexColumns = new ArrayList<String>();
/**
* Names of the columns holding entry names used to query the datavalue
* table.
*/
private List<String> indexNameColumns = new ArrayList<String>();
/**
* Names of the columns holding entry codes used to query the datavalue
* table.
*/
private List<String> indexCodeColumns = new ArrayList<String>();
/**
* The I18nFormat used for internationalization of e.g. periods.
*/
private I18nFormat i18nFormat;
/**
* The name of the reporting month based on the report param.
*/
private String reportingPeriodName;
/**
* The parent organisation unit.
*/
private OrganisationUnit parentOrganisationUnit;
/**
* The category option combos derived from the dimension set.
*/
private List<DataElementCategoryOptionCombo> categoryOptionCombos = new ArrayList<DataElementCategoryOptionCombo>();
// -------------------------------------------------------------------------
// Constructors
// -------------------------------------------------------------------------
/**
 * Constructor for persistence purposes. Leaves all collections at their
 * empty defaults; {@link #init()} must not be called until the table has
 * been populated.
 */
public ReportTable()
{
}
/**
* Default constructor.
*
* @param name the name.
* @param regression include regression columns.
* @param dataElements the data elements.
* @param indicators the indicators.
* @param dataSets the datasets.
* @param periods the periods. These periods cannot have the name property
* set.
* @param relativePeriods the relative periods. These periods must have the
* name property set. Not persisted.
* @param units the organisation units.
* @param relativeUnits the organisation units. Not persisted.
* @param doIndicators indicating whether indicators should be
* crosstabulated.
* @param doPeriods indicating whether periods should be crosstabulated.
* @param doUnits indicating whether organisation units should be
* crosstabulated.
* @param relatives the relative periods.
* @param i18nFormat the i18n format. Not persisted.
*/
public ReportTable( String name, boolean regression, List<DataElement> dataElements, List<Indicator> indicators,
List<DataSet> dataSets, List<Period> periods, List<Period> relativePeriods, List<OrganisationUnit> units,
List<OrganisationUnit> relativeUnits, List<OrganisationUnitGroup> organisationUnitGroups,
DataElementCategoryCombo categoryCombo, boolean doIndicators,
boolean doPeriods, boolean doUnits, RelativePeriods relatives, ReportParams reportParams,
I18nFormat i18nFormat, String reportingPeriodName )
{
// NOTE(review): the argument lists are stored by reference, not copied —
// callers share mutable state with this table; confirm that is intended.
this.name = name;
this.regression = regression;
this.dataElements = dataElements;
this.indicators = indicators;
this.dataSets = dataSets;
this.periods = periods;
this.relativePeriods = relativePeriods;
this.units = units;
this.relativeUnits = relativeUnits;
this.organisationUnitGroups = organisationUnitGroups;
this.categoryCombo = categoryCombo;
this.doIndicators = doIndicators;
this.doPeriods = doPeriods;
this.doUnits = doUnits;
this.relatives = relatives;
this.reportParams = reportParams;
this.i18nFormat = i18nFormat;
this.reportingPeriodName = reportingPeriodName;
}
// -------------------------------------------------------------------------
// Init
// -------------------------------------------------------------------------
/**
 * Validates the configuration and initializes the transient state of this
 * report table: resolves category option combos, merges static and relative
 * periods/units, and builds the crosstab column/row combinations plus the
 * index column name lists. Must be called before the table is used.
 */
public void init()
{
// Fail fast on configurations that cannot produce a table.
verify( nonEmptyLists( dataElements, indicators, dataSets ) > 0,
"Must contain dataelements, indicators or datasets" );
verify( nonEmptyLists( periods, relativePeriods ) > 0, "Must contain periods or relative periods" );
verify( nonEmptyLists( units, relativeUnits, organisationUnitGroups ) > 0,
"Must contain organisation units, relative organisation units or organisation unit groups" );
verify( !(doTotal() && regression), "Cannot have regression columns with total columns" );
verify( i18nFormat != null, "I18n format must be set" );
// ---------------------------------------------------------------------
// Init dimensions
// ---------------------------------------------------------------------
if ( isDimensional() )
{
categoryOptionCombos = new ArrayList<DataElementCategoryOptionCombo>( categoryCombo.getOptionCombos() );
verify( nonEmptyLists( categoryOptionCombos ) == 1, "Category option combos size must be larger than 0" );
}
// ---------------------------------------------------------------------
// Init allPeriods, allUnits, allIndicators
// ---------------------------------------------------------------------
// "Indicators" here covers all data dimension items: data elements,
// indicators and data sets.
allIndicators.addAll( dataElements );
allIndicators.addAll( indicators );
allIndicators.addAll( dataSets );
allPeriods.addAll( periods );
allPeriods.addAll( relativePeriods );
allPeriods = removeDuplicates( allPeriods );
Collections.sort( allPeriods, new AscendingPeriodComparator() );
setNames( allPeriods ); // Set names on periods
// Group-based tables use the groups as the unit dimension; otherwise merge
// static and relative organisation units.
if ( isOrganisationUnitGroupBased() )
{
allUnits.addAll( organisationUnitGroups );
}
else
{
allUnits.addAll( units );
allUnits.addAll( relativeUnits );
allUnits = removeDuplicates( allUnits );
}
// Cartesian products of the crosstabbed (true) and non-crosstabbed (false)
// dimensions form the column and row headers respectively.
columns = new CombinationGenerator<NameableObject>( getArrays( true ) ).getCombinations();
rows = new CombinationGenerator<NameableObject>( getArrays( false ) ).getCombinations();
addIfEmpty( columns ); // Allow for all or none crosstab dimensions
addIfEmpty( rows );
// Index columns follow the non-crosstabbed dimensions.
add( indexColumns, INDICATOR_ID, doIndicators );
add( indexColumns, PERIOD_ID, doPeriods );
add( indexColumns, ORGANISATIONUNIT_ID, doUnits );
add( indexNameColumns, INDICATOR_NAME, doIndicators );
add( indexNameColumns, PERIOD_NAME, doPeriods );
add( indexNameColumns, ORGANISATIONUNIT_NAME, doUnits );
add( indexCodeColumns, INDICATOR_CODE, doIndicators );
add( indexCodeColumns, PERIOD_CODE, doPeriods );
add( indexCodeColumns, ORGANISATIONUNIT_CODE, doUnits );
}
// -------------------------------------------------------------------------
// Public methods
// -------------------------------------------------------------------------
/**
 * Builds, for every given group set, a mapping from organisation unit
 * identifier to the name of the group that unit belongs to within the set,
 * covering all organisation units in this report table. The per-set maps
 * are keyed by the encoded group set column name.
 *
 * @param groupSets the collection of organisation unit group sets.
 * @return a map of encoded group set column name to (unit id, group name).
 */
public Map<String, Object> getOrganisationUnitGroupMap( Collection<OrganisationUnitGroupSet> groupSets )
{
    Map<String, Object> result = new HashMap<String, Object>();
    for ( OrganisationUnitGroupSet groupSet : groupSets )
    {
        Map<Integer, String> unitGroupNames = new HashMap<Integer, String>();
        for ( NameableObject unit : allUnits )
        {
            if ( !( unit instanceof OrganisationUnit ) )
            {
                continue; // Group-based tables may hold groups here; skip them.
            }
            unitGroupNames.put( unit.getId(), ((OrganisationUnit) unit).getGroupNameInGroupSet( groupSet ) );
        }
        result.put( columnEncode( KEY_ORGUNIT_GROUPSET + groupSet.getName() ), unitGroupNames );
    }
    return result;
}
/**
 * Indicates whether this ReportTable is multi-dimensional, i.e. whether a
 * category combination has been set.
 *
 * @return true if a category combination is present, false otherwise.
 */
public boolean isDimensional()
{
return categoryCombo != null;
}
/**
 * Indicates whether a total column should be included: the table must be
 * dimensional and no dimension may be crosstabulated.
 */
public boolean doTotal()
{
    boolean anyCrosstab = isDoIndicators() || isDoPeriods() || isDoUnits();
    return !anyCrosstab && isDimensional();
}
/**
 * Indicates whether subtotal columns should be included. The category combo
 * of the report table must have more than one category if subtotal columns
 * will contribute.
 *
 * @return true if a total column applies and the category combo has at
 *         least two categories.
 */
public boolean doSubTotals()
{
return doTotal() && categoryCombo.getCategories() != null && categoryCombo.getCategories().size() > 1;
}
/**
 * Generates a pretty column name based on short-names of the argument
 * objects. Null arguments are ignored in the name.
 *
 * @param objects the objects making up the column, possibly containing nulls.
 * @return the space-separated short names, or the pretty total column name
 *         when no name could be produced.
 */
public static String getPrettyColumnName( List<NameableObject> objects )
{
    // StringBuilder instead of StringBuffer: no synchronization is needed
    // for this purely local, single-threaded accumulation.
    StringBuilder buffer = new StringBuilder();
    for ( NameableObject object : objects )
    {
        if ( object != null )
        {
            buffer.append( object.getShortName() ).append( SPACE );
        }
    }
    return buffer.length() > 0 ? buffer.substring( 0, buffer.lastIndexOf( SPACE ) ) : TOTAL_COLUMN_PRETTY_NAME;
}
/**
 * Generates a column name based on short-names of the argument objects.
 * Null arguments are ignored in the name.
 *
 * @param objects the objects making up the column, possibly containing nulls.
 * @return the encoded, separator-joined name, or the total column name when
 *         no name could be produced.
 */
public static String getColumnName( List<NameableObject> objects )
{
    // StringBuilder instead of StringBuffer: no synchronization is needed
    // for this purely local, single-threaded accumulation.
    StringBuilder buffer = new StringBuilder();
    for ( NameableObject object : objects )
    {
        if ( object instanceof Period )
        {
            // -------------------------------------------------------------
            // Periods need static names when crosstab - set on name prop
            // -------------------------------------------------------------
            buffer.append( object.getName() ).append( SEPARATOR );
        }
        else if ( object != null )
        {
            buffer.append( object.getShortName() ).append( SEPARATOR );
        }
    }
    String column = columnEncode( buffer.toString() );
    return column.length() > 0 ? column.substring( 0, column.lastIndexOf( SEPARATOR ) ) : TOTAL_COLUMN_NAME;
}
/**
 * Generates a grid identifier based on the internal identifiers of the
 * argument objects.
 *
 * @param objects the objects contributing to the identifier.
 */
public static String getIdentifier( List<NameableObject> objects )
{
    return getIdentifier( objects, new ArrayList<NameableObject>() );
}

/**
 * Generates a grid identifier based on the internal identifiers of the
 * objects in both argument lists. The result is order-insensitive, as the
 * varargs overload sorts the identifier components.
 *
 * @param objects1 the first list of objects.
 * @param objects2 the second list of objects.
 */
public static String getIdentifier( List<? extends NameableObject> objects1, List<? extends NameableObject> objects2 )
{
    List<String> identifiers = new ArrayList<String>();
    for ( NameableObject object : objects1 )
    {
        identifiers.add( getIdentifier( object.getClass(), object.getId() ) );
    }
    for ( NameableObject object : objects2 )
    {
        identifiers.add( getIdentifier( object.getClass(), object.getId() ) );
    }
    return getIdentifier( identifiers.toArray( SRT ) );
}
/**
 * Generates a grid column identifier based on the identifiers of the given
 * objects plus one additional object given by class and internal id.
 *
 * @param objects the objects contributing to the identifier.
 * @param clazz the class of the additional object.
 * @param id the internal identifier of the additional object.
 */
public static String getIdentifier( List<NameableObject> objects, Class<? extends NameableObject> clazz, int id )
{
    List<String> identifiers = new ArrayList<String>();
    for ( NameableObject object : objects )
    {
        identifiers.add( getIdentifier( object.getClass(), object.getId() ) );
    }
    identifiers.add( getIdentifier( clazz, id ) );
    return getIdentifier( identifiers.toArray( SRT ) );
}
/**
 * Generates a grid column identifier by sorting the argument identifiers
 * (so the result does not depend on argument order) and joining them with
 * SEPARATOR.
 *
 * @param identifiers the identifier components.
 * @return the joined, order-insensitive identifier.
 */
public static String getIdentifier( String... identifiers )
{
    // Copy before sorting: Arrays.asList returns a view backed by the
    // caller's array, so sorting it directly would mutate the caller's
    // varargs array as a hidden side effect.
    List<String> ids = new ArrayList<String>( Arrays.asList( identifiers ) );
    Collections.sort( ids ); // Sort to remove the significance of order
    return StringUtils.join( ids, SEPARATOR );
}
/**
 * Returns a grid identifier component for the given class and internal id,
 * prefixed by the short class tag registered in CLASS_ID_MAP.
 */
public static String getIdentifier( Class<? extends NameableObject> clazz, int id )
{
    return CLASS_ID_MAP.get( clazz ) + id;
}
/**
 * Indicates whether the report table contains at least one data element.
 */
public boolean hasDataElements()
{
    return dataElements != null && dataElements.size() > 0;
}

/**
 * Indicates whether the report table contains at least one indicator.
 */
public boolean hasIndicators()
{
    return indicators != null && indicators.size() > 0;
}

/**
 * Indicates whether the report table contains at least one data set.
 */
public boolean hasDataSets()
{
    return dataSets != null && dataSets.size() > 0;
}
/**
 * Generates a string which is acceptable as a filename: angle brackets are
 * replaced by "_lt"/"_gt", characters matching ILLEGAL_FILENAME_CHARS_REGEX
 * are removed, the result is truncated to 255 characters and lower-cased.
 * A null input is returned unchanged.
 *
 * @param string the string to encode, may be null.
 * @return the encoded string, or null if the input was null.
 */
public static String columnEncode( String string )
{
    if ( string != null )
    {
        // replace() instead of replaceAll(): these are literal tokens, so
        // compiling a regex per call is pure overhead.
        string = string.replace( "<", "_lt" );
        string = string.replace( ">", "_gt" );
        string = string.replaceAll( ILLEGAL_FILENAME_CHARS_REGEX, EMPTY );
        // Guard against file systems with a 255 character filename limit.
        string = string.length() > 255 ? string.substring( 0, 255 ) : string;
        string = string.toLowerCase();
    }
    return string;
}
/**
 * Returns null-safe sort order, NONE if null.
 */
public int sortOrder()
{
    return sortOrder != null ? sortOrder : NONE;
}

/**
 * Returns null-safe top limit, 0 if null.
 */
public int topLimit()
{
    return topLimit != null ? topLimit : 0;
}

/**
 * Tests whether this report table has report params.
 */
public boolean hasReportParams()
{
    return reportParams != null;
}

/**
 * Returns the name of the parent organisation unit, or an empty string if null.
 */
public String getParentOrganisationUnitName()
{
    return parentOrganisationUnit != null ? parentOrganisationUnit.getName() : EMPTY;
}

/**
 * Indicates whether this report table is based on organisation unit groups
 * or the organisation unit hierarchy.
 */
public boolean isOrganisationUnitGroupBased()
{
    return organisationUnitGroups != null && organisationUnitGroups.size() > 0;
}
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------

/**
 * Returns the dimension arrays for one axis of the table: the crosstab
 * columns when crosstab is true, the index rows otherwise. A dimension is
 * placed on the crosstab axis when its do-flag is set and on the index
 * axis when it is not. The category option combo dimension, when present,
 * always goes on the crosstab axis.
 */
private NameableObject[][] getArrays( boolean crosstab )
{
    List<NameableObject[]> arrays = new ArrayList<NameableObject[]>();
    if ( (doIndicators && crosstab) || (!doIndicators && !crosstab) )
    {
        arrays.add( allIndicators.toArray( IRT ) );
    }
    if ( (doPeriods && crosstab) || (!doPeriods && !crosstab) )
    {
        arrays.add( allPeriods.toArray( IRT ) );
    }
    if ( (doUnits && crosstab) || (!doUnits && !crosstab) )
    {
        arrays.add( allUnits.toArray( IRT ) );
    }
    if ( isDimensional() && crosstab ) // Must be crosstab if exists
    {
        arrays.add( categoryOptionCombos.toArray( IRT ) );
    }
    return arrays.toArray( new NameableObject[0][] );
}
/**
 * Adds a single empty list of NameableObjects to the given list if the
 * given list itself is empty; a null list is left untouched.
 */
private static void addIfEmpty( List<List<NameableObject>> list )
{
    if ( list != null && list.size() == 0 )
    {
        list.add( Arrays.asList( new NameableObject[0] ) );
    }
}
/**
 * Returns the number of non-empty lists among the argument lists. Null
 * lists count as empty. (The original javadoc said "empty lists", which
 * contradicted what the code counts.)
 *
 * @param lists the lists to inspect.
 * @return the number of lists which are non-null and contain elements.
 */
private static int nonEmptyLists( List<?>... lists )
{
    int nonEmpty = 0;
    for ( List<?> list : lists )
    {
        if ( list != null && !list.isEmpty() )
        {
            ++nonEmpty;
        }
    }
    return nonEmpty;
}
/**
 * Sets the name and short name properties on the given Periods which don't
 * have the name property already set, using the i18n format to render a
 * localized period name.
 */
private void setNames( List<Period> periods )
{
    for ( Period period : periods )
    {
        if ( period.getName() == null ) // Crosstabulated relative periods
        {
            // -------------------------------------------------------------
            // Static periods + index relative periods
            // -------------------------------------------------------------
            period.setName( i18nFormat.formatPeriod( period ) );
            period.setShortName( i18nFormat.formatPeriod( period ) );
        }
    }
}
/**
 * Appends the given object to the given list, unless the skip flag is set,
 * in which case the list is left unchanged.
 */
private static <T> void add( List<T> list, T object, boolean skip )
{
    if ( skip )
    {
        return;
    }

    list.add( object );
}
/**
 * Removes duplicate elements from the given list in place, keeping the
 * first occurrence of each element and maintaining their order. The given
 * list itself is modified and returned.
 *
 * @param list the list to deduplicate.
 * @return the same list instance with duplicates removed.
 */
private static <T> List<T> removeDuplicates( List<T> list )
{
    // LinkedHashSet preserves first-occurrence order and turns the original
    // O(n^2) contains() scan per element into an O(n) pass. Assumes element
    // hashCode() is consistent with equals(), the usual contract.
    final java.util.Set<T> unique = new java.util.LinkedHashSet<T>( list );

    list.clear();
    list.addAll( unique );

    return list;
}
/**
 * Throws an IllegalStateException carrying the given message when the
 * supplied expression does not hold; otherwise does nothing.
 */
private static void verify( boolean expression, String falseMessage )
{
    if ( expression )
    {
        return;
    }

    throw new IllegalStateException( falseMessage );
}
// -------------------------------------------------------------------------
// Equals and hashCode
// -------------------------------------------------------------------------

/**
 * Hash code derived solely from the name property; a null name is
 * tolerated and contributes 0.
 */
@Override
public int hashCode()
{
    final int PRIME = 31;
    int result = 1;
    result = PRIME * result + ((name == null) ? 0 : name.hashCode());
    return result;
}
/**
 * Equality is based solely on the name property, mirroring hashCode().
 * Unlike the original implementation, a null name no longer triggers a
 * NullPointerException; two instances with null names compare equal.
 */
@Override
public boolean equals( Object object )
{
    if ( this == object )
    {
        return true;
    }

    if ( object == null || getClass() != object.getClass() )
    {
        return false;
    }

    final ReportTable other = (ReportTable) object;

    // Null-safe comparison, consistent with the null-tolerant hashCode().
    return name == null ? other.getName() == null : name.equals( other.getName() );
}
// -------------------------------------------------------------------------
// Get- and set-methods for persisted properties
// -------------------------------------------------------------------------

/** The regression flag. */
@XmlElement
@JsonProperty
public boolean isRegression()
{
    return regression;
}

public void setRegression( boolean regression )
{
    this.regression = regression;
}

/** The data elements included in this report table. */
@XmlElementWrapper( name = "dataElements" )
@XmlElement( name = "dataElement" )
@XmlJavaTypeAdapter( DataElementXmlAdapter.class )
@JsonProperty
@JsonSerialize( contentAs = BaseNameableObject.class )
public List<DataElement> getDataElements()
{
    return dataElements;
}

public void setDataElements( List<DataElement> dataElements )
{
    this.dataElements = dataElements;
}

/** The indicators included in this report table. */
@XmlElementWrapper( name = "indicators" )
@XmlElement( name = "indicator" )
@XmlJavaTypeAdapter( IndicatorXmlAdapter.class )
@JsonProperty
@JsonSerialize( contentAs = BaseNameableObject.class )
public List<Indicator> getIndicators()
{
    return indicators;
}

public void setIndicators( List<Indicator> indicators )
{
    this.indicators = indicators;
}

/** The fixed periods included in this report table (not serialized here). */
public List<Period> getPeriods()
{
    return periods;
}

public void setPeriods( List<Period> periods )
{
    this.periods = periods;
}

/** The data sets included in this report table. */
@XmlElementWrapper( name = "dataSets" )
@XmlElement( name = "dataSet" )
@XmlJavaTypeAdapter( DataSetXmlAdapter.class )
@JsonProperty
@JsonSerialize( contentAs = BaseNameableObject.class )
public List<DataSet> getDataSets()
{
    return dataSets;
}

public void setDataSets( List<DataSet> dataSets )
{
    this.dataSets = dataSets;
}

/** The fixed organisation units included in this report table. */
@XmlElementWrapper( name = "organisationUnits" )
@XmlElement( name = "organisationUnit" )
@XmlJavaTypeAdapter( OrganisationUnitXmlAdapter.class )
@JsonProperty
@JsonSerialize( contentAs = BaseNameableObject.class )
public List<OrganisationUnit> getUnits()
{
    return units;
}

public void setUnits( List<OrganisationUnit> units )
{
    this.units = units;
}

/** The organisation unit groups included in this report table. */
@XmlElementWrapper( name = "organisationUnitGroups" )
@XmlElement( name = "organisationUnitGroup" )
@XmlJavaTypeAdapter( OrganisationUnitGroupXmlAdapter.class )
@JsonProperty
@JsonSerialize( contentAs = BaseNameableObject.class )
public List<OrganisationUnitGroup> getOrganisationUnitGroups()
{
    return organisationUnitGroups;
}

public void setOrganisationUnitGroups( List<OrganisationUnitGroup> organisationUnitGroups )
{
    this.organisationUnitGroups = organisationUnitGroups;
}

/** The category combination; non-null makes this table dimensional. */
@XmlElement
@XmlJavaTypeAdapter( CategoryComboXmlAdapter.class )
@JsonProperty
@JsonSerialize( as = BaseIdentifiableObject.class )
public DataElementCategoryCombo getCategoryCombo()
{
    return categoryCombo;
}

public void setCategoryCombo( DataElementCategoryCombo categoryCombo )
{
    this.categoryCombo = categoryCombo;
}

/** Whether indicators are crosstabulated (placed on columns). */
@XmlElement
@JsonProperty
public boolean isDoIndicators()
{
    return doIndicators;
}

public void setDoIndicators( boolean doIndicators )
{
    this.doIndicators = doIndicators;
}

/** Whether periods are crosstabulated (placed on columns). */
@XmlElement
@JsonProperty
public boolean isDoPeriods()
{
    return doPeriods;
}

public void setDoPeriods( boolean doPeriods )
{
    this.doPeriods = doPeriods;
}

/** Whether organisation units are crosstabulated (placed on columns). */
@XmlElement
@JsonProperty
public boolean isDoUnits()
{
    return doUnits;
}

public void setDoUnits( boolean doUnits )
{
    this.doUnits = doUnits;
}

/** The relative periods configuration. */
@XmlElement( name = "relativePeriods" )
@JsonProperty( value = "relativePeriods" )
public RelativePeriods getRelatives()
{
    return relatives;
}

public void setRelatives( RelativePeriods relatives )
{
    this.relatives = relatives;
}

/** The report parameters. */
@XmlElement
@JsonProperty
public ReportParams getReportParams()
{
    return reportParams;
}

public void setReportParams( ReportParams reportParams )
{
    this.reportParams = reportParams;
}

/** The sort order; may be null, see sortOrder() for the null-safe accessor. */
@XmlElement
@JsonProperty
public Integer getSortOrder()
{
    return sortOrder;
}

public void setSortOrder( Integer sortOrder )
{
    this.sortOrder = sortOrder;
}

/** The top limit; may be null, see topLimit() for the null-safe accessor. */
@XmlElement
@JsonProperty
public Integer getTopLimit()
{
    return topLimit;
}

public void setTopLimit( Integer topLimit )
{
    this.topLimit = topLimit;
}
// -------------------------------------------------------------------------
// Get- and set-methods for transient properties
// -------------------------------------------------------------------------

/** The resolved relative periods. */
public List<Period> getRelativePeriods()
{
    return relativePeriods;
}

public void setRelativePeriods( List<Period> relativePeriods )
{
    this.relativePeriods = relativePeriods;
}

/** All periods (read-only aggregate view). */
public List<Period> getAllPeriods()
{
    return allPeriods;
}

/** The resolved relative organisation units. */
public List<OrganisationUnit> getRelativeUnits()
{
    return relativeUnits;
}

public void setRelativeUnits( List<OrganisationUnit> relativeUnits )
{
    this.relativeUnits = relativeUnits;
}

/** All units (read-only aggregate view). */
public List<NameableObject> getAllUnits()
{
    return allUnits;
}

/** The i18n format used to localize period names. */
public I18nFormat getI18nFormat()
{
    return i18nFormat;
}

public void setI18nFormat( I18nFormat format )
{
    i18nFormat = format;
}

/** Display name for the reporting period. */
public String getReportingPeriodName()
{
    return reportingPeriodName;
}

public void setReportingPeriodName( String reportingPeriodName )
{
    this.reportingPeriodName = reportingPeriodName;
}

/** The generated crosstab column dimensions (read-only). */
public List<List<NameableObject>> getColumns()
{
    return columns;
}

/** The generated index row dimensions (read-only). */
public List<List<NameableObject>> getRows()
{
    return rows;
}

/** The generated index column names (read-only). */
public List<String> getIndexColumns()
{
    return indexColumns;
}

/** The generated index name column names (read-only). */
public List<String> getIndexNameColumns()
{
    return indexNameColumns;
}

/** The generated index code column names (read-only). */
public List<String> getIndexCodeColumns()
{
    return indexCodeColumns;
}

/** The parent organisation unit, if any. */
public OrganisationUnit getParentOrganisationUnit()
{
    return parentOrganisationUnit;
}

public void setParentOrganisationUnit( OrganisationUnit parentOrganisationUnit )
{
    this.parentOrganisationUnit = parentOrganisationUnit;
}

/** The category option combos of the category combo, when dimensional. */
public List<DataElementCategoryOptionCombo> getCategoryOptionCombos()
{
    return categoryOptionCombos;
}

public void setCategoryOptionCombos( List<DataElementCategoryOptionCombo> categoryOptionCombos )
{
    this.categoryOptionCombos = categoryOptionCombos;
}
}
|
require 'erb'

# Benchmark script: renders the ERB template stored in the DATA section
# (after __END__) 1.5 million times.
data = DATA.read
max = 1_500_000
title = "hello world!"
content = "hello world!\n" * 10

# Compile the template ONCE into a `render` singleton method on an anonymous
# module, so the timed loop below measures rendering only, not recompilation.
# "(ERB)" is the pseudo file name used in backtraces for the eval'd code.
src = "def self.render(title, content); #{ERB.new(data).src}; end"
mod = Module.new
mod.instance_eval(src, "(ERB)")

max.times do
  mod.render(title, content)
end
__END__
<html>
<head> <%= title %> </head>
<body>
<h1> <%= title %> </h1>
<p>
<%= content %>
</p>
</body>
</html>
|
from datetime import datetime
def parse_date(date_str: str, fmt: str = "%Y-%m-%d") -> datetime:
    """Parse a date string into a ``datetime``.

    Args:
        date_str: The date text to parse.
        fmt: ``strptime`` format string. Defaults to ISO ``YYYY-MM-DD``,
            so existing callers are unaffected.

    Returns:
        The parsed ``datetime`` (midnight for date-only formats).

    Raises:
        ValueError: If ``date_str`` does not match ``fmt``.
    """
    return datetime.strptime(date_str, fmt)
<filename>store/utils.go
package store
import "fmt"
// DSP_TASK_TABLE_NAME is the name of the table holding DSP task records.
// The underscore naming is kept for backwards compatibility with existing
// callers, although MixedCaps would be idiomatic Go.
const DSP_TASK_TABLE_NAME = "dsp_tasks"

// CreateTasks is the DDL statement that creates the task table if absent.
// NOTE(review): "VARCHAR[255]" (square brackets) is unusual SQL; SQLite is
// lenient about declared column types, but confirm this is intentional
// before porting to another database.
const CreateTasks string = "CREATE TABLE IF NOT EXISTS " + DSP_TASK_TABLE_NAME +
	" (id VARCHAR[255] NOT NULL PRIMARY KEY, fileHash VARCHAR[255] NOT NULL, fileName VARCHAR[255] NOT NULL, walletAddr VARCHAR[255] NOT NULL, type INTEGER NOT NULL, createdAt DATE, updatedAt DATE);"

// ScriptCreateTables wraps table-creation statements in a transaction with
// foreign keys temporarily disabled; the %s placeholder receives the DDL.
const ScriptCreateTables string = `PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
%s
COMMIT;
PRAGMA foreign_keys=on;
`

// GetCreateTables returns the full SQL script that creates all store tables.
func GetCreateTables() string {
	return fmt.Sprintf(ScriptCreateTables, CreateTasks)
}
|
<filename>apps/bfd-server/bfd-server-war/src/test/java/gov/cms/bfd/server/war/stu3/providers/PatientResourceProviderIT.java<gh_stars>0
package gov.cms.bfd.server.war.stu3.providers;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import ca.uhn.fhir.model.primitive.DateTimeDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.gclient.TokenClientParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import gov.cms.bfd.model.codebook.data.CcwCodebookVariable;
import gov.cms.bfd.model.rif.Beneficiary;
import gov.cms.bfd.model.rif.BeneficiaryHistory;
import gov.cms.bfd.model.rif.BeneficiaryMonthly;
import gov.cms.bfd.model.rif.BeneficiaryMonthly_;
import gov.cms.bfd.model.rif.MedicareBeneficiaryIdHistory;
import gov.cms.bfd.model.rif.samples.StaticRifResource;
import gov.cms.bfd.model.rif.samples.StaticRifResourceGroup;
import gov.cms.bfd.pipeline.sharedutils.PipelineTestUtils;
import gov.cms.bfd.server.war.ServerTestUtils;
import gov.cms.bfd.server.war.commons.CCWUtils;
import gov.cms.bfd.server.war.commons.CommonHeaders;
import gov.cms.bfd.server.war.commons.RequestHeaders;
import gov.cms.bfd.server.war.commons.TransformerConstants;
import java.time.Instant;
import java.time.LocalDate;
import java.time.Year;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.CriteriaUpdate;
import javax.persistence.criteria.Root;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Identifier;
import org.hl7.fhir.dstu3.model.Patient;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
/** Integration tests for {@link gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider}. */
public final class PatientResourceProviderIT {
/**
 * Ensures that {@link PipelineTestUtils#truncateTablesInDataSource()} is called once to make sure
 * that any existing data is deleted from the tables before running the test suite.
 */
@BeforeAll
public static void cleanupDatabaseBeforeTestSuite() {
  PipelineTestUtils.get().truncateTablesInDataSource();
}

/**
 * Ensures that {@link PipelineTestUtils#truncateTablesInDataSource()} is called after each test
 * case, keeping test cases isolated from one another's loaded data.
 */
@AfterEach
public void cleanDatabaseServerAfterEachTestCase() {
  PipelineTestUtils.get().truncateTablesInDataSource();
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} that does exist in the DB.
 */
@Test
public void readExistingPatient() {
  // Load the static SAMPLE_A data set and pick out the beneficiary it contains.
  List<Object> loadedRecords =
      ServerTestUtils.get()
          .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
  IGenericClient fhirClient = createFhirClient();
  Beneficiary beneficiary =
      loadedRecords.stream()
          .filter(r -> r instanceof Beneficiary)
          .map(r -> (Beneficiary) r)
          .findFirst()
          .get();
  // Read the Patient by beneficiary id and compare against the source record.
  Patient patient =
      fhirClient.read().resource(Patient.class).withId(beneficiary.getBeneficiaryId()).execute();

  assertNotNull(patient);
  BeneficiaryTransformerTest.assertMatches(
      beneficiary, patient, getRHwithIncldAddrFldHdr("false"));
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} when include identifiers value = "true".
 */
@Test
public void readExistingPatientIncludeIdentifiersTrue() {
  // "true" => both the unhashed HICN and the unhashed MBI are expected.
  assertExistingPatientIncludeIdentifiersExpected(
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_EXPECT_HICN,
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_EXPECT_MBI,
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
          "true",
          PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
          "true"));
}

/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} when include identifiers value = "hicn,mbi".
 */
@Test
public void readExistingPatientIncludeIdentifiersHicnMbi() {
  // Explicitly listing both identifier kinds behaves like "true".
  assertExistingPatientIncludeIdentifiersExpected(
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_EXPECT_HICN,
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_EXPECT_MBI,
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
          "hicn,mbi",
          PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
          "true"));
}

/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} when include identifiers value = "hicn".
 */
@Test
public void readExistingPatientIncludeIdentifiersHicn() {
  // "hicn" => only the unhashed HICN is expected, not the MBI.
  assertExistingPatientIncludeIdentifiersExpected(
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_EXPECT_HICN,
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_MBI,
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
          "hicn",
          PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
          "true"));
}

/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} when include identifiers value = "mbi".
 */
@Test
public void readExistingPatientIncludeIdentifiersMbi() {
  // "mbi" => only the unhashed MBI is expected, not the HICN.
  assertExistingPatientIncludeIdentifiersExpected(
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_HICN,
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_EXPECT_MBI,
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
          "mbi",
          PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
          "true"));
}

/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} when include identifiers value = "false".
 */
@Test
public void readExistingPatientIncludeIdentifiersFalse() {
  // "false" => neither unhashed identifier is expected.
  assertExistingPatientIncludeIdentifiersExpected(
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_HICN,
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_MBI,
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
          "false",
          PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
          "true"));
}

/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} when include identifiers value = "".
 */
@Test
public void readExistingPatientIncludeIdentifiersBlank() {
  // A blank header value behaves like "false": no unhashed identifiers.
  assertExistingPatientIncludeIdentifiersExpected(
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_HICN,
      PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_MBI,
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
          "",
          PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
          "true"));
}

/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} when include identifiers value =
 * "invalid-identifier-value" and that an exception is thrown.
 */
@Test
public void readExistingPatientIncludeIdentifiersInvalid1() {
  // An unrecognized header value must be rejected with an InvalidRequestException.
  assertThrows(
      InvalidRequestException.class,
      () -> {
        assertExistingPatientIncludeIdentifiersExpected(
            PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_HICN,
            PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_MBI,
            RequestHeaders.getHeaderWrapper(
                PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
                "invalid-identifier-value",
                PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
                "true"));
      });
}

/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} when include identifiers value =
 * ["mbi,invalid-identifier-value"] and that an exception is thrown.
 */
@Test
public void readExistingPatientIncludeIdentifiersInvalid2() {
  // Even when mixed with a valid value, an invalid entry must be rejected.
  assertThrows(
      InvalidRequestException.class,
      () -> {
        assertExistingPatientIncludeIdentifiersExpected(
            PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_HICN,
            PatientResourceProvider.CNST_INCL_IDENTIFIERS_NOT_EXPECT_MBI,
            RequestHeaders.getHeaderWrapper(
                PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
                "mbi,invalid-identifier-value",
                PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
                "true"));
      });
}
/**
 * Asserts that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * contains the expected/present identifiers for a {@link Patient}.
 *
 * @param expectingHicn true if expecting an unhashed HICN identifier
 * @param expectingMbi true if expecting an unhashed MBI identifier
 * @param requestHeader requested header
 */
public void assertExistingPatientIncludeIdentifiersExpected(
    boolean expectingHicn, boolean expectingMbi, RequestHeaders requestHeader) {
  List<Object> loadedRecords =
      ServerTestUtils.get()
          .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
  IGenericClient fhirClient = createFhirClient(requestHeader);
  Beneficiary beneficiary =
      loadedRecords.stream()
          .filter(r -> r instanceof Beneficiary)
          .map(r -> (Beneficiary) r)
          .findFirst()
          .get();
  Patient patient =
      fhirClient.read().resource(Patient.class).withId(beneficiary.getBeneficiaryId()).execute();

  assertNotNull(patient);
  BeneficiaryTransformerTest.assertMatches(beneficiary, patient, requestHeader);

  /*
   * Scan the identifiers for the unhashed HICN and MBI systems. Primitive
   * booleans and a for-each loop replace the original boxed Booleans and
   * explicit Iterator, and assertEquals replaces the if/else assert pairs.
   */
  boolean hicnUnhashedPresent = false;
  boolean mbiUnhashedPresent = false;
  for (Identifier identifier : patient.getIdentifier()) {
    if (identifier.getSystem().equals(TransformerConstants.CODING_BBAPI_BENE_HICN_UNHASHED)) {
      hicnUnhashedPresent = true;
    }
    if (identifier
        .getSystem()
        .equals(TransformerConstants.CODING_BBAPI_MEDICARE_BENEFICIARY_ID_UNHASHED)) {
      mbiUnhashedPresent = true;
    }
  }

  assertEquals(expectingHicn, hicnUnhashedPresent);
  assertEquals(expectingMbi, mbiUnhashedPresent);
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} that does exist in the DB but has no {@link
 * BeneficiaryHistory} or {@link MedicareBeneficiaryIdHistory} records when include identifiers
 * value = ["true"].
 */
@Test
public void readExistingPatientWithNoHistoryIncludeIdentifiersTrue() {
  // Load only the beneficiary records, deliberately omitting history data.
  List<Object> loadedRecords =
      ServerTestUtils.get().loadData(Arrays.asList(StaticRifResource.SAMPLE_A_BENES));
  RequestHeaders requestHeader =
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
          "true",
          PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
          "true");
  IGenericClient fhirClient = createFhirClient(requestHeader);
  Beneficiary beneficiary =
      loadedRecords.stream()
          .filter(r -> r instanceof Beneficiary)
          .map(r -> (Beneficiary) r)
          .findFirst()
          .get();
  Patient patient =
      fhirClient.read().resource(Patient.class).withId(beneficiary.getBeneficiaryId()).execute();

  assertNotNull(patient);
  BeneficiaryTransformerTest.assertMatches(beneficiary, patient, requestHeader);

  /*
   * Ensure the unhashed values for HICN and MBI are present. Primitive
   * booleans and a for-each loop replace the original boxed Booleans and
   * explicit Iterator.
   */
  boolean hicnUnhashedPresent = false;
  boolean mbiUnhashedPresent = false;
  for (Identifier identifier : patient.getIdentifier()) {
    if (identifier.getSystem().equals(TransformerConstants.CODING_BBAPI_BENE_HICN_UNHASHED)) {
      hicnUnhashedPresent = true;
    }
    if (identifier
        .getSystem()
        .equals(TransformerConstants.CODING_BBAPI_MEDICARE_BENEFICIARY_ID_UNHASHED)) {
      mbiUnhashedPresent = true;
    }
  }

  assertTrue(hicnUnhashedPresent);
  assertTrue(mbiUnhashedPresent);
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#read(org.hl7.fhir.dstu3.model.IdType)}
 * works as expected for a {@link Patient} that does not exist in the DB.
 */
@Test
public void readMissingPatient() {
  IGenericClient fhirClient = createFhirClient();
  // No data is loaded, so this should return nothing.
  assertThrows(
      ResourceNotFoundException.class,
      () -> {
        fhirClient.read().resource(Patient.class).withId("1234").execute();
      });
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByLogicalId(ca.uhn.fhir.rest.param.TokenParam)}
 * works as expected for a {@link Patient} that does exist in the DB.
 */
@Test
public void searchForExistingPatientByLogicalId() {
  // Load the static sample data and locate the beneficiary it contains.
  List<Object> loadedRecords =
      ServerTestUtils.get()
          .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
  IGenericClient fhirClient = createFhirClient();
  Beneficiary beneficiary =
      loadedRecords.stream()
          .filter(r -> r instanceof Beneficiary)
          .map(r -> (Beneficiary) r)
          .findFirst()
          .get();
  // Search by the Patient's logical (resource) id.
  Bundle searchResults =
      fhirClient
          .search()
          .forResource(Patient.class)
          .where(
              Patient.RES_ID.exactly().systemAndIdentifier(null, beneficiary.getBeneficiaryId()))
          .returnBundle(Bundle.class)
          .execute();

  assertNotNull(searchResults);

  /*
   * Verify that no paging links exist within the bundle.
   */
  assertNull(searchResults.getLink(Constants.LINK_FIRST));
  assertNull(searchResults.getLink(Constants.LINK_NEXT));
  assertNull(searchResults.getLink(Constants.LINK_PREVIOUS));
  assertNull(searchResults.getLink(Constants.LINK_LAST));

  assertEquals(1, searchResults.getTotal());
  Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
  BeneficiaryTransformerTest.assertMatches(
      beneficiary, patientFromSearchResult, getRHwithIncldAddrFldHdr("false"));
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByLogicalId(ca.uhn.fhir.rest.param.TokenParam)}
 * works as expected for a {@link Patient} that does exist in the DB, including identifiers to
 * return the unhashed HICN and MBI.
 */
@Test
public void searchForExistingPatientByLogicalIdIncludeIdentifiersTrue() {
  List<Object> loadedRecords =
      ServerTestUtils.get()
          .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
  IGenericClient fhirClient = createFhirClient("true", "true");
  Beneficiary beneficiary =
      loadedRecords.stream()
          .filter(r -> r instanceof Beneficiary)
          .map(r -> (Beneficiary) r)
          .findFirst()
          .get();
  Bundle searchResults =
      fhirClient
          .search()
          .forResource(Patient.class)
          .where(
              Patient.RES_ID.exactly().systemAndIdentifier(null, beneficiary.getBeneficiaryId()))
          .returnBundle(Bundle.class)
          .execute();

  assertNotNull(searchResults);
  Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();

  /*
   * Ensure the unhashed values for HICN and MBI are present. Primitive
   * booleans and a for-each loop replace the original boxed Booleans and
   * explicit Iterator.
   */
  boolean hicnUnhashedPresent = false;
  boolean mbiUnhashedPresent = false;
  for (Identifier identifier : patientFromSearchResult.getIdentifier()) {
    if (identifier.getSystem().equals(TransformerConstants.CODING_BBAPI_BENE_HICN_UNHASHED)) {
      hicnUnhashedPresent = true;
    }
    if (identifier
        .getSystem()
        .equals(TransformerConstants.CODING_BBAPI_MEDICARE_BENEFICIARY_ID_UNHASHED)) {
      mbiUnhashedPresent = true;
    }
  }

  assertTrue(hicnUnhashedPresent);
  assertTrue(mbiUnhashedPresent);
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByLogicalId(ca.uhn.fhir.rest.param.TokenParam)}
 * works as expected for a {@link Patient} that does exist in the DB, with include identifiers set
 * to "false" so the unhashed HICN and MBI are NOT returned. (The original javadoc was a
 * copy-paste of the "true" case and stated the opposite of what this test checks.)
 */
@Test
public void searchForExistingPatientByLogicalIdIncludeIdentifiersFalse() {
  List<Object> loadedRecords =
      ServerTestUtils.get()
          .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
  IGenericClient fhirClient = createFhirClient("false", "true");
  Beneficiary beneficiary =
      loadedRecords.stream()
          .filter(r -> r instanceof Beneficiary)
          .map(r -> (Beneficiary) r)
          .findFirst()
          .get();
  Bundle searchResults =
      fhirClient
          .search()
          .forResource(Patient.class)
          .where(
              Patient.RES_ID.exactly().systemAndIdentifier(null, beneficiary.getBeneficiaryId()))
          .returnBundle(Bundle.class)
          .execute();

  assertNotNull(searchResults);
  Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();

  /*
   * Ensure the unhashed values for HICN and MBI are *not* present. Primitive
   * booleans and a for-each loop replace the original boxed Booleans and
   * explicit Iterator.
   */
  boolean hicnUnhashedPresent = false;
  boolean mbiUnhashedPresent = false;
  for (Identifier identifier : patientFromSearchResult.getIdentifier()) {
    if (identifier.getSystem().equals(TransformerConstants.CODING_BBAPI_BENE_HICN_UNHASHED)) {
      hicnUnhashedPresent = true;
    }
    if (identifier
        .getSystem()
        .equals(TransformerConstants.CODING_BBAPI_MEDICARE_BENEFICIARY_ID_UNHASHED)) {
      mbiUnhashedPresent = true;
    }
  }

  assertFalse(hicnUnhashedPresent);
  assertFalse(mbiUnhashedPresent);
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByLogicalId(ca.uhn.fhir.rest.param.TokenParam)}
 * works as expected for a {@link Patient} that does exist in the DB, with paging.
 */
@Test
public void searchForPatientByLogicalIdWithPaging() {
  List<Object> loadedRecords =
      ServerTestUtils.get()
          .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
  IGenericClient fhirClient = createFhirClient();
  Beneficiary beneficiary =
      loadedRecords.stream()
          .filter(r -> r instanceof Beneficiary)
          .map(r -> (Beneficiary) r)
          .findFirst()
          .get();
  // Request a page size of 1 so paging links are generated.
  Bundle searchResults =
      fhirClient
          .search()
          .forResource(Patient.class)
          .where(
              Patient.RES_ID.exactly().systemAndIdentifier(null, beneficiary.getBeneficiaryId()))
          .count(1)
          .returnBundle(Bundle.class)
          .execute();

  assertNotNull(searchResults);
  assertEquals(1, searchResults.getTotal());

  /*
   * Verify that only the first and last paging links exist, since there should
   * only be one page.
   */
  assertNotNull(searchResults.getLink(Constants.LINK_FIRST));
  assertNull(searchResults.getLink(Constants.LINK_NEXT));
  assertNull(searchResults.getLink(Constants.LINK_PREVIOUS));
  assertNotNull(searchResults.getLink(Constants.LINK_LAST));
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByLogicalId(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for a {@link Patient} that does not exist in the DB.
*/
@Test
public void searchForMissingPatientByLogicalId() {
IGenericClient fhirClient = createFhirClient();
// No data is loaded, so this should return 0 matches.
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(Patient.RES_ID.exactly().systemAndIdentifier(null, "foo"))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
assertEquals(0, searchResults.getTotal());
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for a {@link Patient} that does exist in the DB.
*/
@Test
public void searchForExistingPatientByHicnHash() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient();
Beneficiary beneficiary =
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.findFirst()
.get();
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, beneficiary.getHicn()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
/*
* Verify that no paging links exist within the bundle.
*/
assertNull(searchResults.getLink(Constants.LINK_FIRST));
assertNull(searchResults.getLink(Constants.LINK_NEXT));
assertNull(searchResults.getLink(Constants.LINK_PREVIOUS));
assertNull(searchResults.getLink(Constants.LINK_LAST));
assertEquals(1, searchResults.getTotal());
Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
BeneficiaryTransformerTest.assertMatches(
beneficiary, patientFromSearchResult, getRHwithIncldAddrFldHdr("false"));
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for a {@link Patient} that does exist in the DB, including identifiers to
* return the unhashed HICN and MBI.
*/
@Test
public void searchForExistingPatientByHicnHashIncludeIdentifiersTrue() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient("true", "true");
Beneficiary beneficiary =
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.findFirst()
.get();
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, beneficiary.getHicn()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
/*
* Ensure the unhashed values for HICN and MBI are present.
*/
Boolean hicnUnhashedPresent = false;
Boolean mbiUnhashedPresent = false;
Iterator<Identifier> identifiers = patientFromSearchResult.getIdentifier().iterator();
while (identifiers.hasNext()) {
Identifier identifier = identifiers.next();
if (identifier.getSystem().equals(TransformerConstants.CODING_BBAPI_BENE_HICN_UNHASHED))
hicnUnhashedPresent = true;
if (identifier
.getSystem()
.equals(TransformerConstants.CODING_BBAPI_MEDICARE_BENEFICIARY_ID_UNHASHED))
mbiUnhashedPresent = true;
}
assertTrue(hicnUnhashedPresent);
assertTrue(mbiUnhashedPresent);
}
  /**
   * Verifies that the correct bene id is returned when a hicn points to more than one bene id in
   * either the Beneficiaries and/or BeneficiariesHistory table. Each scenario below delegates to
   * {@code assertPatientByHashTypeMatch}, passing the expected bene id and the unhashed HICN to
   * search with.
   */
  @Test
  public void searchForExistingPatientByHicnHashWithBeneDups() {
    List<Object> loadedRecords =
        ServerTestUtils.get()
            .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
    // load additional Beneficiary and Beneficiary History records for
    // testing
    loadedRecords.addAll(
        ServerTestUtils.get()
            .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_HICN_MULT_BENES.getResources())));
    IGenericClient fhirClient = createFhirClient();
    // Split the loaded records into current (Beneficiaries) and historical
    // (BeneficiariesHistory) rows for the scenario assertions below.
    Stream<Beneficiary> beneficiariesStream =
        loadedRecords.stream().filter(r -> r instanceof Beneficiary).map(r -> (Beneficiary) r);
    List<Beneficiary> beneficiariesList = beneficiariesStream.collect(Collectors.toList());
    Stream<BeneficiaryHistory> beneficiariesHistoryStream =
        loadedRecords.stream()
            .filter(r -> r instanceof BeneficiaryHistory)
            .map(r -> (BeneficiaryHistory) r);
    List<BeneficiaryHistory> beneficiariesHistoryList =
        beneficiariesHistoryStream.collect(Collectors.toList());
    boolean useHicnFromBeneficiaryTable;
    boolean expectsSingleBeneMatch;
    /*
     * The following scenario tests when the same hicn is in the
     * Beneficiaries table but points to different bene ids.
     *
     */
    useHicnFromBeneficiaryTable = true;
    expectsSingleBeneMatch = false;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "567834", // expected bene id
        "543217066U", // unhashed hicn to search with
        useHicnFromBeneficiaryTable,
        "hicn",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when only one hicn is in the Beneficiaries table
     */
    useHicnFromBeneficiaryTable = true;
    expectsSingleBeneMatch = true;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "123456NULLREFYR", // expected bene id
        "543217066N", // unhashed hicn to search with
        useHicnFromBeneficiaryTable,
        "hicn",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when the same hicn is in the
     * Beneficiaries and also in the BeneficiariesHistory table. The bene id
     * is different between the tables
     */
    useHicnFromBeneficiaryTable = true;
    expectsSingleBeneMatch = false;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "BENE1234", // expected bene id
        "SAMEHICN", // unhashed hicn to search with
        useHicnFromBeneficiaryTable,
        "hicn",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when the requested hicn is only in the
     * BeneficiariesHistory table. Use the bene id from the
     * BeneficiariesHistory table to then read the Beneficiaries table.
     *
     */
    useHicnFromBeneficiaryTable = false;
    expectsSingleBeneMatch = true;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "55555", // expected bene id
        "HISTHICN", // unhashed hicn to search with
        useHicnFromBeneficiaryTable,
        "hicn",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when the requested hicn is only in the
     * BeneficiariesHistory table but this hicn points to more than one bene
     * id in history.
     */
    useHicnFromBeneficiaryTable = false;
    expectsSingleBeneMatch = false;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "66666", // expected bene id
        "DUPHISTHIC", // unhashed hicn to search with
        useHicnFromBeneficiaryTable,
        "hicn",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when a hicn is not found in the
     * Beneficiaries and BeneficiariesHistory table.
     *
     */
    Bundle searchResults =
        fhirClient
            .search()
            .forResource(Patient.class)
            .where(
                Patient.IDENTIFIER
                    .exactly()
                    .systemAndIdentifier(
                        TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, "notfoundhicn"))
            .returnBundle(Bundle.class)
            .execute();
    assertEquals(0, searchResults.getTotal());
  }
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for a {@link Patient} that does exist in the DB, including identifiers to
* return the unhashed HICN and MBI.
*/
@Test
public void searchForExistingPatientByHicnHashIncludeIdentifiersFalse() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient("false", "true");
Beneficiary beneficiary =
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.findFirst()
.get();
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, beneficiary.getHicn()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
/*
* Ensure the unhashed values for HICN and MBI are *not* present.
*/
Boolean hicnUnhashedPresent = false;
Boolean mbiUnhashedPresent = false;
Iterator<Identifier> identifiers = patientFromSearchResult.getIdentifier().iterator();
while (identifiers.hasNext()) {
Identifier identifier = identifiers.next();
if (identifier.getSystem().equals(TransformerConstants.CODING_BBAPI_BENE_HICN_UNHASHED))
hicnUnhashedPresent = true;
if (identifier
.getSystem()
.equals(TransformerConstants.CODING_BBAPI_MEDICARE_BENEFICIARY_ID_UNHASHED))
mbiUnhashedPresent = true;
}
assertFalse(hicnUnhashedPresent);
assertFalse(mbiUnhashedPresent);
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for a {@link Patient} that does exist in the DB, with paging.
*/
@Test
public void searchForExistingPatientByHicnHashWithPaging() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient();
Beneficiary beneficiary =
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.findFirst()
.get();
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, beneficiary.getHicn()))
.count(1)
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
assertEquals(1, searchResults.getTotal());
Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
BeneficiaryTransformerTest.assertMatches(
beneficiary, patientFromSearchResult, getRHwithIncldAddrFldHdr("false"));
/*
* Verify that only the first and last paging links exist, since there should
* only be one page.
*/
assertNotNull(searchResults.getLink(Constants.LINK_FIRST));
assertNull(searchResults.getLink(Constants.LINK_NEXT));
assertNull(searchResults.getLink(Constants.LINK_PREVIOUS));
assertNotNull(searchResults.getLink(Constants.LINK_LAST));
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for HICNs that should be present as a {@link BeneficiaryHistory} record.
*/
@Test
public void searchForExistingPatientByHistoricalHicnHash() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient();
loadedRecords.stream()
.filter(r -> r instanceof BeneficiaryHistory)
.map(r -> (BeneficiaryHistory) r)
.forEach(
h -> {
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, h.getHicn()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
assertEquals(1, searchResults.getTotal());
Patient patientFromSearchResult =
(Patient) searchResults.getEntry().get(0).getResource();
assertEquals(
h.getBeneficiaryId(), patientFromSearchResult.getIdElement().getIdPart());
});
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for HICNs associated with {@link Beneficiary}s that have <strong>no</strong>
* {@link BeneficiaryHistory} records.
*/
@Test
public void searchForExistingPatientWithNoHistory() {
List<Object> loadedRecords =
ServerTestUtils.get().loadData(Arrays.asList(StaticRifResource.SAMPLE_A_BENES));
IGenericClient fhirClient = createFhirClient();
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.forEach(
h -> {
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, h.getHicn()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
assertEquals(1, searchResults.getTotal());
Patient patientFromSearchResult =
(Patient) searchResults.getEntry().get(0).getResource();
assertEquals(
h.getBeneficiaryId(), patientFromSearchResult.getIdElement().getIdPart());
});
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for HICNs associated with {@link Beneficiary}s that have <strong>no</strong>
* {@link BeneficiaryHistory} records.
*/
@Test
public void searchForExistingPatientWithNoHistoryIncludeIdentifiersTrue() {
List<Object> loadedRecords =
ServerTestUtils.get().loadData(Arrays.asList(StaticRifResource.SAMPLE_A_BENES));
IGenericClient fhirClient = createFhirClient("true", "true");
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.forEach(
h -> {
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, h.getHicn()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
assertEquals(1, searchResults.getTotal());
Patient patientFromSearchResult =
(Patient) searchResults.getEntry().get(0).getResource();
assertEquals(
h.getBeneficiaryId(), patientFromSearchResult.getIdElement().getIdPart());
});
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for a {@link Patient} that does not exist in the DB.
*/
@Test
public void searchForMissingPatientByHicnHash() {
IGenericClient fhirClient = createFhirClient();
// No data is loaded, so this should return 0 matches.
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, "1234"))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
assertEquals(0, searchResults.getTotal());
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
*
* <p>works as expected for a {@link Patient} that does exist in the DB.
*/
@Test
public void searchForExistingPatientByMbiHash() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient();
Beneficiary beneficiary =
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.findFirst()
.get();
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_MBI_HASH,
beneficiary.getMbiHash().get()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
/*
* Verify that no paging links exist within the bundle.
*/
assertNull(searchResults.getLink(Constants.LINK_FIRST));
assertNull(searchResults.getLink(Constants.LINK_NEXT));
assertNull(searchResults.getLink(Constants.LINK_PREVIOUS));
assertNull(searchResults.getLink(Constants.LINK_LAST));
assertEquals(1, searchResults.getTotal());
Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
BeneficiaryTransformerTest.assertMatches(
beneficiary, patientFromSearchResult, getRHwithIncldAddrFldHdr("false"));
String mbiHashIdentifier =
patientFromSearchResult.getIdentifier().stream()
.filter(
identifier ->
identifier.getSystem().equals(TransformerConstants.CODING_BBAPI_BENE_MBI_HASH))
.findFirst()
.get()
.getValue();
assertEquals(beneficiary.getMbiHash().get(), mbiHashIdentifier, "mbiHash identifier exists");
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for a {@link Patient} that does exist in the DB, including identifiers to
* return the unhashed HICN and MBI.
*/
@Test
public void searchForExistingPatientByMbiHashIncludeIdentifiersTrue() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient("true", "true");
Beneficiary beneficiary =
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.findFirst()
.get();
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_MBI_HASH,
beneficiary.getMbiHash().get()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
/*
* Ensure the unhashed values for HICN and MBI are present.
*/
Boolean hicnUnhashedPresent = false;
Boolean mbiUnhashedPresent = false;
Iterator<Identifier> identifiers = patientFromSearchResult.getIdentifier().iterator();
while (identifiers.hasNext()) {
Identifier identifier = identifiers.next();
if (identifier.getSystem().equals(TransformerConstants.CODING_BBAPI_BENE_HICN_UNHASHED))
hicnUnhashedPresent = true;
if (identifier
.getSystem()
.equals(TransformerConstants.CODING_BBAPI_MEDICARE_BENEFICIARY_ID_UNHASHED))
mbiUnhashedPresent = true;
}
assertTrue(hicnUnhashedPresent);
assertTrue(mbiUnhashedPresent);
}
  /**
   * Verifies that the correct bene id or exception is returned when an MBI points to more than one
   * bene id in either the Beneficiaries and/or BeneficiariesHistory table. Each scenario below
   * delegates to {@code assertPatientByHashTypeMatch}, passing the expected bene id and the
   * unhashed MBI to search with.
   */
  @Test
  public void searchForExistingPatientByMbiHashWithBeneDups() {
    List<Object> loadedRecords =
        ServerTestUtils.get()
            .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
    // load additional Beneficiary and Beneficiary History records for
    // testing
    loadedRecords.addAll(
        ServerTestUtils.get()
            .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_HICN_MULT_BENES.getResources())));
    IGenericClient fhirClient = createFhirClient();
    // Split the loaded records into current (Beneficiaries) and historical
    // (BeneficiariesHistory) rows for the scenario assertions below.
    Stream<Beneficiary> beneficiariesStream =
        loadedRecords.stream().filter(r -> r instanceof Beneficiary).map(r -> (Beneficiary) r);
    List<Beneficiary> beneficiariesList = beneficiariesStream.collect(Collectors.toList());
    Stream<BeneficiaryHistory> beneficiariesHistoryStream =
        loadedRecords.stream()
            .filter(r -> r instanceof BeneficiaryHistory)
            .map(r -> (BeneficiaryHistory) r);
    List<BeneficiaryHistory> beneficiariesHistoryList =
        beneficiariesHistoryStream.collect(Collectors.toList());
    boolean useMbiFromBeneficiaryTable;
    boolean expectsSingleBeneMatch;
    /*
     * The following scenario tests when the same mbi is in the
     * Beneficiaries table but points to different bene ids.
     */
    useMbiFromBeneficiaryTable = true;
    expectsSingleBeneMatch = false;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "567834", // expected bene id
        "3456789", // unhashed mbi to search with
        useMbiFromBeneficiaryTable,
        "mbi",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when only one mbi is in the
     * Beneficiaries table.
     */
    useMbiFromBeneficiaryTable = true;
    expectsSingleBeneMatch = true;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "123456NULLREFYR", // expected bene id
        "3456789N", // unhashed mbi to search with
        useMbiFromBeneficiaryTable,
        "mbi",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when the same mbi is in the
     * Beneficiaries and also in the BeneficiariesHistory table. The bene id
     * is different between the tables so the bene record from the
     * Beneficiaries table should be used.
     *
     * bene id=BENE1234 mbi=SAMEMBI rfrnc_yr=2019 should be pulled back.
     */
    useMbiFromBeneficiaryTable = true;
    expectsSingleBeneMatch = false;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "BENE1234", // expected bene id
        "SAMEMBI", // unhashed mbi to search with
        useMbiFromBeneficiaryTable,
        "mbi",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when the requested mbi is only in the
     * BeneficiariesHistory table. Use the bene id from the
     * BeneficiariesHistory table to then read the Beneficiaries table.
     */
    useMbiFromBeneficiaryTable = false;
    expectsSingleBeneMatch = true;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "55555", // expected bene id
        "HISTMBI", // unhashed mbi to search with
        useMbiFromBeneficiaryTable,
        "mbi",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when the requested mbi is only in the
     * BeneficiariesHistory table but this mbi points to more than one bene
     * id in history.
     */
    useMbiFromBeneficiaryTable = false;
    expectsSingleBeneMatch = false;
    assertPatientByHashTypeMatch(
        fhirClient,
        beneficiariesList,
        beneficiariesHistoryList,
        "66666", // expected bene id
        "DUPHISTMBI", // unhashed mbi to search with
        useMbiFromBeneficiaryTable,
        "mbi",
        expectsSingleBeneMatch);
    /*
     * The following scenario tests when a mbi is not found in the
     * Beneficiaries and BeneficiariesHistory table.
     *
     */
    Bundle searchResults =
        fhirClient
            .search()
            .forResource(Patient.class)
            .where(
                Patient.IDENTIFIER
                    .exactly()
                    .systemAndIdentifier(
                        TransformerConstants.CODING_BBAPI_BENE_MBI_HASH, "notfoundmbi"))
            .returnBundle(Bundle.class)
            .execute();
    assertEquals(0, searchResults.getTotal());
  }
/**
* The following method tests that a ResourceNotFoundException exception is thrown when there are
* instances of one hash value (hicn or mbi) pointing to more than bene id between the
* Beneficiaries and BeneficiariesHistory tables.
*
* <p>Or that single match is found when the expectsSingleBeneMatch param is = true.
*
* <p>The hashType param chooses which type of values/hash to use. This is either "hicn" or "mbi".
*
* @param fhirClient
* @param beneficiariesList
* @param beneficiariesHistoryList
* @param beneficiaryId
* @param unhashedValue
* @param useFromBeneficiaryTable
* @param hashType
* @param expectsSingleBeneMatch
*/
private void assertPatientByHashTypeMatch(
IGenericClient fhirClient,
List<Beneficiary> beneficiariesList,
List<BeneficiaryHistory> beneficiariesHistoryList,
String beneficiaryId,
String unhashedValue,
Boolean useFromBeneficiaryTable,
String hashType,
Boolean expectsSingleBeneMatch) {
Bundle searchResults = null;
String hicnHashed = "";
String mbiHash = "";
if (hashType != "hicn" && hashType != "mbi") {
fail("hashType value must be: hicn or mbi.");
}
if (useFromBeneficiaryTable) {
if (hashType.equals("hicn")) {
Beneficiary beneficiaryHicnToMatchTo =
beneficiariesList.stream()
.filter(r -> unhashedValue.equals(r.getHicnUnhashed().get()))
.findFirst()
.get();
hicnHashed = beneficiaryHicnToMatchTo.getHicn();
} else if (hashType.equals("mbi")) {
Beneficiary beneficiaryMbiToMatchTo =
beneficiariesList.stream()
.filter(r -> unhashedValue.equals(r.getMedicareBeneficiaryId().get()))
.findFirst()
.get();
mbiHash = beneficiaryMbiToMatchTo.getMbiHash().get();
}
} else {
if (hashType.equals("hicn")) {
BeneficiaryHistory beneficiaryHistoryHicnToMatchTo =
beneficiariesHistoryList.stream()
.filter(r -> unhashedValue.equals(r.getHicnUnhashed().get()))
.findFirst()
.get();
hicnHashed = beneficiaryHistoryHicnToMatchTo.getHicn();
} else if (hashType.equals("mbi")) {
BeneficiaryHistory beneficiaryHistoryMbiToMatchTo =
beneficiariesHistoryList.stream()
.filter(r -> unhashedValue.equals(r.getMedicareBeneficiaryId().get()))
.findFirst()
.get();
mbiHash = beneficiaryHistoryMbiToMatchTo.getMbiHash().get();
}
}
try {
// return bene record based on unhashedValue passed to this method
if (hashType.equals("hicn")) {
searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_HICN_HASH, hicnHashed))
.returnBundle(Bundle.class)
.execute();
} else if (hashType.equals("mbi")) {
searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_MBI_HASH, mbiHash))
.returnBundle(Bundle.class)
.execute();
}
if (!expectsSingleBeneMatch) {
// Should throw exception before here, so assert a failed test.
fail("An exception was expected when there are duplicate bene id matches.");
}
} catch (ResourceNotFoundException e) {
// Test passes if an exception was thrown.
}
// Validate result if a single match is expected for test.
if (expectsSingleBeneMatch) {
assertNotNull(searchResults);
assertEquals(1, searchResults.getTotal());
Beneficiary beneficiary =
beneficiariesList.stream()
.filter(r -> beneficiaryId.equals(r.getBeneficiaryId()))
.findAny()
.get();
Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
BeneficiaryTransformerTest.assertMatches(
beneficiary, patientFromSearchResult, getRHwithIncldAddrFldHdr("false"));
}
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for a {@link Patient} that does exist in the DB, including identifiers to
* return the unhashed HICN and MBI.
*/
@Test
public void searchForExistingPatientByMbiHashIncludeIdentifiersFalse() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient("false", "true");
Beneficiary beneficiary =
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.findFirst()
.get();
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_MBI_HASH,
beneficiary.getMbiHash().get()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
/*
* Ensure the unhashed values for HICN and MBI are *not* present.
*/
Boolean hicnUnhashedPresent = false;
Boolean mbiUnhashedPresent = false;
Iterator<Identifier> identifiers = patientFromSearchResult.getIdentifier().iterator();
while (identifiers.hasNext()) {
Identifier identifier = identifiers.next();
if (identifier.getSystem().equals(TransformerConstants.CODING_BBAPI_BENE_HICN_UNHASHED))
hicnUnhashedPresent = true;
if (identifier
.getSystem()
.equals(TransformerConstants.CODING_BBAPI_MEDICARE_BENEFICIARY_ID_UNHASHED))
mbiUnhashedPresent = true;
}
assertFalse(hicnUnhashedPresent);
assertFalse(mbiUnhashedPresent);
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for a {@link Patient} that does exist in the DB, with paging.
*/
@Test
public void searchForExistingPatientByMbiHashWithPaging() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient();
Beneficiary beneficiary =
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.findFirst()
.get();
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_MBI_HASH,
beneficiary.getMbiHash().get()))
.count(1)
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
assertEquals(1, searchResults.getTotal());
Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
BeneficiaryTransformerTest.assertMatches(
beneficiary, patientFromSearchResult, getRHwithIncldAddrFldHdr("false"));
/*
* Verify that only the first and last paging links exist, since there should
* only be one page.
*/
assertNotNull(searchResults.getLink(Constants.LINK_FIRST));
assertNull(searchResults.getLink(Constants.LINK_NEXT));
assertNull(searchResults.getLink(Constants.LINK_PREVIOUS));
assertNotNull(searchResults.getLink(Constants.LINK_LAST));
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for MBIs that should be present as a {@link BeneficiaryHistory} record.
*/
@Test
public void searchForExistingPatientByHistoricalMbiHash() {
List<Object> loadedRecords =
ServerTestUtils.get()
.loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
IGenericClient fhirClient = createFhirClient();
loadedRecords.stream()
.filter(r -> r instanceof BeneficiaryHistory)
.map(r -> (BeneficiaryHistory) r)
.forEach(
h -> {
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_MBI_HASH,
h.getMbiHash().get()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
assertEquals(1, searchResults.getTotal());
Patient patientFromSearchResult =
(Patient) searchResults.getEntry().get(0).getResource();
assertEquals(
h.getBeneficiaryId(), patientFromSearchResult.getIdElement().getIdPart());
});
}
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
* works as expected for MBIs associated with {@link Beneficiary}s that have <strong>no</strong>
* {@link BeneficiaryHistory} records.
*/
@Test
public void searchForExistingPatientByMbiWithNoHistory() {
List<Object> loadedRecords =
ServerTestUtils.get().loadData(Arrays.asList(StaticRifResource.SAMPLE_A_BENES));
IGenericClient fhirClient = createFhirClient();
loadedRecords.stream()
.filter(r -> r instanceof Beneficiary)
.map(r -> (Beneficiary) r)
.forEach(
h -> {
Bundle searchResults =
fhirClient
.search()
.forResource(Patient.class)
.where(
Patient.IDENTIFIER
.exactly()
.systemAndIdentifier(
TransformerConstants.CODING_BBAPI_BENE_MBI_HASH,
h.getMbiHash().get()))
.returnBundle(Bundle.class)
.execute();
assertNotNull(searchResults);
assertEquals(1, searchResults.getTotal());
Patient patientFromSearchResult =
(Patient) searchResults.getEntry().get(0).getResource();
assertEquals(
h.getBeneficiaryId(), patientFromSearchResult.getIdElement().getIdPart());
});
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
 * works as expected for MBIs associated with {@link Beneficiary}s that have <strong>no</strong>
 * {@link BeneficiaryHistory} records, when both the include-identifiers and
 * include-address-fields request headers are set to <code>"true"</code>.
 */
@Test
public void searchForExistingPatientByMbiWithNoHistoryIncludeIdentifiersTrue() {
  List<Object> loadedRecords =
      ServerTestUtils.get().loadData(Arrays.asList(StaticRifResource.SAMPLE_A_BENES));
  // Client configured with IncludeIdentifiers="true" and IncludeAddressFields="true".
  IGenericClient fhirClient = createFhirClient("true", "true");
  loadedRecords.stream()
      .filter(r -> r instanceof Beneficiary)
      .map(r -> (Beneficiary) r)
      .forEach(
          h -> {
            // Search for this bene by its hashed MBI.
            Bundle searchResults =
                fhirClient
                    .search()
                    .forResource(Patient.class)
                    .where(
                        Patient.IDENTIFIER
                            .exactly()
                            .systemAndIdentifier(
                                TransformerConstants.CODING_BBAPI_BENE_MBI_HASH,
                                h.getMbiHash().get()))
                    .returnBundle(Bundle.class)
                    .execute();
            // Exactly one Patient should match, and it should be the loaded bene.
            assertNotNull(searchResults);
            assertEquals(1, searchResults.getTotal());
            Patient patientFromSearchResult =
                (Patient) searchResults.getEntry().get(0).getResource();
            assertEquals(
                h.getBeneficiaryId(), patientFromSearchResult.getIdElement().getIdPart());
          });
}
/**
 * Verifies that {@link
 * gov.cms.bfd.server.war.stu3.providers.PatientResourceProvider#searchByIdentifier(ca.uhn.fhir.rest.param.TokenParam)}
 * works as expected for a {@link Patient} that does not exist in the DB.
 */
@Test
public void searchForMissingPatientByMbiHash() {
  IGenericClient fhirClient = createFhirClient();

  // Nothing has been loaded, so searching by an arbitrary MBI hash must find nothing.
  Bundle results =
      fhirClient
          .search()
          .forResource(Patient.class)
          .where(
              Patient.IDENTIFIER
                  .exactly()
                  .systemAndIdentifier(TransformerConstants.CODING_BBAPI_BENE_MBI_HASH, "1234"))
          .returnBundle(Bundle.class)
          .execute();

  assertNotNull(results);
  assertEquals(0, results.getTotal());
}
/**
 * Verifies that {@link
 * PatientResourceProvider#searchByCoverageContract(ca.uhn.fhir.rest.param.TokenParam,
 * ca.uhn.fhir.rest.param.TokenParam, String, ca.uhn.fhir.rest.api.server.RequestDetails)} works
 * as expected.
 */
@Test
public void searchByPartDContract() {
  // Load the sample bene along with its MBI history records.
  List<Object> loadedRecords =
      ServerTestUtils.get()
          .loadData(
              Arrays.asList(
                  StaticRifResource.SAMPLE_A_BENES,
                  StaticRifResource.SAMPLE_A_MEDICARE_BENEFICIARY_ID_HISTORY,
                  StaticRifResource.SAMPLE_A_MEDICARE_BENEFICIARY_ID_HISTORY_EXTRA));
  IGenericClient fhirClient = createFhirClientWithIncludeIdentifiersMbi();
  // Should return a single match: contract S4607 in reference year 2018.
  Bundle searchResults =
      fhirClient
          .search()
          .forResource(Patient.class)
          .where(
              new TokenClientParam("_has:Coverage.extension")
                  .exactly()
                  .systemAndIdentifier(
                      CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.PTDCNTRCT01),
                      "S4607"))
          .where(
              new TokenClientParam("_has:Coverage.rfrncyr")
                  .exactly()
                  .systemAndIdentifier(
                      CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.RFRNC_YR),
                      "2018"))
          .returnBundle(Bundle.class)
          .execute();
  // Verify that it found the expected bene.
  assertNotNull(searchResults);
  assertEquals(1, searchResults.getEntry().size());
  Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
  Beneficiary expectedBene = (Beneficiary) loadedRecords.get(0);
  assertEquals(
      expectedBene.getBeneficiaryId(), patientFromSearchResult.getIdElement().getIdPart());
  /*
   * Verify that the unhashed MBIs are present, as expected. Note that checking for more than just
   * one MBI and verifying that they're all unique is a regression test for BFD-525.
   */
  assertEquals(
      3,
      patientFromSearchResult.getIdentifier().stream()
          .filter(
              i ->
                  i.getSystem()
                      .equals(TransformerConstants.CODING_BBAPI_MEDICARE_BENEFICIARY_ID_UNHASHED))
          .collect(Collectors.toSet())
          .size());
}
/**
 * Verifies that {@link
 * PatientResourceProvider#searchByCoverageContract(ca.uhn.fhir.rest.param.TokenParam,
 * ca.uhn.fhir.rest.param.TokenParam, String, ca.uhn.fhir.rest.api.server.RequestDetails)} works
 * as expected, when no year is specified (hopefully causing it to substitute the current year).
 */
@Test
public void searchByPartDContractWithoutYear() {
  /*
   * TODO Once AB2D has switched to always specifying the year, this needs to become an invalid
   * request and this test will need to be updated to reflect that, then.
   */
  List<Object> loadedRecords =
      ServerTestUtils.get().loadData(Arrays.asList(StaticRifResource.SAMPLE_A_BENES));
  IGenericClient fhirClient = createFhirClientWithIncludeIdentifiersMbi();
  // First, adjust the bene's reference year in the DB.
  // Every BeneficiaryMonthly row is rewritten so its yearMonth falls in the CURRENT year,
  // since a year-less search is expected to default to the current year.
  ServerTestUtils.get()
      .doTransaction(
          (entityManager) -> {
            CriteriaBuilder builder = entityManager.getCriteriaBuilder();
            // Read all BeneficiaryMonthly rows...
            CriteriaQuery<BeneficiaryMonthly> select =
                builder.createQuery(BeneficiaryMonthly.class);
            select.from(BeneficiaryMonthly.class);
            List<BeneficiaryMonthly> beneMonthlys =
                entityManager.createQuery(select).getResultList();
            // ...then update each one individually, keyed by its parent bene and old yearMonth.
            for (BeneficiaryMonthly beneMonthly : beneMonthlys) {
              LocalDate yearMonth = beneMonthly.getYearMonth();
              CriteriaUpdate<BeneficiaryMonthly> update =
                  builder.createCriteriaUpdate(BeneficiaryMonthly.class);
              Root<BeneficiaryMonthly> beneMonthlyRoot = update.from(BeneficiaryMonthly.class);
              // Keep month and day, but move the row into the current year.
              update.set(
                  BeneficiaryMonthly_.yearMonth,
                  LocalDate.of(
                      Year.now().getValue(),
                      yearMonth.getMonthValue(),
                      yearMonth.getDayOfMonth()));
              update.where(
                  builder.equal(
                      beneMonthlyRoot.get(BeneficiaryMonthly_.parentBeneficiary),
                      beneMonthly.getParentBeneficiary()),
                  builder.equal(beneMonthlyRoot.get(BeneficiaryMonthly_.yearMonth), yearMonth));
              entityManager.createQuery(update).executeUpdate();
            }
          });
  // Should return a single match: note that no reference-year criterion is supplied.
  Bundle searchResults =
      fhirClient
          .search()
          .forResource(Patient.class)
          .where(
              new TokenClientParam("_has:Coverage.extension")
                  .exactly()
                  .systemAndIdentifier(
                      CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.PTDCNTRCT01),
                      "S4607"))
          .returnBundle(Bundle.class)
          .execute();
  // Verify that it found the expected bene.
  assertNotNull(searchResults);
  assertEquals(1, searchResults.getEntry().size());
  Patient patientFromSearchResult = (Patient) searchResults.getEntry().get(0).getResource();
  Beneficiary expectedBene = (Beneficiary) loadedRecords.get(0);
  assertEquals(
      expectedBene.getBeneficiaryId(), patientFromSearchResult.getIdElement().getIdPart());
}
/**
 * Verifies that {@link
 * PatientResourceProvider#searchByCoverageContract(ca.uhn.fhir.rest.param.TokenParam,
 * ca.uhn.fhir.rest.param.TokenParam, String, ca.uhn.fhir.rest.api.server.RequestDetails)} works
 * as expected, when paging is requested.
 */
@Test
public void searchByPartDContractWithPaging() {
  ServerTestUtils.get().loadData(Arrays.asList(StaticRifResource.SAMPLE_A_BENES));
  IGenericClient fhirClient = createFhirClientWithIncludeIdentifiersMbi();

  // Search for the sample bene's contract/year, requesting one result per page.
  Bundle firstPage =
      fhirClient
          .search()
          .forResource(Patient.class)
          .where(
              new TokenClientParam("_has:Coverage.extension")
                  .exactly()
                  .systemAndIdentifier(
                      CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.PTDCNTRCT01),
                      "S4607"))
          .where(
              new TokenClientParam("_has:Coverage.rfrncyr")
                  .exactly()
                  .systemAndIdentifier(
                      CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.RFRNC_YR),
                      "2018"))
          .count(1)
          .returnBundle(Bundle.class)
          .execute();

  // The single matching bene fits on the first page, so no "next" link should be present.
  assertNotNull(firstPage);
  assertEquals(1, firstPage.getEntry().size());
  assertNull(firstPage.getLink(Constants.LINK_NEXT));
}
/**
 * Verifies that {@link
 * PatientResourceProvider#searchByCoverageContract(ca.uhn.fhir.rest.param.TokenParam,
 * ca.uhn.fhir.rest.param.TokenParam, String, ca.uhn.fhir.rest.api.server.RequestDetails)} works
 * as expected, when searching for a contract-year-month with no benes.
 */
@Test
public void searchByPartDContractForEmptyContract() {
  ServerTestUtils.get().loadData(Arrays.asList(StaticRifResource.SAMPLE_A_BENES));
  IGenericClient fhirClient = createFhirClientWithIncludeIdentifiersMbi();
  // Search for a contract/year combination that no loaded bene belongs to,
  // so no matches are expected.
  Bundle searchResults =
      fhirClient
          .search()
          .forResource(Patient.class)
          .where(
              new TokenClientParam("_has:Coverage.extension")
                  .exactly()
                  .systemAndIdentifier(
                      CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.PTDCNTRCT01),
                      "A1234"))
          .where(
              new TokenClientParam("_has:Coverage.rfrncyr")
                  .exactly()
                  .systemAndIdentifier(
                      CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.RFRNC_YR),
                      "2010"))
          .returnBundle(Bundle.class)
          .execute();
  assertNotNull(searchResults);
  assertEquals(0, searchResults.getEntry().size());
}
/**
 * Verifies that {@link
 * PatientResourceProvider#searchByCoverageContract(ca.uhn.fhir.rest.param.TokenParam,
 * ca.uhn.fhir.rest.param.TokenParam, String, ca.uhn.fhir.rest.api.server.RequestDetails)} works
 * as expected, when an invalid year is specified.
 */
@Test
public void searchByPartDContractWithInvalidYear() {
  ServerTestUtils.get().loadData(Arrays.asList(StaticRifResource.SAMPLE_A_BENES));
  IGenericClient fhirClient = createFhirClientWithIncludeIdentifiersMbi();
  // A non-numeric reference year ("ABC") should be rejected with an InvalidRequestException.
  assertThrows(
      InvalidRequestException.class,
      () -> {
        fhirClient
            .search()
            .forResource(Patient.class)
            .where(
                new TokenClientParam("_has:Coverage.extension")
                    .exactly()
                    .systemAndIdentifier(
                        CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.PTDCNTRCT01),
                        "S4607"))
            .where(
                new TokenClientParam("_has:Coverage.rfrncyr")
                    .exactly()
                    .systemAndIdentifier(
                        CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.RFRNC_YR),
                        "ABC"))
            .returnBundle(Bundle.class)
            .execute();
      });
}
/**
 * Verifies that <code>_lastUpdated</code> query filters correctly include and exclude the loaded
 * {@link Beneficiary} when searching for its {@link Patient} by id.
 */
@Test
public void searchWithLastUpdated() {
  List<Object> loadedRecords =
      ServerTestUtils.get()
          .loadData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
  IGenericClient fhirClient = createFhirClient();
  // Grab the first loaded bene; its id is used for every search below.
  Beneficiary beneficiary =
      loadedRecords.stream()
          .filter(r -> r instanceof Beneficiary)
          .map(r -> (Beneficiary) r)
          .findFirst()
          .get();
  // Build up a list of _lastUpdated filters that should all match the loaded bene:
  // everything between earlyDateTime (before the data load) and nowDateTime (just after it).
  String nowDateTime = new DateTimeDt(Date.from(Instant.now().plusSeconds(1))).getValueAsString();
  String earlyDateTime = "2019-10-01T00:00:00-04:00";
  List<String> allUrls =
      Arrays.asList(
          "_lastUpdated=gt" + earlyDateTime,
          "_lastUpdated=ge" + earlyDateTime,
          "_lastUpdated=le" + nowDateTime,
          "_lastUpdated=ge" + earlyDateTime + "&_lastUpdated=le" + nowDateTime,
          "_lastUpdated=gt" + earlyDateTime + "&_lastUpdated=lt" + nowDateTime);
  testLastUpdatedUrls(fhirClient, beneficiary.getBeneficiaryId(), allUrls, 1);
  // Filters entirely before the load time should match nothing.
  List<String> emptyUrls =
      Arrays.asList("_lastUpdated=lt" + earlyDateTime, "_lastUpdated=le" + earlyDateTime);
  testLastUpdatedUrls(fhirClient, beneficiary.getBeneficiaryId(), emptyUrls, 0);
}
/**
 * Runs a search for each of the given {@code _lastUpdated} filter values and asserts that every
 * one returns the expected number of matches.
 *
 * @param fhirClient to use
 * @param id the beneficiary id to use
 * @param urls is a list of lastUpdate values to test to find
 * @param expectedValue number of matches
 */
private void testLastUpdatedUrls(
    IGenericClient fhirClient, String id, List<String> urls, int expectedValue) {
  String baseUrl = "Patient?_id=" + id + "&_format=application%2Fjson%2Bfhir";
  for (String lastUpdatedValue : urls) {
    // Append this _lastUpdated filter to the base query and run it.
    String searchUrl = baseUrl + "&" + lastUpdatedValue;
    Bundle bundle = fhirClient.search().byUrl(searchUrl).returnBundle(Bundle.class).execute();
    String failureMessage =
        String.format(
            "Expected %s to filter resources using lastUpdated correctly", lastUpdatedValue);
    assertEquals(expectedValue, bundle.getTotal(), failureMessage);
  }
}
/**
 * Test helper: builds a {@link RequestHeaders} wrapper carrying only the include-identifiers
 * header.
 *
 * @param value of all include identifier values
 * @return RequestHeaders instance derived from value
 */
public static RequestHeaders getRHwithIncldIdntityHdr(String value) {
  RequestHeaders headers =
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS, value);
  return headers;
}
/**
 * Test helper: builds a {@link RequestHeaders} wrapper carrying only the include-address-fields
 * header.
 *
 * @param value of all include address fields values
 * @return RequestHeaders instance derived from value
 */
public static RequestHeaders getRHwithIncldAddrFldHdr(String value) {
  RequestHeaders headers =
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS, value);
  return headers;
}
/**
 * helper create a client w/o extra params
 *
 * @return the client
 */
public static IGenericClient createFhirClient() {
  // Delegate with no extra request headers registered.
  RequestHeaders noHeaders = null;
  return createFhirClient(noHeaders);
}
/**
 * @return a FHIR {@link IGenericClient} where the {@link
 *     CommonHeaders#HEADER_NAME_INCLUDE_IDENTIFIERS} is set to <code>"mbi"</code>
 */
public static IGenericClient createFhirClientWithIncludeIdentifiersMbi() {
  return createFhirClient(
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS, "mbi"));
}
/**
 * helper
 *
 * @param idHdrVal - includeIdentifiers header value
 * @param addrHdrVal - includeAddressFields header value
 * @return the client
 */
public static IGenericClient createFhirClient(String idHdrVal, String addrHdrVal) {
  // Wrap both header values and delegate to the RequestHeaders-based factory.
  return createFhirClient(
      RequestHeaders.getHeaderWrapper(
          PatientResourceProvider.HEADER_NAME_INCLUDE_IDENTIFIERS,
          idHdrVal,
          PatientResourceProvider.HEADER_NAME_INCLUDE_ADDRESS_FIELDS,
          addrHdrVal));
}
/**
 * helper
 *
 * @return the client with extra params registered
 */
public static IGenericClient createFhirClient(RequestHeaders requestHeader) {
  IGenericClient fhirClient = ServerTestUtils.get().createFhirClient();
  // No extra headers requested: return the plain client as-is.
  if (requestHeader == null) {
    return fhirClient;
  }
  // Otherwise attach an interceptor that adds the requested headers to every request.
  ExtraParamsInterceptor extraParamsInterceptor = new ExtraParamsInterceptor();
  extraParamsInterceptor.setHeaders(requestHeader);
  fhirClient.registerInterceptor(extraParamsInterceptor);
  return fhirClient;
}
}
|
// Server-only publications and collection write rules.
if (Meteor.isServer) {
  // Settings are only published to logged-in users; anonymous clients get nothing.
  Meteor.publish('settings', function () {
    if(this.userId) return Settings.find();
  });
  // Permissions are likewise only published to logged-in users.
  Meteor.publish('permissions', function () {
    if(this.userId) return Permissions.find();
  });
  // Movies: logged-in users see every field; anonymous users see all docs with
  // the `user` field stripped out.
  Meteor.publish('movies', function () {
    if (this.userId) {
      return Movies.find({});
    } else {
      return Movies.find({}, {fields: {user: 0}});
    }
  })
  // TV follows the same pattern as movies.
  Meteor.publish("tv", function(){
    if (this.userId) {
      return TV.find({});
    } else {
      return TV.find({}, {fields: {user: 0}});
    }
  });
  // NOTE(review): this allows ANY client (including anonymous ones) to update
  // any Permissions document — confirm this is intentional and not a leftover
  // from development.
  Permissions.allow({
    update: function () {
      return true;
    }
  });
};
|
<filename>CarteService/default/src/main/java/org/rzo/yajsw/wrapper/TrayIconMessage.java
package org.rzo.yajsw.wrapper;
import org.rzo.yajsw.wrapper.TrayIconProxy.Types;
/**
 * Simple value object bundling a tray-icon notification: its type, caption, and message text.
 */
public class TrayIconMessage {
  Types _type;
  String _caption;
  String _message;

  /** Creates a message with the given type, caption, and body text. */
  public TrayIconMessage(Types type, String caption, String message) {
    this._type = type;
    this._caption = caption;
    this._message = message;
  }

  /** @return the message as a 3-element array: [type, caption, message] */
  public String[] toStringArray() {
    return new String[] {_type.toString(), _caption, _message};
  }
}
|
package com.linkedin.metadata.search.elasticsearch.query.request;
import com.linkedin.metadata.models.EntitySpec;
import com.linkedin.metadata.models.SearchableFieldSpec;
import com.linkedin.metadata.models.annotation.SearchScoreAnnotation;
import com.linkedin.metadata.models.annotation.SearchableAnnotation.FieldType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.Nonnull;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.index.query.functionscore.FieldValueFactorFunctionBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
/**
 * Builds the Elasticsearch query used to serve search requests: a function-score query combining
 * exact-keyword and tokenized-text matches against an entity's searchable fields with score
 * functions derived from the entity's annotations.
 */
public class SearchQueryBuilder {
  // Analyzer used for case-insensitive exact matching on keyword fields.
  private static final String KEYWORD_LOWERCASE_ANALYZER = "custom_keyword";
  // Analyzer used for word-tokenized matching on text fields.
  private static final String TEXT_ANALYZER = "word_delimited";
  // Field types indexed with a ".delimited" subfield for tokenized matching.
  private static final Set<FieldType> TYPES_WITH_DELIMITED_SUBFIELD =
      new HashSet<>(Arrays.asList(FieldType.TEXT, FieldType.TEXT_PARTIAL));
  // Field types indexed with an ".ngram" subfield for partial matching.
  private static final Set<FieldType> TYPES_WITH_NGRAM_SUBFIELD =
      new HashSet<>(Arrays.asList(FieldType.TEXT_PARTIAL, FieldType.URN_PARTIAL));

  // Static utility class; not instantiable.
  private SearchQueryBuilder() {
  }

  /**
   * Builds the complete search query for the given entity and raw query string: the field-match
   * query wrapped in a function-score query built from the entity's score annotations.
   */
  public static QueryBuilder buildQuery(@Nonnull EntitySpec entitySpec, @Nonnull String query) {
    return QueryBuilders.functionScoreQuery(buildInternalQuery(entitySpec, query), buildScoreFunctions(entitySpec))
        .scoreMode(FunctionScoreQuery.ScoreMode.AVG) // Average score functions
        .boostMode(CombineFunction.MULTIPLY); // Multiply score function with the score from query
  }

  /**
   * Builds the field-match part of the query: a bool query that "should"-matches the keyword
   * query and/or the tokenized text query over the entity's query-by-default fields.
   */
  private static QueryBuilder buildInternalQuery(@Nonnull EntitySpec entitySpec, @Nonnull String query) {
    BoolQueryBuilder finalQuery = QueryBuilders.boolQuery();
    // Key word lowercase queries do case agnostic exact matching between document value and query
    QueryStringQueryBuilder keywordLowercaseQuery = QueryBuilders.queryStringQuery(query);
    keywordLowercaseQuery.analyzer(KEYWORD_LOWERCASE_ANALYZER);
    keywordLowercaseQuery.defaultOperator(Operator.AND);
    // Text queries tokenize input query and document value into words before checking for matches
    QueryStringQueryBuilder textQuery = QueryBuilders.queryStringQuery(query);
    textQuery.analyzer(TEXT_ANALYZER);
    textQuery.defaultOperator(Operator.AND);
    for (SearchableFieldSpec fieldSpec : entitySpec.getSearchableFieldSpecs()) {
      // Skip fields that are not meant to be searched by default.
      if (!fieldSpec.getSearchableAnnotation().isQueryByDefault()) {
        continue;
      }
      String fieldName = fieldSpec.getSearchableAnnotation().getFieldName();
      double boostScore = fieldSpec.getSearchableAnnotation().getBoostScore();
      keywordLowercaseQuery.field(fieldName, (float) (boostScore));
      FieldType fieldType = fieldSpec.getSearchableAnnotation().getFieldType();
      // Subfield matches are weaker signals than exact matches, so they get reduced boosts.
      if (TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType)) {
        textQuery.field(fieldName + ".delimited", (float) (boostScore * 0.4));
      }
      if (TYPES_WITH_NGRAM_SUBFIELD.contains(fieldType)) {
        textQuery.field(fieldName + ".ngram", (float) (boostScore * 0.1));
      }
    }
    // Only add the queries in if corresponding fields exist
    if (!keywordLowercaseQuery.fields().isEmpty()) {
      finalQuery.should(keywordLowercaseQuery);
    }
    if (!textQuery.fields().isEmpty()) {
      finalQuery.should(textQuery);
    }
    return finalQuery;
  }

  /**
   * Builds the score functions for the entity: a base weight of 1, plus per-field-value weights
   * from @Searchable annotations, plus field-value-factor functions from @SearchScore annotations.
   */
  private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildScoreFunctions(@Nonnull EntitySpec entitySpec) {
    List<FunctionScoreQueryBuilder.FilterFunctionBuilder> finalScoreFunctions = new ArrayList<>();
    // Add a default weight of 1.0 to make sure the score function is larger than 1
    finalScoreFunctions.add(
        new FunctionScoreQueryBuilder.FilterFunctionBuilder(ScoreFunctionBuilders.weightFactorFunction(1.0f)));
    entitySpec.getSearchableFieldSpecs()
        .stream()
        .flatMap(fieldSpec -> fieldSpec.getSearchableAnnotation()
            .getWeightsPerFieldValue()
            .entrySet()
            .stream()
            .map(entry -> buildWeightFactorFunction(fieldSpec.getSearchableAnnotation().getFieldName(), entry.getKey(),
                entry.getValue())))
        .forEach(finalScoreFunctions::add);
    entitySpec.getSearchScoreFieldSpecs()
        .stream()
        .map(fieldSpec -> buildScoreFunctionFromSearchScoreAnnotation(fieldSpec.getSearchScoreAnnotation()))
        .forEach(finalScoreFunctions::add);
    return finalScoreFunctions.toArray(new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]);
  }

  /**
   * Builds a score function that applies {@code weight} when {@code fieldName} equals
   * {@code fieldValue}.
   */
  private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildWeightFactorFunction(@Nonnull String fieldName,
      @Nonnull Object fieldValue, double weight) {
    return new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery(fieldName, fieldValue),
        ScoreFunctionBuilders.weightFactorFunction((float) weight));
  }

  /**
   * Builds a field-value-factor score function from a @SearchScore annotation, applying its
   * weight, default value, and optional modifier.
   */
  private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildScoreFunctionFromSearchScoreAnnotation(
      @Nonnull SearchScoreAnnotation annotation) {
    FieldValueFactorFunctionBuilder scoreFunction =
        ScoreFunctionBuilders.fieldValueFactorFunction(annotation.getFieldName());
    scoreFunction.factor((float) annotation.getWeight());
    scoreFunction.missing(annotation.getDefaultValue());
    annotation.getModifier().ifPresent(modifier -> scoreFunction.modifier(mapModifier(modifier)));
    return new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction);
  }

  /**
   * Maps an annotation modifier to its Elasticsearch equivalent. LOG and LN map to the +1
   * variants (LOG1P/LN1P); unrecognized modifiers fall back to NONE.
   */
  private static FieldValueFactorFunction.Modifier mapModifier(SearchScoreAnnotation.Modifier modifier) {
    switch (modifier) {
      case LOG:
        return FieldValueFactorFunction.Modifier.LOG1P;
      case LN:
        return FieldValueFactorFunction.Modifier.LN1P;
      case SQRT:
        return FieldValueFactorFunction.Modifier.SQRT;
      case SQUARE:
        return FieldValueFactorFunction.Modifier.SQUARE;
      case RECIPROCAL:
        return FieldValueFactorFunction.Modifier.RECIPROCAL;
      default:
        return FieldValueFactorFunction.Modifier.NONE;
    }
  }
}
|
/**
* @license Copyright (c) 2003-2021, CKSource - <NAME>. All rights reserved.
* For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
import FontBackgroundColor from './../src/fontbackgroundcolor';
import FontBackgroundColorEditing from './../src/fontbackgroundcolor/fontbackgroundcolorediting';
import FontBackgroundColorUI from '../src/fontbackgroundcolor/fontbackgroundcolorui';
describe( 'FontBackgroundColor', () => {
	it( 'requires FontBackgroundColorEditing and FontBackgroundColorUI', () => {
		// The glue plugin should pull in both the editing and the UI parts.
		const requiredPlugins = FontBackgroundColor.requires;

		expect( requiredPlugins ).to.deep.equal( [ FontBackgroundColorEditing, FontBackgroundColorUI ] );
	} );

	it( 'defines plugin name', () => {
		const pluginName = FontBackgroundColor.pluginName;

		expect( pluginName ).to.equal( 'FontBackgroundColor' );
	} );
} );
|
# Copy the public headers, sources, and plugins from the repository root into
# ./Classes so they can be bundled with this project.

# Headers go directly into ./Classes.
cp -avx -f ../include/* ./Classes

# Sources are mirrored under ./Classes/src.
# mkdir -p is idempotent, so no separate existence check is needed (the old
# [[ -d ]] + mkdir pairing was racy and failed if a parent dir was missing).
srcPath="./Classes/src"
mkdir -p "$srcPath"
cp -avx -f ../src/* "$srcPath"

# Plugins are mirrored under ./Classes/plugins.
pluginsPath="./Classes/plugins"
mkdir -p "$pluginsPath"
cp -avx -f ../plugins/* "$pluginsPath"
<gh_stars>0
# Entry point for the Deedveloper gem: load all of its components.
require_relative "./deedveloper/version"
require_relative './deedveloper/controller'
require_relative "./deedveloper/scraper"
require_relative "./deedveloper/job"

# Top-level namespace for the gem; the classes it contains are defined in the
# files required above.
module Deedveloper
end
<reponame>lemoi/aobot<filename>src/server/index.js
const express = require('express');
const http = require('http');
const https = require('../utils/https');
const transmit = require('../utils/transmit');
const socket = require('../sync/socket');
const app = express();
const caDownload = require('../ssl/ca-download');
const connect = require('../utils/connect');

// Mount a middleware/sub-app on the shared express application.
function register(service) {
  app.use(service);
}

// Create and start the HTTP server on `port`, then invoke `cb`.
// Plain requests are routed to the express app. CONNECT requests go either to
// the handler produced by the local `https.createServer(app)` (when `ssl` is
// truthy) or to the raw `connect` handler; the sync socket is attached to the
// same server, and the CA-download and transmit services are registered.
// NOTE(review): `https` and `connect` here are project utilities, not Node's
// built-ins — the exact CONNECT-tunneling semantics are assumed from their
// names; confirm against ../utils.
function run(ssl, port, cb) {
  const server = http.createServer();
  server.on('request', app);
  server.on('connect', ssl ? https.createServer(app) : connect);
  socket.start(server);
  register(caDownload);
  register(transmit({}));
  server.listen(port, cb);
}

exports.run = run;
exports.register = register;
|
module.exports = function (from, to) {
from = RegExp(from);
return function (req, res, next) {
req.url = req.url.replace(from, to);
next();
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.