identifier
stringlengths 42
383
| collection
stringclasses 1
value | open_type
stringclasses 1
value | license
stringlengths 0
1.81k
| date
float64 1.99k
2.02k
⌀ | title
stringlengths 0
100
| creator
stringlengths 1
39
| language
stringclasses 157
values | language_type
stringclasses 2
values | word_count
int64 1
20k
| token_count
int64 4
1.32M
| text
stringlengths 5
1.53M
| __index_level_0__
int64 0
57.5k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
https://github.com/sbd3/bigdata-examples/blob/master/bigdata-java/src/main/java/geeksforgeeks/medium/MoneyDistributions.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,018
|
bigdata-examples
|
sbd3
|
Java
|
Code
| 73
| 188
|
package geeksforgeeks.medium;
import java.util.Scanner;
public class MoneyDistributions {
public static void main(String[] args) {
Scanner scan = new Scanner(System.in);
int noOfTests = scan.nextInt();
for (int i = 0; i < noOfTests; i++) {
long n = scan.nextInt();
long k = scan.nextInt();
long mod = 1000000007;
long mul = 1;
while(k > 1) {
mul = (mul * (((n-1) % mod) * ((k-1) % mod))) % mod;
k--;
}
System.out.println(mul);
}
scan.close();
}
}
| 50,366
|
https://github.com/Joyesong/mobstac-awesome-qr/blob/master/src/tests/QR-frame.test.ts
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
mobstac-awesome-qr
|
Joyesong
|
TypeScript
|
Code
| 616
| 2,482
|
import 'mocha';
import { CanvasType, DataPattern, EyeBallShape, EyeFrameShape, GradientType, QRCodeFrame, QRErrorCorrectLevel } from '../Enums';
import { QRCodeBuilder } from '../index';
import { QRCode } from '../Models';
// tslint:disable-next-line:no-var-requires
const fs = require('fs');
const frameStyle = QRCodeFrame.BANNER_TOP;
const size = 1024;
const fileNameSuffix = `${frameStyle}_${size}_c`

// Builds the shared QR config used by every frame test; only the length of
// the frame text differs between cases.
function makeFrameConfig(textLength: number) {
    return {
        text: "www.beaconstac.com",
        canvasType: CanvasType.SVG,
        dotScale: 1,
        colorDark: "#13544A",
        frameStyle: frameStyle,
        frameText: "Q".repeat(textLength),
        frameColor: "#0E9E88",
        size: size,
        margin: 40 / 512 * size
    };
}

const c_12 = makeFrameConfig(12);
const c_16 = makeFrameConfig(16);
const c_20 = makeFrameConfig(20);
const c_24 = makeFrameConfig(24);
const c_27 = makeFrameConfig(27);
const c_30 = makeFrameConfig(30);
/**
 * Converts a rendered QR canvas into a named PNG buffer suitable for writing
 * under the frameTests fixture directory.
 *
 * @param qrCode rendered QR code whose canvas supports toDataURL()
 * @param name   base file name (without extension)
 * @returns object with `name` (path relative to __dirname) and `buffer`
 *          (decoded PNG bytes)
 */
function prepareImageBuffer(qrCode: QRCode, name: string) {
    const dataUrl = qrCode.canvas.toDataURL('image/png');
    // Split "data:<mime>;base64,<payload>" into mime type and payload.
    const matches = dataUrl.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
    if (!matches) {
        // Fail loudly instead of the original's opaque "cannot read
        // property of null" when the canvas yields an unexpected data URL.
        throw new Error('Unexpected data URL format from canvas.toDataURL');
    }
    const imageBuffer = Buffer.from(matches[2], "base64");
    const extension = 'png';
    const fileName = `/frameTests/${name}` + "." + extension;
    return {
        name: fileName,
        buffer: imageBuffer
    };
}
describe('QR code frame tests', () => {
    // One entry per frame-text length; each renders both an SVG and a PNG.
    const cases = [
        { length: 12, config: c_12 },
        { length: 16, config: c_16 },
        { length: 20, config: c_20 },
        { length: 24, config: c_24 },
        { length: 27, config: c_27 },
        { length: 30, config: c_30 },
    ];

    for (const { length, config } of cases) {
        it(`Frame test SVG ${length} characters`, done => {
            const qrCodeGenerator = new QRCodeBuilder(config);
            qrCodeGenerator.build(CanvasType.SVG).then(qrCode => {
                fs.writeFileSync(
                    __dirname + `/frameTests/${fileNameSuffix}_${length}.` + CanvasType.SVG.toLowerCase(),
                    qrCode.toBuffer());
                done();
            }).catch(err => {
                console.error(err);
                // Bug fix: the original called done() with no argument here,
                // so build failures were reported as passing tests.
                done(err);
            });
        });

        it(`Frame test PNG ${length} characters`, done => {
            const qrCodeGenerator = new QRCodeBuilder(config);
            qrCodeGenerator.build(CanvasType.PNG).then(qrCode => {
                const bufferObject = prepareImageBuffer(qrCode, `${fileNameSuffix}_${length}`);
                fs.writeFileSync(__dirname + bufferObject.name, bufferObject.buffer);
                done();
            }).catch(err => {
                console.error(err);
                done(err);
            });
        });
    }
});
| 36,316
|
https://github.com/ZaqueuCavalcante/Data-Structures/blob/master/Graph/sketch.js
|
Github Open Source
|
Open Source
|
MIT
| null |
Data-Structures
|
ZaqueuCavalcante
|
JavaScript
|
Code
| 63
| 264
|
// p5.js entry point: sizes the canvas and builds the demo graph.
function setup() {
    createCanvas(700, 500);
    ug = new UndirectedGraph();

    // Five labelled nodes, indexed 0..4 in insertion order.
    for (const label of ["A", "B", "C", "D", "E"]) {
        ug.addNode(label);
    }

    // Weighted edges as [from, to, weight] index triples.
    const edges = [
        [0, 1, 12], [0, 2, 11], [0, 3, 5],
        [1, 2, 22], [1, 3, 55], [1, 4, 69],
        [2, 3, 10], [2, 4, 9],
        [3, 4, 1],
    ];
    for (const [a, b, weight] of edges) {
        ug.connectNodes(a, b, weight);
    }
}
// p5.js render loop: repaints the graph and lets the mouse drag nodes.
function draw() {
    background(100);
    ug.show();

    if (!mouseIsPressed) {
        return;
    }
    // While the button is held, any node under the cursor follows the mouse.
    for (const node of ug.nodes) {
        if (node.mouseAbove()) {
            node.setPosition(mouseX, mouseY);
        }
    }
}
| 8,565
|
https://github.com/fishergj/louyi_ry/blob/master/src/main/java/com/ruoyi/project/venue/order/mapper/OrderMapper.java
|
Github Open Source
|
Open Source
|
MIT
| null |
louyi_ry
|
fishergj
|
Java
|
Code
| 188
| 810
|
package com.ruoyi.project.venue.order.mapper;
import java.util.HashMap;
import java.util.List;
import com.ruoyi.project.system.wechat.domain.Comment;
import com.ruoyi.project.venue.order.bo.OrderResultBo;
import com.ruoyi.project.venue.order.domain.Order;
import com.ruoyi.project.venue.order.domain.OrderStastics;
import com.ruoyi.project.venue.order.domain.OrderVo;
import org.apache.ibatis.annotations.Param;

/**
 * MyBatis mapper for venue reservation orders: CRUD, status updates and the
 * various reservation statistics queries.
 */
public interface OrderMapper {
    /**
     * Inserts a new reservation order.
     * @param order the order to insert
     * @return number of rows inserted
     */
    public int insert(Order order);
    /**
     * Queries the list of reservation records.
     * @param bo filter criteria
     * @return matching orders
     */
    public List<Order> selectOrderList(Order bo);
    /**
     * Gets order information by id.
     * @param id order id
     * @return the order result
     */
    public OrderResultBo getOrderById(long id);
    /**
     * Looks up a reservation detail record.
     * @param id order id
     * @return the order
     */
    public Order selectOrder(int id);
    /**
     * Updates the order status.
     * @param map parameters (keys defined by the mapper XML)
     * @return number of rows updated
     */
    public int updateStatus(HashMap<String,Object> map);
    /**
     * Forcibly changes the reserved time slot.
     * @param bo order carrying the new time slot
     * @return number of rows updated
     */
    public int updateOrderTime(OrderResultBo bo);
    /**
     * Counts reservation headcounts.
     */
    public List<OrderStastics> selectOrderStasticsList(Order order);
    /**
     * Counts historical headcounts.
     * @param order filter criteria
     * @return aggregated statistics
     */
    public OrderStastics selectHisOrderStastics(Order order);
    /**
     * Aggregates historical data by weekday.
     * @param order filter criteria
     * @return per-weekday statistics
     */
    public List<OrderStastics> selectHisOrderStasticsByWeek(Order order);
    /**
     * Aggregates the coming week's data by weekday.
     * @param map parameters (keys defined by the mapper XML)
     * @return per-weekday statistics
     */
    public List<OrderStastics> selectNextOrderStasticsByWeek(HashMap<String,Object> map);
    /**
     * Gets the number of reservations awaiting review.
     * @return pending-review count
     */
    public int selectWaiteAuditCount();
    /**
     * Total number of people reserved for a given time slot.
     * @param id time-slot id
     * @return reserved headcount
     */
    public int selectSubscribePerson(int id);
    /**
     * Queries the current valid reservations by official-account openid.
     * @param w_no the WeChat openid
     * @return currently valid orders
     */
    List<Order> getNormalOrderByWno(@Param("w_no") String w_no);
    // Count of valid reservations for a given time slot.
    int getValidCountByTimeId(@Param("subscribe_time_id") int subscribe_time_id);
    // Inserts a user comment tied to an order.
    int insertComment(Comment comment);
    // Valid reservation view objects for a WeChat openid.
    List<OrderVo> getNormalOrderVoByWno(String w_no);
    // Cancels a reservation by id; returns rows updated.
    int cancelReserveById(@Param("id") int id);
}
| 34,967
|
https://github.com/ai-ba/tesler-ui/blob/master/src/reducers/data.ts
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
tesler-ui
|
ai-ba
|
TypeScript
|
Code
| 235
| 734
|
import {AnyAction, types} from '../actions/actions'
import {DataState, DataItem} from '../interfaces/data'
const initialState: DataState = {}
const emptyData: DataItem[] = []
/**
 * Redux reducer for per-business-component (BC) data records.
 * State shape: { [bcName]: DataItem[] }, plus `${bcName}Delta` entries that
 * hold pending association changes.
 */
export function view(state = initialState, action: AnyAction) {
    switch (action.type) {
        case types.bcFetchDataSuccess: {
            // Nested fetches (depth > 1) are stored elsewhere; only replace
            // the BC's record list for top-level fetches.
            return (action.payload.depth && action.payload.depth > 1)
                ? state
                : {
                    ...state,
                    [action.payload.bcName]: action.payload.data
                }
        }
        case types.bcNewDataSuccess: {
            // Append the newly created record to the BC's list.
            return {
                ...state,
                [action.payload.bcName]: [ ...(state[action.payload.bcName] || emptyData), action.payload.dataItem ]
            }
        }
        case types.bcSaveDataSuccess: {
            // Replace the saved record in place, matched by id.
            const nextDataItem = action.payload.dataItem
            return {
                ...state,
                [action.payload.bcName]: (state[action.payload.bcName] || emptyData).map(item => item.id === nextDataItem.id
                    ? nextDataItem
                    : item
                )
            }
        }
        case types.bcFetchRowMetaSuccess: {
            const cursor = action.payload.cursor
            if (!cursor) {
                return state
            }
            // Merge row-meta field values into the record the cursor points
            // at; vstamp -1 marks the record as not yet persisted.
            const prevDataItem = (state[action.payload.bcName] || emptyData).find(item => item.id === cursor)
            const nextDataItem: DataItem = {
                ...prevDataItem,
                id: cursor,
                vstamp: -1,
                _associate: prevDataItem && prevDataItem._associate
            }
            // BC is unable to update value from row meta if id is null
            const valueUpdateUnsupported = action.payload.rowMeta.fields
                .find(item => item.key === 'id' && !item.currentValue)
            if (valueUpdateUnsupported) {
                return state
            }
            action.payload.rowMeta.fields.filter(field => {
                // TODO: check if previous condition covered that case
                return field.key !== '_associate'
            })
            .forEach(field => nextDataItem[field.key] = field.currentValue)
            if (!prevDataItem) {
                // No existing record for the cursor: append the new one.
                return {
                    ...state,
                    [action.payload.bcName]: [ ...(state[action.payload.bcName] || emptyData), nextDataItem ]
                }
            }
            // Replace by object identity (not id) so only the exact record
            // found above is swapped out.
            return {
                ...state,
                [action.payload.bcName]: (state[action.payload.bcName] || emptyData).map(item => item === prevDataItem
                    ? nextDataItem
                    : item
                )
            }
        }
        case types.changeAssociations:
            // Pending association records live under a "<bcName>Delta" key.
            return {
                ...state,
                [`${action.payload.bcName}Delta`]: action.payload.records
            }
        case types.selectView: {
            // Switching views discards all fetched data.
            return initialState
        }
        default:
            return state
    }
}
export default view
| 21,754
|
https://github.com/CodingNav/Easy-Eats-Angular/blob/master/client/src/app/features/recipe-page/recipe-page.component.spec.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
Easy-Eats-Angular
|
CodingNav
|
TypeScript
|
Code
| 55
| 186
|
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { RecipePageComponent } from './recipe-page.component';
// Smoke tests for RecipePageComponent: verifies the component can be
// compiled and instantiated by the Angular TestBed.
describe('RecipePageComponent', () => {
    let component: RecipePageComponent;
    let fixture: ComponentFixture<RecipePageComponent>;

    // Compile the component's template and styles before each spec.
    beforeEach(async () => {
        await TestBed.configureTestingModule({
            declarations: [ RecipePageComponent ]
        })
        .compileComponents();
    });

    // Create a fresh fixture and run initial change detection.
    beforeEach(() => {
        fixture = TestBed.createComponent(RecipePageComponent);
        component = fixture.componentInstance;
        fixture.detectChanges();
    });

    it('should create', () => {
        expect(component).toBeTruthy();
    });
});
| 20,469
|
https://github.com/magma/magma/blob/master/lte/gateway/release/pydep
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,023
|
magma
|
magma
|
Python
|
Code
| 3,997
| 11,304
|
#!/usr/bin/python3
# Copyright 2020 The Magma Authors.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script needs to be run inside the dev VM.
"""
pydep is a tool to deal with finding, building, and tracking Python
dependencies that get shipped as part of a release process that results in
Debian packages.
There are two modes for pydep. First, you can pass in a setup.py file (or a
list of python dependencies), and pydep will recursively determine the list of
Python packages that your input depends on. From there, it'll generate a
"lockfile", which is a JSON description of specific packages and versions that
will satisfy the collective set of dependencies. If debian packages for the
Python packages you're interested in are available, it'll assume you can use
those (and that those handle their own dependencies); otherwise, pydep can
download the packages from PyPI and build them as Debian packages for you
(using fpm).
Second, given a lockfile, it can produce a "dependency string" that you can
pass as a parameter to fpm.
"""
from __future__ import annotations
import argparse
import contextlib
import copy
import glob
import hashlib
import json
import logging
import os
import pickle
import re
import shlex
import shutil
import subprocess
import sys
from functools import lru_cache, partial
from typing import Any, Dict, List, NamedTuple, Optional, Tuple, Union, cast
import apt # type: ignore
import pkg_resources
import requests
log = logging.getLogger(__name__)
PYPI_API_BASE = "https://pypi.python.org/pypi/{}/json"
PIP_BLACKLIST = {'pip', 'wheel', 'setuptools', 'virtualenv'}
# In general, system package repos use the same name as PyPI package names,
# with a prefix (for example, python3-requests is the system package for
# requests). Some packages break this convention, however, and we maintain a
# list here. We use the all lower-case version of the package name here.
PYPI_TO_DEB = {
'pyyaml': 'yaml',
'msgpack-python': 'msgpack',
'scapy-python3': 'scapy',
'python-apt': 'apt',
'jsonpointer': 'json-pointer'
}
# Some packages exist in the standard Debian repos with an epoch number
# prepended (this is to handle changes in upstream versioning scheme). This
# allows us to handle that.
DEB_EPOCH = {
'python3-oslo.config': 1,
}
def os_release(path: str = '/etc/os-release') -> dict:
    """Parse an os-release style file into a dict of KEY -> value.

    Args:
        path: File to parse. Defaults to the standard /etc/os-release, so
            existing callers are unaffected.

    Returns:
        Mapping of keys (e.g. 'VERSION_CODENAME') to unquoted values.
        Lines that are not KEY=VALUE pairs are silently skipped.
    """
    # copied from third_party/build/build.py
    # FIXME: should be a magma python library for this kind of thing
    release_info = {}
    with open(path, 'r') as f:
        for line in f:
            try:
                # Split on the first '=' only: values such as PRETTY_NAME
                # may themselves contain '=' characters; the original
                # unbounded split() dropped those lines entirely.
                k, v = line.rstrip().split('=', 1)
                release_info[k] = v.strip('"')
            except ValueError:
                # Blank lines / comments have no '=' pair; ignore them.
                pass
    return release_info
def _md5sum(filename: str) -> str:
"""
It's like calling md5sum. Calculates the md5 checksum of a file.
"""
with open(filename, mode='rb') as f:
d = hashlib.md5()
for buf in iter(partial(f.read, 4096), b''):
d.update(buf)
return d.hexdigest()
def _wget(url: str, dest: Optional[str] = None) -> str:
    """
    It's like calling wget. Downloads file to current working directory if
    dest=None, otherwise puts it in dest (must be a directory).

    Args:
        url: URL to fetch; the final path segment becomes the file name.
        dest: Optional destination directory.

    Returns:
        The local path the file was written to.

    Raises:
        ValueError: when the HTTP response status is not 200.
    """
    filename = url.split('/')[-1]
    if dest:
        local_path = "{}/{}".format(dest, filename)
    else:
        local_path = filename
    # do the request
    r = requests.get(url)
    if r.status_code != 200:
        raise ValueError("couldn't fetch {} ({})".format(url, r.status_code))
    # Only create parent directories when there are any: with dest=None and a
    # bare filename, os.path.dirname() is '' and os.makedirs('') raises
    # FileNotFoundError even with exist_ok=True.
    parent = os.path.dirname(local_path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(local_path, 'wb') as f:
        f.write(r.content)
    return local_path
def gen_sys_package_name(py_pkgname: str, use_py2: bool = False) -> str:
    """Map a PyPI package name to its Debian system package name.

    Normally this is ``python3-<name>`` (``python-<name>`` for py2), but a
    few packages break convention and are looked up in PYPI_TO_DEB instead.

    Args:
        py_pkgname: PyPI package name (canonical name source).
        use_py2: Use the Python 2 prefix ``python-``. Default: False.

    Returns:
        The system package name, lower-cased (deb naming policy).
    """
    prefix = "python" if use_py2 else "python3"
    suffix = PYPI_TO_DEB.get(py_pkgname.lower(), py_pkgname)
    # deb likes lowercase
    return "{}-{}".format(prefix, suffix).lower()
def _get_requires(pkg_dir: str) -> Optional[str]:
"""
Given an unpacked python package, search for a requires.txt file in an
egg-info directory. If we don't have one, assumes we have no deps.
This assumes we have an egg-info directory for the package.
"""
for f in os.walk(pkg_dir):
if "egg-info" in f[0]:
if "requires.txt" in f[2]:
return os.path.join(f[0], "requires.txt")
else:
return None
return None
def get_setup_deps(setup_file: str) -> List[str]:
    """Return the install-time dependency strings for a setup.py file.

    Rather than loading the distribution (which needs a custom setuptools
    command), this runs the built-in ``egg_info`` command so setuptools
    writes a requires.txt under a scratch directory, then parses that file
    like any other requires.txt.

    Args:
        setup_file: Path to a setup.py file.
    """
    scratch = "/tmp/pydep-egginfo/"
    os.makedirs(os.path.dirname(scratch), exist_ok=True)
    cmd = "python3 {} -q egg_info --egg-base {}".format(setup_file, scratch)
    # The return code is deliberately ignored: we check below whether
    # requires.txt exists, and egg_info returns non-zero when setup.py is
    # not in the current working directory even on success.
    subprocess.call(cmd.split())
    requires_path = _get_requires(scratch)
    dependencies = _parse_requires_txt(requires_path) if requires_path else []
    # Remove the scratch directory so repeated runs start clean.
    if os.path.isdir(scratch):
        shutil.rmtree(scratch)
    return dependencies
def _parse_requires_txt(requires_path: str) -> List[str]:
    """Parse a requires.txt into "name op version" dependency strings.

    Only the unconditional section is used: reading stops at the first
    "[extra]" section header. A missing file yields an empty list.

    Args:
        requires_path: Path to a requires.txt file.
    """
    dependencies = []  # type: List[str]
    try:
        with open(requires_path, "r") as fh:
            lines = []
            for raw in fh.readlines():
                stripped = raw.strip()
                if stripped.startswith("["):
                    # Everything under a section header is an optional extra.
                    break
                lines.append(stripped)
            parsed = pkg_resources.parse_requirements(lines)
    except FileNotFoundError:
        return dependencies
    for requirement in parsed:
        if not requirement.specs:
            dependencies.append(requirement.project_name)
            continue
        for op, version in requirement.specs:
            dependencies.append(
                "{} {} {}".format(requirement.project_name, op, version))
    return dependencies
@lru_cache(maxsize=128)
def get_latest_apt_pkg_version(pkgname: str, py2: bool = False) -> Optional[str]:
    """Return the newest version of the package available via apt, if any.

    Args:
        pkgname: PyPI package name (mapped to its system name internally).
        py2: Look up the Python 2 variant of the package.

    Returns:
        The candidate version string, or None when the package -- or any
        installable candidate for it -- is missing from the apt cache.
    """
    cache = apt.Cache()
    sys_pkgname = gen_sys_package_name(pkgname, py2)
    try:
        package = cache[sys_pkgname]
    except KeyError:
        # package isn't available
        return None
    if package.candidate is None:
        # Known to apt but with no installable candidate (e.g. a purely
        # virtual package). The original dereferenced .version here and
        # raised AttributeError; treat it the same as a missing package.
        return None
    return package.candidate.version
def gen_fpm_dep_string_from_lockfile(lockfile_str: str) -> str:
    """Build the fpm dependency arguments for every root package.

    Produces a string such as
    '-d "python3-foo >= 1.0" -d "python3-bar >= 4.2"'. Dependencies always
    use >= to make upgrades easier; packages listed in DEB_EPOCH get their
    epoch number prepended to the version.

    Args:
        lockfile_str: Contents of a lockfile (JSON string).
    """
    lockfile = Lockfile(data=lockfile_str)
    parts = []
    for dep in lockfile.dependencies().values():
        if not dep['root']:
            continue
        syspkg = dep['sysdep']
        version = dep['version']
        if syspkg in DEB_EPOCH:
            version = "{}:{}".format(DEB_EPOCH[syspkg], version)
        parts.append('-d "{} >= {}"'.format(syspkg, version))
    return " ".join(parts)
def _format_dep(syspkg: str, op: Optional[str], ver: Optional[str]) \
        -> Tuple[str, Optional[str], Optional[str]]:
    """
    Formats the dependency for the command line.
    - Any given "=="-operator is overwritten with ">=".
    - Removes any trailing ".*" strings from the version number
    Args:
        syspkg: A package name, e.g. "python3-urllib"
        op: The version operator, e.g. "==" or ">="
        ver: The version number, e.g. "1.2.3"
    Returns:
        A tuple containing syspkg (unaltered), the modified operator,
        and the modified version string.
    """
    out_op = op
    out_ver = ver
    if out_op and '<' not in out_op:
        # TODO: might have issues with '~=' ops using this method
        out_op = '>='
    if out_ver and '.*' in out_ver:
        # Debian versions cannot carry a trailing wildcard; strip it.
        out_ver = re.sub(r'\.\*$', '', out_ver)
    if out_ver and syspkg in DEB_EPOCH:
        log.debug("replacing epoch for {}".format(syspkg))
        out_ver = "{}:{}".format(DEB_EPOCH[syspkg], out_ver)
    return syspkg, out_op, out_ver
class Lockfile(object):
    """
    Add-only copy of a lockfile. To remove a root package or derived
    dependency, manually remove it from the source lockfile.

    The top-level 'root_packages' / 'dependencies' sections describe the
    default release (_default_release); any other release stores only its
    differences under 'override[<release>]'.

    Args:
        data: The lockfile data formatted as json string.
        reference_lockfile: The reference lockfile as lockfile object.
            If no data and reference lockfile are given try to use a
            default lockfile.
    """
    # Release whose packages live in the top-level sections.
    _default_release = 'stretch'

    def __init__(self, data: Optional[str] = None, reference_lockfile: Optional[Lockfile] = None):
        release_info = os_release()
        if 'VERSION_CODENAME' not in release_info:
            log.warning('missing expected key VERSION_CODENAME in /etc/os-release')
            log.warning('dependencies may not be generated correctly')
        self._release = release_info.get('VERSION_CODENAME', self._default_release)
        default_reference_lockfile = "./release/magma.lockfile.{}".format(self._release)
        if data is None and reference_lockfile is None:
            try:
                with open(default_reference_lockfile, "r") as rf:
                    # NOTE(review): the .format() argument is ignored -- this
                    # message has no '{}' placeholder, so the filename is
                    # never logged. Left unchanged (doc-only edit).
                    log.info("Reading default reference lockfile.".format(default_reference_lockfile))
                    reference_lockfile = Lockfile(data=rf.read())
            except FileNotFoundError as e:
                log.error(e)
                log.info("Default lockfile {} not found. Using empty reference lockfile instead."
                         .format(default_reference_lockfile))

        # Lockfile sections are keyed case-insensitively; normalise on load.
        def lower_keys(d: Dict[str, Any]):
            return {k.lower(): v for k, v in d.items()}

        if data:
            lockfile = json.loads(data)
            self._root_packages = lower_keys(lockfile['root_packages'])
            self._dependencies = lower_keys(lockfile['dependencies'])
            self._override = lower_keys(lockfile.get('override', {}))
        elif reference_lockfile:
            self._root_packages = copy.copy(reference_lockfile._root_packages)
            self._dependencies = copy.copy(reference_lockfile._dependencies)
            self._override = copy.copy(reference_lockfile._override)
        else:
            self._root_packages = {}
            self._dependencies = {}
            self._override = {}
        # Non-default releases need an override slot to record their diffs.
        if self._release != self._default_release and self._release not in self._override:
            self._override[self._release] = {'root_packages': {}, 'dependencies': {}}

    def __str__(self):
        # Serialised form mirrors the on-disk lockfile JSON layout.
        output = {}
        output['root_packages'] = self._root_packages
        output['dependencies'] = self._dependencies
        output['override'] = self._override
        return json.dumps(output, sort_keys=True, indent=2)

    def root_packages(self):
        # Default-release packages with this release's overrides applied.
        output = copy.copy(self._root_packages)
        if self._release != self._default_release:
            output.update(copy.copy(self._override[self._release]['root_packages']))
        return output

    def dependencies(self):
        # Default-release dependencies with this release's overrides applied.
        output = copy.copy(self._dependencies)
        if self._release != self._default_release:
            output.update(copy.copy(self._override[self._release]['dependencies']))
        return output

    def add_root_package(self, pkgname, pkginfo):
        # On the default release write straight to the main section;
        # otherwise record an override only when the info actually differs.
        pkgname = pkgname.lower()
        if self._release == self._default_release:
            self._root_packages[pkgname] = pkginfo
        else:
            default_info = self._root_packages.get(pkgname)
            if default_info and default_info == pkginfo:
                # incoming pkginfo is the same as default_info -- no need to override
                pass
            else:
                self._override[self._release]['root_packages'][pkgname] = pkginfo
        return

    def add_dependency(self, pkgname, depinfo):
        # Same override logic as add_root_package, for the deps section.
        pkgname = pkgname.lower()
        if self._release == self._default_release:
            self._dependencies[pkgname] = depinfo
        else:
            default_info = self._dependencies.get(pkgname)
            if default_info and default_info == depinfo:
                # incoming pkginfo is the same as default_info -- no need to override
                pass
            else:
                self._override[self._release]['dependencies'][pkgname] = depinfo
@contextlib.contextmanager
def lcd(path):
    """Temporarily chdir into *path* (with ~ expansion); restore on exit."""
    previous = os.getcwd()
    try:
        os.chdir(os.path.expanduser(path))
        yield
    finally:
        # Restore the original working directory even on exceptions.
        os.chdir(previous)
class PkgInfo(NamedTuple):
    """Fields parsed from a Debian package filename ({name}_{version}_{arch}.deb)."""
    name: str     # system package name, e.g. "python3-requests"
    version: str  # Debian version string
    arch: str     # architecture component, e.g. "amd64"
def _cleanup(pkgname: str) -> None:
"""
Deletes a directory associated with a package, namely /tmp/pyreq-{pkgname}
"""
path = "/tmp/pyreq-{}".format(pkgname)
if os.path.isdir(path):
shutil.rmtree(path)
def py_to_deb(pkgname: str,
              pip_distributions: Dict[str, pkg_resources.Distribution] = None,
              build_output: Optional[str] = None,
              version: Optional[str] = None,
              more_args: Optional[str] = None,
              py2: bool = False) -> int:
    """
    Generates a Debian package from a Python package downloaded from PyPI
    (using fpm).
    This builds a command that we then shell out to run. Roughly, the
    command is:
        fpm -s python -t deb \
            -n {syspkg_name} \
            {--python-package-name-prefix=python3}  # (default) \
            {--python-bin=python3}  # (default) \
            {more_args} \
            {pkgname}{==version}
    Args:
        pkgname: The name of the Python package (on PyPI)
        pip_distributions: List of the pip dependencies.
        build_output: Output directory for the package. Default is the current directory
        version: (optional) The desired version (Default: most recent).
        more_args: (optional) Add raw arguments to the fpm call.
        py2: Whether to use python 2 package names
    Return:
        Exit code of fpm call (0 is success)
    """
    if not pip_distributions:
        pip_distributions = {}
    pkgname = pkgname.lower()
    # Tooling packages (pip, wheel, ...) are never converted to debs.
    if pkgname in PIP_BLACKLIST:
        return 0
    dist = pip_distributions.get(pkgname, None)
    if not dist:
        # by this point any valid package to be built will have an entry
        # in pip_distributions dict
        msg = ('no distributions found in virtualenv '
               '-- invalid config for {}').format(pkgname)
        log.error(msg)
        raise Exception(msg)
    if "/.virtualenvs/" not in dist.location:
        # valid package but found in system packages -- source is apt
        return 0
    syspkg_name = gen_sys_package_name(pkgname, py2)
    if not build_output:
        build_output = os.getcwd()
    with lcd(build_output):
        # Skip the build when a suitable .deb already exists in build_output.
        candidates = glob.glob(build_output + '/' + syspkg_name + '_*.deb')
        if _existing_build(version, candidates):
            log.info('found existing build of ' + syspkg_name + ' in ' + str(candidates))
            return 0
        log.info('attempting to build ' + syspkg_name + ' ' + str(version))
        # Assemble the fpm command; dependencies are declared explicitly
        # below, so fpm's own python dependency resolution is disabled.
        cmd = "fpm -s python -t deb --no-python-dependencies --no-python-internal-pip "
        if not py2:
            cmd += "--python-package-name-prefix=python3 "
            cmd += "--python-bin=python3 "
        cmd += "-n {}".format(syspkg_name)
        # check if the package has an epoch
        if syspkg_name in DEB_EPOCH:
            cmd += " --epoch {}".format(DEB_EPOCH[syspkg_name])
        # Translate each pip requirement into one or more deb dependencies.
        reqs = dist.requires()
        deps = []
        for req in reqs:
            pkg = req.key
            dep_syspkg = gen_sys_package_name(pkg, py2)
            if req.specs:
                for spec in req.specs:
                    op, ver = spec
                    dep = _format_dep(dep_syspkg, op, ver)
                    deps.append(dep)
            else:
                # Unversioned requirement: depend on the package by name only.
                deps.append((dep_syspkg, None, None))
        for dep in deps:
            deppkg, depop, depver = dep
            if depver:
                cmd += ' -d "{} {} {}"'.format(deppkg, depop, depver)
            else:
                cmd += ' -d {}'.format(deppkg)
        if more_args:
            cmd += " {} ".format(more_args)
        # Pin the exact version when one was requested.
        if version:
            cmd += " {}{}{}".format(pkgname, '==', version)
        else:
            cmd += " {}".format(pkgname)
        log.debug(cmd)
        return subprocess.call(shlex.split(cmd))
def _existing_build(version: Optional[str], candidates: List[str]) -> bool:
    """
    Checks if the desired version already exists in candidates.
    Args:
        version: The desired version.
        candidates: A list of paths to different build versions.
            These would be formatted like this
            "{build_output}/{syspkg_name}_{version}.deb".
    Returns:
        Whether the version already exists.
    """
    if not version:
        # No specific version requested: any name match satisfies us.
        return bool(candidates)
    wanted = pkg_resources.parse_version(version)
    for path in candidates:
        parts = os.path.basename(path).split('_')
        if len(parts) <= 2:
            # Not a {name}_{version}_{arch}.deb shaped filename; skip it.
            continue
        info = PkgInfo(name=parts[0],
                       version=parts[1],
                       arch=parts[-1][:-4])
        if pkg_resources.parse_version(info.version) >= wanted:
            return True
    return False
@contextlib.contextmanager
def tmpvirtualenv():
    """
    Creates a context with a temporary virtual environment.

    Yields a callable (see ``venv`` below) that runs commands inside the
    temporary environment. The environment is created with
    virtualenvwrapper's ``mktmpenv`` and torn down on context exit.
    """
    load_virtualenvwrapper = 'source /usr/share/virtualenvwrapper/virtualenvwrapper.sh'
    # Create the env and echo its path so we can recover its name below.
    initcmd = [load_virtualenvwrapper,
               'mktmpenv -p /usr/bin/python3 --system-site-packages 2>&1 > /dev/null',
               'echo $VIRTUAL_ENV']

    def runcmd(cmd: Union[List[str], str],
               capture: int = None) -> subprocess.CompletedProcess:
        # run cmd in separate shell
        # cmd must be
        #   * a single string representing a valid command
        #   * a list of single strings each representing a valid command
        capture = subprocess.PIPE if capture else None
        if isinstance(cmd, list):
            cmd = '; '.join(cmd)
        script = ['/bin/bash', '-c', cmd]
        log.debug(script)
        result = subprocess.run(script, stdout=capture, stderr=capture,
                                check=True)
        return result

    # Guards against use of the venv callable after the context has exited.
    active = True
    envpath = runcmd(initcmd, capture=True).stdout.decode('utf-8')
    envname = envpath.split(os.path.sep)[-1].strip()

    def venv(cmd: Union[List[str], str],
             cd: Optional[str] = None,
             capture: int = None) -> subprocess.CompletedProcess:
        # Runs *cmd* inside the temporary env, optionally cd-ing first.
        # Each call spawns a fresh shell, so the env is re-activated with
        # 'workon' every time.
        if not active:
            raise RuntimeError('tmpvirtualenv context already destroyed')
        setup = [load_virtualenvwrapper,
                 'workon ' + envname]
        if cd:
            setup.append(' '.join(['cd', shlex.quote(cd)]))
        if isinstance(cmd, list):
            cmd = ' '.join([shlex.quote(token) for token in cmd])
        result = runcmd(setup + [cmd], capture=capture)
        return result

    try:
        yield venv
    finally:
        # Deactivate and mark the callable unusable.
        venv('deactivate')
        active = False
def gen_pip_distributions(
        root_requirements: List[pkg_resources.Requirement],
        existing_versions: Optional[Dict[str, str]] = None,
        venv: Optional[callable] = None) -> Dict[str, pkg_resources.Distribution]:
    """
    Resolve the full dependency tree for the given root requirements by
    pip-installing everything inside a throwaway virtualenv.

    Versions already pinned in ``existing_versions`` are kept (installed as
    ``pkg==ver``) unless they no longer satisfy the matching root requirement.

    Args:
        root_requirements: Requirements to resolve.
        existing_versions: Previously locked ``{package: version}`` pins.
        venv: Command runner yielded by :func:`tmpvirtualenv` (required).

    Returns:
        Mapping of package key to the installed ``pkg_resources.Distribution``.

    Raises:
        ValueError: If ``venv`` is not supplied.
    """
    if not venv:
        raise ValueError('must supply valid virtualenv command (see tmpvirtualenv)')
    if not existing_versions:
        existing_versions = {}
    # Work on a copy so the caller's dict is never mutated.
    selected_versions = copy.copy(existing_versions)
    new_requirements = []  # type: List[pkg_resources.Requirement]
    for req in root_requirements:
        log.info(req)
        pkg = req.key
        version = selected_versions.get(pkg)
        if version and version not in req:
            # new version requirement not satisfied by existing version
            # obeying specific instructions -- existing_versions may be
            # inconsistent and need editing before this process succeeds
            del selected_versions[pkg]
            new_requirements.append(req)
        elif not version:
            new_requirements.append(req)
        # else: existing version is fine, do nothing
    # Sorted, shell-quoted install arguments: unpinned requirements first,
    # then the surviving pins as exact 'pkg==ver' specs.
    args = ([shlex.quote(str(r)) for r in sorted(new_requirements,
                                                 key=lambda r: str(r))]
            + [shlex.quote('{}=={}'.format(str(key), ver))
               for key, ver in sorted(selected_versions.items(),
                                      key=lambda item: item[0])])
    log.debug(args)
    # Debian stretch's pip needs the 2020 resolver enabled explicitly.
    release_info = os_release()
    extra_args = []
    if release_info.get('VERSION_CODENAME', '') == 'stretch':
        extra_args.append('--use-feature=2020-resolver')
    # install all packages to populate pkg_resources.working_set
    venv('pip install ' + " ".join(extra_args + args))
    # python code to be run in subprocess inside venv
    # fetching pickled pkg_resources.working_set
    freeze_script = '; '.join(['import pickle',
                               'import sys',
                               'import pkg_resources',
                               ('pickle.dump([d for d in pkg_resources.working_set], '
                                'sys.stdout.buffer)')])
    result = venv(['python', '-c', freeze_script], capture=True)
    # Unpickle the Distribution objects produced inside the venv.
    pip_distributions = {str(p.key): cast(pkg_resources.Distribution, p)
                         for p in pickle.loads(result.stdout)}
    return pip_distributions
def gen_dep_sources(dep_set: Dict[str, Optional[str]],
                    pypi_only: bool = True,
                    py2: bool = False) -> Dict[str, str]:
    """
    Decide, per dependency, where it should be obtained from.

    For every ``{package: version}`` entry in ``dep_set`` we prefer apt when
    the repository carries the exact version, or a version that still
    satisfies ``package>=version``; otherwise we fall back to pypi.

    Args:
        dep_set: Mapping of dependency name to the resolved version.
        pypi_only: When True, mark every dependency as "pypi" unconditionally.
        py2: Whether apt lookups should use python2 package names.

    Returns:
        Mapping of dependency name to its source, e.g. ``{"urllib": "pypi"}``.
    """
    if pypi_only:
        return {name: "pypi" for name in dep_set}
    sources = {}  # type: Dict[str, str]
    for name, version in dep_set.items():
        apt_version = get_latest_apt_pkg_version(name, py2)
        use_apt = False
        if apt_version:
            # Apt versions may not be parseable by pkg_resources, so an exact
            # string match is checked before attempting requirement parsing.
            if version == apt_version:
                use_apt = True
            else:
                requirement = pkg_resources.Requirement.parse(
                    "{}>={}".format(name, version))
                use_apt = apt_version in requirement
        sources[name] = "apt" if use_apt else "pypi"
    return sources
def lockfile(root_packages: Dict[str, Dict[str, str]],
             dep_set: Dict[str, Optional[str]],
             dep_source: Dict[str, str],
             py2: bool = False,
             reference_lockfile: Optional[Lockfile] = None) -> str:
    """
    Serialize the resolved dependency set into lockfile (json) form.

    Root packages are registered first, then every dependency is added in
    sorted order with its version, source ("apt"/"pypi"), whether it is a
    root, and its system package name.
    """
    lock = Lockfile(reference_lockfile=reference_lockfile)
    root_names = set(root_packages)
    for name, info in root_packages.items():
        lock.add_root_package(name, info)
    for name in sorted(dep_set):
        lock.add_dependency(name, {
            "version": dep_set[name],
            "source": dep_source[name],
            "root": name in root_names,
            "sysdep": gen_sys_package_name(name, py2),
        })
    return str(lock)
def build_all(pip_distributions: Dict[str, pkg_resources.Distribution],
              dep_source: Dict[str, str],
              build_output: Optional[str] = None) -> None:
    """
    Build every pypi-sourced pip distribution into a debian package.

    Packages in PIP_BLACKLIST or sourced from apt are skipped.
    """
    for name, dist in pip_distributions.items():
        # contents of PIP_BLACKLIST were excluded from dep_source
        if name in PIP_BLACKLIST or dep_source.get(name) != "pypi":
            continue
        py_to_deb(name, pip_distributions,
                  version=dist.version,
                  build_output=build_output)
def save_lockfile(lockfilename: str, lockfilecontent: str) -> None:
    """
    Save a lockfile string into a file, ensuring a trailing newline.

    Args:
        lockfilename: Path of the file to (over)write.
        lockfilecontent: Serialized lockfile content.
    """
    with open(lockfilename, "w") as f:
        f.write(lockfilecontent)
        # Bug fix: the original indexed lockfilecontent[-1], which raised
        # IndexError when the content was an empty string.
        if not lockfilecontent.endswith("\n"):
            f.write("\n")
def expand_deps(input_deps: List[str]) -> List[pkg_resources.Requirement]:
    """
    Expand, consolidate, and sort all input dependencies.

    Args:
        input_deps: Dependency entries; each is either an explicit
            requirement string or a path containing 'setup.py' whose
            declared dependencies are expanded in place.

    Returns:
        Sorted, consolidated list of pkg_resources.Requirement objects.
    """
    explicit = []  # type: List[str]
    setup_files = []  # type: List[str]
    for entry in input_deps:
        (setup_files if 'setup.py' in entry else explicit).append(entry)
    # Explicit requirements first, then each setup.py's declared deps in order.
    flattened = list(explicit)
    for path in setup_files:
        flattened.extend(get_setup_deps(path))
    requirements = [pkg_resources.Requirement.parse(entry)
                    for entry in flattened]
    # Merging repeated names may reveal inconsistencies in the inputs.
    requirements = _consolidate_dependencies(requirements)
    return sorted(requirements, key=str)
def _consolidate_dependencies(dependencies: List[pkg_resources.Requirement]) -> List[pkg_resources.Requirement]:
    """
    Merge repeated requirements by key, concatenating their version specs.
    """
    merged = {}  # type: Dict[str, pkg_resources.Requirement]
    for requirement in dependencies:
        previous = merged.get(requirement.key)
        if previous is None:
            merged[requirement.key] = requirement
            continue
        # Combine both spec lists into a single comma-joined requirement
        # string, e.g. 'foo>=1,<2' -- pkg_resources re-parses it for us.
        combined = requirement.specs + previous.specs
        spec_text = ','.join(op + ver for op, ver in combined)
        merged[requirement.key] = pkg_resources.Requirement.parse(
            requirement.key + spec_text)
    return list(merged.values())
def split_input_root_requirements(
        input_root_requirements: List[pkg_resources.Requirement],
        use_py2: bool) -> Tuple[List[pkg_resources.Requirement], List[pkg_resources.Requirement]]:
    """
    Partition requirements into apt-installable and pip-only groups.

    Args:
        input_root_requirements: Parsed root requirements.
        use_py2: Whether apt lookups use python2 package names.

    Returns:
        ``(repo_installable, root_requirements)`` -- requirements whose latest
        apt version satisfies them, and those that must come from pip.
    """
    apt_satisfiable = []  # type: List[pkg_resources.Requirement]
    pip_needed = []  # type: List[pkg_resources.Requirement]
    for requirement in input_root_requirements:
        candidate = get_latest_apt_pkg_version(requirement.key, use_py2)
        if candidate and candidate in requirement:
            apt_satisfiable.append(requirement)
        else:
            pip_needed.append(requirement)
    return apt_satisfiable, pip_needed
def get_lockfile(lockfile_path: str) -> Lockfile:
    """
    Safely read a lockfile from the given path.

    Args:
        lockfile_path: Path of the lockfile to read.

    Returns:
        Parsed ``Lockfile``; a default ``Lockfile`` if the file is missing.
    """
    try:
        with open(lockfile_path, 'r') as r:
            lf = Lockfile(data=r.read())
    except FileNotFoundError:
        # Bug fix: the original formatted the module-global ``args.lockfile``
        # here, which is wrong (and a NameError) whenever this function is
        # called with any other path or outside the CLI entry point.
        log.error("Lockfile not found: {}".format(lockfile_path))
        log.info("Use default reference lockfile instead.")
        lf = Lockfile()
    return lf
def get_required_distributions(root_requirements: List[pkg_resources.Requirement],
                               pip_distributions: Dict[str, pkg_resources.Distribution]) \
        -> Dict[str, pkg_resources.Distribution]:
    """
    Collect every distribution transitively required by the root requirements.

    Starting from the distributions matching `root_requirements` (the packages
    not satisfiable via apt), performs a stack-based depth-first traversal of
    each distribution's declared requirements, adding every reachable
    distribution found in `pip_distributions`.

    Args:
        root_requirements: Requirements to start the traversal from.
        pip_distributions: All installed distributions, keyed by package key.

    Returns:
        Mapping of package key to required ``pkg_resources.Distribution``.
    """
    required_distributions = {}
    # NOTE(review): a dependency cycle would loop forever here; pip metadata
    # cycles are rare but possible -- confirm before relying on hostile input.
    to_traverse = [pip_distributions[req.key] for req in root_requirements]
    while to_traverse:
        dist = to_traverse.pop()
        required_distributions[dist.key] = dist
        for req in dist.requires():
            prereq_dist = pip_distributions.get(req.key)
            if prereq_dist:
                to_traverse.append(prereq_dist)
            else:
                # Bug fix: log the missing requirement itself; the original
                # formatted ``prereq_dist``, which is always None here.
                log.error('{} required but not detected'.format(req))
    return required_distributions
def main(args):
    """
    This script resolves and packages python dependencies.
    In general: Dependencies are either obtained via apt (magma artifactory) or pypi.
    Packages that are not on the magma artifactory obtained via pypi and then built as .deb packages.
    Steps:
    1. Exit if we are using a virtualenv.
    2. Expand and consolidate all dependencies given as input (either explicit or in setup.py files) into
    `input_root_requirements`.
    3. Sort dependencies into apt-installable (`repo_installable`) and pip-installable (`root_requirements`).
    4. Open a lockfile if "-l" or "--lockfile" argument is given and write lockfile dependencies into `deps`.
    5. Write all lockfile pypi dependencies into `existing_versions`.
    6. Infer the repo name of apt-installable packages, e.g. for the dependency "jinja2" we infer the name
    "python3-jinja2".
    7. If "--install-from-repo" is given, install apt-installable dependencies. Run a "pip3 uninstall" command before
    to ensure the package was not pip-installed before.
    8. Start a temporary virtual environment.
    8.1 Generate a dict `pip_distributions` with all pip dependencies with `gen_pip_distributions`.
    8.2 Create a dict `to_traverse` which has the same entries as `pip_distributions`, except for what is already in
    `repo_installable`.
    8.3 Traverse `to_traverse` and add upstream dependencies if they are already in `pip_distributions`.
    8.4 Close temporary virtual environment.
    9. Write set of dependencies with exact versions into `dep_set`.
    10. Check where we can get the dependencies in `dep_set` from. If not available on apt, fall back to pypi.
    11. Generate `root_versions` dict which contains all root requirement and versions which are installed via pip and
    not apt. This is different from `root_requirements` because it contains the exact version, rather than a version
    requirement.
    12. If "-l" or "--lockfile" is given, save the lockfile.
    13. If "-b" or "--build" is given, install the python dependencies.
    """
    # Building dependency packages with virtualenv enabled would cause packages
    # to be installed under
    # "/home/vagrant/build/python/lib/python3.4/site-packages/" instead of
    # "/usr/local/lib/python3.4/dist-packages/"
    if "VIRTUAL_ENV" in os.environ:
        log.error("Error: virtualenv detected. Please deactivate.")
        return -1
    input_root_requirements = expand_deps(args.deps)
    if args.dump_root_deps:
        # Print each requirement single-quoted, space-separated, then stop.
        print("'" + "' '".join(sorted([str(r) for r in input_root_requirements])) + "'")
        sys.exit(0)
    repo_installable, root_requirements = split_input_root_requirements(input_root_requirements, args.use_py2)
    repo_pkgs = [gen_sys_package_name(p.key, args.use_py2) for p in repo_installable]
    if args.install_from_repo:
        try:
            # If apt packages are installed over existing pip packages
            # then side effects can occur - see GH13075.
            # Approach: remove existing pip packages before apt install
            pip_packages = [req.key for req in repo_installable]
            subprocess.call(shlex.split('sudo pip3 uninstall -y '
                                        + ' '.join(pip_packages)))
            # NOTE(review): 'apt reinstall' needs a recent apt -- confirm the
            # target distro supports it.
            subprocess.call(shlex.split('sudo apt reinstall -y '
                                        + ' '.join(repo_pkgs)))
        except Exception:
            log.error('error trying to install repo packages')
            raise
    lf = get_lockfile(lockfile_path=args.lockfile)
    deps = lf.dependencies()
    # Existing pypi pins from the lockfile; kept stable across runs.
    existing_versions = {}  # type: Dict[str, str]
    for key in deps:
        if deps[key]['source'] == 'pypi':
            try:
                ver = cast(str, deps[key]['version'])
                existing_versions[key] = ver
            except KeyError as e:
                log.error('{} missing key: {}'.format(key, e))
    log.debug(deps)
    with tmpvirtualenv() as venv:
        pip_distributions = gen_pip_distributions(root_requirements,
                                                  existing_versions=existing_versions,
                                                  venv=venv)
    required_distributions = get_required_distributions(root_requirements=root_requirements,
                                                        pip_distributions=pip_distributions)
    dep_set = {p.key: p.version for p in required_distributions.values()
               if p.key not in PIP_BLACKLIST}
    dep_source = gen_dep_sources(dep_set, pypi_only=args.force_pypi, py2=args.use_py2)
    # Exact resolved versions for the pip-installed root requirements.
    root_versions = {k: {'version': pip_distributions[k].version}
                     for k in [rr.key for rr in root_requirements]}
    log.debug(root_versions)
    save_lockfile(args.lockfile, lockfile(root_versions, dep_set,
                                          dep_source, py2=args.use_py2,
                                          reference_lockfile=lf))
    if args.build:
        build_all(pip_distributions, dep_source, build_output=args.build_output)
if __name__ == "__main__":
    # '{'-style (str.format) log formatting; the original spelled the style
    # argument as '{}'[0], which is just an obfuscated '{'.
    logging.basicConfig(stream=sys.stderr, level=logging.WARNING,
                        format='{asctime} [{levelname:5}] {name}:{funcName}:{lineno} {msg}',
                        style='{')
    parser = argparse.ArgumentParser("pydep")
    subparsers = parser.add_subparsers(help="Sub-commands")
    dep_p = subparsers.add_parser("finddep",
                                  help="Find dependencies")
    dep_p.add_argument("--log-level", default="info")
    dep_p.add_argument('-d', '--dump-root-deps',
                       action='store_true',
                       help="Show root dependencies and exit.")
    dep_p.add_argument('-o', '--old-python', dest='use_py2',
                       action='store_true',
                       help="Target Python 2")
    dep_p.add_argument('-p', '--preserve', dest='preserve',
                       action='store_true', help="Preserve temporary files")
    dep_p.add_argument('-b', '--build', dest='build',
                       action='store_true', help="Build dependency packages")
    dep_p.add_argument('-l', '--lockfile', dest='lockfile',
                       default="pydep.lockfile",
                       help="Write dependencies to a lockfile (default: pydep.lockfile)")
    dep_p.add_argument('--install-from-repo', action='store_true',
                       help='Install packages from configured repo sources (requires sudo)')
    dep_p.add_argument('--pypi', dest='force_pypi', action='store_true',
                       help="Force using PyPI, ignoring system packages.")
    dep_p.add_argument('deps', nargs='+',
                       help=("List of root dependencies or path to a "
                             "setup.py file."))
    dep_p.add_argument('--build-output')
    lock_p = subparsers.add_parser("lockfile",
                                   help="Working with pydep lockfiles.")
    # Fixed: the original statement ended with a stray trailing comma,
    # silently creating a one-element tuple expression.
    lock_p.add_argument('lockfile_path', nargs='?',
                        help=("Generate fpm dependency string from a lockfile,"
                              " then exit immediately."))
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)
    args = parser.parse_args()
    if "lockfile_path" in args and args.lockfile_path:
        # generate the string and return
        with open(args.lockfile_path, "r") as f:
            print(gen_fpm_dep_string_from_lockfile(f.read()))
        # sys.exit instead of the site-provided exit() builtin.
        sys.exit(0)
    # Only the "finddep" subparser defines --log-level; default to "info" so
    # "pydep lockfile" without a path no longer raises AttributeError here.
    log.setLevel(getattr(logging, getattr(args, 'log_level', 'info').upper()))
    sys.exit(main(args))
| 7,569
|
https://github.com/PearsonEducation/apigee-ios-sdk/blob/master/source/Classes/Services/ApigeeSessionMetricsCompiler.h
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
apigee-ios-sdk
|
PearsonEducation
|
C
|
Code
| 34
| 121
|
//
// ApigeeSessionMetricsCompiler.h
// ApigeeAppMonitor
//
// Copyright (c) 2012 Apigee. All rights reserved.
//
#import "ApigeeSessionMetrics.h"
#import "ApigeeActiveSettings.h"
// Builds ApigeeSessionMetrics objects from active settings.
@interface ApigeeSessionMetricsCompiler : NSObject
// Shared system-wide compiler instance (presumably a singleton -- confirm in the .m).
+ (ApigeeSessionMetricsCompiler *) systemCompiler;
// Compiles a metrics object for the given active settings.
- (ApigeeSessionMetrics *) compileMetricsForSettings:(ApigeeActiveSettings *) settings;
@end
| 16,207
|
https://github.com/neverever1533/EvolutionVector/blob/master/src/javaev/io/FileUtils.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
EvolutionVector
|
neverever1533
|
Java
|
Code
| 1,018
| 3,060
|
package javaev.io;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.InvalidPropertiesFormatException;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import cn.imaginary.toolkit.image.ImageLayer;
/**
 * Singleton utility for file-name manipulation, file/filename filters,
 * Properties load/store, and ImageLayer lookups.
 */
public class FileUtils {
    private static FileUtils instance;
    /** Lazily returns the process-wide singleton instance. */
    public static FileUtils getInstance() {
        if (null == instance) {
            syncInit();
        }
        return instance;
    }
    /** Synchronized creation so concurrent first calls build one instance. */
    private static synchronized void syncInit() {
        if (null == instance) {
            instance = new FileUtils();
        }
    }
    // Fallback encoding for storeProperties when none/unsupported is given.
    private String encoding_Default = "utf-8";
    // Suffixes used by the no-arg filter factories; may be null until set.
    private String[] suffixArray;
    // Lower-cased suffix that marks a Properties file as XML-format.
    private String suffixProperties = ".xml";
    private FileUtils() {
    }
    /**
     * Returns true only if every array element is a directory
     * (isDirectoryOnly) or every element is a regular file (otherwise).
     * A null array yields false.
     */
    private boolean checkFileArray(File[] fileArray, boolean isDirectoryOnly) {
        if (null == fileArray) {
            return false;
        }
        File file;
        for (int i = 0; i < fileArray.length; i++) {
            file = fileArray[i];
            if (isDirectoryOnly) {
                if (!file.isDirectory()) {
                    return false;
                }
            } else {
                if (!file.isFile()) {
                    return false;
                }
            }
        }
        return true;
    }
    /**
     * Names of all charsets supported by this JVM.
     * Fixed: {@code toArray()} returns {@code Object[]}, so the original
     * {@code (String[])} cast always threw {@code ClassCastException};
     * the typed {@code toArray(new String[0])} overload is used instead.
     */
    public String[] getCharsetArray() {
        return Charset.availableCharsets().keySet().toArray(new String[0]);
    }
    /** Content type guessed from the file name, or null. */
    public String getContentType(File file) {
        if (null != file) {
            FileNameMapUtils fnmu = new FileNameMapUtils();
            return fnmu.getContentTypeFor(file);
        }
        return null;
    }
    /*
     * Decodes the real content type from file bytes.
     * For now only used to resolve the correct format of audio files.
     */
    public String getContentTypeReal(File file) {
        if (null != file) {
            FileTypeUtils ftu = new FileTypeUtils();
            return ftu.decodeFileType(file);
        }
        return null;
    }
    /**
     * Builds a sibling file "<prefix><suffix>" next to {@code file}.
     * The prefix is {@code file}'s name (directory) or its name without
     * extension; when {@code isReplace} and {@code name} is non-null,
     * {@code name} replaces the prefix.
     */
    public File getFile(File file, Object name, boolean isReplace, String suffix) {
        if (null != file && null != suffix) {
            File dir;
            String fileName;
            if (file.isDirectory()) {
                dir = file;
                fileName = dir.getName();
            } else {
                dir = file.getParentFile();
                fileName = getFileNamePrefix(file);
            }
            StringBuffer sbuf = new StringBuffer();
            if (null != dir && null != fileName) {
                if (null == name || !isReplace) {
                    sbuf.append(fileName);
                } else {
                    sbuf.append(name);
                }
                sbuf.append(suffix);
                return new File(dir, sbuf.toString());
            }
        }
        return null;
    }
    /** Same as the 4-arg overload with isReplace=false. */
    public File getFile(File file, Object name, String suffix) {
        return getFile(file, name, false, suffix);
    }
    /** Same as the 4-arg overload with name=null, isReplace=false. */
    public File getFile(File file, String suffix) {
        return getFile(file, null, suffix);
    }
    /** First file in the list whose absolute path ends with fileName, or null. */
    public File getFile(List<File> list, String fileName) {
        if (null != list && null != fileName) {
            File file;
            for (Iterator<File> iterator = list.iterator(); iterator.hasNext();) {
                file = (File) iterator.next();
                if (null != file) {
                    if (file.getAbsolutePath().endsWith(fileName)) {
                        return file;
                    }
                }
            }
        }
        return null;
    }
    /** FileFilter built from the configured suffix array (may return null). */
    public FileFilter getFileFilter() {
        return getFileFilter(getSuffixArray());
    }
    /**
     * FileFilter accepting directories and files ending with any suffix
     * (case-insensitive on the file name); null array yields null.
     */
    public FileFilter getFileFilter(final String[] suffixArray) {
        if (null != suffixArray) {
            FileFilter fileFilter = new FileFilter() {
                public boolean accept(File pathname) {
                    String name = pathname.getName().toLowerCase();
                    String suffix;
                    for (int i = 0, iLength = suffixArray.length; i < iLength; i++) {
                        suffix = suffixArray[i];
                        if (pathname.isDirectory() || (null != suffix && name.endsWith(suffix))) {
                            return true;
                        }
                    }
                    return false;
                }
            };
            return fileFilter;
        }
        return null;
    }
    /**
     * File name without extension (isSuffix=false) or the extension itself
     * including the dot (isSuffix=true); null if no dot and isSuffix=true.
     */
    private String getFileName(File file, boolean isSuffix) {
        if (null != file) {
            String fileName = file.getName();
            int ids = fileName.lastIndexOf(".");
            if (isSuffix) {
                if (ids != -1) {
                    return fileName.substring(ids);
                }
            } else {
                if (ids != -1) {
                    return fileName.substring(0, ids);
                } else {
                    return fileName;
                }
            }
        }
        return null;
    }
    /** Simple names of the given files. */
    public String[] getFileNameArray(File[] fileArray) {
        return toStringArray(fileArray, true);
    }
    /** FilenameFilter built from the configured suffix array (may return null). */
    public FilenameFilter getFileNameFilter() {
        return getFileNameFilter(getSuffixArray());
    }
    /**
     * FilenameFilter accepting directories and names ending with any suffix.
     * Fixed for consistency with getFileFilter: a null suffix array now
     * returns null instead of a filter that NPEs at accept() time.
     */
    public FilenameFilter getFileNameFilter(final String[] suffixArray) {
        if (null == suffixArray) {
            return null;
        }
        FilenameFilter fileFilter = new FilenameFilter() {
            public boolean accept(File dir, String name) {
                String fName = name.toLowerCase();
                File file = new File(dir, name);
                String temp;
                for (int i = 0, iLength = suffixArray.length; i < iLength; i++) {
                    temp = suffixArray[i];
                    if (file.isDirectory() || (null != temp && fName.endsWith(temp))) {
                        return true;
                    }
                }
                return false;
            }
        };
        return fileFilter;
    }
    /** File name without its extension. */
    public String getFileNamePrefix(File file) {
        return getFileName(file, false);
    }
    /** File extension including the leading dot, or null. */
    public String getFileNameSuffix(File file) {
        return getFileName(file, true);
    }
    /** Absolute paths of the given files. */
    public String[] getFilePathArray(File[] fileArray) {
        return toStringArray(fileArray, false);
    }
    /** Same as the 4-arg getFile with isReplace=true. */
    public File getFileReplace(File file, Object name, String suffix) {
        return getFile(file, name, true, suffix);
    }
    /** Layer whose image path matches the given file's path, or null. */
    public ImageLayer getImageLayer(List<ImageLayer> list, File file) {
        if (null != file) {
            return getImageLayer(list, file.getPath());
        }
        return null;
    }
    /** First layer with the exact depth, or null. */
    public ImageLayer getImageLayer(List<ImageLayer> layerList, int depth) {
        if (null != layerList) {
            ImageLayer imageLayer;
            int depthLayer;
            for (Iterator<ImageLayer> iterator = layerList.iterator(); iterator.hasNext();) {
                imageLayer = iterator.next();
                if (null != imageLayer) {
                    depthLayer = imageLayer.getDepth();
                    if (depthLayer == depth) {
                        return imageLayer;
                    }
                }
            }
        }
        return null;
    }
    /** First layer whose image path ends with fileName, or null. */
    public ImageLayer getImageLayer(List<ImageLayer> list, String fileName) {
        if (null != list && null != fileName) {
            ImageLayer imageLayer;
            String path;
            for (Iterator<ImageLayer> iterator = list.iterator(); iterator.hasNext();) {
                imageLayer = iterator.next();
                if (null != imageLayer) {
                    path = imageLayer.getImagePath();
                    if (null != path && path.endsWith(fileName)) {
                        return imageLayer;
                    }
                }
            }
        }
        return null;
    }
    /** Maximum depth over all layers; -1 for a null list or no layers. */
    public int getImageLayerDepthMax(List<ImageLayer> list) {
        if (null != list) {
            ImageLayer layer;
            int depth;
            int index = -1;
            for (Iterator<ImageLayer> iterator = list.iterator(); iterator.hasNext();) {
                layer = iterator.next();
                if (null != layer) {
                    depth = layer.getDepth();
                    if (depth > index) {
                        index = depth;
                    }
                }
            }
            return index;
        }
        return -1;
    }
    public String[] getSuffixArray() {
        return suffixArray;
    }
    public String getSuffixProperties() {
        return suffixProperties;
    }
    /** True if every array element is an existing directory. */
    public boolean isDirectoryArrayOnly(File[] fileArray) {
        return checkFileArray(fileArray, true);
    }
    /** True if every array element is an existing regular file. */
    public boolean isFileArrayOnly(File[] fileArray) {
        return checkFileArray(fileArray, false);
    }
    /** True if the file name ends with the configured properties suffix. */
    public boolean isFileProperties(File file) {
        String name = file.getName().toLowerCase();
        if (name.endsWith(suffixProperties)) {
            return true;
        }
        return false;
    }
    /**
     * Loads a Properties file, using XML format when the suffix matches
     * {@code suffixProperties}, plain format otherwise; null on error.
     */
    public Properties loadProperties(File file) {
        if (null != file) {
            try {
                Properties properties = new Properties();
                FileInputStream fileInputStream = new FileInputStream(file);
                if (isFileProperties(file)) {
                    properties.loadFromXML(fileInputStream);
                } else {
                    properties.load(fileInputStream);
                }
                fileInputStream.close();
                return properties;
            } catch (InvalidPropertiesFormatException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return null;
    }
    public void setSuffixArray(String[] array) {
        suffixArray = array;
    }
    /** Sets the XML-properties suffix, lower-cased; null is ignored. */
    public void setSuffixProperties(String suffix) {
        if (null != suffix) {
            suffixProperties = suffix.toLowerCase();
        }
    }
    /**
     * Stores properties to XML with the given comment/encoding; falls back to
     * the default encoding when unsupported.
     * NOTE(review): the isDirectory() branch is unreachable -- a non-existing
     * file is never a directory -- and unlike loadProperties this always
     * writes XML regardless of suffix; confirm both are intentional.
     */
    public void storeProperties(File file, Properties properties, String comment, String encoding) {
        if (null == file || null == properties) {
            return;
        }
        if (null == encoding || !Charset.isSupported(encoding)) {
            encoding = encoding_Default;
        }
        try {
            if (!file.exists()) {
                if (file.isDirectory()) {
                    file = getFile(file, suffixProperties);
                }
                file.createNewFile();
            }
            FileOutputStream fileOutputStream = new FileOutputStream(file);
            properties.storeToXML(fileOutputStream, comment, encoding);
            fileOutputStream.close();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    /** Names (isFileName) or absolute paths of the given files. */
    private String[] toStringArray(File[] fileArray, boolean isFileName) {
        int len = fileArray.length;
        String[] array = new String[len];
        for (int i = 0, iLength = len; i < iLength; i++) {
            File file = fileArray[i];
            if (isFileName) {
                array[i] = file.getName();
            } else {
                array[i] = file.getAbsolutePath();
            }
        }
        return array;
    }
}
| 35,592
|
https://github.com/zspitz/Periscope.Debuggee/blob/master/VisualizerObjectSourceBase.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
Periscope.Debuggee
|
zspitz
|
C#
|
Code
| 181
| 521
|
using Microsoft.VisualStudio.DebuggerVisualizers;
using System;
using System.IO;
namespace Periscope.Debuggee {
// Debuggee-side object source shared by Periscope visualizers: sends the
// config key on GetData and serves (config -> response model) round-trips on
// TransferData, reporting failures back as ExceptionData.
public abstract class VisualizerObjectSourceBase<TTarget, TConfig> : VisualizerObjectSource where TConfig : ConfigBase<TConfig> {
    // Last value-type target seen by GetData; TransferData may receive a null
    // target for boxed value types, so it falls back to this (see issue 24).
    private object? value;
    public override void GetData(object target, Stream outgoingData) {
        // Fix for https://github.com/zspitz/Periscope/issues/24
        if (target is ValueType) { value = target; }
        Serialize(outgoingData, GetConfigKey(target));
    }
    public override void TransferData(object? target, Stream incomingData, Stream outgoingData) {
        // Any failure is serialized back as ExceptionData instead of throwing
        // across the debugger boundary.
        void logException(string logMessage, Exception ex) => Serialize(outgoingData, new ExceptionData(ex, logMessage));
        // Fix for https://github.com/zspitz/Periscope/issues/24
        target ??= value;
        TConfig? config;
        try {
            config = (TConfig)Deserialize(incomingData);
        } catch (Exception ex) {
            logException("Deserialize incoming config", ex);
            return;
        }
        TTarget t;
        if (target is TTarget t1) {
            t = t1;
        } else {
            var message =
                target is null ?
                    "Target is null." :
                    $"Target is of type {target.GetType()}; expected {typeof(TTarget)}.";
            logException("Mismatched target type", new Exception(message));
            return;
        }
        object? model;
        try {
            model = GetResponse(t, config!);
        } catch (Exception ex) {
            logException("Get response", ex);
            return;
        }
        try {
            Serialize(outgoingData, model);
        } catch (Exception ex) {
            logException("Serialize outgoing", ex);
            return;
        }
    }
    // Key identifying which config applies to this target; default is "".
    public virtual string GetConfigKey(object target) => "";
    // Computes the visualizer's response model for the given target + config.
    public abstract object GetResponse(TTarget target, TConfig config);
}
}
| 18,414
|
https://github.com/yurivish/plot/blob/master/test/marks/link-test.js
|
Github Open Source
|
Open Source
|
0BSD
| 2,021
|
plot
|
yurivish
|
JavaScript
|
Code
| 211
| 764
|
import * as Plot from "@observablehq/plot";
import tape from "tape-await";
// Default channel/style values of the link mark.
tape("link(data, options) has the expected defaults", test => {
  const link = Plot.link(undefined, {x1: "0", y1: "1", x2: "2", y2: "3"});
  test.strictEqual(link.data, undefined);
  test.strictEqual(link.transform, undefined);
  test.deepEqual(link.channels.map(c => c.name), ["x1", "y1", "x2", "y2"]);
  test.deepEqual(link.channels.map(c => c.value.label), ["0", "1", "2", "3"]);
  test.deepEqual(link.channels.map(c => c.scale), ["x", "y", "x", "y"]);
  test.strictEqual(link.fill, "none");
  test.strictEqual(link.fillOpacity, undefined);
  test.strictEqual(link.stroke, "currentColor");
  test.strictEqual(link.strokeWidth, undefined);
  test.strictEqual(link.strokeOpacity, undefined);
  test.strictEqual(link.strokeLinejoin, undefined);
  test.strictEqual(link.strokeLinecap, undefined);
  test.strictEqual(link.strokeMiterlimit, 1);
  test.strictEqual(link.strokeDasharray, undefined);
  test.strictEqual(link.mixBlendMode, undefined);
});
// A title option becomes an unscaled "title" channel.
tape("link(data, {title}) specifies an optional title channel", test => {
  const link = Plot.link(undefined, {x1: "0", y1: "1", x2: "2", y2: "3", title: "4"});
  const title = link.channels.find(c => c.name === "title");
  test.strictEqual(title.value.label, "4");
  test.strictEqual(title.scale, undefined);
});
// A constant color string is stored directly on the mark.
tape("link(data, {stroke}) allows stroke to be a constant color", test => {
  const link = Plot.link(undefined, {x1: "0", y1: "1", x2: "2", y2: "3", stroke: "red"});
  test.strictEqual(link.stroke, "red");
});
// A null stroke is normalized to undefined.
tape("link(data, {stroke}) allows stroke to be null", test => {
  const link = Plot.link(undefined, {x1: "0", y1: "1", x2: "2", y2: "3", stroke: null});
  test.strictEqual(link.stroke, undefined);
});
// A field accessor becomes a color-scaled "stroke" channel instead.
tape("link(data, {stroke}) allows stroke to be a variable color", test => {
  const link = Plot.link(undefined, {x1: "0", y1: "1", x2: "2", y2: "3", stroke: "4"});
  test.strictEqual(link.stroke, undefined);
  const stroke = link.channels.find(c => c.name === "stroke");
  test.strictEqual(stroke.value.label, "4");
  test.strictEqual(stroke.scale, "color");
});
| 40,708
|
https://github.com/louib/panbuild/blob/master/panbuild/modules.rs
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,021
|
panbuild
|
louib
|
Rust
|
Code
| 523
| 1,507
|
use serde::{Deserialize, Serialize};
/// Operating systems a software module may target.
pub enum OS {
    Bsd,
    Mac,
    Ios,
    Linux,
    Android,
    Symbian,
    // Add RT Oses??
    // Add misc Oses like calculators and PAs???
}
// TODO Should we allow those systems to be available
// when the generated manifest will be used? We could
// consider optionally downloading those dependencies
// to ensure the version of the build system...
/// Known build/packaging systems a module may use.
#[derive(Debug, Serialize, Deserialize)]
pub enum BuildSystem {
    Make,
    Cmake,
    Qmake,
    Autotools,
    Meson,
    Cargo,
    Maven,
    Xcode,
    Npm,
    // if ever http://git.savannah.gnu.org/cgit/bash.git
    // git@github.com:bminor/bash.git
    Bash,
    Pip2,
    Pip3,
    // if ever git@github.com:PowerShell/PowerShell.git
    // powershell,
    Manual,
    // if ever git@github.com:apple/swift.git.
    Swift,
    Apt,
    // perl ??
    Gem,
    // simple?
    // haskell??
    // LaTeX??
    // mono??
    Unknown,
}
impl BuildSystem {
    /// Guesses the build system from a well-known file-name suffix,
    /// falling back to `DEFAULT_BUILD_SYSTEM` when nothing matches.
    pub fn get_build_system(path: &str) -> BuildSystem {
        let has_suffix = |suffix: &str| path.ends_with(suffix);
        if has_suffix("meson_options.txt") {
            BuildSystem::Meson
        } else if has_suffix("control") {
            BuildSystem::Apt
        } else if has_suffix("package.json") {
            BuildSystem::Npm
        } else if has_suffix("Gemfile") {
            BuildSystem::Gem
        } else if has_suffix("requirements.txt") {
            // We could also default to pip2...
            BuildSystem::Pip3
        } else if has_suffix("Makefile") {
            BuildSystem::Make
        } else {
            // ".spec" (RPM) files are recognized upstream but have no
            // dedicated variant yet:
            // return crate::manifests::manifest::BuildSystem::Fedora;
            DEFAULT_BUILD_SYSTEM
        }
    }
}
/// Fallback build system when detection fails.
pub const DEFAULT_BUILD_SYSTEM: BuildSystem = BuildSystem::Unknown;
impl Default for BuildSystem {
    fn default() -> Self {
        DEFAULT_BUILD_SYSTEM
    }
}
/// Kinds of sources a module can be fetched from (VCS, archives, packages).
#[derive(Debug, Serialize, Deserialize)]
pub enum SourceType {
    Bzr,
    Deb,
    Git,
    Local,
    Mercurial,
    Rpm,
    Subversion,
    Svn,
    Tar,
    Tarball,
    Zip,
    // 7z
    Sevenzip,
    Unknown,
}
/// Fallback source type when detection fails.
pub const DEFAULT_SOURCE_TYPE: SourceType = SourceType::Unknown;
impl Default for SourceType {
    fn default() -> Self {
        DEFAULT_SOURCE_TYPE
    }
}
/// Broad categories of software modules.
#[derive(Debug, Serialize, Deserialize)]
pub enum ModuleType {
    CLIApp,
    GUIApp,
    Lib,
    Driver,
    Daemon,
    Kernel,
    Plugin,
    Runtime,
    Emulator,
    Compiler,
    Bootloader,
    Firmware,
    Media,
    Unknown,
}
// NOTE(review): the default is Lib rather than Unknown (unlike the other
// enums in this module) -- confirm that is intentional.
pub const DEFAULT_MODULE_TYPE: ModuleType = ModuleType::Lib;
impl Default for ModuleType {
    fn default() -> Self {
        DEFAULT_MODULE_TYPE
    }
}
#[derive(Default, Debug, Serialize, Deserialize)]
/// Generic representation of a software module.
pub struct SoftwareModule {
    pub name: String,
    pub project_id: Option<String>,
    // The version of the current module.
    pub version: String,
    // The tag associated with the module, if any.
    pub tag: String,
    // The hash of the commit associated with the module, if any.
    pub commit: String,
    pub module_type: ModuleType,
    pub download_urls: Vec<String>,
    pub url: String,
    pub build_system: BuildSystem,
    pub archive_checksum: String,
    pub source_checksum: String,
    // When we have reproducible builds.
    pub executable_checksum: String,
    // Fields mostly taken from the Flatpak manifest.
    // NOTE(review): the old comment here described an "array of files and
    // directories to cleanup after installing", but no such field exists --
    // presumably it was removed; confirm against the Flatpak manifest model.
    pub config_options: Vec<String>,
    pub build_commands: Vec<String>,
    pub install_instructions: String,
    pub install_path: String,
}
impl SoftwareModule {
    /// Returns the most specific identifier available for this module:
    /// the version, then the tag, then the commit hash; "" when none is set.
    pub fn get_identifier(&self) -> &str {
        // Idiom fix: use is_empty() (clippy::len_zero) instead of len() != 0,
        // and drop the redundant trailing `return`.
        if !self.version.is_empty() {
            return self.version.as_str();
        }
        if !self.tag.is_empty() {
            return self.tag.as_str();
        }
        if !self.commit.is_empty() {
            return self.commit.as_str();
        }
        ""
    }
}
/// An executable shipped by a module (app, daemon, etc.).
#[derive(Default, Debug, Serialize, Deserialize)]
pub struct AbstractExecutable {
    pub name: String,
    pub path: String,
    // Whether this executable has a desktop entry.
    pub is_desktop: bool,
    // Whether this executable runs as a background daemon.
    pub is_daemon: bool,
    // Whether or not this is the primary executable of the bundle.
    pub is_primary: bool,
    pub icon_path: String,
}
/// A permission requested by a module, tied to an API category.
#[derive(Default, Debug, Serialize, Deserialize)]
pub struct AbstractPermission {
    pub name: String,
    pub description: String,
    pub api_type: APIType,
}
/// Categories of system APIs a permission can grant access to.
#[derive(Debug, Serialize, Deserialize)]
pub enum APIType {
    Dbus,
    Files,
    Socket,
    Camera,
    Mic,
    Gps,
    Unknown,
}
/// Fallback API type when detection fails.
pub const DEFAULT_API_TYPE: APIType = APIType::Unknown;
impl Default for APIType {
    fn default() -> Self {
        DEFAULT_API_TYPE
    }
}
| 40,971
|
https://github.com/mukasaj/Ex-assemblyline-client/blob/master/test/test_file.py
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
Ex-assemblyline-client
|
mukasaj
|
Python
|
Code
| 266
| 1,181
|
import hashlib
import os

try:
    import cart
    from utils import random_id_from_collection
except ImportError:
    # These helpers are Python 3 only: under Python 2 the imports above fail,
    # so mark the whole module as skipped instead of erroring out. Any other
    # import failure is re-raised unchanged.
    import pytest
    import sys

    if sys.version_info < (3, 0):
        pytestmark = pytest.mark.skip
    else:
        raise
def test_children(datastore, client):
    # Query for a submission with at least two files so its first file is
    # expected to have at least one child.
    submission_id = random_id_from_collection(datastore, 'submission', q="file_count:[2 TO *]")
    submission_data = datastore.submission.get(submission_id)
    file_id = submission_data.files[0].sha256
    res = client.file.children(file_id)
    assert len(res) >= 1


def test_ascii(datastore, client):
    # The ASCII view of any stored file should be non-empty.
    file_id = random_id_from_collection(datastore, 'file')
    res = client.file.ascii(file_id)
    assert len(res) >= 1


def test_hex(datastore, client):
    # Hex dumps always start with the zero offset marker.
    file_id = random_id_from_collection(datastore, 'file')
    res = client.file.hex(file_id)
    assert res.startswith('00000000:')


def test_strings(datastore, client):
    # The extracted-strings view should return at least one entry.
    file_id = random_id_from_collection(datastore, 'file')
    res = client.file.strings(file_id)
    assert len(res) >= 1
# noinspection PyUnusedLocal
def test_download_to_obj(datastore, client):
    # Default downloads are CaRT-encoded; CaRT files begin with b"CART".
    file_id = random_id_from_collection(datastore, 'file')
    res = client.file.download(file_id)
    assert res[:4] == b"CART"


# noinspection PyUnusedLocal
def test_download_to_obj_raw(datastore, client):
    # With raw encoding the original bytes come back, so their sha256
    # digest must equal the file's id.
    file_id = random_id_from_collection(datastore, 'file')
    res = client.file.download(file_id, encoding="raw")
    assert hashlib.sha256(res).hexdigest() == file_id


# noinspection PyUnusedLocal
def test_download_to_file(datastore, client):
    # Downloading to a path writes a CaRT file whose metadata carries the sha256.
    file_id = random_id_from_collection(datastore, 'file')
    download_output = "/tmp/download_{}".format(file_id)
    try:
        client.file.download(file_id, output=download_output)
        assert open(download_output, 'rb').read(4) == b"CART"
        metadata = cart.get_metadata_only(download_output)
        assert file_id == metadata['sha256']
    finally:
        os.unlink(download_output)


# noinspection PyUnusedLocal
def test_download_to_file_handle(datastore, client):
    # The output parameter also accepts an already-open binary file handle.
    file_id = random_id_from_collection(datastore, 'file')
    download_output = "/tmp/download_{}_fobj".format(file_id)
    try:
        client.file.download(file_id, output=open(download_output, "wb"))
        assert open(download_output, 'rb').read(4) == b"CART"
    finally:
        os.unlink(download_output)
# noinspection PyUnusedLocal
def test_info(datastore, client):
    # file.info returns the stored record for the requested hash.
    file_id = random_id_from_collection(datastore, 'file')
    res = client.file.info(file_id)
    assert res['sha256'] == file_id


# noinspection PyUnusedLocal
def test_result(datastore, client):
    file_id = random_id_from_collection(datastore, 'file')
    res = client.file.result(file_id)
    assert res['file_info']['sha256'] == file_id


# noinspection PyUnusedLocal
def test_result_for_service(datastore, client):
    # Result ids look like "<sha256>.<service_name>.<rest>"; reuse those
    # parts to request the result for one specific service.
    result_id = random_id_from_collection(datastore, 'result')
    file_id, service_name, _ = result_id.split('.', 2)
    res = client.file.result(file_id, service=service_name)
    assert res['file_info']['sha256'] == file_id
    assert res['results'][0]['response']['service_name'] == service_name


# noinspection PyUnusedLocal
def test_score(datastore, client):
    # Every result key returned by file.score must belong to the scored
    # file: keys are prefixed by the 64-character sha256.
    file_id = random_id_from_collection(datastore, 'file')
    res = client.file.score(file_id)
    assert res['file_info']['sha256'] == file_id
    for k in res['result_keys']:
        assert k[:64] == file_id
| 21,658
|
https://github.com/JGeraldoLima/pos-facisa-nativescript/blob/master/NASAImageryNSSample/app/app-routing.module.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
pos-facisa-nativescript
|
JGeraldoLima
|
TypeScript
|
Code
| 138
| 448
|
import { NgModule, OnInit } from "@angular/core";
import { Routes } from "@angular/router";
import { RouterExtensions } from "nativescript-angular";
import { NativeScriptRouterModule } from "nativescript-angular/router";
import firebase = require("nativescript-plugin-firebase");
// Application route table. Every feature page is lazy-loaded using the
// "<module path>#<ModuleClass>" loadChildren string syntax.
const routes: Routes = [
    // see how to change default path at runtime
    { path: "", redirectTo: "/login", pathMatch: "full" },
    { path: "login", loadChildren: "./pages/login-page/login-page.module#LoginPageModule" },
    { path: "registry", loadChildren: "./pages/registry-page/registry-page.module#RegistryPageModule" },
    { path: "home", loadChildren: "./pages/home/home.module#HomeModule" },
    { path: "searchs", loadChildren: "./pages/searchs-page/searchs-page.module#SearchsModule" }
];
@NgModule({
    imports: [NativeScriptRouterModule.forRoot(routes)],
    exports: [NativeScriptRouterModule]
})
export class AppRoutingModule {
    // NgModule classes never receive component lifecycle hooks such as
    // ngOnInit (those only run on components/directives), so the previous
    // ngOnInit-based initialization never executed. Angular does invoke the
    // constructor when the module is instantiated, so run it from there.
    constructor(private router: RouterExtensions) {
        this.initFirebase();
    }

    // Initializes Firebase and redirects to /home whenever an
    // authenticated session is detected.
    private initFirebase(): void {
        firebase
            .init({
                persist: true,
                onAuthStateChanged: (data: any) => {
                    console.log("auth stage changed: " + JSON.stringify(data));
                    if (data.loggedIn) {
                        this.router.navigate(["/home"], {clearHistory: true});
                    }
                }
            }).then(
                (instance) => {
                    console.log("firebase.init done");
                },
                (error) => {
                    console.log("firebase.init error: " + error);
                }
            );
    }
}
| 10,801
|
https://github.com/alex-titarenko/testcheck/blob/master/Testcheck.Tester/Behaviours/BrowserBehavior.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
testcheck
|
alex-titarenko
|
C#
|
Code
| 77
| 283
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
namespace TAlex.Testcheck.Tester.Behaviours
{
public class BrowserBehavior
{
public static readonly DependencyProperty HtmlProperty = DependencyProperty.RegisterAttached(
"Html",
typeof(string),
typeof(BrowserBehavior),
new FrameworkPropertyMetadata(OnHtmlChanged));
[AttachedPropertyBrowsableForType(typeof(WebBrowser))]
public static string GetHtml(WebBrowser d)
{
return (string)d.GetValue(HtmlProperty);
}
public static void SetHtml(WebBrowser d, string value)
{
d.SetValue(HtmlProperty, value);
}
static void OnHtmlChanged(DependencyObject dependencyObject, DependencyPropertyChangedEventArgs e)
{
WebBrowser webBrowser = dependencyObject as WebBrowser;
if (webBrowser != null)
webBrowser.NavigateToString(e.NewValue as string);
}
}
}
| 27,585
|
https://github.com/skynixukraine/Clodify-Business-CRM/blob/master/modules/api/view-models/ContractCreate.php
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| null |
Clodify-Business-CRM
|
skynixukraine
|
PHP
|
Code
| 86
| 352
|
<?php
/**
* Created by Skynix Team
* Date: 20.04.17
* Time: 14:09
*/
namespace viewModel;
use Yii;
use app\components\DateUtil;
use app\models\User;
use app\modules\api\components\Api\Processor;
class ContractCreate extends ViewModelAbstract
{
    /**
     * Creates a contract from the bound model.
     *
     * Only admin/finance/sales roles may create contracts; the three date
     * fields are normalised through DateUtil before saving. On success the
     * new contract id is placed in the response data.
     *
     * @deprecated emits E_USER_DEPRECATED on every call
     */
    public function define()
    {
        trigger_error('Method ' . Yii::$app->controller->action->id . ' is deprecated', E_USER_DEPRECATED);

        // Record who created the contract before validation/save.
        $this->model->created_by = Yii::$app->user->id;

        if (User::hasPermission([User::ROLE_ADMIN, User::ROLE_FIN, User::ROLE_SALES])){
            if ($this->validate()) {
                $this->model->start_date = DateUtil::convertData($this->model->start_date);
                $this->model->end_date = DateUtil::convertData($this->model->end_date);
                $this->model->act_date = DateUtil::convertData($this->model->act_date);
                $this->model->save();

                $this->setData([
                    'contract_id' => $this->model->id
                ]);
            }
        } else {
            return $this->addError(Processor::ERROR_PARAM, 'You have no permission for this action');
        }
    }
}
| 30,869
|
https://github.com/jpverkamp/schempy/blob/master/globals/__init__.py
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,015
|
schempy
|
jpverkamp
|
Python
|
Code
| 20
| 93
|
import os

# Files defining the global environment. They are executed in order, so
# later files may rely on bindings created by earlier ones.
files = [
    'environment.py',
    'lambda.py',
    'mathematical.py',
    'logical.py',
    'lists.py',
    'predicates.py',
    'control.py',
    'values.py',
]

for name in files:
    # execfile() was removed in Python 3; compile()+exec() is the portable
    # equivalent and, at module top level, executes in this module's
    # namespace just like execfile's default behaviour did.
    path = os.path.join('globals', name)
    with open(path) as handle:
        exec(compile(handle.read(), path, 'exec'))
| 31,051
|
https://github.com/armutcom/iyzipay-dotnet-client/blob/master/src/Iyzipay/Model/CrossBookingToSubMerchant.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
iyzipay-dotnet-client
|
armutcom
|
C#
|
Code
| 55
| 235
|
using System.Threading.Tasks;
using Armut.Iyzipay.Request;
namespace Armut.Iyzipay.Model
{
    /// <summary>
    /// Response wrapper for cross-booking (fund transfer) requests sent to a
    /// sub-merchant through iyzico's /crossbooking/send endpoint.
    /// </summary>
    public class CrossBookingToSubMerchant : IyzipayResource
    {
        private const string CrossBookingToSubMerchantUrl = "/crossbooking/send";

        /// <summary>Sends the cross-booking request synchronously.</summary>
        public static CrossBookingToSubMerchant Create(CreateCrossBookingRequest request, Options options)
        {
            return RestHttpClient.Instance.Post<CrossBookingToSubMerchant>(options.BaseUrl + CrossBookingToSubMerchantUrl, GetHttpHeaders(request, options), request);
        }

        /// <summary>Sends the cross-booking request asynchronously.</summary>
        public static async Task<CrossBookingToSubMerchant> CreateAsync(CreateCrossBookingRequest request, Options options)
        {
            return await RestHttpClient.Instance.PostAsync<CrossBookingToSubMerchant>(options.BaseUrl + CrossBookingToSubMerchantUrl, GetHttpHeaders(request, options), request);
        }
    }
| 29,447
|
https://github.com/shaan1337/EventStore/blob/master/src/EventStore.Projections.Core.Tests/Services/core_projection/projection_checkpoint/when_handling_stream_awaiting_message.cs
|
Github Open Source
|
Open Source
|
MIT, BSD-3-Clause, Apache-2.0
| 2,017
|
EventStore
|
shaan1337
|
C#
|
Code
| 89
| 602
|
using EventStore.Core.Tests.Services.Replication;
using EventStore.Projections.Core.Messages;
using EventStore.Projections.Core.Services.Processing;
using NUnit.Framework;
namespace EventStore.Projections.Core.Tests.Services.core_projection.projection_checkpoint
{
    [TestFixture]
    public class when_handling_stream_awaiting_message : TestFixtureWithExistingEvents
    {
        private ProjectionCheckpoint _checkpoint;
        private TestCheckpointManagerMessageHandler _readyHandler;
        // Envelope registered by the awaiting stream; replies delivered to it
        // are the observable output under test.
        private FakeEnvelope _fakeEnvelope;

        [SetUp]
        public void setup()
        {
            _readyHandler = new TestCheckpointManagerMessageHandler();
            _checkpoint = new ProjectionCheckpoint(
                _ioDispatcher, new ProjectionVersion(1, 0, 0), null, _readyHandler,
                CheckpointTag.FromPosition(0, 100, 50), new TransactionFilePositionTagger(0), 250);
            _fakeEnvelope = new FakeEnvelope();
            // Register one stream as awaiting before each test runs.
            _checkpoint.Handle(new CoreProjectionProcessingMessage.EmittedStreamAwaiting("awaiting_stream", _fakeEnvelope));
        }

        [Test]
        public void broadcasts_write_completed_to_awaiting_streams()
        {
            _checkpoint.Handle(new CoreProjectionProcessingMessage.EmittedStreamWriteCompleted("completed_stream"));
            Assert.AreEqual(1, _fakeEnvelope.Replies.Count);
            Assert.IsInstanceOf<CoreProjectionProcessingMessage.EmittedStreamWriteCompleted>(_fakeEnvelope.Replies[0]);
        }

        [Test]
        public void does_not_broadcast_second_write_completed_to_awaiting_streams()
        {
            // Only the first completion should be forwarded; the awaiting
            // registration is consumed by that first notification.
            _checkpoint.Handle(new CoreProjectionProcessingMessage.EmittedStreamWriteCompleted("completed_stream1"));
            _checkpoint.Handle(new CoreProjectionProcessingMessage.EmittedStreamWriteCompleted("completed_stream2"));
            Assert.AreEqual(1, _fakeEnvelope.Replies.Count);
            Assert.IsInstanceOf<CoreProjectionProcessingMessage.EmittedStreamWriteCompleted>(_fakeEnvelope.Replies[0]);
            Assert.AreEqual("completed_stream1", ((CoreProjectionProcessingMessage.EmittedStreamWriteCompleted)_fakeEnvelope.Replies[0]).StreamId);
        }
    }
}
| 47,762
|
https://github.com/Cheesebaron/MavenRepoBrowser/blob/master/MavenRepoBrowser/ArtifactProjectPage.xaml.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
MavenRepoBrowser
|
Cheesebaron
|
C#
|
Code
| 62
| 205
|
using System;
using System.Collections.Generic;
using MavenNet.Models;
using Xamarin.Forms;
namespace MavenRepoBrowser
{
    public partial class ArtifactProjectPage : ContentPage
    {
        public ArtifactProjectPage(Project project, string version)
        {
            InitializeComponent();

            // NOTE(review): the title uses project.Version while the view model
            // receives the `version` argument — confirm whether the title was
            // meant to show the selected version instead.
            Title = project.Name + " - " + project.Version;

            viewModel = new ViewModels.ArtifactProjectViewModel(Navigation, project, version);
            BindingContext = viewModel;
        }

        ViewModels.ArtifactProjectViewModel viewModel;

        // Invokes the tapped item's command; vm may be null when the selection
        // is cleared or the item is of a different type, hence the null-check.
        void Handle_ItemSelected(object sender, Xamarin.Forms.SelectedItemChangedEventArgs e)
        {
            var vm = e.SelectedItem as ViewModels.ProjectActionItemViewModel;
            vm?.Command();
        }
    }
| 34,456
|
https://github.com/JuGoo/android-library/blob/master/urbanairship-core/src/test/java/com/urbanairship/remotedata/RemoteDataPayloadTest.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
android-library
|
JuGoo
|
Java
|
Code
| 160
| 575
|
/* Copyright Airship and Contributors */
package com.urbanairship.remotedata;
import com.urbanairship.BaseTestCase;
import com.urbanairship.json.JsonList;
import com.urbanairship.json.JsonMap;
import com.urbanairship.json.JsonValue;
import com.urbanairship.util.DateUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import java.util.Set;
public class RemoteDataPayloadTest extends BaseTestCase {

    private JsonValue payloads;
    private JsonValue payload;
    private String timestamp;
    private JsonMap data;
    private JsonMap metadata;

    @Before
    public void setup() {
        timestamp = DateUtils.createIso8601TimeStamp(System.currentTimeMillis());
        data = JsonMap.newBuilder().put("foo", "bar").build();
        payload = JsonMap.newBuilder().put("type", "test").put("timestamp", timestamp).put("data", data).build().toJsonValue();
        payloads = new JsonList(Arrays.asList(payload)).toJsonValue();
        // Use metadata that differs from the payload data: when the two maps
        // were identical, the broken metadata assertion in verifyPayload
        // passed by accident and tested nothing.
        metadata = JsonMap.newBuilder().put("meta", "data").build();
    }

    @Test
    public void testParsePayload() throws Exception {
        RemoteDataPayload parsedPayload = RemoteDataPayload.parsePayload(payload, metadata);
        verifyPayload(parsedPayload);
    }

    @Test
    public void testParsePayloads() {
        Set<RemoteDataPayload> parsedPayloads = RemoteDataPayload.parsePayloads(payloads, metadata);
        Assert.assertEquals("Parsed payloads should have a size of one", parsedPayloads.size(), 1);
        for (RemoteDataPayload parsedPayload : parsedPayloads) {
            verifyPayload(parsedPayload);
        }
    }

    // Asserts every field of a parsed payload against the fixtures from setup().
    private void verifyPayload(RemoteDataPayload parsedPayload) {
        Assert.assertEquals("Payload should have type 'test'", parsedPayload.getType(), "test");
        Assert.assertEquals("Payload should have timestamp: " + timestamp, DateUtils.createIso8601TimeStamp(parsedPayload.getTimestamp()), timestamp);
        Assert.assertEquals("Payload should have data: " + data, parsedPayload.getData(), data);
        // Bug fix: this previously compared getData() against the metadata
        // fixture instead of getMetadata().
        Assert.assertEquals("Payload should have metadata: " + metadata, parsedPayload.getMetadata(), metadata);
    }
}
| 9,349
|
https://github.com/BinarySerializer/BinarySerializer.PS2/blob/master/src/GS/Registers/GSReg_TEX0_1.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
BinarySerializer.PS2
|
BinarySerializer
|
C#
|
Code
| 184
| 584
|
namespace BinarySerializer.PS2
{
    /// <summary>
    /// TEX0_1 register: texture information for context 1. The bit widths
    /// below follow the 64-bit TEX0 layout from the GS User's Manual.
    /// </summary>
    public class GSReg_TEX0_1 : GSRegister
    {
        public override GSRegisters RegisterByte => GSRegisters.TEX0_1;

        // NOTE(review): in the GS manual this 14-bit field is named TBP0
        // (Texture Base Pointer); the property name looks transposed, but
        // renaming a public property would break consumers, so it stays.
        public ushort TPB0 { get; set; }
        // Texture buffer width.
        public byte TBW { get; set; }
        // Pixel storage format of the texture.
        public GS.PixelStorageMode PSM { get; set; }
        // Texture width/height, stored as log2 values per the GS manual.
        public byte TW { get; set; }
        public byte TH { get; set; }
        // Texture colour component flag (RGB vs RGBA).
        public bool TCC { get; set; }
        // Texture function (how texels combine with fragment colour).
        public GS.TextureFunction TFX { get; set; }
        // CLUT buffer base pointer.
        public ushort CBP { get; set; }
        // CLUT pixel storage format.
        public GS.CLUTPixelStorageMode CPSM { get; set; }
        // CLUT storage mode.
        public GS.ColorStorageMode CSM { get; set; }
        // CLUT entry offset.
        public byte CSA { get; set; }
        // CLUT buffer load control.
        public byte CLD { get; set; }

        public override void SerializeRegisterImpl(SerializerObject s)
        {
            // Fields are packed into a single 64-bit value, LSB first.
            s.DoBits<long>(b =>
            {
                TPB0 = b.SerializeBits<ushort>(TPB0, 14, name: nameof(TPB0));
                TBW = b.SerializeBits<byte>(TBW, 6, name: nameof(TBW));
                PSM = b.SerializeBits<GS.PixelStorageMode>(PSM, 6, name: nameof(PSM));
                TW = b.SerializeBits<byte>(TW, 4, name: nameof(TW));
                TH = b.SerializeBits<byte>(TH, 4, name: nameof(TH));
                TCC = b.SerializeBits<bool>(TCC, 1, name: nameof(TCC));
                TFX = b.SerializeBits<GS.TextureFunction>(TFX, 2, name: nameof(TFX));
                CBP = b.SerializeBits<ushort>(CBP, 14, name: nameof(CBP));
                CPSM = b.SerializeBits<GS.CLUTPixelStorageMode>(CPSM, 4, name: nameof(CPSM));
                CSM = b.SerializeBits<GS.ColorStorageMode>(CSM, 1, name: nameof(CSM));
                CSA = b.SerializeBits<byte>(CSA, 5, name: nameof(CSA));
                CLD = b.SerializeBits<byte>(CLD, 3, name: nameof(CLD));
            });
        }
    }
| 20,969
|
https://github.com/stevewgh/Topshelf.Leader/blob/master/src/Topshelf.Leader/LeaderConfigurationBuilder.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
Topshelf.Leader
|
stevewgh
|
C#
|
Code
| 210
| 778
|
using System;
using System.Threading;
using System.Threading.Tasks;
using Topshelf.Leader.InMemory;
namespace Topshelf.Leader
{
public class LeaderConfigurationBuilder<T>
{
private Func<T, CancellationToken, Task> whenStarted;
private string nodeId;
private CancellationTokenSource serviceIsStopping;
private Action<bool> whenLeaderIsElected;
private Action<LeaseConfigurationBuilder> leaseManagerAction = builder => builder.WithInMemoryLeaseManager();
private TimeSpan heartBeatInterval = TimeSpan.FromSeconds(30);
private Func<bool, CancellationToken, Task> onHeartBeat = (b, token) => Task.FromResult(true);
public LeaderConfigurationBuilder()
{
nodeId = Guid.NewGuid().ToString();
whenLeaderIsElected = b => { };
serviceIsStopping = new CancellationTokenSource();
}
public LeaderConfigurationBuilder<T> Lease(Action<LeaseConfigurationBuilder> action)
{
leaseManagerAction = action ?? throw new ArgumentNullException(nameof(action));
return this;
}
public LeaderConfigurationBuilder<T> WhenStarted(Func<T, CancellationToken, Task> startup)
{
whenStarted = startup ?? throw new ArgumentNullException(nameof(startup));
return this;
}
public LeaderConfigurationBuilder<T> SetNodeId(string id)
{
nodeId = id;
return this;
}
public LeaderConfigurationBuilder<T> WhenLeaderIsElected(Action<bool> leaderElection)
{
whenLeaderIsElected = leaderElection ?? throw new ArgumentNullException(nameof(leaderElection));
return this;
}
public LeaderConfigurationBuilder<T> WithHeartBeat(TimeSpan heartBeatInterval, Func<bool, CancellationToken, Task> onHeartBeat)
{
this.heartBeatInterval = heartBeatInterval;
this.onHeartBeat = onHeartBeat ?? throw new ArgumentNullException(nameof(onHeartBeat));
return this;
}
internal bool ServiceStoppingTokenIsSet { get; private set; }
internal LeaderConfigurationBuilder<T> WhenStopping(CancellationTokenSource serviceStopping)
{
serviceIsStopping = serviceStopping;
ServiceStoppingTokenIsSet = true;
return this;
}
public LeaderConfiguration<T> Build()
{
if (whenStarted == null)
{
throw new HostConfigurationException($"{nameof(WhenStarted)} must be provided.");
}
var leaseManagerBuilder = new LeaseConfigurationBuilder(nodeId);
leaseManagerAction(leaseManagerBuilder);
var leaseManagerConfiguration = leaseManagerBuilder.Build();
return new LeaderConfiguration<T>(
whenStarted,
nodeId,
leaseManagerConfiguration.LeaseManager(leaseManagerConfiguration),
leaseManagerConfiguration,
serviceIsStopping,
whenLeaderIsElected,
heartBeatInterval,
onHeartBeat);
}
}
}
| 35,848
|
https://github.com/nstone101/lararepair/blob/master/app/Helpers/Helper.php
|
Github Open Source
|
Open Source
|
MIT
| null |
lararepair
|
nstone101
|
PHP
|
Code
| 1,782
| 5,153
|
<?php
if (!function_exists('dataTable')) {
function dataTable()
{
return app('dataTable');
}
}
/*
 * Used to write in .env file
 * @param
 *  $data as array of .env key & value
 * @return bool — false when nothing changed, true after the file is rewritten
 */
function envu($data = array())
{
    // Drop keys whose value already matches the live environment so the
    // file is only rewritten when something actually changes.
    foreach ($data as $key => $value) {
        if (env($key) === $value) {
            unset($data[$key]);
        }
    }

    if (!count($data)) {
        return false;
    }

    // write only if there is change in content
    $env = file_get_contents(base_path() . '/.env');
    $env = explode("\n", $env);

    // Replace matching KEY=... lines in place; string values get re-quoted.
    // NOTE(review): keys not already present in .env are silently ignored —
    // confirm whether missing keys should be appended instead.
    foreach ((array)$data as $key => $value) {
        foreach ($env as $env_key => $env_value) {
            $entry = explode("=", $env_value, 2);
            if ($entry[0] === $key) {
                $env[$env_key] = $key . "=" . (is_string($value) ? '"'.$value.'"' : $value);
            } else {
                $env[$env_key] = $env_value;
            }
        }
    }

    $env = implode("\n", $env);
    file_put_contents(base_path() . '/.env', $env);
    return true;
}
//////////////////////////////////////////////////////////////////////// Date helper function starts
/*
* Used to check whether date is valid or not
* @param
* $date as timestamp or date variable
* @return true if valid date, else if not
*/
function validateDate($date)
{
$d = DateTime::createFromFormat('Y-m-d', $date);
return $d && $d->format('Y-m-d') === $date;
}
/*
* Used to get date with start midnight time
* @param
* $date as timestamp or date variable
* @return date with start midnight time
*/
function getStartOfDate($date)
{
return date('Y-m-d', strtotime($date)).' 00:00';
}
/*
* Used to get date with end midnight time
* @param
* $date as timestamp or date variable
* @return date with end midnight time
*/
function getEndOfDate($date)
{
return date('Y-m-d', strtotime($date)).' 23:59';
}
/*
* Used to get date in desired format
* @return date format
*/
function decode_html($str)
{
return html_entity_decode($str, ENT_QUOTES | ENT_XHTML | ENT_HTML5, 'UTF-8');
}
function base64url_decode($data)
{
    // Reverse the URL-safe substitution, then let base64_decode handle any
    // missing '=' padding (it tolerates unpadded input in non-strict mode).
    return base64_decode(strtr($data, '-_', '+/'));
}
function base64url_encode($data, $pad = null)
{
    // Standard base64, then swap the two URL-unsafe characters for their
    // RFC 4648 "base64url" counterparts.
    $encoded = strtr(base64_encode($data), '+/', '-_');

    // Padding is stripped unless the caller asks for it to be kept.
    if (!$pad) {
        $encoded = rtrim($encoded, '=');
    }
    return $encoded;
}
function getDateFormat()
{
if (config('config.date_format') === 'DD-MM-YYYY') {
return 'd-m-Y';
} elseif (config('config.date_format') === 'MM-DD-YYYY') {
return 'm-d-Y';
} elseif (config('config.date_format') === 'DD-MMM-YYYY') {
return 'd-M-Y';
} elseif (config('config.date_format') === 'MMM-DD-YYYY') {
return 'M-d-Y';
} else {
return 'd-m-Y';
}
}
/*
* Used to convert date for database
* @param
* $date as date
* @return date
*/
function toDate($date)
{
if (!$date) {
return;
}
return date('Y-m-d', strtotime($date));
}
/*
* Used to convert date in desired format
* @param
* $date as date
* @return date
*/
function showDate($date)
{
if (!$date) {
return;
}
$date_format = getDateFormat();
return date($date_format, strtotime($date));
}
/*
* Used to convert time in desired format
* @param
* $datetime as datetime
* @return datetime
*/
function showDateTime($time = '')
{
if (!$time) {
return;
}
$date_format = getDateFormat();
if (config('config.time_format') === 'H:mm') {
return date($date_format.',H:i', strtotime($time));
} else {
return date($date_format.',h:i a', strtotime($time));
}
}
/*
* Used to convert time in desired format
* @param
* $time as time
* @return time
*/
function showTime($time = '')
{
if (!$time) {
return;
}
if (config('config.time_format') === 'H:mm') {
return date('H:i', strtotime($time));
} else {
return date('h:i a', strtotime($time));
}
}
//////////////////////////////////////////////////////////////////////// Date helper function ends
//////////////////////////////////////////////////////////////////////// String helper function starts
/*
* Used to convert slugs into human readable words
* @param
* $word as string
* @return string
*/
function toWord($word)
{
    // Translate both separator styles to spaces in one pass, then
    // capitalise the first letter of every word.
    $spaced = strtr($word, ['_' => ' ', '-' => ' ']);
    return ucwords($spaced);
}
/*
* Used to generate random string of certain lenght
* @param
* $length as numeric
* $type as optional param, can be token or password or username. Default is token
* @return random string
*/
function randomString($length, $type = 'token')
{
if ($type === 'password') {
$chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()_-=+;:,.?";
} elseif ($type === 'username') {
$chars = "abcdefghijklmnopqrstuvwxyz0123456789";
} else {
$chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
}
$token = substr(str_shuffle($chars), 0, $length);
return $token;
}
/*
* Used to whether string contains unicode
* @param
* $string as string
* @return boolean
*/
function checkUnicode($string)
{
if (strlen($string) != strlen(utf8_decode($string))) {
return true;
} else {
return false;
}
}
/*
* Used to generate slug from string
* @param
* $string as string
* @return slug
*/
function createSlug($string)
{
if (checkUnicode($string)) {
$slug = str_replace(' ', '-', $string);
} else {
$slug = preg_replace('/[^A-Za-z0-9-]+/', '-', strtolower($string));
}
return $slug;
}
/*
* Used to remove script tag from input
* @param
* $string as string
* @return slug
*/
function scriptStripper($string)
{
    // Remove <script ...>...</script> blocks, case-insensitively and
    // across newlines (the /s modifier lets '.' match line breaks).
    $pattern = '#<script(.*?)>(.*?)</script>#is';
    return preg_replace($pattern, '', $string);
}
//////////////////////////////////////////////////////////////////////////////////// String helper function ends
//////////////////////////////////////////////////////////////////////////////////// Select helper function starts
/*
* Used to generate select option for vue.js multiselect plugin
* @param
* $data as array of key & value pair
* @return select options
*/
function generateSelectOption($data)
{
$options = array();
foreach ($data as $key => $value) {
$options[] = ['name' => $value, 'id' => $key];
}
return $options;
}
/*
* Used to generate translated select option for vue.js multiselect plugin
* @param
* $data as array of key & value pair
* @return select options
*/
function generateTranslatedSelectOption($data)
{
$options = array();
foreach ($data as $key => $value) {
$options[] = ['name' => trans('list.'.$value), 'id' => $value];
}
return $options;
}
/*
* Used to generate select option for default select box
* @param
* $data as array of key & value pair
* @return select options
*/
function generateNormalSelectOption($data)
{
$options = array();
foreach ($data as $key => $value) {
$options[] = ['text' => $value, 'value' => $key];
}
return $options;
}
/*
* Used to generate select option for default select box where value is same as text
* @param
* $data as array of key & value pair
* @return select options
*/
function generateNormalSelectOptionValueOnly($data)
{
$options = array();
foreach ($data as $value) {
$options[] = ['text' => $value, 'value' => $value];
}
return $options;
}
//////////////////////////////////////////////////////////////////////////////////// Select helper function ends
/*
* Used to round number
* @param
* $number as numeric value
* $decimal_place as integer for round precision
* @return number
*/
function formatNumber($number, $decimal_place = 2)
{
return round($number, $decimal_place);
}
////////////////////////////////////////////////////////////////////////////////////// IP helper function starts
/*
 * Used to check whether IP is in range.
 *
 * Supported $network formats:
 *   - exact IP:        "1.2.3.4"
 *   - CIDR:            "192.168.1.0/24"
 *   - dotted netmask:  "192.168.1.0/255.255.255.0"
 *   - wildcard octets: "192.168.1.*"
 *   - inclusive range: "10.0.0.1-10.0.0.5"
 */
function ipRange($network, $ip)
{
    $network = trim($network);
    $orig_network = $network;
    $ip = trim($ip);

    // Exact match short-circuits every other format.
    if ($ip === $network) {
        return true;
    }

    $network = str_replace(' ', '', $network);

    // Wildcard form: expand "192.168.1.*" into an equivalent CIDR block.
    // strpos() must be compared with !== false: the previous `!= false`
    // silently missed needles at position 0 (e.g. "*.*.*.*").
    if (strpos($network, '*') !== false) {
        if (strpos($network, '/') !== false) {
            $asParts = explode('/', $network);
            $network = $asParts[0];
        }
        $nCount = substr_count($network, '*');
        $network = str_replace('*', '0', $network);
        if ($nCount === 1) {
            $network .= '/24';
        } elseif ($nCount === 2) {
            $network .= '/16';
        } elseif ($nCount === 3) {
            $network .= '/8';
        } elseif ($nCount > 3) {
            // Every octet is wildcarded: matches anything.
            return true;
        }
    }

    $d = strpos($network, '-');
    if ($d === false) {
        // CIDR / netmask form.
        $ip_arr = explode('/', $network);
        if (!preg_match("@\d*\.\d*\.\d*\.\d*@", $ip_arr[0], $matches)) {
            $ip_arr[0] .= ".0";
        }
        if (!isset($ip_arr[1])) {
            // No prefix given: treat as a single host. The previous code
            // read the missing element and raised a notice instead.
            $ip_arr[1] = '32';
        }
        $network_long = ip2long($ip_arr[0]);
        if (strpos($ip_arr[1], '.') !== false) {
            // Dotted netmask, e.g. 255.255.255.0.
            $mask = ip2long($ip_arr[1]);
        } else {
            // CIDR prefix length.
            $mask = 0xffffffff << (32 - (int) $ip_arr[1]);
        }
        $ip_long = ip2long($ip);
        return ($ip_long & $mask) === ($network_long & $mask);
    } else {
        // Inclusive "start-end" range form.
        $from = ip2long(substr($network, 0, $d));
        $to = ip2long(substr($network, $d + 1));
        $ip = ip2long($ip);
        return ($ip >= $from and $ip <= $to);
    }
}
/*
 * Used to check whether the client IP is whitelisted.
 * @param $wl_ips iterable of objects with start_ip and optional end_ip
 * @return boolean
 */
function validateIp($wl_ips)
{
    // Bug fix: a hard-coded debug value ($ip = '192.168.1.1') had been left
    // in place of the real lookup, which made the whitelist check
    // meaningless outside that one address.
    $ip = getClientIp();

    // Normalise whitelist entries into "start-end" strings or single IPs.
    $allowedIps = array();
    foreach ($wl_ips as $wl_ip) {
        if ($wl_ip->end_ip) {
            $allowedIps[] = $wl_ip->start_ip.'-'.$wl_ip->end_ip;
        } else {
            $allowedIps[] = $wl_ip->start_ip;
        }
    }

    foreach ($allowedIps as $allowedIp) {
        if (strpos($allowedIp, '*')) {
            // Wildcard entry: expand to the lowest/highest address it covers.
            $range = [
                str_replace('*', '0', $allowedIp),
                str_replace('*', '255', $allowedIp)
            ];
            if (ipExistsInRange($range, $ip)) {
                return true;
            }
        } elseif (strpos($allowedIp, '-')) {
            $range = explode('-', str_replace(' ', '', $allowedIp));
            if (ipExistsInRange($range, $ip)) {
                return true;
            }
        } else {
            if (ip2long($allowedIp) === ip2long($ip)) {
                return true;
            }
        }
    }

    return false;
}
function ipExistsInRange(array $range, $ip)
{
    // Convert all three dotted-quad strings to longs and do a simple
    // inclusive bounds check.
    $candidate = ip2long($ip);
    $lower = ip2long($range[0]);
    $upper = ip2long($range[1]);
    return $candidate >= $lower && $candidate <= $upper;
}
/*
* Used to get IP address of visitor
* @return date
*/
function getRemoteIPAddress()
{
if (!empty($_SERVER['HTTP_CLIENT_IP'])) {
return $_SERVER['HTTP_CLIENT_IP'];
} elseif (!empty($_SERVER['HTTP_X_FORWARDED_FOR'])) {
return $_SERVER['HTTP_X_FORWARDED_FOR'];
}
return $_SERVER['REMOTE_ADDR'];
}
/*
* Used to get IP address of visitor
* @return IP address
*/
function getClientIp()
{
$ips = getRemoteIPAddress();
$ips = explode(',', $ips);
return !empty($ips[0]) ? $ips[0] : \Request::getClientIp();
}
////////////////////////////////////////////////////////////////////////////////////////// IP helper function ends
/*
* Used to check mode
* @return boolean
*/
function isTestMode()
{
if (env('APP_DEMO')) {
return true;
} else {
return false;
}
}
/*
* get Maximum post size of server
*/
function getPostMaxSize()
{
if (is_numeric($postMaxSize = ini_get('post_max_size'))) {
return (int) $postMaxSize;
}
$metric = strtoupper(substr($postMaxSize, -1));
$postMaxSize = (int) $postMaxSize;
switch ($metric) {
case 'K':
return $postMaxSize * 1024;
case 'M':
return $postMaxSize * 1048576;
case 'G':
return $postMaxSize * 1073741824;
default:
return $postMaxSize;
}
}
/*
* Used to get value-list json
* @return array
*/
function getVar($list)
{
$file = resource_path('var/'.$list.'.json');
return (\File::exists($file)) ? json_decode(file_get_contents($file), true) : [];
}
function getDefaultCurrencyCode() {
$currency = \App\Currency::find(config('config.currency'));
if (!$currency) {
$currency = \App\Currency::create([
'code'=>'USD',
'name'=>'US Dollars',
'symbol'=>'$',
'symbol_position'=>'before',
]);
}
return $currency;
}
/*
 * Formats a number as money using the configured currency.
 * @param $number numeric amount
 * @param $decimal_place decimal precision (default 2)
 * @return string amount with the currency symbol before or after it
 */
function formatMoney($number, $decimal_place = 2) {
    // Reuse getDefaultCurrencyCode() instead of duplicating its
    // fetch-or-create-USD fallback logic (keeps it in one place).
    $currency = getDefaultCurrencyCode();

    $symbol = $currency->symbol;
    $symbol_position = $currency->symbol_position;
    $formatted = number_format($number, $decimal_place);

    return ($symbol_position == 'before' ? $symbol : '') . $formatted . ($symbol_position == 'after' ? $symbol : '');
}
function getXMin($im, $w, $h) {
for($x=0;$x<$w;++$x) {
for($y=0;$y<$h;++$y) {
if(imagecolorat($im, $x, $y) != 0xFFFFFF) {
return $x;
}
}
}
}
function getYMin($im, $w, $h) {
for($y=0;$y<$h;++$y) {
for($x=0;$x<$w;++$x) {
if(imagecolorat($im, $x, $y) != 0xFFFFFF) {
return $y;
}
}
}
}
function getXMax($im, $w, $h) {
for($x=($w-1);$x>=0;--$x) {
for($y=0;$y<$h;++$y) {
if(imagecolorat($im, $x, $y) != 0xFFFFFF) {
return $x;
}
}
}
}
function getYMax($im, $w, $h) {
for($y=($h-1);$y>=0;--$y) {
for($x=0;$x<$w;++$x) {
if(imagecolorat($im, $x, $y) != 0xFFFFFF) {
return $y;
}
}
}
}
function getImage($filename) {
$type = exif_imagetype($filename);
switch($type) {
case IMAGETYPE_GIF:
return imagecreatefromgif($filename);
break;
case IMAGETYPE_JPEG:
return imagecreatefromjpeg($filename);
break;
case IMAGETYPE_PNG:
return imagecreatefrompng($filename);
break;
case IMAGETYPE_BMP:
return imagecreatefromwbmp($filename);
break;
default:
print 'UNKNOWN IMAGE TYPE: ' . image_type_to_mime_type($type) . "\n";
return FALSE;
break;
}
}
function remove_whitespace($img)
{
/* Get image */
$im = getImage($img);
$cropped = imagecropauto($im, IMG_CROP_WHITE);
if ($cropped !== false) { // in case a new image resource was returned
imagedestroy($im); // we destroy the original image
$im = $cropped; // and assign the cropped image to $im
}
$cropped = imagecropauto($im, IMG_CROP_DEFAULT);
if ($cropped !== false) { // in case a new image resource was returned
imagedestroy($im); // we destroy the original image
$im = $cropped; // and assign the cropped image to $im
}
header('Content-Type: image/png');
imagepng($im);
imagedestroy($im);
die();
}
function getCardBalance($number)
{
if ($card = \App\Voucher::where('card_no', $number)->first()) {
return $card->balance;
}
return 0;
}
| 16,857
|
https://github.com/hugoleeney/leetcode_problems/blob/master/p820_short_encoding_of_words.py
|
Github Open Source
|
Open Source
|
MIT
| null |
leetcode_problems
|
hugoleeney
|
Python
|
Code
| 202
| 426
|
'''
820. Short Encoding of Words
Difficulty - medium
Given a list of words, we may encode it by writing a reference string S and a list of indexes A.
For example, if the list of words is ["time", "me", "bell"], we can write it as S = "time#bell#" and indexes = [0, 2, 5].
Then for each index, we will recover the word by reading from the reference string from that index until we reach a "#" character.
What is the length of the shortest reference string S possible that encodes the given words?
Example:
Input: words = ["time", "me", "bell"]
Output: 10
Explanation: S = "time#bell#" and indexes = [0, 2, 5].
Note:
1 <= words.length <= 2000.
1 <= words[i].length <= 7.
Each word has only lowercase letters.
'''
class Solution:
    """LeetCode 820: length of the shortest reference string encoding `words`.

    Builds a trie of each word reversed, so words sharing a suffix share a
    path. Only trie leaves contribute to the encoding: a leaf word of length
    L costs L + 1 characters (the word plus its '#' terminator).
    """

    def register_count(self, c):
        # Callback invoked once per trie leaf with that leaf's cost.
        self.count_n += c

    def minimumLengthEncoding(self, words: List[str]) -> int:
        self.count_n = 0
        root = {}
        for word in words:
            node = root
            # Insert the word back-to-front so suffixes collapse together.
            for ch in word[::-1]:
                node = node.setdefault(ch, {})
        word_dft(root, self.register_count)
        return self.count_n


def word_dft(root, f):
    """Launch a depth-first walk from each top-level trie child."""
    for _, children in root.items():
        dft(children, 1, f)


def dft(node, depth, f):
    """Walk `node`; at each leaf report the word cost (depth + 1 for '#')."""
    if node:
        for _, children in node.items():
            dft(children, depth + 1, f)
    else:
        f(depth + 1)
| 22,435
|
https://github.com/BeMyEye/angularjs-webpack/blob/master/src/app/contacts/containers/contacts/contacts.container.spec.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
angularjs-webpack
|
BeMyEye
|
TypeScript
|
Code
| 116
| 477
|
import * as angular from 'angular';
import { IQService } from 'angular';
import 'angular-mocks';
import { ContactsContainer } from './contacts.container';
// Unit tests for the ContactsContainer component, using angular-mocks to
// register the component on a throw-away module with spy collaborators.
describe('Contacts container', () => {
  // Jasmine spy doubles injected in place of the real contacts service.
  const _contactsService = {
    remove: jasmine.createSpy('remove'),
    getAll: jasmine.createSpy('getAll')
  };
  // NOTE(review): declared but never registered with the module below — the
  // `go` spy is unused by these specs; confirm whether $state is needed.
  const _$state = {
    go: jasmine.createSpy('go')
  };
  beforeEach(() => {
    // Rebuild the test module per spec so DI wiring is fresh each time.
    angular
      .module('app', [])
      .component(ContactsContainer.selector, ContactsContainer)
      .value('contactsService', _contactsService);
    angular.mock.module('app');
  });
  it('should exist', angular.mock.inject(($componentController: any) => {
    const component = $componentController(ContactsContainer.selector, {}, {});
    expect(component).toBeDefined();
  }));
  it('should call `contactsService.remove` when removing', angular.mock.inject((
    $componentController: any,
    contactsService: any,
    $q: IQService) => {
    const component = $componentController(ContactsContainer.selector, {}, {});
    // getAll must resolve because remove() presumably refreshes the list.
    _contactsService.getAll.and.returnValue($q.resolve());
    component.remove(1);
    expect(contactsService.remove).toHaveBeenCalledWith(1);
  }));
  it('should call `contactsService.getAll` on init', angular.mock.inject((
    $componentController: any,
    contactsService: any,
    $q: IQService) => {
    const component = $componentController(ContactsContainer.selector, {}, {});
    _contactsService.getAll.and.returnValue($q.resolve());
    component.$onInit();
    expect(contactsService.getAll).toHaveBeenCalled();
  }));
});
| 30,319
|
https://github.com/SeptianFauzi/innosoft/blob/master/application/views/backend/struktur/form_input.php
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
innosoft
|
SeptianFauzi
|
PHP
|
Code
| 210
| 1,184
|
<div class="box box-info">
    <div class="box-header with-border">
        <h3 class="box-title">Tambah Artikel Struktur</h3>
    </div>
    <div class="box-body">
        <?php
        // CodeIgniter multipart form posting to struktur/post (file upload below).
        echo form_open_multipart('struktur/post','class="form-horizontal"');
        ?>
        <div class="box-body">
            <div class="form-group">
                <label for="artikel_judul" class="col-sm-2 control-label">Judul</label>
                <div class="col-sm-6">
                    <input type="text" name="artikel_judul" class="form-control" required>
                </div>
            </div>
            <div class="form-group">
                <label for="artikel_slug" class="col-sm-2 control-label">Slug</label>
                <div class="col-sm-6">
                    <input type="text" name="artikel_slug" class="form-control" placeholder="untuk url ex. judul-artikel-rpl" required>
                </div>
            </div>
            <div class="form-group">
                <label for="artikel_isi_ringkas" class="col-sm-2 control-label">Isi Ringkas</label>
                <div class="col-sm-6">
                    <textarea type="text" name="artikel_isi_ringkas" class="form-control" required></textarea>
                </div>
            </div>
            <div class="form-group">
                <label for="artikel_isi" class="col-sm-2 control-label">Isi </label>
                <div class="col-sm-6">
                    <textarea type="text" name="artikel_isi" class="form-control" required></textarea>
                </div>
            </div>
            <input type="hidden" name="artikel_katagori" class="form-control" value="struktur">
            <div class="form-group">
                <label for="artikel_terbit" class="col-sm-2 control-label">Tanggal Terbit</label>
                <div class="col-sm-6">
                    <input type="date" name="artikel_terbit" class="form-control" required>
                </div>
            </div>
            <div class="form-group">
                <label for="artikel_status" class="col-sm-2 control-label">Status</label>
                <div class="col-sm-6">
                    <?php
                    // Dropdown of publish states; index 0 is the placeholder entry.
                    $status = array('- Pilih status','aktif' => 'aktif', 'pasif' => 'pasif');
                    echo form_dropdown('artikel_status',$status,'','class="form-control"');
                    ?>
                </div>
            </div>
            <div class="form-group">
                <label for="artikel_gambar" class="col-sm-2 control-label">Gambar</label>
                <div class="col-sm-6">
                    <input type="file" name="artikel_gambar" class="form-control" required>
                </div>
            </div>
            <div class="form-group">
                <label for="user_id" class="col-sm-2 control-label">Penulis</label>
                <div class="col-sm-6">
                    <select class="form-control" name="user_id">
                        <option value="0">Pilih Penulis</option>
                        <?php
                        // $user_profil_nama is provided by the controller: one row per author.
                        foreach ($user_profil_nama as $upn) {
                            echo "<option value='$upn->user_id'>$upn->user_profil_nama</option>";
                        }?>
                    </select>
                </div>
            </div>
            <div class="form-group">
                <label for="artikel_semat" class="col-sm-2 control-label">artikel_semat</label>
                <div class="col-sm-6">
                    <?php
                    // "Semat" = pinned flag; reuses the placeholder-at-index-0 pattern.
                    $status = array('- Pilih status','ya' => 'ya', 'tidak' => 'tidak');
                    echo form_dropdown('artikel_semat',$status,'','class="form-control"');
                    ?>
                </div>
            </div>
        </div>
        <div class="box-footer">
            <button type="submit" class="btn btn-info pull-right" name="submit">Tambah Artikel</button>
        </div>
        </form>
    </div>
</div>
| 28,346
|
https://github.com/NLightning/NLightning/blob/master/NLightning.Test/Transport/Messaging/SetupMessages/InitMessageTests.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
NLightning
|
NLightning
|
C#
|
Code
| 71
| 270
|
using NLightning.Transport.Messaging.SetupMessages;
using NLightning.Utils;
using NLightning.Utils.Extensions;
using Xunit;
namespace NLightning.Test.Transport.Messaging.SetupMessages
{
    /// <summary>
    /// Round-trip tests for <see cref="InitMessage"/> serialization.
    /// </summary>
    public class InitMessageTests
    {
        [Fact]
        public void GetBytesTest()
        {
            // Serialization prefixes type 0x0010 and length-frames both feature fields.
            var message = new InitMessage(new byte[] {0, 5}, new byte[] {0, 6});

            var actual = message.GetBytes();

            Assert.Equal("00100002000500020006", actual.ToHex());
        }

        [Fact]
        public void ParseTest()
        {
            // Parsing the raw payload restores both feature vectors.
            var message = new InitMessage();
            message.ParsePayload("0002000500020006".HexToByteArray());

            Assert.Equal(new byte[] {0, 6}, message.Localfeatures);
            Assert.Equal(new byte[] {0, 5}, message.Globalfeatures);
        }
    }
}
| 21,720
|
https://github.com/BeastNeedsMoreTorque/Distribution/blob/master/s/std/src/main/scala/typings/std/MSStream.scala
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
Distribution
|
BeastNeedsMoreTorque
|
Scala
|
Code
| 54
| 216
|
package typings.std
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation._
/** Scala.js facade for the legacy IE/Edge `MSStream` DOM object. */
trait MSStream extends js.Object {
  // Stream content-type identifier string.
  val `type`: java.lang.String
  def msClose(): Unit
  def msDetachStream(): js.Any
}
object MSStream {
  /** Builds a plain JS object literal satisfying the MSStream facade.
    * `type` is a Scala keyword-clashing name, so it is attached via
    * updateDynamic after the literal is created.
    */
  @scala.inline
  def apply(msClose: () => Unit, msDetachStream: () => js.Any, `type`: java.lang.String): MSStream = {
    val __obj = js.Dynamic.literal(msClose = js.Any.fromFunction0(msClose), msDetachStream = js.Any.fromFunction0(msDetachStream))
    __obj.updateDynamic("type")(`type`.asInstanceOf[js.Any])
    __obj.asInstanceOf[MSStream]
  }
}
| 44,321
|
https://github.com/ga-explorer/NumericalGeometryLib/blob/master/DataStructuresLib/DataStructuresLib/Collections/PeriodicLists2D/ProListConstantValues2D.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
NumericalGeometryLib
|
ga-explorer
|
C#
|
Code
| 123
| 373
|
using System.Collections;
using System.Collections.Generic;
using System.Linq;
namespace DataStructuresLib.Collections.PeriodicLists2D
{
    /// <summary>
    /// A read-only 2D periodic list in which every cell holds the same value.
    /// </summary>
    public class ProListConstantValues2D<TValue> :
        IPeriodicReadOnlyList2D<TValue>
    {
        /// <summary>The single value reported for every cell.</summary>
        public TValue Value { get; }

        /// <summary>Total number of cells (rows * columns).</summary>
        public int Count
            => Count1 * Count2;

        /// <summary>Number of rows.</summary>
        public int Count1 { get; }

        /// <summary>Number of columns.</summary>
        public int Count2 { get; }

        /// <summary>Any flat index maps to the constant value.</summary>
        public TValue this[int index]
            => Value;

        /// <summary>Any (row, column) pair maps to the constant value.</summary>
        public TValue this[int index1, int index2]
            => Value;

        public ProListConstantValues2D(int rowsCount, int columnsCount, TValue value)
        {
            Count1 = rowsCount;
            Count2 = columnsCount;
            Value = value;
        }

        /// <summary>
        /// Materializes the list as a [Count1, Count2] array filled with Value.
        /// </summary>
        public TValue[,] ToArray2D()
        {
            var valuesArray = new TValue[Count1, Count2];

            for (var index2 = 0; index2 < Count2; index2++)
            for (var index1 = 0; index1 < Count1; index1++)
                // BUGFIX: the subscripts were swapped ([index2, index1]), which
                // threw IndexOutOfRangeException whenever Count1 != Count2.
                valuesArray[index1, index2] = Value;

            return valuesArray;
        }

        public IEnumerator<TValue> GetEnumerator()
        {
            return Enumerable.Repeat(Value, Count).GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }
}
| 17,586
|
https://github.com/XiaotaoChen/model-quantization/blob/master/start_on_terminate.sh
|
Github Open Source
|
Open Source
|
BSD-2-Clause
| 2,021
|
model-quantization
|
XiaotaoChen
|
Shell
|
Code
| 57
| 115
|
# Watch a running training job; when the given PID disappears from nvidia-smi,
# launch the next experiment round via train.sh with the supplied config.
pid=$1
script=train.sh
config=$2

if [ "$pid" != "" ]
then
    while true
    do
        # Poll once a minute while the PID is still on the GPU.
        # -w prevents a longer pid (e.g. 1234) from matching a shorter one (123).
        nvidia-smi | grep -w "$pid"
        if [ $? -eq 0 ]; then sleep 1m; continue; else break; fi
    done
    #sleep 10
    echo "starting script"
    bash "$script" "$config"
fi
| 10,374
|
https://github.com/Liz303/wunderwerkz-react/blob/master/src/components/pages/ContactPage.js
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
wunderwerkz-react
|
Liz303
|
JavaScript
|
Code
| 171
| 633
|
import React from "react";
import { browserHistory } from "react-router";
import HalfText from "../HalfText.jsx";
import ScrollText from "../ScrollText.jsx";
import Navigation from "../Navigation.jsx";
// Contact page: left half navigates back to the root route on click, right
// half is a scrollable panel of contact details (address, email, phone, IG).
class ContactPage extends React.Component {
  render() {
    return (
      <Navigation one="contact" two="root" three="about" four="work">
        <div className="page contact">
          <div className="flex-wrapper about">
            <div
              className="flex-half center"
              onClick={() => {
                browserHistory.push("/");
              }}
            >
              <HalfText className="cursor-click">
                Reach Out and Touch Us
              </HalfText>
            </div>
            <div
              className="flex-half center"
              onClick={e => e.stopPropagation()}
            >
              <ScrollText className="cursor-scroll">
                <div className="info-wrapper">
                  <div className="info-group">
                    <p className="left"> ✌ </p>
                    <p className="left">
                      Stop By <br />
                      <a
                        href="https://goo.gl/maps/VvSC31W36duda8oTA"
                        className="center"
                      >
                        3455 Ringsby Ct #112 <br /> DENVER, CO 80216 <br /> USA
                        <br /> Earf
                      </a>
                    </p>
                  </div>
                  <div className="info-group">
                    <p className="left"> 📫 </p>
                    <p className="left">
                      send a note <br />
                      <a href="mailto:hello@iheartwunderwerkz.com">
                        hello@iheartwunderwerkz.com
                      </a>
                    </p>
                  </div>
                  <div className="info-group">
                    <p className="left"> 📟 </p>
                    <p className="left">
                      drop a line <br />
                      303.594.4990
                    </p>
                  </div>
                  <div className="info-group">
                    <p className="left"> 📷 </p>
                    <p className="left">
                      peep the grams <br />
                      <a href="https://www.instagram.com/wunder_werkz/">
                        @wunder_werkz
                      </a>
                    </p>
                  </div>
                </div>
              </ScrollText>
            </div>
          </div>
        </div>
      </Navigation>
    );
  }
}
export default ContactPage;
| 1,363
|
https://github.com/josephbeuysmum/Cirk/blob/master/Cirk/Dertisch/Salle/Sommelier.swift
|
Github Open Source
|
Open Source
|
MIT
| null |
Cirk
|
josephbeuysmum
|
Swift
|
Code
| 271
| 674
|
//
// Sommelier.swift
// Dertisch
//
// Created by Richard Willis on 08/10/2018.
// Copyright © 2018 Rich Text Format Ltd. All rights reserved.
//
/// Supported localisation regions; raw values are ISO 639-1 language codes.
public enum Regions: String {
	case
	england = "en",
	france = "fr"
}
/// Public surface of `Sommelier`; members are currently commented out while
/// the API is being reworked (see commented code in the class below).
public protocol SommelierProtocol {
//	var region: Regions { get set }
//	init(larder: Larder)
//	func set(_ customer: CustomerForSommelier?)
//	subscript(name: String) -> String? { get }
}
/// Loads localised copy ("wines") from a JSON asset and serves strings
/// keyed by name for the currently selected region.
public final class Sommelier {
	/// Currently selected region; re-assigning the same value is a no-op.
	public var region: Regions {
		get { return region_ }
		// todo what should happen after the region gets re-set?
		set {
			guard region_ != newValue else { return }
			region_ = newValue
		}
	}

	// Name-keyed localised entries; nil when the JSON asset failed to decode.
	fileprivate let wines: [String: Wine]?
	fileprivate var
	region_: Regions
//	, customer: CustomerForSommelier?

	/// Decodes the "text" JSON asset from the larder into a name-keyed map.
	/// NOTE(review): assumes each `Wines.copy` entry carries its lookup name
	/// in `key`; entries without a key are silently dropped — confirm intended.
	public required init(larder: Larder) {
		region_ = .england
		if let unbottledWines = larder.decode(json: "text", into: Wines.self) {
			var bottledWines: [String: Wine] = [:]
			for var unbottledWine in unbottledWines.copy {
				if let name = unbottledWine.key {
					// The key becomes the dictionary key, so clear it on the value.
					unbottledWine.key = nil
					bottledWines[name] = unbottledWine
				}
			}
			wines = bottledWines
		} else {
			wines = nil
		}
	}

//	public func assign(_ customer: CustomerForSommelier?) {
//		lo("assign", customer)
//		self.customer = customer
//	}
}
extension Sommelier: SommelierProtocol {
	/// Returns the localised string for `name`, or nil when unknown.
	/// Global entries ignore the region; others dispatch on it.
	public subscript(name: String) -> String? {
		guard let wine = wines?[name] else { return nil }
		if wine.isGlobal { return wine.all }
		// Exhaustive switch on the enum replaces the rawValue comparison.
		switch region {
		case .england: return wine.en
		case .france: return wine.fr
		}
	}
}
/// JSON envelope: the asset is an object with a "copy" array of entries.
internal struct Wines: Decodable {
	let copy: [Wine]
}

/// One localised string entry. Either `all` is set (region-independent)
/// or the per-region fields (`en`, `fr`) are.
internal struct Wine: Decodable {
	// A non-nil `all` marks the entry as region-independent.
	var isGlobal: Bool { return all != nil }
	// Lookup name; cleared once the entry is stored under its dictionary key.
	var key: String?
	let
	en: String?,
	fr: String?,
	all: String?
}
| 15,545
|
https://github.com/GuardTime/ksi-net-sdk/blob/master/ksi-net-api/Service/AggregatorConfigChangedEventArgs.cs
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,018
|
ksi-net-sdk
|
GuardTime
|
C#
|
Code
| 300
| 631
|
/*
* Copyright 2013-2018 Guardtime, Inc.
*
* This file is part of the Guardtime client SDK.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES, CONDITIONS, OR OTHER LICENSES OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
* "Guardtime" and "KSI" are trademarks or registered trademarks of
* Guardtime, Inc., and no license to trademarks is granted; Guardtime
* reserves and retains all trademark rights.
*/
using System;
using Guardtime.KSI.Exceptions;
namespace Guardtime.KSI.Service
{
    /// <summary>
    /// Class holding aggregator configuration changed event information.
    /// Exactly one of <see cref="Exception"/> or <see cref="AggregatorConfig"/>
    /// is populated, depending on which constructor was used.
    /// </summary>
    public class AggregatorConfigChangedEventArgs : EventArgs
    {
        /// <summary>
        /// Exception thrown while processing aggregation configuration request.
        /// Null when the request succeeded.
        /// </summary>
        public KsiException Exception { get; set; }

        /// <summary>
        /// New aggregator configuration. Null when the request failed.
        /// </summary>
        public AggregatorConfig AggregatorConfig { get; }

        /// <summary>
        /// KsiService that made the aggregator configuration request.
        /// </summary>
        public IKsiService KsiService { get; }

        /// <summary>
        /// Create aggregator configuration changed event arguments class instance.
        /// </summary>
        /// <param name="exception">Exception thrown while processing aggregator configuration request</param>
        /// <param name="ksiService">KsiService that made the aggregator configuration request</param>
        public AggregatorConfigChangedEventArgs(KsiException exception, IKsiService ksiService)
        {
            Exception = exception;
            KsiService = ksiService;
        }

        /// <summary>
        /// Create aggregator configuration changed event arguments class instance.
        /// </summary>
        /// <param name="aggregatorConfig">New aggregator configuration</param>
        /// <param name="ksiService">KsiService that made the aggregator configuration request</param>
        public AggregatorConfigChangedEventArgs(AggregatorConfig aggregatorConfig, IKsiService ksiService = null)
        {
            AggregatorConfig = aggregatorConfig;
            KsiService = ksiService;
        }
    }
}
| 35,134
|
https://github.com/openshift/openshift-tools/blob/master/scripts/monitoring/cron-send-pcp-sampled-metrics.py
|
Github Open Source
|
Open Source
|
LicenseRef-scancode-warranty-disclaimer, Apache-2.0
| 2,022
|
openshift-tools
|
openshift
|
Python
|
Code
| 290
| 934
|
#!/usr/bin/python
'''
Sample pcp for cpu statistics over an interval
Example:
./cron-send-pcp-sample.py -m kernel.all.cpu.idle -m kernel.all.cpu.nice -m kernel.all.cpu.steal \
-m kernel.all.cpu.sys -m kernel.all.cpu.user -m kernel.all.cpu.wait.total -i 2 -v
'''
# Disabling invalid-name because pylint doesn't like the naming conention we have.
# pylint: disable=invalid-name,import-error
import sys
import argparse
import collections
from openshift_tools.monitoring import pminfo
from openshift_tools.monitoring.metric_sender import MetricSender
def parse_args():
    '''Build the CLI parser and parse sys.argv.

    Returns a (parsed_args, parser) tuple so callers can print usage on error.
    '''
    parser = argparse.ArgumentParser(description="Tool to sample pcp metrics")
    parser.add_argument('-m', '--metrics', action="append",
                        help="metrics to send to zabbix")
    parser.add_argument('-i', '--interval', default=10,
                        action="store", help="Sample pcp metrics for $i amount of time. Default: 10")
    parser.add_argument('-c', '--count', default=2,
                        action="store", help="Sample pcp metrics for $i amount of time for $c times. Default: 2")

    # The three boolean switches share the same shape, so declare them as data.
    flag_specs = (
        ('-d', '--debug', "debug mode"),
        ('-v', '--verbose', "Verbose?"),
        ('-t', '--test', "Run the script but don't send to zabbix"),
    )
    for short_opt, long_opt, help_text in flag_specs:
        parser.add_argument(short_opt, long_opt, default=False,
                            action="store_true", help=help_text)

    args = parser.parse_args()
    return args, parser
def get_averages(samples):
    ''' Calculate the average of the results of the samples'''
    averaged = {}
    for metric, values in samples.items():
        averaged[metric] = sum(values) / len(values)
    return averaged
def main():
    '''Run pminfo against a list of metrics.
    Sample metrics passed in for an amount of time and report data to zabbix
    '''
    args, parser = parse_args()

    # -m is mandatory: without metrics there is nothing to sample.
    if not args.metrics:
        print
        print 'Please specify metrics with -m.'
        print
        parser.print_help()
        sys.exit(1)

    metrics = args.metrics
    interval = int(args.interval)
    count = int(args.count)

    # Gather sampled data
    data = pminfo.get_sampled_data(metrics, interval, count)

    zab_results = collections.defaultdict(list)
    for metric_name, val in data.items():
        # Only kernel.* counters are supported: consecutive samples are turned
        # into percent-CPU deltas via pminfo.calculate_percent_cpu.
        if 'kernel' in metric_name:
            for sample in range(len(val)):
                if sample + 1 == len(val):
                    break
                zab_results[metric_name].append(pminfo.calculate_percent_cpu(val[sample], val[sample+1], interval))
        else:
            print 'NOT SUPPORTED: [%s]' % metric_name
        # NOTE(review): reconstructed indentation places this inside the metric
        # loop (it echoes the latest value per metric) — confirm with upstream.
        if zab_results.get(metric_name, None) != None and (args.verbose or args.debug):
            print '%s: %.2f' % (metric_name, zab_results[metric_name][-1])

    # Collapse each metric's delta list to its mean before reporting.
    zab_results = get_averages(zab_results)

    # Send the data to zabbix
    if not args.test:
        mts = MetricSender(verbose=args.debug)
        mts.add_metric(zab_results)
        mts.send_metrics()

if __name__ == '__main__':
    main()
| 3,792
|
https://github.com/cmgladding/aslam-project/blob/master/src/quat2taitbryan.cpp
|
Github Open Source
|
Open Source
|
MIT
| null |
aslam-project
|
cmgladding
|
C++
|
Code
| 317
| 1,057
|
#include <cmath>
#include "ros/ros.h"
#include "gazebo_msgs/ModelStates.h"
#include "geometry_msgs/Pose.h"
#include "geometry_msgs/Twist.h"
#include "geometry_msgs/Quaternion.h"
#include "geometry_msgs/Vector3.h"
#include <std_msgs/Float64.h>
#include <vector>
// Holds the most recently received orientation quaternion as shared static
// state, written by the subscriber callback and read by main()'s loop.
class Rotation{
  public:
    // Quaternion parameters
    // Other convention - w is x0, x is x1, y is x2, z is x3
    static double X;
    static double Y;
    static double Z;
    static double W;
    Rotation();
    ~Rotation();
}; // class

// Initialize quaternion values (in case we try to calculate before calling CB)
// (w=1, x=y=z=0 is the identity rotation)
double Rotation::W = 1.0;
double Rotation::X = 0.0;
double Rotation::Y = 0.0;
double Rotation::Z = 0.0;
// Read data from gazebo in quaternion format and update public data values
// (copies the message's quaternion into the shared Rotation statics).
void readOrientation(geometry_msgs::Quaternion orientation)
{
  Rotation::X = orientation.x;
  Rotation::Y = orientation.y;
  Rotation::Z = orientation.z;
  Rotation::W = orientation.w;
}
// Subscriber callback: find the "atlas" entry in the model-state arrays and
// store its orientation. Scans the whole list, keeping the LAST match, to
// mirror the original behavior exactly.
void rotationCB(const gazebo_msgs::ModelStates::ConstPtr& msg)
{
  const std::string atlas_name = "atlas";
  const int model_count = msg->name.size();
  int atlas_index = -1;

  for (int i = 0; i < model_count; ++i){
    if (msg->name[i] == atlas_name){
      atlas_index = i;
    }
  }

  if (atlas_index != -1){
    readOrientation(msg->pose[atlas_index].orientation);
  }
}
// Node entry point: subscribes to gazebo model states and republishes the
// atlas orientation as Tait-Bryan angles (roll/pitch/yaw) at 200 Hz.
int main(int argc, char **argv)
{
  ros::init(argc, argv, "quat2taitbryan");
  ros::NodeHandle n;

  // Subscribe to the arrays of actual model poses and twists published by gazebo
  ros::Subscriber sub_model_states = n.subscribe<gazebo_msgs::ModelStates>("gazebo/model_states", 1, rotationCB);

  // Consider putting these all into a single custom message type
  ros::Publisher pub_roll = n.advertise<std_msgs::Float64>("roll", 1000);
  ros::Publisher pub_pitch = n.advertise<std_msgs::Float64>("pitch", 1000);
  ros::Publisher pub_yaw = n.advertise<std_msgs::Float64>("yaw", 1000);
  ros::Rate loop_rate(200);

  while (ros::ok())
  {
    // Create message objects
    std_msgs::Float64 roll, pitch, yaw;

    // create and initialize calculation variables
    double x, y, z, w, r, p, pi;
    pi = 3.14159265359;
    // Snapshot the latest quaternion written by the subscriber callback.
    w = Rotation::W;
    x = Rotation::X;
    y = Rotation::Y;
    z = Rotation::Z;

    // Perform Calculation and assign message object data values
    // Reference http://www.sedris.org/wg8home/Documents/WG80485.pdf
    roll.data = atan2((y*z + w*x),0.5 - (pow(x,2.0) + pow(y,2.0)));
    pitch.data = asin(-2*(x*z - w*y));
    yaw.data = atan2((x*y + w*z),0.5 - (pow(y,2.0) + pow(z,2.0)));

    // Use publish() function to send message objects
    pub_roll.publish(roll);
    pub_pitch.publish(pitch);
    pub_yaw.publish(yaw);

    ros::spinOnce();
    loop_rate.sleep();
  }
  return 0;
}
| 42,340
|
https://github.com/Sandermoen/portfolio/blob/master/src/components/Button/Button.js
|
Github Open Source
|
Open Source
|
MIT
| null |
portfolio
|
Sandermoen
|
JavaScript
|
Code
| 71
| 232
|
import React, { useContext, Fragment } from "react"
import { ThemeManagerContext } from "gatsby-styled-components-dark-mode"
import { StyledButton } from "./Button.styles"
const Button = ({ inverted, className, children, link }) => {
const { isDark } = useContext(ThemeManagerContext)
return (
<Fragment>
{link ? (
<a href={link} target="_blank" rel="noreferrer">
<StyledButton
className={className}
isDark={isDark}
inverted={inverted}
>
{children}
</StyledButton>
</a>
) : (
<StyledButton className={className} isDark={isDark} inverted={inverted}>
{children}
</StyledButton>
)}
</Fragment>
)
}
export default Button
| 2,759
|
https://github.com/aseerishraque/puc-cgpa/blob/master/resources/views/cgpa.blade.php
|
Github Open Source
|
Open Source
|
MIT
| null |
puc-cgpa
|
aseerishraque
|
PHP
|
Code
| 276
| 1,720
|
<!doctype html>
<html lang="en">
<head>
    <!-- Required meta tags -->
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <!-- Bootstrap CSS -->
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-EVSTQN3/azprG1Anm3QDgpJLIm9Nao0Yz1ztcQTwFspd3yD65VohhpuuCOmLASjC" crossorigin="anonymous">
    <link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/1.11.3/css/jquery.dataTables.css">
    <title>CGPA</title>
</head>
<body>
<div class="container">
    {{-- TODO: CGPA is hard-coded; should be computed from stored results. --}}
    <h2>
        CGPA: 0.00
    </h2>
    <!-- Button trigger modal -->
    <button type="button" class="btn btn-primary" data-bs-toggle="modal" data-bs-target="#exampleModal">
        Insert Result
    </button>
    <!-- Modal -->
    {{-- Result-entry modal: the form posts to result.store with a CSRF token. --}}
    <div class="modal fade" id="exampleModal" tabindex="-1" aria-labelledby="exampleModalLabel" aria-hidden="true">
        <div class="modal-dialog">
            <div class="modal-content">
                <div class="modal-header">
                    <h5 class="modal-title" id="exampleModalLabel">Modal title</h5>
                    <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
                </div>
                <div class="modal-body">
                    <form action="{{ route('result.store') }}" method="post">
                        @csrf
                        <div class="mb-3">
                            <label for="exampleInputEmail1" class="form-label">Select Semester</label>
                            <select name="semester" id="" class="form-control">
                                <option value="">Choose Semester </option>
                                <option value="1st">1st</option>
                                <option value="2nd">2nd</option>
                                <option value="3rd">3rd</option>
                                <option value="4th">4th</option>
                                <option value="5th">5th</option>
                                <option value="6th">6th</option>
                                <option value="7th">7th</option>
                                <option value="8th">8th</option>
                            </select>
                        </div>
                        {{-- Remaining fields are Bootstrap boilerplate placeholders. --}}
                        <div class="mb-3">
                            <label for="exampleInputEmail1" class="form-label">Email address</label>
                            <input type="email" class="form-control" id="exampleInputEmail1" aria-describedby="emailHelp">
                        </div>
                        <div class="mb-3">
                            <label for="exampleInputPassword1" class="form-label">Password</label>
                            <input type="password" class="form-control" id="exampleInputPassword1">
                        </div>
                        <div class="mb-3 form-check">
                            <input type="checkbox" class="form-check-input" id="exampleCheck1">
                            <label class="form-check-label" for="exampleCheck1">Check me out</label>
                        </div>
                        <button type="submit" class="btn btn-primary">Submit</button>
                    </form>
                </div>
                <div class="modal-footer">
                    <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
                    <button type="button" class="btn btn-primary">Save changes</button>
                </div>
            </div>
        </div>
    </div>
    {{-- Results table, upgraded to a DataTable by the script at the bottom. --}}
    <div class="row">
        <div class="col-md-10">
            <table id="table_id" class="display">
                <thead>
                    <tr>
                        <th>Column 1</th>
                        <th>Column 2</th>
                    </tr>
                </thead>
                <tbody>
                    <tr>
                        <td>Row 1 Data 1</td>
                        <td>Row 1 Data 2</td>
                    </tr>
                    <tr>
                        <td>Row 2 Data 1</td>
                        <td>Row 2 Data 2</td>
                    </tr>
                </tbody>
            </table>
        </div>
    </div>
</div>
<!-- Optional JavaScript; choose one of the two! -->
<!-- Option 1: Bootstrap Bundle with Popper -->
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/js/bootstrap.bundle.min.js" integrity="sha384-MrcW6ZMFYlzcLA8Nl+NtUVF0sA7MsXsP1UyJoMp4YLEuNSfAP+JcXn/tWtIaxVXM" crossorigin="anonymous"></script>
<!-- Option 2: Separate Popper and Bootstrap JS -->
<!--
<script src="https://cdn.jsdelivr.net/npm/@popperjs/core@2.9.2/dist/umd/popper.min.js" integrity="sha384-IQsoLXl5PILFhosVNubq5LC7Qb9DXgDA9i+tQ8Zj3iwWAwPtgFTxbJ8NT4GN1R8p" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.2/dist/js/bootstrap.min.js" integrity="sha384-cVKIPhGWiC2Al4u+LWgxfKTRIcfu0JTxR+EQDz/bgldoEyl4H0zUF0QKbrJ0EcQF" crossorigin="anonymous"></script>
-->
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.6.0/jquery.js" integrity="sha512-n/4gHW3atM3QqRcbCn6ewmpxcLAHGaDjpEBu4xZd47N0W2oQ+6q7oc3PXstrJYXcbNU1OHdQ1T7pAP+gi5Yu8g==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<script type="text/javascript" charset="utf8" src="https://cdn.datatables.net/1.11.3/js/jquery.dataTables.js"></script>
<script>
    $(document).ready( function () {
        $('#table_id').DataTable();
    } );
</script>
</body>
</html>
| 35,517
|
https://github.com/XurrencyFX/EhPanda/blob/master/EhPanda/View/Support/Components/Cells/GalleryRankingCell.swift
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
EhPanda
|
XurrencyFX
|
Swift
|
Code
| 109
| 415
|
//
// GalleryRankingCell.swift
// EhPanda
//
// Created by 荒木辰造 on R 3/12/14.
//
import SwiftUI
import Kingfisher
/// A row cell showing a gallery's cover thumbnail, its ranking number,
/// and the (trimmed) title plus optional uploader name.
struct GalleryRankingCell: View {
    private let gallery: Gallery
    private let ranking: Int

    init(gallery: Gallery, ranking: Int) {
        self.gallery = gallery
        self.ranking = ranking
    }

    var body: some View {
        HStack {
            // Cover image at 75% of the standard row size, with an
            // activity-style placeholder while it loads.
            KFImage(URL(string: gallery.coverURL))
                .placeholder { Placeholder(style: .activity(ratio: Defaults.ImageSize.headerAspect)) }.defaultModifier()
                .scaledToFill().frame(width: Defaults.ImageSize.rowW * 0.75, height: Defaults.ImageSize.rowH * 0.75)
                .cornerRadius(2)
            Text(String(ranking)).fontWeight(.medium).font(.title2).padding(.horizontal)
            VStack(alignment: .leading) {
                Text(gallery.trimmedTitle).bold().lineLimit(2).fixedSize(horizontal: false, vertical: true)
                // Uploader is optional metadata; omit the line when absent.
                if let uploader = gallery.uploader {
                    Text(uploader).foregroundColor(.secondary).lineLimit(1)
                }
            }
            .font(.caption)
            Spacer()
        }
    }
}
/// Xcode canvas preview: a single dark-mode cell in a fixed 300x100 frame.
struct GalleryRankingCell_Previews: PreviewProvider {
    static var previews: some View {
        GalleryRankingCell(gallery: .preview, ranking: 1)
            .previewLayout(.fixed(width: 300, height: 100))
            .preferredColorScheme(.dark)
    }
}
| 24,175
|
https://github.com/ep1804/others/blob/master/practice/hackerrank/functional_programming/src/euler/E003.scala
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
others
|
ep1804
|
Scala
|
Code
| 164
| 389
|
package euler
object LargestPrimeFactor {

  // Odd candidates 3, 5, 7, ... for trial division.
  lazy val from3by2: Stream[Long] = 3L #:: from3by2.map(_ + 2)

  // Lazily built primes: keep each odd i with no prime divisor <= sqrt(i).
  // The self-reference works because takeWhile only needs primes < i.
  lazy val primes: Stream[Long] =
    2L #:: from3by2.filter(i => primes.takeWhile(j => j * j <= i).forall(i % _ > 0))

  // c.f.
  //def isPrime(n: Long): Boolean = primes.takeWhile(_ <= math.sqrt(n)).filter(n % _ == 0).size == 0

  // repeat division by p until there's no p factor in n
  def divRep(n: Long, p: Long): Long =
    if(n % p != 0) n
    else divRep(n / p, p)

  /** Largest prime factor of n; returns n itself when n is prime. */
  def solve(n: Long): Long = {
    // if n is prime, return n
    val ps = primes.takeWhile(_ <= math.sqrt(n)).filter(n % _ == 0) // small prime factors
    if(ps.size == 0) return n // n is prime

    // remove small prime factors from n -> n2
    val n2 = ps.foldLeft(n)(divRep(_ , _))
    // n2 == 1 means all factors were small; otherwise recurse on the residue.
    if(n2 == 1) ps.last
    else solve(n2)
  }

  def main(args: Array[String]): Unit = {
    // First stdin line is the test count T, followed by T values of n.
    val in = io.Source.stdin.getLines
    val T = in.next.toInt
    val ns = in.take(T).map(_.toLong)
    ns map solve foreach println
  }
}
| 48,827
|
https://github.com/KamilKarpus/home-budget/blob/master/src/modules/hb.core/hb.core.domain/money.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
home-budget
|
KamilKarpus
|
TypeScript
|
Code
| 112
| 290
|
export class Money{
    private _value: number;
    private _currency: string;

    constructor(value : number, currency: string) {
        this._value = value;
        this._currency = currency;
    }

    /** Create a money value in the given currency. */
    public static of(value : number, currency : string) : Money{
        return new Money(value, currency);
    }

    /** Zero amount with an empty currency; acts as a neutral accumulator seed. */
    public static default(){
        return new Money(0, "");
    }

    /** Zero amount in a specific currency. */
    public static empty(currency : string){
        return new Money(0, currency);
    }

    /**
     * Add two amounts; the result carries `money`'s currency.
     * @throws Error when both operands have different non-empty currencies.
     */
    public add(money : Money) : Money{
        this.assertCompatibleCurrency(money);
        return new Money(this._value + money._value, money._currency);
    }

    /**
     * Subtract `money` from this amount; the result carries `money`'s currency.
     * @throws Error when both operands have different non-empty currencies.
     */
    public sub(money : Money) : Money{
        this.assertCompatibleCurrency(money);
        return new Money(this._value - money._value, money._currency);
    }

    // BUGFIX: the original guard (`=== && !currency` with an empty body) never
    // rejected anything. An empty currency is treated as a wildcard so values
    // built via default() can still be accumulated.
    private assertCompatibleCurrency(money : Money) : void {
        if (this._currency && money._currency && this._currency !== money._currency) {
            throw new Error(`Currency mismatch: ${this._currency} vs ${money._currency}`);
        }
    }

    public getValue() : number{
        return this._value;
    }

    public getCurrency() : string{
        return this._currency;
    }
}
| 13,055
|
https://github.com/AbacusPowers/zentheme/blob/master/sass/forms/_fields.scss
|
Github Open Source
|
Open Source
|
MIT
| null |
zentheme
|
AbacusPowers
|
SCSS
|
Code
| 39
| 169
|
// Shared look for all single-line text entries and textareas.
input[type="text"],
input[type="email"],
input[type="url"],
input[type="password"],
input[type="search"],
textarea {
  max-width: 100%;
  color: $text-black-tertiary;
  // No border colour given, so it falls back to the element's currentColor.
  border: 1px solid ;
  border-radius: 3px;
  &:focus {
    // Darken the text slightly while the field has focus.
    color: $text-black-secondary;
  }
}

// Padding applies only to single-line fields.
input[type="text"],
input[type="email"],
input[type="url"],
input[type="password"],
input[type="search"] {
  padding: 3px;
}

// Textareas stretch full-width and only pad the left edge.
textarea {
  padding-left: 3px;
  width: 100%;
}
| 28,702
|
https://github.com/jeenlee/Aoite/blob/master/src/core/Aoite.Tests/Aoite/Reflection/SampleModel/Generics/AbstractGenericBase.cs
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
Aoite
|
jeenlee
|
C#
|
Code
| 20
| 51
|
namespace Aoite.ReflectionTest.SampleModel.Generics
{
    /// <summary>
    /// Sample generic base type used by reflection tests; exposes a single
    /// virtual property with a protected setter.
    /// </summary>
    internal abstract class AbstractGenericBase<T>
    {
        public virtual T Value { get; protected set; }
    }
}
| 30,166
|
https://github.com/splunk/syntheticsclient/blob/master/syntheticsclient/create_browsercheck.go
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
syntheticsclient
|
splunk
|
Go
|
Code
| 190
| 430
|
// Copyright 2021 Splunk, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package syntheticsclient
import (
"bytes"
"encoding/json"
)
// parseCreateBrowserCheckResponse decodes the JSON body returned by the
// create-browser-check endpoint into a BrowserCheckResponse.
// Returns a nil response and the decode error when unmarshalling fails.
func parseCreateBrowserCheckResponse(response string) (*BrowserCheckResponse, error) {
	var createBrowserCheck BrowserCheckResponse
	// Idiom fix: check the error inline and return an explicit nil on success
	// instead of re-returning the stale err variable.
	if err := json.Unmarshal([]byte(response), &createBrowserCheck); err != nil {
		return nil, err
	}
	return &createBrowserCheck, nil
}
// CreateBrowserCheck creates a new real-browser synthetic check by POSTing
// the given details to /v2/checks/real_browsers.
// It returns the parsed check, the raw request/response details (useful for
// debugging even on failure), and any error encountered. On a marshal error
// both results are nil; on an API error only the details are returned.
func (c Client) CreateBrowserCheck(browserCheckDetails *BrowserCheckInput) (*BrowserCheckResponse, *RequestDetails, error) {
body, err := json.Marshal(browserCheckDetails)
if err != nil {
return nil, nil, err
}
details, err := c.makePublicAPICall("POST", "/v2/checks/real_browsers", bytes.NewBuffer(body), nil)
if err != nil {
return nil, details, err
}
// Parse failure still surfaces the partially-decoded check and details.
newBrowserCheck, err := parseCreateBrowserCheckResponse(details.ResponseBody)
if err != nil {
return newBrowserCheck, details, err
}
return newBrowserCheck, details, nil
}
| 25,775
|
https://github.com/bkonk/dicom2roi/blob/master/Frameworks/OsiriXAPI.framework/Versions/A/Headers/OSIVoxel.h
|
Github Open Source
|
Open Source
|
BSL-1.0
| 2,021
|
dicom2roi
|
bkonk
|
Objective-C
|
Code
| 209
| 578
|
/*=========================================================================
Program: OsiriX
Copyright (c) 2010 - 2019 Pixmeo SARL
266 rue de Bernex
CH-1233 Bernex
Switzerland
All rights reserved.
=========================================================================*/
#import <Cocoa/Cocoa.h>
@class Point3D;
/** \brief Represents a Voxel
*
* Represents a Voxel
* Has x, y, and z positions as float
*/
@interface OSIVoxel : NSObject {
float _x;
float _y;
float _z;
NSNumber *_value;
float _voxelWidth;
float _voxelHeight;
float _voxelDepth;
id _userInfo;
}
// Physical dimensions of the voxel (units not specified here — presumably mm; confirm against callers).
@property float voxelWidth;
@property float voxelHeight;
@property float voxelDepth;
// Position of the voxel.
@property float x;
@property float y;
@property float z;
// Pixel value at this voxel, boxed as NSNumber (copied on assignment).
@property (copy, readwrite) NSNumber *value;
// Arbitrary caller-owned context object (retained).
@property (retain, readwrite) id userInfo;
/** Set the x, y and z position in one call. */
- (void) setX:(float)x y:(float)y z:(float)z;
/** Initialize with x, y, z position and pixel value. */
- (id)initWithX:(float)x y:(float)y z:(float)z value:(NSNumber *)value;
/** Initialize with a 2D image point plus the slice index as z. */
- (id)initWithPoint:(NSPoint)point slice:(long)slice value:(NSNumber *)value;
/** Initialize from a Point3D (no pixel value). */
- (id)initWithPoint3D:(Point3D *)point3D;
/** Convenience constructor: x, y, z position and pixel value. */
+ (id)pointWithX:(float)x y:(float)y z:(float)z value:(NSNumber *)value;
/** Convenience constructor: 2D image point plus slice index. */
+ (id)pointWithNSPoint:(NSPoint)point slice:(long)slice value:(NSNumber *)value;
/** Convenience constructor: from a Point3D. */
+ (id)pointWithPoint3D:(Point3D *)point3D;
/** Export this voxel to an XML-serializable dictionary. */
-(NSMutableDictionary*) exportToXML;
/** Initialize from a dictionary produced by exportToXML. */
-(id) initWithDictionary: (NSDictionary*) xml;
@end
| 15,069
|
https://github.com/getlantern/lantern-archive/blob/master/install.bash
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,017
|
lantern-archive
|
getlantern
|
Shell
|
Code
| 47
| 125
|
#!/usr/bin/env bash
# Build script: copy pluggable transports, verify maven is present, then
# package the project (skipping tests). Aborts on the first failed step.

# Print an error message to stderr and abort with a non-zero status.
# (Quoting "$*" preserves spacing; the original echoed unquoted to stdout.)
function die() {
    echo "$*" >&2
    exit 1
}

./copypt.bash || die "Could not copy pluggable transports?"
mvn --version || die "Please install maven from http://maven.apache.org"
rm -f target/lantern*-small.jar || die "Could not remove old jar?"
mvn -U package -Dmaven.artifact.threads=1 -Dmaven.test.skip=true || die "Could not package"
| 45,527
|
https://github.com/Irvingsoft/sodo-platform/blob/master/sodo-common/sodo-log-starter/src/main/java/cool/sodo/log/starter/handler/AsyncExceptionHandler.java
|
Github Open Source
|
Open Source
|
MulanPSL-1.0
| 2,022
|
sodo-platform
|
Irvingsoft
|
Java
|
Code
| 77
| 306
|
package cool.sodo.log.starter.handler;
import cool.sodo.common.base.util.JsonUtil;
import cool.sodo.log.starter.publisher.ErrorLogPublisher;
import lombok.extern.slf4j.Slf4j;
import org.springframework.aop.interceptor.AsyncUncaughtExceptionHandler;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.lang.reflect.Method;
/**
* 异步任务异常处理器
*
* @author TimeChaser
* @date 2021/6/17 22:05
*/
@Component
@Slf4j
public class AsyncExceptionHandler implements AsyncUncaughtExceptionHandler {
@Resource
private ErrorLogPublisher errorLogPublisher;
@Override
public void handleUncaughtException(Throwable throwable, Method method, Object... objects) {
log.error("Async execution error on method: " + method.toString() +
", with message: " + throwable.getMessage() +
", with parameters: " + JsonUtil.toJsonString(objects));
errorLogPublisher.publishEvent(null, throwable, JsonUtil.toJsonString(objects));
}
}
| 37,450
|
https://github.com/logicmoo/old_logicmoo_workspace/blob/master/pack/logicmoo_experimental/prolog/logicmoo/dra/swi_toplevel.pl
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
old_logicmoo_workspace
|
logicmoo
|
Perl
|
Code
| 826
| 5,539
|
#! swipl -L8G -G8G -T8G -f
/** <module> MUD server startup script in SWI-Prolog
*/
end_of_file.
:-module(swi_toplevel,[]).
% :- shell(cls).
:- dynamic user:file_search_path/2.
:- multifile user:file_search_path/2.
:- prolog_load_context(directory,H),absolute_file_name('../../../..',A,[file_type(directory),relative_to(H)]),asserta(user:file_search_path(pack,A)).
:- attach_packs.
:- initialization(attach_packs).
property_pred((table),is_tabled).
property_pred(builtin,is_builtin).
property_pred(never_table,is_never_tabled).
property_pred(old_first,is_old_first).
property_pred(coinductive0,is_coinductive0).
property_pred(coinductive1,is_coinductive1).
property_pred(topl,is_topl).
property_pred(support,is_support).
property_pred(local,is_local).
property_pred((traces),is_tracing).
property_pred(set_default_extension,default_extension).
property_pred(hilog,is_hilog).
:- forall(property_pred(D,F) ,((DG=..[D,_],asserta((DG:-execute_directive(DG)))),
( \+ current_op(_,fy,D) -> op(900,fy,D) ; true), multifile(F/1),dynamic(F/1))).
% :- multifile sandbox:safe_primitive/1.
% :-asserta((sandbox:safe_primitive(Z):-wdmsg(Z))).
%%% ON :- initialization( profiler(_,walltime) ).
%%% ON :- initialization(user:use_module(library(swi/pce_profile))).
:- user:ensure_loaded(library(ape/get_ape_results)).
:- user:ensure_loaded(library(logicmoo/util/logicmoo_util_all)).
% :- op(1150, fy, (dynamic)).
:- op(900, fy, (dynamic)).
:- op(900, fx, (dynamic)).
:- use_module(library(coinduction),
[ (coinductive)/1,
op(1150, fx, (coinductive))
]).
retract_all0(R):- ignore((retract(R),fail)).
pf(F):- must(retract_all0(topl(_))),
to_filename(F,FC),
must([(FC)]),!. % ,once(ignore((run_curent_test,sleep(2)))))).
run_curent_test:- show_call_failure(if_defined(go,if_defined(test,if_defined(top)))),!.
run_curent_test:- top(_),!,forall(top(I),time((ignore(show_call((nonvar(I),query(I))))))),!.
% Cache of previously resolved goal -> module pairs.
:- dynamic(is_clause_module/2).
% clause_module(+Clause, -Module): resolve the module a clause or goal
% belongs to, stripping an explicit M:Goal qualifier first; falls back to
% the qualifier itself when nothing else matches.
clause_module(_:Goal,M):- clause_module0(Goal,M),!.
clause_module(Goal,M):- clause_module0(Goal,M),!.
clause_module(M:_ ,M):-atom(M).
% clause_module0/2 descends through directives, clause heads and
% conjunctions to find the goal whose module should be reported, consulting
% the cache first and memoizing successful lookups.
clause_module0(:- (Goal) ,M):- !, clause_module0(Goal,M).
clause_module0((Goal:- _ ),M):- !, clause_module0(Goal,M).
clause_module0((Goal , _ ),M):- !, clause_module0(Goal,M).
clause_module0(Goal,M):-is_clause_module(Goal,M),!.
clause_module0(Goal,M):-clause_module1(Goal,M),asserta(is_clause_module(Goal,M)),!.
% clause_module1/2 tries progressively broader predicate/source/clause
% properties to discover the defining module.
clause_module1(Goal,M):-predicate_property(Goal,imported_from(M)),!.
clause_module1(Goal,M):-current_predicate(_,M:Goal),!.
clause_module1(Goal,M):-source_file(Goal,F), module_property(M,file(F)),!.
clause_module1(Goal,M):-predicate_property(_:Goal,imported_from(M)),!.
clause_module1(Goal,M):-current_module(M),current_predicate(_,M:Goal),!.
clause_module1(Goal,M):-clause(Goal,_,Ref),clause_propery(Ref,module(M)).
clause_module1(Goal,M):-clause(_:Goal,_,Ref),clause_propery(Ref,module(M)).
%% load( + file name ):
%% Initialise, then load a program from this file, processing directives and
%% queries. After this is done, enter interactive mode.
:-export(load/1).
load( FileName ) :-
must_det_l((
setup,
initialise, % provided by a metainterpreter
process_file( FileName ),!,
check_general_consistency,
program_loaded)),!. % provided by a metainterpreter
cputime(X):- statistics(cputime,X).
%% process_file( + file name ):
%% Load a program from this file, processing directives and queries.
% :- mode process_file( + ).
do_process_file( FileName ) :-
open_the_file( FileName, ProgStream ),
process_input( ProgStream ),!,
sanity(at_end_of_stream(ProgStream)),
% atom_to_memory_file('',Null_stream),
% file_directory_name(FileName,D),
stream_property(ProgStream,file_name(FN)),
load_files(FN,[derived_from(FileName),register(true),stream(ProgStream)]),
close( ProgStream ),!.
%
current_dirs(DO):-no_repeats(DO,(current_dirs0(D),(atom_concat(DO,'/',D)->true;DO=D))).
current_dirs0(D):- prolog_load_context(directory,D).
current_dirs0(D):- working_directory(D,D).
current_dirs0(D):- current_stream(_,read,Y), stream_property(Y,file_name(FN)), file_directory_name(FN,D).
current_dirs0(D):- stream_property(_,file_name(FN)), file_directory_name(FN,D).
current_dirs0(D):- expand_file_name('*/',X),member(E,X),absolute_file_name(E,D),exists_directory(D).
current_dirs0(D):- expand_file_name('*/*/',X),member(E,X),absolute_file_name(E,D),exists_directory(D).
current_dirs0(D):- expand_file_name('*/*/*/',X),member(E,X),absolute_file_name(E,D),exists_directory(D).
current_dirs0(D):- source_file_property(FN, modified(_)), file_directory_name(FN,D).
current_dirs0('.').
to_filename( FileName, FileName ) :- atomic(FileName),exists_file(FileName),!.
to_filename( FileName, AFN ) :-
must(default_extension( Ext );Ext='.tlp'),
must((current_dirs(D),
member(TF,[false,true]),
absolute_file_name(FileName,AFN,[solutions(all),expand(TF),access(read),relative_to(D),file_errors(fail),extensions(['',Ext,'.pl','.tlp','.clp','.P'])]),
exists_file(AFN))),!.
:-dynamic(is_pred_metainterp/2).
pred_metainterp(Pred,M):- is_pred_metainterp(Pred,M),!.
pred_metainterp(Pred,M):-
is_tabled(Pred)-> M = is_tabled ;
is_coinductive1(Pred)-> M = is_coinductive1 ;
is_support(Pred)-> M = is_support ;
is_builtin(Pred)-> M = is_builtin ;
is_never_tabled(Pred)-> M = is_never_tabled.
pred_metainterp(Pred,M):- source_file(Pred,File),is_file_meta(File,M),!.
pred_metainterp(Pred,M):- might_be_clause_meta(Pred)-> M = is_never_tabled.
pred_metainterp(_ ,unknown).
add_file_meta(FileName,Type):-to_filename(FileName,File),assert_if_new(is_file_meta(File,Type)).
:-add_file_meta('compatibility_utilities_swi',is_builtin).
:-add_file_meta('swi_toplevel',is_builtin).
:-add_file_meta('dra_common',is_builtin).
%% top:
%% Interactive mode. Each term that is not a directive or a query is treated
%% as an abbreviated query. After displaying the results of each query read
%% characters upto the nearest newline: if the first character is ";",
%% backtrack to find alternative solutions.
%% Exit upon encountering end of file.
%% NOTE: When running on Sicstus, each term must come on a separate line: after
%% reading the term the rest of the line is ignored, to facilitate
%% interaction with the user when asking whether more answers are needed.
:- user:dynamic(expand_query/4).
:- user:multifile(expand_query/4).
user:expand_query(_Goal, _Expanded, _Bindings, _ExpandedBindings):-fail.
:- user:dynamic(expand_answer/2).
:- user:multifile(expand_answer/2).
user:expand_answer(_Goal, _Expanded):-fail.
/********************************
* EXECUTION *
********************************/
:-meta_predicate user:dra_execute(0, ?).
:-meta_predicate user:user:residue_vars(0, ?).
:-meta_predicate user:user:dra_execute_goal22(0, ?).
user:dra_execute(Var, _) :-
var(Var), !,
print_message(informational, var_query(Var)),
fail.
user:dra_execute(end_of_file, _) :- !,
print_message(query, query(eof)).
user:dra_execute(Goal, Bindings) :-
'$module'(TypeIn, TypeIn),
'$dwim_correct_goal'(TypeIn:Goal, Bindings, Corrected), !,
setup_call_cleanup('$set_source_module'(M0, TypeIn),
expand_goal(Corrected, Expanded),
'$set_source_module'(_, M0)),
print_message(silent, toplevel_goal(Expanded, Bindings)),
user:dra_execute_goal22(Expanded, Bindings).
user:dra_execute(_, _) :-
notrace,
print_message(query, query(no)),
fail.
user:dra_execute_goal22(Goal, Bindings) :-
'$toplevel':restore_debug,
user:residue_vars(Goal, Vars),
deterministic(Det),
true,
( '$toplevel':save_debug
; '$toplevel':restore_debug, fail
),
flush_output(user_output),
'$toplevel':call_expand_answer(Bindings, NewBindings),
( \+ \+ '$toplevel':write_bindings(NewBindings, Vars, Det)
-> !, fail
).
user:dra_execute_goal22(_, _) :-
'$toplevel':save_debug,
print_message(query, query(no)),
fail.
user:residue_vars(Goal, Vars) :-
current_prolog_flag(toplevel_residue_vars, true), !,
call_residue_vars(query(Goal), Vars).
user:residue_vars(Goal, []) :-
query(Goal).
user:dra_prompt(Module, BrekLev, Prompt) :-
current_prolog_flag(toplevel_prompt, PAtom),
atom_codes(PAtom, P0),
( Module \== user
-> '$toplevel':'$substitute'('~m', [Module, ': '], P0, P1)
; '$toplevel':'$substitute'('~m', [], P0, P1)
),
( BrekLev > 0
-> '$toplevel':'$substitute'('~l', ['[', BrekLev, '] '], P1, P2)
; '$toplevel':'$substitute'('~l', [], P1, P2)
),
current_prolog_flag(query_debug_settings, debug(Debugging, Tracing)),
( Tracing == true
-> '$toplevel':'$substitute'('~d', ['[traced] '], P2, P3)
; Debugging == true
-> '$toplevel':'$substitute'('~d', ['[debug] '], P2, P3)
; '$toplevel':'$substitute'('~d', [], P2, P3)
),
atom_chars(Prompt, P3).
%:- call(user:rl_add_history(ls)).
%:- call(user:rl_add_history('traced,go')).
:- '$toplevel':setup_history.
user:listing_mpred_hook(What):- debugOnError(dra_listing(What)).
dra_listing(What):-get_pi(What,PI),PI\=@=What,!,dra_listing(PI).
dra_listing(Matches):- ignore(dra_listing_0(Matches)),!.
dra_listing_0(MatchesIn):-
forall(property_pred(DECLF,DBF),
(ignore(( DB=..[DBF,Matches],
clause(DB,true),
get_functor(Matches,PI0),
get_functor(MatchesIn,PI1),!,
PI0==PI1,
functor(Matches,F,A),
Decl=..[DECLF,F/A],
format('~N:- ~q.~n',[Decl]))))).
set_meta(Goal, Prop):- is_pred_metainterp(Goal,Prop),!.
set_meta(Goal, Prop):- functor(Goal,F,A),functor(TGoal,F,A),
show_call(set_meta0(TGoal, Prop)),!,
retract_all0(is_pred_metainterp(TGoal,_)),
asserta_if_new(is_pred_metainterp(TGoal,Prop)).
set_meta0(TGoal,is_builtin):-
retract_all0(is_tabled(TGoal)),(predicate_property(TGoal,dynamic)->retract_all0((TGoal:-!,query(TGoal)));true),
retract_all0(is_never_tabled(TGoal)),
asserta_if_new(is_builtin(TGoal)).
set_meta0(TGoal,is_never_tabled):-
retract_all0(is_tabled(TGoal)),(predicate_property(TGoal,dynamic)->retract_all0((TGoal:-!,query(TGoal)));true),
retract_all0(is_builtin(TGoal)),
asserta_if_new(is_never_tabled(TGoal)).
set_meta0(TGoal,is_tabled):- asserta_if_new(is_tabled(TGoal)),
retract_all0(is_never_tabled(TGoal)),
retract_all0(is_builtin(TGoal)),
(predicate_property(TGoal,dynamic)->asserta_new((TGoal:-!,query(TGoal)));true).
:-dynamic(clause_meta/1).
might_be_clause_meta( Goal ):- compound(Goal), \+ \+ (arg(_,Goal,[_|_])),!.
%legal_directive(M:P):-atom(M),M:legal_directive(P).
%legal_directive(P):-compound(P),functor(P,F,1),property_pred(F).
%:-use_module(boot('$toplevel'),[]).
% '$query_loop'/0
(tprolog) :- \+ lp_system( eclipse ),!,
user:w_tl(op(0,fy,(traced)),
((( current_prolog_flag(break_level, BreakLev)
-> true
; BreakLev = -1
),
repeat,
( '$module'(TypeIn, TypeIn),
'$toplevel':((( (stream_property(user_input, tty(true)),write('tprolog '))
-> user:dra_prompt(TypeIn, BreakLev, Prompt),
prompt(Old, '| ')
; Prompt = '', prompt(Old, '') ),
trim_stacks,
'$toplevel':read_query(Prompt, Query, Bindings),
prompt(_, Old),
call_expand_query(Query, ExpandedQuery,
Bindings, ExpandedBindings)
-> expand_goal(ExpandedQuery, Goal))),
(user:dra_execute(Goal, ExpandedBindings),fail) )))), !.
initialize_table:-must(initialise).
print_table_statistics:-print_statistics.
%load(P):-must(prog0(P)),!.
:- user:ensure_loaded(library(dra/tabling3/dra_table_assert)).
%:- user:ensure_loaded(library(dra/tabling3/dra_table_record)).
:- user:ensure_loaded(library(dra/tabling3/compatibility_utilities_swi)).
:- user:ensure_loaded(library(dra/tabling3/dra_common)).
% c + r = 7.949 seconds
/*
% :- pf(library('dra/tabling3/examples/XSB/fib.tlp') ).
:- pf(library('dra/tabling3/examples/co_t.tlp') ).
:- pf(library('dra/tabling3/examples/coind2.tlp') ).
% :- pf(library('dra/tabling3/examples/LTL/v.pl') ).
%:- pf(library('dra/tabling3/examples/mini_graph.tlp') ).
%:- pf(library('dra/tabling3/examples/mini_language.tlp') ).
:- pf(library('dra/tabling3/examples/paper_example.tlp') ).
:- pf(library('dra/tabling3/Bench/tabling3/run')).
:- pf(library('dra/tabling3/Bench/prolog/run')).
:- pf(library('dra/tabling3/Bench/clpfd/run')).
:- pf(library('dra/tabling3/Bench/aspclp/run')).
*/
t0:- time([library('dra/tabling3/examples/XSB/farmer.tlp')]).
tn:- time([library('dra/tabling3/examples/tnot1.tlp')]).
t1:- time(pf(library('dra/tabling3/examples/XSB/farmer.tlp') )),!.
t2:- time([library('dra/tabling3/examples/XSB/ham.tlp')]).
t2a:- time([library('dra/tabling3/examples/XSB/ham_auto.tlp')]).
t2b:- time(pf(library('dra/tabling3/examples/XSB/ham.tlp') )).
t3:- [(library('dra/tabling3/examples/graph.tlp') )].
t4:- pf(library('dra/tabling3/examples/module.tlp') ).
t4:- [(library('dra/tabling3/examples/paper_example.tlp') )].
t4:- pf(library('dra/tabling3/examples/conditional.clp') ).
t4:- pf(library('dra/tabling3/examples/simple1.tlp') ).
t4:- pf(library('dra/tabling3/examples/simple1_old_first.tlp') ).
t4:- pf(library('dra/tabling3/examples/conditional.clp') ).
t4:- pf(library('dra/tabling3/examples/small_comment_example.tlp') ).
t4:- pf(library('dra/tabling3/examples/coind_new.tlp') ).
t5:- consult('/devel/LogicmooDeveloperFramework/PrologMUD/packs/MUD_PDDL/prolog/dra/tabling3/Bench/tabling/tcl.pl').
% :- repeat,logOnErrorIgnore(prolog),fail.
user:term_expansion((?- G),_):- nonvar(G), format(atom(H),'~q .',[G]),user:rl_add_history(H),fail.
% user:goal_expansion(G,_):- G\=(_,_),G\=(_;_),\+predicate_property(G,_),format(atom(H),'~q .',[G]),user:rl_add_history(H),fail.
| 13,139
|
https://github.com/nikolajw/localizationnet/blob/master/Localization.Net/Parsing/IPatternTransformer.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
localizationnet
|
nikolajw
|
C#
|
Code
| 43
| 96
|
namespace Localization.Net.Parsing
{
/// <summary>
/// Implement this interface to change the default grammar's syntax by
/// rewriting patterns to and from the encoded form the parser understands.
/// </summary>
public interface IPatternTransformer
{
//TODO: Make a mechanism to update positions in error messages based on unencoded pattern
/// <summary>Rewrites a raw pattern into the parser's encoded syntax.</summary>
string Encode(string pattern);
/// <summary>Reverses <see cref="Encode"/>, recovering the raw pattern.</summary>
string Decode(string encodedPattern);
}
}
| 28,381
|
https://github.com/naoto/rinne/blob/master/spec/spec_helper.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,013
|
rinne
|
naoto
|
Ruby
|
Code
| 7
| 28
|
require 'rubygems'
# Start Coveralls coverage tracking before the library is loaded so all
# required files are instrumented.
require 'coveralls'
Coveralls.wear!
# Load the library under test.
require 'rinne'
| 48,653
|
https://github.com/ClaudePlos/nap_cash_registers/blob/master/src/main/java/kskowronski/lib/excel/Exporter.java
|
Github Open Source
|
Open Source
|
Unlicense
| null |
nap_cash_registers
|
ClaudePlos
|
Java
|
Code
| 43
| 181
|
package kskowronski.lib.excel;
import com.vaadin.flow.component.grid.Grid;
import com.vaadin.flow.server.InputStreamFactory;
import java.util.Map;
/**
 * Static factory for exporting a Vaadin {@link Grid}'s contents as a
 * downloadable Excel or CSV stream.
 */
public class Exporter {
// Utility class; not instantiable.
private Exporter(){}
/**
 * Builds an Excel download stream for the grid, using the given map to
 * label each column's header.
 */
public static <T> InputStreamFactory exportAsExcel(Grid<T> grid, Map<Grid.Column<T>, String> columnHeaders){
return new ExcelFileBuilder<>(grid, columnHeaders)::build;
}
/**
 * Builds a CSV download stream for the grid, using the given map to
 * label each column's header.
 */
public static <T> InputStreamFactory exportAsCSV(Grid<T> grid, Map<Grid.Column<T>, String> columnHeaders){
return new CSVFileBuilder<>(grid, columnHeaders)::build;
}
}
| 33,047
|
https://github.com/abueide/mage/blob/master/Mage.Tests/src/test/java/org/mage/test/cards/abilities/keywords/DecayedTest.java
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
mage
|
abueide
|
Java
|
Code
| 152
| 667
|
package org.mage.test.cards.abilities.keywords;
import mage.constants.PhaseStep;
import mage.constants.Zone;
import org.junit.Test;
import org.mage.test.serverside.base.CardTestPlayerBase;
/**
 * Tests for the Decayed keyword: a creature with decayed can't block, and
 * when it attacks, it is sacrificed at end of combat.
 */
public class DecayedTest extends CardTestPlayerBase {
// A decayed token (from Falcon Abomination) should be sacrificed after
// it attacks.
@Test
public void decayedToken() {
addCard(Zone.HAND, playerA, "Falcon Abomination", 1);
addCard(Zone.BATTLEFIELD, playerA, "Island", 3);
setStrictChooseMode(true);
castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Falcon Abomination");
attack(3, playerA, "Zombie Token");
setStopAt(3, PhaseStep.POSTCOMBAT_MAIN);
execute();
assertAllCommandsUsed();
// The creature itself survives; only the attacking decayed token is gone.
assertPermanentCount(playerA, "Falcon Abomination", 1);
assertPermanentCount(playerA, "Zombie Token", 0);
}
// A nontoken permanent returned with decayed (via Gisa) should likewise be
// sacrificed after attacking, ending up in its owner's graveyard.
@Test
public void decayedPermanent() {
addCard(Zone.BATTLEFIELD, playerA, "Gisa, Glorious Resurrector", 1);
addCard(Zone.BATTLEFIELD, playerB, "Grizzly Bears", 1);
addCard(Zone.HAND, playerA, "Doom Blade", 1);
addCard(Zone.BATTLEFIELD, playerA, "Swamp", 2);
setStrictChooseMode(true);
castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Doom Blade");
addTarget(playerA, "Grizzly Bears");
// Gisa - "If a creature an opponent controls would die, exile it instead."
checkExileCount("Gisa Exile Ability", 1, PhaseStep.POSTCOMBAT_MAIN, playerB, "Grizzly Bears", 1);
attack(5, playerA, "Grizzly Bears");
setStopAt(5, PhaseStep.POSTCOMBAT_MAIN);
execute();
assertAllCommandsUsed();
assertPermanentCount(playerA, "Gisa, Glorious Resurrector", 1);
assertPermanentCount(playerA, "Grizzly Bears", 0);
assertPermanentCount(playerB, "Grizzly Bears", 0);
assertExileCount("Grizzly Bears", 0);
// Grizzly Bears should sacrifice after combat and go to playerB's graveyard
assertGraveyardCount(playerB, "Grizzly Bears", 1);
}
}
| 14,761
|
https://github.com/Koknov/ProgrammingLabAutumn2018/blob/master/src/main/java/Field.java
|
Github Open Source
|
Open Source
|
MIT
| null |
ProgrammingLabAutumn2018
|
Koknov
|
Java
|
Code
| 132
| 338
|
import java.util.ArrayList;
/**
 * Static holder for the board dimensions and precomputed coordinate lists.
 */
class Field {

    // Board dimensions; assigned once via setSize().
    private static Coord size;
    // Every coordinate on the board, row by row (y outer, x inner).
    private static ArrayList<Coord> allCoords;

    /** Sets the board size and rebuilds the cached coordinate list. */
    static void setSize(Coord size) {
        Field.size = size;
        allCoords = new ArrayList<>();
        for (int y = 0; y < size.y; y++) {
            for (int x = 0; x < size.x; x++) {
                allCoords.add(new Coord(x, y));
            }
        }
    }

    /** Returns the current board size. */
    static Coord getSize() {
        return size;
    }

    /** Returns the cached list of every board coordinate. */
    static ArrayList<Coord> getAllCoords() {
        return allCoords;
    }

    /** True when the coordinate lies on the board. */
    static boolean inRange(Coord coord) {
        boolean xInside = coord.x >= 0 && coord.x < size.x;
        boolean yInside = coord.y >= 0 && coord.y < size.y;
        return xInside && yInside;
    }

    /**
     * Returns the in-range neighbours of the given cell (up to 8),
     * excluding the cell itself.
     */
    static ArrayList<Coord> getCoordAround(Coord coord) {
        ArrayList<Coord> neighbours = new ArrayList<>();
        for (int dx = -1; dx <= 1; dx++) {
            for (int dy = -1; dy <= 1; dy++) {
                if (dx == 0 && dy == 0) {
                    continue; // skip the centre cell itself
                }
                Coord candidate = new Coord(coord.x + dx, coord.y + dy);
                if (inRange(candidate)) {
                    neighbours.add(candidate);
                }
            }
        }
        return neighbours;
    }
}
| 22,230
|
https://github.com/jsdelivrbot/dockerfiles-1/blob/master/data/d/dingmingk/java-jdk6-oracle/Dockerfile
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
dockerfiles-1
|
jsdelivrbot
|
Dockerfile
|
Code
| 85
| 301
|
FROM ubuntu:trusty
MAINTAINER dingmingk <dingmingk@gmail.com>

# Use the Asia/Shanghai timezone inside the image.
RUN cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime

# Install Oracle JDK 6 from the WebUpd8 PPA, pre-accepting the license
# prompt via debconf, then remove build-only packages and clean apt caches.
# NOTE(review): the original broke the shell continuation before the
# debconf-set-selections pipe (no trailing backslash), which made this RUN
# invalid; the echo|debconf pipeline is rejoined onto one continued line.
RUN apt-get update -y \
 && apt-get remove -y openjdk* \
 && apt-get install -y software-properties-common curl bzip2 unzip xz-utils \
 && add-apt-repository ppa:webupd8team/java -y \
 && apt-get update -y \
 && echo oracle-java6-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections \
 && apt-get install -y oracle-java6-installer \
 && apt-get install -y oracle-java6-set-default \
 && apt-get remove software-properties-common -y \
 && apt-get autoremove -y \
 && apt-get clean

ENV JAVA_HOME /usr/lib/jvm/java-6-oracle
ENV JAVA_OPTS -Duser.timezone=Asia/Shanghai
ENV LANG C.UTF-8
| 48,981
|
https://github.com/slkrk/calendar/blob/master/tests/Aeon/Collection/Tests/Unit/DayValueSetTest.php
|
Github Open Source
|
Open Source
|
MIT
| null |
calendar
|
slkrk
|
PHP
|
Code
| 860
| 4,279
|
<?php
declare(strict_types=1);
namespace Aeon\Collection\Tests\Unit;
use Aeon\Calendar\Exception\InvalidArgumentException;
use Aeon\Calendar\Gregorian\Day;
use Aeon\Collection\DayValue;
use Aeon\Collection\DayValueSet;
use PHPUnit\Framework\TestCase;
final class DayValueSetTest extends TestCase
{
public function test_set_with_duplicated_days() : void
{
$this->expectExceptionMessage('Set does not allow duplicated days, day 2020-01-01 is duplicated');
$this->expectException(InvalidArgumentException::class);
new DayValueSet(
DayValue::createEmpty(Day::fromString('2020-01-01')),
DayValue::createEmpty(Day::fromString('2020-01-01'))
);
}
public function test_set_with_empty_values() : void
{
$set = DayValueSet::createEmpty(Day::fromString('2020-01-01'), Day::fromString('2020-01-10'));
$this->assertCount(10, $set);
$this->assertSame(
[null, null, null, null, null, null, null, null, null, null],
$set->values()
);
}
public function test_set_map() : void
{
$set = DayValueSet::createEmpty(Day::fromString('2020-01-01'), Day::fromString('2020-01-10'));
$set = $set->map(fn (DayValue $dayValue) : DayValue => new DayValue($dayValue->day(), $dayValue->day()->toString()));
$this->assertCount(10, $set);
$this->assertSame(
[
'2020-01-01',
'2020-01-02',
'2020-01-03',
'2020-01-04',
'2020-01-05',
'2020-01-06',
'2020-01-07',
'2020-01-08',
'2020-01-09',
'2020-01-10',
],
$set->values()
);
$this->assertCount(10, $set->toDays());
}
public function test_set_filter() : void
{
$set = DayValueSet::createEmpty(Day::fromString('2020-01-01'), Day::fromString('2020-02-29'));
$filteredSet = $set->filter(fn (DayValue $dayValue) : bool => $dayValue->day()->month()->number() === 1);
$this->assertCount(31, $filteredSet);
}
public function test_set_reduce() : void
{
$set = DayValueSet::createWith(Day::fromString('2020-01-01'), Day::fromString('2020-02-29'), 1);
$numberOfDays = $set->reduce(fn (int $initial, DayValue $dayValue) : int => $initial + 1, 0);
$this->assertSame(60, $numberOfDays);
}
public function test_set_sort_ascending() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-10'), 100),
new DayValue(Day::fromString('2020-01-01'), 100),
new DayValue(Day::fromString('2020-01-07'), 100),
new DayValue(Day::fromString('2020-01-03'), 100),
new DayValue(Day::fromString('2020-01-05'), 100),
new DayValue(Day::fromString('2020-01-04'), 100),
new DayValue(Day::fromString('2020-01-08'), 100),
new DayValue(Day::fromString('2020-01-02'), 100),
new DayValue(Day::fromString('2020-01-06'), 100),
new DayValue(Day::fromString('2020-01-09'), 100),
);
$sortedSet = $set->sortAscending();
$this->assertSame(
[
'2020-01-01',
'2020-01-02',
'2020-01-03',
'2020-01-04',
'2020-01-05',
'2020-01-06',
'2020-01-07',
'2020-01-08',
'2020-01-09',
'2020-01-10',
],
$sortedSet->toDays()->map(fn (Day $day) => $day->toString())
);
$this->assertSame('2020-01-01', $sortedSet->first()->day()->toString());
$this->assertSame('2020-01-10', $sortedSet->last()->day()->toString());
}
public function test_set_sort_descending() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-10'), 100),
new DayValue(Day::fromString('2020-01-01'), 100),
new DayValue(Day::fromString('2020-01-07'), 100),
new DayValue(Day::fromString('2020-01-03'), 100),
new DayValue(Day::fromString('2020-01-05'), 100),
new DayValue(Day::fromString('2020-01-04'), 100),
new DayValue(Day::fromString('2020-01-08'), 100),
new DayValue(Day::fromString('2020-01-02'), 100),
new DayValue(Day::fromString('2020-01-06'), 100),
new DayValue(Day::fromString('2020-01-09'), 100),
);
$sortedSet = $set->sortDescending();
$this->assertSame(
\array_reverse([
'2020-01-01',
'2020-01-02',
'2020-01-03',
'2020-01-04',
'2020-01-05',
'2020-01-06',
'2020-01-07',
'2020-01-08',
'2020-01-09',
'2020-01-10',
]),
$sortedSet->toDays()->map(fn (Day $day) => $day->toString())
);
}
public function test_set_fill_missing_with() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-10'), 20),
new DayValue(Day::fromString('2020-01-01'), 20),
);
$set = $set->put(
new DayValue(Day::fromString('2020-01-03'), 100),
new DayValue(Day::fromString('2020-01-05'), 100),
new DayValue(Day::fromString('2020-01-04'), 100),
);
$set = $set->fillMissingWith(50)->sortAscending();
$this->assertSame(
[
0 => 20,
1 => 50,
2 => 100,
3 => 100,
4 => 100,
5 => 50,
6 => 50,
7 => 50,
8 => 50,
9 => 20,
],
$set->values()
);
}
public function test_set_remove() : void
{
$set = DayValueSet::createEmpty(Day::fromString('2020-01-01'), Day::fromString('2020-01-31'));
$set = $set->remove(Day::fromString('2020-01-10'));
$this->assertCount(30, $set);
$this->assertFalse($set->has(Day::fromString('2020-01-10')));
}
public function test_get_non_existing_day() : void
{
$set = DayValueSet::createEmpty(Day::fromString('2020-01-01'), Day::fromString('2020-01-31'));
$set = $set->remove(Day::fromString('2020-01-10'));
$this->expectException(InvalidArgumentException::class);
$this->expectExceptionMessage('There is no value for day 2020-01-10');
$this->assertFalse($set->get(Day::fromString('2020-01-10')));
}
public function test_take() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
);
$this->assertEquals(
new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
),
$set->take(2)
);
}
public function test_take_more_than_available() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
);
$this->assertEquals(
new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
),
$set->take(6)
);
}
public function test_take_negative_days() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
);
$this->expectException(InvalidArgumentException::class);
$this->expectExceptionMessage('Take does not accept negative number of days');
$set->take(-1);
}
public function test_take_zero_days() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
);
$this->assertEquals(
new DayValueSet(),
$set->take(0)
);
}
public function test_slice() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
);
$this->assertEquals(
new DayValueSet(
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
),
$set->slice(1, 2)
);
}
public function test_slice_more_than_available() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
);
$this->assertEquals(
new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
),
$set->slice(0, 10)
);
}
public function test_slice_with_zero_offset_and_zero_days() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
);
$this->assertEquals(
new DayValueSet(),
$set->slice(0, 0)
);
}
public function test_slice_negative_days() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
);
$this->expectException(InvalidArgumentException::class);
$this->expectExceptionMessage('Slice does not accept negative days');
$set->slice(0, -1);
}
public function test_slice_negative_offset() : void
{
$set = new DayValueSet(
new DayValue(Day::fromString('2020-01-01'), 10),
new DayValue(Day::fromString('2020-01-02'), 20),
new DayValue(Day::fromString('2020-01-03'), 30),
new DayValue(Day::fromString('2020-01-04'), 40),
);
$this->expectException(InvalidArgumentException::class);
$this->expectExceptionMessage('Slice does not accept negative offset');
$set->slice(-1, 5);
}
public function test_drop() : void
{
    // Dropping two leading entries keeps only the remaining tail.
    $set = new DayValueSet(...[
        new DayValue(Day::fromString('2020-01-01'), 10),
        new DayValue(Day::fromString('2020-01-02'), 20),
        new DayValue(Day::fromString('2020-01-03'), 30),
        new DayValue(Day::fromString('2020-01-04'), 40),
    ]);

    $expected = new DayValueSet(
        new DayValue(Day::fromString('2020-01-03'), 30),
        new DayValue(Day::fromString('2020-01-04'), 40)
    );

    $this->assertEquals($expected, $set->drop(2));
}
public function test_drop_zero_offset() : void
{
    // Dropping nothing must return a set equal to the original.
    $days = [
        new DayValue(Day::fromString('2020-01-01'), 10),
        new DayValue(Day::fromString('2020-01-02'), 20),
        new DayValue(Day::fromString('2020-01-03'), 30),
        new DayValue(Day::fromString('2020-01-04'), 40),
    ];

    $this->assertEquals(
        new DayValueSet(...$days),
        (new DayValueSet(...$days))->drop(0)
    );
}
public function test_drop_negative_offset() : void
{
    $set = new DayValueSet(...[
        new DayValue(Day::fromString('2020-01-01'), 10),
        new DayValue(Day::fromString('2020-01-02'), 20),
        new DayValue(Day::fromString('2020-01-03'), 30),
        new DayValue(Day::fromString('2020-01-04'), 40),
    ]);

    // The guard must reject a negative offset before dropping.
    $this->expectException(InvalidArgumentException::class);
    $this->expectExceptionMessage('Drop does not accept negative offset');

    $set->drop(-1);
}
public function test_drop_more_than_available() : void
{
    // Dropping past the end of the set leaves an empty set.
    $set = new DayValueSet(...[
        new DayValue(Day::fromString('2020-01-01'), 10),
        new DayValue(Day::fromString('2020-01-02'), 20),
        new DayValue(Day::fromString('2020-01-03'), 30),
        new DayValue(Day::fromString('2020-01-04'), 40),
    ]);

    $this->assertEquals(new DayValueSet(), $set->drop(10));
}
}
| 36,336
|
https://github.com/satishchor/angular10/blob/master/angulapp/src/app/adventure/land/land.module.ts
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
angular10
|
satishchor
|
TypeScript
|
Code
| 151
| 484
|
import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common";
import { FormsModule, ReactiveFormsModule } from "@angular/forms";
import { SnotifyModule, SnotifyService, ToastDefaults } from "ng-snotify";
import { UiModule } from "../../shared/ui/ui.module";
import { LandRouterModule } from "./land-routing.module";
import { CyclingComponent } from "./cycling/cycling.component";
import { HikingComponent } from "./hiking/hiking.component";
import { NgCycleComponent } from "./cycling/ngcycle/ngcycle.component";
import { ViewChildComponent } from "./cycling/viewchild/viewchild.component";
import { ViewChild2Component } from "./cycling/viewchild2/viewchild2.component";
import { TimerComponent } from "./cycling/timer/timer.component";
import { LocationViewComponent } from "./hiking/locationview/locationview.component";
import { ChildViewComponent } from "./hiking/childview/childview.component";
import { RunningComponent } from "./running/running.component";
import { AstronautComponent } from "./running/astronaut/astronaut.component";
import { ShortNamePipe } from "./hiking/shortname.pipe";
import { HillComponent } from "./trekking/hill.component";
import { UserComponent } from "./trekking/user.component";
/**
 * Feature module for the "land" adventure area.
 *
 * Declares the cycling, hiking, running and trekking components (including
 * their child components and the ShortNamePipe) and wires up the area's
 * routing via LandRouterModule. Only CyclingComponent is exported for use
 * outside this module.
 */
@NgModule({
    declarations: [CyclingComponent, HikingComponent, NgCycleComponent, ViewChildComponent, ViewChild2Component,
        TimerComponent, LocationViewComponent, ChildViewComponent, RunningComponent, AstronautComponent, ShortNamePipe,
        HillComponent, UserComponent],
    imports: [
        CommonModule,
        LandRouterModule,
        FormsModule,
        ReactiveFormsModule,
        UiModule
    ],
    exports: [CyclingComponent]
})
// The previous empty constructor was dead code and has been removed;
// Angular instantiates the module class without one.
export class LandModule {
}
| 39,796
|
https://github.com/mrjoes/flask-admin/blob/master/flask_admin/translations/cs/LC_MESSAGES/admin.po
|
Github Open Source
|
Open Source
|
Apache-2.0, MIT
| 2,022
|
flask-admin
|
mrjoes
|
Gettext Catalog
|
Code
| 827
| 3,689
|
# Translations template for Flask-Admin.
# Copyright (C) 2013 Dominik Janků
# This file is distributed under the same license as the Flask-Admin
# project.
# Dominik Janků <djanku@email.cz>, 2013.
#
msgid ""
msgstr ""
"Project-Id-Version: Flask-Admin\n"
"Report-Msgid-Bugs-To: djanku@email.cz\n"
"POT-Creation-Date: 2012-11-09 03:54+0200\n"
"PO-Revision-Date: 2013-05-20 06:48+0200\n"
"Last-Translator: Dominik Janků <djanku@email.cz>\n"
"Language-Team: čeština <>\n"
"Language: cs\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 0.9.6\n"
"Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n"
"X-Generator: Gtranslator 2.91.6\n"
#: ../flask_admin/base.py:283
msgid "Home"
msgstr "Domů"
#: ../flask_admin/form.py:83
msgid "Invalid time format"
msgstr "Nesprávný formát času"
#: ../flask_admin/contrib/fileadmin.py:33
msgid "Invalid directory name"
msgstr "Nesprávný název adresáře"
#: ../flask_admin/contrib/fileadmin.py:41
msgid "File to upload"
msgstr "Soubor k nahrání"
#: ../flask_admin/contrib/fileadmin.py:50
msgid "File required."
msgstr "Soubor vyžadován."
#: ../flask_admin/contrib/fileadmin.py:55
msgid "Invalid file type."
msgstr "Nesprávný formát souboru."
#: ../flask_admin/contrib/fileadmin.py:365
msgid "File uploading is disabled."
msgstr "Nahrávání souborů není povoleno."
#: ../flask_admin/contrib/fileadmin.py:374
#, python-format
msgid "File \"%(name)s\" already exists."
msgstr "Soubor s názvem \"%(name)s\" již existuje."
#: ../flask_admin/contrib/fileadmin.py:381
#, python-format
msgid "Failed to save file: %(error)s"
msgstr "Chyba při ukládání souboru: %(error)s"
#: ../flask_admin/contrib/fileadmin.py:400
msgid "Directory creation is disabled."
msgstr "Vytváření adresářů není povoleno."
#: ../flask_admin/contrib/fileadmin.py:410
#, python-format
msgid "Failed to create directory: %(error)s"
msgstr "Chyba při vytváření adresáře: %(error)s"
#: ../flask_admin/contrib/fileadmin.py:432
msgid "Deletion is disabled."
msgstr "Mazání není povoleno."
#: ../flask_admin/contrib/fileadmin.py:437
msgid "Directory deletion is disabled."
msgstr "Mazání adresářů není povoleno."
#: ../flask_admin/contrib/fileadmin.py:442
#, python-format
msgid "Directory \"%s\" was successfully deleted."
msgstr "Adresář \"%s\" byl úspěšně smazán."
#: ../flask_admin/contrib/fileadmin.py:444
#, python-format
msgid "Failed to delete directory: %(error)s"
msgstr "Chyba při mazání adresáře: %(error)s"
#: ../flask_admin/contrib/fileadmin.py:448
#: ../flask_admin/contrib/fileadmin.py:511
#, python-format
msgid "File \"%(name)s\" was successfully deleted."
msgstr "Soubor \"%(name)s\" byl úspěšně smazán."
#: ../flask_admin/contrib/fileadmin.py:450
#: ../flask_admin/contrib/fileadmin.py:513
#, python-format
msgid "Failed to delete file: %(name)s"
msgstr "Chyba při mazání souboru: %(name)s"
#: ../flask_admin/contrib/fileadmin.py:469
msgid "Renaming is disabled."
msgstr "Přejmenovávání není povoleno."
#: ../flask_admin/contrib/fileadmin.py:473
msgid "Path does not exist."
msgstr "Cesta neexistuje."
#: ../flask_admin/contrib/fileadmin.py:484
#, python-format
msgid "Successfully renamed \"%(src)s\" to \"%(dst)s\""
msgstr "Úspěšné přejmenování \"%(src)s\" na \"%(dst)s\""
#: ../flask_admin/contrib/fileadmin.py:487
#, python-format
msgid "Failed to rename: %(error)s"
msgstr "Chyba při přejmenování: %(error)s"
#: ../flask_admin/contrib/fileadmin.py:503
#: ../flask_admin/contrib/peewee/view.py:355
#: ../flask_admin/contrib/sqla/view.py:680
msgid "Delete"
msgstr "Smazat"
#: ../flask_admin/contrib/fileadmin.py:504
msgid "Are you sure you want to delete these files?"
msgstr "Skutečně chcete vymazat následující soubory?"
#: ../flask_admin/contrib/peewee/filters.py:35
#: ../flask_admin/contrib/sqla/filters.py:35
msgid "equals"
msgstr "rovno"
#: ../flask_admin/contrib/peewee/filters.py:43
#: ../flask_admin/contrib/sqla/filters.py:43
msgid "not equal"
msgstr "není rovno"
#: ../flask_admin/contrib/peewee/filters.py:52
#: ../flask_admin/contrib/sqla/filters.py:52
msgid "contains"
msgstr "obsahuje"
#: ../flask_admin/contrib/peewee/filters.py:61
#: ../flask_admin/contrib/sqla/filters.py:61
msgid "not contains"
msgstr "neobsahuje"
#: ../flask_admin/contrib/peewee/filters.py:69
#: ../flask_admin/contrib/sqla/filters.py:69
msgid "greater than"
msgstr "větší než"
#: ../flask_admin/contrib/peewee/filters.py:77
#: ../flask_admin/contrib/sqla/filters.py:77
msgid "smaller than"
msgstr "menší než"
#: ../flask_admin/contrib/peewee/view.py:317
#: ../flask_admin/contrib/sqla/view.py:627
#, python-format
msgid "Failed to create record. %(error)s"
msgstr "Chyba při vytváření modelu. %(error)s"
#: ../flask_admin/contrib/peewee/view.py:332
#: ../flask_admin/contrib/sqla/view.py:647
#, python-format
msgid "Failed to update record. %(error)s"
msgstr "Chyba při aktualizaci modelu. %(error)s"
#: ../flask_admin/contrib/peewee/view.py:342
#: ../flask_admin/contrib/sqla/view.py:666
#, python-format
msgid "Failed to delete record. %(error)s"
msgstr "Chyba při mazání modelu. %(error)s"
#: ../flask_admin/contrib/peewee/view.py:356
#: ../flask_admin/contrib/sqla/view.py:681
msgid "Are you sure you want to delete selected records?"
msgstr "Skutečně chcete vymazat vybrané modely?"
#: ../flask_admin/contrib/peewee/view.py:372
#: ../flask_admin/contrib/sqla/view.py:699
#, python-format
msgid "Record was successfully deleted."
msgid_plural "%(count)s records were successfully deleted."
msgstr[0] "%(count)s model byl úspěšně smazán."
msgstr[1] "%(count)s modely byly úspěšně smazány."
msgstr[2] "%(count)s modelů bylo úspěšně smazáno."
#: ../flask_admin/contrib/peewee/view.py:377
#: ../flask_admin/contrib/sqla/view.py:704
#, python-format
msgid "Failed to delete records. %(error)s"
msgstr "Chyba při mazání modelů. %(error)s"
#: ../flask_admin/contrib/sqla/fields.py:125
#: ../flask_admin/contrib/sqla/fields.py:175
#: ../flask_admin/contrib/sqla/fields.py:180
msgid "Not a valid choice"
msgstr "Tato volba není přípustná"
#: ../flask_admin/contrib/sqla/validators.py:33
msgid "Already exists."
msgstr "Již existuje."
#: ../flask_admin/model/base.py:869
msgid "Record was successfully created."
msgstr "Model byl úspěšně vytvořen."
#: ../flask_admin/model/filters.py:82
msgid "Yes"
msgstr "Ano"
#: ../flask_admin/model/filters.py:83
msgid "No"
msgstr "Ne"
#: ../flask_admin/templates/admin/actions.html:3
msgid "With selected"
msgstr "S vybranými"
#: ../flask_admin/templates/admin/lib.html:117
msgid "Submit"
msgstr "Odeslat"
#: ../flask_admin/templates/admin/lib.html:122
msgid "Cancel"
msgstr "Zrušit"
#: ../flask_admin/templates/admin/file/list.html:8
msgid "Root"
msgstr "Kořen"
#: ../flask_admin/templates/admin/file/list.html:55
#, python-format
msgid "Are you sure you want to delete \\'%(name)s\\' recursively?"
msgstr "Opravdu chcete vymazat \\'%(name)s\\' rekurzivně?"
#: ../flask_admin/templates/admin/file/list.html:63
#, python-format
msgid "Are you sure you want to delete \\'%(name)s\\'?"
msgstr "Opravdu chcete vymazat \\'%(name)s\\'?"
#: ../flask_admin/templates/admin/file/list.html:90
msgid "Upload File"
msgstr "Nahrát soubor"
#: ../flask_admin/templates/admin/file/list.html:95
msgid "Create Directory"
msgstr "Vytvořit adresář"
#: ../flask_admin/templates/admin/file/list.html:109
msgid "Please select at least one file."
msgstr "Vyberte prosím alespoň jeden soubor."
#: ../flask_admin/templates/admin/file/rename.html:5
#, python-format
msgid "Please provide new name for %(name)s"
msgstr "Prosím napište nový název pro %(name)s"
#: ../flask_admin/templates/admin/model/create.html:12
#: ../flask_admin/templates/admin/model/list.html:13
msgid "List"
msgstr "Seznam"
#: ../flask_admin/templates/admin/model/create.html:15
#: ../flask_admin/templates/admin/model/list.html:17
msgid "Create"
msgstr "Vytvořit"
#: ../flask_admin/templates/admin/model/create.html:20
msgid "Save and Add"
msgstr "Uložit a přidat"
#: ../flask_admin/templates/admin/model/inline_form_list.html:24
msgid "Delete?"
msgstr "Smazat?"
#: ../flask_admin/templates/admin/model/inline_form_list.html:32
msgid "Add"
msgstr "Přidat"
#: ../flask_admin/templates/admin/model/list.html:24
msgid "Add Filter"
msgstr "Přidat filtr"
#: ../flask_admin/templates/admin/model/list.html:51
msgid "Search"
msgstr "Hledat"
#: ../flask_admin/templates/admin/model/list.html:64
msgid "Apply"
msgstr "Potvrdit"
#: ../flask_admin/templates/admin/model/list.html:66
msgid "Reset Filters"
msgstr "Zrušit filtry"
#: ../flask_admin/templates/admin/model/list.html:74
msgid "Remove Filter"
msgstr "Odebrat filtr"
#: ../flask_admin/templates/admin/model/list.html:149
msgid "Are you sure you want to delete this record?"
msgstr "Opravdu chcete vymazat tento záznam?"
#: ../flask_admin/templates/admin/model/list.html:173
msgid "Please select at least one record."
msgstr "Vyberte prosím alespoň jeden model."
| 13,475
|
https://github.com/lizardmedia/sample-magento2/blob/master/view/frontend/web/js/model/sample.js
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
sample-magento2
|
lizardmedia
|
JavaScript
|
Code
| 21
| 78
|
/**
 * Knockout view model shared by the sample module: exposes the observable
 * holding the sample form fields (null until populated).
 *
 * @author Maciej Sławik <maciej.slawik@lizardmedia.pl>
 */
define(['ko'], function (ko) {
    'use strict';

    var sampleModel = {
        formFields: ko.observable(null)
    };

    return sampleModel;
});
| 20,216
|
https://github.com/zqmillet/Anti.DNS.Fucker/blob/master/Anti-DNSFucker/Controls/CheckBoxAdvanced.vb
|
Github Open Source
|
Open Source
|
MIT
| null |
Anti.DNS.Fucker
|
zqmillet
|
Visual Basic
|
Code
| 319
| 606
|
''' <summary>
''' A CheckBox subclass with one extra public Boolean property: Broken.
''' While Broken is True, the Enabled and Checked properties cannot be
''' switched on — any assignment to them is forced to False.
''' </summary>
Public Class CheckBoxAdvanced
    Inherits System.Windows.Forms.CheckBox

    ''' <summary>
    ''' The newly added Broken property, backed by the private field IsBroken.
    ''' Assigning True also forces Enabled and Checked to False.
    ''' Note: assigning False does NOT restore the previous Enabled/Checked
    ''' state; the caller must re-enable the control explicitly.
    ''' </summary>
    ''' <returns>Whether the control is currently marked as broken.</returns>
    Public Property Broken As Boolean
        Get
            Return IsBroken
        End Get
        Set(value As Boolean)
            IsBroken = value
            If IsBroken Then
                Enabled = False
                Checked = False
            End If
        End Set
    End Property

    ''' <summary>
    ''' Backing field storing the value of the Broken property.
    ''' </summary>
    Private IsBroken As Boolean

    ''' <summary>
    ''' Shadows CheckBox.Enabled. Reading behaves exactly like the base
    ''' property. On assignment, Broken is consulted first: while Broken is
    ''' True the assignment is coerced to False, so the control stays disabled.
    ''' </summary>
    ''' <returns>The base CheckBox.Enabled value.</returns>
    Public Shadows Property Enabled As Boolean
        Get
            Return MyBase.Enabled
        End Get
        Set(value As Boolean)
            MyBase.Enabled = If(Broken, False, value)
        End Set
    End Property

    ''' <summary>
    ''' Shadows CheckBox.Checked. Reading behaves exactly like the base
    ''' property. On assignment, Broken is consulted first: while Broken is
    ''' True the assignment is coerced to False, so the box stays unchecked.
    ''' </summary>
    ''' <returns>The base CheckBox.Checked value.</returns>
    Public Shadows Property Checked As Boolean
        Get
            Return MyBase.Checked
        End Get
        Set(value As Boolean)
            MyBase.Checked = If(Broken, False, value)
        End Set
    End Property

    ''' <summary>
    ''' Constructor: the control starts out not broken.
    ''' </summary>
    Public Sub New()
        Broken = False
    End Sub
End Class
| 31,365
|
https://github.com/amicha24/veins-documentation/blob/master/doxy/search/enums_7.js
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| null |
veins-documentation
|
amicha24
|
JavaScript
|
Code
| 7
| 263
|
// Auto-generated Doxygen search index for the "enums" search page.
// Maps each lowercased enum name to the HTML anchor(s) where it is
// documented. Do not edit by hand — regenerate the documentation instead.
var searchData=
[
  ['packet_5fok_5fresult',['PACKET_OK_RESULT',['../classveins_1_1_decider80211p.html#a19b797f1554f813ef971d42f51539eeb',1,'veins::Decider80211p']]],
  ['powertype',['PowerType',['../classveins_1_1_draw_amount.html#a72eb786928939c15bd91da699cd2bb8e',1,'veins::DrawAmount']]],
  ['protocolids',['ProtocolIds',['../classveins_1_1_base_phy_layer.html#a9645e727e49f5c8ed7c62a7f9dadf5d3',1,'veins::BasePhyLayer::ProtocolIds()'],['../classveins_1_1_phy_layer80211p.html#a419745f4760e70d0c7c4ffd7452f6caa',1,'veins::PhyLayer80211p::ProtocolIds()']]]
];
| 13,737
|
https://github.com/weblfe/goloader/blob/master/dymcode.go
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,020
|
goloader
|
weblfe
|
Go
|
Code
| 1,917
| 7,321
|
package goloader
import (
"cmd/objfile/goobj"
"encoding/binary"
"errors"
"fmt"
"os"
"runtime"
"strings"
"sync"
"unsafe"
)
// Relocation type constants, copied from
// $GOROOT/src/cmd/internal/objabi/reloctype.go so this package does not
// import compiler-internal definitions.
const (
	R_ADDR = 1
	// R_ADDRARM64 relocates an adrp, add pair to compute the address of the
	// referenced symbol.
	R_ADDRARM64 = 3
	// R_ADDROFF resolves to a 32-bit offset from the beginning of the section
	// holding the data being relocated to the referenced symbol.
	R_ADDROFF = 5
	// R_WEAKADDROFF resolves just like R_ADDROFF but is a weak relocation.
	// A weak relocation does not make the symbol it refers to reachable,
	// and is only honored by the linker if the symbol is in some other way
	// reachable.
	R_WEAKADDROFF = 6
	R_CALL        = 8
	R_CALLARM     = 9
	R_CALLARM64   = 10
	R_CALLIND     = 11
)

// SymData describes one symbol read from an object file.
type SymData struct {
	Name   string  // link name of the symbol
	Kind   int     // symbol kind (STEXT for code; other kinds go to data)
	Offset int     // byte offset inside CodeReloc.Code or CodeReloc.Data
	Reloc  []Reloc // relocations to apply within this symbol's bytes
}

// Reloc is one relocation record inside a symbol.
type Reloc struct {
	Offset   int   // byte offset where the relocation is applied
	SymOff   int   // index of the target symbol in CodeReloc.Syms
	Size     int   // size in bytes of the value being patched
	Type     int   // relocation type (one of the R_* constants above)
	Add      int   // addend
	DataSize int64 // target symbol's recorded data size; -1 when not recorded
}

// CodeReloc dispatch and load CodeReloc struct via network is OK
type CodeReloc struct {
	Code    []byte             // accumulated text-section bytes
	Data    []byte             // accumulated data-section bytes
	Mod     Module             // function/pcln metadata collected for the module
	Syms    []*SymData         // all symbols, indexed by Reloc.SymOff
	SymMap  map[string]int     // symbol name -> index into Syms
	GCObjs  map[string]uintptr
	FileMap map[string]int
	Arch    string
}

// CodeModule is a loaded, relocated module ready for execution.
type CodeModule struct {
	Syms       map[string]uintptr  // exported text symbol name -> address
	CodeByte   []byte              // the mmapped code+data segment
	Module     *moduledata         // runtime module registered for this code
	pcfuncdata []findfuncbucket    // held to keep findfunctab memory alive
	stkmaps    [][]byte            // held to keep funcdata stack maps alive
	itabs      map[string]*itabSym // itab relocations resolved after build
}

// itabSym tracks an itab relocation that is resolved in relocateItab
// once interface and concrete types are known.
type itabSym struct {
	Reloc
	inter *interfacetype
	typ   *_type
	ptr   *itab
}

// objSym pairs a goobj symbol with the object file it was read from.
type objSym struct {
	sym  *goobj.Sym
	file *os.File
}

// segment holds the state of the mmapped memory being relocated into.
type segment struct {
	codeByte  []byte    // the whole mapped region (code, data, overflow area)
	codeBase  int       // address of the start of the code section
	dataBase  int       // address of the start of the data section
	dataLen   int
	codeLen   int
	maxLength int       // total bytes mapped
	offset    int       // next free byte in the trampoline/overflow area
	symAddrs  []uintptr // resolved address per symbol, indexed like Syms
	errors    string    // accumulated non-fatal error messages
}

var (
	modules     = make(map[interface{}]bool)
	modulesLock sync.Mutex
	// Module header magic byte sequences for x86 and ARM — presumably
	// matching the runtime's pclntab header layout; TODO confirm.
	x86moduleHead = []byte{0xFB, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0x1, PtrSize}
	armmoduleHead = []byte{0xFB, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0x4, PtrSize}
)
// addSymMap registers rsym in symMap/symArray under its name: a new name is
// appended, an already-known name has its entry replaced in place.
// Returns the symbol's index in symArray.
func addSymMap(symMap map[string]int, symArray *[]*SymData, rsym *SymData) int {
	idx, seen := symMap[rsym.Name]
	if seen {
		(*symArray)[idx] = rsym
	} else {
		idx = len(*symArray)
		symMap[rsym.Name] = idx
		*symArray = append(*symArray, rsym)
	}
	return idx
}
// relocSym registers symbol symName from objsymmap into codereloc,
// appending its bytes to the code or data buffer and recursively
// registering every symbol it relocates against. Returns the symbol's
// index in codereloc.Syms, or INVALID_OFFSET with a non-nil error.
func relocSym(codereloc *CodeReloc, symName string, objsymmap map[string]objSym) (int, error) {
	// Already processed — reuse the cached index (also breaks reloc cycles).
	if offset, ok := codereloc.SymMap[symName]; ok {
		return offset, nil
	}
	objsym := objsymmap[symName]
	rsym := SymData{Name: objsym.sym.Name, Kind: int(objsym.sym.Kind)}
	addSymMap(codereloc.SymMap, &codereloc.Syms, &rsym)

	// Read the symbol's raw bytes out of the object file.
	code := make([]byte, objsym.sym.Data.Size)
	_, err := objsym.file.ReadAt(code, objsym.sym.Data.Offset)
	if err != nil {
		return INVALID_OFFSET, err
	}
	switch rsym.Kind {
	case STEXT:
		// Text symbols go into the code buffer, padded to pointer size,
		// and their func metadata is collected for the pclntab.
		rsym.Offset = len(codereloc.Code)
		codereloc.Code = append(codereloc.Code, code...)
		bytearrayAlign(&codereloc.Code, PtrSize)
		err := readFuncData(codereloc, symName, objsymmap, rsym.Offset)
		if err != nil {
			return INVALID_OFFSET, err
		}
	default:
		// Everything else lands in the data buffer.
		rsym.Offset = len(codereloc.Data)
		codereloc.Data = append(codereloc.Data, code...)
		bytearrayAlign(&codereloc.Data, PtrSize)
	}
	for _, loc := range objsym.sym.Reloc {
		symOff := INVALID_OFFSET
		if s, ok := objsymmap[loc.Sym.Name]; ok {
			// Target is in the same object set: register it recursively.
			symOff, err = relocSym(codereloc, s.sym.Name, objsymmap)
			if err != nil {
				return INVALID_OFFSET, err
			}
		} else {
			// External target: record a placeholder symbol to be resolved
			// against the host process at load time.
			sym := SymData{Name: loc.Sym.Name, Offset: INVALID_OFFSET}
			if loc.Type == R_TLS_LE {
				sym.Name = TLSNAME
				sym.Offset = int(loc.Offset)
			}
			if loc.Type == R_CALLIND {
				sym.Offset = 0
				sym.Name = R_CALLIND_NAME
			}
			if strings.HasPrefix(sym.Name, TYPE_IMPORTPATH_PREFIX) {
				// Import-path strings are materialized as NUL-terminated
				// bytes in the data buffer.
				path := strings.Trim(strings.TrimLeft(sym.Name, TYPE_IMPORTPATH_PREFIX), ".")
				sym.Offset = len(codereloc.Data)
				codereloc.Data = append(codereloc.Data, path...)
				codereloc.Data = append(codereloc.Data, ZERO_BYTE)
			}
			symOff = addSymMap(codereloc.SymMap, &codereloc.Syms, &sym)
		}
		rsym.Reloc = append(rsym.Reloc, Reloc{Offset: int(loc.Offset) + rsym.Offset, SymOff: symOff, Type: int(loc.Type), Size: int(loc.Size), Add: int(loc.Add), DataSize: -1})
		if s, ok := objsymmap[loc.Sym.Name]; ok {
			// Record a zero DataSize so relocate() can special-case
			// zero-sized (static_tmp-style) targets.
			if s.sym.Data.Size == 0 && loc.Size > 0 {
				rsym.Reloc[len(rsym.Reloc)-1].DataSize = s.sym.Data.Size
			}
		}
	}
	codereloc.Syms[codereloc.SymMap[symName]].Reloc = rsym.Reloc
	return codereloc.SymMap[symName], nil
}
// relocateADRP patches an ARM64 adrp/add instruction pair (R_ADDRARM64) at
// mCode so it resolves to symAddr+loc.Add. If the page-relative offset fits
// the encoding, the pair is patched in place; otherwise it is rewritten as
// an absolute MOV/MOVK immediate sequence (note: only the low 32 bits of
// symAddr are encoded on that path). symName is currently unused.
func relocateADRP(mCode []byte, loc Reloc, seg *segment, symAddr uintptr, symName string) {
	// Page-aligned delta from the instruction's address to the target.
	offset := int64(symAddr) + int64(loc.Add) - ((int64(seg.codeBase) + int64(loc.Offset)) &^ 0xfff)
	//overflow
	if offset > 0xFFFFFFFF || offset <= -0x100000000 {
		//low: MOV reg imm
		//high: MOVK reg imm LSL#16
		value := uint64(0xF2A00000D2800000)
		addr := binary.LittleEndian.Uint32(mCode)
		low := uint32(value & 0xFFFFFFFF)
		high := uint32(value >> 32)
		// Keep the destination register bits (0x1f) from the original
		// instruction and splice in the immediate halves of symAddr.
		low = ((addr & 0x1f) | low) | ((uint32(symAddr) & 0xffff) << 5)
		high = ((addr & 0x1f) | high) | (uint32(symAddr) >> 16 << 5)
		binary.LittleEndian.PutUint64(mCode, uint64(low)|(uint64(high)<<32))
	} else {
		// 2bit + 19bit + low(12bit) = 33bit
		low := (uint32((offset>>12)&3) << 29) | (uint32((offset>>12>>2)&0x7ffff) << 5)
		high := (uint32(offset&0xfff) << 10)
		value := binary.LittleEndian.Uint64(mCode)
		value = (uint64(uint32(value>>32)|high) << 32) | uint64(uint32(value&0xFFFFFFFF)|low)
		binary.LittleEndian.PutUint64(mCode, value)
	}
}
// addSymAddrs resolves a runtime address for every symbol in code.Syms and
// stores it in seg.symAddrs (indexed like code.Syms).
//
// Resolution rules, in order:
//   - symbols with no local offset are looked up in symPtr (the host
//     process symbol table); unresolved ones get INVALID_HANDLE_VALUE and
//     an error line in seg.errors
//   - the TLS pseudo-symbol registers its offset via regTLS instead
//   - text symbols resolve relative to the code segment and are exported
//     through codeModule.Syms
//   - itab and type symbols prefer the host's copy when one exists —
//     presumably so the loaded code shares the host's type identity
//   - everything else resolves relative to the data segment
func addSymAddrs(code *CodeReloc, symPtr map[string]uintptr, codeModule *CodeModule, seg *segment) {
	for i, sym := range code.Syms {
		if sym.Offset == INVALID_OFFSET {
			if ptr, ok := symPtr[sym.Name]; ok {
				seg.symAddrs[i] = ptr
			} else {
				seg.symAddrs[i] = INVALID_HANDLE_VALUE
				// Fixed message typo: "unresolve" -> "unresolved".
				seg.errors += fmt.Sprintf("unresolved external:%s\n", sym.Name)
			}
		} else if sym.Name == TLSNAME {
			regTLS(symPtr, sym.Offset)
		} else if sym.Kind == STEXT {
			addr := uintptr(code.Syms[i].Offset + seg.codeBase)
			seg.symAddrs[i] = addr
			codeModule.Syms[sym.Name] = addr
		} else if strings.HasPrefix(sym.Name, ITAB_PREFIX) {
			if ptr, ok := symPtr[sym.Name]; ok {
				seg.symAddrs[i] = ptr
			}
		} else {
			seg.symAddrs[i] = uintptr(code.Syms[i].Offset + seg.dataBase)
			if strings.HasPrefix(sym.Name, TYPE_PREFIX) {
				if ptr, ok := symPtr[sym.Name]; ok {
					seg.symAddrs[i] = ptr
				}
			}
		}
	}
}
// relocateItab resolves every itab relocation deferred by relocate(): for
// each pending itab it looks up the interface and concrete type addresses,
// obtains the runtime itab via getitab, and patches the referring machine
// code or data to point at it. Errors are accumulated in seg.errors.
func relocateItab(code *CodeReloc, module *CodeModule, seg *segment) {
	for itabName, iter := range module.itabs {
		sym := code.Syms[code.SymMap[itabName]]
		// By construction Reloc[0] is the interface type and Reloc[1] the
		// concrete type — TODO confirm against the object writer.
		inter := seg.symAddrs[sym.Reloc[0].SymOff]
		typ := seg.symAddrs[sym.Reloc[1].SymOff]
		if inter != INVALID_HANDLE_VALUE && typ != INVALID_HANDLE_VALUE {
			*(*uintptr)(unsafe.Pointer(&(iter.inter))) = inter
			*(*uintptr)(unsafe.Pointer(&(iter.typ))) = typ
			// Map each method's type offset so the runtime can resolve it
			// through this module's typemap.
			methods := iter.typ.uncommon().methods()
			for k := 0; k < len(iter.inter.mhdr) && k < len(methods); k++ {
				itype := uintptr(unsafe.Pointer(iter.inter.typ.typeOff(iter.inter.mhdr[k].ityp)))
				module.Module.typemap[methods[k].mtyp] = itype
			}
			iter.ptr = getitab(iter.inter, iter.typ, false)
			address := uintptr(unsafe.Pointer(iter.ptr))
			if iter.ptr != nil {
				switch iter.Type {
				case R_PCREL:
					offset := int(address) - (seg.codeBase + iter.Offset + iter.Size) + iter.Add
					if offset > 0x7FFFFFFF || offset < -0x80000000 {
						// Out of 32-bit PC-relative range: redirect through
						// the overflow area at seg.offset.
						offset = (seg.codeBase + seg.offset) - (seg.codeBase + iter.Offset + iter.Size) + iter.Add
						binary.LittleEndian.PutUint32(seg.codeByte[iter.Offset:], uint32(offset))
						if seg.codeByte[iter.Offset-2] == x86amd64MOVcode {
							//!!!TRICK
							//because struct itab doesn't change after it adds into itab list, so
							//copy itab data instead of jump code
							copy2Slice(seg.codeByte[seg.offset:], address, ItabSize)
							seg.offset += ItabSize
						} else if seg.codeByte[iter.Offset-2] == x86amd64LEAcode {
							// Turn LEA into MOV and store the absolute
							// address in the overflow area.
							seg.codeByte[iter.Offset-2:][0] = x86amd64MOVcode
							putAddress(seg.codeByte[seg.offset:], uint64(address))
							seg.offset += PtrSize
						} else {
							seg.errors += fmt.Sprintf("relocateItab: not support code:%v!\n", seg.codeByte[iter.Offset-2:iter.Offset])
						}
					} else {
						binary.LittleEndian.PutUint32(seg.codeByte[iter.Offset:], uint32(offset))
					}
				case R_ADDRARM64:
					relocateADRP(seg.codeByte[iter.Offset:], iter.Reloc, seg, address, itabName)
				case R_ADDR:
					putAddress(seg.codeByte[iter.Offset:], uint64(int(address)+iter.Add))
				default:
					seg.errors += fmt.Sprintf("unknown relocateItab type:%d Name:%s\n", iter.Type, itabName)
				}
			}
		}
	}
}
// relocate applies every recorded relocation to the freshly mapped segment,
// patching machine code and data in place. Relocations that cannot reach
// their target with the instruction's native range are redirected through
// trampolines written to the overflow area at seg.offset. Itab relocations
// whose address is still 0 are deferred to relocateItab. Non-fatal problems
// are accumulated in seg.errors.
func relocate(code *CodeReloc, symPtr map[string]uintptr, codeModule *CodeModule, seg *segment) {
	for _, curSym := range code.Syms {
		for _, loc := range curSym.Reloc {
			addr := seg.symAddrs[loc.SymOff]
			sym := code.Syms[loc.SymOff]
			// static_tmp-style symbols have zero data size (the compiler
			// allocated no storage): point them at the reserved zero word
			// placed just after code+data (see Load).
			if loc.DataSize == 0 && loc.Size > 0 {
				if loc.Size <= IntSize {
					addr = uintptr(seg.codeBase + seg.codeLen + seg.dataLen)
				} else {
					seg.errors += fmt.Sprintf("Symbol:%s size:%d>IntSize:%d\n", sym.Name, loc.Size, IntSize)
				}
			}
			if addr == INVALID_HANDLE_VALUE {
				// Unresolved external; the error was already recorded in
				// addSymAddrs — nothing to patch.
			} else if addr == 0 && strings.HasPrefix(sym.Name, ITAB_PREFIX) {
				// Itab not known yet: remember the relocation so
				// relocateItab can patch it after the module is built.
				codeModule.itabs[sym.Name] = &itabSym{Reloc: loc, inter: nil, typ: nil, ptr: nil}
			} else {
				switch loc.Type {
				case R_TLS_LE:
					binary.LittleEndian.PutUint32(seg.codeByte[loc.Offset:], uint32(symPtr[TLSNAME]))
				case R_CALL, R_PCREL:
					// PC-relative: offsets are relative to the section the
					// referring symbol lives in (code vs data).
					var relocByte = seg.codeByte[seg.codeLen:]
					var addrBase = seg.dataBase
					if curSym.Kind == STEXT {
						addrBase = seg.codeBase
						relocByte = seg.codeByte
					}
					offset := int(addr) - (addrBase + loc.Offset + loc.Size) + loc.Add
					if offset > 0x7FFFFFFF || offset < -0x80000000 {
						// Out of 32-bit range: go through a trampoline in
						// the overflow area.
						if seg.offset+PtrSize > seg.maxLength {
							seg.errors += fmt.Sprintf("len overflow! sym:%s\n", sym.Name)
						} else {
							offset = (seg.codeBase + seg.offset) - (addrBase + loc.Offset + loc.Size)
							bytes := relocByte[loc.Offset-2:]
							address := addr
							opcode := relocByte[loc.Offset-2]
							reginfo := ZERO_BYTE
							if loc.Type == R_CALL {
								address = uintptr(int(addr) + loc.Add)
								copy(seg.codeByte[seg.offset:], x86amd64JMPLcode)
								seg.offset += len(x86amd64JMPLcode)
							} else if opcode == x86amd64LEAcode {
								// LEA becomes MOV of the absolute address.
								bytes[0] = x86amd64MOVcode
							} else if opcode == x86amd64MOVcode && loc.Size >= Uint32Size {
								// Remember the destination register so the
								// replacement MOVQ targets the same one.
								reginfo = ((relocByte[loc.Offset-1] >> 3) & 0x7) | 0xb8
								copy(bytes, x86amd64JMPLcode)
							} else if opcode == x86amd64CMPLcode && loc.Size >= Uint32Size {
								copy(bytes, x86amd64JMPLcode)
							} else {
								seg.errors += fmt.Sprintf("not support code:%v!\n", relocByte[loc.Offset-2:loc.Offset])
							}
							binary.LittleEndian.PutUint32(relocByte[loc.Offset:], uint32(offset))
							if opcode == x86amd64CMPLcode {
								// Emit replacement CMPL sequence plus the
								// absolute target and return addresses.
								putAddress(seg.codeByte[seg.offset:], uint64(seg.codeBase+seg.offset+PtrSize))
								seg.offset += PtrSize
								copy(seg.codeByte[seg.offset:], x86amd64replaceCMPLcode)
								seg.codeByte[seg.offset+0x0F] = relocByte[loc.Offset+loc.Size]
								seg.offset += len(x86amd64replaceCMPLcode)
								putAddress(seg.codeByte[seg.offset:], uint64(address))
								seg.offset += PtrSize
								address = uintptr(addrBase + loc.Offset + loc.Size - loc.Add)
								putAddress(seg.codeByte[seg.offset:], uint64(address))
								seg.offset += PtrSize
							} else if opcode == x86amd64MOVcode {
								// Emit replacement MOVQ sequence plus the
								// absolute target and return addresses.
								putAddress(seg.codeByte[seg.offset:], uint64(seg.codeBase+seg.offset+PtrSize))
								seg.offset += PtrSize
								copy(seg.codeByte[seg.offset:], x86amd64replaceMOVQcode)
								seg.codeByte[seg.offset+1] = reginfo
								copy2Slice(seg.codeByte[seg.offset+2:], address, PtrSize)
								seg.offset += len(x86amd64replaceMOVQcode)
								address = uintptr(addrBase + loc.Offset + loc.Size - loc.Add)
								putAddress(seg.codeByte[seg.offset:], uint64(address))
								seg.offset += PtrSize
							} else {
								putAddress(seg.codeByte[seg.offset:], uint64(address))
								seg.offset += PtrSize
							}
						}
					} else {
						binary.LittleEndian.PutUint32(relocByte[loc.Offset:], uint32(offset))
					}
				case R_CALLARM, R_CALLARM64:
					// ARM branch: word-sized offsets; ARM32 encodes a
					// sign-extended 24-bit addend and an 8-byte PC bias.
					var add = loc.Add
					var pcOff = 0
					if loc.Type == R_CALLARM {
						add = int(signext24(int64(loc.Add&0xFFFFFF)) * 4)
						pcOff = 8
					}
					offset := (int(addr) + add - (seg.codeBase + loc.Offset)) / 4
					if offset > 0x7FFFFF || offset < -0x800000 {
						// Out of branch range: jump via a stub in the
						// overflow area that loads the absolute address.
						if seg.offset+PtrSize > seg.maxLength {
							seg.errors += fmt.Sprintf("len overflow! sym:%s\n", sym.Name)
						} else {
							seg.offset = alignof(seg.offset, PtrSize)
							if loc.Type == R_CALLARM {
								add = int(signext24(int64(loc.Add&0xFFFFFF)+2) * 4)
							}
							putUint24(seg.codeByte[loc.Offset:], uint32(seg.offset-pcOff-loc.Offset)/4)
							if loc.Type == R_CALLARM64 {
								copy(seg.codeByte[seg.offset:], arm64code)
								seg.offset += len(arm64code)
							} else {
								copy(seg.codeByte[seg.offset:], armcode)
								seg.offset += len(armcode)
							}
							putAddress(seg.codeByte[seg.offset:], uint64(int(addr)+add))
							seg.offset += PtrSize
						}
					} else {
						// In range: splice the offset into the instruction
						// (24-bit field on ARM32, 26-bit on ARM64).
						val := binary.LittleEndian.Uint32(seg.codeByte[loc.Offset : loc.Offset+4])
						if loc.Type == R_CALLARM {
							val |= uint32(offset) & 0x00FFFFFF
						} else {
							val |= uint32(offset) & 0x03FFFFFF
						}
						binary.LittleEndian.PutUint32(seg.codeByte[loc.Offset:], val)
					}
				case R_ADDRARM64:
					if curSym.Kind != STEXT {
						seg.errors += fmt.Sprintf("impossible!Sym:%s locate not in code segment!\n", sym.Name)
					}
					relocateADRP(seg.codeByte[loc.Offset:], loc, seg, addr, sym.Name)
				case R_ADDR:
					// Absolute address write into code or data.
					var relocByte = seg.codeByte[seg.codeLen:]
					if curSym.Kind == STEXT {
						relocByte = seg.codeByte
					}
					address := uintptr(int(addr) + loc.Add)
					putAddress(relocByte[loc.Offset:], uint64(address))
				case R_CALLIND:
					// Indirect call: nothing to patch here (the target was
					// recorded as R_CALLIND_NAME in relocSym).
				case R_ADDROFF, R_WEAKADDROFF, R_METHODOFF:
					// 32-bit offset from the code base, written into data.
					if curSym.Kind == STEXT {
						seg.errors += fmt.Sprintf("impossible!Sym:%s locate on code segment!\n", sym.Name)
					}
					offset := int(addr) - seg.codeBase + loc.Add
					binary.LittleEndian.PutUint32(seg.codeByte[seg.codeLen+loc.Offset:], uint32(offset))
				default:
					seg.errors += fmt.Sprintf("unknown reloc type:%d sym:%s\n", loc.Type, sym.Name)
				}
			}
		}
	}
}
// addFuncTab fills module.ftab[i] and serializes function i's _func header,
// pcdata table and (pointer-aligned) funcdata table into module.pclntable
// starting at offset. Returns the new write offset into pclntable.
func addFuncTab(module *moduledata, i, offset int, code *CodeReloc, seg *segment, symPtr map[string]uintptr) int {
	// ftab stored the symbol index in entry; resolve it to the real address.
	module.ftab[i].entry = uintptr(seg.symAddrs[int(code.Mod.ftab[i].entry)])
	offset = alignof(offset, PtrSize)
	module.ftab[i].funcoff = uintptr(offset)
	fi := code.Mod.funcinfo[i]
	fi.entry = module.ftab[i].entry

	// Resolve funcdata entries to pointers into the kept-alive stack maps.
	funcdata := make([]uintptr, len(fi.funcdata))
	copy(funcdata, fi.funcdata)
	for i, v := range fi.funcdata {
		if code.Mod.stkmaps[v] != nil {
			funcdata[i] = (uintptr)(unsafe.Pointer(&(code.Mod.stkmaps[v][0])))
		} else {
			funcdata[i] = (uintptr)(0)
		}
	}
	addStackObject(code, &fi, seg, symPtr)
	addDeferReturn(code, &fi, seg)

	// _func header first ...
	copy2Slice(module.pclntable[offset:], uintptr(unsafe.Pointer(&fi._func)), _FuncSize)
	offset += _FuncSize
	// ... then the pcdata offsets ...
	if len(fi.pcdata) > 0 {
		size := int(int32(unsafe.Sizeof(fi.pcdata[0])) * fi.npcdata)
		copy2Slice(module.pclntable[offset:], uintptr(unsafe.Pointer(&fi.pcdata[0])), size)
		offset += size
	}
	// ... then the pointer-aligned funcdata pointers.
	offset = alignof(offset, PtrSize)
	funcDataSize := int(PtrSize * fi.nfuncdata)
	copy2Slice(module.pclntable[offset:], uintptr(unsafe.Pointer(&funcdata[0])), funcDataSize)
	offset += funcDataSize
	return offset
}
// buildModule constructs the runtime moduledata for the loaded code: builds
// the pclntable/ftab, registers the module with the runtime (under
// modulesLock) and runs the runtime's moduledata verification.
func buildModule(code *CodeReloc, symPtr map[string]uintptr, codeModule *CodeModule, seg *segment) {
	var module moduledata
	module.ftab = make([]functab, len(code.Mod.ftab))
	copy(module.ftab, code.Mod.ftab)
	pclnOff := len(code.Mod.pclntable)
	// Reserve generous extra room per function for _func + pcdata/funcdata;
	// the table is trimmed to the real size below.
	module.pclntable = make([]byte, len(code.Mod.pclntable)+
		(_FuncSize+128)*len(code.Mod.ftab))
	copy(module.pclntable, code.Mod.pclntable)
	module.findfunctab = (uintptr)(unsafe.Pointer(&code.Mod.pcfunc[0]))
	module.minpc = uintptr(seg.codeBase)
	module.maxpc = uintptr(seg.dataBase)
	module.filetab = code.Mod.filetab
	module.typemap = make(map[typeOff]uintptr)
	module.types = uintptr(seg.codeBase)
	module.etypes = uintptr(seg.codeBase + seg.maxLength)
	module.text = uintptr(seg.codeBase)
	module.etext = uintptr(seg.codeBase + len(code.Code))
	codeModule.pcfuncdata = code.Mod.pcfunc // hold reference
	codeModule.stkmaps = code.Mod.stkmaps   // hold reference for GC stack maps
	for i := range module.ftab {
		pclnOff = addFuncTab(&module, i, pclnOff, code, seg, symPtr)
	}
	module.pclntable = module.pclntable[:pclnOff]
	// Add sentinel entries: shift everything right by one so ftab[0] can
	// carry minpc, and append a trailing entry carrying maxpc.
	module.ftab = append(module.ftab, functab{})
	for i := len(module.ftab) - 1; i > 0; i-- {
		module.ftab[i] = module.ftab[i-1]
	}
	module.ftab = append(module.ftab, functab{})
	module.ftab[0].entry = module.minpc
	module.ftab[len(module.ftab)-1].entry = module.maxpc

	modulesLock.Lock()
	addModule(codeModule, &module)
	modulesLock.Unlock()
	moduledataverify1(&module)
	codeModule.CodeByte = seg.codeByte
}
// Load maps executable memory for the relocated code, copies the code and
// data sections into it, resolves symbols against symPtr (the host symbol
// table), applies relocations, builds and registers the runtime module and
// finally resolves deferred itabs. If any step accumulated error text, the
// (possibly unusable) module is returned together with that error.
func Load(code *CodeReloc, symPtr map[string]uintptr) (*CodeModule, error) {
	var seg segment
	seg.codeLen = len(code.Code)
	seg.dataLen = len(code.Data)
	// Extra code-sized room for trampolines/overflow written by relocate.
	seg.maxLength = seg.codeLen*2 + seg.dataLen
	codeByte, err := Mmap(seg.maxLength)
	if err != nil {
		return nil, err
	}
	seg.codeByte = codeByte

	var codeModule = CodeModule{
		Syms:  make(map[string]uintptr),
		itabs: make(map[string]*itabSym),
	}
	seg.codeBase = int((*sliceHeader)(unsafe.Pointer(&codeByte)).Data)
	seg.dataBase = seg.codeBase + len(code.Code)
	seg.symAddrs = make([]uintptr, len(code.Syms))
	seg.offset = seg.codeLen + seg.dataLen
	//static_tmp is 0, golang compile not allocate memory.
	// Reserve one zero word right after code+data for zero-sized symbols
	// (see the DataSize == 0 case in relocate).
	seg.offset += IntSize
	copy(seg.codeByte, code.Code)
	copy(seg.codeByte[seg.codeLen:], code.Data)

	addSymAddrs(code, symPtr, &codeModule, &seg)
	relocate(code, symPtr, &codeModule, &seg)
	buildModule(code, symPtr, &codeModule, &seg)
	relocateItab(code, &codeModule, &seg)
	if len(seg.errors) > 0 {
		return &codeModule, errors.New(seg.errors)
	}
	return &codeModule, nil
}
// Unload deregisters the module from the runtime and releases the mapped
// code memory. The CodeModule must not be used afterwards.
func (cm *CodeModule) Unload() {
	// Detach the itabs this module installed before tearing it down.
	for _, itab := range cm.itabs {
		if itab.inter != nil && itab.typ != nil {
			eraseiface(itab.inter, itab.typ)
		}
	}
	// Run a GC cycle first — presumably so no live references into the
	// module remain when it is removed and unmapped; confirm.
	runtime.GC()
	modulesLock.Lock()
	removeModule(cm.Module)
	modulesLock.Unlock()
	Munmap(cm.CodeByte)
}
| 43,466
|
https://github.com/saffire/saffire/blob/master/src/components/modules/saffire.c
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,022
|
saffire
|
saffire
|
C
|
Code
| 722
| 2,915
|
/*
Copyright (c) 2012-2015, The Saffire Group
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Saffire Group the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <stdio.h>
#include <saffire/general/output.h>
#include <saffire/modules/module_api.h>
#include <saffire/general/parse_options.h>
#include <saffire/general/string.h>
#include <saffire/objects/object.h>
#include <saffire/objects/objects.h>
#include <saffire/general/config.h>
#include <saffire/general/dll.h>
#include <saffire/version.h>
#include <saffire/vm/vm.h>
#include <saffire/vm/thread.h>
#include <saffire/memory/smm.h>
#include <string.h>
/* saffire.get_locale() :: returns the current thread's locale as a string. */
SAFFIRE_MODULE_METHOD(saffire, get_locale) {
    t_thread *thread = thread_get_current();
    RETURN_STRING_FROM_CHAR(thread->locale);
}
/* saffire.set_locale(locale) :: sets the current thread's locale; returns self. */
SAFFIRE_MODULE_METHOD(saffire, set_locale) {
    t_string *locale;

    // "s" : one mandatory string argument
    if (object_parse_arguments(SAFFIRE_METHOD_ARGS, "s", &locale) != 0) {
        return NULL;
    }

    // Set locale
    t_thread *thread = thread_get_current();
    // NOTE(review): the previous thread->locale string is not released here —
    // confirm whether string_strdup0 allocations are tracked elsewhere or leak.
    thread->locale = string_strdup0(STRING_CHAR0(locale));

    RETURN_SELF;
}
/**
 * saffire.version() :: returns the Saffire version string.
 */
SAFFIRE_MODULE_METHOD(saffire, version) {
    RETURN_STRING_FROM_CHAR(saffire_version);
}
/* saffire.git_revision() :: returns the git revision this binary was built from. */
SAFFIRE_MODULE_METHOD(saffire, gitrev) {
    RETURN_STRING_FROM_CHAR(__GIT_REVISION__);
}
/* saffire.sapi() :: name of the Server API the VM is running under. */
SAFFIRE_MODULE_METHOD(saffire, sapi) {
    // Run modes are bit flags on vm_runmode; report the first one that matches.
    if ((vm_runmode & VM_RUNMODE_FASTCGI) == VM_RUNMODE_FASTCGI) {
        RETURN_STRING_FROM_CHAR("fastcgi");
    } else if ((vm_runmode & VM_RUNMODE_CLI) == VM_RUNMODE_CLI) {
        RETURN_STRING_FROM_CHAR("cli");
    } else if ((vm_runmode & VM_RUNMODE_REPL) == VM_RUNMODE_REPL) {
        RETURN_STRING_FROM_CHAR("repl");
    }

    // None of the known modes matched
    RETURN_STRING_FROM_CHAR("unknown");
}
/* saffire.debug() :: true when the VM runs in debug mode, false otherwise. */
SAFFIRE_MODULE_METHOD(saffire, debug) {
    if (! VM_IN_DEBUG_MODE) {
        RETURN_FALSE;
    }

    RETURN_TRUE;
}
/*
 * saffire.args()    :: returns a list of all command line arguments.
 * saffire.args(idx) :: returns argument `idx`, or null when out of range.
 */
SAFFIRE_MODULE_METHOD(saffire, args) {
    long idx = -1;

    // "|n" : one optional numerical argument
    if (object_parse_arguments(SAFFIRE_METHOD_ARGS, "|n", &idx) != 0) {
        RETURN_SELF;
    }

    // No index given: collect every argument into a list
    if (idx == -1) {
        t_hash_table *ht = ht_create();
        for (int i=0; i!=saffire_getopt_count(); i++) {
            ht_add_num(ht, ht->element_count, STR02OBJ(saffire_getopt_string(i)));
        }
        RETURN_LIST(ht);
    }

    // Out-of-range (including other negative) indices yield null
    if (idx >= saffire_getopt_count() || idx < 0) {
        RETURN_NULL;
    }

    RETURN_STRING_FROM_CHAR(saffire_getopt_string(idx));
}
/*
 * Default uncaught-exception handler: prints a banner, the exception's code
 * and message, and every stored stack trace line. Always returns self.
 */
SAFFIRE_MODULE_METHOD(saffire, exception_handler) {
    t_exception_object *exception_obj;

    // "o" : one mandatory object argument
    if (object_parse_arguments(SAFFIRE_METHOD_ARGS, "o", &exception_obj) != 0) {
        RETURN_SELF;
    }
    // Non-exception objects are silently ignored
    if (! OBJECT_IS_EXCEPTION(exception_obj)) {
        RETURN_SELF;
    }

    module_io_print("%s",
        " _ _\n" \
        " | | (_)\n" \
        " _____ _____ ___ _ __ | |_ _ ___ _ __\n" \
        " / _ \\ \\/ / __/ _ \\ '_ \\| __| |/ _ \\| '_ \\\n" \
        " | __/> < (_| __/ |_) | |_| | (_) | | | |\n" \
        " \\___/_/\\_\\___\\___| .__/ \\__|_|\\___/|_| |_|\n" \
        " | |\n" \
        " |_|\n" \
        "\n");
    module_io_print("%s", "-------------------------------------------\n");
    module_io_print(" Code: %d\n", exception_obj->data.code);
    module_io_print(" Mesg: %s\n", STRING_CHAR0(exception_obj->data.message));
    module_io_print("%s", "-------------------------------------------\n");

    // Print each collected stack trace entry on its own line
    t_hash_iter iter;
    ht_iter_init(&iter, exception_obj->data.stacktrace);
    while (ht_iter_valid(&iter)) {
        t_string_object *v = (t_string_object *)(ht_iter_value(&iter));
        module_io_print("%s\n", OBJ2STR0(v));
        ht_iter_next(&iter);
    }
    module_io_print("%s", "-------------------------------------------\n");

    RETURN_SELF;
}
/*
 * saffire.modules() :: returns a hash of all registered modules, keyed by
 * module name. Each value is a hash with "objects" (list of exported class
 * names), "path" and "description".
 */
SAFFIRE_MODULE_METHOD(saffire, modules) {
    t_hash_table *modules_ht = ht_create();

    // Walk the linked list of registered modules
    t_dll_element *e = DLL_HEAD(registered_modules);
    while (e) {
        t_module_info *module_info = (t_module_info *)DLL_DATA_PTR(e);
        t_hash_table *module_ht = ht_create();

        // Add all objects (the module's NULL-terminated object array)
        t_hash_table *objects_ht = ht_create();
        int idx = 0;
        t_object *obj = (t_object *)module_info->mod->objects[idx];
        while (obj != NULL) {
            ht_append_num(objects_ht, STR02OBJ(obj->name));
            idx++;
            obj = (t_object *)module_info->mod->objects[idx];
        }
        ht_add_obj(module_ht, STR02OBJ("objects"), LIST2OBJ(objects_ht));

        // Add module path and description
        ht_add_obj(module_ht, STR02OBJ("path"), STR02OBJ(module_info->path));
        ht_add_obj(module_ht, STR02OBJ("description"), STR02OBJ(module_info->mod->description));

        // Add element to modules hash
        ht_add_obj(modules_ht, STR02OBJ(module_info->mod->name), HASH2OBJ(module_ht));

        e = DLL_NEXT(e);
    }

    RETURN_HASH(modules_ht);
}
t_object saffire_struct = { OBJECT_HEAD_INIT("saffire", objectTypeBase, OBJECT_TYPE_CLASS, NULL, 0), OBJECT_FOOTER };
/* Initializes the \saffire class: registers its static methods and properties. */
static void _init(void) {
    saffire_struct.attributes = ht_create();

    object_add_internal_method((t_object *)&saffire_struct, "version", ATTRIB_METHOD_STATIC, ATTRIB_VISIBILITY_PUBLIC, module_saffire_method_version);
    object_add_internal_method((t_object *)&saffire_struct, "git_revision", ATTRIB_METHOD_STATIC, ATTRIB_VISIBILITY_PUBLIC, module_saffire_method_gitrev);
    object_add_internal_method((t_object *)&saffire_struct, "sapi", ATTRIB_METHOD_STATIC, ATTRIB_VISIBILITY_PUBLIC, module_saffire_method_sapi);
    object_add_internal_method((t_object *)&saffire_struct, "debug", ATTRIB_METHOD_STATIC, ATTRIB_VISIBILITY_PUBLIC, module_saffire_method_debug);
    object_add_internal_method((t_object *)&saffire_struct, "set_locale", ATTRIB_METHOD_STATIC, ATTRIB_VISIBILITY_PUBLIC, module_saffire_method_set_locale);
    object_add_internal_method((t_object *)&saffire_struct, "get_locale", ATTRIB_METHOD_STATIC, ATTRIB_VISIBILITY_PUBLIC, module_saffire_method_get_locale);
    // Exposed to user code as saffire.uncaughtExceptionHandler()
    object_add_internal_method((t_object *)&saffire_struct, "uncaughtExceptionHandler", ATTRIB_METHOD_STATIC, ATTRIB_VISIBILITY_PUBLIC, module_saffire_method_exception_handler);
    object_add_internal_method((t_object *)&saffire_struct, "args", ATTRIB_METHOD_STATIC, ATTRIB_VISIBILITY_PUBLIC, module_saffire_method_args);
    object_add_internal_method((t_object *)&saffire_struct, "modules", ATTRIB_METHOD_STATIC, ATTRIB_VISIBILITY_PUBLIC, module_saffire_method_modules);

    // Placeholder properties; initialized to null here
    object_add_property((t_object *)&saffire_struct, "fastcgi", ATTRIB_VISIBILITY_PUBLIC, Object_Null);
    object_add_property((t_object *)&saffire_struct, "cli", ATTRIB_VISIBILITY_PUBLIC, Object_Null);
    object_add_property((t_object *)&saffire_struct, "repl", ATTRIB_VISIBILITY_PUBLIC, Object_Null);
}
/* Tears down the \saffire class. */
static void _fini(void) {
    // Destroy methods and properties
    object_free_internal_object(&saffire_struct);
}
/* NULL-terminated list of objects exported by this module. */
static t_object *_objects[] = {
    &saffire_struct,
    NULL
};
/* Module descriptor registered with the Saffire module loader. */
t_module module_saffire = {
    "\\saffire",                        /* fully qualified name */
    "Saffire configuration module",     /* description */
    _objects,                           /* exported objects */
    _init,                              /* init handler */
    _fini                               /* fini handler */
};
| 7,684
|
https://github.com/Ahmedkr5/Mini-Miracles/blob/master/src/PlanBundle/Resources/views/enfant/edit2.html.twig
|
Github Open Source
|
Open Source
|
MIT
| null |
Mini-Miracles
|
Ahmedkr5
|
Twig
|
Code
| 53
| 196
|
{% extends 'base.html.twig' %}

{% block body %}
    {# Lists every group so a child can be attached to one of them. #}
    {# Typos fixed in user-facing text: "Selectioner" → "Sélectionner",
       "Nom du group" → "Nom du groupe". #}
    <h1>Sélectionner un groupe</h1>
    <table class="table table-striped">
        <thead>
            <tr>
                <th>Nom du groupe</th>
                <th>Ajouter</th>
            </tr>
        </thead>
        <tbody>
        {% for groupe in groupes %}
            <tr>
                <td>{{ groupe.nomGroup }}</td>
                <td>
                    <a href="{{ path('enfant_edit', { 'idGroup': groupe.idGroup }) }}" >Ajouter</a>
                </td>
            </tr>
        {% endfor %}
        </tbody>
    </table>
{% endblock %}
| 32,633
|
https://github.com/openstates/openstates-core/blob/master/openstates/data/admin/event.py
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
openstates-core
|
openstates
|
Python
|
Code
| 149
| 710
|
from django.contrib import admin
from django.template import defaultfilters
from django.utils.html import format_html

from . import base
from .. import models
@admin.register(models.EventLocation)
class EventLocationAdmin(admin.ModelAdmin):
    """Default admin for event locations."""

    pass
class EventParticipantInline(base.RelatedEntityInline):
    """Inline listing of an event's participants (read-only org/person)."""

    model = models.EventParticipant
    readonly_fields = ("organization", "person")
@admin.register(models.Event)
class EventAdmin(admin.ModelAdmin):
    """Admin for events, with a link back to the scraped source page."""

    readonly_fields = ("jurisdiction", "location")
    fields = (
        "name",
        "jurisdiction",
        "location",
        "description",
        "classification",
        "status",
        ("start_date", "end_date", "all_day"),
    )

    def source_link(self, obj):
        """Render an anchor to the event's "meetingdetail" source URL.

        `.get()` raises if zero or multiple matching sources exist, which
        surfaces as an admin error (same as the previous implementation).
        """
        source = obj.sources.filter(url__icontains="meetingdetail").get()
        # format_html escapes the URL and returns a safe string, replacing
        # the unescaped str.format + allow_tags pattern (`allow_tags` was
        # removed in Django 2.0 and format strings are no longer trusted).
        return format_html('<a href="{}" target="_blank">View source</a>', source.url)

    source_link.short_description = "View source"
    source_link.allow_tags = True  # no-op on Django >= 2.0; kept for older versions

    list_display = ("jurisdiction", "name", "start_date", "source_link")
    inlines = [EventParticipantInline]
@admin.register(models.EventMedia)
class EventMediaAdmin(admin.ModelAdmin):
    """Default admin for event media."""

    pass
@admin.register(models.EventDocument)
class EventDocumentAdmin(admin.ModelAdmin):
    """Admin for documents attached to events."""

    readonly_fields = ("event",)
    list_display = ("event", "date", "note")
@admin.register(models.EventSource)
class EventSourceAdmin(admin.ModelAdmin):
    """Admin for event source URLs (event link is read-only)."""

    readonly_fields = ("event",)
@admin.register(models.EventParticipant)
class EventParticipantAdmin(admin.ModelAdmin):
    """Default admin for event participants."""

    pass
@admin.register(models.EventAgendaItem)
class EventAgendaItemAdmin(admin.ModelAdmin):
    """Admin for agenda items, with truncated event/description columns."""

    readonly_fields = ("event",)
    fields = ("event", "description", "classification", "order", "subjects", "notes")

    def get_truncated_description(self, obj):
        # Keep the changelist column readable: first 25 words only
        return defaultfilters.truncatewords(obj.description, 25)

    get_truncated_description.short_description = "Description"

    def get_truncated_event_name(self, obj):
        # First 8 words of the parent event's name
        return defaultfilters.truncatewords(obj.event.name, 8)

    get_truncated_event_name.short_description = "Event Name"

    list_display = ("get_truncated_event_name", "get_truncated_description")
@admin.register(models.EventRelatedEntity)
class EventRelatedEntityAdmin(admin.ModelAdmin):
    """Default admin for entities related to events."""

    pass
@admin.register(models.EventAgendaMedia)
class EventAgendaMediaAdmin(admin.ModelAdmin):
    """Default admin for media attached to agenda items."""

    pass
| 26,019
|
https://github.com/essensoft/paymen/blob/master/src/Essensoft.Paylink.Alipay/Domain/KoubeiMarketingDataCustomreportSaveModel.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
paymen
|
essensoft
|
C#
|
Code
| 35
| 142
|
using System.Text.Json.Serialization;
namespace Essensoft.Paylink.Alipay.Domain
{
    /// <summary>
    /// KoubeiMarketingDataCustomreportSaveModel Data Structure.
    /// </summary>
    public class KoubeiMarketingDataCustomreportSaveModel : AlipayObject
    {
        /// <summary>
        /// Custom report rule condition information.
        /// </summary>
        [JsonPropertyName("report_condition_info")]
        public CustomReportCondition ReportConditionInfo { get; set; }
    }
}
| 48,385
|
https://github.com/BrandEmbassy/php-file-type-detector/blob/master/src/FileInfoTest.php
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
php-file-type-detector
|
BrandEmbassy
|
PHP
|
Code
| 61
| 248
|
<?php declare(strict_types = 1);
namespace BrandEmbassy\FileTypeDetector;
use PHPUnit\Framework\TestCase;
class FileInfoTest extends TestCase
{
    /**
     * Smoke test: a FileInfo built from any known Extension must expose all
     * of its attributes without throwing.
     *
     * @dataProvider extensionDataProvider()
     */
    public function testFileInfoAlwaysHasAllAttributes(Extension $extension, bool $isCreatedFromFileName): void
    {
        $fileInfo = new FileInfo($extension, $isCreatedFromFileName);

        // Each accessor must be callable for every extension value
        $fileInfo->getExtension();
        $fileInfo->getFileType();
        $fileInfo->getMimeType();
        $fileInfo->isCreatedFromFileName();

        $this->expectNotToPerformAssertions();
    }

    /**
     * Yields every Extension value twice: once flagged as created from a
     * file name, once not.
     *
     * @return iterable<array<Extension|bool>>
     */
    public function extensionDataProvider(): iterable
    {
        foreach (Extension::getValues() as $value) {
            yield [Extension::get($value), true];
            yield [Extension::get($value), false];
        }
    }
}
| 2,051
|
https://github.com/igiu1988/iOS13.4.1-Runtime-Headers/blob/master/PrivateFrameworks/UIKitCore.framework/_UIDebugLogReport.h
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
iOS13.4.1-Runtime-Headers
|
igiu1988
|
Objective-C
|
Code
| 127
| 505
|
/* Generated by EzioChiu
Image: /System/Library/PrivateFrameworks/UIKitCore.framework/UIKitCore
*/
// Collects debug-log statements with hierarchical indentation: messages are
// accumulated into `_statements`, prefixed according to a stack of message
// prefixes and the current indent level.
// NOTE(review): behavior inferred from the declared API only — this is a
// class-dump header; the implementation is not visible here.
@interface _UIDebugLogReport : NSObject {
    unsigned long long _currentIndentLevel;
    id /* block */ _fallbackMessagePrefixHandler;
    NSMutableArray * _prefixStack;
    NSMutableArray * _statements;
}

@property (nonatomic) unsigned long long currentIndentLevel;
@property (nonatomic, copy) id /* block */ fallbackMessagePrefixHandler;
@property (nonatomic, readonly) unsigned long long messageCount;
@property (getter=_prefixStack, nonatomic, retain) NSMutableArray *prefixStack;
@property (getter=_statements, nonatomic, retain) NSMutableArray *statements;

- (void).cxx_destruct;
- (id)_messagePrefixAtIndentLevel:(unsigned long long)arg1;
- (id)_prefixStack;
- (id)_statements;
- (void)addLineBreak;
- (void)addMessage:(id)arg1;
- (void)addMessageWithFormat:(id)arg1;
- (void)clearAllMessagePrefixes;
- (unsigned long long)currentIndentLevel;
- (void)decrementIndentLevel;
- (void)decrementIndentLevelAndPopMessagePrefix;
- (id /* block */)fallbackMessagePrefixHandler;
- (void)incrementIndentLevel;
- (void)incrementIndentLevelAndPushMessagePrefix:(id)arg1;
- (id)init;
- (unsigned long long)messageCount;
- (void)popMessagePrefix;
- (void)pushMessagePrefix:(id)arg1;
- (void)pushMessagePrefixHandler:(id /* block */)arg1;
- (void)resetIndentLevel;
- (void)setCurrentIndentLevel:(unsigned long long)arg1;
- (void)setFallbackMessagePrefixHandler:(id /* block */)arg1;
- (void)setPrefixStack:(id)arg1;
- (void)setStatements:(id)arg1;
@end
| 43,446
|
https://github.com/nohros/must/blob/master/src/base/common/configuration/builders/ThrowableConfigurationBuilder.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
must
|
nohros
|
C#
|
Code
| 86
| 222
|
using System;
namespace Nohros.Configuration.Builders
{
/// <summary>
/// A implementation of the <see cref="IConfigurationBuilder{T}"/> that
/// throws an <see cref="NotImplementedException"/> when <see cref="Build"/>
/// is called.
/// </summary>
/// <remarks>
/// This main purpose of this class is to allow classes to
/// <see cref="AbstractConfigurationLoader{T}"/> class without implementing
/// the <see cref="IConfigurationBuilder{T}"/> interface.
/// </remarks>
internal class ThrowableConfigurationBuilder<T> :
AbstractConfigurationBuilder<T> where T : IConfiguration {
/// <summary>
/// Throws an <see cref="NotImplementedException"/> exception.
/// </summary>
public override T Build() {
throw new NotImplementedException();
}
}
}
| 48,038
|
https://github.com/pvjones/boilerplate-ts-react-redux-auth/blob/master/src/app/store/reducers/security.reducer.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
boilerplate-ts-react-redux-auth
|
pvjones
|
TypeScript
|
Code
| 84
| 267
|
import { fromJS, Map } from 'immutable'
import { serializeStateToLocalStorage, deserializeStateFromLocalStorage } from '../../../utils/storage.utils'
import actionDefs from '../actions/actionDefs'
import { SecurityState, Reducer, PayloadFunc } from '../store.models'
// Merges the incoming session payload (converted to an immutable Map)
// into the state under the `session` key.
const setSession: PayloadFunc<SecurityState, any> = (state, payload) =>
  state.merge(fromJS({ session: payload }))
// Security reducer: every session change is written through to localStorage;
// the initial state is rehydrated from localStorage.
const reducer: Reducer<SecurityState> = (state = deserializeSession(), action) => {
  if (action.type === actionDefs.Security.Session.Set) {
    return serializeSession(setSession(state, action.payload))
  }
  if (action.type === actionDefs.Security.Session.Clear) {
    return serializeSession(Map())
  }
  return state
}

export default reducer
// Persists the `session` slice to localStorage, passing the state through.
const serializeSession = state =>
  serializeStateToLocalStorage(state, 'userSession', ['session'])
// Rehydrates the `session` slice from localStorage (empty Map as fallback).
const deserializeSession = () =>
  deserializeStateFromLocalStorage(Map(), 'userSession', ['session'])
| 11,294
|
https://github.com/Spessi/Thor/blob/master/include/Thor/Resources/Detail/ResourceHolder.inl
|
Github Open Source
|
Open Source
|
Zlib
| 2,022
|
Thor
|
Spessi
|
C++
|
Code
| 604
| 1,325
|
/////////////////////////////////////////////////////////////////////////////////
//
// Thor C++ Library
// Copyright (c) 2011-2021 Jan Haller
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
//
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
//
// 3. This notice may not be removed or altered from any source distribution.
//
/////////////////////////////////////////////////////////////////////////////////
namespace thor
{

// Constructs an empty resource holder.
template <typename R, typename I, class O>
ResourceHolder<R, I, O>::ResourceHolder()
: mMap()
{
}

// Move constructor: takes over the source holder's resource map.
template <typename R, typename I, class O>
ResourceHolder<R, I, O>::ResourceHolder(ResourceHolder&& source)
: mMap(std::move(source.mMap))
{
}

// Move assignment: replaces this holder's map with the source's.
template <typename R, typename I, class O>
ResourceHolder<R, I, O>& ResourceHolder<R, I, O>::operator= (ResourceHolder&& source)
{
	mMap = std::move(source.mMap);
	return *this;
}

// Returns the resource stored under 'id', loading it with 'how' if it is not
// yet known; a known id is handled according to the KnownIdStrategy.
template <typename R, typename I, class O>
typename ResourceHolder<R, I, O>::Resource ResourceHolder<R, I, O>::acquire(const I& id, const ResourceLoader<R>& how, Resources::KnownIdStrategy known)
{
	// ID is new: we always load the resource
	auto found = mMap.find(id);
	if (found == mMap.end())
		return load(id, how);

	// ID is known: behavior depends on strategy
	switch (known)
	{
		default: // TODO: Assume 'default' unreachable
		case Resources::AssumeNew:
			throw ResourceAccessException("Failed to load resource, ID already stored in ResourceHolder");

		case Resources::Reload:
			release(id);
			return load(id, how);

		case Resources::Reuse:
			return Om::makeReturned(found->second);
	}
}

// Removes the resource stored under 'id'; throws if the id is unknown.
template <typename R, typename I, class O>
void ResourceHolder<R, I, O>::release(const I& id)
{
	auto found = mMap.find(id);
	if (found == mMap.end())
		throw ResourceAccessException("Failed to release resource, ID not currently stored in ResourceHolder");

	mMap.erase(found);
}

// Accesses an already-stored resource; throws if the id is unknown.
template <typename R, typename I, class O>
typename ResourceHolder<R, I, O>::Resource ResourceHolder<R, I, O>::operator[] (const I& id)
{
	auto found = mMap.find(id);
	if (found == mMap.end())
		throw ResourceAccessException("Failed to access resource, ID not currently stored in ResourceHolder");

	return Om::makeReturned(found->second);
}

// Const overload of the accessor above.
template <typename R, typename I, class O>
typename ResourceHolder<R, I, O>::ConstResource ResourceHolder<R, I, O>::operator[] (const I& id) const
{
	auto found = mMap.find(id);
	if (found == mMap.end())
		throw ResourceAccessException("Failed to access resource, ID not currently stored in ResourceHolder");

	return Om::makeReturned(found->second);
}

// Loads a resource that is known to be absent and stores it under 'id'.
template <typename R, typename I, class O>
typename ResourceHolder<R, I, O>::Resource ResourceHolder<R, I, O>::load(const I& id, const ResourceLoader<R>& what)
{
	assert(mMap.find(id) == mMap.end());

	// Loading process is rather complicated because it has to respect different ownership semantics.
	// That's why the resource is moved several times. The data flow is as follows:
	//   original (temporary) ----> loaded (temporary) .---> returned (handed out to user)
	//                                                 `--> stored (stored in resource holder's map)
	std::unique_ptr<R> original = what.load();
	if (!original)
		throw ResourceLoadingException("Failed to load resource \"" + what.getInfo() + "\"");

	// Insert initially empty element, to learn about its iterator
	auto inserted = mMap.insert(std::make_pair(id, typename Om::Stored())).first;

	// For ownership policies that try to be smart and remove resources from the holder when unused,
	// we need to pass them information about the container and the iterator referring to the element
	auto elementRef = detail::makeElementRef(mMap, inserted);

	// Create temporary 'loaded' object and from it, 'returned' object given to user
	typename Om::Loaded loaded = Om::makeLoaded(std::move(original), std::move(elementRef));
	typename Om::Returned returned = Om::makeReturned(loaded);

	// Actually store resource (together with tracking element) in map
	inserted->second = Om::makeStored(std::move(loaded));
	return returned;
}

} // namespace thor
| 17,619
|
https://github.com/david-szabo97/cpvault.com/blob/master/src/style/pages/default/default.sass
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,017
|
cpvault.com
|
david-szabo97
|
Sass
|
Code
| 153
| 526
|
// Styling for the default page: centered logo header, headings, and the
// input/button "box" form element.
// NOTE(review): the nesting indentation of this indented-syntax Sass file
// was destroyed upstream; lines are preserved verbatim below — restore the
// original indentation from version control before editing further.
main.default
section
height: 100%
display: flex
flex-direction: column
padding: $gutter
flex: 1 0 auto
.logo-container
display: flex
flex-direction: row
flex: 1 0 auto
align-items: center
justify-content: center
h1
flex: 0 0 auto
padding-left: $spacing*2
img.logo
float: left
max-width: 6em
display: block
flex: 0 0 0
@media screen and (min-height: 760px)
max-width: 8em
float: none
@media screen and (min-height: 1000px)
max-width: 12em
h1
font-size: 3em
color: $color-dark-text-secondary
padding: $spacing*3 $spacing $spacing*3 $spacing
font-weight: 300
text-align: center
h2
font-size: 1.5em
color: $color-dark-text-secondary
padding: $spacing*2 $spacing $spacing*2 $spacing
font-weight: 300
input,
button
width: 100%
height: 48px
outline: 0
border: 0
flex: 0 0 auto
padding: 0 $spacing
font-size: 1em
input + input
border-top: 1px solid $color-dark-text-divider
.box
+shadow-2
display: flex
flex-direction: column
flex: 0 0 auto
background: #fff
button
background: $secondary-color
color: $color-light-text-primary
text-transform: uppercase
font-weight: 300
cursor: pointer
transition: background .25s $transition-ease
font-size: 1.2em
&:hover
background: lighten($secondary-color, 5%)
&[disabled]
cursor: not-allowed
background: lighten($secondary-color, 10%) !important
| 37,399
|
https://github.com/zathras777/core/blob/master/tests/components/ambee/test_sensor.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
core
|
zathras777
|
Python
|
Code
| 984
| 4,688
|
"""Tests for the sensors provided by the Ambee integration."""
from unittest.mock import AsyncMock
import pytest
from homeassistant.components.ambee.const import DEVICE_CLASS_AMBEE_RISK, DOMAIN
from homeassistant.components.sensor import (
ATTR_STATE_CLASS,
DOMAIN as SENSOR_DOMAIN,
STATE_CLASS_MEASUREMENT,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_MILLION,
DEVICE_CLASS_CO,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from tests.common import MockConfigEntry
async def test_air_quality(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
) -> None:
    """Test the Ambee Air Quality sensors.

    All seven sensors share the same checks (registry entry, state value,
    friendly name, measurement state class, no icon), so they are driven
    through a local helper instead of seven copy-pasted assertion blocks.
    The set of assertions is identical to the previous expanded form.
    """
    entry_id = init_integration.entry_id
    entity_registry = er.async_get(hass)
    device_registry = dr.async_get(hass)

    def assert_sensor(key, value, name, *, unit=None, device_class=None):
        """Assert state/registry data for one air-quality sensor; return its entry."""
        entity_id = f"sensor.air_quality_{key}"
        state = hass.states.get(entity_id)
        entry = entity_registry.async_get(entity_id)
        assert entry
        assert state
        assert entry.unique_id == f"{entry_id}_air_quality_{key}"
        assert state.state == value
        assert state.attributes.get(ATTR_FRIENDLY_NAME) == name
        assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
        if unit is None:
            assert ATTR_UNIT_OF_MEASUREMENT not in state.attributes
        else:
            assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == unit
        if device_class is None:
            assert ATTR_DEVICE_CLASS not in state.attributes
        else:
            assert state.attributes.get(ATTR_DEVICE_CLASS) == device_class
        # None of the air quality sensors define a custom icon
        assert ATTR_ICON not in state.attributes
        return entry

    assert_sensor(
        "particulate_matter_2_5",
        "3.14",
        "Particulate Matter < 2.5 μm",
        unit=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    )
    assert_sensor(
        "particulate_matter_10",
        "5.24",
        "Particulate Matter < 10 μm",
        unit=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    )
    assert_sensor(
        "sulphur_dioxide",
        "0.031",
        "Sulphur Dioxide (SO2)",
        unit=CONCENTRATION_PARTS_PER_BILLION,
    )
    assert_sensor(
        "nitrogen_dioxide",
        "0.66",
        "Nitrogen Dioxide (NO2)",
        unit=CONCENTRATION_PARTS_PER_BILLION,
    )
    assert_sensor(
        "ozone",
        "17.067",
        "Ozone",
        unit=CONCENTRATION_PARTS_PER_BILLION,
    )
    assert_sensor(
        "carbon_monoxide",
        "0.105",
        "Carbon Monoxide (CO)",
        unit=CONCENTRATION_PARTS_PER_MILLION,
        device_class=DEVICE_CLASS_CO,
    )
    entry = assert_sensor("air_quality_index", "13", "Air Quality Index (AQI)")

    # All air quality sensors hang off a single "service" device entry
    assert entry.device_id
    device_entry = device_registry.async_get(entry.device_id)
    assert device_entry
    assert device_entry.identifiers == {(DOMAIN, f"{entry_id}_air_quality")}
    assert device_entry.manufacturer == "Ambee"
    assert device_entry.name == "Air Quality"
    assert device_entry.entry_type is dr.DeviceEntryType.SERVICE
    assert not device_entry.model
    assert not device_entry.sw_version
async def test_pollen(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
) -> None:
    """Test the Ambee Pollen sensors.

    Count sensors and risk sensors only differ in their attribute set, so
    they are driven through a local helper with a `risk` switch instead of
    six copy-pasted assertion blocks. The set of assertions is identical to
    the previous expanded form.
    """
    entry_id = init_integration.entry_id
    entity_registry = er.async_get(hass)
    device_registry = dr.async_get(hass)

    def assert_sensor(key, value, name, icon, *, risk=False):
        """Assert state/registry data for one pollen sensor; return its entry."""
        entity_id = f"sensor.pollen_{key}"
        state = hass.states.get(entity_id)
        entry = entity_registry.async_get(entity_id)
        assert entry
        assert state
        assert entry.unique_id == f"{entry_id}_pollen_{key}"
        assert state.state == value
        assert state.attributes.get(ATTR_FRIENDLY_NAME) == name
        assert state.attributes.get(ATTR_ICON) == icon
        if risk:
            # Risk sensors expose the custom Ambee risk device class and
            # have neither a state class nor a unit.
            assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_AMBEE_RISK
            assert ATTR_STATE_CLASS not in state.attributes
            assert ATTR_UNIT_OF_MEASUREMENT not in state.attributes
        else:
            # Count sensors are plain measurements in particles per m³.
            assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
            assert (
                state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
                == CONCENTRATION_PARTS_PER_CUBIC_METER
            )
            assert ATTR_DEVICE_CLASS not in state.attributes
        return entry

    assert_sensor("grass", "190", "Grass Pollen", "mdi:grass")
    assert_sensor("tree", "127", "Tree Pollen", "mdi:tree")
    assert_sensor("weed", "95", "Weed Pollen", "mdi:sprout")
    assert_sensor("grass_risk", "high", "Grass Pollen Risk", "mdi:grass", risk=True)
    assert_sensor("tree_risk", "moderate", "Tree Pollen Risk", "mdi:tree", risk=True)
    entry = assert_sensor("weed_risk", "high", "Weed Pollen Risk", "mdi:sprout", risk=True)

    # All pollen sensors hang off a single "service" device entry
    assert entry.device_id
    device_entry = device_registry.async_get(entry.device_id)
    assert device_entry
    assert device_entry.identifiers == {(DOMAIN, f"{entry_id}_pollen")}
    assert device_entry.manufacturer == "Ambee"
    assert device_entry.name == "Pollen"
    assert device_entry.entry_type is dr.DeviceEntryType.SERVICE
    assert not device_entry.model
    assert not device_entry.sw_version
@pytest.mark.parametrize(
    "entity_id",
    (
        "sensor.pollen_grass_poaceae",
        "sensor.pollen_tree_alder",
        "sensor.pollen_tree_birch",
        "sensor.pollen_tree_cypress",
        "sensor.pollen_tree_elm",
        "sensor.pollen_tree_hazel",
        "sensor.pollen_tree_oak",
        "sensor.pollen_tree_pine",
        "sensor.pollen_tree_plane",
        "sensor.pollen_tree_poplar",
        "sensor.pollen_weed_chenopod",
        "sensor.pollen_weed_mugwort",
        "sensor.pollen_weed_nettle",
        "sensor.pollen_weed_ragweed",
    ),
)
async def test_pollen_disabled_by_default(
    hass: HomeAssistant, init_integration: MockConfigEntry, entity_id: str
) -> None:
    """Test the Ambee Pollen sensors that are disabled by default."""
    entity_registry = er.async_get(hass)

    # A disabled entity must not get a state object.
    state = hass.states.get(entity_id)
    assert state is None

    # The registry entry exists, but is marked disabled by the integration.
    entry = entity_registry.async_get(entity_id)
    assert entry
    assert entry.disabled
    assert entry.disabled_by == er.DISABLED_INTEGRATION
@pytest.mark.parametrize(
    "key,icon,name,value",
    [
        ("grass_poaceae", "mdi:grass", "Poaceae Grass Pollen", "190"),
        ("tree_alder", "mdi:tree", "Alder Tree Pollen", "0"),
        ("tree_birch", "mdi:tree", "Birch Tree Pollen", "35"),
        ("tree_cypress", "mdi:tree", "Cypress Tree Pollen", "0"),
        ("tree_elm", "mdi:tree", "Elm Tree Pollen", "0"),
        ("tree_hazel", "mdi:tree", "Hazel Tree Pollen", "0"),
        ("tree_oak", "mdi:tree", "Oak Tree Pollen", "55"),
        ("tree_pine", "mdi:tree", "Pine Tree Pollen", "30"),
        ("tree_plane", "mdi:tree", "Plane Tree Pollen", "5"),
        ("tree_poplar", "mdi:tree", "Poplar Tree Pollen", "0"),
        ("weed_chenopod", "mdi:sprout", "Chenopod Weed Pollen", "0"),
        ("weed_mugwort", "mdi:sprout", "Mugwort Weed Pollen", "1"),
        ("weed_nettle", "mdi:sprout", "Nettle Weed Pollen", "88"),
        ("weed_ragweed", "mdi:sprout", "Ragweed Weed Pollen", "3"),
    ],
)
async def test_pollen_enable_disable_by_defaults(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_ambee: AsyncMock,
    key: str,
    icon: str,
    name: str,
    value: str,
) -> None:
    """Test the Ambee Pollen sensors that are disabled by default."""
    entry_id = mock_config_entry.entry_id
    entity_id = f"{SENSOR_DOMAIN}.pollen_{key}"
    entity_registry = er.async_get(hass)

    # Pre-create registry entry for disabled by default sensor
    entity_registry.async_get_or_create(
        SENSOR_DOMAIN,
        DOMAIN,
        f"{entry_id}_pollen_{key}",
        suggested_object_id=f"pollen_{key}",
        disabled_by=None,
    )

    mock_config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    # With the registry entry force-enabled above, the sensor is created and
    # exposes the mocked concentration value.
    state = hass.states.get(entity_id)
    entry = entity_registry.async_get(entity_id)
    assert entry
    assert state
    assert entry.unique_id == f"{entry_id}_pollen_{key}"
    assert state.state == value
    assert state.attributes.get(ATTR_FRIENDLY_NAME) == name
    assert state.attributes.get(ATTR_ICON) == icon
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    assert (
        state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        == CONCENTRATION_PARTS_PER_CUBIC_METER
    )
    assert ATTR_DEVICE_CLASS not in state.attributes
| 9,129
|
https://github.com/RuleLau/nacos-spring-cloud-demo/blob/master/springcloud-auth/src/main/java/com/rule/config/DomainUserDetailsService.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
nacos-spring-cloud-demo
|
RuleLau
|
Java
|
Code
| 77
| 375
|
package com.rule.config;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.rule.common.entity.UserInfo;
import com.rule.common.mapper.UserMapper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;

/**
 * Spring Security {@link UserDetailsService} backed by the application's
 * user table (queried through MyBatis-Plus).
 */
@Slf4j
@Service("userDetailsService")
public class DomainUserDetailsService implements UserDetailsService {

    @Resource
    private UserMapper userMapper;

    /**
     * Loads a user by login name for authentication.
     *
     * @param username the login name to look up
     * @return a Spring Security {@link User} carrying the stored password and authorities
     * @throws UsernameNotFoundException if no user row matches the given username
     */
    @Override
    public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
        // Query the user record from the database by username.
        QueryWrapper<UserInfo> wrapper = new QueryWrapper<>();
        wrapper.eq("username", username);
        UserInfo user = userMapper.selectOne(wrapper);
        if (user == null) {
            throw new UsernameNotFoundException("用户" + username + "不存在");
        }
        // Authorities are stored on the user row as a comma-separated string.
        return new User(user.getUsername(), user.getPassword(),
                AuthorityUtils.commaSeparatedStringToAuthorityList(user.getAuthorities()));
    }
}
| 592
|
https://github.com/la-mar/prodstats/blob/master/src/prodstats/db/models/providers.py
|
Github Open Source
|
Open Source
|
MIT
| null |
prodstats
|
la-mar
|
Python
|
Code
| 26
| 61
|
# from __future__ import annotations
# from const import HoleDirection, Provider
# from db.models.bases import Base, db
# __all__ = ["IDMaster"]
# class IDMaster(Base):
# pass
| 40,386
|
https://github.com/castronu/quickat/blob/master/src/main/java/org/quickat/telnet/ClientConnection.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
quickat
|
castronu
|
Java
|
Code
| 230
| 832
|
package org.quickat.telnet;

import org.apache.velocity.app.VelocityEngine;
import org.quickat.telnet.commands.Command;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Lazy;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import org.springframework.ui.velocity.VelocityEngineUtils;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Handles one telnet client connection on its own thread: prints a welcome
 * banner rendered from a Velocity template, then reads commands line by line
 * and dispatches them to the registered {@link Command} beans until the
 * client sends "quit" or disconnects.
 *
 * @author Christophe Pollet
 */
@Component
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
@Lazy
public class ClientConnection extends Thread {
    private static final Logger logger = LoggerFactory.getLogger(ClientConnection.class);

    @Autowired
    private VelocityEngine velocityEngine;

    @Autowired
    private List<Command> commands;

    private Socket socket;

    public void setSocket(Socket socket) {
        this.socket = socket;
    }

    @Override
    public void run() {
        logger.info("New client connected from " + socket.getRemoteSocketAddress().toString());

        try (BufferedReader in = new BufferedReader(new InputStreamReader(socket.getInputStream()));
             PrintWriter out = new PrintWriter(socket.getOutputStream())) {
            welcomeMessage(out);
            commandsLoop(in, out);
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            closeSocket();
        }
    }

    // Renders the Velocity welcome template and sends it to the client.
    private void welcomeMessage(PrintWriter out) {
        String content = VelocityEngineUtils.mergeTemplateIntoString(velocityEngine, "telnet/welcome.vm", "UTF-8", Collections.<String, Object>emptyMap());
        out.append(content).flush();
    }

    // Reads and dispatches commands until "quit" or end-of-stream.
    private void commandsLoop(BufferedReader in, PrintWriter out) throws IOException {
        while (true) {
            out.append("> ").flush();
            String line = in.readLine();

            // BUG FIX: readLine() returns null when the client drops the
            // connection without sending "quit"; the original code then threw
            // a NullPointerException on line.split(...) below.
            if (line == null) {
                return;
            }

            logger.info(socket.getRemoteSocketAddress().toString() + " -- " + line);

            if ("quit".equals(line)) {
                out.append("Bye.\n").flush();
                return;
            }

            String[] parts = line.split(" ");
            String[] args = Arrays.copyOfRange(parts, 1, parts.length);
            executeCommand(parts[0], args, out);
        }
    }

    // Dispatches to the first registered Command that responds to cmd.
    private void executeCommand(String cmd, String[] args, PrintWriter out) {
        for (Command command : commands) {
            if (command.respondsTo(cmd)) {
                command.execute(args, out);
                return;
            }
        }

        out.append("Command [").append(cmd).append("] not implemented\n").flush();
    }

    // Closes the client socket; failures are surfaced as runtime exceptions.
    private void closeSocket() {
        try {
            socket.close();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
| 4,260
|
https://github.com/SawyerSven/element-ts-admin/blob/master/src/views/Diary/Diary.vue
|
Github Open Source
|
Open Source
|
MIT
| null |
element-ts-admin
|
SawyerSven
|
Vue
|
Code
| 439
| 1,495
|
<template>
<div class="diary">
<TableFilter @search="search" :filter-list="searchList" v-model="searchResult"></TableFilter>
<TableMain :data="tableData" :tableObject="tableObject" style="margin-top:20px">
<template #control>
<TableControl :select-all="true"></TableControl>
</template>
</TableMain>
</div>
</template>
<script lang='ts'>
import { Component, Vue } from 'vue-property-decorator';
import { FormComponents } from '@/core/constraint';
@Component({
name: 'Diary'
})
export default class Diary extends Vue {
public searchList: FormComponents[] = [
{
label: '时间',
prop: 'daterange',
type: 'date',
options: ['a', 'b', 'c'],
dateConfig: {
'type': 'daterange',
'range-separator': '-',
'start-placeholder': '开始时间',
'end-placeholder': '结束时间'
}
},
{
label: '施工日志编号',
prop: 'code',
type: 'select',
placeholder: '请选择施工日志编号',
options: [1, 2, 3, 4, 5, 6, 7, 8, 9]
},
{
label: '施工日志状态',
prop: 'status',
type: 'select',
placeholder: '请选择施工日志状态',
options: [
{
label: '全部',
value: -1
},
{
label: '待发布',
value: 1
},
{
label: '待安排工人',
value: 2
},
{
label: '进行中',
value: 3
},
{
label: '完工',
value: 4
},
{
label: '待提交',
value: 5
},
{
label: '已提交',
value: 6
}
]
}
];
public searchResult = {};
public tableData = [
{
date: '2016-05-02',
name: '王小虎',
address: '上海市普陀区金沙江路 1518 弄'
},
{
date: '2016-05-04',
name: '王小虎',
address: '上海市普陀区金沙江路 1517 弄'
},
{
date: '2016-05-01',
name: '王小虎',
address: '上海市普陀区金沙江路 1519 弄'
},
{
date: '2016-05-03',
name: '王小虎',
address: '上海市普陀区金沙江路 1516 弄'
}
];
public tableObject = {
isShowpagination: true,
paginationOptions: {
currentChange: (e: any) => {
// console.log(e);
},
currentPage: 1,
pageSize: 1,
total: this.tableData.length
},
selection: {
open: true,
width: '100',
handle: (e: any) => {
// console.log(this)
}
},
tableDataInfo: [
{
name: '日期',
prop: 'date'
},
{
name: '姓名',
prop: 'name'
},
{
name: '地址',
prop: 'address'
}
],
tableControl: {
title: '操作',
type: 'buttons',
commands: [
{
name: '查看施工日志',
hasAuth: () => true,
isShow: () => true,
handle: (row: any, index: number) => {
// console.log(this)
}
},
{
name: '编辑',
hasAuth: () => true,
isShow: () => true,
handle: (row: any, index: number) => {
// console.log(this)
}
},
{
name: '删除',
hasAuth: () => true,
isShow: () => true,
handle: (row: any, index: number) => {
// console.log(row, index);
}
},
{
name: '发布',
hasAuth: () => true,
isShow: () => true,
handle: (row: any, index: number) => {
// console.log(row, index);
}
},
{
name: '确认做完',
hasAuth: () => true,
isShow: () => true,
handle: (row: any, index: number) => {
// console.log(row, index);
}
},
{
name: '做工安排',
hasAuth: () => true,
isShow: () => true,
handle: (row: any, index: number) => {
// console.log(row, index);
}
},
{
name: '提交',
hasAuth: () => true,
isShow: () => true,
handle: (row: any, index: number) => {
// console.log(row, index);
}
}
]
}
};
public search() {
// // console.log(this.searchResult);
}
}
</script>
<style lang='less'>
</style>
| 7,514
|
https://github.com/couchbase/gocb/blob/master/search/facets.go
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,023
|
gocb
|
couchbase
|
Go
|
Code
| 346
| 951
|
package search
import (
"encoding/json"
)
// Facet is the marker interface implemented by every search facet type.
type Facet interface {
}

// termFacetData is the wire representation of a term facet.
type termFacetData struct {
	Field string `json:"field,omitempty"`
	Size  uint64 `json:"size,omitempty"`
}

// TermFacet is a search facet that buckets results by field terms.
type TermFacet struct {
	data termFacetData
}

// MarshalJSON renders this facet in the form expected by the search REST API.
func (f TermFacet) MarshalJSON() ([]byte, error) {
	return json.Marshal(f.data)
}

// NewTermFacet creates a new TermFacet over the given field, returning at
// most size term buckets.
func NewTermFacet(field string, size uint64) *TermFacet {
	return &TermFacet{
		data: termFacetData{
			Field: field,
			Size:  size,
		},
	}
}
// numericFacetRange is the wire form of a single numeric bucket.
type numericFacetRange struct {
	Name  string  `json:"name,omitempty"`
	Start float64 `json:"min,omitempty"`
	End   float64 `json:"max,omitempty"`
}

// numericFacetData is the wire representation of a numeric range facet.
type numericFacetData struct {
	Field         string              `json:"field,omitempty"`
	Size          uint64              `json:"size,omitempty"`
	NumericRanges []numericFacetRange `json:"numeric_ranges,omitempty"`
}

// NumericFacet is a search facet that buckets results into numeric ranges.
type NumericFacet struct {
	data numericFacetData
}

// MarshalJSON renders this facet in the form expected by the search REST API.
func (f NumericFacet) MarshalJSON() ([]byte, error) {
	return json.Marshal(f.data)
}

// AddRange appends a named [start, end] bucket and returns the facet so
// calls can be chained.
// NOTE(review): because of omitempty, a bound of exactly 0 is dropped from
// the JSON — confirm this matches the server's expectations.
func (f *NumericFacet) AddRange(name string, start, end float64) *NumericFacet {
	bucket := numericFacetRange{Name: name, Start: start, End: end}
	f.data.NumericRanges = append(f.data.NumericRanges, bucket)
	return f
}

// NewNumericFacet creates a new NumericFacet over the given field, returning
// at most size buckets.
func NewNumericFacet(field string, size uint64) *NumericFacet {
	return &NumericFacet{
		data: numericFacetData{
			Field: field,
			Size:  size,
		},
	}
}
// dateFacetRange is the wire form of a single date bucket.
type dateFacetRange struct {
	Name  string `json:"name,omitempty"`
	Start string `json:"start,omitempty"`
	End   string `json:"end,omitempty"`
}

// dateFacetData is the wire representation of a date range facet.
type dateFacetData struct {
	Field      string           `json:"field,omitempty"`
	Size       uint64           `json:"size,omitempty"`
	DateRanges []dateFacetRange `json:"date_ranges,omitempty"`
}

// DateFacet is a search facet that buckets results into date ranges.
type DateFacet struct {
	data dateFacetData
}

// MarshalJSON renders this facet in the form expected by the search REST API.
func (f DateFacet) MarshalJSON() ([]byte, error) {
	return json.Marshal(f.data)
}

// AddRange appends a named [start, end] bucket and returns the facet so
// calls can be chained.
func (f *DateFacet) AddRange(name string, start, end string) *DateFacet {
	bucket := dateFacetRange{Name: name, Start: start, End: end}
	f.data.DateRanges = append(f.data.DateRanges, bucket)
	return f
}

// NewDateFacet creates a new DateFacet over the given field, returning at
// most size buckets.
func NewDateFacet(field string, size uint64) *DateFacet {
	return &DateFacet{
		data: dateFacetData{
			Field: field,
			Size:  size,
		},
	}
}
| 6,301
|
https://github.com/tskisner/sotodlib/blob/master/sotodlib/toast/pipeline_tools/demodulation.py
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
sotodlib
|
tskisner
|
Python
|
Code
| 226
| 861
|
# Copyright (c) 2020 Simons Observatory.
# Full license can be found in the top level "LICENSE" file.
import numpy as np
from toast.timing import function_timer, Timer
from toast.utils import Logger
from ..demodulation import OpDemod
def add_demodulation_args(parser):
    """Add demodulation-related command line arguments to ``parser``.

    Registers the --demodulate/--no-demodulate pair (default False), the
    demodulation kernel width, low-pass cut-off, downsampling skip and the
    --demod-2f/--no-demod-2f pair (default False).

    Args:
        parser (argparse.ArgumentParser): parser extended in place.

    Returns:
        None
    """
    parser.add_argument(
        "--demodulate",
        required=False,
        action="store_true",
        help="Demodulate and downsample HWP-modulated signal",
        dest="demodulate",
    )
    parser.add_argument(
        "--no-demodulate",
        required=False,
        action="store_false",
        help="Do not demodulate HWP-modulated signal",
        dest="demodulate",
    )
    parser.set_defaults(demodulate=False)
    parser.add_argument(
        "--demod-wkernel",
        required=False,
        # BUG FIX: `np.int` / `np.float` were deprecated aliases of the
        # builtins and were removed in NumPy 1.24; argparse only needs a
        # plain callable, so use the builtins directly.
        type=int,
        help="Width of demodulation kernel",
    )
    parser.add_argument(
        "--demod-fmax",
        required=False,
        type=float,
        help="Low-pass filter cut-off",
    )
    parser.add_argument(
        "--demod-nskip",
        type=int,
        default=3,
        help="Number of samples to skip in downsampling",
    )
    parser.add_argument(
        "--demod-2f",
        required=False,
        action="store_true",
        help="Demodulate, downsample and cache 2f signal",
        dest="demod_2f",
    )
    parser.add_argument(
        "--no-demod-2f",
        required=False,
        action="store_false",
        help="Do not demodulate 2f signal",
        dest="demod_2f",
    )
    parser.set_defaults(demod_2f=False)
    return
def demodulate(args, comm, data, name, detweights=None, madampars=None, verbose=True):
    """Demodulate and downsample the HWP-modulated signal in ``data``.

    No-op unless ``args.demodulate`` is set.

    Args:
        args: parsed arguments (see :func:`add_demodulation_args`).
        comm: communicator; unused here — TODO confirm and consider removing.
        data: TOAST data object, processed in place by ``OpDemod``.
        name (str): name of the cached signal to demodulate.
        detweights (dict, optional): detector noise weights, rewritten in
            place so each modulated detector is replaced by its three
            demodulated streams (demod0 / demod4r / demod4i), each carrying
            the original weight.
        madampars (dict, optional): Madam parameters, adjusted in place for
            the effects of the demodulation filtering.
        verbose (bool): unused here — TODO confirm and consider removing.

    Returns:
        None
    """
    if not args.demodulate:
        return
    # NOTE: removed an unused local (`log = Logger.get()`); nothing in this
    # function logged through it.
    timer = Timer()
    timer.start()
    if detweights is not None:
        # Copy each modulated detector's weight to its three demodulated
        # streams, then drop the original (modulated) entry.
        modulated = [detname for detname in detweights if "demod" not in detname]
        for detname in modulated:
            detweight = detweights[detname]
            for demodkey in ["demod0", "demod4r", "demod4i"]:
                demod_name = "{}_{}".format(demodkey, detname)
                detweights[demod_name] = detweight
            del detweights[detname]
    if madampars is not None:
        # Filtering will affect the high frequency end of the noise PSD
        madampars["radiometers"] = False
        # Intensity and polarization will be decoupled in the noise matrix
        madampars["allow_decoupling"] = True
    demod = OpDemod(
        name=name,
        wkernel=args.demod_wkernel,
        fmax=args.demod_fmax,
        nskip=args.demod_nskip,
        do_2f=args.demod_2f,
    )
    demod.exec(data)
    timer.report_clear("Demodulate")
    return
| 46,056
|
https://github.com/netlify/build/blob/master/packages/build/tests/telemetry/tests.js
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
build
|
netlify
|
JavaScript
|
Code
| 954
| 2,694
|
import { versions } from 'process'
import { Fixture, normalizeOutput, startServer } from '@netlify/testing'
import test from 'ava'
const TELEMETRY_PATH = '/track'
const BUGSNAG_TEST_KEY = '00000000000000000000000000000000'
// Normalize a recorded telemetry request so it can be snapshot: volatile,
// machine-dependent values are replaced by their `typeof`.
const normalizeSnapshot = ({ body, ...request }) => {
  const normalizedBody = normalizeBody(body)
  return { ...request, body: normalizedBody }
}

// Normalize the request payload: stable properties are kept as-is, while
// run-dependent ones (timestamp, duration, versions, OS info, plugins) are
// replaced by their `typeof`.
const normalizeBody = ({
  timestamp,
  properties: { duration, buildVersion, osPlatform, osName, nodeVersion, plugins, ...properties } = {},
  ...body
}) => {
  const normalizedProperties = { ...properties }
  // `duration` is only present on some events; keep the key out when absent.
  if (duration) {
    normalizedProperties.duration = typeof duration
  }
  normalizedProperties.nodeVersion = typeof nodeVersion
  normalizedProperties.buildVersion = typeof buildVersion
  normalizedProperties.osPlatform = typeof osPlatform
  normalizedProperties.osName = typeof osName
  if (plugins !== undefined) {
    normalizedProperties.plugins = plugins.map(normalizePlugin)
  }
  return { ...body, timestamp: typeof timestamp, properties: normalizedProperties }
}

// Normalize one plugin entry: versions vary by environment.
const normalizePlugin = ({ nodeVersion, version, ...plugin }) => {
  return { ...plugin, nodeVersion: typeof nodeVersion, version: typeof version }
}
/**
 * Run a build fixture against a mock telemetry server.
 *
 * Starts an HTTP server that records requests to TELEMETRY_PATH, runs the
 * given fixture (programmatically, or via the CLI binary when `useBinary`
 * is set), optionally snapshots the build output, and returns the recorded
 * telemetry requests so tests can assert on them. The mock server is always
 * stopped, even when the build throws.
 *
 * @param {object} t - test context (used for snapshots).
 * @param {string} fixture - fixture directory name under ./fixtures.
 * @param {object} [options] - behaviour toggles; remaining keys are passed
 *   through to the build as flags.
 * @returns {Promise<{exitCode: (number|undefined), telemetryRequests: object[]}>}
 */
const runWithApiMock = async function (
  t,
  fixture,
  {
    env = {},
    snapshot = false,
    telemetry = true,
    // Disables the timeout by default because of latency issues in the CI windows boxes
    disableTelemetryTimeout = true,
    responseStatusCode = 200,
    // By default, run build programmatically
    useBinary = false,
    waitTelemetryServer,
    ...flags
  } = {},
) {
  // Start the mock telemetry server
  const {
    scheme: schemeTelemetry,
    host: hostTelemetry,
    requests: telemetryRequests,
    stopServer,
  } = await startServer({
    path: TELEMETRY_PATH,
    wait: waitTelemetryServer,
    status: responseStatusCode,
  })
  const { testOpts = {}, ...restFlags } = flags
  try {
    const fix = new Fixture(`./fixtures/${fixture}`).withEnv(env).withFlags({
      siteId: 'test',
      testOpts: {
        // {} disables all request timeouts
        telemetryTimeout: disableTelemetryTimeout ? {} : undefined,
        telemetryOrigin: `${schemeTelemetry}://${hostTelemetry}`,
        // Any telemetry errors will be logged
        errorMonitor: true,
        ...testOpts,
      },
      telemetry,
      bugsnagKey: BUGSNAG_TEST_KEY,
      ...restFlags,
    })
    if (useBinary) {
      // CLI runs expose an exit code; programmatic runs do not.
      const { exitCode, output } = await fix.runBuildBinary()
      if (snapshot) {
        t.snapshot(normalizeOutput(output))
      }
      return { exitCode, telemetryRequests }
    }
    const output = await fix.runWithBuild()
    if (snapshot) {
      t.snapshot(normalizeOutput(output))
    }
    return { exitCode: undefined, telemetryRequests }
  } finally {
    // Always stop the mock server, even when the build throws.
    await stopServer()
  }
}
test('Telemetry success generates no logs', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'success', { snapshot: true })
t.is(telemetryRequests.length, 1)
})
test('Telemetry error only reports to error monitor and does not affect build success', async (t) => {
const { exitCode } = await runWithApiMock(t, 'success', {
responseStatusCode: 500,
// Execute via cli so that we can validate the exitCode
useBinary: true,
snapshot: true,
})
t.is(exitCode, 0)
})
test('Telemetry reports build success', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'success')
const snapshot = telemetryRequests.map(normalizeSnapshot)
t.snapshot(snapshot)
})
test('Telemetry reports local plugins success', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'plugin_success')
const snapshot = telemetryRequests.map(normalizeSnapshot)
t.snapshot(snapshot)
})
test('Telemetry reports package.json plugins success', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'plugin_package')
const snapshot = telemetryRequests.map(normalizeSnapshot)
t.snapshot(snapshot)
})
test('Telemetry reports netlify.toml-only plugins success', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'plugins_cache_config', {
testOpts: { pluginsListUrl: undefined },
})
const snapshot = telemetryRequests.map(normalizeSnapshot)
t.snapshot(snapshot)
})
test('Telemetry reports UI plugins success', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'plugins_cache_ui', {
defaultConfig: { plugins: [{ package: 'netlify-plugin-contextual-env' }] },
testOpts: { pluginsListUrl: undefined },
})
const snapshot = telemetryRequests.map(normalizeSnapshot)
t.snapshot(snapshot)
})
test('Telemetry reports build cancellation', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'cancel')
const snapshot = telemetryRequests.map(normalizeSnapshot)
t.snapshot(snapshot)
})
test('Telemetry reports user error', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'invalid')
const snapshot = telemetryRequests.map(normalizeSnapshot)
t.snapshot(snapshot)
})
test('Telemetry reports plugin error', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'plugin_error')
const snapshot = telemetryRequests.map(normalizeSnapshot)
t.snapshot(snapshot)
})
test('Telemetry is disabled by default', async (t) => {
// We're just overriding our default test harness behaviour
const { telemetryRequests } = await runWithApiMock(t, 'success', { telemetry: null })
t.is(telemetryRequests.length, 0)
})
test('Telemetry BUILD_TELEMETRY_DISABLED env var overrides flag', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'success', {
env: { BUILD_TELEMETRY_DISABLED: 'true' },
})
t.is(telemetryRequests.length, 0)
})
test('Telemetry node version reported is based on the version provided by the user', async (t) => {
const nodeVersion = '8.8.0'
const { telemetryRequests } = await runWithApiMock(t, 'success', {
nodePath: `/test/.nvm/versions/node/v${nodeVersion}/bin/node`,
})
t.is(telemetryRequests.length, 1)
t.is(telemetryRequests[0].body.properties.nodeVersion, nodeVersion)
})
test('Telemetry node version reported is based on the current process version if none is provided', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'success')
t.is(telemetryRequests.length, 1)
t.is(telemetryRequests[0].body.properties.nodeVersion, versions.node)
})
test('Telemetry reports a framework if any is given', async (t) => {
const framework = 'gatsby'
const { telemetryRequests } = await runWithApiMock(t, 'success', { framework })
t.is(telemetryRequests.length, 1)
t.is(telemetryRequests[0].body.properties.framework, framework)
})
test('Telemetry reports no framework if none is provided', async (t) => {
const { telemetryRequests } = await runWithApiMock(t, 'success')
t.is(telemetryRequests.length, 1)
t.is(telemetryRequests[0].body.properties.framework, undefined)
})
test('Telemetry reports the build id if given via BUILD_ID', async (t) => {
const buildId = 'test-build-id'
const { telemetryRequests } = await runWithApiMock(t, 'success', { env: { BUILD_ID: buildId } })
t.is(telemetryRequests.length, 1)
t.is(telemetryRequests[0].body.properties.buildId, buildId)
})
test('Telemetry reports a deploy id if given via DEPLOY_ID', async (t) => {
const deployId = 'test-deploy-id'
const { telemetryRequests } = await runWithApiMock(t, 'success', { env: { DEPLOY_ID: deployId } })
t.is(telemetryRequests.length, 1)
t.is(telemetryRequests[0].body.properties.deployId, deployId)
})
test('Telemetry reports a deploy id if given via --deployId flag', async (t) => {
const deployId = 'test-deploy-id'
const { telemetryRequests } = await runWithApiMock(t, 'success', { deployId })
t.is(telemetryRequests.length, 1)
t.is(telemetryRequests[0].body.properties.deployId, deployId)
})
test('Telemetry calls timeout by default', async (t) => {
  const { telemetryRequests } = await runWithApiMock(t, 'success', {
    // We want to rely on the default timeout value
    disableTelemetryTimeout: false,
    // Introduce an arbitrary large timeout on the server side so that we can validate the client timeout works
    waitTelemetryServer: WAIT_TELEMETRY_SERVER,
    // The error monitor snapshot should contain the timeout error
    snapshot: true,
  })
  t.is(telemetryRequests.length, 0)
})

// Declared after its use above; that is safe because the `const` is only
// read when the test callback runs, after module evaluation completes.
const WAIT_TELEMETRY_SERVER = 3e5
| 16,275
|
https://github.com/menhuan/notes/blob/master/code/codebase-master/onirigi-front/build.gradle
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,023
|
notes
|
menhuan
|
Gradle
|
Code
| 90
| 419
|
apply from: JAVA_GRADLE

buildscript {
    ext {
        springBootVersion = '1.5.10.RELEASE'
    }
    repositories {
        // NOTE(review): plain-http repositories are insecure and are rejected
        // by newer Gradle versions unless explicitly allowed — consider https.
        maven { url 'http://maven.aliyun.com/nexus/content/groups/public/' }
        maven { url 'http://repos.infervision.com' }
        mavenCentral()
    }
}

group 'com.infervision'
version '1.0'
sourceCompatibility = 1.8
targetCompatibility = 1.8

// Configures the artifact name/version of the jar.
jar {
    baseName = 'onirigi-front'
    version = '1.0.0'
}

apply plugin: 'java'
apply plugin: 'eclipse'
apply plugin: 'org.springframework.boot'
apply plugin: 'application'

repositories {
    mavenCentral()
}

// NOTE(review): second `jar` block in this script; Gradle merges it with the
// one above, so this only additionally re-enables the plain jar task.
jar {
    enabled = true
}

dependencies {
    compile project(':shiro-manage')
    compile project(':mq-manage')
    compile project(':redis-manage')
    compile project(':swagger-manage')
    compile("org.springframework.boot:spring-boot-starter-web")
    testCompile('org.springframework.boot:spring-boot-starter-test')
    compile("mysql:mysql-connector-java:${mysqlVersion}")
    compile("com.alibaba:druid:${aliVersion}")
    compile("com.alibaba:easyexcel:${excelVersion}")
    compile("org.flywaydb:flyway-core:${flywayVersion}")
}

bootRun {
    addResources = true
}
| 22,693
|
https://github.com/mohit-40/E-commerce-site/blob/master/Routes/verifyToken.js
|
Github Open Source
|
Open Source
|
MIT
| null |
E-commerce-site
|
mohit-40
|
JavaScript
|
Code
| 84
| 469
|
const jwt=require('jsonwebtoken');
// Express middleware: verifies the Bearer token from the Authorization
// header and attaches the decoded payload to req.user before calling next().
const verifyToken = (req, res, next) => {
    const authHeader = req.headers.authorization;
    if (authHeader) {
        // Header format: "Bearer <token>"
        const token = authHeader.split(' ')[1];
        jwt.verify(token, process.env.ACCESS_TOKEN_SECRET, (err, user) => {
            // BUG FIX: the original fell through to next() after responding,
            // so invalid tokens still reached the protected handler and the
            // handler's own response triggered "headers already sent" errors.
            if (err) {
                return res.status(404).json("jwt token not valid");
            }
            req.user = user;
            next();
        });
    }
    else { res.status(404).json("no jwt token availble"); }
};
// Express middleware: allows the request when the authenticated user is the
// resource owner (req.params.id) or an admin.
const verifyTokenAndAuthorization = (req, res, next) => {
    verifyToken(req, res, () => {
        const isOwner = req.user._id === req.params.id;
        if (isOwner || req.user.isAdmin) {
            next();
        } else {
            res.status(404).json("you can't perform this action");
        }
    });
};
// Express middleware: allows the request only for authenticated admins.
const verifyTokenAndAdmin = (req, res, next) => {
    verifyToken(req, res, () => {
        if (!req.user.isAdmin) {
            res.status(404).json("only admin can perform this action");
            return;
        }
        next();
    });
};
// Signs a short-lived access token (expiresIn: "5s") carrying the user's id
// and admin flag. NOTE(review): 5 seconds is unusually short — presumably for
// exercising the refresh flow; confirm before relying on it in production.
const gernateAccessToken = (user)=> jwt.sign({_id:user._id , isAdmin:user.isAdmin }, process.env.ACCESS_TOKEN_SECRET,{expiresIn:"5s"});
// Signs a refresh token with no expiry option.
// NOTE(review): "gernate" is a typo for "generate", kept because these names
// are part of the module's exported interface.
const gernateRefreshToken = (user)=> jwt.sign({_id:user._id , isAdmin:user.isAdmin } , process.env.REFRESH_TOKEN_SECRET)

module.exports={verifyToken, verifyTokenAndAuthorization,verifyTokenAndAdmin ,gernateAccessToken ,gernateRefreshToken }
| 18,147
|
https://github.com/Sub2n/JavaScript-Exercise/blob/master/route-exam/src/app/community/community.module.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
JavaScript-Exercise
|
Sub2n
|
TypeScript
|
Code
| 56
| 149
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { SharedModule } from '../shared/shared.module';
import { CommunityRoutingModule } from './community-routing.module';
import { CommunityComponent } from './components/community.component';
import { PhotoComponent } from './components/photo.component';
import { KnowhowComponent } from './components/knowhow.component';

/**
 * Feature module for the community section: declares the community, photo
 * and know-how components and wires up the section's routing module.
 */
@NgModule({
  declarations: [CommunityComponent, PhotoComponent, KnowhowComponent],
  imports: [CommonModule, SharedModule, CommunityRoutingModule]
})
export class CommunityModule {}
| 21,524
|
https://github.com/LousiesModPorts/Custom-Main-Menu/blob/master/src/main/java/lumien/custommainmenu/lib/textures/TextureResourceLocation.java
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
Custom-Main-Menu
|
LousiesModPorts
|
Java
|
Code
| 28
| 119
|
package lumien.custommainmenu.lib.textures;

import net.minecraft.client.Minecraft;
import net.minecraft.util.ResourceLocation;

/**
 * An {@code ITexture} backed by a plain Minecraft {@link ResourceLocation}.
 */
public class TextureResourceLocation extends ResourceLocation implements ITexture
{
    public TextureResourceLocation(String resourceString)
    {
        super(resourceString);
    }

    /**
     * Binds this texture on the render engine so subsequent draws use it.
     */
    @Override
    public void bind()
    {
        Minecraft.getMinecraft().renderEngine.bindTexture(this);
    }
}
| 5,592
|
https://github.com/MartinDrab/netpipe/blob/master/aes.h
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
netpipe
|
MartinDrab
|
C
|
Code
| 186
| 543
|
#ifndef __AES_H__
#define __AES_H__

/* BUG FIX: size_t is used in the prototypes below but was never declared;
 * include <stddef.h> so the header is self-contained. */
#include <stddef.h>

/* Low-level Rijndael primitives: key-schedule setup (returning the round
 * count) and single 16-byte block encryption/decryption. `rk` receives the
 * round keys; `nrounds` is the value returned by the matching setup call. */
int rijndaelSetupEncrypt(unsigned long *rk, const unsigned char *key, int keybits);
int rijndaelSetupDecrypt(unsigned long *rk, const unsigned char *key, int keybits);
void rijndaelEncrypt(const unsigned long *rk, int nrounds, const unsigned char plaintext[16], unsigned char ciphertext[16]);
void rijndaelDecrypt(const unsigned long *rk, int nrounds, const unsigned char ciphertext[16], unsigned char plaintext[16]);

/* Derived sizes for a key length given in bits: key bytes, round-key words,
 * and number of rounds. */
#define KEYLENGTH(keybits) ((keybits)/8)
#define RKLENGTH(keybits) ((keybits)/8+28)
#define NROUNDS(keybits) ((keybits)/32+6)

/* AES context: expanded round keys (sized for up to 256-bit keys), the IV
 * used by the CBC routines, and the round count. */
typedef struct _AES_Ctx {
	unsigned long RoundKeys[RKLENGTH(0x100)];
	unsigned char IV[0x10];
	int Rounds;
} AES_Ctx, *PAES_Ctx;

void AES_SetupEncrypt(PAES_Ctx Ctx, const unsigned char *Key, int KeyBits);
void AES_SetupDecrypt(PAES_Ctx Ctx, const unsigned char *Key, int KeyBits);
void AES_Encrypt(const AES_Ctx *Ctx, const unsigned char *Pt, size_t Length, unsigned char *Ct);
void AES_Decrypt(const AES_Ctx *Ctx, const unsigned char *Ct, size_t Length, unsigned char *Pt);
void AES_EncryptCBC(const AES_Ctx *Ctx, const unsigned char *Pt, size_t Length, unsigned char *Ct);
void AES_DecryptCBC(const AES_Ctx *Ctx, const unsigned char *Ct, size_t Length, unsigned char *Pt);
void AES_EncryptECB(const AES_Ctx *Ctx, const unsigned char *Pt, size_t Length, unsigned char *Ct);
void AES_DecryptECB(const AES_Ctx *Ctx, const unsigned char *Ct, size_t Length, unsigned char *Pt);
/* NOTE(review): name lacks the underscore used by every other AES_* routine;
 * kept as-is because renaming a public declaration would break callers. */
void AESDecryptCTS(const AES_Ctx *Ctx, const unsigned char *Ct, size_t Length, unsigned char *Pt);

#endif
| 46,694
|
https://github.com/fabriziobertoglio1987/surf-rails/blob/master/db/seeds/posts.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
surf-rails
|
fabriziobertoglio1987
|
Ruby
|
Code
| 279
| 1,512
|
# Seed Post records with surf photos and videos hosted on S3.
# Full Bondi photo catalogue kept for reference; only the subsets below are seeded.
bondi_all = ['bondi_0466.JPG','bondi_0467.JPG','bondi_0468.JPG','bondi_0471.JPG','bondi_0472.JPG','bondi_0473.JPG','bondi_0484.JPG','bondi_0494.JPG','bondi_0503.JPG','bondi_0504.JPG','bondi_0505.JPG','bondi_0506.JPG','bondi_0507.JPG','bondi_0511.JPG','bondi_0512.JPG','bondi_0513.JPG','bondi_0523.JPG','bondi_0554.JPG','bondi_0567.JPG','bondi_0568.JPG','bondi_0569.JPG','bondi_0570.JPG','bondi_0571.JPG','bondi_0572.JPG','bondi_0573.JPG','bondi_0596.JPG']
bondi = ['bondi_0596.JPG', 'bondi_0571.JPG', 'bondi_0569.JPG', 'bondi_0568.JPG', 'bondi_0554.JPG', 'bondi_0523.JPG', 'bondi_0513.JPG', 'bondi_0471.JPG', 'bondi_0468.JPG']
balangan = ['balangan_0735.JPG']
# 'balangan_0728.JPG','balangan_0732.JPG','balangan_0733.JPG','balangan_0735.JPG','balangan_0736.JPG','balangan_0737.JPG','balangan_0739.JPG']
kuta = ['kuta_0687.JPG']
videos = ["bronte-min.mp4", "costline-min.mp4", "surfer-min.mp4", "seaside-min.mp4"]
videos_high = ["bronte-max.mp4", "costline-max.mp4", "surfer-max.mp4", "seaside-max.mp4"]
posters = ["bronte-poster.png", "costline-poster.png", "surfer-poster.png", "seaside-poster.png"]
posters_high = ["bronte-poster-max.png", "costline-poster-max.png", "surfer-poster-max.png", "seaside-poster-max.png"]
spots = ["Bronte Reef", "Brace Cove", "Balangan", "Brace Cove"]
domain = 'https://surfcheck.s3.eu-central-1.amazonaws.com'
folder = "/seeds/images/"
# Reuse the admin user on re-runs instead of assigning inside a condition.
user = User.find_by(email: "admin@user.com") || User.create(email: "admin@user.com", password: "fabrizio")
# Creates one photo post at the given location, pulling the picture from S3.
# Extracted once instead of repeating the same body for each beach list.
create_image_post = lambda do |picture_name, location|
  image_url = "#{domain}#{folder}#{picture_name}"
  post = Post.new(user: user, longitude: location.longitude, latitude: location.latitude, location: location)
  post.remote_picture_url = image_url
  post.save
  if post.valid?
    puts "post saved, picture url: #{post.picture.url}"
  else
    puts post.errors.full_messages
  end
end
balangan.each do |picture_name|
  create_image_post.call(picture_name, Location.find_by(name: "Balangan"))
end
# Bondi photos are scattered over a random nearby spot each.
bondi_spots = ["Tama Reef", "Bondi Beach", "Bronte Reef", "Mckenzies", "The Boot", "South Bondi"]
bondi.each do |picture_name|
  create_image_post.call(picture_name, Location.find_by(name: bondi_spots.sample))
end
kuta.each do |picture_name|
  create_image_post.call(picture_name, Location.find_by(name: "Kuta Beach"))
end
# Video posts carry both a low- and high-resolution rendition plus posters,
# stored as a structured hash on the post.
videos.each.with_index do |video_name, index|
  folder = "/seeds/videos/"
  video_url = "#{domain}#{folder}#{video_name}"
  video_high_url = "#{domain}#{folder}#{videos_high[index]}"
  poster_high_url = "#{domain}#{folder}#{posters_high[index]}"
  poster_url = "#{domain}#{folder}#{posters[index]}"
  location = Location.find_by(name: spots[index])
  post = Post.new(user: user, longitude: location.longitude, latitude: location.latitude, location: location)
  post.video = { url_name: video_name, poster_name: posters[index], url: video_url, poster: poster_url, high: { url_name: videos_high[index], url: video_high_url, poster_name: posters_high[index], poster: poster_high_url }}
  post.save
  puts "post saved, picture url: #{post.picture.url}" if post.valid?
  puts post.errors.full_messages unless post.valid?
  puts location.inspect unless post.valid?
end
| 48,403
|
https://github.com/taintp21/littleandlittle_pj/blob/master/Modules/Translation/Resources/lang/core/en/core.php
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
littleandlittle_pj
|
taintp21
|
PHP
|
Code
| 408
| 1,008
|
<?php
// English translations for the core module (admin UI chrome, CRUD messages,
// error pages).  Keys are referenced as trans('core::core.<section>.<key>').
return [
'modal' => [
'title' => 'Confirmation',
'confirmation-message' => 'Are you sure you want to delete this record?',
],
'table' => [
'created at' => 'Created at',
'updated at' => 'Updated at',
'deleted at' => 'Deleted at',
'actions' => 'Actions',
'thumbnail' => 'Thumbnail',
],
// NOTE(review): some tab labels are in the language's native form (Greek,
// Macedonian, Turkish, Swedish, Hungarian) while the rest are English --
// possibly intentional for a language switcher; confirm before normalizing.
'tab' => [
'english' => 'English',
'french' => 'French',
'dutch' => 'Dutch',
'italian' => 'Italian',
'greek' => 'Ελληνικά',
'spanish' => 'Spanish',
'polish' => 'Polish',
'portuguese' => 'Portuguese',
'arabic' => 'Arabic',
'macedonian' => 'Македонски',
'turkish' => 'Türkçe',
'czech' => 'Czech',
'swedish' => 'Svenska',
'korean' => 'Korean',
'hungarian' => 'Magyar',
],
'button' => [
'cancel' => 'Cancel',
'create' => 'Create',
'update' => 'Update',
'delete' => 'Delete',
'reset' => 'Reset',
'see' => 'See',
'visualize' => 'Visualize',
'update and back' => 'Update and go back',
],
'save' => 'Save',
'confirm' => 'Confirm',
'move' => 'Move',
'widget' => 'Widget',
'widgets' => 'Widgets',
'breadcrumb' => [
'home' => 'Home',
],
'title' => [
'translatable fields' => 'Translatable fields',
'non translatable fields' => 'Non translatable fields',
'create resource' => 'Create :name',
'edit resource' => 'Edit :name',
],
'general' => [
'available keyboard shortcuts' => 'Available keyboard shortcuts on this page',
'view website' => 'View Website',
'complete your profile' => 'Complete your profile',
'profile' => 'Profile',
'sign out' => 'Sign out',
],
'messages' => [
'resource created' => ':name successfully created.',
'resource not found' => ':name not found.',
'resource updated' => ':name successfully updated.',
'resource deleted' => ':name successfully deleted.',
],
'back' => 'Back',
'back to index' => 'Back to :name index',
'permission denied' => 'Permission denied. (required permission: ":permission")',
'list resource' => 'List :name',
'create resource' => 'Create :name',
'edit resource' => 'Edit :name',
'destroy resource' => 'Delete :name',
'error token mismatch' => 'Your session timed out, please submit the form again.',
'error 404' => '404',
'error 404 title' => 'Oops! This page was not found.',
'error 404 description' => 'The page you are looking for was not found.',
'error 500' => '500',
'error 500 title' => 'Oops! Something went wrong',
'error 500 description' => 'An administrator was notified.',
'delete cancelled' => 'Delete cancelled',
'unauthorized' => 'Unauthorized',
'unauthorized-access' => 'You do not have the appropriate permissions to access that page.',
// Fixed: this English locale file carried the Dutch string 'Niet ingelogd'.
'unauthenticated' => 'Not logged in',
'unauthenticated-access' => 'You need to be logged in to be able to view this page',
'something went wrong' => 'Whoops! Something went wrong.',
'mark as online' => 'Mark as online',
'mark as offline' => 'Mark as offline',
'back to backend' => 'Back to backend',
];
| 21,714
|
https://github.com/crab-cr/php-strtotime/blob/master/spec/strtotime_spec.cr
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
php-strtotime
|
crab-cr
|
Crystal
|
Code
| 161
| 628
|
require "./spec_helper"
# private NOW = Time.unix(1129633200)
private NOW = Time.parse_rfc3339("2005-10-18T11:00:00Z")
# php artisan tinker: \Carbon\Carbon::createFromTimestampUTC(strtotime('yesterday', 1129633200))->toRfc3339String()
describe "Iom::PHP::Strtotime" do
it "should allow now as Int64" do
Iom::PHP::Strtotime.strtotime("now", 1129633200_i64).should eq 1129633200_i64
end
# it "should pass example 1" do
# Iom::PHP::Strtotime.strtotime("+1 day", NOW).should eq Time.unix(1129719600)
# end
# it "should pass example 2" do
# Iom::PHP::Strtotime.strtotime("+1 week 2 days 4 hours 2 seconds", NOW).should eq Time.unix(1130425202)
# end
# it "should pass example 3" do
# Iom::PHP::Strtotime.strtotime("last month", NOW).should eq Time.unix(1127041200)
# end
# it "should pass example 4" do
# Iom::PHP::Strtotime.strtotime("2009-05-04 08:30:00 GMT", NOW).should eq Time.unix(1241425800)
# end
# it "should pass example 5" do
# Iom::PHP::Strtotime.strtotime("2009-05-04 08:30:00+00", NOW).should eq Time.unix(1241425800)
# end
# it "should pass example 6" do
# Iom::PHP::Strtotime.strtotime("2009-05-04 08:30:00+02:00", NOW).should eq Time.unix(1241418600)
# end
# it "should pass example 7" do
# Iom::PHP::Strtotime.strtotime("2009-05-04T08:30:00Z", NOW).should eq Time.unix(1241425800)
# end
# it "should pass example 8" do
# Iom::PHP::Strtotime.strtotime("dec 12 2004 04pm", NOW).should eq Time.parse_rfc3339("2004-12-12T16:00:00+00:00")
# end
end
| 39,235
|
https://github.com/dalemartyn/daleimg/blob/master/config/deploy.rb
|
Github Open Source
|
Open Source
|
MIT
| null |
daleimg
|
dalemartyn
|
Ruby
|
Code
| 392
| 1,093
|
# Capistrano deployment configuration for the daleimg WordPress site.
require_relative 'nginx'
require_relative 'setup'
set :application, 'daleimg'
set :repo_url, 'git@github.com:dalemartyn/daleimg.git'
# Agent forwarding lets the server pull from GitHub with the deploying
# user's local SSH key; explicit keys/password auth stay disabled.
set :ssh_options, {
  #keys: %w(~/.ssh/id_rsa),
  forward_agent: true
  #auth_methods: %w(password)
}
# Use :debug for more verbose output when troubleshooting
set :log_level, :info
# Apache users with .htaccess files:
# it needs to be added to linked_files so it persists across deploys:
# .env and the uploads directory live in shared/ and are symlinked into
# every release so they survive deploys.
set :linked_files, fetch(:linked_files, []).push('.env')
set :linked_dirs, fetch(:linked_dirs, []).push('web/app/uploads')
# Node toolchain resolved through nvm for the asset-build tasks below.
set :nvm_type, :user # or :system, depends on your nvm setup
set :nvm_node, 'v12.13.0'
set :nvm_map_bins, %w{node npm yarn}
# for setup task
set :db_user, -> { "#{fetch(:application)}" }
namespace :deploy do
  desc 'Restart application'
  # Intentionally a no-op until a restart mechanism is chosen.
  task :restart do
    on roles(:app), in: :sequence, wait: 5 do
      # Your restart mechanism here, for example:
      # execute :service, :nginx, :reload
    end
  end
end
# The above restart task is not run by default
# Uncomment the following line to run it on deploys if needed
# after 'deploy:publishing', 'deploy:restart'
namespace :deploy do
  desc 'Update WordPress template root paths to point to the new release'
  task :update_option_paths do
    on roles(:app) do
      within fetch(:release_path) do
        if test :wp, :core, 'is-installed'
          [:stylesheet_root, :template_root].each do |option|
            # Only change the value if it's an absolute path
            # i.e. The relative path "/themes" must remain unchanged
            # Also, the option might not be set, in which case we leave it like that
            value = capture :wp, :option, :get, option, raise_on_non_zero_exit: false
            if value != '' && value != '/themes'
              execute :wp, :option, :set, option, fetch(:release_path).join('web/wp/wp-content/themes')
            end
          end
        end
      end
    end
  end
  desc 'Build the assets'
  # Installs node dependencies and builds front-end assets inside the release.
  task :build_assets do
    on roles(:app) do
      within fetch(:release_path) do
        execute :npm, :install
        # NOTE(review): "npm run install" after "npm install" presumably runs
        # a package.json script named "install" -- confirm it is not a typo.
        execute :npm, :run, :install
        execute :npm, :run, :build
      end
    end
  end
  desc 'Purge Nginx Cache'
  # Skipped during initial setup (fetch(:setup)) because WordPress/the cache
  # plugin may not exist yet.
  task :purge_cache do
    unless fetch(:setup)
      on roles(:app), in: :sequence, wait: 5 do
        within fetch(:release_path) do
          # NOTE(review): execute! is not a standard SSHKit method --
          # presumably provided by a plugin or local patch; confirm.
          execute! :sudo, "-u www-data wp eval '$ngc = new NginxCache(); $ngc->purge_zone_once();' "
        end
      end
    end
  end
end
# Plugins that are not managed by composer and must be rsynced up by hand.
set :manual_plugins, [ ]
set :local_path, "~/WordPressSites/daleimg/"
# upload woocommerce-paypal-pro plugin
namespace :deploy do
  desc "copy up non-composer plugins"
  # Rsyncs each manual plugin from the local checkout into the new release
  # without overwriting files that already exist on the server.
  task :upload_manual_plugins do
    run_locally do
      fetch(:manual_plugins).each do |p|
        roles(:app).each do |host|
          execute "rsync -rv --progress --chmod=0775 --ignore-existing #{fetch(:local_path)}/web/app/plugins/#{p} #{host.user}@#{host.hostname}:#{fetch(:release_path)}/web/app/plugins/"
        end
      end
    end
  end
end
# The above update_option_paths task is not run by default
# Note that you need to have WP-CLI installed on your server
# Uncomment the following line to run it on deploys if needed
after 'deploy:updated', 'deploy:update_option_paths'
#after 'deploy:updated', 'deploy:build_assets'
# after 'deploy:published', 'deploy:upload_manual_plugins'
| 6,756
|
https://github.com/zhangweijin/laravel_shop_cart/blob/master/database/seeds/CatalogsTableSeeder.php
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
laravel_shop_cart
|
zhangweijin
|
PHP
|
Code
| 491
| 2,055
|
<?php
use Illuminate\Database\Seeder;
use Carbon\Carbon;
use App\Catalog;
class CatalogsTableSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * Rebuilds the catalogs table as a three-level category tree: root
     * catalogs, their sub-catalogs, and one sub-sub level under TVs.
     *
     * @return void
     */
    public function run()
    {
        DB::table('catalogs')->truncate();
        // Root catalogs (no parent).  Each row is [name, image, description];
        // the position within the group becomes the priority.
        $this->insertCatalogs(NULL, [
            ['Appliances', 'appliances_cat.jpg', 'description for Appliances'],
            ['Furniture', 'furniture_cat.jpg', 'description for Furniture'],
            ['Food', 'food_cat.jpg', 'description for Food'],
            ['Jewelry', 'jewelry_cat.jpg', 'description for Jewelry'],
        ]);
        // Second level: children of the root catalogs (must be inserted
        // after their parents so the id lookup succeeds).
        $this->insertCatalogs($this->catalogId('Appliances'), [
            ['TVs', 'tv_cat.jpg', 'description for TV'],
            ['Refrigerators', 'refrigerators_cat.jpg', 'description for Refrigerators'],
            ['Cellphones', 'cellphones_cat.jpg', 'description for Cellphones'],
        ]);
        $this->insertCatalogs($this->catalogId('Furniture'), [
            ['Sofas', 'sofas_cat.jpg', 'description for Sofas'],
            ['Cupboards', 'cupboards_cat.jpg', 'description for Cupboards'],
            ['Beds', 'beds_cat.jpg', 'description for Beds'],
        ]);
        $this->insertCatalogs($this->catalogId('Food'), [
            ['Milk', 'milk_cat.jpg', 'description for Milk'],
            ['Bread', 'bread_cat.jpg', 'description for Bread'],
            ['Sweets', 'sweets_cat.jpg', 'description for Sweets'],
        ]);
        // Third level: children of TVs.
        $this->insertCatalogs($this->catalogId('TVs'), [
            ['LCD', 'lcd_cat.jpg', 'description for LCD TVs'],
            ['Plasma', 'plasma_cat.jpg', 'description for Plasma TVs'],
            ['LED', 'led_tv_cat.jpg', 'description for LED TVs'],
        ]);
    }
    /**
     * Look up a catalog id by name.  The catalog must already be inserted.
     *
     * @param string $name
     * @return int
     */
    private function catalogId($name)
    {
        return Catalog::where('name', $name)->first()->id;
    }
    /**
     * Insert a group of sibling catalogs under one parent.
     *
     * @param int|null $parentId parent catalog id, or NULL for root catalogs
     * @param array    $rows     list of [name, image, description] tuples;
     *                           the list position becomes the priority
     * @return void
     */
    private function insertCatalogs($parentId, array $rows)
    {
        // One timestamp per group instead of re-reading the clock per field.
        $now = Carbon::now()->format('Y-m-d H:i:s');
        $records = [];
        foreach ($rows as $priority => $row) {
            list($name, $image, $description) = $row;
            $records[] = [
                'name' => $name,
                'image' => $image,
                'description' => $description,
                'parent_id' => $parentId,
                'priority' => $priority,
                'created_at' => $now,
                'updated_at' => $now,
            ];
        }
        DB::table('catalogs')->insert($records);
    }
}
| 5,487
|
https://github.com/hlzz/dotfiles/blob/master/graphics/VTK-7.0.0/Common/ExecutionModel/vtkExecutive.cxx
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,016
|
dotfiles
|
hlzz
|
C++
|
Code
| 1,967
| 6,703
|
/*=========================================================================
Program: Visualization Toolkit
Module: vtkExecutive.cxx
Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
#include "vtkExecutive.h"
#include "vtkAlgorithm.h"
#include "vtkAlgorithmOutput.h"
#include "vtkDataObject.h"
#include "vtkGarbageCollector.h"
#include "vtkInformation.h"
#include "vtkInformationExecutivePortKey.h"
#include "vtkInformationExecutivePortVectorKey.h"
#include "vtkInformationIntegerKey.h"
#include "vtkInformationIterator.h"
#include "vtkInformationKeyVectorKey.h"
#include "vtkInformationVector.h"
#include "vtkObjectFactory.h"
#include "vtkSmartPointer.h"
#include <vector>
#include <sstream>
#include "vtkCompositeDataPipeline.h"
// Define the information keys declared in vtkExecutive.h.  Each macro
// expands to the static key-accessor implementation for one key of the
// named type (Integer, ExecutivePort, ExecutivePortVector, KeyVector).
vtkInformationKeyMacro(vtkExecutive, ALGORITHM_AFTER_FORWARD, Integer);
vtkInformationKeyMacro(vtkExecutive, ALGORITHM_BEFORE_FORWARD, Integer);
vtkInformationKeyMacro(vtkExecutive, ALGORITHM_DIRECTION, Integer);
vtkInformationKeyMacro(vtkExecutive, CONSUMERS, ExecutivePortVector);
vtkInformationKeyMacro(vtkExecutive, FORWARD_DIRECTION, Integer);
vtkInformationKeyMacro(vtkExecutive, FROM_OUTPUT_PORT, Integer);
vtkInformationKeyMacro(vtkExecutive, KEYS_TO_COPY, KeyVector);
vtkInformationKeyMacro(vtkExecutive, PRODUCER, ExecutivePort);
//----------------------------------------------------------------------------
// Private holder for per-executive state: one vtkInformationVector per
// input port, resized on demand by GetInputInformation().
class vtkExecutiveInternals
{
public:
  std::vector<vtkInformationVector*> InputInformation;
  vtkExecutiveInternals();
  ~vtkExecutiveInternals();
  vtkInformationVector** GetInputInformation(int newNumberOfPorts);
};
//----------------------------------------------------------------------------
vtkExecutiveInternals::vtkExecutiveInternals()
{
}
//----------------------------------------------------------------------------
// Release every per-port input information vector still owned.
vtkExecutiveInternals::~vtkExecutiveInternals()
{
  // Delete all the input information vectors.
  for(std::vector<vtkInformationVector*>::iterator
        i = this->InputInformation.begin();
      i != this->InputInformation.end(); ++i)
    {
    if(vtkInformationVector* v = *i)
      {
      v->Delete();
      }
    }
}
//----------------------------------------------------------------------------
// Grow or shrink the per-port vector array to newNumberOfPorts, creating
// fresh vtkInformationVectors for new ports and deleting vectors for
// removed ones.  Returns a pointer to the first entry, or 0 for zero ports.
vtkInformationVector**
vtkExecutiveInternals::GetInputInformation(int newNumberOfPorts)
{
  // Adjust the number of vectors.
  int oldNumberOfPorts = static_cast<int>(this->InputInformation.size());
  if(newNumberOfPorts > oldNumberOfPorts)
    {
    // Create new vectors.
    this->InputInformation.resize(newNumberOfPorts, 0);
    for(int i=oldNumberOfPorts; i < newNumberOfPorts; ++i)
      {
      this->InputInformation[i] = vtkInformationVector::New();
      }
    }
  else if(newNumberOfPorts < oldNumberOfPorts)
    {
    // Delete old vectors.
    for(int i=newNumberOfPorts; i < oldNumberOfPorts; ++i)
      {
      if(vtkInformationVector* v = this->InputInformation[i])
        {
        // Set the pointer to NULL first to avoid reporting of the
        // entry if deleting the vector causes a garbage collection
        // reference walk.
        this->InputInformation[i] = 0;
        v->Delete();
        }
      }
    this->InputInformation.resize(newNumberOfPorts);
    }
  // Return the array of information vector pointers.
  if(newNumberOfPorts > 0)
    {
    return &this->InputInformation[0];
    }
  else
    {
    return 0;
    }
}
//----------------------------------------------------------------------------
// Start with no algorithm attached and no shared information vectors; the
// internals object owns the per-port input information storage.
vtkExecutive::vtkExecutive()
{
  this->ExecutiveInternal = new vtkExecutiveInternals;
  this->OutputInformation = vtkInformationVector::New();
  this->Algorithm = 0;
  this->InAlgorithm = 0;
  this->SharedInputInformation = 0;
  this->SharedOutputInformation = 0;
}
//----------------------------------------------------------------------------
// Drop the algorithm reference (via SetAlgorithm(0)), then release the
// output information vector and the internals.
vtkExecutive::~vtkExecutive()
{
  this->SetAlgorithm(0);
  if(this->OutputInformation)
    {
    this->OutputInformation->Delete();
    }
  delete this->ExecutiveInternal;
}
//----------------------------------------------------------------------------
// Print superclass state, then the attached algorithm pointer (or "(none)"
// when no algorithm is set).
void vtkExecutive::PrintSelf(ostream& os, vtkIndent indent)
{
  this->Superclass::PrintSelf(os, indent);
  os << indent << "Algorithm: ";
  if(this->Algorithm)
    {
    os << this->Algorithm << "\n";
    }
  else
    {
    os << "(none)\n";
    }
}
//----------------------------------------------------------------------------
// Reference counting routed through the *Internal variants; the second
// argument enables the garbage-collector-aware path so the executive's
// reference cycle with its algorithm can be reclaimed.
void vtkExecutive::Register(vtkObjectBase* o)
{
  this->RegisterInternal(o, 1);
}
//----------------------------------------------------------------------------
void vtkExecutive::UnRegister(vtkObjectBase* o)
{
  this->UnRegisterInternal(o, 1);
}
//----------------------------------------------------------------------------
// Attach this executive to an algorithm.  The new algorithm is registered
// before the old one is released so self-assignment cannot destroy it.
void vtkExecutive::SetAlgorithm(vtkAlgorithm* newAlgorithm)
{
  vtkDebugMacro(<< this->GetClassName() << " (" << this
                << "): setting Algorithm to " << newAlgorithm);
  vtkAlgorithm* oldAlgorithm = this->Algorithm;
  if(oldAlgorithm != newAlgorithm)
    {
    if(newAlgorithm)
      {
      newAlgorithm->Register(this);
      }
    this->Algorithm = newAlgorithm;
    if(oldAlgorithm)
      {
      oldAlgorithm->UnRegister(this);
      }
    this->Modified();
    }
}
//----------------------------------------------------------------------------
vtkAlgorithm* vtkExecutive::GetAlgorithm()
{
  return this->Algorithm;
}
//----------------------------------------------------------------------------
// Return the array of per-port input information vectors.  A shared vector
// (set via SetSharedInputInformation) takes precedence; otherwise the
// internals array is sized to the algorithm's current input-port count.
vtkInformationVector** vtkExecutive::GetInputInformation()
{
  // Use the shared input information vector if any is set.
  if(this->SharedInputInformation)
    {
    return this->SharedInputInformation;
    }
  // Use this executive's input information vector.
  if(this->Algorithm)
    {
    int numPorts = this->Algorithm->GetNumberOfInputPorts();
    return this->ExecutiveInternal->GetInputInformation(numPorts);
    }
  else
    {
    return this->ExecutiveInternal->GetInputInformation(0);
    }
}
//----------------------------------------------------------------------------
// Information object for one connection on one input port; 0 if the port
// index is out of range (an error is reported by the range check).
vtkInformation* vtkExecutive::GetInputInformation(int port, int connection)
{
  if(!this->InputPortIndexInRange(port, "get connected input information from"))
    {
    return 0;
    }
  vtkInformationVector* inVector = this->GetInputInformation()[port];
  return inVector->GetInformationObject(connection);
}
//----------------------------------------------------------------------------
// Whole information vector for one input port; 0 if out of range.
vtkInformationVector* vtkExecutive::GetInputInformation(int port)
{
  if(!this->InputPortIndexInRange(port, "get input information vector from"))
    {
    return 0;
    }
  return this->GetInputInformation()[port];
}
//----------------------------------------------------------------------------
// Return the output information vector, preferring a shared vector when one
// is set.  The vector is resized to the current output-port count and any
// newly created entries are stamped with the PRODUCER key (this executive
// and the port index) so downstream consumers can find their producer.
vtkInformationVector* vtkExecutive::GetOutputInformation()
{
  // Use the shared output information vector if any is set.
  if(this->SharedOutputInformation)
    {
    return this->SharedOutputInformation;
    }
  // Use this executive's output information vector.
  if (!this->Algorithm)
    {
    return 0;
    }
  // Set the length of the vector to match the number of ports.
  int oldNumberOfPorts =
    this->OutputInformation->GetNumberOfInformationObjects();
  this->OutputInformation
    ->SetNumberOfInformationObjects(this->GetNumberOfOutputPorts());
  // For any new information obects, set the executive pointer and
  // port number on the information object to tell it what produces
  // it.
  int nop = this->Algorithm->GetNumberOfOutputPorts();
  for(int i = oldNumberOfPorts; i < nop; ++i)
    {
    vtkInformation* info = this->OutputInformation->GetInformationObject(i);
    vtkExecutive::PRODUCER()->Set(info, this, i);
    }
  return this->OutputInformation;
}
//----------------------------------------------------------------------------
// Information object for one output port.
vtkInformation* vtkExecutive::GetOutputInformation(int port)
{
  return this->GetOutputInformation()->GetInformationObject(port);
}
//----------------------------------------------------------------------------
// Return the executive that produces the data for the given connection on
// the given input port, or 0 when the index is out of range or the
// connection has no producer.
vtkExecutive* vtkExecutive::GetInputExecutive(int port, int index)
{
  if(index < 0 || index >= this->GetNumberOfInputConnections(port))
    {
    // Guard the error message: when no algorithm is set the connection count
    // is 0 and this branch is reached, so dereferencing this->Algorithm
    // unconditionally (as before) would crash instead of reporting.
    vtkErrorMacro("Attempt to get executive for connection index " << index
                  << " on input port " << port << " of algorithm "
                  << (this->Algorithm? this->Algorithm->GetClassName() : "(none)")
                  << "(" << this->Algorithm << "), which has "
                  << this->GetNumberOfInputConnections(port)
                  << " connections.");
    return 0;
    }
  if(vtkAlgorithmOutput* input = this->Algorithm->GetInputConnection(port, index))
    {
    return input->GetProducer()->GetExecutive();
    }
  return 0;
}
//----------------------------------------------------------------------------
// Tell the garbage collector about every reference this executive holds:
// the algorithm, each per-port input information vector, and the output
// information vector.  Needed because executive<->algorithm is a cycle.
void vtkExecutive::ReportReferences(vtkGarbageCollector* collector)
{
  // Report reference to our algorithm.
  vtkGarbageCollectorReport(collector, this->Algorithm, "Algorithm");
  for(int i=0; i < int(this->ExecutiveInternal->InputInformation.size()); ++i)
    {
    vtkGarbageCollectorReport(collector,
                              this->ExecutiveInternal->InputInformation[i],
                              "Input Information Vector");
    }
  vtkGarbageCollectorReport(collector, this->OutputInformation,
                            "Output Information Vector");
  this->Superclass::ReportReferences(collector);
}
//----------------------------------------------------------------------------
// Bring the algorithm up to date: update output port 0 when the algorithm
// has outputs, otherwise pass -1 so sinks with no output ports still run.
int vtkExecutive::Update()
{
  int port = this->Algorithm->GetNumberOfOutputPorts()? 0 : -1;
  return this->Update(port);
}
//----------------------------------------------------------------------------
// Base implementation only reports an error; demand-driven executive
// subclasses override this to actually execute the pipeline.
int vtkExecutive::Update(int)
{
  vtkErrorMacro("This class does not implement Update.");
  return 0;
}
//----------------------------------------------------------------------------
int vtkExecutive::GetNumberOfInputPorts()
{
if(this->Algorithm)
{
return this->Algorithm->GetNumberOfInputPorts();
}
return 0;
}
//----------------------------------------------------------------------------
int vtkExecutive::GetNumberOfOutputPorts()
{
if(this->Algorithm)
{
return this->Algorithm->GetNumberOfOutputPorts();
}
return 0;
}
//----------------------------------------------------------------------------
int vtkExecutive::GetNumberOfInputConnections(int port)
{
vtkInformationVector* inputs = this->GetInputInformation(port);
if (inputs)
{
return inputs->GetNumberOfInformationObjects();
}
return 0;
}
//----------------------------------------------------------------------------
// Validate an input port index before use.  Reports a descriptive error
// (using `action` as the attempted-operation phrase) and returns 0 when the
// algorithm is missing or the index is out of range; returns 1 otherwise.
int vtkExecutive::InputPortIndexInRange(int port, const char* action)
{
  // Make sure the algorithm is set.
  if(!this->Algorithm)
    {
    vtkErrorMacro("Attempt to " << (action?action:"access") <<
                  " input port index " << port << " with no algorithm set.");
    return 0;
    }
  // Make sure the index of the input port is in range.
  if(port < 0 || port >= this->Algorithm->GetNumberOfInputPorts())
    {
    vtkErrorMacro("Attempt to " << (action?action:"access")
                  << " input port index " << port << " for algorithm "
                  << this->Algorithm->GetClassName()
                  << "(" << this->Algorithm << "), which has "
                  << this->Algorithm->GetNumberOfInputPorts()
                  << " input ports.");
    return 0;
    }
  return 1;
}
//----------------------------------------------------------------------------
// Same validation for an output port index.
int vtkExecutive::OutputPortIndexInRange(int port, const char* action)
{
  // Make sure the algorithm is set.
  if(!this->Algorithm)
    {
    vtkErrorMacro("Attempt to " << (action?action:"access") <<
                  " output port index " << port << " with no algorithm set.");
    return 0;
    }
  // Make sure the index of the output port is in range.
  if(port < 0 || port >= this->Algorithm->GetNumberOfOutputPorts())
    {
    vtkErrorMacro("Attempt to " << (action?action:"access")
                  << " output port index " << port << " for algorithm "
                  << this->Algorithm->GetClassName()
                  << "(" << this->Algorithm << "), which has "
                  << this->Algorithm->GetNumberOfOutputPorts()
                  << " output ports.");
    return 0;
    }
  return 1;
}
//----------------------------------------------------------------------------
// NOTE(review): dead code kept for reference; as written it would not even
// compile (`port` is undefined in the return) -- confirm before reviving.
// vtkAlgorithmOutput* vtkExecutive::GetProducerPort(vtkDataObject* d)
// {
//   if (!this->Algorithm)
//     {
//     return 0;
//     }
//   int numPorts = this->GetNumberOfOutputPorts();
//   for (int i=0; i<numPorts; i++)
//     {
//     vtkInformation* info = this->GetOutputInformation(i);
//     if (info->Has(vtkDataObject::DATA_OBJECT()) &&
//       info->Get(vtkDataObject::DATA_OBJECT()) == d)
//       {
//       return this->Algorithm->GetOutputPort(port);
//       }
//     }
//   return 0;
// }
//----------------------------------------------------------------------------
// Let another executive's information vectors stand in for this one's;
// GetInputInformation()/GetOutputInformation() return the shared vectors
// when these are non-null, and the Forward* methods skip forwarding.
void vtkExecutive::SetSharedInputInformation(vtkInformationVector** inInfoVec)
{
  this->SharedInputInformation = inInfoVec;
}
//----------------------------------------------------------------------------
void vtkExecutive::SetSharedOutputInformation(vtkInformationVector* outInfoVec)
{
  this->SharedOutputInformation = outInfoVec;
}
//----------------------------------------------------------------------------
// Data object for an output port, creating it on demand (for backward
// compatibility) when the port's information has no DATA_OBJECT yet and we
// are not currently inside the algorithm's execution.
vtkDataObject* vtkExecutive::GetOutputData(int port)
{
  if(!this->OutputPortIndexInRange(port, "get data for"))
    {
    return 0;
    }
  vtkInformation* info = this->GetOutputInformation(port);
  if (!info)
    {
    return 0;
    }
  // for backward compatibility we bring Outputs up to date if they do not
  // already exist
  if (!this->InAlgorithm && !info->Has(vtkDataObject::DATA_OBJECT()))
    {
    // Bring the data object up to date only if it isn't already there
    this->UpdateDataObject();
    }
  // Return the data object.
  return info->Get(vtkDataObject::DATA_OBJECT());
}
//----------------------------------------------------------------------------
// Convenience overload: look up the port's information, then delegate.
void vtkExecutive::SetOutputData(int newPort, vtkDataObject* newOutput)
{
  vtkInformation *info = this->GetOutputInformation(newPort);
  this->SetOutputData(newPort, newOutput, info);
}
//----------------------------------------------------------------------------
// Store a data object into an output port's information.  When the object
// actually changes, the port's pipeline information is reset (subclass hook)
// so stale pipeline state is not carried over to the new output.
void vtkExecutive::SetOutputData(int newPort, vtkDataObject* newOutput,
                                 vtkInformation* info)
{
  if(info)
    {
    vtkDataObject* currentOutput = info->Get(vtkDataObject::DATA_OBJECT());
    if(newOutput != currentOutput)
      {
      info->Set(vtkDataObject::DATA_OBJECT(), newOutput);
      // Output has changed.  Reset the pipeline information.
      this->ResetPipelineInformation(newPort, info);
      }
    }
  else
    {
    vtkErrorMacro("Could not set output on port " << newPort << ".");
    }
}
//----------------------------------------------------------------------------
// Data object feeding one input connection: follow the connection's
// PRODUCER key to the upstream executive and ask it for the output data.
// Returns 0 for an out-of-range index or when no producer is recorded.
vtkDataObject* vtkExecutive::GetInputData(int port, int index)
{
  if(index < 0 || index >= this->GetNumberOfInputConnections(port))
    {
    return 0;
    }
  vtkInformationVector* inVector = this->GetInputInformation()[port];
  vtkInformation* info = inVector->GetInformationObject(index);
  vtkExecutive* e;
  int producerPort;
  vtkExecutive::PRODUCER()->Get(info,e,producerPort);
  if(e)
    {
    return e->GetOutputData(producerPort);
    }
  else
    {
    return 0;
    }
}
//----------------------------------------------------------------------------
// Variant that reads directly from a caller-supplied information-vector
// array (no producer indirection): just the DATA_OBJECT stored on the
// connection's information, or 0 when the port/connection entry is missing.
vtkDataObject* vtkExecutive::GetInputData
(int port, int index, vtkInformationVector **inInfoVec)
{
  if (!inInfoVec[port])
    {
    return 0;
    }
  vtkInformation *info = inInfoVec[port]->GetInformationObject(index);
  if (!info)
    {
    return 0;
    }
  return info->Get(vtkDataObject::DATA_OBJECT());
}
//----------------------------------------------------------------------------
// Generic request dispatch.  For an upstream-forwarded request the order is:
// optionally run the algorithm before forwarding (ALGORITHM_BEFORE_FORWARD),
// forward the request to all upstream executives, then optionally run the
// algorithm after forwarding (ALGORITHM_AFTER_FORWARD).  Downstream
// forwarding and non-forwarded requests are not implemented here.
// Returns 1 on success, 0 on any failure.
int vtkExecutive::ProcessRequest(vtkInformation* request,
                                 vtkInformationVector** inInfo,
                                 vtkInformationVector* outInfo)
{
  if(request->Has(FORWARD_DIRECTION()))
    {
    // Request will be forwarded.
    if(request->Get(FORWARD_DIRECTION()) == vtkExecutive::RequestUpstream)
      {
      if(this->Algorithm && request->Get(ALGORITHM_BEFORE_FORWARD()))
        {
        if(!this->CallAlgorithm(request, vtkExecutive::RequestUpstream,
                                inInfo, outInfo))
          {
          return 0;
          }
        }
      if(!this->ForwardUpstream(request))
        {
        return 0;
        }
      if(this->Algorithm && request->Get(ALGORITHM_AFTER_FORWARD()))
        {
        if(!this->CallAlgorithm(request, vtkExecutive::RequestDownstream,
                                inInfo, outInfo))
          {
          return 0;
          }
        }
      }
    if(request->Get(FORWARD_DIRECTION()) == vtkExecutive::RequestDownstream)
      {
      vtkErrorMacro("Downstream forwarding not yet implemented.");
      return 0;
      }
    }
  else
    {
    // Request will not be forwarded.
    vtkErrorMacro("Non-forwarded requests are not yet implemented.");
    return 0;
    }
  return 1;
}
//----------------------------------------------------------------------------
// Default stub: subclasses implementing demand-driven execution must
// override this to report the pipeline modification time.  Always
// returns 0 (failure) here.
int vtkExecutive::ComputePipelineMTime(vtkInformation*,
                                       vtkInformationVector**,
                                       vtkInformationVector*,
                                       int, unsigned long*)
{
  // Demand-driven executives that use this request should implement
  // this method.
  vtkErrorMacro("ComputePipelineMTime not implemented for this executive.");
  return 0;
}
//----------------------------------------------------------------------------
// Forward a request toward the pipeline's consumers.  Currently only
// the shared-output short-circuit is supported; actual downstream
// forwarding is unimplemented and reports an error.
int vtkExecutive::ForwardDownstream(vtkInformation*)
{
  // Do not forward downstream if the output is shared with another
  // executive.
  if(this->SharedOutputInformation)
    {
    return 1;
    }

  // Forwarding downstream is not yet implemented.
  vtkErrorMacro("ForwardDownstream not yet implemented.");
  return 0;
}
//----------------------------------------------------------------------------
// Forward a request to every executive producing one of this
// algorithm's inputs.  The algorithm may modify the request before and
// after forwarding via ModifyRequest().  Returns 1 only if every
// producer handled the request successfully.
int vtkExecutive::ForwardUpstream(vtkInformation* request)
{
  // Do not forward upstream if the input is shared with another
  // executive.
  if(this->SharedInputInformation)
    {
    return 1;
    }

  if (!this->Algorithm->ModifyRequest(request, BeforeForward))
    {
    return 0;
    }

  // Forward the request upstream through all input connections.
  int result = 1;
  for(int i=0; i < this->GetNumberOfInputPorts(); ++i)
    {
    int nic = this->Algorithm->GetNumberOfInputConnections(i);
    vtkInformationVector* inVector = this->GetInputInformation()[i];
    for(int j=0; j < nic; ++j)
      {
      vtkInformation* info = inVector->GetInformationObject(j);
      // Get the executive producing this input.  If there is none, then
      // it is a NULL input.
      vtkExecutive* e;
      int producerPort;
      vtkExecutive::PRODUCER()->Get(info,e,producerPort);
      if(e)
        {
        // Temporarily point FROM_OUTPUT_PORT() at the producer's port
        // for the duration of the nested request, then restore it so
        // sibling connections see the original value.
        int port = request->Get(FROM_OUTPUT_PORT());
        request->Set(FROM_OUTPUT_PORT(), producerPort);
        if(!e->ProcessRequest(request,
                              e->GetInputInformation(),
                              e->GetOutputInformation()))
          {
          // Record the failure but keep forwarding to other producers.
          result = 0;
          }
        request->Set(FROM_OUTPUT_PORT(), port);
        }
      }
    }

  if (!this->Algorithm->ModifyRequest(request, AfterForward))
    {
    return 0;
    }

  return result;
}
//----------------------------------------------------------------------------
// Copy the information entries listed in the request's KEYS_TO_COPY()
// in the direction of information flow: downstream requests copy from
// the first input to every output; upstream requests copy from the
// requesting output port to every input.  Keys of type
// vtkInformationKeyVectorKey have all their listed keys copied too, and
// every key present in the source information gets a chance to perform
// its own custom copy via CopyDefaultInformation().
void vtkExecutive::CopyDefaultInformation(vtkInformation* request,
                                          int direction,
                                          vtkInformationVector** inInfoVec,
                                          vtkInformationVector* outInfoVec)
{
  if(direction == vtkExecutive::RequestDownstream)
    {
    // Copy information from the first input to all outputs.
    if(this->GetNumberOfInputPorts() > 0 &&
       inInfoVec[0]->GetNumberOfInformationObjects() > 0)
      {
      vtkInformationKey** keys = request->Get(KEYS_TO_COPY());
      int length = request->Length(KEYS_TO_COPY());
      vtkInformation* inInfo = inInfoVec[0]->GetInformationObject(0);

      // Weak reference: the iterator must not affect inInfo's lifetime.
      vtkSmartPointer<vtkInformationIterator> infoIter =
        vtkSmartPointer<vtkInformationIterator>::New();
      infoIter->SetInformationWeak(inInfo);

      int oiobj = outInfoVec->GetNumberOfInformationObjects();
      for(int i=0; i < oiobj; ++i)
        {
        vtkInformation* outInfo = outInfoVec->GetInformationObject(i);
        for(int j=0; j < length; ++j)
          {
          // Copy the entry.
          outInfo->CopyEntry(inInfo, keys[j]);

          // If the entry is a key vector, copy all the keys listed.
          if(vtkInformationKeyVectorKey* vkey =
             vtkInformationKeyVectorKey::SafeDownCast(keys[j]))
            {
            outInfo->CopyEntries(inInfo, vkey);
            }
          }

        // Give the keys an opportunity to copy themselves.
        infoIter->InitTraversal();
        while(!infoIter->IsDoneWithTraversal())
          {
          vtkInformationKey* key = infoIter->GetCurrentKey();
          key->CopyDefaultInformation(request, inInfo, outInfo);
          infoIter->GoToNextItem();
          }
        }
      }
    }
  else
    {
    // Get the output port from which the request was made.  Use zero
    // if output port was not specified.
    int outputPort = 0;
    if(request->Has(FROM_OUTPUT_PORT()))
      {
      outputPort = request->Get(FROM_OUTPUT_PORT());
      // -1 means "unspecified"; fall back to port 0.
      outputPort = outputPort == -1 ? 0 : outputPort;
      }

    // Copy information from the requesting output to all inputs.
    if(outputPort >= 0 &&
       outputPort < outInfoVec->GetNumberOfInformationObjects())
      {
      vtkInformationKey** keys = request->Get(KEYS_TO_COPY());
      int length = request->Length(KEYS_TO_COPY());
      vtkInformation* outInfo = outInfoVec->GetInformationObject(outputPort);

      // Weak reference: the iterator must not affect outInfo's lifetime.
      vtkSmartPointer<vtkInformationIterator> infoIter =
        vtkSmartPointer<vtkInformationIterator>::New();
      infoIter->SetInformationWeak(outInfo);

      for(int i=0; i < this->GetNumberOfInputPorts(); ++i)
        {
        for(int j=0; j < inInfoVec[i]->GetNumberOfInformationObjects(); ++j)
          {
          vtkInformation* inInfo = inInfoVec[i]->GetInformationObject(j);
          for(int k=0; k < length; ++k)
            {
            // Copy the entry.
            inInfo->CopyEntry(outInfo, keys[k]);

            // If the entry is a key vector, copy all the keys listed.
            if(vtkInformationKeyVectorKey* vkey =
               vtkInformationKeyVectorKey::SafeDownCast(keys[k]))
              {
              inInfo->CopyEntries(outInfo, vkey);
              }
            }

          // Give the keys an opportunity to copy themselves.
          infoIter->InitTraversal();
          while(!infoIter->IsDoneWithTraversal())
            {
            vtkInformationKey* key = infoIter->GetCurrentKey();
            key->CopyDefaultInformation(request, outInfo, inInfo);
            infoIter->GoToNextItem();
            }
          }
        }
      }
    }
}
//----------------------------------------------------------------------------
// Invoke the algorithm for the given request after copying default
// information in the flow direction.  The InAlgorithm flag brackets the
// call so that re-entrant requests can be detected by CheckAlgorithm().
// Returns the algorithm's result; failures are reported with an error.
int vtkExecutive::CallAlgorithm(vtkInformation* request, int direction,
                                vtkInformationVector** inInfo,
                                vtkInformationVector* outInfo)
{
  // Copy default information in the direction of information flow.
  this->CopyDefaultInformation(request, direction, inInfo, outInfo);

  // Invoke the request on the algorithm.
  this->InAlgorithm = 1;
  int result = this->Algorithm->ProcessRequest(request, inInfo, outInfo);
  this->InAlgorithm = 0;

  // If the algorithm failed report it now.
  if(!result)
    {
    vtkErrorMacro("Algorithm " << this->Algorithm->GetClassName()
                  << "(" << this->Algorithm
                  << ") returned failure for request: "
                  << *request);
    }
  return result;
}
//----------------------------------------------------------------------------
// Guard against re-entrant requests: returns 0 (with a detailed error,
// including the offending request when available) if the named method
// was invoked while the algorithm is already executing, 1 otherwise.
// Under dashboard testing a recursion aborts the process so the bug is
// caught by the test suite.
int vtkExecutive::CheckAlgorithm(const char* method,
                                 vtkInformation* request)
{
  if(this->InAlgorithm)
    {
    if(request)
      {
      // Include the recursive request's contents in the error message.
      std::ostringstream rqmsg;
      request->Print(rqmsg);
      vtkErrorMacro(<< method << " invoked during another request. "
                    "Returning failure to algorithm "
                    << this->Algorithm->GetClassName() << "("
                    << this->Algorithm << ") for the recursive request:\n"
                    << rqmsg.str().c_str());
      }
    else
      {
      vtkErrorMacro(<< method << " invoked during another request. "
                    "Returning failure to algorithm "
                    << this->Algorithm->GetClassName() << "("
                    << this->Algorithm << ").");
      }
    // Tests should fail when this happens because there is a bug in
    // the code.
    if(getenv("DASHBOARD_TEST_FROM_CTEST") || getenv("DART_TEST_FROM_DART"))
      {
      abort();
      }
    return 0;
    }
  return 1;
}
| 7,084
|
https://github.com/zeno-api/zeno-console/blob/master/resources/js/Pages/Service/ServiceStats.vue
|
Github Open Source
|
Open Source
|
MIT
| null |
zeno-console
|
zeno-api
|
Vue
|
Code
| 339
| 1,361
|
<template>
  <!-- Three-card statistics panel for a service.
       NOTE(review): every metric value, delta and percentage below is a
       hard-coded placeholder (71,897 / 24.57% / etc.) — confirm whether
       this component is meant to receive real data via props. -->
  <div>
    <div class="grid grid-cols-1 sm:rounded-lg bg-white overflow-hidden shadow md:grid-cols-3">
      <!-- Card 1: total concurrent, with up-arrow delta badge -->
      <div>
        <div class="px-4 py-5 sm:p-6">
          <dl>
            <dt class="text-base leading-6 font-normal text-gray-900">
              Total Concurrent
            </dt>
            <dd class="mt-1 flex justify-between items-baseline md:block lg:flex">
              <div class="flex items-baseline text-2xl leading-8 font-semibold text-indigo-600">
                71,897
                <span class="ml-2 text-sm leading-5 font-medium text-gray-500">
                  from 70,946
                </span>
              </div>
              <div class="inline-flex items-baseline px-2.5 py-0.5 rounded-full text-sm font-medium leading-5 bg-green-100 text-green-800 md:mt-2 lg:mt-0">
                <svg class="-ml-1 mr-0.5 flex-shrink-0 self-center h-5 w-5 text-green-500" fill="currentColor" viewBox="0 0 20 20">
                  <path fill-rule="evenodd" d="M5.293 9.707a1 1 0 010-1.414l4-4a1 1 0 011.414 0l4 4a1 1 0 01-1.414 1.414L11 7.414V15a1 1 0 11-2 0V7.414L6.707 9.707a1 1 0 01-1.414 0z" clip-rule="evenodd" />
                </svg>
                <span class="sr-only">
                  Increased by
                </span>
                12%
              </div>
            </dd>
          </dl>
        </div>
      </div>
      <!-- Card 2: total hits over the last 30 days -->
      <div class="border-t border-gray-200 md:border-0 md:border-l">
        <div class="px-4 py-5 sm:p-6">
          <dl>
            <dt class="text-base leading-6 font-normal text-gray-900">
              Total Hits 30 Last Days
            </dt>
            <dd class="mt-1 flex justify-between items-baseline md:block lg:flex">
              <div class="flex items-baseline text-2xl leading-8 font-semibold text-indigo-600">
                71,897
              </div>
              <div class="inline-flex items-baseline px-2.5 py-0.5 rounded-full text-sm font-medium leading-5 bg-green-100 text-green-800 md:mt-2 lg:mt-0">
                <svg class="-ml-1 mr-0.5 flex-shrink-0 self-center h-5 w-5 text-green-500" fill="currentColor" viewBox="0 0 20 20">
                  <path fill-rule="evenodd" d="M5.293 9.707a1 1 0 010-1.414l4-4a1 1 0 011.414 0l4 4a1 1 0 01-1.414 1.414L11 7.414V15a1 1 0 11-2 0V7.414L6.707 9.707a1 1 0 01-1.414 0z" clip-rule="evenodd" />
                </svg>
                <span class="sr-only">
                  Increased by
                </span>
                2.02%
              </div>
            </dd>
          </dl>
        </div>
      </div>
      <!-- Card 3: average click rate, with down-arrow (red) delta badge -->
      <div class="border-t border-gray-200 md:border-0 md:border-l">
        <div class="px-4 py-5 sm:p-6">
          <dl>
            <dt class="text-base leading-6 font-normal text-gray-900">
              Avg. Click Rate
            </dt>
            <dd class="mt-1 flex justify-between items-baseline md:block lg:flex">
              <div class="flex items-baseline text-2xl leading-8 font-semibold text-indigo-600">
                24.57%
                <span class="ml-2 text-sm leading-5 font-medium text-gray-500">
                  from 28.62
                </span>
              </div>
              <div class="inline-flex items-baseline px-2.5 py-0.5 rounded-full text-sm font-medium leading-5 bg-red-100 text-red-800 md:mt-2 lg:mt-0">
                <svg class="-ml-1 mr-0.5 flex-shrink-0 self-center h-5 w-5 text-red-500" fill="currentColor" viewBox="0 0 20 20">
                  <path fill-rule="evenodd" d="M14.707 10.293a1 1 0 010 1.414l-4 4a1 1 0 01-1.414 0l-4-4a1 1 0 111.414-1.414L9 12.586V5a1 1 0 012 0v7.586l2.293-2.293a1 1 0 011.414 0z" clip-rule="evenodd" />
                </svg>
                <span class="sr-only">
                  Decreased by
                </span>
                4.05%
              </div>
            </dd>
          </dl>
        </div>
      </div>
    </div>
  </div>
</template>
<script>
// Purely presentational component: no props, state, or logic yet.
export default {
}
</script>
<style scoped>
</style>
| 38,107
|
https://github.com/gabrigcl/firestore-ref/blob/master/rollup.config.js
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
firestore-ref
|
gabrigcl
|
JavaScript
|
Code
| 34
| 128
|
import banner from 'rollup-plugin-banner';

// Rollup build configuration: bundles src/index.js into a single UMD
// module exposed on the global name `firestoreRef`, with a version/author
// banner prepended.  `firebase` is kept external (not bundled).
export default {
    input: './src/index.js',
    output: [
        {
            file: './build/firestore-ref.umd.js',
            format: 'umd',
            name: 'firestoreRef'
        }
    ],
    plugins: [
        // Banner text is filled from package.json fields at build time.
        banner('firestore-ref v<%= pkg.version %> by <%= pkg.author %>'),
    ],
    external: ['firebase']
};
| 25,813
|
https://github.com/RaymondGrumney/FayeAndGrumpy/blob/master/assets/assets/scripts/Meta Objects/Heart.cs
|
Github Open Source
|
Open Source
|
CC-BY-3.0
| null |
FayeAndGrumpy
|
RaymondGrumney
|
C#
|
Code
| 400
| 994
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// UI heart that shows full/empty sprites for player health and pulses
/// (scales up and back) while undamaged.
/// </summary>
public class Heart : MonoBehaviour {

    [Tooltip("The full heart sprite.")]
    public Sprite fullHeart;

    [Tooltip("The empty heart sprite.")]
    public Sprite emptyHeart;

    [Tooltip("How big the heart gets on a pulse.")]
    public float pulseScale;

    // Scale of the transform at Awake; used as the pulse baseline.
    private Vector3 _initialScale;

    [Tooltip("How long the pulse takes.")]
    public float pulseTime;

    [Tooltip("How many pulses per pulse.")]
    public int numPulses;

    [Tooltip("How long each indivual pulse takes.")]
    public float individualPulseTime;

    [Tooltip("Whether or not to pulse the hearts.")]
    public bool pulse;

    // whether the heart is pulsing in or out
    private bool pulseOut = true;

    // which pulse out of numPulses the heart is currently on
    private int currentPulse = 0;

    // when the heart last pulsed (Time.time of the start of the cycle)
    private float lastPulse;

    // whether this heart is representing being damaged or not
    private bool damaged = false;

    // this object's sprite renderer
    private SpriteRenderer _spriteRenderer;

    // initialization
    void Awake() {
        _initialScale = transform.localScale;
        _spriteRenderer = GetComponent<SpriteRenderer>();
        updateHeartSprite();
    }

    // called once per frame
    void Update() {
        pulseHeart();
    }

    /// <summary>
    /// Pulses the heart: grows it toward _initialScale + pulseScale, then
    /// shrinks it back, up to numPulses times per pulseTime cycle.
    /// Damaged hearts never pulse.
    /// </summary>
    void pulseHeart() {
        // if we are pulsing this heart
        if( !damaged && pulse ) {

            // if it's time to start a new pulse cycle
            if ( Time.time > lastPulse ) {
                // schedule the next cycle and reset the pulse counter
                lastPulse += pulseTime;
                currentPulse = 0;
            }

            // if we still need to scale this
            if ( pulseOut ) {
                // if we haven't pulsed numPulses times, keep growing
                if (currentPulse < numPulses) {
                    scaleTransform( 1f );
                }

                // keep growing only while below the maximum pulse scale
                pulseOut = this.transform.localScale.x < _initialScale.x + pulseScale;
            } else {
                // shrink back toward the initial scale
                // NOTE(review): nothing clamps this at _initialScale, and
                // pulseOut is only re-set to true inside the branch above,
                // so after the first grow/shrink the heart appears to keep
                // shrinking indefinitely — confirm intended behavior.
                scaleTransform( -1f );
            }

            // if the scale has reached the maximum pulse scale, reverse
            if (this.transform.localScale.x > _initialScale.x + pulseScale) {
                pulseOut = false;
                currentPulse++;
            }
        }
    }

    /// <summary>
    /// Scales the transform up or down. Passing a positive number scales up, a negative scales down
    /// </summary>
    /// <param name="sign"> The sign of the multiplier.</param>
    void scaleTransform(float sign) {
        sign = Mathf.Sign( sign );
        // Uniform per-frame scale step, normalized by the pulse duration.
        transform.localScale += Vector3.one * sign * Time.deltaTime / individualPulseTime;
    }

    /// <summary>
    /// Sets the damaged state and refreshes the displayed sprite.
    /// </summary>
    /// <param name="state">If set to <c>true</c> state.</param>
    public void setDamaged(bool state) {
        this.damaged = state;
        updateHeartSprite();
    }

    /// <summary>
    /// Updates the heart sprite to match the damaged flag.
    /// </summary>
    void updateHeartSprite() {
        // full heart if not damaged
        if( !damaged ) {
            _spriteRenderer.sprite = fullHeart;
        } else {
            // empty heart if is damaged
            _spriteRenderer.sprite = emptyHeart;
        }
    }
}
| 48,784
|
https://github.com/woyaowoyao/jhipster-operator/blob/master/example-app/invoice/src/main/java/com/salaboy/invoice/service/mapper/ShipmentMapper.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
jhipster-operator
|
woyaowoyao
|
Java
|
Code
| 82
| 271
|
package com.salaboy.invoice.service.mapper;
import com.salaboy.invoice.domain.*;
import com.salaboy.invoice.service.dto.ShipmentDTO;
import org.mapstruct.*;
/**
 * MapStruct mapper between the {@link Shipment} entity and its
 * {@link ShipmentDTO}, delegating invoice mapping to {@link InvoiceMapper}.
 */
@Mapper(componentModel = "spring", uses = {InvoiceMapper.class})
public interface ShipmentMapper extends EntityMapper<ShipmentDTO, Shipment> {

    @Mapping(source = "invoice.id", target = "invoiceId")
    @Mapping(source = "invoice.code", target = "invoiceCode")
    ShipmentDTO toDto(Shipment shipment);

    @Mapping(source = "invoiceId", target = "invoice")
    Shipment toEntity(ShipmentDTO shipmentDTO);

    /**
     * Builds a reference entity carrying only the identifier, or
     * {@code null} when no id is given.
     */
    default Shipment fromId(Long id) {
        if (id != null) {
            Shipment entity = new Shipment();
            entity.setId(id);
            return entity;
        }
        return null;
    }
}
| 9,366
|
https://github.com/sengeiou/group_purchase/blob/master/src/main/java/com/mds/group/purchase/order/dao/ReceiptBillDetailMapper.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
group_purchase
|
sengeiou
|
Java
|
Code
| 228
| 513
|
/*
* Copyright Ningbo Qishan Technology Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mds.group.purchase.order.dao;
import com.mds.group.purchase.core.Mapper;
import com.mds.group.purchase.order.model.ReceiptBillDetail;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Set;
/**
 * MyBatis mapper for {@link ReceiptBillDetail} records, with lookups by
 * bill id, order id and order-detail id.
 *
 * @author pavawi
 */
public interface ReceiptBillDetailMapper extends Mapper<ReceiptBillDetail> {

    /**
     * Selects all bill details belonging to any of the given bill ids.
     *
     * @param billIds the receipt bill ids to match
     * @return the matching bill details
     */
    List<ReceiptBillDetail> selectByBillIds(@Param("billIds") Set<Long> billIds);

    /**
     * Selects the bill detail associated with an order detail.
     *
     * @param orderDetailId the order detail id
     * @return the receipt bill detail
     */
    ReceiptBillDetail selectByOrderDetailId(@Param("orderDetailId") Long orderDetailId);

    /**
     * Selects the bill detail associated with an order.
     *
     * @param orderId the order id
     * @return the receipt bill detail
     */
    ReceiptBillDetail selectByOrderId(@Param("orderId") Long orderId);

    /**
     * Deletes all bill details belonging to any of the given bill ids.
     *
     * @param billIds the receipt bill ids whose details are removed
     */
    void deleteByBillIds(@Param("billIds") List<Long> billIds);
}
| 1,896
|
https://github.com/garora/PortalCMS/blob/master/Portal.CMS.Web/Areas/PageBuilder/ViewModels/Section/MarkupViewModel.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,017
|
PortalCMS
|
garora
|
C#
|
Code
| 23
| 65
|
namespace Portal.CMS.Web.Areas.PageBuilder.ViewModels.Section
{
    /// <summary>
    /// View model pairing a page section's identifier with its body
    /// content for the page-builder markup editor.
    /// </summary>
    public class MarkupViewModel
    {
        /// <summary>Identifier of the page section being edited.</summary>
        public int PageSectionId { get; set; }

        /// <summary>Body content (markup) of the page section.</summary>
        public string PageSectionBody { get; set; }
    }
}
| 38,404
|
https://github.com/SkyFoundation/PanguEngine/blob/master/graphics/src/main/java/engine/graphics/graph/ColorOutputInfo.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
PanguEngine
|
SkyFoundation
|
Java
|
Code
| 110
| 320
|
package engine.graphics.graph;
import engine.graphics.util.BlendMode;
import engine.util.Color;
/**
 * Describes one color attachment of a render-graph pass: the target
 * buffer name, optional clearing (with a clear color) and the blend
 * mode.  All setters return {@code this} for fluent configuration.
 */
public class ColorOutputInfo {
    // Name of the color buffer this output writes to.
    private String colorBuffer;
    // Whether the buffer is cleared before rendering (default: no).
    private boolean clear = false;
    // Color used when clearing (default: black).
    private Color clearColor = Color.BLACK;
    // Blending applied to the output (default: disabled).
    private BlendMode blendMode = BlendMode.DISABLED;

    /** Factory shorthand for a new, default-configured instance. */
    public static ColorOutputInfo colorOutput() {
        return new ColorOutputInfo();
    }

    public String getColorBuffer() {
        return colorBuffer;
    }

    public ColorOutputInfo setColorBuffer(String colorBuffer) {
        this.colorBuffer = colorBuffer;
        return this;
    }

    public boolean isClear() {
        return clear;
    }

    public ColorOutputInfo setClear(boolean clear) {
        this.clear = clear;
        return this;
    }

    public Color getClearColor() {
        return clearColor;
    }

    public ColorOutputInfo setClearColor(Color clearColor) {
        this.clearColor = clearColor;
        return this;
    }

    public BlendMode getBlendMode() {
        return blendMode;
    }

    public ColorOutputInfo setBlendMode(BlendMode blendMode) {
        this.blendMode = blendMode;
        return this;
    }
}
|
https://github.com/Stitchous/VisualRust/blob/master/VisualRust/Forms/DebuggingOptionsPage.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
VisualRust
|
Stitchous
|
C#
|
Code
| 103
| 326
|
using System;
using System.Runtime.InteropServices;
using System.Windows.Forms;
using Microsoft.VisualStudio.Shell;
namespace VisualRust.Options
{
    /// <summary>
    /// Visual Studio options page holding GDB debugging settings.  The
    /// actual widgets live in <see cref="DebuggingOptionsPageControl"/>,
    /// created lazily and synchronized on apply/close.
    /// </summary>
    [ComVisible(true)]
    [Guid("93F42A39-0AF6-40EE-AE2C-1C44AB5F8B15")]
    public partial class DebuggingOptionsPage : DialogPage
    {
        public bool UseCustomGdb { get; set; }
        public string CustomGdbPath { get; set; }
        public string GdbExtraArguments { get; set; }

        // Lazily-created WinForms control hosting the option widgets.
        // (Name kept as `page`: the class is partial and the field may be
        // referenced from the other part.)
        private IWin32Window page;

        protected override IWin32Window Window
        {
            get
            {
                if (page == null)
                {
                    page = new DebuggingOptionsPageControl(this);
                }
                return page;
            }
        }

        protected override void OnClosed(EventArgs e)
        {
            // Reload the control from the stored settings so edits that
            // were not applied are discarded.
            var control = page as DebuggingOptionsPageControl;
            if (control != null)
            {
                control.LoadSettings(this);
            }
            base.OnClosed(e);
        }

        protected override void OnApply(PageApplyEventArgs e)
        {
            // Persist the control's values only on an actual Apply.
            var control = page as DebuggingOptionsPageControl;
            if (control != null && e.ApplyBehavior == ApplyKind.Apply)
            {
                control.ApplySettings(this);
            }
            base.OnApply(e);
        }
    }
}
|
https://github.com/AthenaEPI/mipy/blob/master/dmipy/distributions/distributions.py
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
mipy
|
AthenaEPI
|
Python
|
Code
| 2,029
| 6,175
|
# -*- coding: utf-8 -*-
'''
Document Module
'''
from __future__ import division
import pkg_resources
from os.path import join
import numpy as np
from scipy import stats
from scipy import special
from dipy.reconst.shm import real_sym_sh_mrtrix
from dmipy.utils import utils
from scipy import interpolate
from dmipy.core.modeling_framework import ModelProperties
from dipy.utils.optpkg import optional_package
from dipy.data import get_sphere, HemiSphere
sphere = get_sphere('symmetric724')
hemisphere = HemiSphere(phi=sphere.phi, theta=sphere.theta)
numba, have_numba, _ = optional_package("numba")
GRADIENT_TABLES_PATH = pkg_resources.resource_filename(
'dmipy', 'data/gradient_tables'
)
SIGNAL_MODELS_PATH = pkg_resources.resource_filename(
'dmipy', 'signal_models'
)
DATA_PATH = pkg_resources.resource_filename(
'dmipy', 'data'
)
SPHERE_CARTESIAN = np.loadtxt(
join(GRADIENT_TABLES_PATH, 'sphere_with_cap.txt')
)
SPHERE_SPHERICAL = utils.cart2sphere(SPHERE_CARTESIAN)
log_bingham_normalization_splinefit = np.load(
join(DATA_PATH, "bingham_normalization_splinefit.npz"),
encoding='bytes', allow_pickle=True)['arr_0']
inverse_sh_matrix_kernel = {
sh_order: np.linalg.pinv(real_sym_sh_mrtrix(
sh_order, hemisphere.theta, hemisphere.phi
)[0]) for sh_order in np.arange(0, 15, 2)
}
BETA_SCALING = 1e-6
__all__ = [
'get_sh_order_from_odi',
'SD1Watson',
'SD2Bingham',
'DD1Gamma',
'odi2kappa',
'kappa2odi'
]
def get_sh_order_from_odi(odi):
    """Return the minimum sh_order needed to represent a given ODI.

    Lower orientation dispersion (odi) gives sharper distributions that
    require a higher spherical harmonics order; the thresholds below map
    odi ranges onto even orders 2..14.
    """
    thresholds = np.array([0.80606061, 0.46666667, 0.25333333,
                           0.15636364, 0.09818182, 0.06909091, 0.])
    orders = np.arange(2, 15, 2)
    # First threshold strictly below odi selects the corresponding order.
    return orders[np.argmax(thresholds < odi)]
class SD1Watson(ModelProperties):
    r""" The Watson spherical distribution model [1]_ [2]_.

    Parameters
    ----------
    mu : array, shape(2),
        angles [theta, phi] representing main orientation on the sphere.
        theta is inclination of polar angle of main angle mu [0, pi].
        phi is polar angle of main angle mu [-pi, pi].
    kappa : float,
        concentration parameter of the Watson distribution.

    References
    ----------
    .. [1] Kaden et al.
           "Parametric spherical deconvolution: inferring anatomical
           connectivity using diffusion MR imaging". NeuroImage (2007)
    .. [2] Zhang et al.
           "NODDI: practical in vivo neurite orientation dispersion and density
           imaging of the human brain". NeuroImage (2012)
    """

    _parameter_ranges = {
        'mu': ([0, np.pi], [-np.pi, np.pi]),
        'odi': (0.02, 0.99),
    }
    _parameter_scales = {
        'mu': np.r_[1., 1.],
        'odi': 1.,
    }
    _parameter_types = {
        'mu': 'orientation',
        'odi': 'normal'
    }
    _model_type = 'SphericalDistribution'

    def __init__(self, mu=None, odi=None):
        self.mu = mu
        self.odi = odi

    def __call__(self, n, **kwargs):
        r""" The Watson spherical distribution model [1, 2].

        Parameters
        ----------
        n : array of shape(3) or array of shape(N x 3),
            sampled orientations of the Watson distribution.

        Returns
        -------
        Wn: float or array of shape(N),
            Probability density at orientations n, given mu and kappa.
        """
        odi = kwargs.get('odi', self.odi)
        mu = kwargs.get('mu', self.mu)
        # The model is parameterized by odi; convert to the Watson
        # concentration parameter kappa.
        kappa = odi2kappa(odi)

        mu_cart = utils.unitsphere2cart_1d(mu)
        # Watson density: exp(kappa * (n . mu)^2) normalized by the
        # confluent hypergeometric function 1F1(1/2; 3/2; kappa).
        numerator = np.exp(kappa * np.dot(n, mu_cart) ** 2)
        denominator = 4 * np.pi * special.hyp1f1(0.5, 1.5, kappa)
        Wn = numerator / denominator
        return Wn

    def spherical_harmonics_representation(self, sh_order=None, **kwargs):
        r""" The Watson spherical distribution model in spherical harmonics.
        The minimum order is automatically derived from numerical experiments
        to ensure fast function executation and accurate results.

        Parameters
        ----------
        sh_order : int,
            maximum spherical harmonics order to be used in the approximation.

        Returns
        -------
        watson_sh : array,
            spherical harmonics of Watson probability density.
        """
        odi = kwargs.get('odi', self.odi)
        mu = kwargs.get('mu', self.mu)
        if sh_order is None:
            sh_order = get_sh_order_from_odi(odi)

        # Sample the density on the hemisphere, then project onto the
        # precomputed pseudo-inverse SH basis for this order.
        watson_sf = self(hemisphere.vertices, mu=mu, odi=odi)
        sh_mat_inv = inverse_sh_matrix_kernel[sh_order]
        watson_sh = np.dot(sh_mat_inv, watson_sf)
        return watson_sh
class SD2Bingham(ModelProperties):
    r""" The Bingham spherical distribution model [1]_ [2]_ [3]_ using angles.

    Parameters
    ----------
    mu : array, shape(2),
        angles [theta, phi] representing main orientation on the sphere.
        theta is inclination of polar angle of main angle mu [0, pi].
        phi is polar angle of main angle mu [-pi, pi].
    psi : float,
        angle in radians of the bingham distribution around mu [0, pi].
    kappa : float,
        first concentration parameter of the Bingham distribution.
        defined as kappa = kappa1 - kappa3.
    beta : float,
        second concentration parameter of the Bingham distribution.
        defined as beta = kappa2 - kappa3. Bingham becomes Watson when beta=0.

    References
    ----------
    .. [1] Kaden et al.
           "Parametric spherical deconvolution: inferring anatomical
           connectivity using diffusion MR imaging". NeuroImage (2007)
    .. [2] Sotiropoulos et al.
           "Ball and rackets: inferring fiber fanning from
           diffusion-weighted MRI". NeuroImage (2012)
    .. [3] Tariq et al.
           "Bingham--NODDI: Mapping anisotropic orientation dispersion of
           neurites using diffusion MRI". NeuroImage (2016)
    """

    _parameter_ranges = {
        'mu': ([0, np.pi], [-np.pi, np.pi]),
        'psi': (0, np.pi),
        'odi': (0.02, 0.99),
        'beta_fraction': (0, 1)  # beta<=kappa in fact
    }
    _parameter_scales = {
        'mu': np.r_[1., 1.],
        'psi': 1.,
        'odi': 1.,
        'beta_fraction': 1.
    }
    _parameter_types = {
        'mu': 'orientation',
        'psi': 'circular',
        'odi': 'normal',
        'beta_fraction': 'normal'
    }
    _model_type = 'SphericalDistribution'

    def __init__(self, mu=None, psi=None, odi=None, beta_fraction=None):
        self.mu = mu
        self.psi = psi
        self.odi = odi
        self.beta_fraction = beta_fraction

    def __call__(self, n, **kwargs):
        r""" The Watson spherical distribution model.

        Parameters
        ----------
        n : array of shape(3) or array of shape(N x 3),
            sampled orientations of the Watson distribution.

        Returns
        -------
        Bn: float or array of shape(N),
            Probability density at orientations n, given mu and kappa.
        """
        odi = kwargs.get('odi', self.odi)
        beta_fraction = kwargs.get('beta_fraction', self.beta_fraction)
        mu = kwargs.get('mu', self.mu)
        psi = kwargs.get('psi', self.psi)
        # The model is parameterized by odi and beta_fraction; recover
        # the Bingham concentration parameters (beta <= kappa).
        kappa = odi2kappa(odi)
        beta = beta_fraction * kappa

        mu_cart = utils.unitsphere2cart_1d(mu)
        # Secondary dispersion axis: the y-axis rotated by (theta, phi, psi).
        R = utils.rotation_matrix_100_to_theta_phi_psi(mu[0], mu[1], psi)
        mu_beta = R.dot(np.r_[0., 1., 0.])

        numerator = _probability_bingham(kappa, beta, mu_cart, mu_beta, n)
        # Normalization is interpolated from a precomputed spline fit
        # (see _get_normalization).
        denominator = 4 * np.pi * self._get_normalization(kappa, beta)
        Bn = numerator / denominator
        return Bn

    def spherical_harmonics_representation(self, sh_order=None, **kwargs):
        r""" The Bingham spherical distribution model in spherical harmonics.
        The minimum order is automatically derived from numerical experiments
        to ensure fast function executation and accurate results.

        Parameters
        ----------
        sh_order : int,
            maximum spherical harmonics order to be used in the approximation.

        Returns
        -------
        bingham_sh : array,
            spherical harmonics of Bingham probability density.
        """
        odi = kwargs.get('odi', self.odi)
        beta_fraction = kwargs.get('beta_fraction', self.beta_fraction)
        mu = kwargs.get('mu', self.mu)
        psi = kwargs.get('psi', self.psi)
        if sh_order is None:
            sh_order = get_sh_order_from_odi(odi)

        # Sample the density on the hemisphere, then project onto the
        # precomputed pseudo-inverse SH basis for this order.
        bingham_sf = self(hemisphere.vertices, mu=mu, psi=psi, odi=odi,
                          beta_fraction=beta_fraction)

        sh_mat_inv = inverse_sh_matrix_kernel[sh_order]
        bingham_sh = np.dot(sh_mat_inv, bingham_sf)
        return bingham_sh

    def _get_normalization(self, kappa, beta):
        """
        The hyperconfluent function with matrix input is not available in
        python, so to normalize we estimated the bingham sphere function
        for kappa, beta in [0, 32] and estimated a 50x50 grid of its spherical
        means.
        Since the spherical mean of the bingham is similar to an exponential,
        we took its natural logarithm and fitted it to a 2D spline function.
        Below we use the fitted spline parameters in
        log_bingham_normalization_splinefit to interpolate the normalization
        for the distribution.

        code to generate the interpolation:
        from dipy.data import get_sphere, HemiSphere
        from dmipy.signal_models.spherical_mean import (
            estimate_spherical_mean_shell)
        import numpy as np
        sphere = get_sphere()
        n = HemiSphere(sphere.x, sphere.y, sphere.z).subdivide().vertices
        R = np.eye(3)
        norm_size = 50
        numerator = np.zeros(n.shape[0])
        norm_grid = np.ones((norm_size, norm_size))
        kappa_beta_range = np.linspace(0, 32, norm_size)
        for i in np.arange(norm_size):
            for j in np.arange(i + 1):
                Bdiag = np.diag(np.r_[kappa_beta_range[i],
                                      kappa_beta_range[j],
                                      0])
                B = R.dot(Bdiag).dot(R.T)
                for k, n_ in enumerate(n):
                    numerator[k] = np.exp(n_.dot(B).dot(n_))
                norm_grid[i, j] = norm_grid[j, i] = (
                    estimate_spherical_mean_shell(
                        numerator, n, sh_order=12))
        log_norm_grid = np.log(norm_grid)
        kappa_grid, beta_grid = np.meshgrid(kappa_beta_range, kappa_beta_range)
        from scipy import interpolate
        tck = interpolate.bisplrep(kappa_grid.ravel(),
                                   beta_grid.ravel(),
                                   log_norm_grid.ravel(), s=0)
        np.savez("bingham_normalization_splinefit.npz", tck)

        Parameters
        ----------
        kappa : float,
            first concentration parameter of the Bingham distribution.
            defined as kappa = kappa1 - kappa3.
        beta : float,
            second concentration parameter of the Bingham distribution.
            defined as beta = kappa2 - kappa3. Bingham becomes Watson when
            beta=0.

        Returns
        -------
        bingham_normalization: float
            spherical mean / normalization of the bingham distribution
        """
        # Evaluate the fitted 2D spline of the log-normalization and
        # exponentiate back to the normalization itself.
        log_norm = interpolate.bisplev(kappa, beta,
                                       log_bingham_normalization_splinefit)
        bingham_normalization = np.exp(log_norm)
        return bingham_normalization
class SD3SphericalHarmonics(ModelProperties):
    r"""A real-valued spherical harmonics distribution.

    Parameters
    ----------
    sh_order: int,
        maximum spherical harmonics order.
    sh_coeff: np.ndarray that must be of shape corresponding to sh_order.
        spherical harmonics coefficients of the distribution.

    Raises
    ------
    ValueError
        If sh_coeff is given but its length does not match the number of
        coefficients implied by sh_order.
    """

    def __init__(self, sh_order, sh_coeff=None):
        self.sh_order = sh_order
        # Number of real symmetric SH coefficients for this order.
        self.N_coeff = int((sh_order + 2) * (sh_order + 1) // 2)
        if sh_coeff is not None:
            if len(sh_coeff) != self.N_coeff:
                msg = 'if given, sh_coeff length must correspond to N_coeffs '\
                      'associated with sh_order ({} vs {}).'
                # BUG FIX: previously written as len(sh_coeff, self.N_coeff),
                # which called len() with two arguments and raised a
                # TypeError instead of this intended ValueError.
                raise ValueError(msg.format(len(sh_coeff), self.N_coeff))
        self.sh_coeff = sh_coeff

        # Per-instance parameter metadata (cardinality depends on sh_order).
        self._parameter_ranges = {'sh_coeff': [
            [None, None] for i in range(self.N_coeff)]}
        self._parameter_scales = {'sh_coeff':
                                  np.ones(self.N_coeff, dtype=float)}
        self._parameter_cardinality = {'sh_coeff': self.N_coeff}
        self._parameter_types = {'sh_coeff': 'sh_coefficients'}
        self._parameter_optimization_flags = {'sh_coeff': True}

    def __call__(self, n, **kwargs):
        r"""Returns the sphere function at cartesian orientations n given
        spherical harmonic coefficients.

        Parameters
        ----------
        n : array of shape(N x 3),
            sampled orientations of the Watson distribution.

        Returns
        -------
        SHn: array of shape(N),
            Probability density at orientations n, given sh coeffs.
        """
        # calculate SHT matrix for the sampled orientations
        _, theta, phi = utils.cart2sphere(n).T
        SHT = real_sym_sh_mrtrix(self.sh_order, theta, phi)[0]

        # transform coefficients to sphere values
        sh_coeff = kwargs.get('sh_coeff', self.sh_coeff)
        SHn = SHT.dot(sh_coeff)
        return SHn

    def spherical_harmonics_representation(self, **kwargs):
        r"""Returns the spherical harmonic coefficients themselves.
        """
        return kwargs.get('sh_coeff', self.sh_coeff)
class DD1Gamma(ModelProperties):
    r"""A Gamma distribution of cylinder diameter for given alpha and beta
    parameters. NOTE: This is a distribution for axon DIAMETER and not SURFACE.
    To simulate the diffusion signal of an ensemble of gamma-distributed
    cylinders the probability still needs to be corrected for cylinder surface
    by multiplying by np.pi * radius ** 2 and renormalizing [1]_. Reason being
    that diffusion signals are generated by the volume of spins inside axons
    (cylinders), which is proportional to cylinder surface and not to diameter.

    Parameters
    ----------
    alpha : float,
        shape of the gamma distribution.
    beta : float,
        scale of the gamma distrubution. Different from Bingham distribution!

    References
    ----------
    .. [1] Assaf, Yaniv, et al. "AxCaliber: a method for measuring axon
        diameter distribution from diffusion MRI." Magnetic resonance in
        medicine 59.6 (2008): 1347-1354.
    """
    # Optimization bounds for the dimensionless parameters.
    _parameter_ranges = {
        'alpha': (0.1, 30.),
        'beta': (1e-3, 2)
    }
    # BETA_SCALING maps the dimensionless beta range to physical units
    # (constant defined elsewhere in this module -- TODO confirm magnitude).
    _parameter_scales = {
        'alpha': 1.,
        'beta': BETA_SCALING,
    }
    _parameter_types = {
        'alpha': 'normal',
        'beta': 'normal'
    }
    _model_type = 'SpatialDistribution'

    def __init__(self, alpha=None, beta=None, Nsteps=30,
                 normalization='standard'):
        # Nsteps is the number of radii sampled when the instance is called.
        self.alpha = alpha
        self.beta = beta
        self.Nsteps = Nsteps
        # Map the normalization name to the weighting applied to the pdf
        # before renormalization (see class docstring and [1]_).
        if normalization == 'standard':
            self.norm_func = self.unity
        elif normalization == 'plane':
            self.norm_func = self.length_plane
        elif normalization == 'cylinder':
            self.norm_func = self.surface_cylinder
        elif normalization == 'sphere':
            self.norm_func = self.volume_sphere
        else:
            msg = "Unknown normalization {}".format(normalization)
            raise ValueError(msg)
        # Precompute spline interpolators giving sensible sampling bounds
        # for any (alpha, beta) inside the parameter ranges.
        self.calculate_sampling_start_and_end_points(self.norm_func)

    def length_plane(self, radius):
        "The distance normalization function for planes."
        return 2 * radius

    def surface_cylinder(self, radius):
        "The surface normalization function for cylinders."
        return np.pi * radius ** 2

    def volume_sphere(self, radius):
        "The volume normalization function for spheres."
        return (4. / 3.) * np.pi * radius ** 3

    def unity(self, radius):
        "The standard normalization for the Gamma distribution (none)."
        # NOTE(review): expects an array-like radius (len() on a scalar
        # would fail); all internal callers pass arrays.
        return np.ones(len(radius))

    def calculate_sampling_start_and_end_points(self, norm_func, gridsize=50):
        """
        For a given normalization function calculates the best start and end
        points to sample for all possible values of alpha, beta. This is done
        to make sure the function does not sample where the probability of
        basically zero.

        The function is based on first doing a dense sampling and then finding
        out which points need to be included to have sampled at least 99% of
        the area under the probability density curve.

        It sets two interpolator functions that can be called for any
        combination of alpha,beta and to return the appropriate start and end
        sampling points.

        Parameters
        ----------
        norm_func : normalization function,
            normalization of the model, depends on if it's a sphere/cylinder.
        gridsize : integer,
            value that decides how big the grid will be on which we define the
            start and end sampling points.
        """
        start_grid = np.ones([gridsize, gridsize])
        end_grid = np.ones([gridsize, gridsize])
        # Work in scaled (physical) parameter units.
        alpha_range = (np.array(self._parameter_ranges['alpha']) *
                       self._parameter_scales['alpha'])
        beta_range = (np.array(self._parameter_ranges['beta']) *
                      self._parameter_scales['beta'])
        alpha_linspace = np.linspace(alpha_range[0], alpha_range[1], gridsize)
        beta_linspace = np.linspace(beta_range[0], beta_range[1], gridsize)
        for i, alpha in enumerate(alpha_linspace):
            for j, beta in enumerate(beta_linspace):
                gamma_distribution = stats.gamma(alpha, scale=beta)
                # mean + 9 std covers essentially all probability mass.
                outer_limit = (
                    gamma_distribution.mean() + 9 * gamma_distribution.std())
                x_grid = np.linspace(1e-8, outer_limit, 500)
                pdf = gamma_distribution.pdf(x_grid)
                pdf *= norm_func(x_grid)
                # Normalized cumulative mass from the left...
                cdf = np.cumsum(pdf)
                cdf /= cdf.max()
                # ...and from the right, to locate where mass becomes
                # negligible on each side.
                inverse_cdf = np.cumsum(pdf[::-1])[::-1]
                inverse_cdf /= inverse_cdf.max()
                # Keep the span holding ~99% of the (weighted) area.
                end_grid[i, j] = x_grid[np.argmax(cdf > 0.995)]
                start_grid[i, j] = x_grid[np.argmax(inverse_cdf < 0.995)]
        # Guard against degenerate (zero) sampling bounds.
        start_grid = np.clip(start_grid, 1e-8, np.inf)
        end_grid = np.clip(end_grid, 1e-7, np.inf)
        alpha_grid, beta_grid = np.meshgrid(alpha_linspace, beta_linspace)
        # Quadratic smoothing-spline surfaces over the (alpha, beta) grid,
        # evaluated later with interpolate.bisplev in __call__.
        self.start_interpolator = interpolate.bisplrep(alpha_grid.ravel(),
                                                       beta_grid.ravel(),
                                                       start_grid.T.ravel(),
                                                       kx=2, ky=2)
        self.end_interpolator = interpolate.bisplrep(alpha_grid.ravel(),
                                                     beta_grid.ravel(),
                                                     end_grid.T.ravel(),
                                                     kx=2, ky=2)

    def __call__(self, **kwargs):
        r"""Sample the (normalized) Gamma distribution of cylinder radii.

        Parameters
        ----------
        alpha : float, optional keyword argument,
            shape of the gamma distribution; defaults to ``self.alpha``.
        beta : float, optional keyword argument,
            scale of the gamma distribution; defaults to ``self.beta``.

        Returns
        -------
        radii : array, shape (Nsteps,)
            radii at which the distribution was sampled.
        radii_pdf_normalized : array, shape (Nsteps,)
            probability density at each radius, renormalized to unit area
            after applying the chosen normalization function.
        """
        alpha = kwargs.get('alpha', self.alpha)
        beta = kwargs.get('beta', self.beta)
        gamma_dist = stats.gamma(alpha, scale=beta)
        # Look up the precomputed sampling bounds for these parameters.
        start_point = interpolate.bisplev(alpha, beta, self.start_interpolator)
        end_point = interpolate.bisplev(alpha, beta, self.end_interpolator)
        # The spline can slightly undershoot; keep the start strictly positive.
        start_point = max(start_point, 1e-8)
        radii = np.linspace(start_point, end_point, self.Nsteps)
        normalization = self.norm_func(radii)
        radii_pdf = gamma_dist.pdf(radii)
        radii_pdf_area = radii_pdf * normalization
        # Renormalize so the sampled distribution integrates to one.
        radii_pdf_normalized = (
            radii_pdf_area /
            np.trapz(x=radii, y=radii_pdf_area)
        )
        return radii, radii_pdf_normalized
def _probability_bingham(kappa, beta, mu, mu_beta, n):
"Non-normalized probability of the Bingham distribution."
return np.exp(kappa * np.dot(n, mu) ** 2 +
beta * np.dot(n, mu_beta) ** 2)
def odi2kappa(odi):
    "Calculates concentration (kappa) from orientation dispersion index (odi)."
    # odi in (0, 1] maps to an angle in (0, pi/2]; kappa is its cotangent.
    dispersion_angle = odi * (np.pi / 2.0)
    return 1. / np.tan(dispersion_angle)
def kappa2odi(kappa):
    "Calculates orientation dispersion index (odi) from concentration (kappa)."
    # Inverse of odi2kappa: recover the angle, then rescale to (0, 1].
    dispersion_angle = np.arctan(1. / kappa)
    return (2. / np.pi) * dispersion_angle
# JIT-compile the hot helper functions when numba is importable; otherwise
# the plain NumPy definitions remain in use unchanged.
# NOTE(review): get_sh_order_from_odi is defined elsewhere in this file.
if have_numba:
    get_sh_order_from_odi = numba.njit()(get_sh_order_from_odi)
    _probability_bingham = numba.njit()(_probability_bingham)
| 37,287
|
https://github.com/peterhoeg/ros-cli/blob/master/lib/ros/main/project/files/avp
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
ros-cli
|
peterhoeg
|
Shell
|
Code
| 7
| 38
|
#!/bin/bash
# workaround for https://github.com/ansible/ansible/issues/45214
echo ${ROS_MASTER_KEY}
| 45,279
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.