hexsha
stringlengths 40
40
| size
int64 5
1.05M
| ext
stringclasses 98
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
118
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
118
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
134k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
1.05M
| avg_line_length
float64 1
1.03M
| max_line_length
int64 2
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6b77b2296aa4ffbe2b6c6af31366a8cf41ebdd71
| 3,566
|
js
|
JavaScript
|
src/microzine-3.2/api/Scroller.js
|
rcolepeterson/fnp-static
|
c8e932d547b81e131ab4b029de058dbdebdd5c9c
|
[
"MIT"
] | null | null | null |
src/microzine-3.2/api/Scroller.js
|
rcolepeterson/fnp-static
|
c8e932d547b81e131ab4b029de058dbdebdd5c9c
|
[
"MIT"
] | null | null | null |
src/microzine-3.2/api/Scroller.js
|
rcolepeterson/fnp-static
|
c8e932d547b81e131ab4b029de058dbdebdd5c9c
|
[
"MIT"
] | null | null | null |
import Eventifier from "microzine-3.2/base/EventifierStatic";
import MicrozineEvents from "microzine-3.2/helpers/MicrozineEvents";
import Properties from "microzine-3.2/helpers/MicrozineProperties";
import Page from "microzine-3.2/api/Page";
let _scrollTop = 0,
  _lastScrollTop = 0,
  _body = null,
  _isScrolling = false,
  _isArticleView = false,
  _isMainPageView = true;

/**
 * Tracks the scroll position of the microzine (either the friendly-iframe
 * "content" element or the document body) and re-dispatches scroll activity
 * as "mainPageScroll" / "articleScroll" / "scroll" events via Eventifier.
 * A "scrollended" event is emitted through MicrozineEvents when movement stops.
 */
class Scroller extends Eventifier {
  /**
   * Wires up the scroll listener. In a friendly iframe the scrolling element
   * is the #content element; otherwise the document scrolls (guarded so this
   * is a no-op in non-browser environments).
   */
  static _initialize() {
    if (Properties.isFriendlyIframe) {
      let content = Page.getElementById("content");
      content.addEventListener("scroll", Scroller._onScroll, false);
      _body = content;
    } else if (typeof document !== "undefined") {
      _body = document.body;
      document.addEventListener("scroll", Scroller._onScroll, false);
    }
  }

  /** @returns {number} last sampled scroll offset in pixels */
  static get scrollTop() {
    return _scrollTop;
  }

  /**
   * Scrolls the container to the given offset and records it.
   * @param {number} [scrollTop=0] - target offset in pixels
   */
  static set scrollTop(scrollTop = 0) {
    if (!Properties.isFriendlyIframe) {
      window.scrollTo(0, scrollTop);
    }
    _body.scrollTop = scrollTop;
    _scrollTop = scrollTop;
  }

  /**
   * Switches event routing between article view and main-page view.
   * @param {boolean} visibility - true when an article is visible
   */
  static set articleVisibility(visibility) {
    _isArticleView = visibility;
    _isMainPageView = !visibility;
  }

  /**
   * Raw scroll handler; samples the offset and starts the rAF-driven
   * `_scrolling` loop if it is not already running.
   */
  static _onScroll() {
    if (Properties.isFriendlyIframe) {
      _scrollTop = _body.scrollTop;
    } else {
      _scrollTop = window.pageYOffset;
    }
    if (!_isScrolling) {
      window.requestAnimationFrame(Scroller._scrolling);
    }
    _isScrolling = true;
  }

  /**
   * Per-frame loop: while the offset keeps changing, dispatch the
   * view-specific event plus the generic "scroll" event; once it settles,
   * stop and announce "scrollended".
   */
  static _scrolling() {
    if (Properties.isFriendlyIframe) {
      _scrollTop = _body.scrollTop;
    } else {
      _scrollTop = window.pageYOffset;
    }
    if (_lastScrollTop !== _scrollTop && _scrollTop >= 0) {
      if (_isMainPageView) {
        Scroller.dispatchEvent("mainPageScroll", { scrollTop: _scrollTop });
      } else if (_isArticleView) {
        Scroller.dispatchEvent("articleScroll", { scrollTop: _scrollTop });
      }
      Scroller.dispatchEvent("scroll", { scrollTop: _scrollTop });
      window.requestAnimationFrame(Scroller._scrolling);
    } else {
      _isScrolling = false;
      MicrozineEvents.dispatchEvent("scrollended", {});
    }
    _lastScrollTop = _scrollTop;
  }

  /**
   * Smoothly animates the scroll offset to `endY`.
   * @param {number} [endY=0] - destination offset in pixels
   * @param {number} [dur=800] - animation duration in milliseconds
   */
  static smoothScrollTo(endY = 0, dur = 800) {
    let animationParams = {
      startY: _scrollTop,
      deltaY: endY - _scrollTop,
      startTime: Date.now(),
      totalTime: dur
    };
    window.requestAnimationFrame(() => {
      Scroller._animationTick(animationParams);
    });
  }

  /**
   * One animation frame of smoothScrollTo. `elapsed` is clamped to
   * `totalTime` so the last frame lands exactly on startY + deltaY instead of
   * overshooting when the frame fires slightly after the deadline.
   */
  static _animationTick({ startY, deltaY, startTime, totalTime }) {
    let elapsed = Math.min(Date.now() - startTime, totalTime);
    Scroller.scrollTop = Scroller.easeInCubic(
      elapsed,
      startY,
      deltaY,
      totalTime
    );
    if (elapsed < totalTime) {
      window.requestAnimationFrame(() => {
        Scroller._animationTick({ startY, deltaY, startTime, totalTime });
      });
    }
  }

  /**
   * Cubic easing. Note: despite the name, the formula
   * c * ((t/d - 1)^3 + 1) + b is the classic ease-OUT cubic curve; the name
   * is kept because it is part of the public interface.
   * @param {number} t - elapsed time
   * @param {number} b - start value
   * @param {number} c - total change
   * @param {number} d - duration
   * @returns {number} eased value at time t
   */
  static easeInCubic(t, b, c, d) {
    t /= d;
    t--;
    return c * (t * t * t + 1) + b;
  }

  /**
   * Returns the percentage the page has scrolled.
   *
   * @returns {number} - amount scrolled, 0-100.
   */
  static percentageScrolled() {
    let h = document.documentElement,
      b = document.body,
      st = "scrollTop",
      sh = "scrollHeight";
    if (Properties.isFriendlyIframe) {
      return (_body[st] / (_body[sh] - window.innerHeight)) * 100;
    }
    return ((h[st] || b[st]) / ((h[sh] || b[sh]) - h.clientHeight)) * 100;
  }
}

// Defer listener installation until the microzine reports ready.
MicrozineEvents.addEventListener(
  "microzineready",
  Scroller._initialize.bind(Scroller)
);
export default Scroller;
| 25.654676
| 76
| 0.636007
|
d7032299323623fce36256d9763665034ed95931
| 11,848
|
ps1
|
PowerShell
|
API/v4/Applications.ps1
|
MarkDPierce/Powershell-Loginvsi-API
|
4d20039dde35b00f6ed512b97c7bc2fafba21da5
|
[
"MIT"
] | null | null | null |
API/v4/Applications.ps1
|
MarkDPierce/Powershell-Loginvsi-API
|
4d20039dde35b00f6ed512b97c7bc2fafba21da5
|
[
"MIT"
] | null | null | null |
API/v4/Applications.ps1
|
MarkDPierce/Powershell-Loginvsi-API
|
4d20039dde35b00f6ed512b97c7bc2fafba21da5
|
[
"MIT"
] | null | null | null |
#Requires -Version 5
$PSDefaultParameterValues['*:Verbose'] = $false
Import-Module -Name ".\API\v4\auth.ps1" -Force -PassThru -WarningAction SilentlyContinue
function Get-Applications{
    <#
    .SYNOPSIS
        Retrieves the list of applications from the API, logging the raw
        response to Results\APILogs\Get-Applications.log.
    .NOTES
        $apiVersion and $Global:BASEURI are expected to be set by the caller's
        scope (see auth.ps1 import at the top of the file).
        Fix over the original: $params was appended to ('+=') without ever
        being initialized; that only worked because $null + string is a
        string, and it breaks under Set-StrictMode. It is now assigned.
    #>
    [CmdletBinding()]
    param (
        [Parameter(mandatory = $false)]
        [ValidateSet('name', 'description')]
        [string]
        $OrderBy = 'name',
        [Parameter(mandatory = $false)]
        [ValidateSet('ascending', 'descending')]
        [string]
        $Direction = 'descending',
        [Parameter(mandatory = $false)]
        [string]
        $Count = "1",
        [ValidateSet('true', 'false')]
        [string]
        $includeTotalCount,
        [ValidateSet('all', 'none', 'script', 'timers')]
        [string]
        $includeOptions,
        [string]
        $Token = $global:TokenUser
    )
    begin{
        $AuthHeaders = New-AuthHeaders -Token $Token
        $AuthHeaders.Add("accept", "application/json")
        # Base query string; optional parameters are appended only when given.
        $params = "orderBy=$OrderBy&direction=$Direction&count=$Count"
        if ($includeTotalCount) {
            $params += "&includeTotalCount=$includeTotalCount"
        }
        if ($includeOptions) {
            $params += "&includeOptions=$includeOptions"
        }
    }
    process{
        try {
            $requestData = Invoke-WebRequest -Uri "$Global:BASEURI/$apiVersion/applications?$params" `
                -Headers $AuthHeaders `
                -UseBasicParsing `
                -ContentType 'application/json' `
                -TimeoutSec 10 `
                -DisableKeepAlive `
                -PassThru `
                -OutFile 'Results\APILogs\Get-Applications.log' `
                -Method GET
            return $requestData
        }
        catch [System.Net.WebException] {
            # Surface HTTP-level failures as a structured object instead of a
            # terminating error so callers can inspect the status code.
            $errObj = [PSCustomObject]@{
                statuscode = [int]$_.Exception.Response.StatusCode
                response = $_.Exception.Response
                baseURI = $_.Exception.Response.ResponseUri
            }
            return $errObj
        }
        catch {
            Write-Error $_
        }
    }
    end{
        Write-Verbose "[URI] $Global:BASEURI/$apiVersion/applications?$params"
    }
}
function Get-ApplicationsID{
    # Fetches a single application by ID, optionally expanding script/timer
    # options, and logs the raw response to disk. On an HTTP failure a
    # structured error object is returned instead of throwing.
    [CmdletBinding()]
    param (
        [Parameter(mandatory = $true)]
        [string]
        $ApplicationID,
        [ValidateSet('none', 'script', 'timers', 'all')]
        [string]
        $includeOptions,
        [string]
        $Token = $global:TokenUser
    )
    begin{
        $AuthHeaders = New-AuthHeaders -Token $Token
        $AuthHeaders.Add("accept", "application/json")
        # Path suffix: "/<id>" plus an optional query string.
        $params = "/$ApplicationID"
        if ($includeOptions) {
            $params += "?includeOptions=$includeOptions"
        }
    }
    process{
        try {
            $webRequestArgs = @{
                Uri              = "$Global:BASEURI/$apiVersion/applications$params"
                Headers          = $AuthHeaders
                UseBasicParsing  = $true
                ContentType      = 'application/json'
                TimeoutSec       = 10
                DisableKeepAlive = $true
                PassThru         = $true
                OutFile          = 'Results\APILogs\Get-ApplicationsID.log'
                Method           = 'GET'
            }
            return Invoke-WebRequest @webRequestArgs
        }
        catch [System.Net.WebException] {
            return [PSCustomObject]@{
                statuscode    = [int]$_.Exception.Response.StatusCode
                response      = $_.Exception.Response
                baseURI       = $_.Exception.Response.ResponseUri
                ApplicationID = $ApplicationID
            }
        }
        catch {
            Write-Error $_
        }
    }
    end{
        Write-Verbose "[URI] $Global:BASEURI/$apiVersion/applications$params"
    }
}
function Add-Application{
    # Creates a new application (WindowsApp or WebApp) via POST and logs the
    # raw response to disk. The '$type' discriminator in the payload tells
    # the API which concrete application type to create.
    [CmdletBinding()]
    param (
        [ValidateSet('WindowsApp', 'WebApp')]
        [string]
        $AppType,
        [string]
        $ApplicationName='blank name',
        $AppDescription='blank desc',
        $AppUserName='foobar',
        $AppPass='barfoo',
        $WorkingDirectory = 'c:\windows\system32\',
        $CommandLine= 'notepad.exe',
        $StartURl = 'https:\\foobar.com',
        $BrowserName = 'Chrome',
        [string]
        $Token = $global:TokenUser
    )
    begin{
        $AuthHeaders = New-AuthHeaders -Token $Token
        $AuthHeaders.Add("accept", "application/json")
        # Build the type-specific JSON payload.
        if ($AppType -eq 'WindowsApp'){
            $body = @{
                '$type'            = "WindowsApp";
                'CommandLine'      = $CommandLine;
                'WorkingDirectory' = $WorkingDirectory;
                'name'             = $ApplicationName;
                "description"      = $AppDescription;
                "userName"         = $AppUserName;
                "password"         = $AppPass
            }
        }else {
            $body = @{
                '$type'       = "WebApp";
                "BrowserName" = $BrowserName;
                "name"        = $ApplicationName;
                "description" = $AppDescription;
                "userName"    = $AppUserName;
                "password"    = $AppPass;
                "StartUrl"    = $StartURl
            }
        }
    }
    process{
        try {
            $webRequestArgs = @{
                Uri              = "$Global:BASEURI/$apiVersion/applications"
                Headers          = $AuthHeaders
                UseBasicParsing  = $true
                ContentType      = 'application/json'
                TimeoutSec       = 10
                DisableKeepAlive = $true
                PassThru         = $true
                OutFile          = 'Results\APILogs\Add-Application.log'
                Body             = ($body | ConvertTo-Json)
                Method           = 'POST'
            }
            return Invoke-WebRequest @webRequestArgs
        }
        catch [System.Net.WebException] {
            return [PSCustomObject]@{
                statuscode = [int]$_.Exception.Response.StatusCode
                response   = $_.Exception.Response
                baseURI    = $_.Exception.Response.ResponseUri
            }
        }
        catch {
            Write-Error $_
        }
    }
    end{
        Write-Verbose "[URI] $Global:BASEURI/$apiVersion/applications"
    }
}
function Edit-Application{
    # Updates an existing application via PUT.
    # Fixes over the original:
    #  * mustUpdatePassword was computed with $NewAppPass.count, which is 1
    #    for any string -- even '' -- so it was always 'true'. Length is used
    #    instead, so it is 'true' only when a new password was supplied.
    #  * The WindowsApp payload key 'CommandLIne' was misspelled; it is now
    #    'CommandLine', consistent with Add-Application.
    [CmdletBinding()]
    param (
        [Parameter(mandatory = $true)]
        [string]
        $ApplicationID,
        [ValidateSet('WindowsApp', 'WebApp')]
        [string]
        $AppType,
        [string]
        $NewAppName = '',
        $NewAppDescription = '',
        $NewAppUserName = '',
        $NewAppPass = '',
        $NewCommandLine = '',
        $NewWorkingDirectory='',
        $NewStartURl = '',
        $NewBrowserName = '',
        $BrowserName = '',
        [string]
        $Token = $global:TokenUser
    )
    begin{
        # Only force a password update when a non-empty password was given.
        if ($NewAppPass.Length -gt 0) {
            $pvalue = "true"
        } else {
            $pvalue = "false"
        }
        $AuthHeaders = New-AuthHeaders -Token $Token
        $AuthHeaders.Add("accept", "application/json")
        if ($AppType -eq 'WindowsApp') {
            $body = @{
                '$type' = "WindowsApp";
                "name" = "$NewAppName";
                "description" = "$NewAppDescription";
                "userName" = "$NewAppUserName";
                "password" = "$NewAppPass";
                "CommandLine" = "$NewCommandLine";
                "WorkingDirectory" = "$NewWorkingDirectory";
                "mustUpdatePassword" = "$pvalue"
            }
        }else{
            # NOTE(review): the WebApp payload maps 'name' to $NewBrowserName
            # and 'BrowserName' to $BrowserName -- this looks swapped but is
            # preserved as-is; confirm against the API schema before changing.
            $body = @{
                '$type' = "WebApp";
                "BrowserName" = "$BrowserName";
                "name" = "$NewBrowserName";
                "description" = "$NewAppDescription";
                "userName" = "$NewAppUserName";
                "password" = "$NewAppPass";
                "mustUpdatePassword" = "$pvalue";
                "StartUrl" = "$NewStartURl"
            }
        }
    }
    process{
        try {
            $requestData = Invoke-WebRequest -Uri "$Global:BASEURI/$apiVersion/applications/$ApplicationID" `
                -Headers $AuthHeaders `
                -UseBasicParsing `
                -ContentType 'application/json' `
                -TimeoutSec 10 `
                -DisableKeepAlive `
                -PassThru `
                -OutFile 'Results\APILogs\Edit-Application.log' `
                -Body ($body | ConvertTo-Json) `
                -Method PUT
            return $requestData
        }
        catch [System.Net.WebException] {
            $errObj = [PSCustomObject]@{
                statuscode = [int]$_.Exception.Response.StatusCode
                response = $_.Exception.Response
                baseURI = $_.Exception.Response.ResponseUri
            }
            return $errObj
        }
        catch {
            Write-Error $_
        }
    }
    end{
        Write-Verbose "[URI] $Global:BASEURI/$apiVersion/applications/$ApplicationID"
    }
}
function Remove-Applications {
    # Deletes multiple applications in one DELETE request whose body is a
    # JSON array of application IDs. Logs the raw response to disk.
    [CmdletBinding()]
    param (
        [System.Array]
        $AppIDs,
        [string]
        $Token = $global:TokenUser
    )
    begin {
        $AuthHeaders = New-AuthHeaders -Token $Token
        $AuthHeaders.Add("accept", "application/json")
        # ConvertTo-Json already emits '[...]' when the pipeline yields more
        # than one item, but a bare scalar for a single item -- so brackets
        # are added by hand only in the single-ID branch below. The embedded
        # newlines/whitespace are harmless to the JSON parser.
        if ($AppIDs.Count -gt 1) {
            $r = ($AppIDs | ConvertTo-Json)
            $body = "`n
            `n $r
            `n "
        }else {
            $r = ($AppIDs | ConvertTo-Json)
            $body = "[`n
            `n $r
            `n]"
        }
    }
    process {
        try {
            $requestData = Invoke-RestMethod -Uri "$Global:BASEURI/$apiVersion/applications" `
                -Headers $AuthHeaders `
                -UseBasicParsing `
                -ContentType 'application/json' `
                -TimeoutSec 10 `
                -DisableKeepAlive `
                -PassThru `
                -OutFile 'Results\APILogs\Remove-Applications.log' `
                -Body $body `
                -Method DELETE
            return $requestData
        }
        catch [System.Net.WebException] {
            # Structured error object so callers can inspect the status code.
            $errObj = [PSCustomObject]@{
                statuscode = [int]$_.Exception.Response.StatusCode
                response = $_.Exception.Response
                baseURI = $_.Exception.Response.ResponseUri
            }
            return $errObj
        }
        catch {
            Write-Error $_
        }
    }
    end {
        Write-Verbose "[URI] $Global:BASEURI/$apiVersion/applications"
    }
}
function Remove-ApplicationID {
    # Deletes a single application by ID.
    # Fix: the original passed '-Body $body' where $body was never defined in
    # this function, so whatever $body happened to exist in a parent scope was
    # silently sent with the DELETE. The stray parameter is removed; the ID in
    # the URI is the only payload this endpoint needs.
    [CmdletBinding()]
    param (
        # NOTE(review): typed [System.Array] but interpolated into the URI as
        # a single ID -- confirm callers always pass exactly one element.
        [System.Array]
        $AppID,
        [string]
        $Token = $global:TokenUser
    )
    begin {
        $AuthHeaders = New-AuthHeaders -Token $Token
        $AuthHeaders.Add("accept", "application/json")
    }
    process {
        try {
            $requestData = Invoke-RestMethod -Uri "$Global:BASEURI/$apiVersion/applications/$AppID" `
                -Headers $AuthHeaders `
                -UseBasicParsing `
                -ContentType 'application/json' `
                -TimeoutSec 10 `
                -DisableKeepAlive `
                -PassThru `
                -OutFile 'Results\APILogs\Remove-ApplicationID.log' `
                -Method DELETE
            return $requestData
        }
        catch [System.Net.WebException] {
            # Structured error object so callers can inspect the status code.
            $errObj = [PSCustomObject]@{
                statuscode = [int]$_.Exception.Response.StatusCode
                response = $_.Exception.Response
                baseURI = $_.Exception.Response.ResponseUri
            }
            return $errObj
        }
        catch {
            Write-Error $_
        }
    }
    end {
        Write-Verbose "[URI] $Global:BASEURI/$apiVersion/applications/$AppID"
    }
}
| 29.110565
| 109
| 0.487424
|
a3b20563dbcb1c9a91d1e84db0ef6b86e138c744
| 3,844
|
java
|
Java
|
addOns/openapi/src/main/java/org/zaproxy/zap/extension/openapi/OpenApiSpider.java
|
theamanrawat/zap-extensions
|
38271747467f3c18e6c2125b6a8095ce7cf3cd60
|
[
"Apache-2.0"
] | 1
|
2021-04-20T20:54:35.000Z
|
2021-04-20T20:54:35.000Z
|
addOns/openapi/src/main/java/org/zaproxy/zap/extension/openapi/OpenApiSpider.java
|
theamanrawat/zap-extensions
|
38271747467f3c18e6c2125b6a8095ce7cf3cd60
|
[
"Apache-2.0"
] | 1
|
2021-08-17T16:47:08.000Z
|
2021-08-17T17:56:10.000Z
|
addOns/openapi/src/main/java/org/zaproxy/zap/extension/openapi/OpenApiSpider.java
|
theamanrawat/zap-extensions
|
38271747467f3c18e6c2125b6a8095ce7cf3cd60
|
[
"Apache-2.0"
] | 1
|
2020-04-23T22:50:29.000Z
|
2020-04-23T22:50:29.000Z
|
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2017 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.openapi;
import java.util.Locale;
import net.htmlparser.jericho.Source;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.parosproxy.paros.control.Control;
import org.parosproxy.paros.network.HttpHeader;
import org.parosproxy.paros.network.HttpMessage;
import org.parosproxy.paros.network.HttpSender;
import org.zaproxy.zap.extension.openapi.converter.Converter;
import org.zaproxy.zap.extension.openapi.converter.swagger.SwaggerConverter;
import org.zaproxy.zap.extension.openapi.network.Requestor;
import org.zaproxy.zap.extension.spider.ExtensionSpider;
import org.zaproxy.zap.model.ValueGenerator;
import org.zaproxy.zap.spider.parser.SpiderParser;
/**
 * Spider parser that recognizes OpenAPI/Swagger definitions in HTTP responses
 * and feeds the requests derived from them back through the spider.
 */
public class OpenApiSpider extends SpiderParser {

    private static final Logger log = LogManager.getLogger(OpenApiSpider.class);

    private final Requestor requestor;
    private ValueGenerator valueGenerator;

    public OpenApiSpider() {
        requestor = new Requestor(HttpSender.SPIDER_INITIATOR);
        requestor.addListener(new HistoryPersister());
    }

    /**
     * Converts the response body into request models via SwaggerConverter and
     * issues them through the requestor.
     *
     * @return {@code true} when conversion and execution succeeded,
     *     {@code false} on any failure (logged at debug level).
     */
    @Override
    public boolean parseResource(HttpMessage message, Source source, int depth) {
        try {
            Converter converter = new SwaggerConverter(
                    null,
                    message.getRequestHeader().getURI().toString(),
                    message.getResponseBody().toString(),
                    getValueGenerator());
            requestor.run(converter.getRequestModels());
            return true;
        } catch (Exception e) {
            log.debug(e.getMessage(), e);
            return false;
        }
    }

    /**
     * Heuristic check: either the dedicated OpenAPI media type, or a
     * JSON/YAML content type whose first 250 body characters mention
     * "swagger" or "openapi". Any exception (e.g. a missing Content-Type
     * header) means "cannot parse".
     */
    @Override
    public boolean canParseResource(HttpMessage message, String path, boolean wasAlreadyConsumed) {
        try {
            String contentType = message.getResponseHeader()
                    .getHeader(HttpHeader.CONTENT_TYPE)
                    .toLowerCase(Locale.ROOT);
            if (contentType.startsWith("application/vnd.oai.openapi")) {
                return true;
            }
            boolean structuredType = contentType.contains("json") || contentType.contains("yaml");
            String bodyPrefix = StringUtils.left(message.getResponseBody().toString(), 250)
                    .toLowerCase(Locale.ROOT);
            boolean mentionsSpec = bodyPrefix.contains("swagger") || bodyPrefix.contains("openapi");
            if (structuredType && mentionsSpec) {
                return true;
            }
        } catch (Exception e) {
            return false;
        }
        log.debug("Can't parse {}", message.getRequestHeader().getURI());
        return false;
    }

    /**
     * Lazily resolves the spider extension's value generator; the extension
     * may not be loaded when this parser is constructed.
     */
    private ValueGenerator getValueGenerator() {
        if (valueGenerator == null) {
            ExtensionSpider spider = Control.getSingleton()
                    .getExtensionLoader()
                    .getExtension(ExtensionSpider.class);
            valueGenerator = spider.getValueGenerator();
        }
        return valueGenerator;
    }
}
| 38.44
| 100
| 0.649584
|
c6c87cc50670a70725c495955081ce8a1e720d9d
| 6,677
|
py
|
Python
|
ontask/templatetags/ontask_tags.py
|
pinheiroo27/ontask_b
|
23fee8caf4e1c5694a710a77f3004ca5d9effeac
|
[
"MIT"
] | 33
|
2017-12-02T04:09:24.000Z
|
2021-11-07T08:41:57.000Z
|
ontask/templatetags/ontask_tags.py
|
pinheiroo27/ontask_b
|
23fee8caf4e1c5694a710a77f3004ca5d9effeac
|
[
"MIT"
] | 189
|
2017-11-16T04:06:29.000Z
|
2022-03-11T23:35:59.000Z
|
ontask/templatetags/ontask_tags.py
|
pinheiroo27/ontask_b
|
23fee8caf4e1c5694a710a77f3004ca5d9effeac
|
[
"MIT"
] | 30
|
2017-11-30T03:35:44.000Z
|
2022-01-31T03:08:08.000Z
|
# -*- coding: utf-8 -*-
"""Tags to include URLS and other auxiliary HTML resources."""
import json
from django import template
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.html import format_html
from django.utils.safestring import mark_safe
import ontask
from ontask import models
from ontask.action import evaluate
from ontask.dataops import sql
# Name of the template-context variable under which the current action is
# stored (read by the ot_insert_* tags below).
ACTION_CONTEXT_VAR = 'ONTASK_ACTION_CONTEXT_VARIABLE___'

# Registry for the template tags/filters defined in this module.
register = template.Library()

# Tag to get ontask_version
@register.simple_tag
def ontask_version() -> str:
    """Return the OnTask package version string (``ontask.__version__``)."""
    return ontask.__version__
@register.filter
def country(country_code) -> str:
    """Extract the country from the given variable.

    Delegates to ``ontask.get_country_code``; the exact mapping applied to
    ``country_code`` is defined there.
    """
    return ontask.get_country_code(country_code)
@register.simple_tag
def ontask_jquery() -> str:
    """Return <script> tags loading JQuery 3.4.1 and jquery-validate 1.19.

    The URLs are protocol-relative ("//...") so they follow the page scheme.
    """
    return format_html(
        '<script src="//code.jquery.com/jquery-3.4.1.min.js"></script>'
        + '<script src="//cdnjs.cloudflare.com/ajax/libs/jquery-validate/1.19'
        '.0/jquery.validate.min.js"></script>'
        + '<script src="//cdnjs.cloudflare.com/ajax/libs/jquery-validate/1.19'
        '.0/additional-methods.min.js"></script>')
@register.simple_tag
def ontask_jqcron_js() -> str:
    """Return <script> tags for the jqCron widget and its locale file.

    The locale suffix is derived from the configured LANGUAGE_CODE.
    """
    return format_html(
        '<script src="{0}js/jqCron/jqCron.js"></script>'.format(
            settings.STATIC_URL)
        + '<script src="{0}js/jqCron/jqCron.{1}.js"></script>'.format(
            settings.STATIC_URL,
            ontask.get_country_code(settings.LANGUAGE_CODE)))
@register.simple_tag
def ontask_jqcron_css() -> str:
    """Return the <link> tag for the jqCron stylesheet (served from STATIC_URL)."""
    return format_html(
        '<link rel="stylesheet" href="{0}css/jqCron/jqCron.css">'.format(
            settings.STATIC_URL))
@register.simple_tag
def ontask_bootstrap_css() -> str:
    """Return <link> tags for Bootstrap 4.1.3 (with SRI hash) and Font Awesome 4.7.0."""
    return format_html(
        '<link rel="stylesheet" '
        'href="//stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min'
        '.css" integrity="sha384-MCw98'
        '/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO" '
        'crossorigin="anonymous">'
        + '<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font'
        '-awesome.min.css" rel="stylesheet">')
@register.simple_tag
def ontask_bootstrap_js() -> str:
    """Return <script> tags for popper.js and Bootstrap 4.1.3 JS (with SRI hashes)."""
    return format_html(
        '<script src="//cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.3/umd'
        '/popper.min.js" '
        'integrity="sha384-ZMP7rVo3mIykV+2'
        '+9J3UJ46jBk0WLaUAdn689aCwoqbBJiSnjAK/l8WvCWPIPm49" '
        'crossorigin="anonymous"></script>'
        + '<script src="//stackpath.bootstrapcdn.com/bootstrap/4.1.3/js'
        '/bootstrap.min.js" '
        'integrity="sha384-ChfqqxuZUCnJSK3'
        '+MXmPNIyE6ZbWh2IMqE241rYiqJxyMiZ6OW/JmZQ5stwEULTy" '
        'crossorigin="anonymous"></script>')
@register.simple_tag
def ontask_datatables_jquery_js() -> str:
    """Return the <script> tag for DataTables 1.10.21 (jQuery flavour)."""
    return format_html(
        '<script src="//cdn.datatables.net/1.10.21/js/jquery.dataTables.min'
        '.js"></script>')
@register.simple_tag
def ontask_datatables_bootstrap_css() -> str:
    """Return the <link> tag for the DataTables Bootstrap-4 CSS bundle."""
    return format_html(
        '<link rel="stylesheet" type="text/css" '
        'href="//cdn.datatables.net/v/bs4/dt-1.10.21/cr-1.5.0/r-2.2.2/fc-3.2'
        '.5/rr-1.2.4/sc-1.5.0/datatables.min.css"/>')
@register.simple_tag
def ontask_datatables_bootstrap_js() -> str:
    """Return the <script> tag for the DataTables Bootstrap-4 JS bundle."""
    return format_html(
        '<script type="text/javascript" '
        'src="//cdn.datatables.net/v/bs4/dt-1.10.21/cr-1.5.0/r-2.2.2/fc-3.2.5'
        '/rr-1.2.4/sc-1.5.0/datatables.min.js"></script>')
@register.simple_tag
def ontask_datetimepicker_css() -> str:
    """Return <link> tags for bootstrap-datetimepicker (CDN) and the local widget CSS."""
    return format_html(
        ('<link href="//cdnjs.cloudflare.com/ajax/libs/bootstrap'
         + '-datetimepicker/4.17.47/css/bootstrap-datetimepicker.css" '
         + 'type="text/css" media="all" rel="stylesheet"><link href="{'
         + '0}bootstrap_datepicker_plus/css/datepicker-widget.css" '
         + 'type="text/css" media="all" rel="stylesheet">').format(
            settings.STATIC_URL))
@register.simple_tag
def ontask_datetimepicker_js() -> str:
    """Return <script> tags for moment.js (local), bootstrap-datetimepicker (CDN) and the local widget JS."""
    return format_html(
        ('<script type="text/javascript" src="{'
         + '0}js/moment-with-locales.js"></script>').format(
            settings.STATIC_URL)
        + ('<script type="text/javascript" '
           + 'src="//cdnjs.cloudflare.com/ajax/libs/bootstrap-datetimepicker/4'
           + '.17.47/js/bootstrap-datetimepicker.min.js"></script><script '
           + 'type="text/javascript" src="{'
           + '0}bootstrap_datepicker_plus/js/datepicker-widget.js"></script'
           + '>').format(settings.STATIC_URL))
@register.simple_tag(takes_context=True)
def ot_insert_report(context, *args) -> str:
    """Insert in the text a column list.

    Fetches the values of the named columns (filtered by the action's filter
    formula) and renders them either as a JSON object (for JSON report
    actions) or as an HTML table.
    """
    # Action object placed in the context by the evaluation code.
    action = context[ACTION_CONTEXT_VAR]
    real_args = [evaluate.RTR_ITEM(argitem) for argitem in args]
    all_column_values = []
    for column_name in real_args:
        # NOTE(review): RTR_ITEM is applied again here to names that were
        # already transformed above -- confirm this double application is
        # intentional (i.e. that RTR_ITEM is idempotent).
        all_column_values.append([
            str(citem[0]) for citem in sql.get_rows(
                action.workflow.get_data_frame_table_name(),
                column_names=[evaluate.RTR_ITEM(column_name)],
                filter_formula=action.get_filter_formula())])
    if action.action_type == models.Action.JSON_REPORT:
        return mark_safe(json.dumps({
            cname: cval for cname, cval in zip(real_args, all_column_values)}))
    # return the content rendered as a table
    return render_to_string(
        'table.html',
        {
            'column_names': real_args,
            'rows': zip(*all_column_values)})
@register.simple_tag(takes_context=True)
def ot_insert_rubric_feedback(context) -> str:
    """Insert in the text the rubric feedback.

    Renders the rubric criteria texts for the action stored in the template
    context under ACTION_CONTEXT_VAR.
    """
    return render_to_string(
        'action/includes/partial_rubric_message.html',
        context={
            'text_sources': evaluate.render_rubric_criteria(
                context[ACTION_CONTEXT_VAR], context)})
@register.simple_tag
def ontask_shim_respond() -> str:
    """Return html5shiv/respond.js scripts wrapped in an IE<9 conditional comment."""
    return format_html(
        '<!--[if lt IE 9]><script '
        'src="//oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script'
        '><script src="//oss.maxcdn.com/libs/respond.js/1.3.0/respond.min.js'
        '"></script><![endif]-->')
| 35.142105
| 79
| 0.655085
|
c97918099b38fe8939de7a453731711f95b1d198
| 4,218
|
swift
|
Swift
|
iOSEngineerCodeCheckTests/iOSEngineerCodeCheckTests.swift
|
genki-takami/Search-GitHub-Repositories
|
746bc242cdae0e721e88c9ee865dea93d4838937
|
[
"Apache-2.0"
] | null | null | null |
iOSEngineerCodeCheckTests/iOSEngineerCodeCheckTests.swift
|
genki-takami/Search-GitHub-Repositories
|
746bc242cdae0e721e88c9ee865dea93d4838937
|
[
"Apache-2.0"
] | null | null | null |
iOSEngineerCodeCheckTests/iOSEngineerCodeCheckTests.swift
|
genki-takami/Search-GitHub-Repositories
|
746bc242cdae0e721e88c9ee865dea93d4838937
|
[
"Apache-2.0"
] | null | null | null |
//
// iOSEngineerCodeCheckTests.swift
// iOSEngineerCodeCheckTests
//
// Created by 史 翔新 on 2020/04/20.
// Copyright © 2020 YUMEMI Inc. All rights reserved.
//
import XCTest
import Alamofire
@testable import iOSEngineerCodeCheck
class iOSEngineerCodeCheckTests: XCTestCase {

    override func setUpWithError() throws {
        // Put setup code here. This method is called before the invocation of each test method in the class.
    }

    override func tearDownWithError() throws {
        // Put teardown code here. This method is called after the invocation of each test method in the class.
    }

    /// Verifies that the payload returned by responseJSON() decodes into `Repositories`.
    func testAPI_responseJSON() throws {
        let expect = expectation(description: "あらもファイアーーーーーー、、、これは成功します!")
        let baseURL = "https://api.github.com/search/repositories?q="
        let urlString = baseURL + "Alamofire"
        AF.request(urlString).responseJSON() { response in
            do {
                let decoder = JSONDecoder()
                decoder.keyDecodingStrategy = .convertFromSnakeCase
                let repositries: Repositories = try decoder.decode(Repositories.self, from: response.data!)
                print(repositries.items)
                expect.fulfill()
            } catch {
                XCTFail(error.localizedDescription)
                expect.fulfill()
            }
        }
        waitForExpectations(timeout: 5) { error in
            if let _ = error {
                XCTFail("ターーーーーーーーーーイム!!!")
            } else {
                print("完了")
            }
        }
    }

    /// Verifies that responseDecodable() decodes the payload into `Repositories`.
    func testAPI_responseDecodable() throws {
        let expect = expectation(description: "これは失敗します")
        let baseURL = "https://api.github.com/search/repositories?q="
        let urlString = baseURL + "swift"
        AF.request(urlString).validate().responseDecodable(of: Repositories.self) { response in
            // Log the raw value for debugging.
            print(response.value as Any)
            switch response.result {
            case .success(let repo):
                print(repo.items)
            case .failure(let error):
                XCTFail(error.localizedDescription)
            }
            // Fulfill exactly once per completion. The original fulfilled
            // both before the switch and again in the .success branch, which
            // crashes XCTest with "API violation - multiple calls to fulfill".
            expect.fulfill()
        }
        waitForExpectations(timeout: 5) { error in
            if let _ = error {
                XCTFail("ターーーーーーーーーーイム!!!")
            } else {
                print("完了")
            }
        }
    }

    /// Verifies that the URLSession-based request returns a parsable payload.
    func testAPI_URLSession() throws {
        let task: URLSessionTask?
        let expect = expectation(description: "URLSession")
        let url = "https://api.github.com/search/repositories?q=swift"
        task = URLSession.shared.dataTask(with: URL(string: url)!) { (data, res, err) in
            // Avoid try!: a malformed payload should fail the test through
            // the timeout below instead of crashing the test runner.
            guard let data = data,
                  let obj = try? JSONSerialization.jsonObject(with: data) as? [String: Any],
                  let items = obj["items"] as? [[String: Any]] else {
                return
            }
            print(items)
            expect.fulfill()
        }
        task?.resume()
        waitForExpectations(timeout: 5) { error in
            if let _ = error {
                XCTFail("ターーーーーーーーーーイム!!!")
                task?.cancel()
            } else {
                print("完了")
            }
        }
    }

    /// Checks the error message shown in the modal (emoji + Japanese query).
    func testErrorMessage() throws {
        let expect = expectation(description: "絵文字と日本語をテスト")
        var errorMessage = ""
        let word = "こんにちは✋"
        APIClient.fetchRepositories(word) { result in
            switch result {
            case .success(_):
                errorMessage = "成功"
            case .failure(let error):
                errorMessage = String(describing: error)
            }
            expect.fulfill()
            XCTAssertEqual(errorMessage, "リポジトリが存在しません!")
        }
        waitForExpectations(timeout: 5) { error in
            if let _ = error {
                XCTFail("ターーーーーーーーーーイム!!!")
            } else {
                print("完了")
            }
        }
    }
}
| 29.496503
| 111
| 0.541963
|
d71dd09d628b2485c4bf09576744da89e776a56f
| 899
|
psd1
|
PowerShell
|
ADF/ext-DSCResources/ComputerManagementDsc/8.5.0/DSCResources/DSC_IEEnhancedSecurityConfiguration/en-US/DSC_IEEnhancedSecurityConfiguration.strings.psd1
|
brwilkinson/ADFL
|
22efe8310cf78220264339a5662cabb53acf2b22
|
[
"MIT"
] | null | null | null |
ADF/ext-DSCResources/ComputerManagementDsc/8.5.0/DSCResources/DSC_IEEnhancedSecurityConfiguration/en-US/DSC_IEEnhancedSecurityConfiguration.strings.psd1
|
brwilkinson/ADFL
|
22efe8310cf78220264339a5662cabb53acf2b22
|
[
"MIT"
] | null | null | null |
ADF/ext-DSCResources/ComputerManagementDsc/8.5.0/DSCResources/DSC_IEEnhancedSecurityConfiguration/en-US/DSC_IEEnhancedSecurityConfiguration.strings.psd1
|
brwilkinson/ADFL
|
22efe8310cf78220264339a5662cabb53acf2b22
|
[
"MIT"
] | null | null | null |
# Localized (en-US) messages for the DSC_IEEnhancedSecurityConfiguration DSC
# resource. Fix: the SuppressRestart message read "come in affect"; corrected
# to "come into effect".
ConvertFrom-StringData @'
GettingStateMessage = Getting IE Enhanced Security Configuration state for '{0}'. (IEESC0001)
SettingStateMessage = Setting IE Enhanced Security Configuration state for '{0}'. (IEESC0002)
TestingStateMessage = Testing IE Enhanced Security Configuration state for '{0}'. (IEESC0003)
SuppressRestart = Suppressing the restart. For the change to come into effect the node must be restarted manually. (IEESC0004)
InDesiredState = The IE Enhanced Security Configuration for '{0}' is in desired state. (IEESC0005)
NotInDesiredState = The IE Enhanced Security Configuration for '{0}' was {1}, but expected it to be {2}. (IEESC0006)
UnableToDetermineState = The current state cannot be determined because the registry path '{0}' cannot be read. (IEESC0007)
FailedToSetDesiredState = Failed to set the desired state for '{0}'. (IEESC0008)
'@
| 81.727273
| 129
| 0.746385
|
f82e7a724a8170a2a714d68696ac79e907e3dfa2
| 2,830
|
swift
|
Swift
|
Sources/Core/MLRetryButton.swift
|
maclacerda/MLAudioPlayer
|
23a37241c29556321088fde3a958c6654895fb92
|
[
"MIT"
] | 14
|
2018-08-14T14:54:24.000Z
|
2020-10-06T22:57:30.000Z
|
Sources/Core/MLRetryButton.swift
|
maclacerda/MLAudioPlayer
|
23a37241c29556321088fde3a958c6654895fb92
|
[
"MIT"
] | 1
|
2021-02-09T12:55:43.000Z
|
2021-02-09T12:55:43.000Z
|
Sources/Core/MLRetryButton.swift
|
maclacerda/MLAudioPlayer
|
23a37241c29556321088fde3a958c6654895fb92
|
[
"MIT"
] | 6
|
2018-08-19T13:07:24.000Z
|
2019-04-23T04:18:43.000Z
|
////MIT License
////
////Copyright (c) 2018 Michel Anderson Lüz Teixeira
////
////Permission is hereby granted, free of charge, to any person obtaining a copy
////of this software and associated documentation files (the "Software"), to deal
////in the Software without restriction, including without limitation the rights
////to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
////copies of the Software, and to permit persons to whom the Software is
////furnished to do so, subject to the following conditions:
////
////The above copyright notice and this permission notice shall be included in all
////copies or substantial portions of the Software.
////
////THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
////IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
////FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
////AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
////LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
////OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
////SOFTWARE.
import UIKit
/// A simple "try again" control: a full-width button with an optional custom
/// title that forwards taps to the `didTap` closure.
class MLRetryButton: UIView {
    /// Invoked whenever the button is tapped.
    var didTap: (() -> Void)?
    /// Height constraint exposed so the owner can grow/collapse the view.
    var heightLayoutConstraint: NSLayoutConstraint?

    var button: UIButton = {
        let retry = UIButton(type: .custom)
        retry.setImage(UIImage(named: "refresh"), for: .normal)
        retry.setTitle("TRY AGAIN", for: .normal)
        retry.setTitleColor(.black, for: .normal)
        retry.translatesAutoresizingMaskIntoConstraints = false
        return retry
    }()

    init(text: String? = "TRY AGAIN") {
        super.init(frame: .zero)
        if let customTitle = text {
            button.setTitle(customTitle, for: .normal)
        }
        button.addTarget(self, action: #selector(tapAction), for: .touchUpInside)
        setupViewConfiguration()
    }

    @objc func tapAction() {
        didTap?()
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
extension MLRetryButton: ViewConfiguration {
    /// Pins the button to all four edges and installs the (initially 1pt)
    /// height constraint the owner can later adjust.
    func setupConstraints() {
        let height = heightAnchor.constraint(equalToConstant: 1)
        heightLayoutConstraint = height
        NSLayoutConstraint.activate([
            height,
            button.leadingAnchor.constraint(equalTo: leadingAnchor),
            button.trailingAnchor.constraint(equalTo: trailingAnchor),
            button.topAnchor.constraint(equalTo: topAnchor),
            button.bottomAnchor.constraint(equalTo: bottomAnchor)
        ])
    }

    func buildViewHierarchy() {
        addSubview(button)
    }

    func configureViews() {
        translatesAutoresizingMaskIntoConstraints = false
        button.clipsToBounds = true
    }
}
| 39.859155
| 94
| 0.695053
|
0accb34dc116c16faac7266463f6c9dd8d1b9e4d
| 1,628
|
cs
|
C#
|
src/Magicallity.Client/UI/CharacterEditor/Items/ExitButton.cs
|
NinjaMuffin08/Magicallity-public-source
|
1d07eab6386d623306fb2fa535568c2e15988b19
|
[
"MIT"
] | 3
|
2019-12-25T10:28:01.000Z
|
2022-01-15T16:30:16.000Z
|
src/Magicallity.Client/UI/CharacterEditor/Items/ExitButton.cs
|
NinjaMuffin08/Magicallity-public-source
|
1d07eab6386d623306fb2fa535568c2e15988b19
|
[
"MIT"
] | null | null | null |
src/Magicallity.Client/UI/CharacterEditor/Items/ExitButton.cs
|
NinjaMuffin08/Magicallity-public-source
|
1d07eab6386d623306fb2fa535568c2e15988b19
|
[
"MIT"
] | 1
|
2020-01-11T13:46:36.000Z
|
2020-01-11T13:46:36.000Z
|
using CitizenFX.Core;
using CitizenFX.Core.Native;
using Magicallity.Client.UI;
using Magicallity.Shared;
using MenuFramework;
using Newtonsoft.Json;
namespace Magicallity.Client.Menus.CharacterEditor.MainMenu
{
internal class ExitButton : MenuItemStandard
{
private CharacterEditorMenu Root;
private MenuModel Prompt;
public ExitButton( CharacterEditorMenu root ) {
Root = root;
Title = "Leave without saving";
OnActivate = OpenExitPrompt;
Prompt = new MenuModel { headerTitle = "Leave without saving?", statusTitle = "All changes will be lost." };
Prompt.menuItems.Add( new MenuItemStandard { Title = "Yes", OnActivate = ExitWithoutSaving } );
Prompt.menuItems.Add( new MenuItemStandard { Title = "No", OnActivate = AbortExit } );
}
public override void OnTick( long frameCount, int frameTime, long gameTimer ) {
base.OnTick( frameCount, frameTime, gameTimer );
if( Root.Observer.CurrentMenu == Prompt ) {
if( Game.IsDisabledControlJustReleased( 0, Control.FrontendCancel ) ) {
AbortExit( this );
}
}
}
private void OpenExitPrompt( MenuItemStandard m ) {
Root.Observer.OpenMenu( Prompt );
}
private void ExitWithoutSaving( MenuItemStandard m ) {
//Log.ToChat( "CharacterEditorMenu ExitWithoutSaving" );
Root.cleanCloseMenu();
var pedData = JsonConvert.DeserializeObject<PedData>(Client.Instance.Instances.Session.GetPlayer(Game.Player).GetGlobalData("Character.SkinData", ""));
CharacterEditorMenu.handleSkinCreate(pedData);
}
private void AbortExit( MenuItemStandard m ) {
Root.Observer.CloseMenu();
}
}
}
| 31.307692
| 157
| 0.724201
|
a43dc0727584a85fae53c115ab5527c4fc5a828a
| 504
|
h
|
C
|
CGMatrix/CGMatrix.h
|
CGDevHusky92/CGMatrix
|
14b5bbd75be51ede9cce773800862cd9d4282930
|
[
"MIT"
] | null | null | null |
CGMatrix/CGMatrix.h
|
CGDevHusky92/CGMatrix
|
14b5bbd75be51ede9cce773800862cd9d4282930
|
[
"MIT"
] | null | null | null |
CGMatrix/CGMatrix.h
|
CGDevHusky92/CGMatrix
|
14b5bbd75be51ede9cce773800862cd9d4282930
|
[
"MIT"
] | null | null | null |
//
// CGMatrix.h
// CGMatrix
//
// Created by Charles Gorectke on 11/12/14.
// Copyright (c) 2014 Revision Works, LLC. All rights reserved.
//
#import <UIKit/UIKit.h>
//! Project version number for CGMatrix.
FOUNDATION_EXPORT double CGMatrixVersionNumber;
//! Project version string for CGMatrix.
FOUNDATION_EXPORT const unsigned char CGMatrixVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <CGMatrix/PublicHeader.h>
| 25.2
| 133
| 0.755952
|
8e8a41fc83c43be4894ac33dced5d7669a873c00
| 936
|
h
|
C
|
modules/user/poker.h
|
ksb3214/Flump
|
7d2a418e7888d75ddd0c2a8ce3474b82988e1a4b
|
[
"BSD-2-Clause"
] | null | null | null |
modules/user/poker.h
|
ksb3214/Flump
|
7d2a418e7888d75ddd0c2a8ce3474b82988e1a4b
|
[
"BSD-2-Clause"
] | null | null | null |
modules/user/poker.h
|
ksb3214/Flump
|
7d2a418e7888d75ddd0c2a8ce3474b82988e1a4b
|
[
"BSD-2-Clause"
] | null | null | null |
#ifndef _POKER_H_
#define _POKER_H_
struct cards_s {
char value;
char name[3];
char suit;
};
struct cards_s cards[] = {
{ 1,"A",'c' },
{ 1,"A",'s' },
{ 1,"A",'d' },
{ 1,"A",'h' },
{ 2,"2",'c' },
{ 2,"2",'s' },
{ 2,"2",'h' },
{ 2,"2",'d' },
{ 3,"3",'c' },
{ 3,"3",'s' },
{ 3,"3",'h' },
{ 3,"3",'d' },
{ 4,"4",'c' },
{ 4,"4",'s' },
{ 4,"4",'h' },
{ 4,"4",'d' },
{ 5,"5",'c' },
{ 5,"5",'s' },
{ 5,"5",'h' },
{ 5,"5",'d' },
{ 6,"6",'c' },
{ 6,"6",'s' },
{ 6,"6",'h' },
{ 6,"6",'d' },
{ 7,"7",'c' },
{ 7,"7",'s' },
{ 7,"7",'h' },
{ 7,"7",'d' },
{ 8,"8",'c' },
{ 8,"8",'s' },
{ 8,"8",'h' },
{ 8,"8",'d' },
{ 9,"9",'c' },
{ 9,"9",'s' },
{ 9,"9",'h' },
{ 9,"9",'d' },
{ 10,"10",'c' },
{ 10,"10",'s' },
{ 10,"10",'h' },
{ 10,"10",'d' },
{ 11,"J",'c' },
{ 11,"J",'s' },
{ 11,"J",'h' },
{ 11,"J",'d' },
{ 12,"Q",'c' },
{ 12,"Q",'s' },
{ 12,"Q",'h' },
{ 12,"Q",'d' },
{ 13,"K",'c' },
{ 13,"K",'s' },
{ 13,"K",'h' },
{ 13,"K",'d' }
};
#endif
| 13.970149
| 26
| 0.286325
|
5e546699488d902e8ac56184e9b2b5d9bc6287ec
| 2,945
|
rb
|
Ruby
|
app/models/transfer_transaction.rb
|
scottohara/loot
|
5e225fba1b3e297149e5c42641370db629aedecd
|
[
"MIT"
] | 2
|
2016-04-06T07:21:57.000Z
|
2018-03-03T01:26:41.000Z
|
app/models/transfer_transaction.rb
|
scottohara/loot
|
5e225fba1b3e297149e5c42641370db629aedecd
|
[
"MIT"
] | 130
|
2015-01-01T10:38:11.000Z
|
2022-02-11T04:47:18.000Z
|
app/models/transfer_transaction.rb
|
scottohara/loot
|
5e225fba1b3e297149e5c42641370db629aedecd
|
[
"MIT"
] | 5
|
2017-09-19T04:18:04.000Z
|
2020-10-22T12:21:29.000Z
|
# Copyright (c) 2016 Scott O'Hara, oharagroup.net
# frozen_string_literal: true
# Transfer transaction
class TransferTransaction < PayeeCashTransaction
validate :validate_account_uniqueness
has_one :source_transaction_account, -> { where direction: 'outflow' }, class_name: 'TransactionAccount', foreign_key: 'transaction_id', dependent: :destroy
has_one :source_account, class_name: 'Account', through: :source_transaction_account, source: :account
has_one :destination_transaction_account, -> { where direction: 'inflow' }, class_name: 'TransactionAccount', foreign_key: 'transaction_id', dependent: :destroy
has_one :destination_account, class_name: 'Account', through: :destination_transaction_account, source: :account
after_initialize do |t|
t.transaction_type = 'Transfer'
end
class << self
def create_from_json(json)
source = ::Account.find json['primary_account']['id']
destination = ::Account.find json['account']['id']
source_status = json['status']
destination_status = json['related_status']
source, destination, source_status, destination_status = destination, source, destination_status, source_status if json['direction'].eql? 'inflow'
s = super
s.build_source_transaction_account(direction: 'outflow', status: source_status).account = source
s.build_destination_transaction_account(direction: 'inflow', status: destination_status).account = destination
s.save!
s.as_json direction: json['direction']
end
def update_from_json(json)
s = includes(:header, :source_account, :destination_account).find json[:id]
s.update_from_json json
s.as_json direction: json['direction']
end
end
def validate_account_uniqueness
errors.add :base, "Source and destination account can't be the same" if (source_transaction_account || destination_transaction_account) && source_transaction_account.account.eql?(destination_transaction_account.account)
end
def update_from_json(json)
source = ::Account.find json['primary_account']['id']
destination = ::Account.find json['account']['id']
source, destination = destination, source if json['direction'].eql? 'inflow'
super
self.source_account = source
self.destination_account = destination
save!
end
def as_json(options = {})
primary_account = source_account
other_account = destination_account
category_direction = 'To'
status = source_transaction_account.status
related_status = destination_transaction_account.status
primary_account, other_account, category_direction, status, related_status = other_account, primary_account, 'From', related_status, status if options[:direction].eql? 'inflow'
super.merge(
primary_account: primary_account.as_json,
category: {
id: "Transfer#{category_direction}",
name: "Transfer #{category_direction}"
},
account: other_account.as_json,
direction: options[:direction],
status: status,
related_status: related_status
)
end
end
| 38.75
| 221
| 0.768421
|
7dafcf40a7b152c6ac5c5874e42b5489a0420c44
| 1,545
|
rs
|
Rust
|
pgx/src/nodes/mod.rs
|
ethanpailes/pgx
|
886d3b62da0fb0ccee9a7b208cb08532fb30672c
|
[
"MIT"
] | null | null | null |
pgx/src/nodes/mod.rs
|
ethanpailes/pgx
|
886d3b62da0fb0ccee9a7b208cb08532fb30672c
|
[
"MIT"
] | null | null | null |
pgx/src/nodes/mod.rs
|
ethanpailes/pgx
|
886d3b62da0fb0ccee9a7b208cb08532fb30672c
|
[
"MIT"
] | null | null | null |
// Copyright 2020 ZomboDB, LLC <zombodb@gmail.com>. All rights reserved. Use of this source code is
// governed by the MIT license that can be found in the LICENSE file.
//! Helper functions and such for Postgres' various query tree `Node`s
#[cfg(feature = "pg10")]
mod pg10;
#[cfg(feature = "pg11")]
mod pg11;
#[cfg(feature = "pg12")]
mod pg12;
#[cfg(feature = "pg10")]
pub use pg10::*;
#[cfg(feature = "pg11")]
pub use pg11::*;
#[cfg(feature = "pg12")]
pub use pg12::*;
use crate::{pg_sys, PgBox};
/// #define IsA(nodeptr,_type_) (nodeTag(nodeptr) == T_##_type_)
#[allow(clippy::not_unsafe_ptr_arg_deref)] // ok b/c we check that nodeptr isn't null
#[inline]
pub fn is_a(nodeptr: *mut pg_sys::Node, tag: pg_sys::NodeTag) -> bool {
!nodeptr.is_null() && unsafe { nodeptr.as_ref().unwrap().type_ == tag }
}
pub fn node_to_string<'a>(nodeptr: *mut pg_sys::Node) -> Option<&'a str> {
if nodeptr.is_null() {
None
} else {
let string = unsafe { pg_sys::nodeToString(nodeptr as crate::void_ptr) };
if string.is_null() {
None
} else {
Some(
unsafe { std::ffi::CStr::from_ptr(string) }
.to_str()
.expect("unable to convert Node into a &str"),
)
}
}
}
impl PgNode {
pub fn is<T>(self, boxed: PgBox<T>) -> bool {
let node = boxed.as_ptr() as *mut pg_sys::Node;
let me = self as u32;
!node.is_null() && unsafe { node.as_ref() }.unwrap().type_ == me
}
}
| 27.105263
| 99
| 0.580583
|
ea9c567a1b514efdf72bb2d3364d1da8d2f3ec57
| 1,209
|
go
|
Go
|
election/example/main.go
|
fourstring/sheetfs
|
d1e70c0a2c843464ab795125ae5d2437c1c549fd
|
[
"MIT"
] | 1
|
2021-06-29T16:22:59.000Z
|
2021-06-29T16:22:59.000Z
|
election/example/main.go
|
fourstring/sheetfs
|
d1e70c0a2c843464ab795125ae5d2437c1c549fd
|
[
"MIT"
] | null | null | null |
election/example/main.go
|
fourstring/sheetfs
|
d1e70c0a2c843464ab795125ae5d2437c1c549fd
|
[
"MIT"
] | null | null | null |
package main
import (
"flag"
"fmt"
"github.com/fourstring/sheetfs/election"
"log"
"time"
)
var electionZnode = "/test_election"
var electionPrefix = "4da1fce7-d3f8-42dd-965d-4c3311661202-n_"
var electionAck = "/test_election_ack"
var electionServers = []string{
"127.0.0.1:2181",
"127.0.0.1:2182",
"127.0.0.1:2183",
}
var id = flag.String("i", "", "ID of proposer")
func main() {
flag.Parse()
e, err := election.NewElector(electionServers, 1*time.Second, electionZnode, electionPrefix, electionAck)
proposal, err := e.CreateProposal()
if err != nil {
log.Fatal(err)
}
fmt.Printf("%s: My proposal is %s\n", *id, proposal)
for {
success, watch, notify, err := e.TryBeLeader()
if err != nil {
log.Fatal(err)
}
if success {
break
}
fmt.Printf("%s: I'm secondary and watching %s\n", *id, watch)
done := false
for !done {
select {
case <-notify:
done = true
default:
/*
Do works of a secondary node here.
*/
}
}
}
/*
MUST complete all preparation required to serve requests before AckLeader!
*/
fmt.Printf("%s: I'm primary!\n", *id)
err = e.AckLeader(*id)
if err != nil {
log.Fatal(err)
}
for {
time.Sleep(1 * time.Minute)
}
}
| 19.5
| 106
| 0.634409
|
ee553fee53987ed41310503036ac4da7fcd1a553
| 3,497
|
ps1
|
PowerShell
|
VaporShell/Public/Resource Property Types/Add-VSEMRInstanceFleetConfigEbsBlockDeviceConfig.ps1
|
sheldonhull/VaporShell
|
e6a29672ce84b461e4f8d6058a52b83cbfaf3c5c
|
[
"Apache-2.0"
] | 35
|
2017-08-22T23:16:27.000Z
|
2020-02-13T18:26:47.000Z
|
VaporShell/Public/Resource Property Types/Add-VSEMRInstanceFleetConfigEbsBlockDeviceConfig.ps1
|
sheldonhull/VaporShell
|
e6a29672ce84b461e4f8d6058a52b83cbfaf3c5c
|
[
"Apache-2.0"
] | 31
|
2017-08-29T03:27:32.000Z
|
2020-03-04T22:02:20.000Z
|
VaporShell/Public/Resource Property Types/Add-VSEMRInstanceFleetConfigEbsBlockDeviceConfig.ps1
|
sheldonhull/VaporShell
|
e6a29672ce84b461e4f8d6058a52b83cbfaf3c5c
|
[
"Apache-2.0"
] | 6
|
2020-04-21T18:29:31.000Z
|
2021-12-24T11:01:08.000Z
|
function Add-VSEMRInstanceFleetConfigEbsBlockDeviceConfig {
<#
.SYNOPSIS
Adds an AWS::EMR::InstanceFleetConfig.EbsBlockDeviceConfig resource property to the template. EbsBlockDeviceConfig is a subproperty of the EbsConfiguration property type. EbsBlockDeviceConfig defines the number and type of EBS volumes to associate with all EC2 instances in an EMR cluster.
.DESCRIPTION
Adds an AWS::EMR::InstanceFleetConfig.EbsBlockDeviceConfig resource property to the template.
EbsBlockDeviceConfig is a subproperty of the EbsConfiguration property type. EbsBlockDeviceConfig defines the number and type of EBS volumes to associate with all EC2 instances in an EMR cluster.
.LINK
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancefleetconfig-ebsblockdeviceconfig.html
.PARAMETER VolumeSpecification
EBS volume specifications such as volume type, IOPS, and size GiB that will be requested for the EBS volume attached to an EC2 instance in the cluster.
Documentation: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancefleetconfig-ebsblockdeviceconfig.html#cfn-elasticmapreduce-instancefleetconfig-ebsblockdeviceconfig-volumespecification
Type: VolumeSpecification
UpdateType: Immutable
.PARAMETER VolumesPerInstance
Number of EBS volumes with a specific volume configuration that will be associated with every instance in the instance group
Documentation: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancefleetconfig-ebsblockdeviceconfig.html#cfn-elasticmapreduce-instancefleetconfig-ebsblockdeviceconfig-volumesperinstance
PrimitiveType: Integer
UpdateType: Immutable
.FUNCTIONALITY
Vaporshell
#>
[OutputType('Vaporshell.Resource.EMR.InstanceFleetConfig.EbsBlockDeviceConfig')]
[cmdletbinding()]
Param
(
[parameter(Mandatory = $true)]
$VolumeSpecification,
[parameter(Mandatory = $false)]
[ValidateScript( {
$allowedTypes = "System.Int32","Vaporshell.Function"
if ([string]$($_.PSTypeNames) -match "($(($allowedTypes|ForEach-Object{[RegEx]::Escape($_)}) -join '|'))") {
$true
}
else {
$PSCmdlet.ThrowTerminatingError((New-VSError -String "This parameter only accepts the following types: $($allowedTypes -join ", "). The current types of the value are: $($_.PSTypeNames -join ", ")."))
}
})]
$VolumesPerInstance
)
Begin {
$obj = [PSCustomObject]@{}
$commonParams = @('Verbose','Debug','ErrorAction','WarningAction','InformationAction','ErrorVariable','WarningVariable','InformationVariable','OutVariable','OutBuffer','PipelineVariable')
}
Process {
foreach ($key in $PSBoundParameters.Keys | Where-Object {$commonParams -notcontains $_}) {
switch ($key) {
Default {
$obj | Add-Member -MemberType NoteProperty -Name $key -Value $PSBoundParameters.$key
}
}
}
}
End {
$obj | Add-ObjectDetail -TypeName 'Vaporshell.Resource.EMR.InstanceFleetConfig.EbsBlockDeviceConfig'
Write-Verbose "Resulting JSON from $($MyInvocation.MyCommand): `n`n$($obj | ConvertTo-Json -Depth 5)`n"
}
}
| 52.984848
| 297
| 0.698599
|
40ef24c9c9bc37beaedf9adaf2cf8b6d9b867ef6
| 226
|
rb
|
Ruby
|
app/models/project.rb
|
abratashov/TManager
|
5b77b71eb6f9ef90e1baba4f68cdb946760d6b27
|
[
"MIT"
] | null | null | null |
app/models/project.rb
|
abratashov/TManager
|
5b77b71eb6f9ef90e1baba4f68cdb946760d6b27
|
[
"MIT"
] | 7
|
2021-02-28T14:56:33.000Z
|
2022-03-30T21:56:16.000Z
|
app/models/project.rb
|
abratashov/TManager
|
5b77b71eb6f9ef90e1baba4f68cdb946760d6b27
|
[
"MIT"
] | null | null | null |
class Project < ApplicationRecord
FIELDS = [:deadline, :done, :name, :position].freeze
belongs_to :user
has_many :tasks, dependent: :destroy, inverse_of: :project
validates :name, presence: true, uniqueness: true
end
| 28.25
| 60
| 0.738938
|
cf315031717dde127cbde07d98e63d54dc1ebe13
| 325
|
php
|
PHP
|
src/PhpValueObjects/Identity/Exception/InvalidMd5Exception.php
|
pawellewandowski/php-value-objects
|
dd73c0c5983c9d05f230d0890178160d2d0fe358
|
[
"MIT"
] | null | null | null |
src/PhpValueObjects/Identity/Exception/InvalidMd5Exception.php
|
pawellewandowski/php-value-objects
|
dd73c0c5983c9d05f230d0890178160d2d0fe358
|
[
"MIT"
] | null | null | null |
src/PhpValueObjects/Identity/Exception/InvalidMd5Exception.php
|
pawellewandowski/php-value-objects
|
dd73c0c5983c9d05f230d0890178160d2d0fe358
|
[
"MIT"
] | null | null | null |
<?php
namespace PhpValueObjects\Identity\Exception;
class InvalidMd5Exception extends \Exception
{
/**
* InvalidMd5Exception constructor.
*
* @param string $value
*/
public function __construct($value)
{
parent::__construct(sprintf('"%s" is not a valid md5 hash.', $value));
}
}
| 19.117647
| 78
| 0.64
|
aa2efb45dba8c43d6a9e6a6d09e0547ff7253562
| 884
|
ps1
|
PowerShell
|
src/dashboard/stage.ps1
|
ikhramts/maesure-oss
|
af744f03891de82ae02a06037a723045ebc38218
|
[
"BSD-3-Clause"
] | 2
|
2021-02-07T11:20:09.000Z
|
2021-02-07T12:07:18.000Z
|
src/dashboard/stage.ps1
|
ikhramts/maesure-oss
|
af744f03891de82ae02a06037a723045ebc38218
|
[
"BSD-3-Clause"
] | null | null | null |
src/dashboard/stage.ps1
|
ikhramts/maesure-oss
|
af744f03891de82ae02a06037a723045ebc38218
|
[
"BSD-3-Clause"
] | null | null | null |
$ErrorActionPreference = "Stop"
function StopOnError() {
if ($LastExitCode -ne 0) {
[Environment]::Exit(1)
}
}
$scriptPath = $PSScriptRoot
$distPath = "$scriptPath\build"
# Rebuild.
if (Test-Path $distPath) {
Remove-Item -Recurse -Force $distPath
}
#iex "npm install"
iex "npm run build:staging"
# Deploy.
# We will do a gradual deployment and will test every gsutil
# funcitonality before committing to replacing the production files.
$bucket = "gs://taposcope-web-prod"
$publishDir = "$bucket/dashboard-staging"
$initialUploadDir = "$bucket/dashboard-staging-initial-upload"
iex "gsutil -m rsync -d -r $distPath $initialUploadDir"
# Fix mime type for woff2 files
iex "gsutil -m setmeta -h ""Content-Type:font/woff2"" $initialUploadDir/**.woff2"
iex "gsutil -m cp -r $initialUploadDir $publishDir"
iex "gsutil -m rsync -d -r $initialUploadDir $publishDir"
| 26.787879
| 81
| 0.719457
|
5de06650f8ac14307b254df239aeb0ef406a1352
| 378
|
cpp
|
C++
|
codeforces/Div2/CS204/Lab1/q2.cpp
|
jeevanpuchakay/a2oj
|
f867e9b2ced6619be3ca6b1a1a1838107322782d
|
[
"MIT"
] | null | null | null |
codeforces/Div2/CS204/Lab1/q2.cpp
|
jeevanpuchakay/a2oj
|
f867e9b2ced6619be3ca6b1a1a1838107322782d
|
[
"MIT"
] | null | null | null |
codeforces/Div2/CS204/Lab1/q2.cpp
|
jeevanpuchakay/a2oj
|
f867e9b2ced6619be3ca6b1a1a1838107322782d
|
[
"MIT"
] | null | null | null |
#include <bits/stdc++.h>
using namespace std;
struct city
{
/* data */
string city;
int x,y;
};
int noOfCities=0;
city DbUsingarray[5000];
void insertRecordInDb1(string s,int x,int y){
DbUsingarray[++noOfCities].city=s;
DbUsingarray[noOfCities].x=x;
DbUsingarray[noOfCities].y=y;
}
void deleteCityString(string s){
}
int main(){
}
| 13.034483
| 45
| 0.637566
|
b8cfc4832413f774e231a358a83c7075e6b4ed78
| 3,386
|
c
|
C
|
benchmarks/tkengo-highway/src/util.c
|
pointhi/benchmarks
|
68899480c0fc8d361079a81edc6d816d5f17d58e
|
[
"UPL-1.0"
] | 276
|
2015-09-15T15:09:49.000Z
|
2022-01-30T08:47:44.000Z
|
benchmarks/tkengo-highway/src/util.c
|
pointhi/benchmarks
|
68899480c0fc8d361079a81edc6d816d5f17d58e
|
[
"UPL-1.0"
] | 23
|
2015-10-20T02:24:06.000Z
|
2017-01-14T13:38:11.000Z
|
benchmarks/tkengo-highway/src/util.c
|
pointhi/benchmarks
|
68899480c0fc8d361079a81edc6d816d5f17d58e
|
[
"UPL-1.0"
] | 14
|
2015-10-19T17:10:24.000Z
|
2020-11-09T01:52:13.000Z
|
#include <string.h>
#include <ctype.h>
#include <iconv.h>
#include <unistd.h>
#include "common.h"
#include "util.h"
static iconv_t euc_ic;
static iconv_t sjis_ic;
static iconv_t utf8_euc_ic;
static iconv_t utf8_sjis_ic;
// The word separation character list.
static char word_sp[256] = {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
// ! " # $ % & ' ( ) * + , - . /
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
// 0 1 2 3 4 5 6 7 8 9 : ; < = > ?
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1,
// @ A B C D E F G H I J K L M N O
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// P Q R S T U V W X Y Z [ ] ^ _
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0,
// ` a b c d e f g h i j k l m n o
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// p q r s t u v w x y z { | } ~
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
// Non-ASCII character
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/**
* Set the fd resource limit. This method is used because hw will open too many files while
* searching in very large directory, so it may occur an error when open(2), opendir(3), etc... was
* called if too many files was opened. To avoid that, we release the limit.
*/
bool set_fd_rlimit(rlim_t limit)
{
#ifndef _WIN32
struct rlimit r;
getrlimit(RLIMIT_NOFILE, &r);
if (limit < r.rlim_max) {
r.rlim_cur = limit;
return setrlimit(RLIMIT_NOFILE, &r) == 0;
}
#endif
return false;
}
/**
* Check whether the passed character is a word separation character.
*/
bool is_word_sp(char c)
{
return word_sp[(unsigned char)c];
}
char *trim(char *str)
{
while (isspace(*str)) str++;
if (*str == '\0') {
return str;
}
char *end = str + strlen(str) - 1;
while (end > str && isspace(*end)) end--;
*(end + 1) = '\0';
return str;
}
void init_iconv()
{
euc_ic = iconv_open("EUC-JP", "UTF-8");
sjis_ic = iconv_open("SHIFT_JIS", "UTF-8");
utf8_euc_ic = iconv_open("UTF-8", "EUC-JP");
utf8_sjis_ic = iconv_open("UTF-8", "SHIFT_JIS");
}
void close_iconv()
{
iconv_close(euc_ic);
iconv_close(sjis_ic);
iconv_close(utf8_euc_ic);
iconv_close(utf8_sjis_ic);
}
void to_euc(char *in, size_t nin, char *out, size_t nout)
{
char *ptr_in = in, *ptr_out = out;
iconv(euc_ic, &ptr_in, &nin, &ptr_out, &nout);
}
void to_sjis(char *in, size_t nin, char *out, size_t nout)
{
char *ptr_in = in, *ptr_out = out;
iconv(sjis_ic, &ptr_in, &nin, &ptr_out, &nout);
}
void to_utf8_from_euc(char *in, size_t nin, char *out, size_t nout)
{
char *ptr_in = in, *ptr_out = out;
iconv(utf8_euc_ic, &ptr_in, &nin, &ptr_out, &nout);
}
void to_utf8_from_sjis(char *in, size_t nin, char *out, size_t nout)
{
char *ptr_in = in, *ptr_out = out;
iconv(utf8_sjis_ic, &ptr_in, &nin, &ptr_out, &nout);
}
| 26.661417
| 99
| 0.518311
|
e2986d55da1db1a526426dcaac345e2f678401d5
| 357
|
js
|
JavaScript
|
docs/html/structVkSparseImageFormatProperties2.js
|
Vyraax/VulkanLab
|
45aeaf576979684bc2a384a50aade276e64c601e
|
[
"MIT"
] | 1
|
2021-08-06T11:28:18.000Z
|
2021-08-06T11:28:18.000Z
|
docs/html/structVkSparseImageFormatProperties2.js
|
Vyraax/VulkanLab
|
45aeaf576979684bc2a384a50aade276e64c601e
|
[
"MIT"
] | null | null | null |
docs/html/structVkSparseImageFormatProperties2.js
|
Vyraax/VulkanLab
|
45aeaf576979684bc2a384a50aade276e64c601e
|
[
"MIT"
] | null | null | null |
var structVkSparseImageFormatProperties2 =
[
[ "pNext", "structVkSparseImageFormatProperties2.html#a18df86c2d261eb597fa6e5f1a42d05c4", null ],
[ "properties", "structVkSparseImageFormatProperties2.html#a772872e1b9b625a2a560dc0e8abe928e", null ],
[ "sType", "structVkSparseImageFormatProperties2.html#ab2c1fd1cd8bf6a342a3ad84ec307995a", null ]
];
| 59.5
| 106
| 0.812325
|
729fde4266aab62c11a2bb63035466b0c2358920
| 9,062
|
swift
|
Swift
|
Sources/RealityUI/RUISwitch.swift
|
maxxfrazer/RealityUI
|
512fe6acedccbcffa37b686d810843bebdc4cb1b
|
[
"MIT"
] | 374
|
2020-06-01T01:04:44.000Z
|
2022-03-19T11:52:37.000Z
|
Sources/RealityUI/RUISwitch.swift
|
Reality-Dev/RealityUI
|
512fe6acedccbcffa37b686d810843bebdc4cb1b
|
[
"MIT"
] | null | null | null |
Sources/RealityUI/RUISwitch.swift
|
Reality-Dev/RealityUI
|
512fe6acedccbcffa37b686d810843bebdc4cb1b
|
[
"MIT"
] | 19
|
2020-06-01T15:50:49.000Z
|
2021-11-25T00:59:46.000Z
|
//
// RUISwitch.swift
//
//
// Created by Max Cobb on 5/16/20.
// Copyright © 2020 Max Cobb. All rights reserved.
//
import RealityKit
/// A RealityUI Switch to be added to a RealityKit scene.
public class RUISwitch: Entity, HasSwitch, HasClick {
public var tapAction: (
(HasClick, SIMD3<Float>?) -> Void
)? = { tapthing, _ in
guard let toggleObj = (tapthing as? RUISwitch) else {
return
}
toggleObj.setOn(!toggleObj.isOn)
}
/// Switch's isOn property has changed
public var switchChanged: ((HasSwitch) -> Void)?
/// Creates a RealityUI Switch entity with optional `SwitchComponent`, `RUIComponent` and `changedCallback`.
/// - Parameters:
/// - switchness: Details about the switch to be set when initialized.
/// - RUI: Details about the RealityUI Entity
/// - changedCallback: callback function to receive updates when the switch isOn property changes.
public init(
switchness: SwitchComponent? = nil,
RUI: RUIComponent? = nil,
changedCallback: ((HasSwitch) -> Void)? = nil
) {
super.init()
self.RUI = RUI ?? RUIComponent()
self.switchness = switchness ?? SwitchComponent()
self.ruiOrientation()
self.makeModels()
self.switchChanged = changedCallback
}
/// Create a RUISwitch entity with the default styling.
required public convenience init() {
self.init(switchness: SwitchComponent())
}
}
/// An interface used for all entities that have a toggling option
public protocol HasSwitch: HasRUIMaterials {
/// Switch's isOn property has changed
var switchChanged: ((HasSwitch) -> Void)? { get set }
}
/// A collection of resources that create the visual appearance a RealityUI Switch, `RUISwitch`.
public struct SwitchComponent: Component {
/// A Boolean value that determines the off/on state of the switch. Default to `false`, meaning off.
var isOn: Bool
/// Padding (in meters) between the thumb and the inner capsule of the switch. Default 0.05.
let padding: Float
/// Border (in meters) between the two outer capsules of the switch. No border if set to 0. Default 0.05.
let border: Float
/// Color of the inner capsule when the switch is set to `off`. Default `Material.Color.systemGreen`
let onColor: Material.Color
/// Color of the inner capsule when the switch is set to `on`. Default `Material.Color.lightGray`
let offColor: Material.Color
/// Color of the outer border. Default `Material.Color.black`
let borderColor: Material.Color
/// Color of the thumb. Default white.
let thumbColor: Material.Color
/// Length of the toggle, not customisable for now.
internal let length: Float = 55 / 34
enum UIPart: String {
case thumb
case background
case border
}
/// Creates a SwitchComponent using a list of completely optional parameters.
/// - Parameters:
/// - isOn: A Boolean value that determines the off/on state of the switch. Default to `false`, meaning off.
/// - onColor: Color of the inner capsule when the switch is set to `on`. Default `Material.Color.systemGreen`
/// - offColor: Color of the inner capsule when the switch is set to `off`. Default `Material.Color.lightGray`
/// - padding: Padding (in meters) between the thumb and the inner capsule of the switch. Default 0.05.
/// - border: Border (in meters) between the two outer capsules of the switch. No border if set to 0. Default 0.05.
/// - borderColor: Color of the outer border. Default `Material.Color.black`
/// - thumbColor: Color of the thumb. Default white.
public init(
isOn: Bool = false,
onColor: Material.Color = .systemGreen,
offColor: Material.Color = .lightGray,
padding: Float = 0.05,
border: Float = 0.05,
borderColor: Material.Color = .black,
thumbColor: Material.Color = .white
) {
assert(padding > 0, "Padding must be positive")
assert(border >= 0, "Border must be positive or zero")
self.isOn = isOn
self.padding = padding
self.border = border
self.onColor = onColor
self.offColor = offColor
self.borderColor = borderColor
self.thumbColor = thumbColor
}
/// Creates the SwitchComponent with all default styles, only custom colours.
/// - Parameters:
/// - onColor: Color of the inner capsule when the switch is set to `on`.
/// - offColor: Color of the inner capsule when the switch is set to `off`.
public init(onColor: Material.Color, offColor: Material.Color) {
self.init(isOn: false, onColor: onColor, offColor: offColor)
}
}
public extension HasSwitch {
/// The switch properties that defines the visual appearance and state.
internal(set) var switchness: SwitchComponent {
get {
self.components[SwitchComponent.self] ?? SwitchComponent()}
set {
self.components[SwitchComponent.self] = newValue
}
}
/// Set the switch's current value
/// - Parameters:
/// - isOn: The switch's new state
/// - animated: Should the switch animate to the new state, if an animation is available.
func setOn(_ isOn: Bool, animated: Bool = true) {
if self.isOn == isOn {
return
}
self.isOn = isOn
self.getModel(part: .background)?.model?.materials = self.getMaterials(for: .background)
let thumbTransform = Transform(
scale: .one, rotation: .init(), translation: togglePos
)
let thumbEntity = self.getModel(part: .thumb)
thumbEntity?.stopAllAnimations()
if animated {
thumbEntity?.move(to: thumbTransform, relativeTo: self, duration: 0.3)
} else {
thumbEntity?.transform = thumbTransform
}
self.switchChanged?(self)
}
/// Padding (in meters) between the thumb and the inner capsule of the switch.
/// This cannot yet be altered once the switch has been created
var padding: Float { self.switchness.padding }
/// Border (in meters) between the two outer capsules of the switch.
/// This cannot yet be altered once the switch has been created
var border: Float { self.switchness.border }
/// A Boolean value that determines the off/on state of the switch.
/// To update the value, use `.setOn(:Bool,animated:Bool)`
private(set) var isOn: Bool {
get { self.switchness.isOn }
set { self.switchness.isOn = newValue }
}
/// Color of the outer border. Default `Material.Color.black`
var borderColor: Material.Color { self.switchness.borderColor }
/// Color of the inner capsule when the switch is set to off. Default `Material.Color.systemGreen`
var onColor: Material.Color { self.switchness.onColor }
/// Color of the inner capsule when the switch is set to on. Default `Material.Color.lightGray`
var offColor: Material.Color { self.switchness.offColor }
private var togglePos: SIMD3<Float> {
[(isOn ? -1 : 1) * (self.switchness.length - 1)/2, 0, 0]
}
private var thumbColor: Material.Color {
self.switchness.thumbColor
}
private func getModel(part: SwitchComponent.UIPart) -> ModelEntity? {
return (self as HasRUI).getModel(part: part.rawValue)
}
private func addModel(part: SwitchComponent.UIPart) -> ModelEntity {
return (self as HasRUI).addModel(part: part.rawValue)
}
fileprivate func makeModels() {
let togLen = self.switchness.length
if self.border > 0 {
let borderBg = self.addModel(part: .border)
borderBg.model = ModelComponent(mesh: .generateBox(
size: [togLen + border, 1 + border, 1 + border], cornerRadius: (1 + border) / 2), materials: []
)
borderBg.scale = .init(repeating: -1)
} else if let border = self.getModel(part: .border) {
border.removeFromParent()
}
let bigBg = self.addModel(part: .background)
bigBg.model = ModelComponent(
mesh: .generateBox(size: [togLen, 1, 1], cornerRadius: 0.5), materials: []
)
bigBg.scale = .init(repeating: -1)
let thumb = self.addModel(part: .thumb)
thumb.model = ModelComponent(mesh: .generateSphere(radius: (1 - padding) / 2), materials: [])
thumb.position = togglePos
(self as? HasCollision)?.collision = CollisionComponent(
shapes: [ShapeResource.generateCapsule(height: 2, radius: 0.5)
.offsetBy(rotation: simd_quatf(angle: .pi/2, axis: [0, 0, 1]))
]
)
self.updateMaterials()
}
/// Updates all materials in an entity, this is called internally whenever things change such as
/// the entity responding to light or whether it is enabled.
func updateMaterials() {
self.getModel(part: .border)?.model?.materials = getMaterials(for: .border)
self.getModel(part: .background)?.model?.materials = getMaterials(for: .background)
self.getModel(part: .thumb)?.model?.materials = getMaterials(for: .thumb)
}
internal func getMaterials(
for part: SwitchComponent.UIPart
) -> [Material] {
switch part {
case .background:
return [self.getMaterial(with: self.isOn ? switchness.onColor : switchness.offColor)]
case .border:
return [self.getMaterial(with: switchness.borderColor)]
case .thumb:
return [self.getMaterial(with: switchness.thumbColor)]
}
}
}
| 38.561702
| 119
| 0.686272
|
2a1ca3976f5a956c3006bcb36e3240b1d2375d8c
| 769
|
css
|
CSS
|
sellotape/sellotape_dj/main_app/static/sellotape/user.css
|
itzhaki1234/sellotape
|
326e52d3743ccc35309a96c89a30e1c6a7e77a3b
|
[
"MIT"
] | 6
|
2020-08-11T07:28:35.000Z
|
2020-10-08T03:29:10.000Z
|
sellotape/sellotape_dj/main_app/static/sellotape/user.css
|
itzhaki1234/sellotape
|
326e52d3743ccc35309a96c89a30e1c6a7e77a3b
|
[
"MIT"
] | 60
|
2020-08-14T15:20:10.000Z
|
2020-11-08T09:54:01.000Z
|
sellotape/sellotape_dj/main_app/static/sellotape/user.css
|
itzhaki1234/sellotape
|
326e52d3743ccc35309a96c89a30e1c6a7e77a3b
|
[
"MIT"
] | 10
|
2020-08-11T07:40:39.000Z
|
2021-03-09T11:54:32.000Z
|
.header {
display: flex;
align-items: center;
}
.header a {
color: black
}
h2 {
margin-right: 20px;
text-decoration: underline;
}
.red-dot {
height: 12px;
width: 12px;
background-color: red;
border-radius: 50%;
display: inline-block;
}
.live-link {
color: #2d2d2d;
text-decoration: none;
}
.section {
margin-bottom: 50px;
}
.streams {
margin: 0;
padding: 0;
}
.streams li {
display: flex;
flex-direction: column;
padding: 10px 20px;
transition: box-shadow .2s ease-in-out;
position: relative;
width: 100%;
flex-shrink: 0;
border-radius: 8px;
border: 1px solid #e8f2ff;
background-color: #e8f2ff;
box-shadow: 0 2px 6px 1px #e1e5e8;
margin-bottom: 14px;
}
| 15.078431
| 43
| 0.602081
|
da5ceda6db55226198bcb9d037f41cfdf55f04c3
| 2,020
|
ts
|
TypeScript
|
src/ui/helpers/cookie/use.ts
|
andreigec/ag-common
|
cb83937913a90268425d1e89486dc38a12ae52a7
|
[
"ISC"
] | 1
|
2022-01-03T13:42:09.000Z
|
2022-01-03T13:42:09.000Z
|
src/ui/helpers/cookie/use.ts
|
andreigec/ag-common
|
cb83937913a90268425d1e89486dc38a12ae52a7
|
[
"ISC"
] | 1
|
2022-01-07T11:14:50.000Z
|
2022-01-07T11:14:50.000Z
|
src/ui/helpers/cookie/use.ts
|
andreigec/ag-common
|
cb83937913a90268425d1e89486dc38a12ae52a7
|
[
"ISC"
] | null | null | null |
import { TParse, ReturnType } from './const';
import { getCookieRawWrapper } from './get';
import { setCookieRawWrapper } from './set';
import { SetStateAction, useState } from 'react';
export function useCookie<T>(p: {
defaultValue: T;
name: string;
cookieDocument?: string;
/**
* required for objects. defaults to JSON.parse
*/
parse?: TParse<T>;
/**
* required for objects. defaults to JSON.stringify
*/
stringify?: (v: T) => string;
}): ReturnType<T> {
const parse: TParse<T> = (s) => {
if (!s) {
return p.defaultValue;
}
if (p.parse) {
return p.parse(s);
}
return JSON.parse(s);
};
const stringify = (s: T): string => {
if (p.stringify) {
return p.stringify(s);
}
return JSON.stringify(s);
};
const [cookie, setCookie] = useState<T>(
getCookieRawWrapper({ ...p, parse }) || p.defaultValue,
);
const setState = (valueRaw: SetStateAction<T>) => {
const value = (
valueRaw instanceof Function ? valueRaw(cookie) : valueRaw
) as T;
setCookieRawWrapper({ ...p, stringify, value });
setCookie(value);
};
return [cookie, setState];
}
export const useCookieString = (p: {
/**
* default value. default ""
*/
defaultValue?: string;
name: string;
cookieDocument?: string;
}) =>
useCookie<string>({
...p,
parse: (s) => s || '',
stringify: (s) => s,
defaultValue: p.defaultValue || '',
});
export const useCookieNumber = (p: {
/**
* default value. default undefined
*/
defaultValue?: number | undefined;
name: string;
cookieDocument?: string;
}) =>
useCookie<number | undefined>({
...p,
parse: (s) => (!s ? undefined : Number.parseFloat(s)),
stringify: (s) => (!s ? '' : s.toString()),
defaultValue: p.defaultValue,
});
export const useCookieBoolean = (p: {
defaultValue: boolean;
name: string;
cookieDocument?: string;
}) =>
useCookie({
...p,
parse: (s) => s === 'true',
stringify: (s) => s.toString(),
});
| 21.72043
| 64
| 0.585644
|
4d1b9e4c5c5a5bc824cc6068e57a9406c7a20af6
| 15,455
|
cs
|
C#
|
Microsoft Office Developer Documentation Team/Lync 2013 Start an IM chat and send text in plain text, RTF or HTML text format/[C#]-Lync 2013 Start an IM chat and send text in plain text, RTF or HTML text format/C#/StartFormattedConversation/MainWindow.xaml.cs
|
zzgchina888/msdn-code-gallery-microsoft
|
21cb9b6bc0da3b234c5854ecac449cb3bd261f29
|
[
"MIT"
] | 2
|
2022-01-21T01:40:58.000Z
|
2022-01-21T01:41:10.000Z
|
Microsoft Office Developer Documentation Team/Lync 2013 Start an IM chat and send text in plain text, RTF or HTML text format/[C#]-Lync 2013 Start an IM chat and send text in plain text, RTF or HTML text format/C#/StartFormattedConversation/MainWindow.xaml.cs
|
zzgchina888/msdn-code-gallery-microsoft
|
21cb9b6bc0da3b234c5854ecac449cb3bd261f29
|
[
"MIT"
] | 1
|
2022-03-15T04:21:41.000Z
|
2022-03-15T04:21:41.000Z
|
Microsoft Office Developer Documentation Team/Lync 2013 Start an IM chat and send text in plain text, RTF or HTML text format/[C#]-Lync 2013 Start an IM chat and send text in plain text, RTF or HTML text format/C#/StartFormattedConversation/MainWindow.xaml.cs
|
zzgchina888/msdn-code-gallery-microsoft
|
21cb9b6bc0da3b234c5854ecac449cb3bd261f29
|
[
"MIT"
] | null | null | null |
/*=====================================================================
This file is part of the Microsoft Unified Communications Code Samples.
Copyright (C) 2012 Microsoft Corporation. All rights reserved.
This source code is intended only as a supplement to Microsoft
Development Tools and/or on-line documentation. See these other
materials for detailed information regarding Microsoft code samples.
THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
PARTICULAR PURPOSE.
=====================================================================*/
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Windows;
using System.Windows.Documents;
using Microsoft.Lync.Model;
using Microsoft.Lync.Model.Conversation;
using MessageBox = System.Windows.MessageBox;
namespace StartConversation
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
public partial class MainWindow : Window
{
private enum CheckedRadio
{
GIF = 1,
INK = 2,
HTML = 3,
RTF = 4,
PlainText = 5
}
Microsoft.Lync.Model.LyncClient client = null;
Microsoft.Lync.Model.Extensibility.Automation automation = null;
string RemoteUserUri = "";
private CheckedRadio _CheckedRadio;
public MainWindow()
{
InitializeComponent();
try
{
//Start the conversation
automation = LyncClient.GetAutomation();
client = LyncClient.GetClient();
ConversationManager conversationManager = client.ConversationManager;
conversationManager.ConversationAdded += new EventHandler<ConversationManagerEventArgs>(conversationManager_ConversationAdded);
}
catch (LyncClientException lyncClientException)
{
MessageBox.Show("Failed to connect to Lync.");
Console.Out.WriteLine(lyncClientException);
}
catch (SystemException systemException)
{
if (IsLyncException(systemException))
{
// Log the exception thrown by the Lync Model API.
MessageBox.Show("Failed to connect to Lync.");
Console.WriteLine("Error: " + systemException);
}
else
{
// Rethrow the SystemException which did not come from the Lync Model API.
throw;
}
}
}
/// <summary>
/// Start the conversation with the specified participant using sip address
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btnStartConv_Click(object sender, RoutedEventArgs e)
{
TextRange tr = new TextRange(rtbParticipants.Document.ContentStart, rtbParticipants.Document.ContentEnd);
if (String.IsNullOrEmpty(tr.Text.Trim()))
{
txtErrors.Text = "No participants specified!";
return;
}
String[] participants = tr.Text.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
RemoteUserUri = participants[0];
if (radioGif.IsChecked == true)
{
_CheckedRadio = CheckedRadio.GIF;
}
if (radioHtml.IsChecked == true)
{
_CheckedRadio = CheckedRadio.HTML;
}
if (radioRtf.IsChecked == true)
{
_CheckedRadio = CheckedRadio.RTF;
}
if (radioText.IsChecked == true)
{
_CheckedRadio = CheckedRadio.PlainText;
}
if (radioInk.IsChecked == true)
{
_CheckedRadio = CheckedRadio.INK;
}
Conversation conversation = client.ConversationManager.AddConversation();
}
/*
/// <summary>
/// Start the conversation with the specified participant using sip address
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btnStartConv_Click(object sender, RoutedEventArgs e)
{
TextRange tr = new TextRange(rtbParticipants.Document.ContentStart, rtbParticipants.Document.ContentEnd);
if (String.IsNullOrEmpty(tr.Text.Trim()))
{
txtErrors.Text = "No participants specified!";
return;
}
String[] participants = tr.Text.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
KeyValuePair<AutomationModalitySettings, object>[] contextData = new KeyValuePair<AutomationModalitySettings, object>[]
{
new KeyValuePair<AutomationModalitySettings, object>(AutomationModalitySettings.SendFirstInstantMessageImmediately, true),
new KeyValuePair<AutomationModalitySettings, object>(AutomationModalitySettings.FirstInstantMessage, "Hello World"),
new KeyValuePair<AutomationModalitySettings, object>(AutomationModalitySettings.Subject, "Welcome To Lync Conversation Window"),
};
IAsyncResult ar = automation.BeginStartConversation(AutomationModalities.InstantMessage, participants, contextData, null, null);
automation.EndStartConversation(ar);
}
*/
/// <summary>
/// Async callback method invoked by InstantMessageModality instance when SendMessage completes
/// </summary>
/// <param name="_asyncOperation">IAsyncResult The operation result</param>
///
private void SendMessageCallback(IAsyncResult ar)
{
InstantMessageModality imModality = (InstantMessageModality)ar.AsyncState;
try
{
imModality.EndSendMessage(ar);
}
catch (LyncClientException lce)
{
MessageBox.Show("Lync Client Exception on EndSendMessage " + lce.Message);
}
}
void conversationManager_ConversationAdded(object sender, ConversationManagerEventArgs e)
{
e.Conversation.ParticipantAdded += new EventHandler<ParticipantCollectionChangedEventArgs>(Conversation_ParticipantAdded);
e.Conversation.AddParticipant(client.ContactManager.GetContactByUri(RemoteUserUri));
}
void Conversation_ParticipantAdded(object sender, ParticipantCollectionChangedEventArgs e)
{
// add event handlers for modalities of participants other than self participant:
if (e.Participant.IsSelf == false)
{
if (((Conversation)sender).Modalities.ContainsKey(ModalityTypes.InstantMessage))
{
((InstantMessageModality)e.Participant.Modalities[ModalityTypes.InstantMessage]).InstantMessageReceived += new EventHandler<MessageSentEventArgs>(ConversationTest_InstantMessageReceived);
((InstantMessageModality)e.Participant.Modalities[ModalityTypes.InstantMessage]).IsTypingChanged += new EventHandler<IsTypingChangedEventArgs>(ConversationTest_IsTypingChanged);
}
Conversation conversation = (Conversation)sender;
InstantMessageModality imModality = (InstantMessageModality)conversation.Modalities[ModalityTypes.InstantMessage];
string messageContent = "Hello World";
IDictionary<InstantMessageContentType, string> formattedMessage = this.GenerateFormattedMessage(messageContent);
try
{
if (imModality.CanInvoke(ModalityAction.SendInstantMessage))
{
IAsyncResult asyncResult = imModality.BeginSendMessage(
formattedMessage,
SendMessageCallback,
imModality);
}
}
catch(LyncClientException ex)
{
txtErrors.Text = ex.Message;
}
}
}
private IDictionary<InstantMessageContentType, string> GenerateFormattedMessage(string message)
{
string formattedMessage = string.Empty;
IDictionary<InstantMessageContentType, string> textMessage = new Dictionary<InstantMessageContentType, string>();
switch (_CheckedRadio)
{
case CheckedRadio.GIF:
formattedMessage = "base64:R0lGODlhjQA1AHAAACH5BAEAAP8ALAAAAACNADUAhwAAAAAAMwAAZgAAmQAAzAAA/wArAAArMwArZgArmQArzAAr/wBVAABVMwBVZgBVmQBVzABV/wCAAACAMwCAZgCAmQCAzACA/wCqAACqMwCqZgCqmQCqzACq/wDVAADVMwDVZgDVmQDVzADV/wD/AAD/MwD/ZgD/mQD/zAD//zMAADMAMzMAZjMAmTMAzDMA/zMrADMrMzMrZjMrmTMrzDMr/zNVADNVMzNVZjNVmTNVzDNV/zOAADOAMzOAZjOAmTOAzDOA/zOqADOqMzOqZjOqmTOqzDOq/zPVADPVMzPVZjPVmTPVzDPV/zP/ADP/MzP/ZjP/mTP/zDP//2YAAGYAM2YAZmYAmWYAzGYA/2YrAGYrM2YrZmYrmWYrzGYr/2ZVAGZVM2ZVZmZVmWZVzGZV/2aAAGaAM2aAZmaAmWaAzGaA/2aqAGaqM2aqZmaqmWaqzGaq/2bVAGbVM2bVZmbVmWbVzGbV/2b/AGb/M2b/Zmb/mWb/zGb//5kAAJkAM5kAZpkAmZkAzJkA/5krAJkrM5krZpkrmZkrzJkr/5lVAJlVM5lVZplVmZlVzJlV/5mAAJmAM5mAZpmAmZmAzJmA/5mqAJmqM5mqZpmqmZmqzJmq/5nVAJnVM5nVZpnVmZnVzJnV/5n/AJn/M5n/Zpn/mZn/zJn//8wAAMwAM8wAZswAmcwAzMwA/8wrAMwrM8wrZswrmcwrzMwr/8xVAMxVM8xVZsxVmcxVzMxV/8yAAMyAM8yAZsyAmcyAzMyA/8yqAMyqM8yqZsyqmcyqzMyq/8zVAMzVM8zVZszVmczVzMzV/8z/AMz/M8z/Zsz/mcz/zMz///8AAP8AM/8AZv8Amf8AzP8A//8rAP8rM/8rZv8rmf8rzP8r//9VAP9VM/9VZv9Vmf9VzP9V//+AAP+AM/+AZv+Amf+AzP+A//+qAP+qM/+qZv+qmf+qzP+q///VAP/VM//VZv/Vmf/VzP/V////AP//M///Zv//mf//zP///wAAAAAAAAAAAAAAAAj/APcJHEiwoMGDCBMqXMiQoTJiDyM2nEixosWLGBVCRDNJzI0YMQCIHEkSQIyKEDOqPPhw5cqHmTqCLEmzJkllE0vGEJPJ5UU0IQGg8ekwU1CbSJOSnJgpKU+iCZWJobkPJ9SCxKYq3Qry48dJHNH0pLhV6FWCk2quECnmbNWjZUGK4SjxrLJJN26sRWoVqlaSMTKhGTnp6l+dMb6KJQbN7URoxNDcqEkMKlyh9QQ2HdlXZVqaaDo79pnpMICxKpXpNPj3ZOrLAAqPHk2sZNuMpm8chEZSN8bPJIfOHq66pGyKwAkn3CwyxiTRC03HgD7cLbTLtx3CrqzQNIDsC5mL/0RdvXq9ySSPL6fpm6H3iUddl58vUBl6kY3XA6aunzND8fQFONBR4BlkXEZHNXQfeQKWV5xICcXHn4IjNbRUgwL+xaBmN62UYHgjFYhhdcxtuE96Lt03oUAajhhgiQYN1pxPf2WmkIwAcOdieTASVA9JOqrUYnf+7cjjSAzi2N5KPSb0V5BGOqbMUeqdWKRLD4pYEI4rRrnSjySR96B8Po1E5kHAmejlQjBxp0wmxIAlBmw2CpTmVR8mhKOaayJUVkmhFXRUlxXdB+J4fTL1p3IGVmgYkgoxV2WiCIEZF39mnlWbSJMStKlZlDIETUcfwaYTg7yxZVeICmUZ6kVvev/XG0GunpWpQre+ihFQSI31qZYrXRiho7pm9CZelxX2oHBXGUqkSIQW2xCOEH7KrGVXHoQjlNIau9dpkrqloka5dsskYcxdS1SeCKUq0pLmYpRuuGcdVSdCyU0Xb0aSpuuWsM+KtEK0+xY0L6NQMQevk7YVjBJhtUL1Lbf9zehwQ9ZaCcDCKlFr0YMIX4zQg4UBbKxpFDN0n5lzTZKSyAJlPOhKn47E8
UTJIQUSGjy9nBAxpVksIMn7AHdzQyATy68Bi/YG0tOmQtugvxrHhltvwF5UWtRN24QhvYcRTBC1YltETGTIdk1T1hOdzWdBeAU3oMn/kcT2aDCRGnVXk7xgPRHTNEHdVdTHgXy0QckdDnOkatOUH4esMpTz4io1/q4Y0B12ZkFTAkZ5amAllpjOzvm98ndiwam6dPd+DhVEdVEka1KKu+4itUp1avuaUiklxuO7FwsZWMSjkXLwDQYEACH+/gCnAhwDgYAEHQOYAmgDBEgRRWQZFDIIAIAUAnaM4kEzCADADAJK8+JBFauq00GrqtNBwKrqPgAAVj4eBAOC8dAKSn2C/L35fpNzZWbE3KQACEoXKBKCyxZZUoAAsAsAFQsWWLFggvwZ+DUqALBLmkqKllSzYoBVyhLFlJSUlSypZZuUllSxUFiyyywoCkl3gv3d+8ZsAsDcpZNx3xuWLAAACVLLLLmyyxYSpuWxYWXLYqWLLmyyyoL8EfgkAAlJSF52AXNlzZZYsqFiyyxZUUlEoBKAAAAoClGHAYL+AEv4ARsTZLLKWLKSgAbiyyiWWVLKllhAlliligAAAWWVLCyzUWLllQCC/BX4LBLFy2VLFlRZZZZZUsoEoAAAlikVLLFlliwsKlASgAAAADs=";
textMessage.Add(InstantMessageContentType.Gif, formattedMessage);
break;
case CheckedRadio.HTML:
formattedMessage = "<FONT COLOR=\"#353333\" SIZE=20 FACE=\"Segoe UI\">" + message + "</FONT>";
textMessage.Add(InstantMessageContentType.Html, formattedMessage);
break;
case CheckedRadio.INK:
formattedMessage = "base64:AK0CHAOAgAQdA+wBTgMESBFFZBkUMggAgBQCdoziQTMIAMAMAkrz4kEVq6rTQauq00HAquo+AABWPgpSf4L9Dfoeyzcpc2JUABEsoi2C5uWLmlzZUoALLLFlJUsqLFSxZZYsqLFAgvwZ+DQCWE2WWLLLBzzlLBZQFjcsWWVLKllioLFSyyywLAsWCwLFgAo/aoL92fu1UpKVm5bllSywAAEoiksqFhYssWXNlSxZZZZZUoCC/BX4LAAACVLLFlhYssqLKiypUoEoJRKlAAFgCl+rAYL+ABP4ADE3KFgzuUTZNllLKlBZYWCyVLGVKsFhYssWWVFS5pKlSyhKFlJUpLLKTYSpZUWWVLKAgvwF+AwACVFipQSkoAAAABKEoAEolIqWLLCyoqUllAlAAACUAA==";
textMessage.Add(InstantMessageContentType.Ink, formattedMessage);
break;
case CheckedRadio.PlainText:
formattedMessage = message;
textMessage.Add(InstantMessageContentType.PlainText, formattedMessage);
break;
case CheckedRadio.RTF:
System.Windows.Forms.RichTextBox richTextBox = new System.Windows.Forms.RichTextBox();
richTextBox.ForeColor = System.Drawing.Color.Red;
System.Drawing.Font font = new System.Drawing.Font(System.Drawing.FontFamily.GenericSerif, 25);
richTextBox.Font = font;
richTextBox.Text = message;
formattedMessage = richTextBox.Rtf.Replace(Environment.NewLine, "").Trim();
textMessage.Add(InstantMessageContentType.RichText, formattedMessage);
break;
}
return textMessage;
}
void ConversationTest_IsTypingChanged(object sender, IsTypingChangedEventArgs e)
{
}
void ConversationTest_InstantMessageReceived(object sender, MessageSentEventArgs e)
{
}
/// <summary>
/// Identify if a particular SystemException is one of the exceptions which may be thrown
/// by the Lync Model API.
/// </summary>
/// <param name="ex"></param>
/// <returns></returns>
private bool IsLyncException(SystemException ex)
{
return
ex is NotImplementedException ||
ex is ArgumentException ||
ex is NullReferenceException ||
ex is NotSupportedException ||
ex is ArgumentOutOfRangeException ||
ex is IndexOutOfRangeException ||
ex is InvalidOperationException ||
ex is TypeLoadException ||
ex is TypeInitializationException ||
ex is InvalidComObjectException ||
ex is InvalidCastException;
}
private void radioGif_Click(object sender, RoutedEventArgs e)
{
btnStartConv.Content = "Start Conversation in GIF";
}
private void radioText_Click(object sender, RoutedEventArgs e)
{
btnStartConv.Content = "Start Conversation in plain text";
}
private void radioRtf_Click(object sender, RoutedEventArgs e)
{
btnStartConv.Content = "Start Conversation in RTF";
}
private void radioHtml_Click(object sender, RoutedEventArgs e)
{
btnStartConv.Content = "Start Conversation in HTML";
}
private void radioInK_Click(object sender, RoutedEventArgs e)
{
btnStartConv.Content = "Start Conversation in INK";
}
}
}
| 50.178571
| 2,677
| 0.65791
|
a3bd1445b3633f0504a50163d693333483cf1a6e
| 1,062
|
java
|
Java
|
app/src/main/java/com/app/adapter/ApplicationAdapter.java
|
wankuiya/LRPH
|
892f7d95b2a3d756c8af6e66fd053e32ebbc980f
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/app/adapter/ApplicationAdapter.java
|
wankuiya/LRPH
|
892f7d95b2a3d756c8af6e66fd053e32ebbc980f
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/app/adapter/ApplicationAdapter.java
|
wankuiya/LRPH
|
892f7d95b2a3d756c8af6e66fd053e32ebbc980f
|
[
"Apache-2.0"
] | null | null | null |
package com.app.adapter;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.View;
import android.view.ViewGroup;
import android.widget.GridView;
import java.util.List;
/**
* Author chzjy
* Date 2016/12/19.
* 实现ViewPager页卡
*/
public class ApplicationAdapter extends PagerAdapter {
private List<GridView> gridViewList;
public ApplicationAdapter(List<GridView> gridViewList) {
this.gridViewList = gridViewList;
}
@Override
public int getCount() {
return gridViewList.size();
}
@Override
public boolean isViewFromObject(View view, Object object) {
return view == object;
}
@Override
public Object instantiateItem(ViewGroup container, int position) {
((ViewPager) container).addView(gridViewList.get(position));
return gridViewList.get(position);
}
@Override
public void destroyItem(ViewGroup container, int position, Object object) {
((ViewPager) container).removeView((View) object);
}
}
| 23.6
| 79
| 0.701507
|
852cf16a71bf676bfa2d8acc990c8a55726d8de6
| 109
|
cs
|
C#
|
EnvironmentAssessment.Wizard/Common/VimApi/T/TooManyConsecutiveOverrides.cs
|
octansIt/environmentassessment
|
267867b2b75e3c0955dac7f9250f3455e0935704
|
[
"MIT"
] | 1
|
2020-08-24T15:44:55.000Z
|
2020-08-24T15:44:55.000Z
|
EnvironmentAssessment.Wizard/Common/VimApi/T/TooManyConsecutiveOverrides.cs
|
octansIt/environmentassessment
|
267867b2b75e3c0955dac7f9250f3455e0935704
|
[
"MIT"
] | null | null | null |
EnvironmentAssessment.Wizard/Common/VimApi/T/TooManyConsecutiveOverrides.cs
|
octansIt/environmentassessment
|
267867b2b75e3c0955dac7f9250f3455e0935704
|
[
"MIT"
] | 1
|
2020-08-24T15:44:57.000Z
|
2020-08-24T15:44:57.000Z
|
namespace EnvironmentAssessment.Common.VimApi
{
public class TooManyConsecutiveOverrides : VimFault
{
}
}
| 15.571429
| 52
| 0.807339
|
66550df324ece687cc05fe1ec83da20d9ede7f5a
| 401
|
py
|
Python
|
Desafios/Desafio018.py
|
vaniaferreira/Python
|
5b3158836d47c0bb7bc446e6636e7b3dcea8a0ab
|
[
"MIT"
] | null | null | null |
Desafios/Desafio018.py
|
vaniaferreira/Python
|
5b3158836d47c0bb7bc446e6636e7b3dcea8a0ab
|
[
"MIT"
] | null | null | null |
Desafios/Desafio018.py
|
vaniaferreira/Python
|
5b3158836d47c0bb7bc446e6636e7b3dcea8a0ab
|
[
"MIT"
] | null | null | null |
#Faça um programa que leia um ângulo qualquer e mostre na tela o valor do seno, cosseno e tangente desse ângulo.
import math
num = float(input('Digite um ângulo: '))
seno = math.sin(math.radians(num))
cosseno = math.cos(math.radians(num))
tangente = math.tan(math.radians(num))
print ('Para o ângulo {}, o seno é {:.2f}, cosseno é {:.2f} e a tangente é {:.2f}'.format(num,seno, cosseno, tangente))
| 36.454545
| 119
| 0.705736
|
18c332784807eb726013313584dd510c4e54f4bd
| 1,183
|
rs
|
Rust
|
Rust/source/src/idgen/yit_id_helper.rs
|
bubao/IdGenerator
|
c92d8721071cdb2b4b203e51c1043b21c8c41266
|
[
"MIT"
] | null | null | null |
Rust/source/src/idgen/yit_id_helper.rs
|
bubao/IdGenerator
|
c92d8721071cdb2b4b203e51c1043b21c8c41266
|
[
"MIT"
] | null | null | null |
Rust/source/src/idgen/yit_id_helper.rs
|
bubao/IdGenerator
|
c92d8721071cdb2b4b203e51c1043b21c8c41266
|
[
"MIT"
] | null | null | null |
/*
* 版权属于:yitter(yitter@126.com)
* 开源地址:https://gitee.com/yitter/idgenerator
*/
use std::sync::Mutex;
use std::sync::Arc;
use crate::idgen::*;
pub struct YitIdHelper;
static mut idGenInstance: Option<Arc<Mutex<DefaultIdGenerator>>> = None;
impl YitIdHelper {
fn IdGenInstance() -> Arc<Mutex<DefaultIdGenerator>> {
unsafe {
idGenInstance.get_or_insert_with(|| {
Arc::new(Mutex::new(DefaultIdGenerator::Default()))
}).clone()
}
}
pub fn SetIdGenerator(options: IdGeneratorOptions) {
let mut idgenArc = YitIdHelper::IdGenInstance();
let mut idgen = idgenArc.lock().unwrap();
idgen.Worker.SetOptions(options);
}
pub fn SetWorkerId(workerId: u32) {
let mut idgenArc = YitIdHelper::IdGenInstance();
let mut idgen = idgenArc.lock().unwrap();
let mut options = IdGeneratorOptions::New(workerId);
idgen.Worker.SetOptions(options);
}
pub fn NextId() -> i64 {
let mut idgenArc = YitIdHelper::IdGenInstance();
let mut idgen = idgenArc.lock().unwrap();
idgen.Worker.NextId()
}
}
| 28.853659
| 73
| 0.602705
|
4472f9c137887091f95a6b20970f8105e58b09c0
| 14,836
|
py
|
Python
|
examples/roberta_medical_ner_train.py
|
6666ev/bert_seq2seq
|
caa9b6c5629ae5783c733aebbbcf669d8ab5dde2
|
[
"Apache-2.0"
] | 795
|
2020-03-13T10:16:26.000Z
|
2022-03-30T02:07:57.000Z
|
examples/roberta_medical_ner_train.py
|
6666ev/bert_seq2seq
|
caa9b6c5629ae5783c733aebbbcf669d8ab5dde2
|
[
"Apache-2.0"
] | 51
|
2020-03-23T09:09:45.000Z
|
2022-03-24T02:44:53.000Z
|
examples/roberta_medical_ner_train.py
|
6666ev/bert_seq2seq
|
caa9b6c5629ae5783c733aebbbcf669d8ab5dde2
|
[
"Apache-2.0"
] | 152
|
2020-04-01T04:37:18.000Z
|
2022-03-31T03:22:14.000Z
|
import torch
import codecs
from tqdm import tqdm
import time
import unicodedata
from torch.utils.data import Dataset, DataLoader
from bert_seq2seq import Tokenizer, load_chinese_base_vocab
from bert_seq2seq import load_bert
# target = ["O", "B-DRUG", "B-DRUG_INGREDIENT", "B-DISEASE", "B-SYMPTOM", "B-SYNDROME", "B-DISEASE_GROUP",
# "B-FOOD", "B-FOOD_GROUP", "B-PERSON_GROUP", "B-DRUG_GROUP", "B-DRUG_DOSAGE", "B-DRUG_TASTE",
# "B-DRUG_EFFICACY", "I-DRUG", "I-DRUG_INGREDIENT", "I-DISEASE", "I-SYMPTOM", "I-SYNDROME", "I-DISEASE_GROUP",
# "I-FOOD", "I-FOOD_GROUP", "I-PERSON_GROUP", "I-DRUG_GROUP", "I-DRUG_DOSAGE", "I-DRUG_TASTE",
# "I-DRUG_EFFICACY"]
target = ["O", "DRUG", "DRUG_INGREDIENT", "DISEASE", "SYMPTOM", "SYNDROME", "DISEASE_GROUP",
"FOOD", "FOOD_GROUP", "PERSON_GROUP", "DRUG_GROUP", "DRUG_DOSAGE", "DRUG_TASTE",
"DRUG_EFFICACY"]
labels2id = {k: v for v, k in enumerate(target)}
vocab_path = "./state_dict/roberta_wwm_vocab.txt" # roberta模型字典的位置
model_name = "roberta" # 选择模型名字
model_path = "./state_dict/roberta_wwm_pytorch_model.bin" # roberta模型位置
recent_model_path = "" # 用于把已经训练好的模型继续训练
model_save_path = "./state_dict/bert_medical_ner_model_crf.bin"
batch_size = 8
lr = 1e-5
crf_lr = 1e-2 ## crf层学习率为0.01
# 加载字典
word2idx = load_chinese_base_vocab(vocab_path)
def _is_punctuation(ch):
"""标点符号类字符判断(全/半角均在此内)
"""
code = ord(ch)
return 33 <= code <= 47 or \
58 <= code <= 64 or \
91 <= code <= 96 or \
123 <= code <= 126 or \
unicodedata.category(ch).startswith('P')
def _cjk_punctuation():
return u'\uff02\uff03\uff04\uff05\uff06\uff07\uff08\uff09\uff0a\uff0b\uff0c\uff0d\uff0f\uff1a\uff1b\uff1c\uff1d\uff1e\uff20\uff3b\uff3c\uff3d\uff3e\uff3f\uff40\uff5b\uff5c\uff5d\uff5e\uff5f\uff60\uff62\uff63\uff64\u3000\u3001\u3003\u3008\u3009\u300a\u300b\u300c\u300d\u300e\u300f\u3010\u3011\u3014\u3015\u3016\u3017\u3018\u3019\u301a\u301b\u301c\u301d\u301e\u301f\u3030\u303e\u303f\u2013\u2014\u2018\u2019\u201b\u201c\u201d\u201e\u201f\u2026\u2027\ufe4f\ufe51\ufe54\xb7\uff01\uff1f\uff61\u3002'
def _is_cjk_character(ch):
"""CJK类字符判断(包括中文字符也在此列)
参考:https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block)
"""
code = ord(ch)
return 0x4E00 <= code <= 0x9FFF or \
0x3400 <= code <= 0x4DBF or \
0x20000 <= code <= 0x2A6DF or \
0x2A700 <= code <= 0x2B73F or \
0x2B740 <= code <= 0x2B81F or \
0x2B820 <= code <= 0x2CEAF or \
0xF900 <= code <= 0xFAFF or \
0x2F800 <= code <= 0x2FA1F
def _is_control(ch):
"""控制类字符判断
"""
return unicodedata.category(ch) in ('Cc', 'Cf')
def word_piece_tokenize(word):
"""word内分成subword
"""
if word in word2idx:
return [word]
tokens = []
start, stop = 0, 0
while start < len(word):
stop = len(word)
while stop > start:
sub = word[start:stop]
if start > 0:
sub = '##' + sub
if sub in word2idx:
break
stop -= 1
if start == stop:
stop += 1
tokens.append(sub)
start = stop
return tokens
def from_ann2dic(w_path):
for i in range(1000):
print(i)
r_ann_path = "./corpus/医学NER/train/" + str(i) + ".ann"
r_txt_path = "./corpus/医学NER/train/" + str(i) + ".txt"
q_dic = {}
print("开始读取文件:%s" % r_ann_path)
with codecs.open(r_ann_path, "r", encoding="utf-8") as f:
line = f.readline()
line = line.strip("\n\r")
while line != "":
line_arr = line.split()
# print(line_arr)
cls = line_arr[1]
start_index = int(line_arr[2])
end_index = int(line_arr[3])
length = end_index - start_index
for r in range(length):
if r == 0:
q_dic[start_index] = ("%s" % cls)
else:
q_dic[start_index + r] = ("%s" % cls)
line = f.readline()
line = line.strip("\n\r")
print("开始读取文件:%s" % r_txt_path)
with codecs.open(r_txt_path, "r", encoding="utf-8") as f:
content_str = f.read()
content_str = content_str.replace("、", ",")
print("开始写入文本%s" % w_path)
with codecs.open(w_path, encoding="utf-8", mode="a+") as w:
for i, char in enumerate(content_str):
if char == " " or char == "" or char == "\n" or char == "\r" or char == "<" or char == ">" or char == "b" or char == "r" or char == "/" or unicodedata.category(char) == 'Zs' or char == "-":
continue
else:
if i in q_dic:
tag = q_dic[i]
else:
tag = "O" # 大写字母O
w.write('%s %s\n' % (char, tag))
# w.write('%s\n' % "END O")
def load_data(path: str):
"""
加载数据
"""
src_data = []
labels_data = []
with open(path) as f :
line = f.readline()
line = line.strip("\n")
temp_list = ""
temp_label_list = [0]
# index = 0
while line != "":
##开始一行一行读数据
line_split = line.split(" ")
# print(line_split)
if line_split[0] == "。":
temp_label_list.append(0)
src_data.append(temp_list)
labels_data.append(temp_label_list)
temp_list = ""
temp_label_list = [0]
else :
temp_list += (line_split[0])
temp_label_list.append(labels2id[line_split[1]])
line = f.readline()
line = line.strip("\n")
print("原始数据大小为:" + str(len(src_data)))
save_src_data = []
save_label_data = []
for src, label in zip(src_data, labels_data):
if len(src) < 5:
# print(src)
continue
save_src_data.append(src)
save_label_data.append(label)
# retain = 0
print("清洗后数据大小为:" + str(len(save_src_data)))
return save_src_data, save_label_data
## 自定义dataset
class NERDataset(Dataset):
"""
针对特定数据集,定义一个相关的取数据的方式
"""
def __init__(self, sents_src, sents_tgt) :
## 一般init函数是加载所有数据
super(NERDataset, self).__init__()
# 读原始数据
# self.sents_src, self.sents_tgt = read_corpus(poem_corpus_dir)
self.sents_src = sents_src
self.sents_tgt = sents_tgt
self.idx2word = {k: v for v, k in word2idx.items()}
self.tokenizer = Tokenizer(word2idx)
def __getitem__(self, i):
## 得到单个数据
# print(i)
src = self.sents_src[i]
tgt = self.sents_tgt[i]
token_ids, token_type_ids = self.tokenizer.encode(src)
if len(token_ids) != len(tgt):
print(len(token_ids))
print(len(tgt))
print(src)
print(self.tokenizer.decode(token_ids))
print(tgt)
self.__getitem__(i + 1)
output = {
"token_ids": token_ids,
"token_type_ids": token_type_ids,
"target_id": tgt
}
return output
def __len__(self):
return len(self.sents_src)
def collate_fn(batch):
"""
动态padding, batch为一部分sample
"""
def padding(indice, max_length, pad_idx=0):
"""
pad 函数
"""
pad_indice = [item + [pad_idx] * max(0, max_length - len(item)) for item in indice]
return torch.tensor(pad_indice)
token_ids = [data["token_ids"] for data in batch]
max_length = max([len(t) for t in token_ids])
token_type_ids = [data["token_type_ids"] for data in batch]
target_ids = [data["target_id"] for data in batch]
token_ids_padded = padding(token_ids, max_length)
token_type_ids_padded = padding(token_type_ids, max_length)
target_ids_padded = padding(target_ids, max_length)
return token_ids_padded, token_type_ids_padded, target_ids_padded
def viterbi_decode(nodes, trans):
"""
维特比算法 解码
nodes: (seq_len, target_size)
trans: (target_size, target_size)
"""
scores = nodes[0]
scores[1:] -= 100000 # 刚开始标签肯定是"O"
target_size = nodes.shape[1]
seq_len = nodes.shape[0]
labels = torch.arange(0, target_size).view(1, -1)
path = labels
for l in range(1, seq_len):
scores = scores.view(-1, 1)
M = scores + trans + nodes[l].view(1, -1)
scores, ids = M.max(0)
path = torch.cat((path[:, ids], labels), dim=0)
# print(scores)
# print(scores)
return path[:, scores.argmax()]
def ner_print(model, test_data):
model.eval()
idxtword = {v: k for k, v in word2idx.items()}
tokenier = Tokenizer(word2idx)
trans = model.state_dict()["crf_layer.trans"]
for text in test_data:
decode = []
text_encode, text_ids = tokenier.encode(text)
text_tensor = torch.tensor(text_encode, device=model.device).view(1, -1)
out = model(text_tensor).squeeze(0) # 其实是nodes
labels = viterbi_decode(out, trans)
starting = False
for l in labels:
if l > 0:
label = target[l.item()]
decode.append(label)
else :
decode.append("O")
flag = 0
res = {}
# print(decode)
# print(text)
decode_text = [idxtword[i] for i in text_encode]
for index, each_entity in enumerate(decode):
if each_entity != "O":
if flag != each_entity:
cur_text = decode_text[index]
if each_entity in res.keys():
res[each_entity].append(cur_text)
else :
res[each_entity] = [cur_text]
flag = each_entity
elif flag == each_entity:
res[each_entity][-1] += decode_text[index]
else :
flag = 0
print(res)
class Trainer:
    """Fine-tunes a BERT + CRF sequence-labeling model for medical NER."""

    def __init__(self):
        # Load (sentence, label-sequence) training pairs.
        self.sents_src, self.sents_tgt = load_data("./state_dict/medical_ner_update.txt")
        self.tokenier = Tokenizer(word2idx)
        # Use GPU when available, otherwise fall back to CPU.
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        print("device: " + str(self.device))
        # Build the model: BERT encoder + CRF sequence-labeling head.
        self.bert_model = load_bert(word2idx, model_name=model_name, model_class="sequence_labeling_crf", target_size=len(target))
        ## Load the pretrained model parameters.
        self.bert_model.load_pretrain_params(model_path)
        # Move the model to the compute device (GPU or CPU).
        self.bert_model.set_device(self.device)
        # Split parameters so the CRF layer gets its own learning rate
        # (crf_lr); everything else uses the base lr.
        crf_params = list(map(id, self.bert_model.crf_layer.parameters()))  ## pick out the CRF-layer params separately
        base_params = filter(lambda p: id(p) not in crf_params, self.bert_model.parameters())
        self.optimizer = torch.optim.Adam([
            {"params": base_params},
            {"params": self.bert_model.crf_layer.parameters(), "lr": crf_lr}], lr=lr, weight_decay=1e-5)
        # Data loader with the custom padding collate function.
        dataset = NERDataset(self.sents_src, self.sents_tgt)
        self.dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True, collate_fn=collate_fn)

    def train(self, epoch):
        # Run one training epoch.
        self.bert_model.train()
        self.iteration(epoch, dataloader=self.dataloader, train=True)

    def save(self, save_path):
        """
        Save all model parameters to save_path.
        """
        self.bert_model.save_all_params(save_path)
        print("{} saved!".format(save_path))

    def iteration(self, epoch, dataloader, train=True):
        """One full pass over dataloader; optimizes when train=True and saves at the end."""
        total_loss = 0
        start_time = time.time()  ## start timestamp, for reporting epoch duration
        step = 0
        for token_ids, token_type_ids, target_ids in tqdm(dataloader,position=0, leave=True):
            # print(target_ids.shape)
            step += 1
            if step % 300 == 0:
                # Periodically print predictions on fixed sample texts to
                # eyeball training progress; switch back to train mode after.
                test_data = ["补气养血,调经止带,用于月经不调经期腹痛,非处方药物甲类,国家基本药物目录2012如果服用任何其他药品请告知医师或药师包括任何从药房超市或保健品商店购买的非处方药品。",
                             "月经过多孕妇忌服。黑褐色至黑色的小蜜丸味甜微苦。",
                             "红虎灌肠液50毫升装,安徽天洋药业清热解毒,化湿除带,祛瘀止痛,散结消癥,用于慢性盆腔炎所致小腹疼痛腰,骶酸痛带下量多或有发热。"]
                ner_print(self.bert_model, test_data)
                self.bert_model.train()
            # Because target labels are passed in, the model computes and
            # returns the loss alongside predictions.
            # NOTE(review): token_ids/target_ids are not explicitly moved to
            # self.device here — presumably handled inside the model; verify.
            predictions, loss = self.bert_model(token_ids,
                                                labels=target_ids,
                                                # use_layer_num=3
                                                )
            # Backpropagation
            if train:
                # Clear gradients from the previous step.
                self.optimizer.zero_grad()
                # Backward pass to compute new gradients.
                loss.backward()
                # Apply the gradients to update model parameters.
                self.optimizer.step()
            # Accumulate for this epoch's total loss report.
            total_loss += loss.item()
        end_time = time.time()
        spend_time = end_time - start_time
        # Report epoch statistics.
        print("epoch is " + str(epoch)+". loss is " + str(total_loss) + ". spend time is "+ str(spend_time))
        # Save the model after every epoch.
        self.save(model_save_path)
if __name__ == '__main__':
    # Train for a fixed number of epochs; Trainer saves after each epoch.
    trainer = Trainer()
    train_epoches = 50
    for epoch in range(train_epoches):
        # Train one epoch.
        trainer.train(epoch)
# from_ann2dic("./state_dict/medical_ner.txt")
# with open("./state_dict/medical_ner_update.txt", "a+") as f:
# with open("./state_dict/medical_ner.txt", "r", encoding="utf-8") as f1 :
# lines = f1.readlines()
# start = 1
# string = ""
# label = ""
# for line in lines:
# if line == "\n":
# f.write("\n")
# continue
# line = line.strip("\n")
# line = line.split(" ")
# if _is_punctuation(line[0]) or _is_cjk_character(line[0]):
# if string != "":
# string = string.lower()
# tokens = word_piece_tokenize(string) # 子词
# for t in tokens:
# if "##" in t:
# f.write(t[2:] + " " + label + "\n")
# else :
# f.write(t + " " + label + "\n")
# # f.write(string + " " + label + "\n")
# string = ""
# label = ""
# f.write(line[0] + " " + line[1] + "\n")
# else :
# string += line[0]
# label = line[1]
| 35.663462
| 498
| 0.534713
|
3cd6c0b2c4c10ef239c9a2c67ae221a5431e17e0
| 18,912
|
lua
|
Lua
|
AnS/Core/Sources.lua
|
Metric/AnSAddons
|
054879d209dcf2ba76ddaccb2c639b1a36d5317e
|
[
"MIT"
] | 21
|
2018-09-03T16:48:54.000Z
|
2022-01-05T07:17:54.000Z
|
AnS/Core/Sources.lua
|
Metric/AnSAddons
|
054879d209dcf2ba76ddaccb2c639b1a36d5317e
|
[
"MIT"
] | 21
|
2018-09-16T23:12:17.000Z
|
2021-02-10T18:16:18.000Z
|
AnS/Core/Sources.lua
|
Metric/AnSAddons
|
054879d209dcf2ba76ddaccb2c639b1a36d5317e
|
[
"MIT"
] | 9
|
2018-09-17T09:30:10.000Z
|
2021-06-19T17:48:43.000Z
|
local Ans = select(2, ...);
local Data = Ans.Data;
local VendorData = Data.Vendor;
local Config = Ans.Config;
local Sources = Ans.Object.Register("Sources");
Sources.items = {};
local Utils = Ans.Utils;
local PriceSource = Ans.PriceSource;
local NAME_CACHE = "";
local SOURCE_CACHE = "";
local CVAR_CACHE = "";
local BONUS_CACHE = {};
local OpCodes = Ans.Object.Register("OpCodes", Sources);
function OpCodes:Acquire()
    -- Builds a fresh op-code table with every price/source field zeroed
    -- (quality defaults to 1, isgroup to false). The bonuses table is the
    -- shared file-level BONUS_CACHE rather than a fresh allocation.
    -- NOTE(review): "vendorBuy" (capital B) appears unused — queries read
    -- ops.vendorbuy, which is assigned later; kept here for parity.
    local op = {
        percent = 0, ppu = 0, stacksize = 0, buyout = 0, ilevel = 0,
        vendorsell = 0, vendorBuy = 0, quality = 1,
        dbmarket = 0, dbminbuyout = 0, dbhistorical = 0,
        dbregionmarketavg = 0, dbregionminbuyoutavg = 0,
        dbregionhistorical = 0, dbregionsaleavg = 0,
        dbregionsalerate = 0, dbregionsoldperday = 0,
        dbglobalminbuyoutavg = 0, dbglobalmarketavg = 0,
        dbglobalhistorical = 0, dbglobalsaleavg = 0,
        dbglobalsalerate = 0, dbglobalsoldperday = 0,
        tujmarket = 0, tujrecent = 0, tujglobalmedian = 0,
        tujglobalmean = 0, tujage = 0, tujdays = 0,
        tujstddev = 0, tujglobalstddev = 0,
        atrvalue = 0, ansrecent = 0, ansmarket = 0,
        ansmin = 0, ans3day = 0,
        avgsell = 0, avgbuy = 0, maxsell = 0, maxbuy = 0,
        destroy = 0, numinventory = 0,
        isgroup = false,
        bonuses = BONUS_CACHE,
    };
    return op;
end
local VAR_CACHE = {};
local OP_CACHE = {};
local VALUE_CACHE = {};
local SOURCE_TEMPLATE = "local %s = ops.%s or 0; ";
local VAR_TEMPLATE = "local %s = %s or 0; ";
local TEMPLATE = [[
return function(sources, ops)
local ifgte, iflte, iflt, ifgt, ifeq, ifneq, check, avg, first, round,
min, max, mod,
abs, ceil, floor, random, log,
log10, exp, sqrt = sources.ifgte, sources.iflte, sources.iflt, sources.ifgt, sources.ifeq, sources.ifneq, sources.check, sources.avg, sources.first, sources.round, sources.min, sources.max, math.fmod, math.abs, math.ceil, math.floor, math.random, math.log, math.log10, math.exp, math.sqrt;
local isgroup = ops.isgroup;
local eq, neq, startswith, contains = sources.eq, sources.neq, sources.startswith, sources.contains;
local bonuses = ops.bonuses;
local bonus = function(v1,v2,v3)
local noArgs = v2 == nil and v3 == nil;
if (isgroup) then
if (noArgs) then
return true;
end
return v2 or 0;
end
return sources.bonus(bonuses, v1, v2, v3);
end
local percent = ops.percent;
local ppu = ops.ppu;
local stacksize = ops.stacksize;
local buyout = ops.buyout;
local avgBuy, AvgBuy, Avgbuy = ops.avgbuy, ops.avgbuy, ops.avgbuy;
local avgSell, AvgSell, Avgsell = ops.avgsell, ops.avgsell, ops.avgsell;
local Destroy = ops.destroy;
local maxSell, MaxSell, Maxsell = ops.maxsell, ops.maxsell, ops.maxsell;
local maxBuy, MaxBuy, Maxbuy = ops.maxbuy, ops.maxbuy, ops.maxbuy;
local ilevel, itemLevel, ItemLevel, itemlevel, Itemlevel = ops.ilevel, ops.ilevel, ops.ilevel, ops.ilevel, ops.ilevel;
local quality, itemquality, itemQuality, ItemQuality, Itemquality = ops.quality, ops.quality, ops.quality, ops.quality, ops.quality;
local vendorsell, vendorSell, Vendorsell, VendorSell = ops.vendorsell, ops.vendorsell, ops.vendorsell, ops.vendorsell;
local vendorbuy, vendorBuy, VendorBuy, Vendorbuy = ops.vendorbuy, ops.vendorbuy, ops.vendorbuy, ops.vendorbuy;
local tsmId = ops.tsmId;
local id = ops.id;
local DBMarket, Dbmarket = ops.dbmarket, ops.dbmarket;
local DBMinBuyout, Dbminbuyout = ops.dbminbuyout, ops.dbminbuyout;
local DBHistorical, Dbhistorical = ops.dbhistorical, ops.dbhistorical;
local DBRegionMinBuyoutAvg, Dbregionminbuyoutavg = ops.dbregionminbuyoutavg, ops.dbregionminbuyoutavg;
local DBRegionMarketAvg, Dbregionmarketavg = ops.dbregionmarketavg, ops.dbregionmarketavg;
local DBRegionHistorical, Dbregionhistorical = ops.dbregionhistorical, ops.dbregionhistorical;
local DBRegionSaleAvg, Dbregionsaleavg = ops.dbregionsaleavg, ops.dbregionsaleavg;
local NumInventory, Numinventory, numInventory = ops.numinventory, ops.numinventory, ops.numinventory;
%s
%s
return %s;
end
]];
-- Drops every cached string/table (names, source locals, custom vars,
-- per-item values, compiled queries, bonus set) so they are rebuilt lazily.
function Sources:Clear()
    Utils.ClearCache();
    NAME_CACHE = "";
    SOURCE_CACHE = "";
    CVAR_CACHE = "";
    wipe(VALUE_CACHE);
    wipe(OP_CACHE);
    wipe(BONUS_CACHE);
end
-- Drops only the per-item value cache (e.g. between scans).
function Sources:ClearValues()
    wipe(VALUE_CACHE);
end
-- Rebuilds VAR_CACHE from the user's configured custom sources, expanding
-- shorthand (operators, percents, money values) in each definition.
function Sources:LoadCVars()
    wipe(VAR_CACHE);
    for i,v in ipairs(Config.CustomSources()) do
        local value = v.value;
        if (value and value:len() > 0) then
            value = Utils.ReplaceOpShortHand(value);
            value = Utils.ReplaceShortHandPercent(value);
            value = Utils.ReplaceMoneyShorthand(value);
            tinsert(VAR_CACHE, {name = v.name, value = value});
        end
    end
end
-- Registers a named price source; fn(itemId, key, name) resolves its value.
function Sources:Register(name, fn, key)
    local source = PriceSource:Acquire(name:lower(),fn,key);
    tinsert(self.items, source);
end
function Sources:GetNameString()
    -- Comma-separated list of all registered price source names that have a
    -- resolver function. Built once and cached in NAME_CACHE (reset by Clear()).
    if (NAME_CACHE and NAME_CACHE:len() > 0) then
        return NAME_CACHE;
    end

    local str = "";
    local sep = "";

    for i = 1, #self.items do
        local s = self.items[i];
        if (s.fn ~= nil) then
            str = str..sep..s.name;
            sep = ",";
        end
    end

    -- Fix: the result was previously stored in a stray global
    -- (NameStringCache), so the NAME_CACHE check above never hit — the string
    -- was rebuilt on every call and a global variable leaked.
    NAME_CACHE = str;
    return str;
end
-- Returns (and caches in CVAR_CACHE) the Lua chunk fragment declaring every
-- user-defined custom variable as a local; loads them on first use.
function Sources:GetCVarString()
    local str = "";
    local i;
    if (CVAR_CACHE and CVAR_CACHE:len() > 0) then
        return CVAR_CACHE;
    else
        self:LoadCVars();
    end
    for i = 1, #VAR_CACHE do
        local cvar = VAR_CACHE[i];
        local nstr = string.format(VAR_TEMPLATE, cvar.name, cvar.value);
        str = str..nstr;
    end
    CVAR_CACHE = str;
    return str;
end
-- Returns (and caches in SOURCE_CACHE) the Lua chunk fragment declaring a
-- local for every registered price source, reading its value from ops.
function Sources:GetVarString()
    local str = "";
    local i;
    local total = #self.items;
    if (SOURCE_CACHE and SOURCE_CACHE:len() > 0) then
        return SOURCE_CACHE;
    end
    for i = 1, total do
        local s = self.items[i];
        if (s.fn ~= nil) then
            local name = s.name;
            local nstr = string.format(SOURCE_TEMPLATE, name, name);
            str = str..nstr;
        end
    end
    SOURCE_CACHE = str;
    return str;
end
function Sources:IsValidQuery(q)
    -- Returns true when the parentheses in q are balanced: every ")" has a
    -- matching "(" before it and every "(" is eventually closed.
    -- Fix: the previous implementation only alternated between the next "("
    -- and the next ")", so nested unbalanced strings such as "(()" were
    -- wrongly accepted; a depth counter handles nesting correctly.
    local depth = 0;
    for idx = 1, #q do
        local c = strsub(q, idx, idx);
        if (c == "(") then
            depth = depth + 1;
        elseif (c == ")") then
            depth = depth - 1;
            -- A close with no pending open is invalid immediately.
            if (depth < 0) then
                return false;
            end
        end
    end
    return depth == 0;
end
-- Resolves every registered price source for itemId and writes the results
-- into ops, keyed by source name (nil results become 0).
function Sources:GetValues(itemId, ops)
    local i;
    local total = #self.items;
    for i = 1, total do
        local s = self.items[i];
        if (s.fn ~= nil) then
            local r = s.fn(itemId, s.key, s.name);
            local v = r or 0;
            ops[s.name] = v;
        end
    end
end
-- Rounds n half away from zero (e.g. 2.5 -> 3, -2.5 -> -3).
Sources.round = function(n)
    local offset = (n < 0) and -0.5 or 0.5;
    return math.floor(n + offset);
end
-- Rounded mean of the arguments, counting only numbers whose floor is above
-- zero; returns 0 when nothing qualifies.
Sources.avg = function(...)
    local sum, count = 0, 0;
    for idx = 1, select("#", ...) do
        local value = select(idx, ...);
        if (value and type(value) == "number" and math.floor(value) > 0) then
            sum = sum + value;
            count = count + 1;
        end
    end
    if (count == 0) then
        return sum;
    end
    return Sources.round(sum / count);
end
-- first: v1 when it is a number whose floor is above zero, otherwise v2 (or 0).
Sources.first = function(v1, v2)
    if (type(v1) == "number" and math.floor(v1) > 0) then
        return v1;
    end
    return v2 or 0;
end
-- check: v2 when v1 is a number whose floor is above zero, otherwise v3 (or 0).
Sources.check = function(v1, v2, v3)
    if (type(v1) == "number" and math.floor(v1) > 0) then
        return v2 or 0;
    end
    return v3 or 0;
end
-- Conditional helpers exposed to user pricing strings. Each takes
-- (v1, v2, v3, v4) and yields v3 (or 0) when the condition holds,
-- otherwise v4 (or 0).

-- Builds a numeric comparator that validates its operands first:
-- a non-number v1 selects v4, then a non-number v2 selects v3.
local function numericBranch(cmp)
    return function(v1, v2, v3, v4)
        if (not v1 or type(v1) ~= "number") then
            return v4 or 0;
        end
        if (not v2 or type(v2) ~= "number") then
            return v3 or 0;
        end
        if (cmp(v1, v2)) then
            return v3 or 0;
        end
        return v4 or 0;
    end
end

-- Builds an equality test; no type validation (any values may be compared).
local function equalityBranch(wantEqual)
    return function(v1, v2, v3, v4)
        if ((v1 == v2) == wantEqual) then
            return v3 or 0;
        end
        return v4 or 0;
    end
end

Sources.iflte = numericBranch(function(a, b) return a <= b; end);
Sources.ifgte = numericBranch(function(a, b) return a >= b; end);
Sources.iflt = numericBranch(function(a, b) return a < b; end);
Sources.ifgt = numericBranch(function(a, b) return a > b; end);
Sources.ifeq = equalityBranch(true);
Sources.ifneq = equalityBranch(false);
Sources.neq = equalityBranch(false);
Sources.eq = equalityBranch(true);
-- startswith(v1, v2[, v3, v4]): does string v1 begin with string v2?
-- With only two args it returns a boolean; with result args it returns v3
-- on match and v4 otherwise (nil results become 0).
Sources.startswith = function(v1, v2, v3, v4)
    local noArgs = v3 == nil and v4 == nil;
    if (not v1 or type(v1) ~= "string") then
        if (noArgs) then
            return false;
        end
        return v4 or 0;
    end
    if (not v2 or type(v2) ~= "string") then
        if (noArgs) then
            return false;
        end
        return v4 or 0;
    end
    if (strsub(v1, 1, #v2) == v2) then
        if (noArgs) then
            return true;
        end
        return v3 or 0;
    end
    if (noArgs) then
        return false;
    end
    return v4 or 0;
end
-- contains(v1, v2[, v3, v4]): does string v1 match v2?
-- NOTE: strfind treats v2 as a Lua pattern, not a plain substring.
-- NOTE(review): unlike startswith, a non-string v2 yields the "match"
-- result (true / v3) — presumably "no pattern matches everything"; verify.
Sources.contains = function(v1, v2, v3, v4)
    local noArgs = v3 == nil and v4 == nil;
    if (not v1 or type(v1) ~= "string") then
        if (noArgs) then
            return false;
        end
        return v4 or 0;
    end
    if (not v2 or type(v2) ~= "string") then
        if (noArgs) then
            return true;
        end
        return v3 or 0;
    end
    if (strfind(v1, v2)) then
        if (noArgs) then
            return true;
        end
        return v3 or 0;
    end
    if (noArgs) then
        return false;
    end
    return v4 or 0;
end
-- bonus(set, id[, v3, v4]): is bonus id v2 present in set table v1?
-- Returns a boolean when called without result args, else v3/v4 (nil -> 0).
Sources.bonus = function(v1,v2,v3,v4)
    local noArgs = v3 == nil and v4 == nil;
    if (not v1 or not v2) then
        if (noArgs) then
            return false;
        end
        return v4 or 0;
    end
    local b = v1[v2];
    if (b) then
        if (noArgs) then
            return true;
        end
        return v3 or 0;
    end
    if (noArgs) then
        return false;
    end
    return v4 or 0;
end
-- max: largest numeric argument, ignoring non-numbers and zeros; 0 when none.
Sources.max = function(...)
    local best;
    for i = 1, select("#", ...) do
        local v = select(i, ...);
        if (type(v) == "number" and v ~= 0) then
            if (best == nil or v > best) then
                best = v;
            end
        end
    end
    if (best == nil) then
        return 0;
    end
    return best;
end
-- min: smallest numeric argument, ignoring non-numbers and zeros; 0 when none.
Sources.min = function(...)
    local best;
    for i = 1, select("#", ...) do
        local v = select(i, ...);
        if (type(v) == "number" and v ~= 0) then
            if (best == nil or v < best) then
                best = v;
            end
        end
    end
    if (best == nil) then
        return 0;
    end
    return best;
end
-- This accepts an item id in tsm format, numeric, or an item link
-- Evaluates pricing/filter string q for a bare item id (no auction context:
-- buyout/stacksize/percent/ppu/ilevel are zeroed, quality sentinel is 99).
-- Returns the evaluated result, 0 on compile/runtime error, or nil when
-- there is nothing to evaluate (no sources registered or empty query).
function Sources:QueryID(q, itemId)
    if (not itemId) then
        return nil;
    end
    local codes = nil;
    local names = self:GetNameString();
    if (not names or names:len() == 0 ) then
        return nil;
    end
    if (not q or q:len() == 0) then
        return nil;
    end
    -- Per-item source values are cached in VALUE_CACHE; OpCodes:Acquire()
    -- supplies a zeroed ops table on a miss.
    if (VALUE_CACHE[itemId]) then
        codes = VALUE_CACHE[itemId];
    else
        codes = OpCodes:Acquire();
        VALUE_CACHE[itemId] = codes;
        self:GetValues(itemId, codes);
    end
    codes.buyout = 0;
    codes.stacksize = 0;
    codes.quality = 99;
    codes.percent = 0;
    codes.ppu = 0;
    codes.ilevel = 0;
    codes.vendorsell = 0;
    codes.tsmId = Utils.GetID(itemId);
    codes.isgroup = false;
    -- Vendor buy price is keyed by the id+bonus form; pets ("p:") keep only
    -- the type:id prefix.
    local idBonusOnly = Utils.BonusID(codes.tsmId, false, codes.bonuses);
    local t, id = strsplit(":", codes.tsmId);
    if (t == "p") then
        idBonusOnly = t..":"..id;
    end
    codes.vendorbuy = Config.Vendor()[idBonusOnly] or VendorData[idBonusOnly] or 0;
    codes.id = tonumber(id);
    local _, fn, err = false, nil, nil;
    local oq = q;
    -- Compile each unique query text once; the compiled evaluator closure is
    -- cached in OP_CACHE keyed by the original (pre-expansion) string.
    if (not OP_CACHE[q]) then
        q = Utils.ReplaceOpShortHand(q);
        q = Utils.ReplaceShortHandPercent(q);
        q = Utils.ReplaceMoneyShorthand(q);
        q = Utils.ReplaceTabReturns(q);
        --print(q);
        if (not self:IsValidQuery(q)) then
            print("AnS Invalid Filter / Pricing String: "..q);
            return 0;
        end
        local pstr = string.format(TEMPLATE, self:GetVarString(), self:GetCVarString(), q);
        fn, err = loadstring(pstr);
        if(not fn or err) then
            print("AnS Filter / Pricing String Error: "..err);
            return 0;
        end
        -- The loaded chunk returns the evaluator function; pcall to obtain it.
        _, fn = pcall(fn);
        if (not _ or not fn) then
            print("AnS Invalid Filter / Pricing String: "..q);
            return 0;
        end
        OP_CACHE[oq] = fn;
    else
        fn = OP_CACHE[oq];
    end
    if (not fn) then
        return 0;
    end
    local _, r = pcall(fn, self, codes);
    if (not _) then
        print("AnS Invalid Filter / Pricing String: "..q);
        return 0;
    end
    return r;
end
-- Compile-checks pricing/filter string q by evaluating it against a fixed
-- dummy item (i:2589) with zeroed auction fields. Returns true when it
-- compiles and runs, false on any error, nil when there is nothing to check.
function Sources:Validate(q)
    local itemId = "i:2589";
    local codes = nil;
    local names = self:GetNameString();
    if (not names or names:len() == 0 ) then
        return nil;
    end
    if (not q or q:len() == 0) then
        return nil;
    end
    if (VALUE_CACHE[itemId]) then
        codes = VALUE_CACHE[itemId];
    else
        codes = OpCodes:Acquire();
        VALUE_CACHE[itemId] = codes;
        self:GetValues(itemId, codes);
    end
    codes.buyout = 0;
    codes.stacksize = 0;
    codes.quality = 99;
    codes.percent = 0;
    codes.ppu = 0;
    codes.ilevel = 0;
    codes.vendorsell = 0;
    codes.tsmId = itemId;
    codes.vendorbuy = 0;
    codes.id = 2589;
    codes.isgroup = false;
    local _, fn, err = false, nil, nil;
    local oq = q;
    -- Same compile-and-cache pipeline as QueryID/Query, but errors return
    -- false instead of printing.
    if (not OP_CACHE[q]) then
        q = Utils.ReplaceOpShortHand(q);
        q = Utils.ReplaceShortHandPercent(q);
        q = Utils.ReplaceMoneyShorthand(q);
        q = Utils.ReplaceTabReturns(q);
        --print(q);
        if (not self:IsValidQuery(q)) then
            return false;
        end
        local pstr = string.format(TEMPLATE, self:GetVarString(), self:GetCVarString(), q);
        fn, err = loadstring(pstr);
        if(not fn or err) then
            return false;
        end
        _, fn = pcall(fn);
        if (not _ or not fn) then
            return false;
        end
        OP_CACHE[oq] = fn;
    else
        fn = OP_CACHE[oq];
    end
    if (not fn) then
        return false;
    end
    local _, r = pcall(fn, self, codes);
    if (not _) then
        return false;
    end
    return true;
end
-- Evaluates pricing/filter string q against a live auction/item record.
-- item supplies link/id, buyoutPrice, count, quality, iLevel, percent, ppu;
-- isGroup marks group-style evaluation for the bonus() helper. Returns the
-- evaluated result, 0 on compile/runtime error, nil when there is nothing
-- to evaluate.
function Sources:Query(q, item, isGroup)
    local itemId = item.link or item.id;
    local buyout = item.buyoutPrice;
    local stackSize = item.count;
    local quality = item.quality;
    local ilvl = item.iLevel;
    local percent = item.percent;
    local ppu = item.ppu;
    if (not itemId) then
        return nil;
    end
    local codes = nil;
    local names = self:GetNameString();
    if (not names or names:len() == 0 ) then
        return nil;
    end
    if (not q or q:len() == 0) then
        return nil;
    end
    -- Per-item source values are cached; the auction-specific fields are
    -- overwritten below on every call.
    if (VALUE_CACHE[itemId]) then
        codes = VALUE_CACHE[itemId];
    else
        codes = OpCodes:Acquire();
        VALUE_CACHE[itemId] = codes;
        self:GetValues(itemId, codes);
    end
    if (not item.tsmId and item.link) then
        item.tsmId = Utils.GetID(item.link);
    end
    -- Vendor buy price is keyed by the id+bonus form; pets ("p:") keep type:id.
    local idBonusOnly = Utils.BonusID(item.tsmId or item.link or item.id, false, codes.bonuses);
    if (item.tsmId) then
        local t, id = strsplit(":", item.tsmId);
        if (t == "p") then
            idBonusOnly = t..":"..id;
        end
    end
    codes.buyout = buyout;
    codes.stacksize = stackSize;
    codes.quality = quality;
    codes.percent = percent;
    codes.ppu = ppu;
    codes.ilevel = ilvl;
    codes.vendorsell = item.vendorsell;
    codes.tsmId = item.tsmId;
    codes.id = item.id;
    codes.vendorbuy = Config.Vendor()[idBonusOnly] or VendorData[idBonusOnly] or 0;
    codes.isgroup = isGroup;
    local _, fn, err = false, nil, nil;
    local oq = q;
    -- Compile each unique query text once; cache the closure in OP_CACHE.
    if (not OP_CACHE[q]) then
        q = Utils.ReplaceOpShortHand(q);
        q = Utils.ReplaceShortHandPercent(q);
        q = Utils.ReplaceMoneyShorthand(q);
        q = Utils.ReplaceTabReturns(q);
        --print(q);
        if (not self:IsValidQuery(q)) then
            print("AnS Invalid Filter / Pricing String: "..q);
            return 0;
        end
        local pstr = string.format(TEMPLATE, self:GetVarString(), self:GetCVarString(), q);
        fn, err = loadstring(pstr);
        if(not fn or err) then
            print("AnS Filter / Pricing String Error: "..err);
            return 0;
        end
        _, fn = pcall(fn);
        if (not _ or not fn) then
            print("AnS Invalid Filter / Pricing String: "..q);
            return 0;
        end
        OP_CACHE[oq] = fn;
    else
        fn = OP_CACHE[oq];
    end
    if (not fn) then
        return 0;
    end
    local _, r = pcall(fn, self, codes);
    if (not _) then
        print("AnS Invalid Filter / Pricing String: "..q);
        return 0;
    end
    return r;
end
| 23.035323
| 301
| 0.561125
|
c6c859e228957d5a6b9263fe71b5e848dfcb362f
| 409
|
rb
|
Ruby
|
lib/permutations.rb
|
M-Burr/Hashmap-Questions
|
e7e65a787955261c24c0f0126e913fab79910619
|
[
"MIT"
] | null | null | null |
lib/permutations.rb
|
M-Burr/Hashmap-Questions
|
e7e65a787955261c24c0f0126e913fab79910619
|
[
"MIT"
] | null | null | null |
lib/permutations.rb
|
M-Burr/Hashmap-Questions
|
e7e65a787955261c24c0f0126e913fab79910619
|
[
"MIT"
] | null | null | null |
# Returns true when string2 is a permutation of string1 (same characters,
# same multiplicities), e.g. permutations?("mom", "omm") => true while
# permutations?("mom", "om") => false.
def permutations?(string1, string2)
  # Different lengths can never be permutations; cheap early exit.
  return false unless string1.length == string2.length

  # Count occurrences of each character in string1. Hash.new(0) removes the
  # manual nil-check/initialize dance the original used.
  counts = Hash.new(0)
  string1.each_char { |char| counts[char] += 1 }

  # Consume the counts with string2; a character absent from string1 fails fast.
  string2.each_char do |char|
    return false unless counts.key?(char)
    counts[char] -= 1
  end

  # All counts must return to zero (the redundant `? true : false` is gone —
  # all? already yields a boolean).
  counts.values.all?(&:zero?)
end
| 15.730769
| 55
| 0.562347
|
17ecd5be92b02b2e8d05c8b587c822651b19b12a
| 1,757
|
html
|
HTML
|
index.html
|
YaminaBoghossian/todolist
|
e9cab80498543a4a0afd7114d5934db0bcdcdf41
|
[
"MIT"
] | null | null | null |
index.html
|
YaminaBoghossian/todolist
|
e9cab80498543a4a0afd7114d5934db0bcdcdf41
|
[
"MIT"
] | null | null | null |
index.html
|
YaminaBoghossian/todolist
|
e9cab80498543a4a0afd7114d5934db0bcdcdf41
|
[
"MIT"
] | null | null | null |
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta http-equiv="X-UA-Compatible" content="ie=edge">
    <link rel="stylesheet" type="text/css" href="style.css" />
    <title>todolist</title>
</head>
<body>
    <h1> To do list </h1>
    <form>
        <fieldset>
            <label for="titre">TITLE :</label>
            <input type="text" name="nom" id="titre">
        </fieldset>
        <fieldset>
            <label for="liste">LISTE :</label>
            <input type="text" name="prenom" id="liste">
        </fieldset>
        <input type="submit" value="Envoyer" id="button"/>
    </form>
    <script>
        // Counter giving each todo entry a unique element id.
        let i = 0;
        let form = document.querySelector('form');

        // On submit: build a <div> holding the title, the list text and a
        // per-entry delete button, then append it to the page.
        form.addEventListener('submit', function (event) {
            event.preventDefault();
            i = i + 1;

            let titre = document.querySelector('#titre').value;
            let liste = document.querySelector('#liste').value;

            let div = document.createElement('div');
            div.id = 'div' + i;
            document.body.appendChild(div);

            // Fix: use textContent instead of innerHTML so user-typed text
            // cannot inject markup/scripts (XSS).
            let para1 = document.createElement('p');
            para1.textContent = titre;
            div.appendChild(para1);

            let para2 = document.createElement('p');
            para2.textContent = liste;
            div.appendChild(para2);

            let buttondel = document.createElement('button');
            buttondel.textContent = 'delete';
            buttondel.id = 'delete' + i;
            div.appendChild(buttondel);

            // Removes this entry's container. ChildNode.remove() takes no
            // arguments (the stray `remove(this)` argument was ignored).
            buttondel.addEventListener('click', function () {
                this.parentNode.remove();
            });
        });
    </script>
</body>
</html>
| 20.430233
| 71
| 0.652817
|
ce5ae5ae1c5ec17bb03e9fcea85ef21ed93c374a
| 472
|
lua
|
Lua
|
output/lua/AlienAtmos/Render.lua
|
Nintendows/ns2-rebirth
|
a55a9d91334e1af357600805a744ac13317dc29b
|
[
"MIT"
] | 1
|
2021-01-01T13:10:05.000Z
|
2021-01-01T13:10:05.000Z
|
output/lua/AlienAtmos/Render.lua
|
TheRealNin/ns2-rebirth
|
a55a9d91334e1af357600805a744ac13317dc29b
|
[
"MIT"
] | 24
|
2017-06-30T03:54:46.000Z
|
2018-06-15T22:07:45.000Z
|
output/lua/AlienAtmos/Render.lua
|
Nintendows/ns2-rebirth
|
a55a9d91334e1af357600805a744ac13317dc29b
|
[
"MIT"
] | 3
|
2017-07-02T18:37:12.000Z
|
2021-01-01T12:57:30.000Z
|
-- Hook Render_SyncRenderOptions to override a few client render settings.
-- NOTE: despite what the old header said, bloom and ambient occlusion are
-- forced OFF here; only particle quality is forced up (to "high").
local oldRender_SyncRenderOptions = Render_SyncRenderOptions
function Render_SyncRenderOptions()
    oldRender_SyncRenderOptions()
    -- force ambient occlusion to be off
    local ambient_occlusion = false
    Client.SetRenderSetting("ambient_occlusion", ToString(ambient_occlusion))
    -- force bloom to be off
    local bloom = false
    Client.SetRenderSetting("bloom" , ToString(bloom))
    -- force particle quality to high
    Client.SetRenderSetting("particles", "high")
end
| 31.466667
| 77
| 0.75
|
2b37f566c55772a860a2cac65e2e9439bc7e9013
| 573
|
rb
|
Ruby
|
lib/primes/cache.rb
|
miguelverissimo/primes
|
e22dfe0c1ce92742f91aa2eb8d1dc9607983ec01
|
[
"MIT"
] | null | null | null |
lib/primes/cache.rb
|
miguelverissimo/primes
|
e22dfe0c1ce92742f91aa2eb8d1dc9607983ec01
|
[
"MIT"
] | null | null | null |
lib/primes/cache.rb
|
miguelverissimo/primes
|
e22dfe0c1ce92742f91aa2eb8d1dc9607983ec01
|
[
"MIT"
] | null | null | null |
module Primes
  # File-backed cache of previously computed primes, one integer per line.
  class Cache
    # Default cache location, relative to the gem root.
    DEFAULT_PATH = '/cache/primes.txt'.freeze

    attr_accessor :file_path

    # path: optional explicit cache file; falls back to the gem's default.
    def initialize(path = nil)
      @file_path = path || default_path
    end

    def default_path
      Primes.gem_root + DEFAULT_PATH
    end

    # Reads the cached primes. Returns [] when no cache file exists yet.
    def load
      return [] unless File.exist?(file_path)

      # Fix: File.foreach closes the handle when iteration finishes; the
      # previous File.open(...).each_line leaked the file descriptor.
      File.foreach(file_path).map { |line| line.to_i }
    end

    # Overwrites the cache with the given collection, one element per line.
    def save!(collection)
      File.open(file_path, 'w+') do |f|
        f.puts(collection)
      end
    end
  end
end
| 17.90625
| 45
| 0.609075
|
909c90c1489637dbeb0636759132dd0877bbbd75
| 55,695
|
rs
|
Rust
|
services/mgmt/resources/src/package_locks_2016_09/operations.rs
|
urbanlogiq/azure-sdk-for-rust
|
13c9947d7ed61a674e829959c058cba2ea7f53f9
|
[
"MIT"
] | null | null | null |
services/mgmt/resources/src/package_locks_2016_09/operations.rs
|
urbanlogiq/azure-sdk-for-rust
|
13c9947d7ed61a674e829959c058cba2ea7f53f9
|
[
"MIT"
] | null | null | null |
services/mgmt/resources/src/package_locks_2016_09/operations.rs
|
urbanlogiq/azure-sdk-for-rust
|
13c9947d7ed61a674e829959c058cba2ea7f53f9
|
[
"MIT"
] | null | null | null |
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use snafu::{ResultExt, Snafu};
/// Operations for the `Microsoft.Authorization/operations` endpoint.
/// NOTE: this file is generated by AutoRust — prefer fixing the generator
/// over hand-editing.
pub mod authorization_operations {
    use crate::models::*;
    use snafu::{ResultExt, Snafu};
    /// Lists all available Microsoft.Authorization REST operations.
    ///
    /// Builds a GET against `/providers/Microsoft.Authorization/operations`,
    /// attaches a bearer token when a credential is configured, and
    /// deserializes a 200 response into `OperationListResult`; any other
    /// status becomes `list::Error::UnexpectedResponse`.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.Authorization/operations", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is optional: only added when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(list::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: OperationListResult =
                    serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                list::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                }
                .fail()
            }
        }
    }
    /// Error types for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            ParseUrlError { source: url::ParseError },
            BuildRequestError { source: http::Error },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
pub mod management_locks {
use crate::models::*;
use snafu::{ResultExt, Snafu};
    /// Fetches the management lock named `lock_name` at resource-group scope
    /// (GET on `.../resourceGroups/{rg}/providers/Microsoft.Authorization/locks/{name}`).
    /// A 200 response deserializes into `ManagementLockObject`; any other
    /// status becomes `UnexpectedResponse`.
    pub async fn get_at_resource_group_level(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        lock_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<ManagementLockObject, get_at_resource_group_level::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/locks/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            lock_name
        );
        let mut url = url::Url::parse(url_str).context(get_at_resource_group_level::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is optional: only added when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(get_at_resource_group_level::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(get_at_resource_group_level::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .context(get_at_resource_group_level::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ManagementLockObject =
                    serde_json::from_slice(rsp_body).context(get_at_resource_group_level::DeserializeError { body: rsp_body.clone() })?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                get_at_resource_group_level::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                }
                .fail()
            }
        }
    }
    /// Error types for [`get_at_resource_group_level`].
    pub mod get_at_resource_group_level {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            ParseUrlError { source: url::ParseError },
            BuildRequestError { source: http::Error },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// Creates or updates a management lock at resource-group scope (PUT).
    /// 200 vs. 201 responses are distinguished via the returned `Response` enum.
    ///
    /// NOTE(review): `parameters` is accepted but the request body is left as
    /// `azure_core::EMPTY_BODY` — the lock payload is never serialized into
    /// the PUT. This looks like an AutoRust generation gap; verify against
    /// the service spec and fix in the generator rather than by hand.
    pub async fn create_or_update_at_resource_group_level(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        lock_name: &str,
        parameters: &ManagementLockObject,
        subscription_id: &str,
    ) -> std::result::Result<create_or_update_at_resource_group_level::Response, create_or_update_at_resource_group_level::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/locks/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            lock_name
        );
        let mut url = url::Url::parse(url_str).context(create_or_update_at_resource_group_level::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Bearer auth is optional: only added when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(create_or_update_at_resource_group_level::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .context(create_or_update_at_resource_group_level::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .context(create_or_update_at_resource_group_level::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ManagementLockObject = serde_json::from_slice(rsp_body)
                    .context(create_or_update_at_resource_group_level::DeserializeError { body: rsp_body.clone() })?;
                Ok(create_or_update_at_resource_group_level::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: ManagementLockObject = serde_json::from_slice(rsp_body)
                    .context(create_or_update_at_resource_group_level::DeserializeError { body: rsp_body.clone() })?;
                Ok(create_or_update_at_resource_group_level::Response::Created201(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                create_or_update_at_resource_group_level::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`create_or_update_at_resource_group_level`].
    pub mod create_or_update_at_resource_group_level {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(ManagementLockObject),
            Created201(ManagementLockObject),
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            ParseUrlError { source: url::ParseError },
            BuildRequestError { source: http::Error },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// Deletes the management lock named `lock_name` at resource-group scope
    /// (DELETE). Both 200 and 204 are success, distinguished via `Response`.
    pub async fn delete_at_resource_group_level(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        lock_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<delete_at_resource_group_level::Response, delete_at_resource_group_level::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/locks/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            lock_name
        );
        let mut url = url::Url::parse(url_str).context(delete_at_resource_group_level::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Bearer auth is optional: only added when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(delete_at_resource_group_level::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .context(delete_at_resource_group_level::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .context(delete_at_resource_group_level::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::NO_CONTENT => Ok(delete_at_resource_group_level::Response::NoContent204),
            http::StatusCode::OK => Ok(delete_at_resource_group_level::Response::Ok200),
            status_code => {
                let rsp_body = rsp.body();
                delete_at_resource_group_level::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                }
                .fail()
            }
        }
    }
/// Response and error types for [`delete_at_resource_group_level`].
pub mod delete_at_resource_group_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Success outcomes: 204 (nothing to delete / already gone) and 200 (deleted).
    #[derive(Debug)]
    pub enum Response {
        NoContent204,
        Ok200,
    }
    /// Failure modes while building, authorizing, sending, or decoding the request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Fetch the management lock `lock_name` at an arbitrary scope.
///
/// Issues `GET {scope}/providers/Microsoft.Authorization/locks/{lock_name}` and
/// deserializes the 200 response into a `ManagementLockObject`.
pub async fn get_by_scope(
    operation_config: &crate::OperationConfig,
    scope: &str,
    lock_name: &str,
) -> std::result::Result<ManagementLockObject, get_by_scope::Error> {
    let client = operation_config.http_client();
    // Build the target URL, including the api-version query parameter.
    let endpoint = format!(
        "{}/{}/providers/Microsoft.Authorization/locks/{}",
        operation_config.base_path(),
        scope,
        lock_name
    );
    let mut target = url::Url::parse(endpoint.as_str()).context(get_by_scope::ParseUrlError)?;
    target
        .query_pairs_mut()
        .append_pair("api-version", operation_config.api_version());
    // Assemble the GET request; attach a bearer token when credentials are configured.
    let mut builder = http::request::Builder::new().method(http::Method::GET);
    if let Some(credential) = operation_config.token_credential() {
        let token = credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(get_by_scope::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token.token.secret()));
    }
    let request = builder
        .uri(target.as_str())
        .body(bytes::Bytes::from_static(azure_core::EMPTY_BODY))
        .context(get_by_scope::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .context(get_by_scope::ExecuteRequestError)?;
    // 200 carries the lock as JSON; everything else is surfaced with the raw body.
    match response.status() {
        http::StatusCode::OK => {
            let payload = response.body();
            serde_json::from_slice(payload).context(get_by_scope::DeserializeError { body: payload.clone() })
        }
        status_code => get_by_scope::UnexpectedResponse {
            status_code,
            body: response.body().clone(),
        }
        .fail(),
    }
}
/// Error type for [`get_by_scope`].
pub mod get_by_scope {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Failure modes while building, authorizing, sending, or decoding the request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Create or update a management lock at an arbitrary scope.
///
/// Sends `PUT {scope}/providers/Microsoft.Authorization/locks/{lock_name}` with
/// `parameters` serialized as the JSON request body. Returns `Ok200` when an
/// existing lock was updated and `Created201` when a new lock was created.
pub async fn create_or_update_by_scope(
    operation_config: &crate::OperationConfig,
    scope: &str,
    lock_name: &str,
    parameters: &ManagementLockObject,
) -> std::result::Result<create_or_update_by_scope::Response, create_or_update_by_scope::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/{}/providers/Microsoft.Authorization/locks/{}",
        operation_config.base_path(),
        scope,
        lock_name
    );
    let mut url = url::Url::parse(url_str).context(create_or_update_by_scope::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(create_or_update_by_scope::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    // BUG FIX: this operation previously sent an empty body, silently discarding
    // `parameters`. Serialize the lock definition as the JSON PUT payload instead.
    req_builder = req_builder.header(http::header::CONTENT_TYPE, "application/json");
    let req_body = azure_core::to_json(parameters).context(create_or_update_by_scope::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).context(create_or_update_by_scope::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .context(create_or_update_by_scope::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: ManagementLockObject =
                serde_json::from_slice(rsp_body).context(create_or_update_by_scope::DeserializeError { body: rsp_body.clone() })?;
            Ok(create_or_update_by_scope::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: ManagementLockObject =
                serde_json::from_slice(rsp_body).context(create_or_update_by_scope::DeserializeError { body: rsp_body.clone() })?;
            Ok(create_or_update_by_scope::Response::Created201(rsp_value))
        }
        status_code => {
            let rsp_body = rsp.body();
            create_or_update_by_scope::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            }
            .fail()
        }
    }
}
/// Response and error types for [`create_or_update_by_scope`].
pub mod create_or_update_by_scope {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Success payloads: 200 (lock updated) and 201 (lock created) both carry the lock object.
    #[derive(Debug)]
    pub enum Response {
        Ok200(ManagementLockObject),
        Created201(ManagementLockObject),
    }
    /// Failure modes while building, authorizing, sending, or decoding the request.
    /// `SerializeError` covers JSON-encoding the request body.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Delete the management lock `lock_name` at an arbitrary scope.
///
/// Issues `DELETE {scope}/providers/Microsoft.Authorization/locks/{lock_name}`;
/// 200 and 204 map onto the operation's `Response` variants.
pub async fn delete_by_scope(
    operation_config: &crate::OperationConfig,
    scope: &str,
    lock_name: &str,
) -> std::result::Result<delete_by_scope::Response, delete_by_scope::Error> {
    let client = operation_config.http_client();
    // Build the target URL, including the api-version query parameter.
    let endpoint = format!(
        "{}/{}/providers/Microsoft.Authorization/locks/{}",
        operation_config.base_path(),
        scope,
        lock_name
    );
    let mut target = url::Url::parse(endpoint.as_str()).context(delete_by_scope::ParseUrlError)?;
    target
        .query_pairs_mut()
        .append_pair("api-version", operation_config.api_version());
    // Assemble the DELETE request; attach a bearer token when credentials are configured.
    let mut builder = http::request::Builder::new().method(http::Method::DELETE);
    if let Some(credential) = operation_config.token_credential() {
        let token = credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(delete_by_scope::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token.token.secret()));
    }
    let request = builder
        .uri(target.as_str())
        .body(bytes::Bytes::from_static(azure_core::EMPTY_BODY))
        .context(delete_by_scope::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .context(delete_by_scope::ExecuteRequestError)?;
    // Any status other than 200/204 is surfaced as `UnexpectedResponse` with the raw body.
    match response.status() {
        http::StatusCode::OK => Ok(delete_by_scope::Response::Ok200),
        http::StatusCode::NO_CONTENT => Ok(delete_by_scope::Response::NoContent204),
        status_code => delete_by_scope::UnexpectedResponse {
            status_code,
            body: response.body().clone(),
        }
        .fail(),
    }
}
/// Response and error types for [`delete_by_scope`].
pub mod delete_by_scope {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Success outcomes: 204 (nothing to delete / already gone) and 200 (deleted).
    #[derive(Debug)]
    pub enum Response {
        NoContent204,
        Ok200,
    }
    /// Failure modes while building, authorizing, sending, or decoding the request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Fetch the management lock `lock_name` applied to a specific resource.
///
/// Issues a `GET` against the resource-scoped lock URL and deserializes the
/// 200 response into a `ManagementLockObject`.
pub async fn get_at_resource_level(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    resource_provider_namespace: &str,
    parent_resource_path: &str,
    resource_type: &str,
    resource_name: &str,
    lock_name: &str,
    subscription_id: &str,
) -> std::result::Result<ManagementLockObject, get_at_resource_level::Error> {
    let client = operation_config.http_client();
    // Build the target URL, including the api-version query parameter.
    let endpoint = format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}/providers/Microsoft.Authorization/locks/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        resource_provider_namespace,
        parent_resource_path,
        resource_type,
        resource_name,
        lock_name
    );
    let mut target = url::Url::parse(endpoint.as_str()).context(get_at_resource_level::ParseUrlError)?;
    target
        .query_pairs_mut()
        .append_pair("api-version", operation_config.api_version());
    // Assemble the GET request; attach a bearer token when credentials are configured.
    let mut builder = http::request::Builder::new().method(http::Method::GET);
    if let Some(credential) = operation_config.token_credential() {
        let token = credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(get_at_resource_level::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token.token.secret()));
    }
    let request = builder
        .uri(target.as_str())
        .body(bytes::Bytes::from_static(azure_core::EMPTY_BODY))
        .context(get_at_resource_level::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .context(get_at_resource_level::ExecuteRequestError)?;
    // 200 carries the lock as JSON; everything else is surfaced with the raw body.
    match response.status() {
        http::StatusCode::OK => {
            let payload = response.body();
            serde_json::from_slice(payload).context(get_at_resource_level::DeserializeError { body: payload.clone() })
        }
        status_code => get_at_resource_level::UnexpectedResponse {
            status_code,
            body: response.body().clone(),
        }
        .fail(),
    }
}
/// Error type for [`get_at_resource_level`].
pub mod get_at_resource_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Failure modes while building, authorizing, sending, or decoding the request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Create or update a management lock on a specific resource.
///
/// Sends a `PUT` against the resource-scoped lock URL with `parameters`
/// serialized as the JSON request body. Returns `Ok200` when an existing lock
/// was updated and `Created201` when a new lock was created.
pub async fn create_or_update_at_resource_level(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    resource_provider_namespace: &str,
    parent_resource_path: &str,
    resource_type: &str,
    resource_name: &str,
    lock_name: &str,
    parameters: &ManagementLockObject,
    subscription_id: &str,
) -> std::result::Result<create_or_update_at_resource_level::Response, create_or_update_at_resource_level::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}/providers/Microsoft.Authorization/locks/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        resource_provider_namespace,
        parent_resource_path,
        resource_type,
        resource_name,
        lock_name
    );
    let mut url = url::Url::parse(url_str).context(create_or_update_at_resource_level::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(create_or_update_at_resource_level::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    // BUG FIX: this operation previously sent an empty body, silently discarding
    // `parameters`. Serialize the lock definition as the JSON PUT payload instead.
    req_builder = req_builder.header(http::header::CONTENT_TYPE, "application/json");
    let req_body = azure_core::to_json(parameters).context(create_or_update_at_resource_level::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .context(create_or_update_at_resource_level::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .context(create_or_update_at_resource_level::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: ManagementLockObject = serde_json::from_slice(rsp_body)
                .context(create_or_update_at_resource_level::DeserializeError { body: rsp_body.clone() })?;
            Ok(create_or_update_at_resource_level::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: ManagementLockObject = serde_json::from_slice(rsp_body)
                .context(create_or_update_at_resource_level::DeserializeError { body: rsp_body.clone() })?;
            Ok(create_or_update_at_resource_level::Response::Created201(rsp_value))
        }
        status_code => {
            let rsp_body = rsp.body();
            create_or_update_at_resource_level::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            }
            .fail()
        }
    }
}
/// Response and error types for [`create_or_update_at_resource_level`].
pub mod create_or_update_at_resource_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Success payloads: 200 (lock updated) and 201 (lock created) both carry the lock object.
    #[derive(Debug)]
    pub enum Response {
        Ok200(ManagementLockObject),
        Created201(ManagementLockObject),
    }
    /// Failure modes while building, authorizing, sending, or decoding the request.
    /// `SerializeError` covers JSON-encoding the request body.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Delete the management lock `lock_name` applied to a specific resource.
///
/// Issues a `DELETE` against the resource-scoped lock URL; 200 and 204 map
/// onto the operation's `Response` variants.
pub async fn delete_at_resource_level(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    resource_provider_namespace: &str,
    parent_resource_path: &str,
    resource_type: &str,
    resource_name: &str,
    lock_name: &str,
    subscription_id: &str,
) -> std::result::Result<delete_at_resource_level::Response, delete_at_resource_level::Error> {
    let client = operation_config.http_client();
    // Build the target URL, including the api-version query parameter.
    let endpoint = format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}/providers/Microsoft.Authorization/locks/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        resource_provider_namespace,
        parent_resource_path,
        resource_type,
        resource_name,
        lock_name
    );
    let mut target = url::Url::parse(endpoint.as_str()).context(delete_at_resource_level::ParseUrlError)?;
    target
        .query_pairs_mut()
        .append_pair("api-version", operation_config.api_version());
    // Assemble the DELETE request; attach a bearer token when credentials are configured.
    let mut builder = http::request::Builder::new().method(http::Method::DELETE);
    if let Some(credential) = operation_config.token_credential() {
        let token = credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(delete_at_resource_level::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token.token.secret()));
    }
    let request = builder
        .uri(target.as_str())
        .body(bytes::Bytes::from_static(azure_core::EMPTY_BODY))
        .context(delete_at_resource_level::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .context(delete_at_resource_level::ExecuteRequestError)?;
    // Any status other than 200/204 is surfaced as `UnexpectedResponse` with the raw body.
    match response.status() {
        http::StatusCode::OK => Ok(delete_at_resource_level::Response::Ok200),
        http::StatusCode::NO_CONTENT => Ok(delete_at_resource_level::Response::NoContent204),
        status_code => delete_at_resource_level::UnexpectedResponse {
            status_code,
            body: response.body().clone(),
        }
        .fail(),
    }
}
/// Response and error types for [`delete_at_resource_level`].
pub mod delete_at_resource_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Success outcomes: 204 (nothing to delete / already gone) and 200 (deleted).
    #[derive(Debug)]
    pub enum Response {
        NoContent204,
        Ok200,
    }
    /// Failure modes while building, authorizing, sending, or decoding the request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Fetch the management lock `lock_name` scoped to a subscription.
///
/// Issues a `GET` against the subscription-scoped lock URL and deserializes
/// the 200 response into a `ManagementLockObject`.
pub async fn get_at_subscription_level(
    operation_config: &crate::OperationConfig,
    lock_name: &str,
    subscription_id: &str,
) -> std::result::Result<ManagementLockObject, get_at_subscription_level::Error> {
    let client = operation_config.http_client();
    // Build the target URL, including the api-version query parameter.
    let endpoint = format!(
        "{}/subscriptions/{}/providers/Microsoft.Authorization/locks/{}",
        operation_config.base_path(),
        subscription_id,
        lock_name
    );
    let mut target = url::Url::parse(endpoint.as_str()).context(get_at_subscription_level::ParseUrlError)?;
    target
        .query_pairs_mut()
        .append_pair("api-version", operation_config.api_version());
    // Assemble the GET request; attach a bearer token when credentials are configured.
    let mut builder = http::request::Builder::new().method(http::Method::GET);
    if let Some(credential) = operation_config.token_credential() {
        let token = credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(get_at_subscription_level::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token.token.secret()));
    }
    let request = builder
        .uri(target.as_str())
        .body(bytes::Bytes::from_static(azure_core::EMPTY_BODY))
        .context(get_at_subscription_level::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .context(get_at_subscription_level::ExecuteRequestError)?;
    // 200 carries the lock as JSON; everything else is surfaced with the raw body.
    match response.status() {
        http::StatusCode::OK => {
            let payload = response.body();
            serde_json::from_slice(payload).context(get_at_subscription_level::DeserializeError { body: payload.clone() })
        }
        status_code => get_at_subscription_level::UnexpectedResponse {
            status_code,
            body: response.body().clone(),
        }
        .fail(),
    }
}
/// Error type for [`get_at_subscription_level`].
pub mod get_at_subscription_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Failure modes while building, authorizing, sending, or decoding the request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Create or update a management lock at subscription scope.
///
/// Sends a `PUT` against the subscription-scoped lock URL with `parameters`
/// serialized as the JSON request body. Returns `Created201` when a new lock
/// was created and `Ok200` when an existing lock was updated.
pub async fn create_or_update_at_subscription_level(
    operation_config: &crate::OperationConfig,
    lock_name: &str,
    parameters: &ManagementLockObject,
    subscription_id: &str,
) -> std::result::Result<create_or_update_at_subscription_level::Response, create_or_update_at_subscription_level::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.Authorization/locks/{}",
        operation_config.base_path(),
        subscription_id,
        lock_name
    );
    let mut url = url::Url::parse(url_str).context(create_or_update_at_subscription_level::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(create_or_update_at_subscription_level::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    // BUG FIX: this operation previously sent an empty body, silently discarding
    // `parameters`. Serialize the lock definition as the JSON PUT payload instead.
    req_builder = req_builder.header(http::header::CONTENT_TYPE, "application/json");
    let req_body = azure_core::to_json(parameters).context(create_or_update_at_subscription_level::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .context(create_or_update_at_subscription_level::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .context(create_or_update_at_subscription_level::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: ManagementLockObject = serde_json::from_slice(rsp_body)
                .context(create_or_update_at_subscription_level::DeserializeError { body: rsp_body.clone() })?;
            Ok(create_or_update_at_subscription_level::Response::Created201(rsp_value))
        }
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: ManagementLockObject = serde_json::from_slice(rsp_body)
                .context(create_or_update_at_subscription_level::DeserializeError { body: rsp_body.clone() })?;
            Ok(create_or_update_at_subscription_level::Response::Ok200(rsp_value))
        }
        status_code => {
            let rsp_body = rsp.body();
            create_or_update_at_subscription_level::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            }
            .fail()
        }
    }
}
/// Response and error types for [`create_or_update_at_subscription_level`].
pub mod create_or_update_at_subscription_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Success payloads: 201 (lock created) and 200 (lock updated) both carry the lock object.
    #[derive(Debug)]
    pub enum Response {
        Created201(ManagementLockObject),
        Ok200(ManagementLockObject),
    }
    /// Failure modes while building, authorizing, sending, or decoding the request.
    /// `SerializeError` covers JSON-encoding the request body.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Delete the management lock `lock_name` scoped to a subscription.
///
/// Issues a `DELETE` against the subscription-scoped lock URL; 200 and 204 map
/// onto the operation's `Response` variants.
pub async fn delete_at_subscription_level(
    operation_config: &crate::OperationConfig,
    lock_name: &str,
    subscription_id: &str,
) -> std::result::Result<delete_at_subscription_level::Response, delete_at_subscription_level::Error> {
    let client = operation_config.http_client();
    // Build the target URL, including the api-version query parameter.
    let endpoint = format!(
        "{}/subscriptions/{}/providers/Microsoft.Authorization/locks/{}",
        operation_config.base_path(),
        subscription_id,
        lock_name
    );
    let mut target = url::Url::parse(endpoint.as_str()).context(delete_at_subscription_level::ParseUrlError)?;
    target
        .query_pairs_mut()
        .append_pair("api-version", operation_config.api_version());
    // Assemble the DELETE request; attach a bearer token when credentials are configured.
    let mut builder = http::request::Builder::new().method(http::Method::DELETE);
    if let Some(credential) = operation_config.token_credential() {
        let token = credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(delete_at_subscription_level::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token.token.secret()));
    }
    let request = builder
        .uri(target.as_str())
        .body(bytes::Bytes::from_static(azure_core::EMPTY_BODY))
        .context(delete_at_subscription_level::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .context(delete_at_subscription_level::ExecuteRequestError)?;
    // Any status other than 200/204 is surfaced as `UnexpectedResponse` with the raw body.
    match response.status() {
        http::StatusCode::OK => Ok(delete_at_subscription_level::Response::Ok200),
        http::StatusCode::NO_CONTENT => Ok(delete_at_subscription_level::Response::NoContent204),
        status_code => delete_at_subscription_level::UnexpectedResponse {
            status_code,
            body: response.body().clone(),
        }
        .fail(),
    }
}
/// Response and error types for [`delete_at_subscription_level`].
pub mod delete_at_subscription_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Success outcomes: 204 (nothing to delete / already gone) and 200 (deleted).
    #[derive(Debug)]
    pub enum Response {
        NoContent204,
        Ok200,
    }
    /// Failure modes while building, authorizing, sending, or decoding the request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// List the management locks of a resource group.
///
/// Issues a `GET` against the group-scoped locks collection, optionally
/// constrained by an OData `$filter`, and deserializes the 200 response into
/// a `ManagementLockListResult`.
pub async fn list_at_resource_group_level(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    filter: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<ManagementLockListResult, list_at_resource_group_level::Error> {
    let client = operation_config.http_client();
    // Build the target URL; api-version is always present, $filter only when supplied.
    let endpoint = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/locks",
        operation_config.base_path(),
        subscription_id,
        resource_group_name
    );
    let mut target = url::Url::parse(endpoint.as_str()).context(list_at_resource_group_level::ParseUrlError)?;
    target
        .query_pairs_mut()
        .append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        target.query_pairs_mut().append_pair("$filter", filter);
    }
    // Assemble the GET request; attach a bearer token when credentials are configured.
    let mut builder = http::request::Builder::new().method(http::Method::GET);
    if let Some(credential) = operation_config.token_credential() {
        let token = credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(list_at_resource_group_level::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token.token.secret()));
    }
    let request = builder
        .uri(target.as_str())
        .body(bytes::Bytes::from_static(azure_core::EMPTY_BODY))
        .context(list_at_resource_group_level::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .context(list_at_resource_group_level::ExecuteRequestError)?;
    // 200 carries the lock list as JSON; everything else is surfaced with the raw body.
    match response.status() {
        http::StatusCode::OK => {
            let payload = response.body();
            serde_json::from_slice(payload).context(list_at_resource_group_level::DeserializeError { body: payload.clone() })
        }
        status_code => list_at_resource_group_level::UnexpectedResponse {
            status_code,
            body: response.body().clone(),
        }
        .fail(),
    }
}
/// Error type for [`list_at_resource_group_level`].
pub mod list_at_resource_group_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Failure modes while building, authorizing, sending, or decoding the request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// List the management locks applied to a specific resource.
///
/// Issues a `GET` against the resource-scoped locks collection, optionally
/// constrained by an OData `$filter`, and deserializes the 200 response into
/// a `ManagementLockListResult`.
pub async fn list_at_resource_level(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    resource_provider_namespace: &str,
    parent_resource_path: &str,
    resource_type: &str,
    resource_name: &str,
    filter: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<ManagementLockListResult, list_at_resource_level::Error> {
    let client = operation_config.http_client();
    // Build the target URL; api-version is always present, $filter only when supplied.
    let endpoint = format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}/providers/Microsoft.Authorization/locks",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        resource_provider_namespace,
        parent_resource_path,
        resource_type,
        resource_name
    );
    let mut target = url::Url::parse(endpoint.as_str()).context(list_at_resource_level::ParseUrlError)?;
    target
        .query_pairs_mut()
        .append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        target.query_pairs_mut().append_pair("$filter", filter);
    }
    // Assemble the GET request; attach a bearer token when credentials are configured.
    let mut builder = http::request::Builder::new().method(http::Method::GET);
    if let Some(credential) = operation_config.token_credential() {
        let token = credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(list_at_resource_level::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token.token.secret()));
    }
    let request = builder
        .uri(target.as_str())
        .body(bytes::Bytes::from_static(azure_core::EMPTY_BODY))
        .context(list_at_resource_level::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .context(list_at_resource_level::ExecuteRequestError)?;
    // 200 carries the lock list as JSON; everything else is surfaced with the raw body.
    match response.status() {
        http::StatusCode::OK => {
            let payload = response.body();
            serde_json::from_slice(payload).context(list_at_resource_level::DeserializeError { body: payload.clone() })
        }
        status_code => list_at_resource_level::UnexpectedResponse {
            status_code,
            body: response.body().clone(),
        }
        .fail(),
    }
}
/// Error type for [`list_at_resource_level`].
pub mod list_at_resource_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// Failure modes while building, authorizing, sending, or decoding the request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// List the management locks of a subscription.
///
/// Issues a `GET` against the subscription-scoped locks collection, optionally
/// constrained by an OData `$filter`, and deserializes the 200 response into
/// a `ManagementLockListResult`.
pub async fn list_at_subscription_level(
    operation_config: &crate::OperationConfig,
    filter: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<ManagementLockListResult, list_at_subscription_level::Error> {
    let client = operation_config.http_client();
    // Build the target URL; api-version is always present, $filter only when supplied.
    let endpoint = format!(
        "{}/subscriptions/{}/providers/Microsoft.Authorization/locks",
        operation_config.base_path(),
        subscription_id
    );
    let mut target = url::Url::parse(endpoint.as_str()).context(list_at_subscription_level::ParseUrlError)?;
    target
        .query_pairs_mut()
        .append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        target.query_pairs_mut().append_pair("$filter", filter);
    }
    // Assemble the GET request; attach a bearer token when credentials are configured.
    let mut builder = http::request::Builder::new().method(http::Method::GET);
    if let Some(credential) = operation_config.token_credential() {
        let token = credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(list_at_subscription_level::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token.token.secret()));
    }
    let request = builder
        .uri(target.as_str())
        .body(bytes::Bytes::from_static(azure_core::EMPTY_BODY))
        .context(list_at_subscription_level::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .context(list_at_subscription_level::ExecuteRequestError)?;
    // 200 carries the lock list as JSON; everything else is surfaced with the raw body.
    match response.status() {
        http::StatusCode::OK => {
            let payload = response.body();
            serde_json::from_slice(payload).context(list_at_subscription_level::DeserializeError { body: payload.clone() })
        }
        status_code => list_at_subscription_level::UnexpectedResponse {
            status_code,
            body: response.body().clone(),
        }
        .fail(),
    }
}
/// Error types for [`list_at_subscription_level`].
pub mod list_at_subscription_level {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// One variant per stage of the request pipeline that can fail; `snafu`
    /// generates the `pub(crate)` context selectors used via `.context(...)`.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Lists every management lock under an arbitrary scope path
/// (e.g. a subscription, resource group, or resource id).
///
/// Sends `GET {base_path}/{scope}/providers/Microsoft.Authorization/locks`,
/// adding the configured `api-version` and an optional OData `$filter` as query
/// parameters, plus a bearer token when a credential is configured.
///
/// On HTTP 200 the body is deserialized into `ManagementLockListResult`; any
/// other status becomes `list_by_scope::Error::UnexpectedResponse` carrying
/// the raw response body.
pub async fn list_by_scope(
    operation_config: &crate::OperationConfig,
    scope: &str,
    filter: Option<&str>,
) -> std::result::Result<ManagementLockListResult, list_by_scope::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/{}/providers/Microsoft.Authorization/locks", operation_config.base_path(), scope);
    let mut url = url::Url::parse(url_str).context(list_by_scope::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Authentication is optional: only attach a bearer token when the
    // operation config carries a token credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(list_by_scope::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    // GET request: body is deliberately empty.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).context(list_by_scope::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.context(list_by_scope::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: ManagementLockListResult =
                serde_json::from_slice(rsp_body).context(list_by_scope::DeserializeError { body: rsp_body.clone() })?;
            Ok(rsp_value)
        }
        // Any non-200 status is surfaced verbatim for the caller to inspect.
        status_code => {
            let rsp_body = rsp.body();
            list_by_scope::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            }
            .fail()
        }
    }
}
/// Error types for [`list_by_scope`].
pub mod list_by_scope {
    use crate::{models, models::*};
    use snafu::Snafu;
    /// One variant per stage of the request pipeline that can fail; `snafu`
    /// generates the `pub(crate)` context selectors used via `.context(...)`.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        ParseUrlError { source: url::ParseError },
        BuildRequestError { source: http::Error },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
}
| 47.724936
| 137
| 0.600539
|
0db4b2ebee4e88619bd38e16d22ce7058eaf295e
| 450
|
rb
|
Ruby
|
lib/cellular/backends.rb
|
hyperoslo/cellular
|
1494769ad01e973971a5dcff80e76cdc9bc3c02c
|
[
"MIT"
] | 15
|
2015-03-02T15:45:36.000Z
|
2021-05-09T14:00:29.000Z
|
lib/cellular/backends.rb
|
hyperoslo/cellular
|
1494769ad01e973971a5dcff80e76cdc9bc3c02c
|
[
"MIT"
] | 21
|
2015-02-04T21:33:42.000Z
|
2021-04-30T09:55:06.000Z
|
lib/cellular/backends.rb
|
hyperoslo/cellular
|
1494769ad01e973971a5dcff80e76cdc9bc3c02c
|
[
"MIT"
] | 5
|
2015-01-24T07:54:06.000Z
|
2019-02-26T03:27:24.000Z
|
module Cellular
  # Namespace holding every SMS backend implementation. Each constant is
  # registered for autoload, so its file is only required on first reference.
  module Backends
    {
      Backend: 'cellular/backends/backend',
      CoolSMS: 'cellular/backends/cool_sms',
      Sendega: 'cellular/backends/sendega',
      Twilio: 'cellular/backends/twilio',
      LinkMobility: 'cellular/backends/link_mobility',
      Log: 'cellular/backends/log',
      Test: 'cellular/backends/test'
    }.each do |const_name, path|
      autoload const_name, path
    end
  end
end
| 34.615385
| 61
| 0.753333
|
b01b00dd0bae7b43941be663ac9978dbe5dcb57c
| 552
|
py
|
Python
|
ex79 whiletrue.py
|
joaoschweikart/python_projects
|
a30361551ec71ac3bef6d38e4b6ffc7bad21f1cc
|
[
"MIT"
] | null | null | null |
ex79 whiletrue.py
|
joaoschweikart/python_projects
|
a30361551ec71ac3bef6d38e4b6ffc7bad21f1cc
|
[
"MIT"
] | null | null | null |
ex79 whiletrue.py
|
joaoschweikart/python_projects
|
a30361551ec71ac3bef6d38e4b6ffc7bad21f1cc
|
[
"MIT"
] | null | null | null |
# Reads integers until the user chooses to stop, rejecting duplicates,
# then prints every distinct value in ascending order.
valores = []
while True:
    n = int(input('Digite um valor: '))
    if n not in valores:
        valores.append(n)
        print('Valor computado com sucesso.')
    else:
        print('Valor já digitado anteriormente.')
    # Bug fix: the original did `.strip()[0]`, which raises IndexError when
    # the user just presses Enter. `[:1]` yields '' instead, and the tuple
    # membership test (unlike `in 'SN'`, which accepts '') re-prompts on it.
    continuar = input('Quer continuar? [S/N]: ').strip().upper()[:1]
    while continuar not in ('S', 'N'):
        continuar = input('Código inválido. Quer continuar? [S/N]: ').strip().upper()[:1]
    if continuar == 'N':
        break
print(f'Esses são os valores que você digitou em ordem crescente: {sorted(valores)}')
| 30.666667
| 93
| 0.610507
|
58c2e9ab7afd41cc167f4d99c0d69294dbc826d7
| 2,288
|
php
|
PHP
|
src/Everon/View/Interfaces/View.php
|
oliwierptak/everon
|
ac93793d1fa517a8394db5f00062f1925dc218a3
|
[
"MIT"
] | 1
|
2016-04-03T01:51:38.000Z
|
2016-04-03T01:51:38.000Z
|
src/Everon/View/Interfaces/View.php
|
oliwierptak/everon
|
ac93793d1fa517a8394db5f00062f1925dc218a3
|
[
"MIT"
] | null | null | null |
src/Everon/View/Interfaces/View.php
|
oliwierptak/everon
|
ac93793d1fa517a8394db5f00062f1925dc218a3
|
[
"MIT"
] | null | null | null |
<?php
/**
 * This file is part of the Everon framework.
 *
 * (c) Oliwier Ptak <oliwierptak@gmail.com>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */
namespace Everon\View\Interfaces;

/**
 * Contract for a view component: a named object backed by a template
 * directory, holding a TemplateContainer of output data and able to
 * convert plain arrays into template-friendly ("templetized") objects.
 */
interface View extends \Everon\Interfaces\Dependency\GetUrl,
    \Everon\View\Interfaces\Dependency\Manager
{
    /**
     * @inheritdoc
     */
    function getName();

    /**
     * Replaces the view's container with one built from the given string.
     *
     * @param $value
     */
    function setContainerFromString($value);

    /**
     * @inheritdoc
     */
    function setTemplateDirectory($directory);

    /**
     * Returns the view's raw data.
     *
     * @return array
     */
    function getData();

    /**
     * File extension used by default when resolving template files.
     *
     * @return string
     */
    function getDefaultExtension();

    /**
     * @param $extension
     */
    function setDefaultExtension($extension);

    /**
     * Fetches a single data item by name.
     *
     * @param $name
     * @param mixed $default Returned when $name is not present
     * @return mixed
     */
    function get($name, $default = null);

    /**
     * Executes the named view action.
     *
     * @param $action
     * @return null
     */
    function execute($action);

    /**
     * Removes the named data item.
     *
     * @param $name
     */
    function delete($name);

    /**
     * @param $name
     */
    function setName($name);

    /**
     * @param array $data
     */
    function setData(array $data);

    /**
     * Looks up a template by name, binding the given data to it.
     *
     * @param $name
     * @param $data
     * @return Template|null
     */
    function getTemplate($name, $data);

    /**
     * @return \SplFileInfo
     */
    function getFilename();

    /**
     * @inheritdoc
     */
    function getTemplateDirectory();

    /**
     * @param TemplateContainer $Container
     */
    function setContainer(TemplateContainer $Container);

    /**
     * Stores a single data item under the given name.
     *
     * @param $name
     * @param $value
     */
    function set($name, $value);

    /**
     * @return Template|TemplateContainer
     * @throws \Everon\Exception\View
     */
    function getContainer();

    /**
     * @param array $data
     * @return \Everon\Helper\PopoProps
     */
    function templetize(array $data);

    /**
     * Applies templetize() to each element of the array.
     *
     * @param array $data
     * @return array
     */
    function templetizeArray(array $data);

    /**
     * @param array $data Array of items implementing Arrayable Interface
     * @return array Array of Helper\PopoProps objects
     */
    function templetizeArrayable(array $data);
}
| 18.304
| 74
| 0.568182
|
0d72a4de97acdada0f4442ca2a9cc69a00c7ce8d
| 1,033
|
c
|
C
|
piscine_reloaded/ex25/ft_foreach.c
|
vision-4/42
|
4bb97ed23061374b79bd52996a2270a936f31d27
|
[
"MIT"
] | 1
|
2018-11-17T20:32:40.000Z
|
2018-11-17T20:32:40.000Z
|
piscine_reloaded/ex25/ft_foreach.c
|
vision-4/42
|
4bb97ed23061374b79bd52996a2270a936f31d27
|
[
"MIT"
] | null | null | null |
piscine_reloaded/ex25/ft_foreach.c
|
vision-4/42
|
4bb97ed23061374b79bd52996a2270a936f31d27
|
[
"MIT"
] | null | null | null |
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ft_foreach.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: rodaniel <marvin@42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2018/11/05 18:28:56 by rodaniel #+# #+# */
/* Updated: 2018/11/06 21:49:12 by rodaniel ### ########.fr */
/* */
/* ************************************************************************** */
/*
** Applies the callback f to every element of tab, in index order.
**
** tab    - array of ints to traverse
** length - number of elements in tab
** f      - function invoked once per element
*/
void	ft_foreach(int *tab, int length, void (*f)(int))
{
	int	i;

	for (i = 0; i < length; i++)
		f(tab[i]);
}
| 43.041667
| 80
| 0.170378
|
c029fea2dcac73fa8f3438f6f2e9cc38ee6c5f91
| 2,161
|
cs
|
C#
|
FarsiLibrary.Utils/PersianWeekDayNames.cs
|
SKamalou/FarsiLibrary
|
83974a5909b7a172517ae583109889ea51f40cf5
|
[
"MIT"
] | 43
|
2015-03-12T06:19:40.000Z
|
2021-11-22T12:30:02.000Z
|
FarsiLibrary.Utils/PersianWeekDayNames.cs
|
majnouni/FarsiLibrary
|
553b2c5efefaf069f231603459d46f79cb557c66
|
[
"MIT"
] | 19
|
2016-11-29T08:04:54.000Z
|
2021-11-18T04:49:54.000Z
|
FarsiLibrary.Utils/PersianWeekDayNames.cs
|
majnouni/FarsiLibrary
|
553b2c5efefaf069f231603459d46f79cb557c66
|
[
"MIT"
] | 29
|
2015-06-08T06:32:12.000Z
|
2022-02-08T09:19:34.000Z
|
using System.Collections.Generic;
namespace FarsiLibrary.Utils
{
/// <summary>
/// Provides Persian week-day names and their one-letter abbreviations,
/// exposed as a process-wide singleton via <see cref="Default"/>.
/// </summary>
internal class PersianWeekDayNames
{
    #region fields
    // Full Persian week-day names.
    public string Shanbeh = "شنبه";
    public string Yekshanbeh = "یکشنبه";
    public string Doshanbeh = "دوشنبه";
    public string Seshanbeh = "ﺳﻪشنبه";
    public string Chaharshanbeh = "چهارشنبه";
    public string Panjshanbeh = "پنجشنبه";
    public string Jomeh = "جمعه";
    // Single-letter abbreviations for the names above.
    public string Sh = "ش";
    public string Ye = "ی";
    public string Do = "د";
    public string Se = "س";
    public string Ch = "چ";
    public string Pa = "پ";
    public string Jo = "ج";
    private readonly List<string> days;
    private readonly List<string> daysAbbr;
    // Bug fix: the original lazy `if (instance == null)` check was not
    // thread-safe and could construct two instances under concurrent first
    // access. A static initializer is guaranteed thread-safe by the runtime.
    private static readonly PersianWeekDayNames instance = new PersianWeekDayNames();
    #endregion
    #region Ctor
    private PersianWeekDayNames()
    {
        // NOTE(review): ordering starts at Yekshanbeh (Sunday-first);
        // presumably aligned with System.DayOfWeek — confirm with callers.
        days = new List<string>
        {
            Yekshanbeh,
            Doshanbeh,
            Seshanbeh,
            Chaharshanbeh,
            Panjshanbeh,
            Jomeh,
            Shanbeh,
        };
        daysAbbr = new List<string>
        {
            Ye,
            Do,
            Se,
            Ch,
            Pa,
            Jo,
            Sh,
        };
    }
    #endregion
    #region Indexer
    /// <summary>
    /// Shared singleton instance.
    /// </summary>
    public static PersianWeekDayNames Default
    {
        get { return instance; }
    }
    #endregion
    #region Props
    /// <summary>Full day names, Yekshanbeh first.</summary>
    internal List<string> Days
    {
        get { return days; }
    }
    /// <summary>Abbreviations in the same order as <see cref="Days"/>.</summary>
    internal List<string> DaysAbbr
    {
        get { return daysAbbr; }
    }
    #endregion
}
}
| 24.011111
| 57
| 0.40907
|
45532c87de5ae137df518027b280d3e8bdac66f8
| 250
|
py
|
Python
|
scripts/data_processing_scripts/check_intact.py
|
thomasly/slgnn
|
caa1e7814498da41ad025b4e62c569fe511848ff
|
[
"MIT"
] | 2
|
2020-08-31T00:55:31.000Z
|
2020-09-01T19:59:30.000Z
|
scripts/data_processing_scripts/check_intact.py
|
thomasly/slgnn
|
caa1e7814498da41ad025b4e62c569fe511848ff
|
[
"MIT"
] | null | null | null |
scripts/data_processing_scripts/check_intact.py
|
thomasly/slgnn
|
caa1e7814498da41ad025b4e62c569fe511848ff
|
[
"MIT"
] | null | null | null |
import os
def check(path):
    """Recursively print the names of files under ``path`` whose names do
    not end in ``.gz``; directories are descended into, never printed.
    """
    for entry in os.scandir(path):
        if entry.is_dir():
            check(entry.path)
        elif entry.is_file() and not entry.name.endswith(".gz"):
            print(entry.name)


if __name__ == "__main__":
    check(".")
| 20.833333
| 60
| 0.56
|
0aabbca4677bc2dcd8e8604e26b730cd4fa7da2a
| 2,392
|
cs
|
C#
|
Runtime/SharedResources/Scripts/BooleanTo1DAxisActionConfigurator.cs
|
ExtendRealityLtd/Tilia.Input.CombinedActions.Unity
|
5d42e2a742ba43b4f2a8ca94c40e6d067fcca1f5
|
[
"MIT"
] | 4
|
2020-06-08T15:31:02.000Z
|
2021-12-21T02:29:08.000Z
|
Runtime/SharedResources/Scripts/BooleanTo1DAxisActionConfigurator.cs
|
ExtendRealityLtd/Tilia.Input.CombinedActions.Unity
|
5d42e2a742ba43b4f2a8ca94c40e6d067fcca1f5
|
[
"MIT"
] | 64
|
2020-03-03T08:57:13.000Z
|
2022-03-15T21:54:44.000Z
|
Runtime/SharedResources/Scripts/BooleanTo1DAxisActionConfigurator.cs
|
ExtendRealityLtd/Tilia.Input.CombinedActions.Unity
|
5d42e2a742ba43b4f2a8ca94c40e6d067fcca1f5
|
[
"MIT"
] | null | null | null |
namespace Tilia.Input.CombinedActions
{
using Malimbe.PropertySerializationAttribute;
using Malimbe.XmlDocumentationAttribute;
using UnityEngine;
using Zinnia.Action;
using Zinnia.Data.Attribute;
/// <summary>
/// Sets up the BooleanTo1DAxisAction prefab based on the provided user settings.
/// Routes one <see cref="BooleanAction"/> into each end (negative/positive) of a 1D axis.
/// </summary>
public class BooleanTo1DAxisActionConfigurator : MonoBehaviour
{
    #region Axis Settings
    /// <summary>
    /// The <see cref="BooleanAction"/> that represents the negative direction of the Axis.
    /// </summary>
    [Serialized]
    [field: Header("Axis Settings"), DocumentedByXml, Restricted]
    public BooleanAction NegativeInput { get; set; }
    /// <summary>
    /// The <see cref="BooleanAction"/> that represents the positive direction of the Axis.
    /// </summary>
    [Serialized]
    [field: DocumentedByXml, Restricted]
    public BooleanAction PositiveInput { get; set; }
    #endregion
    /// <summary>
    /// Makes the given <see cref="source"/> the sole entry of the <see cref="PositiveInput"/> Sources collection
    /// (any previously registered sources are cleared first).
    /// </summary>
    /// <param name="source">The <see cref="BooleanAction"/> to add to the Sources collection.</param>
    public virtual void SetPositiveInputSource(BooleanAction source)
    {
        SetInputSource(source, PositiveInput);
    }
    /// <summary>
    /// Makes the given <see cref="source"/> the sole entry of the <see cref="NegativeInput"/> Sources collection
    /// (any previously registered sources are cleared first).
    /// </summary>
    /// <param name="source">The <see cref="BooleanAction"/> to add to the Sources collection.</param>
    public virtual void SetNegativeInputSource(BooleanAction source)
    {
        SetInputSource(source, NegativeInput);
    }
    /// <summary>
    /// Replaces the Sources collection of the <paramref name="target"/> with the single <paramref name="source"/>.
    /// </summary>
    /// <param name="source">The <see cref="BooleanAction"/> to add as a source.</param>
    /// <param name="target">The <see cref="BooleanAction"/> to have the Sources collection updated.</param>
    protected virtual void SetInputSource(BooleanAction source, BooleanAction target)
    {
        // Clear-then-add makes the call idempotent: repeated configuration
        // never accumulates duplicate sources.
        target.ClearSources();
        target.AddSource(source);
    }
}
}
| 41.241379
| 133
| 0.625836
|
cd431ebf1916247a534027d94fcafd102d94ce76
| 5,647
|
cs
|
C#
|
Dolhouse/Dolhouse/Type/Vec3.cs
|
opeyx/Dolhouse
|
f9b4b605e2f5bf8e078fdfd95a4bf23df009917b
|
[
"Unlicense"
] | 4
|
2019-09-06T22:44:16.000Z
|
2021-05-10T01:03:34.000Z
|
Dolhouse/Dolhouse/Type/Vec3.cs
|
opeyx/Dolhouse
|
f9b4b605e2f5bf8e078fdfd95a4bf23df009917b
|
[
"Unlicense"
] | null | null | null |
Dolhouse/Dolhouse/Type/Vec3.cs
|
opeyx/Dolhouse
|
f9b4b605e2f5bf8e078fdfd95a4bf23df009917b
|
[
"Unlicense"
] | 1
|
2019-09-05T13:33:14.000Z
|
2019-09-05T13:33:14.000Z
|
namespace Dolhouse.Type
{
/// <summary>
/// Custom 3-component float vector with mutable components and basic
/// vector algebra helpers.
/// </summary>
public class Vec3
{
    #region Properties

    /// <summary>
    /// The X value of the Vec3.
    /// </summary>
    public float X { get; set; }

    /// <summary>
    /// The Y value of the Vec3.
    /// </summary>
    public float Y { get; set; }

    /// <summary>
    /// The Z value of the Vec3.
    /// </summary>
    public float Z { get; set; }

    /// <summary>
    /// Gets the length (magnitude) of the Vec3. Recomputed on every access.
    /// </summary>
    public float Length
    {
        get
        {
            return (float)System.Math.Sqrt(X * X + Y * Y + Z * Z);
        }
    }
    #endregion

    /// <summary>
    /// Initialize a new Vec3 with all components zero.
    /// </summary>
    public Vec3()
    {
        X = 0.0f;
        Y = 0.0f;
        Z = 0.0f;
    }

    /// <summary>
    /// Initialize a new Vec3 by a single value.
    /// </summary>
    /// <param name="value">Value to set X, Y and Z to.</param>
    public Vec3(float value)
    {
        X = value;
        Y = value;
        Z = value;
    }

    /// <summary>
    /// Initialize a new Vec3 by three values.
    /// </summary>
    /// <param name="x">Value to set X to.</param>
    /// <param name="y">Value to set Y to.</param>
    /// <param name="z">Value to set Z to.</param>
    public Vec3(float x, float y, float z)
    {
        X = x;
        Y = y;
        Z = z;
    }

    /// <summary>
    /// Defines a unit-length Vec3 that points towards the X-axis.
    /// NOTE(review): mutable static field kept for interface compatibility.
    /// </summary>
    public static Vec3 UnitX = new Vec3(1, 0, 0);

    /// <summary>
    /// Defines a unit-length Vec3 that points towards the Y-axis.
    /// </summary>
    public static Vec3 UnitY = new Vec3(0, 1, 0);

    /// <summary>
    /// Defines a unit-length Vec3 that points towards the Z-axis.
    /// </summary>
    public static Vec3 UnitZ = new Vec3(0, 0, 1);

    /// <summary>
    /// Return the Vec3 as a string.
    /// </summary>
    /// <returns>The Vec3 formatted as a string.</returns>
    public override string ToString()
    {
        return "(" + X.ToString("n6") + ", " + Y.ToString("n6") + ", " + Z.ToString("n6") + ")";
    }

    #region Static Methods

    /// <summary>
    /// Scale a vector to unit length, mutating and returning the input instance.
    /// </summary>
    /// <param name="vec">The input vector; modified in place.</param>
    /// <returns>The same instance, normalized.</returns>
    public static Vec3 Normalize(Vec3 vec)
    {
        // Bug fix: capture the inverse length once. The previous version
        // re-read vec.Length after mutating X (and then Y), so each
        // component was scaled by a different factor and the result was
        // not unit length. A zero-length input yields non-finite components.
        float invLength = 1.0f / vec.Length;
        vec.X *= invLength;
        vec.Y *= invLength;
        vec.Z *= invLength;
        return vec;
    }

    /// <summary>
    /// Adds two vectors component-wise.
    /// </summary>
    /// <param name="left">First operand.</param>
    /// <param name="right">Second operand.</param>
    /// <returns>Result of the addition.</returns>
    public static Vec3 Add(Vec3 left, Vec3 right)
    {
        return new Vec3(left.X + right.X, left.Y + right.Y, left.Z + right.Z);
    }

    /// <summary>
    /// Subtracts two vectors component-wise.
    /// </summary>
    /// <param name="left">First operand.</param>
    /// <param name="right">Second operand.</param>
    /// <returns>Result of the subtraction.</returns>
    public static Vec3 Subtract(Vec3 left, Vec3 right)
    {
        return new Vec3(left.X - right.X, left.Y - right.Y, left.Z - right.Z);
    }

    /// <summary>
    /// Multiplies a vector by the components of a vector (scale).
    /// </summary>
    /// <param name="left">First operand.</param>
    /// <param name="right">Second operand.</param>
    /// <returns>Result of the component-wise multiplication.</returns>
    public static Vec3 Multiply(Vec3 left, Vec3 right)
    {
        return new Vec3(left.X * right.X, left.Y * right.Y, left.Z * right.Z);
    }

    /// <summary>
    /// Divides a vector by the components of a vector (scale).
    /// </summary>
    /// <param name="left">First operand.</param>
    /// <param name="right">Second operand.</param>
    /// <returns>Result of the component-wise division.</returns>
    public static Vec3 Divide(Vec3 left, Vec3 right)
    {
        return new Vec3(left.X / right.X, left.Y / right.Y, left.Z / right.Z);
    }

    /// <summary>
    /// Calculate the dot (scalar) product of two vectors.
    /// </summary>
    /// <param name="left">First operand.</param>
    /// <param name="right">Second operand.</param>
    /// <returns>The dot product of the two inputs.</returns>
    public static float Dot(Vec3 left, Vec3 right)
    {
        return left.X * right.X + left.Y * right.Y + left.Z * right.Z;
    }

    /// <summary>
    /// Calculate the cross (vector) product of two vectors.
    /// </summary>
    /// <param name="left">First operand.</param>
    /// <param name="right">Second operand.</param>
    /// <returns>The cross product of the two inputs.</returns>
    public static Vec3 Cross(Vec3 left, Vec3 right)
    {
        return new Vec3(
            (left.Y * right.Z - left.Z * right.Y),
            (left.Z * right.X - left.X * right.Z),
            (left.X * right.Y - left.Y * right.X)
        );
    }
    #endregion
}
}
| 30.037234
| 100
| 0.48114
|
b0e77df40b6d2ed88b1a00c491469a3d3df0439e
| 442
|
py
|
Python
|
cottonwood/experimental/initializers.py
|
ShahriyarR/cottonwood
|
345f363a0c17caaac8aba0eb0dac86ee47d3ac2b
|
[
"MIT"
] | null | null | null |
cottonwood/experimental/initializers.py
|
ShahriyarR/cottonwood
|
345f363a0c17caaac8aba0eb0dac86ee47d3ac2b
|
[
"MIT"
] | null | null | null |
cottonwood/experimental/initializers.py
|
ShahriyarR/cottonwood
|
345f363a0c17caaac8aba0eb0dac86ee47d3ac2b
|
[
"MIT"
] | null | null | null |
import numpy as np
class Uniform(object):
    """Weight initializer drawing samples uniformly from [-scale, scale]."""

    def __init__(self, scale=.2):
        # Half-width of the symmetric sampling interval.
        self.scale = scale

    def __str__(self):
        return f"Uniform distribution on [{-self.scale}, {self.scale}]"

    def initialize(self, n_rows, n_cols):
        """Return an (n_rows, n_cols) array of uniformly sampled weights."""
        bound = self.scale
        return np.random.uniform(low=-bound, high=bound, size=(n_rows, n_cols))
| 22.1
| 71
| 0.570136
|
388754f1e452b5b21f40bc8c8429962571585bb6
| 7,249
|
swift
|
Swift
|
Analog-to-Digital Conversion.playgroundbook/Contents/Modules/Book.playgroundmodule/Sources/Internals/R2RCircuitView.swift
|
kronawetter/analog-to-digital-conversion
|
31a8197a5abf57a6acbae11a591fc823159e7e0b
|
[
"MIT"
] | 29
|
2020-06-17T15:55:26.000Z
|
2021-03-31T05:37:20.000Z
|
Analog-to-Digital Conversion.playgroundbook/Contents/Modules/Book.playgroundmodule/Sources/Internals/R2RCircuitView.swift
|
kronawetter/analog-to-digital-conversion
|
31a8197a5abf57a6acbae11a591fc823159e7e0b
|
[
"MIT"
] | null | null | null |
Analog-to-Digital Conversion.playgroundbook/Contents/Modules/Book.playgroundmodule/Sources/Internals/R2RCircuitView.swift
|
kronawetter/analog-to-digital-conversion
|
31a8197a5abf57a6acbae11a591fc823159e7e0b
|
[
"MIT"
] | 1
|
2020-06-17T21:12:46.000Z
|
2020-06-17T21:12:46.000Z
|
//
// R2RCircuitView.swift
// analog
//
// Created by Philip Kronawetter on 2020-05-10.
// Copyright © 2020 Philip Kronawetter. All rights reserved.
//
import UIKit
/// Draws an R-2R resistor-ladder DAC schematic for `resolution` bits.
/// Each bit has a switch that can point at the reference voltage or at
/// ground; resistor and bit labels are positioned over the ladder artwork.
class R2RCircuitView: UIView {
    // Pre-tinted template artwork for the ladder segments and switch states.
    private static let firstBitImage = UIImage(named: "R2RFirstBit")!.withTintColor(.label)
    private static let lastBitImage = UIImage(named: "R2RLastBit")!.withTintColor(.label)
    private static let bitImage = UIImage(named: "R2RBit")!.withTintColor(.label)
    private static let switchReferenceImage = UIImage(named: "R2RSwitchReference")!.withTintColor(.systemRed)
    private static let switchGroundImage = UIImage(named: "R2RSwitchGround")!.withTintColor(.label)
    private let firstBitImageView: UIImageView
    private let lastBitImageView: UIImageView
    private var bitImageViews: [UIImageView]
    private var switchImageViews: [UIImageView]
    private let referenceVoltageLabel: UILabel
    private let outputVoltageLabel: UILabel
    private var horizontalResistorLabels: [UILabel]
    private var verticalResistorLabels: [UILabel]
    private var switchLabels: [UILabel]
    // Negative spacing so adjacent segment images overlap and the wires join.
    let xSpacingBetweenImageViews: CGFloat = -18.0
    /// Number of bits in the ladder. Setting it tears down and rebuilds all
    /// per-bit subviews, then invalidates layout and intrinsic content size.
    var resolution: Int {
        didSet {
            bitImageViews.forEach { $0.removeFromSuperview() }
            switchImageViews.forEach { $0.removeFromSuperview() }
            horizontalResistorLabels.forEach { $0.removeFromSuperview() }
            verticalResistorLabels.forEach { $0.removeFromSuperview() }
            switchLabels.forEach { $0.removeFromSuperview() }
            // I don’t know why, but this fixes a weird issue where the images of the `bitImageViews` and `switchImageViews` members have an incorrect tint color when opening Playground page, enabling/disabling dark mode and then pressing run.
            bitImageViews = (0..<(resolution - 2)).map { _ in
                let imageView = UIImageView(image: UIImage(named: "R2RBit")!.withRenderingMode(.alwaysTemplate))
                imageView.tintColor = .label
                return imageView
            }
            //bitImageViews = (0..<(resolution - 2)).map { _ in UIImageView(image: Self.bitImage) }
            switchImageViews = (0..<resolution).map { _ in UIImageView(image: Self.switchGroundImage) }
            horizontalResistorLabels = (0..<(resolution - 1)).map { _ in
                let label = UILabel(frame: .zero)
                label.text = "R"
                label.textAlignment = .center
                label.textColor = .label
                label.font = .systemFont(ofSize: 13.0)
                label.sizeToFit()
                return label
            }
            // One extra vertical 2R label: the ladder terminates with a 2R leg.
            verticalResistorLabels = (0...resolution).map { _ in
                let label = UILabel(frame: .zero)
                label.text = "2R"
                label.textColor = .label
                label.font = .systemFont(ofSize: 13.0)
                label.sizeToFit()
                return label
            }
            switchLabels = (0..<resolution).map { index in
                let label = UILabel(frame: .zero)
                label.text = "Bit \(index)"
                label.textColor = .label
                label.font = .systemFont(ofSize: 13.0)
                label.sizeToFit()
                return label
            }
            bitImageViews.forEach { addSubview($0) }
            switchImageViews.forEach { addSubview($0) }
            horizontalResistorLabels.forEach { addSubview($0) }
            verticalResistorLabels.forEach { addSubview($0) }
            switchLabels.forEach { addSubview($0) }
            setNeedsLayout()
            invalidateIntrinsicContentSize()
        }
    }
    /// Where a bit's switch points.
    enum SwitchPosition {
        case reference
        case ground
    }
    /// Updates the switch image and label color for the bit at `index`.
    /// The reference position is highlighted in red; ground uses the label color.
    func setSwitch(at index: Int, to position: SwitchPosition) {
        switch position {
        case .reference:
            switchImageViews[index].image = Self.switchReferenceImage
            switchLabels[index].textColor = .systemRed
        case .ground:
            switchImageViews[index].image = Self.switchGroundImage
            switchLabels[index].textColor = .label
        }
    }
    /// Creates the view with all subviews for the requested bit count.
    /// All switches start in the ground position.
    init(resolution: Int) {
        self.resolution = resolution
        firstBitImageView = UIImageView(image: Self.firstBitImage)
        bitImageViews = (0..<(resolution - 2)).map { _ in UIImageView(image: Self.bitImage) }
        lastBitImageView = UIImageView(image: Self.lastBitImage)
        switchImageViews = (0..<resolution).map { _ in UIImageView(image: Self.switchGroundImage) }
        referenceVoltageLabel = UILabel(frame: .zero)
        referenceVoltageLabel.text = "Maximum Voltage"
        referenceVoltageLabel.textColor = .label
        referenceVoltageLabel.font = .systemFont(ofSize: 13.0)
        referenceVoltageLabel.sizeToFit()
        outputVoltageLabel = UILabel(frame: .zero)
        outputVoltageLabel.text = "Output Voltage"
        outputVoltageLabel.textColor = .label
        outputVoltageLabel.font = .systemFont(ofSize: 13.0)
        outputVoltageLabel.sizeToFit()
        horizontalResistorLabels = (0..<(resolution - 1)).map { _ in
            let label = UILabel(frame: .zero)
            label.text = "R"
            label.textAlignment = .center
            label.textColor = .label
            label.font = .systemFont(ofSize: 13.0)
            label.sizeToFit()
            return label
        }
        verticalResistorLabels = (0...resolution).map { _ in
            let label = UILabel(frame: .zero)
            label.text = "2R"
            label.textColor = .label
            label.font = .systemFont(ofSize: 13.0)
            label.sizeToFit()
            return label
        }
        switchLabels = (0..<resolution).map { index in
            let label = UILabel(frame: .zero)
            label.text = "Bit \(index)"
            label.textColor = .label
            label.font = .systemFont(ofSize: 13.0)
            label.sizeToFit()
            return label
        }
        super.init(frame: .zero)
        addSubview(firstBitImageView)
        addSubview(lastBitImageView)
        bitImageViews.forEach { addSubview($0) }
        switchImageViews.forEach { addSubview($0) }
        addSubview(referenceVoltageLabel)
        addSubview(outputVoltageLabel)
        horizontalResistorLabels.forEach { addSubview($0) }
        verticalResistorLabels.forEach { addSubview($0) }
        switchLabels.forEach { addSubview($0) }
        setNeedsLayout()
        invalidateIntrinsicContentSize()
    }
    /// Chains the segment images left-to-right with overlapping joints, then
    /// places switches and labels at fixed per-bit offsets.
    /// NOTE(review): the 70-point bit pitch and the other constants are tuned
    /// to the artwork dimensions — confirm against the image assets.
    override func layoutSubviews() {
        firstBitImageView.frame.origin = .zero
        var maxXOfPreviousImageView = firstBitImageView.frame.maxX
        bitImageViews.enumerated().forEach { index, view in
            view.frame.origin = CGPoint(x: maxXOfPreviousImageView + xSpacingBetweenImageViews, y: 0.0)
            maxXOfPreviousImageView = view.frame.maxX
        }
        lastBitImageView.frame.origin = CGPoint(x: maxXOfPreviousImageView + xSpacingBetweenImageViews, y: 0.0)
        switchImageViews.enumerated().forEach { index, view in
            view.frame.origin = CGPoint(x: 98.0 + 70.0 * CGFloat(index) - view.frame.width, y: 0.0)
        }
        referenceVoltageLabel.frame.origin = CGPoint(x: 4.0, y: bounds.height - referenceVoltageLabel.frame.height)
        outputVoltageLabel.frame.origin = CGPoint(x: bounds.width - outputVoltageLabel.frame.width, y: 2.0)
        horizontalResistorLabels.enumerated().forEach { index, view in
            view.frame.size.width = 30.0
            view.frame.origin = CGPoint(x: 95.0 + 70.0 * CGFloat(index), y: 0.0)
        }
        verticalResistorLabels.enumerated().forEach { index, view in
            view.frame.size.height = 30.0
            view.frame.origin = CGPoint(x: 18.0 + 70.0 * CGFloat(index), y: 45.0)
        }
        switchLabels.enumerated().forEach { index, view in
            view.frame.origin = CGPoint(x: 63.0 + 70.0 * CGFloat(index) - view.frame.width, y: 88.0)
        }
        super.layoutSubviews()
    }
    /// Total artwork width (sum of segment widths minus the overlaps) by the
    /// first segment's height.
    override var intrinsicContentSize: CGSize {
        let width = Self.firstBitImage.size.width + Self.lastBitImage.size.width + Self.bitImage.size.width * CGFloat(resolution - 2) + xSpacingBetweenImageViews * CGFloat(resolution - 1)
        let height = Self.firstBitImage.size.height
        return CGSize(width: width, height: height)
    }
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
| 34.032864
| 235
| 0.726721
|
e25a9afae987cc94f4ce638e0b05b367a5b8037f
| 4,724
|
py
|
Python
|
improved_gan/imagenet/train.py
|
AliengirlLiv/gan-image-metrics
|
d16fbf4bb4e7b70e87646dc618586519761b13eb
|
[
"MIT"
] | 1
|
2021-01-11T12:32:45.000Z
|
2021-01-11T12:32:45.000Z
|
improved_gan/imagenet/train.py
|
AliengirlLiv/gan-image-metrics
|
d16fbf4bb4e7b70e87646dc618586519761b13eb
|
[
"MIT"
] | 1
|
2019-01-02T21:12:00.000Z
|
2019-01-02T21:12:00.000Z
|
improved_gan/imagenet/train.py
|
AliengirlLiv/gan-image-metrics
|
d16fbf4bb4e7b70e87646dc618586519761b13eb
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
from model import save_images
import time
import numpy as np
def train(self, config):
    """Train DCGAN.

    Runs the alternating discriminator/generator update loop until the
    input queue raises OutOfRangeError (epoch limit reached). Periodically
    logs losses, writes fixed-z sample image grids, and checkpoints.
    Python 2 / legacy TensorFlow APIs (xrange, print statements,
    tf.initialize_all_variables) are used throughout.
    """
    d_optim = self.d_optim
    g_optim = self.g_optim
    tf.initialize_all_variables().run()
    self.saver = tf.train.Saver()
    #self.g_sum = tf.merge_summary([#self.z_sum,
    #                               self.d__sum,
    #                               self.G_sum, self.d_loss_fake_sum, self.g_loss_sum])
    # self.d_sum = tf.merge_summary([#self.z_sum,
    #     self.d_sum, self.d_loss_real_sum, self.d_loss_sum])
    self.writer = tf.train.SummaryWriter("./logs", self.sess.graph_def)
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coord)
    # Hang onto a copy of z so we can feed the same one every time we store
    # samples to disk for visualization
    assert self.sample_size > self.batch_size
    assert self.sample_size % self.batch_size == 0
    sample_z = []
    steps = self.sample_size // self.batch_size
    assert steps > 0
    sample_zs = []
    # Draw `steps` fixed z batches, then concatenate them per z-placeholder
    # so each entry covers the full sample_size.
    for i in xrange(steps):
        cur_zs = self.sess.run(self.zses[0])
        assert all(z.shape[0] == self.batch_size for z in cur_zs)
        sample_zs.append(cur_zs)
    sample_zs = [np.concatenate([batch[i] for batch in sample_zs], axis=0) for i in xrange(len(sample_zs[0]))]
    assert all(sample_z.shape[0] == self.sample_size for sample_z in sample_zs)
    counter = 1
    if self.load(self.checkpoint_dir):
        print(" [*] Load SUCCESS")
    else:
        print(" [!] Load failed...")
    # Wall-clock timers driving throttled logging / sampling / checkpointing.
    start_time = time.time()
    print_time = time.time()
    sample_time = time.time()
    save_time = time.time()
    idx = 0
    try:
        while not coord.should_stop():
            idx += 1
            batch_start_time = time.time()
            """
            batch_images = self.images.eval()
            from pylearn2.utils.image import save
            for i in xrange(self.batch_size):
                save("train_image_%d.png" % i, batch_images[i, :, :, :] / 2. + 0.5)
            """
            #for i in xrange(3):
            #    self.sess.run([d_optim], feed_dict=feed_dict)
            # Single combined step: run both optimizers and fetch all losses.
            _d_optim, _d_sum, \
            _g_optim, \
            errD_fake, errD_real, errD_class, \
            errG = self.sess.run([d_optim, self.d_sum,
                                  g_optim, # self.g_sum,
                                  self.d_loss_fakes[0],
                                  self.d_loss_reals[0],
                                  self.d_loss_classes[0],
                                  self.g_losses[0]])
            counter += 1
            # Console logging, at most roughly every 15 seconds.
            if time.time() - print_time > 15.:
                print_time = time.time()
                total_time = print_time - start_time
                d_loss = errD_fake + errD_real + errD_class
                sec_per_batch = (print_time - start_time) / (idx + 1.)
                sec_this_batch = print_time - batch_start_time
                print "[Batch %(idx)d] time: %(total_time)4.4f, d_loss: %(d_loss).8f, g_loss: %(errG).8f, d_loss_real: %(errD_real).8f, d_loss_fake: %(errD_fake).8f, d_loss_class: %(errD_class).8f, sec/batch: %(sec_per_batch)4.4f, sec/this batch: %(sec_this_batch)4.4f" \
                    % locals()
            # Sample frequently during the first 300 batches, then roughly
            # every 5 minutes; the same fixed z is fed every time.
            if (idx < 300 and idx % 10 == 0) or time.time() - sample_time > 300:
                sample_time = time.time()
                samples = []
                # generator hard codes the batch size
                for i in xrange(self.sample_size // self.batch_size):
                    feed_dict = {}
                    for z, zv in zip(self.zses[0], sample_zs):
                        if zv.ndim == 2:
                            feed_dict[z] = zv[i*self.batch_size:(i+1)*self.batch_size, :]
                        elif zv.ndim == 4:
                            feed_dict[z] = zv[i*self.batch_size:(i+1)*self.batch_size, :, :, :]
                        else:
                            assert False
                    cur_samples, = self.sess.run(
                        [self.Gs[0]],
                        feed_dict=feed_dict
                    )
                    samples.append(cur_samples)
                samples = np.concatenate(samples, axis=0)
                assert samples.shape[0] == self.sample_size
                save_images(samples, [8, 8],
                            self.sample_dir + '/train_%s.png' % ( idx))
            # Checkpoint at most once per hour.
            if time.time() - save_time > 3600:
                save_time = time.time()
                self.save(config.checkpoint_dir, counter)
    except tf.errors.OutOfRangeError:
        print "Done training; epoch limit reached."
    finally:
        coord.request_stop()
    coord.join(threads)
    # sess.close()
| 38.406504
| 271
| 0.528577
|
b30f7b5c8b2980c3d48b594e3ac4d956234ac1ef
| 8,186
|
py
|
Python
|
nets/resnet.py
|
medivhna/TF_Face
|
f63ecdf7bebafe17722de6518fcb023f5839bdd5
|
[
"Apache-2.0"
] | 22
|
2017-11-14T07:36:20.000Z
|
2019-09-15T12:34:09.000Z
|
nets/resnet.py
|
medivhna/TF_Face
|
f63ecdf7bebafe17722de6518fcb023f5839bdd5
|
[
"Apache-2.0"
] | 3
|
2017-11-22T06:32:57.000Z
|
2019-02-16T16:09:55.000Z
|
nets/resnet.py
|
medivhna/TF_Face
|
f63ecdf7bebafe17722de6518fcb023f5839bdd5
|
[
"Apache-2.0"
] | 7
|
2017-12-07T05:40:49.000Z
|
2019-09-27T01:16:46.000Z
|
# Copyright 2018 Guanshuo Wang. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from collections import OrderedDict
import tensorflow as tf
from tensorflow.contrib import layers
from tensorflow.contrib.framework import arg_scope
import net_base
class ResNet(net_base.Network):
    """Bottleneck ResNet backbone with a softmax classifier head.

    Builds the classic 4-stage bottleneck ResNet (conv2..conv5) on top of
    ``tf.contrib.layers``.  When ``pre_act`` is True the pre-activation
    ("ResNet v2") residual unit is used instead of the original
    post-activation unit.  This file targets Python 2 / TF 1.x (``xrange``,
    integer ``/``) and that style is kept here.
    """

    def __init__(self,
                 num_layers,
                 pre_act=False,
                 weight_decay=0.0005,
                 data_format='NCHW',
                 name='ResNet'):
        # num_layers: total network depth; 26, 50, 101 and 152 are supported.
        # pre_act:    use pre-activation residual units when True.
        assert (num_layers-2)%3==0, "num_layers-2 must be divided by 3."
        self.num_layers = num_layers
        self.pre_act = pre_act
        if self.num_layers in [50, 101]:
            # (num_layers-32)/3 -> 6 blocks for ResNet-50, 23 for ResNet-101
            # (Python-2 integer division; would become a float under Python 3).
            self.num_block = [3, 4, (self.num_layers-32)/3, 3]
        elif self.num_layers == 152:
            self.num_block = [3, 8, 36, 3]
        elif self.num_layers == 26:
            self.num_block = [2, 2, 2, 2]
        else:
            raise ValueError('Unsupported num_layers.')
        # Output channels of each stage (bottleneck expansion already applied).
        self.num_outputs = [256, 512, 1024, 2048]
        super(ResNet, self).__init__(weight_decay, data_format, name+'-'+str(num_layers))

    def conv_bn_relu(self, x,
                     num_outputs,
                     kernel_size,
                     stride=1,
                     activation_fn=tf.nn.relu,
                     normalizer_fn=layers.batch_norm,
                     scope=None):
        """conv2d -> optional batch-norm -> optional activation.

        Either step is skipped when its *_fn argument is None; the raw conv
        output is also recorded in the 'conv_output' collection.
        """
        net = layers.conv2d(x, num_outputs, kernel_size=kernel_size, stride=stride, scope=scope)
        tf.add_to_collection('conv_output', net)
        if normalizer_fn is not None:
            net = normalizer_fn(net, scope=scope+'/BatchNorm')
        if activation_fn is not None:
            net = activation_fn(net)
        return net

    def resBlock(self, x,
                 num_outputs,
                 stride=1,
                 activation_fn=tf.nn.relu,
                 normalizer_fn=layers.batch_norm,
                 scope=None):
        """Bottleneck residual unit: 1x1 reduce -> 3x3 -> 1x1 expand + shortcut."""
        with tf.variable_scope(scope, 'resBlock'):
            shortcut = x
            if stride != 1 or x.get_shape()[self.channel_axis] != num_outputs:
                # Projection shortcut whenever the spatial size or channel
                # count changes between input and output.
                if self.pre_act:
                    shortcut = layers.batch_norm(shortcut)
                shortcut = self.conv_bn_relu(shortcut, num_outputs, kernel_size=1, stride=stride,
                                             normalizer_fn=layers.batch_norm if not self.pre_act else None,
                                             activation_fn=None,
                                             scope='conv_shortcut_1x1')
            if self.pre_act:
                # BUG FIX: the original called the undefined name `batch_norm`,
                # raising NameError whenever pre_act=True; the module is used
                # as `layers.batch_norm` everywhere else in this file.
                x = tf.nn.relu(layers.batch_norm(x))
            x = self.conv_bn_relu(x, num_outputs/4, kernel_size=1, stride=1, scope='conv1_1x1',)
            x = self.conv_bn_relu(x, num_outputs/4, kernel_size=3, stride=stride, scope='conv2_3x3')
            x = self.conv_bn_relu(x, num_outputs, kernel_size=1, stride=1,
                                  normalizer_fn=normalizer_fn if not self.pre_act else None,
                                  activation_fn=None,
                                  scope='conv3_1x1')
            x += shortcut
            if not self.pre_act:
                # Post-activation variant applies the nonlinearity after the add.
                x = activation_fn(x)
            return x

    def backbone(self, inputs, is_training=False, reuse=None):
        """Run the feature extractor.

        Returns (features, end_points): globally average-pooled features and
        an OrderedDict of every intermediate block output.
        """
        end_points = OrderedDict()
        with tf.variable_scope(self.name, reuse=reuse):
            with arg_scope([layers.batch_norm], scale=True, fused=True,
                           data_format=self.data_format,
                           is_training=is_training):
                with arg_scope([layers.conv2d],
                               activation_fn=None,
                               normalizer_fn=None,
                               biases_initializer=None,
                               weights_regularizer=layers.l2_regularizer(self.weight_decay),
                               data_format=self.data_format):
                    if self.data_format == 'NCHW':
                        # Inputs arrive NHWC; transpose once for NCHW kernels.
                        inputs = tf.transpose(inputs, [0, 3, 1, 2])
                    with tf.variable_scope('conv1'):
                        # Stem: 7x7/2 conv (+BN/ReLU in post-act mode) then 3x3/2 max-pool.
                        net = self.conv_bn_relu(inputs, num_outputs=64, kernel_size=7, stride=2,
                                                activation_fn=tf.nn.relu if not self.pre_act else None,
                                                normalizer_fn=layers.batch_norm if not self.pre_act else None,
                                                scope='conv_7x7')
                        end_points['conv1/conv_7x7'] = net
                        net = layers.max_pool2d(net, kernel_size=3, stride=2,
                                                padding='SAME', data_format=self.data_format, scope='maxpool_3x3_2')
                        end_points['conv1/maxpool_3x3_2'] = net
                    with tf.variable_scope('conv2'):
                        for idx in xrange(self.num_block[0]):
                            net = self.resBlock(net, num_outputs=self.num_outputs[0], scope='resBlock_%d'%idx)
                            end_points['conv2/resBlock_%d'%idx] = net
                    # Stages conv3..conv5 downsample on their first block only.
                    with tf.variable_scope('conv3'):
                        for idx in xrange(self.num_block[1]):
                            net = self.resBlock(net, num_outputs=self.num_outputs[1],
                                                stride=2 if not idx else 1, scope='resBlock_%d'%idx)
                            end_points['conv3/resBlock_%d'%idx] = net
                    with tf.variable_scope('conv4'):
                        for idx in xrange(self.num_block[2]):
                            net = self.resBlock(net, num_outputs=self.num_outputs[2],
                                                stride=2 if not idx else 1, scope='resBlock_%d'%idx)
                            end_points['conv4/resBlock_%d'%idx] = net
                    with tf.variable_scope('conv5'):
                        for idx in xrange(self.num_block[3]):
                            net = self.resBlock(net, num_outputs=self.num_outputs[3],
                                                stride=2 if not idx else 1, scope='resBlock_%d'%idx)
                            end_points['conv5/resBlock_%d'%idx] = net
                    # Global average pooling over the spatial axes.
                    net = tf.reduce_mean(net, self.spatial_axis)
        return net, end_points

    def forward(self, images, num_classes=None, is_training=True):
        """Backbone + dropout + fully-connected classifier.

        Returns a dict with key 'logits' (unnormalized class scores).
        """
        assert num_classes is not None, 'num_classes must be given when is_training=True'
        # Forward
        features, _ = self.backbone(images, is_training=is_training)
        # Logits
        with tf.variable_scope('classifier'):
            features_drop = layers.dropout(features, keep_prob=0.5, is_training=is_training)
            logit = layers.fully_connected(features_drop, num_classes, activation_fn=None,
                                           weights_initializer=tf.random_normal_initializer(stddev=0.001),
                                           weights_regularizer=layers.l2_regularizer(self.weight_decay),
                                           biases_initializer=None,
                                           scope='fc_classifier')
        logits = {}
        logits['logits'] = logit
        return logits

    def loss_function(self, scope, labels, **logits):
        """Softmax cross-entropy plus regularization losses.

        Returns (losses, losses_name, others) as expected by the trainer.
        """
        losses = []
        losses_name = []
        others = {}
        cross_entropy_loss = tf.losses.sparse_softmax_cross_entropy(logits=logits['logits'],
                                                                    labels=labels,
                                                                    scope='cross_entropy')
        losses.append(cross_entropy_loss)
        losses_name.append('cross_entropy')
        # Regularization
        losses, losses_name = self._regularize(scope, losses, losses_name)
        return losses, losses_name, others

    def param_list(self, is_training, trainable, scope=None):
        """Variable lists for the backbone (and classifier when training)."""
        var_fn = tf.trainable_variables if trainable else tf.global_variables
        scope_name = scope.name+'/' if scope is not None else ''
        if is_training:
            return [var_fn(scope_name+self.name), var_fn(scope_name+'classifier')]
        else:
            return [var_fn(scope_name+self.name)]

    def pretrained_param(self, scope=None):
        """Subset of global variables restorable from a backbone checkpoint."""
        pretrained_param = []
        for param in self.param_list(is_training=False, trainable=False, scope=scope):
            for v in param:
                if self.name in v.name:
                    pretrained_param.append(v)
        return pretrained_param
| 42.195876
| 104
| 0.592597
|
e2a7f460b0a01d048b554f62ad36341450f17709
| 425
|
py
|
Python
|
web/manage.py
|
liudonghua123/plate_vision
|
e7da25e504599282dc2e19d24810eefd978db126
|
[
"MIT"
] | 6
|
2018-12-10T04:05:49.000Z
|
2020-01-02T07:13:49.000Z
|
web/manage.py
|
cmFighting/plate_vision
|
1a09b3e07f76a85c9ebfa7b1fbe7e19e17fb573b
|
[
"MIT"
] | 1
|
2019-07-12T02:54:00.000Z
|
2019-07-12T02:54:00.000Z
|
web/manage.py
|
liudonghua123/plate_vision
|
e7da25e504599282dc2e19d24810eefd978db126
|
[
"MIT"
] | 2
|
2018-12-12T01:45:57.000Z
|
2019-06-13T03:34:43.000Z
|
# encoding: utf-8
# Management entry point for the Flask app: wires up Flask-Script with
# Flask-Migrate so database migrations can be run from the command line
# (e.g. `python manage.py db migrate` / `db upgrade`).
from flask_script import Manager
from flask_migrate import MigrateCommand, Migrate
from plate_app import app
from exts import db
import config
from models import User, Station, Car, Records  # import the ORM models so migrations can see them
app.config.from_object(config)
db.init_app(app)
manager = Manager(app)
# Bind migrate to the app and db
migrate = Migrate(app, db)
# Expose Flask-Migrate's commands under the `db` sub-command.
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
    manager.run()
| 19.318182
| 54
| 0.771765
|
05c5fab0784c5f6b52ce5b0c5ce81dcee7f3c6a2
| 134
|
rb
|
Ruby
|
lib/pivot_table/column.rb
|
rafaltrojanowski/pivot_table
|
20cc6329f068150bcdf122de93422026bfc8b02f
|
[
"MIT"
] | 64
|
2015-01-20T19:23:01.000Z
|
2021-05-12T20:25:37.000Z
|
lib/pivot_table/column.rb
|
rafaltrojanowski/pivot_table
|
20cc6329f068150bcdf122de93422026bfc8b02f
|
[
"MIT"
] | 5
|
2015-01-20T12:02:26.000Z
|
2019-03-25T08:12:58.000Z
|
lib/pivot_table/column.rb
|
rafaltrojanowski/pivot_table
|
20cc6329f068150bcdf122de93422026bfc8b02f
|
[
"MIT"
] | 11
|
2015-01-09T18:40:27.000Z
|
2021-11-30T06:45:48.000Z
|
module PivotTable
  # One column of a pivot table; shared cell behaviour comes from the
  # CellCollection mixin.
  class Column
    include CellCollection

    # Return the cell value associated with the given row header.
    def row_data(header)
      find_data(header)
    end
  end
end
| 13.4
| 27
| 0.723881
|
e34ea2c975f9711e52c31ea83aad636256b2d5ed
| 544
|
rb
|
Ruby
|
lib/fine_print/configuration.rb
|
ProtoJoe/fine_print
|
85504ff618f8b55fd2cbeb5accfba7085dc27850
|
[
"MIT"
] | 33
|
2015-03-26T05:25:18.000Z
|
2022-01-23T21:04:06.000Z
|
lib/fine_print/configuration.rb
|
ProtoJoe/fine_print
|
85504ff618f8b55fd2cbeb5accfba7085dc27850
|
[
"MIT"
] | 17
|
2015-06-19T21:47:56.000Z
|
2021-09-26T11:57:44.000Z
|
lib/fine_print/configuration.rb
|
ProtoJoe/fine_print
|
85504ff618f8b55fd2cbeb5accfba7085dc27850
|
[
"MIT"
] | 17
|
2015-03-19T00:23:17.000Z
|
2021-03-16T23:15:51.000Z
|
module FinePrint
  # Container for FinePrint's user-configurable settings.  An accessor is
  # generated for every known option name.
  class Configuration
    # Options honored only when set in the initializer.
    ENGINE_OPTIONS = %i[
      helpers
      layout
      authenticate_user_proc
      authenticate_manager_proc
      current_user_proc
      contract_published_proc
    ]

    # Options that may also be passed as arguments to FinePrint
    # controller methods.
    CONTROLLER_OPTIONS = %i[redirect_to_contracts_proc]

    attr_accessor(*(ENGINE_OPTIONS + CONTROLLER_OPTIONS))
  end
end
| 20.923077
| 58
| 0.674632
|
b16ab7d571cab76e9c524767f042bc73f2b4d7bb
| 83
|
py
|
Python
|
editor/platforms/windows.py
|
Commander07/Magnitude
|
2b793d0d9946f6b35c5935ae5921592e287bbbe7
|
[
"MIT"
] | 6
|
2020-12-06T20:21:39.000Z
|
2021-06-29T06:37:40.000Z
|
editor/platforms/windows.py
|
Commander07/Magnitude
|
2b793d0d9946f6b35c5935ae5921592e287bbbe7
|
[
"MIT"
] | null | null | null |
editor/platforms/windows.py
|
Commander07/Magnitude
|
2b793d0d9946f6b35c5935ae5921592e287bbbe7
|
[
"MIT"
] | null | null | null |
## Windows-specific functions. If a function exists here, it must also exist in linux.py.
| 41.5
| 82
| 0.783133
|
bd0f06614c677d28ecc9098f2e1ced1b0c13a658
| 1,077
|
html
|
HTML
|
extension/options.html
|
JLucasRS/nerdcast-extensao-insercoes
|
ff0a6a77b171a616c871ced38bcb7e4bcdead2eb
|
[
"MIT"
] | 2
|
2021-11-14T10:36:39.000Z
|
2022-01-13T22:25:48.000Z
|
extension/options.html
|
JLucasRS/nerdcast-extensao-insercoes
|
ff0a6a77b171a616c871ced38bcb7e4bcdead2eb
|
[
"MIT"
] | null | null | null |
extension/options.html
|
JLucasRS/nerdcast-extensao-insercoes
|
ff0a6a77b171a616c871ced38bcb7e4bcdead2eb
|
[
"MIT"
] | null | null | null |
<!DOCTYPE html>
<!-- Extension options page: checkboxes persisted/restored by options.js. -->
<!-- BUG FIX: removed the stray `"` after lang="pt-br" that broke the <html> tag. -->
<html lang="pt-br">
<head>
	<meta charset="utf-8" />
	<link rel="stylesheet" href="options.css"/>
</head>
<body>
	<div>
		<label>
			<input type="checkbox" id="gallery" checked>
			Mostrar Galeria
		</label><br>
		<label>
			<input type="checkbox" id="insertions" checked>
			Ativar Inserções na Vitrine
		</label><br>
		<label>
			<input type="checkbox" id="sound" checked>
			Ativar Sons
		</label><br>
		<label>
			<input type="checkbox" id="skip" checked>
			Mostrar botão de Pular Emails
		</label><br>
		<label>
			<input type="checkbox" id="extra" checked>
			Adicionar tempo extras as inserções (se possível)
		</label><br>
		<div id="status"></div>
		<button id="save">Salvar</button>
	</div>
	<script src="options.js"></script>
</body>
</html>
| 28.342105
| 65
| 0.456825
|
41dd4583151576ec8f63f58ee16f8ba480c32875
| 34,081
|
asm
|
Assembly
|
base/mvdm/dos/v86/doskrnl/dos/macro2.asm
|
npocmaka/Windows-Server-2003
|
5c6fe3db626b63a384230a1aa6b92ac416b0765f
|
[
"Unlicense"
] | 17
|
2020-11-13T13:42:52.000Z
|
2021-09-16T09:13:13.000Z
|
base/mvdm/dos/v86/doskrnl/dos/macro2.asm
|
sancho1952007/Windows-Server-2003
|
5c6fe3db626b63a384230a1aa6b92ac416b0765f
|
[
"Unlicense"
] | 2
|
2020-10-19T08:02:06.000Z
|
2020-10-19T08:23:18.000Z
|
base/mvdm/dos/v86/doskrnl/dos/macro2.asm
|
sancho1952007/Windows-Server-2003
|
5c6fe3db626b63a384230a1aa6b92ac416b0765f
|
[
"Unlicense"
] | 14
|
2020-11-14T09:43:20.000Z
|
2021-08-28T08:59:57.000Z
|
TITLE MACRO2 - Pathname and macro related internal routines
NAME MACRO2
;** MACRO2.ASM
;
; TransFCB
; TransPath
; TransPathSet
; TransPathNoSet
; Canonicalize
; PathSep
; SkipBack
; CopyComponent
; Splice
; $NameTrans
; DriveFromText
; TextFromDrive
; PathPref
; ScanPathChar
;
; Revision history:
;
; Sudeepb 11-Mar-1991 Ported for NT DOSEm
;
;
; MSDOS performs several types of name translation. First, we maintain for
; each valid drive letter the text of the current directory on that drive.
; For invalid drive letters, there is no current directory so we pretend to
; be at the root. A current directory is either the raw local directory
; (consisting of drive:\path) or a local network directory (consisting of
; \\machine\path. There is a limit on the point to which a .. is allowed.
;
; Given a path, MSDOS will transform this into a real from-the-root path
; without . or .. entries. Any component that is > 8.3 is truncated to
; this and all * are expanded into ?'s.
;
; The second part of name translation involves subtree aliasing. A list of
; subtree pairs is maintained by the external utility SUBST. The results of
; the previous 'canonicalization' are then examined to see if any of the
; subtree pairs is a prefix of the user path. If so, then this prefix is
; replaced with the other subtree in the pair.
;
; A third part involves mapping this "real" path into a "physical" path. A
; list of drive/subtree pairs are maintained by the external utility JOIN.
; The output of the previous translation is examined to see if any of the
; subtrees in this list are a prefix of the string. If so, then the prefix
; is replaced by the appropriate drive letter. In this manner, we can
; 'mount' one device under another.
;
; The final form of name translation involves the mapping of a user's
; logical drive number into the internal physical drive. This is
; accomplished by converting the drive number into letter:CON, performing
; the above translation and then converting the character back into a drive
; number.
;
.xlist
.xcref
include version.inc
include dosseg.inc
INCLUDE DOSSYM.INC
INCLUDE DEVSYM.INC
include mult.inc
include curdir.inc
.cref
.list
I_need Splices,BYTE ; TRUE => splices are being done.
I_need WFP_Start,WORD ; pointer to beginning of expansion
I_need Curr_Dir_End,WORD ; offset to end of current dir
I_need ThisCDS,DWORD ; pointer to CDS used
I_need NAME1,11 ; Parse output of NameTrans
I_need OpenBuf,128 ; ususal destination of strings
I_need ExtFCB,BYTE ; flag for extended FCBs
I_need Sattrib,BYTE ; attribute of search
I_need fSplice,BYTE ; TRUE => do splice after canonicalize
I_need fSharing,BYTE ; TRUE => no redirection allowed
I_Need NoSetDir,BYTE ; TRUE => syscall is interested in
; entry, not contents. We splice only
; inexact matches
I_Need cMeta,BYTE ; count of meta chars in path
I_Need Temp_Var,WORD ;AN000; variable for temporary use 3/31/KK
I_Need DOS34_FLAG,WORD ;AN000; variable for dos34
DOSCODE SEGMENT
ASSUME SS:DOSDATA,CS:DOSCODE
ifdef DBCS
EXTRN TestKanj:near
endif
EXTRN PathChrCmp:near
BREAK <TransFCB - convert an FCB into a path, doing substitution>
;
; TransFCB - Copy an FCB from DS:DX into a reserved area doing all of the
; gritty substitution.
;
; Inputs: DS:DX - pointer to FCB
; ES:DI - point to destination
; Outputs: Carry Set - invalid path in final map
; Carry Clear - FCB has been mapped into ES:DI
; Sattrib is set from possibly extended FCB
; ExtFCB set if extended FCB found
; ax= 0 means local device found
; ES:DI - points to WFP_START
; ax = -1 means file or UNC
; ES:DI points to WFP_START
; Registers modified: most
Procedure TransFCB,NEAR
ASSUME CS:DOSCODE,SS:DOSDATA
PUBLIC MACRO001S,MACRO001E
MACRO001S:
LocalVar FCBTmp,16 ; M015 - allocate even number of bytes on stack
MACRO001E:
Enter
Context ES ; get DOSDATA addressability
SAVE <ES,DI> ; save away final destination
LEA DI,FCBTmp ; point to FCB temp area
MOV [ExtFCB],0 ; no extended FCB found ;smr;SS Override
MOV [Sattrib],0 ; default search attributes;smr;SS Override
invoke GetExtended ; get FCB, extended or not
JZ GetDrive ; not an extended FCB, get drive
MOV AL,[SI-1] ; get attributes
MOV [SAttrib],AL ; store search attributes;smr;SS Override
MOV [ExtFCB],-1 ; signal extended FCB ;smr;SS Override
GetDrive:
LODSB ; get drive byte
invoke GetThisDrv ; validate drive byte (carry set => bad drive)
jc BadPack
CALL TextFromDrive ; convert 0-based drive to text
;
; Scan the source to see if there are any illegal chars
;
IFDEF DBCS ;
;----------------------------- Start of DBCS 2/13/KK
; DBCS build: scan 8-char name then 3-char extension, stepping over
; two-byte Kanji characters so a lead byte is never validated alone.
SAVE <SI> ; back over name, ext
MOV CX,8 ; 8 chars in main part of name
FCBScan:LODSB ; get a byte
call TestKanj ;
jz notkanj2 ;
DEC CX ;
JCXZ VolidChck ; Kanji half char mess up
LODSB ; second kanji byte
jmp short Nextch
VolidChck:
TEST [SAttrib],attr_volume_id ; volume id ? ;smr;SS Override
JZ Badpack ; no, error
OR [DOS34_FLAG],DBCS_VOLID ; no, error ;smr;SS Override
DEC CX ; cx=-1
INC SI ; next char
JMP SHORT FCBScango
notkanj2:
invoke GetCharType ;get bits;smr;
TEST AL,fFCB ; legal FCB character?
JZ BadPack
NextCh:
LOOP FCBScan
FCBScango:
ADD CX,3 ; Three chars in extension
FCBScanE:
LODSB ; get extension byte
call TestKanj
jz notkanj3
DEC CX
JCXZ BadPack ; Kanji half char problem
LODSB ; second kanji byte
jmp short NextChE
notkanj3:
invoke GetCharType ;get bits;smr;
TEST AL,fFCB
JZ BadPack
NextChE:
LOOP FCBScanE
;----------------------------- End of DBCS 2/13/KK
ELSE
MOV CX,11 ; non-DBCS build: 11 chars = 8 name + 3 extension
SAVE <SI> ; back over name, ext
FCBScan:LODSB ; get a byte
invoke GetCharType ;get bits;smr;
TEST AL,fFCB
JZ BadPack
NextCh: LOOP FCBScan
ENDIF
RESTORE <SI>
MOV BX,DI
invoke PackName ; crunch the path
RESTORE <DI,ES> ; get original destination
Context DS ; get DS addressability
LEA SI,FCBTmp ; point at new pathname
CMP BYTE PTR [BX],0 ; empty packed name => error
JZ BadPack
SAVE <BP>
CALL TransPathSet ; convert the path
RESTORE <BP>
JNC FCBRet ; bye with transPath error code
BadPack:
STC
MOV AL,error_path_not_found
FCBRet: Leave
return
EndProc TransFCB,NoCheck
BREAK <TransPath - copy a path, do string sub and put in current dir>
;
; TransPath - copy a path from DS:SI to ES:DI, performing component string
; substitution, insertion of current directory and fixing . and ..
; entries. Perform splicing. Allow input string to match splice
; exactly.
;
; TransPathSet - Same as above except No splicing is performed if input path
; matches splice.
;
; TransPathNoSet - No splicing/local using is performed at all.
;
; The following anomalous behaviour is required:
;
; Drive letters on devices are ignored. (set up DummyCDS)
; Paths on devices are ignored. (truncate to 0-length)
; Raw net I/O sets ThisCDS => NULL.
; fSharing => dummyCDS and no subst/splice. Only canonicalize.
;
; Other behaviour:
;
; ThisCDS set up.
; ValidateCDS done on local CDS.
;
; Brief flowchart:
;
; if fSharing then
; set up DummyCDS (ThisCDS)
; canonicalize (sets cMeta)
; splice
; return
; if \\ or d:\\ lead then
; set up null CDS (ThisCDS)
; canonicalize (sets cMeta)
; return
; if device then
; set up dummyCDS (ThisCDS)
; canonicalize (sets cMeta)
; return
; if file then
; getCDS (sets (ThisCDS) from name)
; validateCDS (may reset current dir)
; Copy current dir
; canonicalize (set cMeta)
; splice
; generate correct CDS (ThisCDS)
; if local then
; return
;
; Inputs: DS:SI - point to ASCIZ string path
; DI - point to buffer in DOSDATA
; Outputs: Carry Set
; invalid path specification: too many .., bad
; syntax, etc. or user FAILed to I 24. DS:SI may be modified
; Carry Clear
; ax= 0 means local device found
; ES:DI - points to WFP_START
; ax = -1 means file or UNC
; ES:DI points to WFP_START
; DS - DOSDATA
; Registers modified: most
;
; **** WARNING **** 14-Jan-1994 Jonle **** NTVDM port
; Transpath does not verify that the path\drive actually exists, which
; means that dos file apis which rely on Transpath for this validation
; will not get a error_path_not_found error from transpath, and if special
; handling is not done an incorrect error code will be generated
; (usually error_access_denied). See $Mkdir,$Rmdir for an example of proper
; error handling.
;
Procedure TransPath,NEAR
ASSUME CS:DOSCODE,SS:DOSDATA
XOR AL,AL ; TransPath entry: NoSetDir = 0 (exact splice allowed)
JMP SHORT SetSplice
Entry TransPathSet
MOV AL,-1 ; TransPathSet entry: NoSetDir = -1 (no splice on exact match)
SetSplice:
MOV NoSetDir,AL ; NoSetDir = !fExact; ;smr;SS Override
MOV AL,-1
Entry TransPathNoSet
MOV fSplice,AL ; fSplice = TRUE; ;smr;SS Override
MOV cMeta,-1 ;smr;SS Override
MOV WFP_Start,DI ;smr;SS Override
MOV Curr_Dir_End,-1 ; crack from start ;smr;SS Override
Context ES
LEA BP,[DI+TEMPLEN] ; end of buffer
;
; At this point the name is either a UNC-style name (prefixed with two leading
; \\s) or is a local file/device.
;
CALL DriveFromText ; eat drive letter
PUSH AX ; save it
MOV AX,WORD PTR [SI] ; get first two bytes of path
call PathChrCmp ; convert to normal form
XCHG AH,AL ; swap for second byte
call PathChrCmp ; convert to normal form
JNZ CheckDevice ; not a path char
CMP AH,AL ; are they same?
JNZ CheckDevice ; nope
;
; We have a UNC request.
POP AX
MOVSW ; get the lead \\
UNCCpy: LODSB ; get a byte
IFDEF DBCS ;AN000;
;----------------------------- Start of DBCS 2/23/KK
call TestKanj ;AN000;
jz notkanj1 ;AN000;
STOSB ;AN000;
LODSB ;AN000;
OR AL,AL ;AN000;
JZ UNCTerm ;AN000;; Ignore half kanji error for now
STOSB ;AN000;
jmp UNCCpy ;AN000;
notkanj1: ;AN000;
;----------------------------- End of DBCS 2/23/KK
ENDIF ;AN000;
invoke UCase ;AN000;; convert the char
OR AL,AL
JZ UNCTerm ; end of string. All done.
STOSB
JMP UNCCpy ; no, go copy
UNCTerm:
STOSB ;AN000;; copy the terminating NUL
mov ax,-1 ;No Carry and ax = -1
ret
CheckDevice:
;
; Check DS:SI for device. First eat any path stuff
;
POP AX ; retrieve drive info
CMP BYTE PTR DS:[SI],0 ; check for null file
JNZ CheckPath
MOV AL,error_file_not_found ; bad file error
STC ; signal error on null input
RETURN ; bye!
CheckPath:
SAVE <AX,BP> ; save drive number
Invoke CheckThisDevice ; snoop for device
RESTORE <BP,AX> ; get drive letter back
JNC DoFile ; yes we have a file.
;
; We have a device. AX has drive letter. At this point we may fake a CDS ala
; sharing DOS call.
;
; If DX != 0 then path not found
or dx, dx
jz DEV_CONT2
DEV_Err:
MOV AL,error_path_not_found
STC ; signal error on null input
RETURN ; bye!
DEV_CONT2:
MOV fSharing,-1 ; simulate sharing dos call;smr;SS Override
invoke GetThisDrv ; set ThisCDS and init DUMMYCDS
MOV fSharing,0 ;
jc DEV_Err
;
; Now that we have noted that we have a device, we put it into a form that
; getpath can understand. Normally getpath requires d:\ to begin the input
; string. We relax this to state that if the d:\ is present then the path
; may be a file. If D:/ (note the forward slash) is present then we have
; a device.
;
CALL TextFromDrive
MOV AL,'/' ; path sep.
STOSB
invoke StrCpy ; Copy Device Name to Buffer
xor ax,ax ; Clear Carry and ax = 0
Context DS ; remainder of OK stuff
return
;
; We have a file. Get the raw CDS.
;
DoFile:
ASSUME DS:NOTHING
invoke GetVisDrv ; get proper CDS
MOV AL,error_path_not_found ; Set up for possible bad file error
retc ; CARRY set -> bogus drive/spliced
;
; ThisCDS has correct CDS. DS:SI advanced to point to beginning of path/file.
; Make sure that CDS has valid directory; ValidateCDS requires a temp buffer
; Use the one that we are going to use (ES:DI).
;
; SAVE <DS,SI,ES,DI> ; save all string pointers.
; invoke ValidateCDS ; poke CDS amd make everything OK
; RESTORE <DI,ES,SI,DS> ; get back pointers
; MOV AL,error_path_not_found ; Set up for possible bad path error
; retc ; someone failed an operation
;
; ThisCDS points to correct CDS. It contains the correct text of the
; current directory. Copy it in.
;
SAVE <DS,SI>
LDS SI,ThisCDS ; point to CDS ;smr;SS Override
MOV BX,DI ; point to destination
ADD BX,[SI].curdir_end ; point to backup limit
; LEA SI,[SI].curdir_text ; point to text
LEA BP,[DI+TEMPLEN] ; regenerate end of buffer
IFDEF DBCS ;AN000;
;------------------------ Start of DBCS 2/13/KK
; DBCS build: copy current-dir text byte-pairwise so Kanji lead bytes
; are never split from their trail bytes.
Kcpylp: ;AN000;
LODSB ;AN000;
call TestKanj ;AN000;
jz Notkanjf ;AN000;
STOSB ;AN000;
MOVSB ;AN000;
CMP BYTE PTR [SI],0 ;AN000;
JNZ Kcpylp ;AN000;
MOV AL, '\' ;AN000;
STOSB ;AN000;
JMP SHORT GetOrig ;AN000;
Notkanjf: ;AN000;
STOSB ;AN000;
OR AL,AL ;AN000;
JNZ Kcpylp ;AN000;
DEC DI ;AN000;; point to NUL byte
;------------------------ End of DBCS 2/13/KK
ELSE ;AN000;
invoke FStrCpy ; copy string. ES:DI point to end
DEC DI ; point to NUL byte
ENDIF ;AN000;
;
; Make sure that there is a path char at end.
;
MOV AL,'\'
CMP ES:[DI-1],AL
JZ GetOrig
STOSB
;
; Now get original string.
;
GetOrig:
DEC DI ; point to path char
RESTORE <SI,DS>
;
; BX points to the end of the root part of the CDS (at where a path char
; should be) . Now, we decide whether we use this root or extend it with the
; current directory. See if the input string begins with a leading \
;
CALL PathSep ; is DS:SI a path sep?
JNZ PathAssure ; no, DI is correct. Assure a path char
OR AL,AL ; end of string?
JZ DoCanon ; yes, skip.
;
; The string does begin with a \. Reset the beginning of the canonicalization
; to this root. Make sure that there is a path char there and advance the
; source string over all leading \'s.
;
MOV DI,BX ; back up to root point.
SkipPath:
LODSB
call PathChrCmp
JZ SkipPath
DEC SI
OR AL,AL
JZ DoCanon
;
; DS:SI start at some file name. ES:DI points at some path char. Drop one in
; for yucks.
;
PathAssure:
MOV AL,'\'
STOSB
;
; ES:DI point to the correct spot for canonicalization to begin.
; BP is the max extent to advance DI
; BX is the backup limit for ..
;
DoCanon:
CALL Canonicalize ; wham.
retc ; badly formatted path.
;
; The string has been moved to ES:DI. Reset world to DOS context, pointers
; to wfp_start and do string substitution. BP is still the max position in
; buffer.
;
Context DS
MOV DI,wfp_start ; DS:SI point to string
LDS SI,ThisCDS ; point to CDS
ASSUME DS:NOTHING
; LEA SI,[SI].curdir_text ; point to text
CALL PathPref ; is there a prefix?
JNZ DoSplice ; no, do splice
;
; We have a match. Check to see if we ended in a path char.
;
IFDEF DBCS ;AN000;
;---------------------------- Start of DBCS 2/13/KK
; Walk the CDS text from its start so the byte at SI-1 below is known to
; be a real single-byte character, not the trail byte of a Kanji pair.
PUSH BX ;AN000;
MOV BX,SI ;AN000;
MOV SI,WORD PTR ThisCDS ;AN000;; point to CDS ;smr;SS Override
LOOKDUAL: ;AN000;
MOV AL,BYTE PTR [SI] ;AN000;
call TestKanj ;AN000;
JZ ONEINC ;AN000;
INC SI ;AN000;
INC SI ;AN000;
CMP SI,BX ;AN000;
JB LOOKDUAL ;AN000;
POP BX ;AN000;; Last char was KANJI, don't look back
JMP SHORT Pathline ;AN000;; for path sep, there isn't one.
;AN000;
ONEINC: ;AN000;
INC SI ;AN000;
CMP SI,BX ;AN000;
JB LOOKDUAL ;AN000;
POP BX ;AN000;
;------------------------ End of DBCS 2/13/KK
ENDIF ;AN000;
MOV AL,DS:[SI-1] ; last char to match
call PathChrCmp ; did we end on a path char? (root)
JZ DoSplice ; yes, no current dir here.
Pathline: ; 2/13/KK
CMP BYTE PTR ES:[DI],0 ; end at NUL?
JZ DoSplice
INC DI ; point to after current path char
MOV Curr_Dir_End,DI ; point to correct spot ;smr;SS Override
;
; Splice the result.
;
DoSplice:
Context DS ; back to DOSDATA
MOV SI,wfp_Start ; point to beginning of string
XOR CX,CX
TEST fSplice,-1 ; splicing requested by this entry point?
JZ SkipSplice
CALL Splice ; replaces in place.
SkipSplice:
ASSUME DS:NOTHING
Context DS
; LES DI,ThisCDS ; point to correct drive
; TEST ES:[DI].curdir_flags,curdir_isnet
; JNZ Done ; net (retnz)
; JCXZ Done
; MOV AL,error_path_not_found ; Set up for possible bad path error
mov ax,-1 ; success: ax = -1 (file/UNC), carry clear
clc
Done: return ; any errors in carry flag.
EndProc TransPath
BREAK <Canonicalize - copy a path and remove . and .. entries>
;
; Canonicalize - copy path removing . and .. entries.
;
; Inputs: DS:SI - point to ASCIZ string path
; ES:DI - point to buffer
; BX - backup limit (offset from ES) points to slash
; BP - end of buffer
; Outputs: Carry Set - invalid path specification: too many .., bad
; syntax, etc.
; Carry Clear -
; DS:DI - advanced to end of string
; ES:DI - advanced to end of canonicalized form after nul
; Registers modified: AX CX DX (in addition to those above)
Procedure Canonicalize,NEAR
ASSUME CS:DOSCODE,SS:DOSDATA
;
; We copy all leading path separators.
;
LODSB ; while (PathChr (*s))
call PathChrCmp
IFDEF DBCS
JNZ CanonDec0 ; 2/19/KK
ELSE
JNZ CanonDec
ENDIF
CMP DI,BP ; if (d > dlim)
JAE CanonBad ; goto error;
STOSB
JMP Canonicalize ; *d++ = *s++;
IFDEF DBCS ;AN000;
CanonDec0: ;AN000; 2/19/KK
; mov cs:Temp_Var,di ;AN000; 3/31/KK
ENDIF ;AN000;
CanonDec:
DEC SI ; back up: SI points at the non-separator just read
;
; Main canonicalization loop. We come here with DS:SI pointing to a textual
; component (no leading path separators) and ES:DI being the destination
; buffer.
;
CanonLoop:
;
; If we are at the end of the source string, then we need to check to see that
; a potential drive specifier is correctly terminated with a path sep char.
; Otherwise, do nothing
;
XOR AX,AX
CMP [SI],AL ; if (*s == 0) {
JNZ DoComponent
IFDEF DBCS ;AN000;
call chk_last_colon ;AN000; 2/18/KK
ELSE ;AN000;
CMP BYTE PTR ES:[DI-1],':' ; if (d[-1] == ':')
ENDIF ;AN000;
; JNZ DoTerminate
jz Do_Colon
;BUGBUG DBCS???
CMP BYTE PTR ES:[DI-1],'\'
JNZ DoTerminate
IFDEF DBCS
; ntraid:mskkbug#3300,3302: Cannot CreateDir/BrowseDir
; Check that really '\' character for DBCS
; 10/30/93 yasuho
push ax
mov al, es:[di-2]
call TestKanj ; Really '\' ?
pop ax
jnz DoTerminate ; No. this is DBCS character
ENDIF
jmp short CanonBad
Do_Colon:
MOV AL,'\' ; *d++ = '\';
STOSB
MOV AL,AH ; AL = 0 (NUL terminator)
DoTerminate:
STOSB ; *d++ = 0;
CLC ; return (0);
return
IFDEF DBCS ;AN000;
;---------------- Start of DBCS 2/18/KK
; chk_last_colon - DBCS-safe test of whether the last byte stored at
; ES:[DI-1] is a ':'; scans forward from WFP_START so a Kanji trail byte
; is never mistaken for a colon. Returns the comparison in ZF.
chk_last_colon proc ;AN000;
push si ;AN000;
push ax ;AN000;
push bx ;AN000;
mov si,[WFP_START] ;AN000;;PTM. for cd .. use beginning of buf;smr;SS Override
cmp si,di ;AN000;; no data stored ?
jb CLC02 ;AN000;;PTM. for cd ..
inc si ;AN000;; make NZ flag
JMP SHORT CLC09 ;AN000;
CLC02: ;AN000;
mov bx,di ;AN000;
dec bx ;AN000;
CLC_lop: ;AN000;
cmp si,bx ;AN000;
jb CLC00 ;AN000;
jne CLC09 ;AN000;
CLC01: ;AN000;
CMP BYTE PTR ES:[DI-1],':' ;AN000;; if (d[-1] == ':')
jmp CLC09 ;AN000;
CLC00: ;AN000;
mov al,es:[si] ;AN000;
inc si ;AN000;
call TestKanj ;AN000;
je CLC_lop ;AN000;
inc si ;AN000;
jmp CLC_lop ;AN000;
CLC09: ;AN000;
pop bx ;AN000;
pop ax ;AN000;
pop si ;AN000;
ret ;AN000;
chk_last_colon endp ;AN000;
;---------------- Endt of DBCS 2/18/KK
ENDIF ;AN000;
CanonBad:
CALL ScanPathChar ; check for path chars in rest of string
MOV AL,error_path_not_found ; Set up for bad path error
JZ PathEnc ; path character encountered in string
MOV AL,error_file_not_found ; Set bad file error
PathEnc:
STC
return
;
; We have a textual component that we must copy. We uppercase it and truncate
; it to 8.3
;
DoComponent: ; }
CALL CopyComponent ; if (!CopyComponent (s, d))
retc ; return (-1);
;
; We special case the . and .. cases. These will be backed up.
;
CMP WORD PTR ES:[DI],'.' + (0 SHL 8)
JZ Skip1
CMP WORD PTR ES:[DI],'..'
JNZ CanonNormal
DEC DI ; d--;
Skip1: CALL SkipBack ; SkipBack ();
MOV AL,error_path_not_found ; Set up for possible bad path error
retc
JMP short CanonPath ; }
;
; We have a normal path. Advance destination pointer over it.
;
CanonNormal: ; else
ADD DI,CX ; d += ct;
;
; We have successfully copied a component. We are now pointing at a path
; sep char or are pointing at a nul or are pointing at something else.
; If we point at something else, then we have an error.
;
CanonPath:
CALL PathSep
JNZ CanonBad ; something else...
;
; Copy the first path char we see.
;
LODSB ; get the char
call PathChrCmp ; is it path char?
ifdef DBCS
JZ Not_CanonDec
JMP CanonDec ; no, go test for nul
Not_CanonDec:
else
JNZ CanonDec ; no, go test for nul
endif
CMP DI,BP ; beyond buffer end?
JAE CanonBad ; yep, error.
STOSB ; copy the one byte
;
; Skip all remaining path chars
;
CanonPathLoop:
LODSB ; get next byte
call PathChrCmp ; path char again?
JZ CanonPathLoop ; yep, grab another
DEC SI ; back up
JMP CanonLoop ; go copy component
EndProc Canonicalize
BREAK <PathSep - determine if char is a path separator>
;
; PathSep - look at DS:SI and see if char is / \ or NUL
; Inputs: DS:SI - point to a char
; Outputs: AL has char from DS:SI (/ => \)
; Zero set if AL is / \ or NUL
; Zero reset otherwise
; Registers modified: AL
Procedure PathSep,NEAR
ASSUME CS:DOSCODE,SS:DOSDATA
MOV AL,[SI] ; get the character
entry PathSepGotCh ; already have character (AL preloaded)
OR AL,AL ; test for zero; NUL also counts as a separator (ZF set)
retz ; return if equal to zero (NUL)
call PathChrCmp ; check for path character (/ or \)
return ; and return HIS determination
EndProc PathSep
BREAK <SkipBack - move backwards to a path separator>
;
; SkipBack - look at ES:DI and backup until it points to a / \
; Inputs: ES:DI - point to a char
; BX has current directory back up limit (point to a / \)
; Outputs: ES:DI backed up to point to a path char
; AL has char from output ES:DI (path sep if carry clear)
; Carry set if illegal backup
; Carry Clear if ok
; Registers modified: DI,AL
Procedure SkipBack,NEAR
ASSUME CS:DOSCODE,SS:DOSDATA
IFDEF DBCS ;AN000;
;-------------------------- Start of DBCS 2/13/KK
; DBCS build: scanning backwards is unsafe when characters can be two
; bytes, so re-scan forward from the backup limit (BX) and remember the
; last path separator seen before the input DI.
PUSH DS ;AN000;
PUSH SI ;AN000;
PUSH CX ;AN000;
PUSH ES ;AN000;
POP DS ;AN000;
MOV SI,BX ;AN000;; DS:SI -> start of ES:DI string
MOV CX,DI ;AN000;; Limit of forward scan is input DI
MOV AL,[SI] ;AN000;
call PathChrCmp ;AN000;
JNZ SkipBadP ;AN000;; Backup limit MUST be path char
CMP DI,BX ;AN000;
JBE SkipBadP ;AN000;
MOV DI,BX ;AN000;; Init backup point to backup limit
Skiplp: ;AN000;
CMP SI,CX ;AN000;
JAE SkipOK ;AN000;; Done, DI is correct backup point
LODSB ;AN000;
call TestKanj ;AN000;
jz Notkanjv ;AN000;
lodsb ;AN000;; Skip over second kanji byte
JMP Skiplp ;AN000;
NotKanjv: ;AN000;
call PathChrCmp ;AN000;
JNZ Skiplp ;AN000;; New backup point
MOV DI,SI ;AN000;; DI point to path sep
DEC DI ;AN000;
jmp Skiplp ;AN000;
SkipOK: ;AN000;
MOV AL,ES:[DI] ;AN000;; Set output AL
CLC ;AN000;; return (0);
POP CX ;AN000;
POP SI ;AN000;
POP DS ;AN000;
return ;AN000;
;AN000;
SkipBadP: ;AN000;
POP CX ;AN000;
POP SI ;AN000;
POP DS ;AN000;
;-------------------------- End of DBCS 2/13/KK
ELSE ;AN000;
; Single-byte build: simply walk DI backwards until a path separator is
; found or the backup limit BX is crossed.
CMP DI,BX ; while (TRUE) {
JB SkipBad ; if (d < dlim)
DEC DI ; goto err;
MOV AL,ES:[DI] ; if (pathchr (*--d))
call PathChrCmp ; break;
JNZ SkipBack ; }
CLC ; return (0);
return ;
ENDIF ;AN000;
SkipBad: ;err:
MOV AL,error_path_not_found ; bad path error
STC ; return (-1);
return ;
EndProc SkipBack
Break <CopyComponent - copy out a file path component>
;
; CopyComponent - copy a file component from a path string (DS:SI) into ES:DI.
;   Handles "." and ".." specially (they must be followed by a separator or
;   NUL); normal components are validated/expanded via NameTrans, packed to
;   FCB form and back, and length-checked against the output buffer end.
;
; Inputs: DS:SI - source path
; ES:DI - destination
; ES:BP - end of buffer
; Outputs: Carry Set - too long / invalid component
;                      AL = error_file_not_found or error_path_not_found
; Carry Clear - DS:SI moved past component
; CX has length of destination
; Registers modified: AX,CX,DX
Procedure CopyComponent,NEAR
ASSUME CS:DOSCODE,SS:DOSDATA
; Frame layout relative to BP (set up after the SAVEs below):
CopyBP EQU WORD PTR [BP] ; saved caller BP = end-of-buffer limit
CopyD EQU DWORD PTR [BP+2] ; saved ES:DI destination
CopyDoff EQU WORD PTR [BP+2]
CopyS EQU DWORD PTR [BP+6] ; saved DS:SI source
CopySoff EQU WORD PTR [BP+6]
CopyTemp EQU BYTE PTR [BP+10] ; 14-byte scratch for packed name
SUB SP,14 ; room for temp buffer
SAVE <DS,SI,ES,DI,BP>
MOV BP,SP ; BP frames the saves + temp buffer
MOV AH,'.'
LODSB
STOSB
CMP AL,AH ; if ((*d++=*s++) == '.') {
JNZ NormalComp
CALL PathSep ; "." alone: must be followed by sep/NUL
JZ NulTerm
TryTwoDot:
LODSB ; otherwise require ".." exactly
STOSB
CMP AL,AH
JNZ CopyBad
CALL PathSep
JNZ CopyBad ; ".." must also end at sep/NUL
NulTerm:
XOR AL,AL ; NUL-terminate the dot component
STOSB
MOV CopySoff,SI ; record how far source advanced
JMP SHORT GoodRet ; }
NormalComp: ; else {
MOV SI,CopySoff ; rewind to component start
Invoke NameTrans ; s = NameTrans (s, Name1);
CMP SI,CopySOff ; no chars consumed => invalid name
JZ CopyBad ; return (-1);
TEST fSharing,-1 ; wildcard policy (see cMeta below);smr;SS Override
JNZ DoPack
AND DL,1 ; cMeta += fMeta;
ADD cMeta,DL ; if (cMeta > 0);smr;SS Override
JG CopyBad ; more than one wildcard component => error
JNZ DoPack ; else
OR DL,DL ; if (cMeta == 0 && fMeta == 0)
JZ CopyBadPath ; return (-1);
DoPack: ; }
MOV CopySoff,SI
Context DS
MOV SI,OFFSET DOSDATA:NAME1
LEA DI,CopyTemp
SAVE <DI>
Invoke PackName ; PackName (Name1, temp);
RESTORE <DI>
Invoke StrLen ; if (strlen(temp)+d > bp)
DEC CX ; length without the NUL
ADD CX,CopyDoff
CMP CX,CopyBP
JAE CopyBad ; return (-1);
MOV SI,DI ; strcpy (d, temp);
LES DI,CopyD
Invoke FStrCpy
GoodRet: ; }
CLC
JMP SHORT CopyEnd ; return 0;
CopyBad:
STC
CALL ScanPathChar ; more path chars after bad component?
MOV AL,error_file_not_found ; yes => bad FILE (last component)
JNZ CopyEnd
CopyBadPath:
STC
MOV AL,error_path_not_found ; no => bad intermediate PATH element
CopyEnd:
RESTORE <BP,DI,ES,SI,DS>
LAHF ; preserve carry across the length computation
ADD SP,14 ; reclaim temp buffer
Invoke Strlen ; CX = destination length for the caller
DEC CX
SAHF ; restore success/failure carry
return
EndProc CopyComponent,NoCheck
Break <Splice - pseudo mount by string substitution>
;
; Splice - take a string and substitute a prefix if one exists. Change
; ThisCDS to point to physical drive CDS. Used to implement JOIN-style
; pseudo-mounts: if any spliced CDS's current directory is a prefix of
; the input path, that prefix is replaced by the spliced drive letter.
; Inputs: DS:SI point to string
; NoSetDir = TRUE => exact matches with splice fail
; Outputs: DS:SI points to thisCDS
; String at DS:SI may be reduced in length by removing prefix
; and substituting drive letter.
; CX = 0 If no splice done
; CX <> 0 otherwise
; ThisCDS points to proper CDS if spliced, otherwise it is
; left alone
; Registers modified: DS:SI, ES:DI, BX,AX,CX
Procedure Splice,NEAR
ASSUME CS:DOSCODE,SS:DOSDATA
TEST Splices,-1 ; fast exit when no splice exists anywhere;smr;SS Override
JZ AllDone
SAVE <<WORD PTR ThisCDS>,<WORD PTR ThisCDS+2>> ; TmpCDS = ThisCDS;smr;SS Override
SAVE <DS,SI>
RESTORE <DI,ES> ; ES:DI = input string for PathPref
XOR AX,AX ; for (i=1; s = GetCDSFromDrv (i); i++)
SpliceScan:
invoke GetCDSFromDrv
JC SpliceDone ; ran off the end of the CDS table
INC AL
TEST [SI.curdir_flags],curdir_splice
JZ SpliceScan ; if ( Spliced (i) ) {
SAVE <DI>
CALL PathPref ; if (!PathPref (s, d))
JZ SpliceFound ;
SpliceSkip:
RESTORE <DI>
JMP SpliceScan ; continue;
SpliceFound:
CMP BYTE PTR ES:[DI],0 ; exact match (nothing after the prefix)?
JNZ SpliceDo
TEST NoSetDir,-1 ; exact matches rejected when NoSetDir set;smr;SS Override
JNZ SpliceSkip
SpliceDo:
MOV SI,DI ; p = remainder of path after the prefix
SAVE <ES>
RESTORE <DS,DI> ; ES:DI = start of original string
CALL TextFromDrive1 ; overwrite prefix with "X:" of drive i
MOV AX,Curr_Dir_End ;smr;SS Override
OR AX,AX
JS NoPoke ; negative => no recorded curdir end
ADD AX,DI ; curdirend += src-p; (string shrank)
SUB AX,SI
MOV Curr_Dir_End,AX ;smr;SS Override
NoPoke:
CMP BYTE PTR [SI],0 ; NOTE(review): comment in original said
JNZ SpliceCopy ; "if (*p) *src++='\\'" but the code stores
MOV AL,"\" ; '\' only when the remainder is EMPTY -
STOSB ; guarantees at least "X:\" - confirm intent
SpliceCopy: ; strcpy (src, p);
invoke FStrCpy
ADD SP,4 ; throw away saved TmpCDS words
OR CL,1 ; signal splice done (CX nonzero)
JMP SHORT DoSet ; return;
SpliceDone: ; }
ASSUME DS:NOTHING ; ThisCDS = TmpCDS;
RESTORE <<WORD PTR ThisCDS+2>,<WORD PTR ThisCDS>> ;smr;SS Override
AllDone:
XOR CX,CX ; CX = 0: no splice performed
DoSet:
LDS SI,ThisCDS ; return CDS pointer to the caller
return
EndProc Splice, NoCheck
Break <$NameTrans - partially process a name>
;
; $NameTrans - allow users to see what names get mapped to. This call
; performs only string substitution and canonicalization, not splicing. Due
; to Transpath playing games with devices, we need to insure that the output
; has drive letter and : in it.
;
; Inputs: DS:SI - source string for translation
; ES:DI - pointer to buffer
; Outputs:
; Carry Clear
; Buffer at ES:DI is filled in with data
; ES:DI point byte after nul byte at end of dest string in buffer
; Carry Set
; AX = error_path_not_found
; Registers modified: all
Procedure $NameTrans,Near
ASSUME CS:DOSCODE,SS:DOSDATA
SAVE <DS,SI,ES,DI,CX>
; M027 - Start
;
; Sattrib must be set up with default values here. Otherwise, the value from
; a previous DOS call is used for attrib and DevName thinks it is not a
; device if the old call set the volume attribute bit. Note that devname in
; dir2.asm gets ultimately called by Transpath. See also M026. Also save
; and restore CX.
;
mov ch,attr_hidden+attr_system+attr_directory
invoke SetAttrib
; M027 - End
MOV DI,OFFSET DOSDATA:OpenBuf ; translate into the kernel scratch buffer
CALL TransPath ; do translation (everything)
RESTORE <CX,DI,ES,SI,DS>
JNC TransOK
transfer SYS_Ret_Err ; carry set: AX already holds the error
TransOK:
MOV SI,OFFSET DOSDATA:OpenBuf ; copy translated name to user's buffer
Context DS
GotText:
Invoke FStrCpy
Transfer SYS_Ret_OK
EndProc $NameTrans
Break <DriveFromText - return drive number from a text string>
;
; DriveFromText - examine DS:SI and remove a drive letter, advancing the
; pointer past the "X:" when present.
;
; Inputs: DS:SI point to a text string
; Outputs: AL has drive number (1-based; 0 = no drive spec present)
; DS:SI advanced past "X:" only when one was consumed
; Registers modified: AX,SI.
Procedure DriveFromText,NEAR
ASSUME CS:DOSCODE,SS:NOTHING
XOR AL,AL ; drive = 0;
CMP BYTE PTR [SI],0 ; empty string => no drive
retz
CMP BYTE PTR [SI+1],':' ; second char must be ':'
retnz
IFDEF DBCS ;AN000;
;--------------------- Start of DBCS 2/18/KK
push ax ;AN000;; a DBCS lead byte can't be a drive letter
mov al,[si] ;AN000;
call TestKanj ;AN000;
pop ax ;AN000;
retnz ;AN000;
;--------------------- End of DBCS 2/18/KK
ENDIF ;AN000;
LODSW ; consume letter and ':' in one shot
OR AL,020h ; lowercase the letter
SUB AL,'a'-1 ; drive = letter - 'a' + 1
retnz ; normal case: nonzero drive number
MOV AL,-1 ; NOTE(review): AL==0 only for char 60h
return ; ('`'); forced to -1 (invalid) - confirm
EndProc DriveFromText
Break <TextFromDrive - convert a drive number to a text string>
;
; TextFromDrive - turn AL into a drive letter: and put it at es:di with
; trailing :. TextFromDrive1 takes a 1-based number (skips the INC).
;
; Inputs: AL has 0-based drive number (1-based at TextFromDrive1)
; Outputs: ES:DI advanced past the stored "X:"
; Registers modified: AX
Procedure TextFromDrive,NEAR
ASSUME CS:DOSCODE,SS:NOTHING
INC AL ; convert 0-based to 1-based
Entry TextFromDrive1
ADD AL,'A'-1 ; *d++ = drive-1+'A';
MOV AH,":" ; pair the colon so one STOSW writes both
STOSW
return
EndProc TextFromDrive
Break <PathPref - see if one path is a prefix of another>
;
; PathPref - compare DS:SI with ES:DI to see if one is the prefix of the
; other. Remember that only at a pathchar break are we allowed to have a
; prefix: A:\ is a prefix of A:\FOO, but A:\FO is not.
;
; Inputs: DS:SI potential prefix (NUL terminated)
; ES:DI string
; Outputs: Zero set => prefix found
; DI/SI advanced past matching part
; Zero reset => no prefix, DS/SI garbage
; Registers modified: CX
Procedure PathPref,NEAR
Invoke DStrLen ; CX = length of prefix string
DEC CX ; do not include nul byte
IFDEF DBCS ;AN000;
;----------------------- Start of DBCS 2/13/KK
SAVE <AX> ;AN000;; save char register
CmpLp: ;AN000;
MOV AL,[SI] ;AN000;
call TestKanj ;AN000;
jz NotKanj9 ;AN000;
CMPSW ;AN000;; DBCS char: compare both bytes at once
JNZ Prefix ;AN000;; mismatch => return NZ
DEC CX ;AN000;; consumed an extra byte of the prefix
LOOP CmpLp ;AN000;
JMP SHORT NotSep ;AN000;; prefix ended inside a DBCS pair
NotKanj9: ;AN000;
CMPSB ;AN000;
JNZ Prefix ;AN000;
LOOP CmpLp ;AN000;
;----------------------- End of DBCS 2/13/KK
ELSE ;AN000;
REPZ CMPSB ; compare whole prefix
retnz ; mismatch => return NZ (AX untouched)
SAVE <AX> ; save char register for the checks below
ENDIF ;AN000;
MOV AL,[SI-1] ; get last byte of matched prefix
call PathChrCmp ; prefix ending in \ matches root directly
JZ Prefix ; yes, match root (I hope)
NotSep: ; 2/13/KK
MOV AL,ES:[DI] ; char in target right after the prefix
CALL PathSepGotCh ; must be sep/NUL for a clean break
Prefix:
RESTORE <AX> ; get back original (flags preserved)
return
EndProc PathPref
Break <ScanPathChar - see if there is a path character in a string>
;
; ScanPathChar - search through the string (pointed to by DS:SI) for
; a path separator. Used by CopyComponent to decide whether a bad
; component was the last one (bad file) or intermediate (bad path).
;
; Input: DS:SI target string (null terminated)
; Output: Zero set => path separator encountered in string
; Zero clear => null encountered first
; Registers modified: SI, AL
Procedure ScanPathChar,NEAR
LODSB ; fetch a character
IFDEF DBCS ;AN000;
call TestKanj ;AN000;; 2/13/KK
jz NotKanjr ;AN000;; 2/13/KK
LODSB ;AN000;; skip trail byte - it may LOOK like '\'
OR AL,AL ;AN000;; 2/13/KK 3/31/removed
JNZ ScanPathChar ;AN000;; 2/13/KK 3/31/removed
INC AL ;AN000;; string ended inside DBCS pair: force NZ
return ;AN000;; 2/13/KK
;AN000;
NotKanjr: ;AN000;; 2/13/KK
ENDIF ;AN000;
call PathSepGotCh
JNZ ScanPathChar ; not \, / or NUL => go back for more
call PathChrCmp ; distinguish separator (ZF) from NUL (NZ)
return
EndProc ScanPathChar
DOSCODE ends
END
| 28.663583
| 85
| 0.645404
|
b003f0adb7332186a94fe1cb096faeb8062e78b6
| 2,773
|
py
|
Python
|
dayu_ffmpeg/network/node/codec.py
|
phenom-films/dayu_ffmpeg
|
981c00ff3354c881d2cef8ae5e5b9f1880bc9a36
|
[
"MIT"
] | 31
|
2018-10-11T07:44:15.000Z
|
2022-03-30T19:36:08.000Z
|
dayu_ffmpeg/network/node/codec.py
|
phenom-films/dayu_ffmpeg
|
981c00ff3354c881d2cef8ae5e5b9f1880bc9a36
|
[
"MIT"
] | 6
|
2019-03-22T02:43:12.000Z
|
2019-11-20T14:43:28.000Z
|
dayu_ffmpeg/network/node/codec.py
|
phenom-films/dayu_ffmpeg
|
981c00ff3354c881d2cef8ae5e5b9f1880bc9a36
|
[
"MIT"
] | 12
|
2019-01-07T09:13:04.000Z
|
2022-02-16T13:42:37.000Z
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
__author__ = 'andyguo'
from base import BaseNode
from dayu_ffmpeg.config import CODEC_ORDER_SCORE
class BaseCodecNode(BaseNode):
    """Common base for codec-related output-option nodes.

    Subclasses render an ffmpeg command fragment via ``simple_cmd_string``;
    ``order_score`` controls where the fragment lands in the final command.
    """
    type = 'base_code_node'
    order_score = CODEC_ORDER_SCORE

    def simple_cmd_string(self):
        # Default fragment is just the node's type tag.
        return self.type

    def complex_cmd_string(self):
        # Wrap the simple fragment in filter-graph stream labels,
        # e.g. "[0][1]<cmd>[2]".
        in_labels = ''.join('[{}]'.format(num) for num in self.stream_in_num)
        out_label = '[{}]'.format(self.stream_out_num)
        return '{}{}{}'.format(in_labels, self.simple_cmd_string(), out_label)
class Codec(BaseCodecNode):
    """Select output codecs: ``-codec:v <video>`` plus optional ``-codec:a <audio>``."""
    type = 'codec'

    def __init__(self, video='prores_ks', audio=None, **kwargs):
        """
        :param video: ffmpeg video encoder name (default Apple ProRes).
        :param audio: optional ffmpeg audio encoder name; omitted when None.
        """
        self.video = video
        self.audio = audio
        super(Codec, self).__init__(**kwargs)

    def simple_cmd_string(self):
        self._cmd = u'-codec:v {video}'.format(video=self.video)
        if self.audio:
            self._cmd += u' -codec:a {audio}'.format(audio=self.audio)
        return self._cmd

    def complex_cmd_string(self):
        # Codec selection is a plain output option, not a filter-graph node,
        # so no [stream] labels are emitted. The original passed stream_in /
        # stream_out keyword arguments into a '{cmd}'-only format string;
        # they were never rendered, so that dead computation is removed.
        return self.simple_cmd_string()
class WriteTimecode(BaseCodecNode):
    """Write a starting timecode into the output: ``-timecode <tc>``."""
    type = 'timecode'

    def __init__(self, timecode=None, **kwargs):
        # timecode: SMPTE timecode string, e.g. "01:00:00:00".
        self.timecode = timecode
        super(WriteTimecode, self).__init__(**kwargs)

    def simple_cmd_string(self):
        # Cache the rendered fragment on the instance, then return it.
        self._cmd = u'-timecode {0}'.format(self.timecode)
        return self._cmd
class WriteReel(BaseCodecNode):
    """Stamp a reel name on the first video stream via stream metadata."""
    type = 'metadata'

    def __init__(self, reel=None, **kwargs):
        # reel: reel-name string written as v:0 stream metadata.
        self.reel = reel
        super(WriteReel, self).__init__(**kwargs)

    def simple_cmd_string(self):
        # Cache the rendered fragment on the instance, then return it.
        self._cmd = u'-metadata:s:v:0 reel_name={0}'.format(self.reel)
        return self._cmd
class Quality(BaseCodecNode):
    """Fixed-quantizer quality: renders ``-qscale:v <n>`` (empty when falsy)."""
    # Bug fix: every sibling option class identifies itself through the
    # ``type`` class attribute (Codec, WriteTimecode, PixelFormat, ...).
    # Quality only set a private ``_name`` that nothing reads, so it
    # silently inherited type 'base_code_node'. ``_name`` is kept for
    # backward compatibility with any external code that referenced it.
    type = 'qscale'
    _name = 'qscale'

    def __init__(self, qscale=2, **kwargs):
        # qscale: value for ffmpeg's -qscale:v (2 = high quality);
        # a falsy value disables the option entirely.
        self.qscale = qscale
        super(Quality, self).__init__(**kwargs)

    def simple_cmd_string(self):
        self._cmd = u'-qscale:v {qscale}'.format(qscale=self.qscale) if self.qscale else u''
        return self._cmd
class PixelFormat(BaseCodecNode):
    """Pixel format / profile options: ``-pix_fmt <fmt> -profile:v <n>``."""
    type = 'pix_fmt'

    def __init__(self, pixel_format='yuv422p10le', profile=2, **kwargs):
        # pixel_format: ffmpeg pixel format name; profile: encoder profile.
        self.pixel_format = pixel_format
        self.profile = profile
        super(PixelFormat, self).__init__(**kwargs)

    def simple_cmd_string(self):
        # Each half collapses to the empty string when its value is falsy.
        if self.pixel_format:
            pix_part = '-pix_fmt {0}'.format(self.pixel_format)
        else:
            pix_part = ''
        if self.profile:
            profile_part = ' -profile:v {0}'.format(self.profile)
        else:
            profile_part = ''
        self._cmd = u'{0}{1}'.format(pix_part, profile_part)
        return self._cmd
| 29.817204
| 92
| 0.627119
|
db9d928cd393ce74c18dc5d9614b15651dafe97f
| 84
|
php
|
PHP
|
application/admin/model/Gallery.php
|
FanchunTsang/KindergartenProject
|
9cffbe985b37dc320704dff2ae9a07deb2d16f9f
|
[
"Apache-2.0"
] | null | null | null |
application/admin/model/Gallery.php
|
FanchunTsang/KindergartenProject
|
9cffbe985b37dc320704dff2ae9a07deb2d16f9f
|
[
"Apache-2.0"
] | null | null | null |
application/admin/model/Gallery.php
|
FanchunTsang/KindergartenProject
|
9cffbe985b37dc320704dff2ae9a07deb2d16f9f
|
[
"Apache-2.0"
] | null | null | null |
<?php
namespace app\admin\model;
use think\Model;
/**
 * Gallery model (ThinkPHP admin module).
 *
 * Intentionally empty: it maps by framework convention to the table named
 * after the class and inherits all CRUD behaviour from think\Model.
 * No custom fields, scopes or relations are defined yet.
 */
class Gallery extends Model
{
}
| 9.333333
| 27
| 0.738095
|
f9a6c6e537871a0a93240dfa163966dc0bba6d33
| 14,094
|
html
|
HTML
|
Python_Fundamentals/08_magic_methods_and_exercise/Pet-store-master/Templates/items.html
|
Dochko0/Python
|
e9612c4e842cfd3d9a733526cc7485765ef2238f
|
[
"MIT"
] | null | null | null |
Python_Fundamentals/08_magic_methods_and_exercise/Pet-store-master/Templates/items.html
|
Dochko0/Python
|
e9612c4e842cfd3d9a733526cc7485765ef2238f
|
[
"MIT"
] | null | null | null |
Python_Fundamentals/08_magic_methods_and_exercise/Pet-store-master/Templates/items.html
|
Dochko0/Python
|
e9612c4e842cfd3d9a733526cc7485765ef2238f
|
[
"MIT"
] | null | null | null |
<!DOCTYPE html>
<html lang="en">
<head>
    <!-- Bootstrap 4.0.0 from CDN; integrity hash pins the exact file. -->
    <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" integrity="sha384-Gn5384xqQ1aoWXA+058RXPxPg6fy4IWvTNh0E263XmFcJlSAwiGgFAW/dAiS6JXm" crossorigin="anonymous">
    <meta charset="UTF-8">
    <!-- NOTE(review): placeholder title - consider a real page title. -->
    <title>Title</title>
</head>
<body>
<div class="row">
{% for item in items %}
<div class="col-lg-4">
<div class="card" style="width: 18rem;">
{% if is_toy %}
<img class="card-img-top" src="data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxMTEhUSExIVFRUXFRUWFxgXGRYWFRUXFxcWHRcXFxUYHSghGBolHRcYITEhJSkrLi4uFyAzODMsNygtLisBCgoKDg0OGxAQGy0mICYvNS8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLf/AABEIAOEA4QMBEQACEQEDEQH/xAAbAAEAAQUBAAAAAAAAAAAAAAAABAECAwUGB//EAD0QAAIBAgMECAMGBQQDAQAAAAECAAMRBBIhBTFBUQYTImFxgZGhMrHwBxRCUsHRI2JyguEzorLxQ1OSJP/EABsBAQACAwEBAAAAAAAAAAAAAAADBQECBAYH/8QANBEAAgEDAgQDBgYDAQEBAAAAAAECAwQREiEFMUFREyJhMnGBkaHRBhQjQrHwM8Hh8RVS/9oADAMBAAIRAxEAPwD3GAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAgCAIAvAMNTFKOMgdxDpv7g2ksshvt2gDYuJsq0WccuI2sXh1F8yRhdo0qnwODNlUi+pNSuaVX/HJMlTcmEAQBALHqqN5A84MqLfJFi4lDuYQZcJLmjIrg7iJjUuRqXTIEAQBAEAQBAEAQBAEAQBAEAQBAEAtqOALmR1akacXKXIGsxeL0LE2UC8q4TldtyltBdO4qTjSg5y5Lc4naW0Gqtcns/hXgO/vM60kkfOeIcSq3c25Py9F/eZCLTGor8inUKm4hklOpKDzF7m+2R0mdCFc5k7/iHnN4VNJ6Gw45UjJQrbx79TtKFdXUMpuDuM6k01lHr4TjOOqLyi8uBqZk2W5psdtQ3spsOfEzGSwo2q5y5mreuYO1QS5Fq1uRg2cCXhcdrZvWc1e3VRdn3XM461rtqibihjLaHUc5xUbydKfhV/gyvcScDLZM0KzIEAQBAEAQBAEAQBAEAQBAKXgFCYBAxda5tynm+L3eZ+Gnsv5JIRNXtqmzUHC77X8QNSPaWFrj8tHT2KzjNKdS0moc8fRczjSsl3Z836FlpjDGS0zVvuZRaYTNkdN0S2rZuqJsG1F+fdJqM8Swz13ArxN+C/gbnbGO/AD3n9BOpvoe1s6Orzs1BeEWSRSZMlL2gYyULc5kzjsT9n4r8J8v2nHd20a0MM4bqh++JucBXschO+5H6ic/Dq73oz9pFZJYJ4MtDUreAVgCAIAgCAIAgCAIBjq1lXeQJpOpGCzJ4BAq7bormJdQF+IsyqF8bnScdTiNGOyy36Iylkrh9qJUUOjK6ncysGU25EaTn/8ArwTxKLRnSZfvItOmHEKFReWW/rsYcWjWdbrPHV5eZtk6M6m8v+E3CnS0djSaOe2tsdlJampZTwGpXy4iWjWOR4jivBp05OpQjmL6c2vh2NERw+vSR5webaaeGWGYbMIsYTRki3MQrFGVwbFSD5A6zTxNLWTtsqrp1oyXc7alRWtd+bGx7hoPlJ6d1CpOSi+R9PtrqUVtyItWiVOU/rrOyLyW8KinHUiqj6tNwyxx8pk2TLEe4vMGzWNi1ntuPhDCWVubvBYjMFbz85SXn6NxCquu39+BS1qemTiblXl5k5TDW2pRTR61ND/M6qfcySNGpL2Yt/AilXpQ9qSXxM+GxlOpqlRH/pYN8jNZQl
H2lg2jUhP2WmSJqbiAIAgCAIAgEXE4m2g3/KVHEOI+D5Ic/wCDeMcmm2uKppP1RAcg2LLn8bLmW5tuuwE8+quuopVW2jdrbY87qdHqlVaeFalVouCXHVpRNPtEjrcQECopFjlCu72LS9VaFJOspJrl1z7ln7IhUGzqehfR6vg1dKlZXQkFUUN2W/Ecx56aWnBeVPzOJU6cs98EsIuPU6QiV04Th7SaNyLXo8Rv+c5K0XJbGUMNX575FbXU6M04vDNmsk5ak9bbcVo1F5nh/T++8icWjHiMLTf40VvEa+u+WacZrK3OWvZ0a6/Vgn71v8zU4no7SPwll87j3195rKmiorfhu1n7DcfqvqaPaex6lIZrhl5jePEcvOc9SDRQ3vA69rHWvNHuufyNBtF7U2tvsbSvqy3SK23jqqJHfdH9MPSB35FvzvbW8pY150qzlHnk+mW0f00T69IOtuPAz1drcxqwU4nXTm6csmpqNYkGWKae5Zx3WUYWf6/aMkiiRHqZW7jNW8E2Mr1MGIxNr2O6/wBe01lM2jDuTKO1ko0Osc6a2HFjc2A77ThlZVuIVo06S5PLfRHneK3EKEpSl/6chtfphXrG3WZE/IlwLd5Grec+gW3DaVFLbL7s8ZXu69d82l2RohVz6nfLHTjZHJo07Iz0sYytdSQeBBsfUbppKmpbM1UHHdbM7Do50+qUuxXvUTTtE9tR48fOVN1wuEt6ez7dCyteJVaW1TzL6r7/ABPTcBjUrItSmwZW3EfI8j3Tz1SnKnJxksM9DSqxqxUoPKJE0JBAEAQDFiKmVbzlvLjwKLn8jKWWa2eMblOWp82T4wVCyytuFzqeapsvqY1YL8svKNpRpeyt+/U1bMbMJ1xpSaI3Uii1mE0qUnjDWxtGaZY081f2PhrXT5duxKmQMaLHMPOecqw3yiVGbD17iZpyBIDzphWnF+Vte7YYKF52U+J3Ef3Z95rpRhqvLW04kq/lnszDicvSwAbFkD4EIc8r7wvrr5Tqo0tdbPY8pT4dFcSk4ryrf4vodLQ00Hj6yj4pbulXbXJ7np6fYzrUkVjdOhU35PmS4yQdp0swzLvHuJ62lWTWVyZ121bQ9MuRoauJtp9fW+TORbKOdyNWxWlrzWUiSMSmxsKa9QgnsrqxHnpecVWo8qEebOe7rqjDbmc10q2yK1a1O3VU7pTA3W0u3nb0tPoPCrJW1BLq92fNuIXDuKredlyNFmvpzP0PrlLVHHjBmB7hBG1knbI2a2IqrSS12Op4ADeT4CQXNeNCm5yJKNKVWahE9h2Xsqnh6Yp01sLC50zOR+JjxM8RdXc6knOb/wCHqaFvClHTFGfA0VpVMyAKGPbA0B5Nb8w58R4CcNLi1Ou/Dbfo3/Bt4ChLXHbPP7+83k7TYQBAEAiY7gPGVnEraddRjF4WdzeDwR1WYtrGlR3xl92bZbKFuUso0m+ZFKolyMT3k8YJcjnlNvmWEzbJqWmamUWB9bTiuKaa9GddKWdmRNqt/Dbu/eeEr08VHH1OpGpwmNkXhYNmbWlibzGGgZetm8YOTwglkyCizDlO6nQ8KSlN49DDcev0MdHZmW+upNybbzLKPEdGyRAqdNNvD3LzhiNxB9prWuYXUNEufQ3ShnqjBVcjfcSqq2dSn02JlB4ytyO2I75vbXlShst12MELFU1bUqD37jLSHFYPnlE0K04cma2pgE5Ef3aRPiUejJvz1Xv9DNtCqMJgKrpfNU7IPG79m9+4XMs+AQ/N3euXJf63Knil1J0nJvfkeYqRbed8+nI8hgzIBrYndvkkTR5DeU2MI7/7LMH2q1XTsqqDn2iST/tE89x2piMYfEteEwzOU/h8z0F54Di1y4pUo9eZ6CKMd5RReHk2ZscDVzL4Ej0nsrOo6lGMnzIHzJE6jAgCARMad3nMOLlyGpR5kNmm8aSXMinUb5FbyUjLHaYBHZoZgtNWaNmyItWtr5zlry8p00VmRF2rUvSYDeQPnPHxhr
u2nyyzqqLynMLXqLvS/eNJ2VbKLflZHGcupt9n12bTIR42nLKykluyaCc3hHR0qeRC5BJCk2Gp0F7DvnVOmraMYx9p832RrUmuUeX8nmO3Ompbqmeu9KqKdcNSoKWFKpULKjVWLjMyIR2BqGH4TpLChY4ylHK23fXG+3v7nI5NnTbAxWIo0azIvXYSjQBwzXDVMS1izPcEnU3GW2lwBexnBcwpTnFN4nJ79om0W0iPU6V4sNQY06ag4WrWq0My9ezqHKItNj1gJshACn4mvum6s6SUkm+aSfT58g5vY6bZGPGJoI7p1dQojPSJu9IsLgEWBHMXA0ijLwqroyeYN4T/ANomhUcfMiDtDDkE23/Mc/GQXVlpnhHbJKcNcfiaaviWHOcbt5J7o59RHSuWPGbRomrbLOnla2DoLzqj2Rv39p7P8JxUZz+P+iq4rl0o+/7nA9Z9ec98mUGkk4IkqDz9pKuRFVwpYL6j2Ouv7a/XlM5NYrKPTPstpEYao5HxVdO9VVbfMzy3HZ5rKPZF/wAJj5JP1OqxxYo+T4srZf6rG3vPnV9PVdPPLYuf2nkmF2krVaNFnq/fGpIGdnsKWKFFqQpEW+Ig9o30e3fLOdHEJTSWjPbnHOc/3ocx650XfsVV/LXqrfn2r+17eU77D/CmZN3O0CAUMAjYwXXwm0XuaTWxr3YAXOnjN3JRWWQpOTwjVYvbSLoNT36CcVS+ivZ3O6nYTlvLY1VTbdS97i3ICcrvaucnYrCljBam2S3A68QDJFezfNfIjlw+K5SMy7Qvx1G/n6GTwuIz2OSpbTp7vkWti9frjunLdVcIloR6kqn2tTu3CU/D6WXKq+pvUfQr93XlLFpGhIwSAuBb6AkFPE66XRHZp8Oi31ZtXW4IuRcEXG8XHCVvEZv8y/TBxpbHBU/snwoFmrVyeYNMD0yH5zqfGqvSKI/C9TsqOzhTw/3ekSgFMojbyvZsG8b6yqnWc6viT33ySKOFhHju1/s/x+H/AIyZa2VgwNEsawINw2RludQPhJM9NR4nb1fI9vR8iFwa5no/RDYzUw+LrZvvOJSi1ZWy2RkW1gANCd5HA6DdKe5uFKcaUPZi9vmSQi+bNrtJdx8vr0ltfR2Ujvsnziczi11lW2aVYaJtGOjblESFox9J8Ia2EdVF2Qioo4nLe4HflLS54HdKhd4fJnHe0fFotdVueY9bYDUT6NGex53QsknZ9a6jKOG4XP1x9ZJ48IxzJpI1lbTqVNMItv0WSSaLsd1vG04KvG7SDxqz7kXFv+GOIVI58PHveD0zort3D0cNTos5DgEt2Wtckk6277eU8rfXsK9aU1yL624BdW9JRcV80dLhtpUqn+nUVu4HX03zxl5CXjSk1tkxUt6tLacWjkNmfZ0lPFjFtXZyKzVQmUBdWJUEkkkgkG/dunbPikp0vCUemMnGqeHuz0PZWGFNSF/E7ufF2JPuZeWCkqEcmjWGTxOwwVgFDAImOxC00LMdAPXums5qEdTNowc3hHB7S2m1Qm5sOAHAfr4yoq1pVHllvQto01saoudSRqdfAcNJEzqwdDsbYSuq1aovcXC7hbgTb5d87KVBJa5FfcXTT0QOhRAosAAOQFh6SquOKyb00tkceM7swYrBpUFmUHv3EeB3ici4hW/c8m0W48mc5j9llGAvdOB/Q9/znQ6vjxWl+/uTOScdiQKwAAE64zUUkiHDYXEazDqZNox3Juym7Y8D8prZv9VFhdxxSZuSJwcUWLh+5FZHkLSvSMlIBSAWvMw9pGSFtQfw/Q+9p627WaJNZf5Uc7iEvrKlLJ1XscNMjhY0nESMNVsbzWWU1KPNGDkOl/R+mtXPTqBQ/aana5XmV4BT9X4evs+PP8uljL6f9931IrX8PO8rak8Q6/Zf3Y1mFw6otlFh7/5lbWuKlZ5m8nvrPh9vaLFKOPXq/ezOD4yA7GiufjBjSuRko4kqQVNiOINiIaT2ZpOkmsNHa9GelGZhSrtvsF
fQf/X7zjdjB1E1sup5viXCdMXUo/Ffb7HodKegiklhHljOJkFYBQwDiul2OLVOrB7K/wDLiT4bpWXdTVLT2LSzpYjq7nK1Gv577/W7WchYJGbC4dqlRU/M1uFrE8Lb9JtTjqkka1ZqEHI9Dy2AA3bpPxKs6VHy9dihW7LZ5MlLTMNg0u0tuYZWahVqZSVBvraxcIbMNLqxXN+UEE6Tqo0K2FUguv8A369DGtJmtxNNkYqeHvO6E1OOUSJClJUjK5mx2S9nX09dJi3lpqosrlaqTwb9jrOfi+1ZP0+5UQF5VqSNil5hyMFCZjKNiyo2hm0eYNftJv4R8B8xPXVv8HwRNaf5kabDLe/lK+hHLZ2372SFSjN5QK1MhY+uKVNnPDcOZ4SOMNUsHTaW7uKqpo4utVLksxuTvvO+MVFYR7qlSjSioRWyKX+v3mTfB03RzofUrhatQ5KR1H53XmvBQeZk0KTe7KTiHGqdBunTWZL5L7nZ4XothKY/0EY837RPmZvN0qa82F7zzlXit3U/e/ctkSMVsbDVPjoUzbd2QCPAjUSJ3Vs3jUiCneXNP2JtfE5bbfQqwL4Vjpr1bEm/9LHUHuN78xN3TTWYl5Zcdbei4XxX+0br7P8AbprIaFQnrKe6+8qDYg34g6SWhUb8rOPjViqU1Wp+zL+f+nZrOkoi6AWtAPLto1izsfzMTz4kmUUnltnoKUcRSNbkABBvvseOtxb5/KYJjdbDp/8A6Kd+d+QFg1hfjuGkmt1+ojluW1SkdtUEj4w/0l7yohzLDPNZJDBUxKDNd1GW2btDs33ZtdL9820SaWFzMZSPKOk+MpUcTjavUqXVqYejVuab9auQYlGBFs1PNTZLakqSTa0v7WE50qcdXfdc1j9r/lED5s6fY20RicHQr2scpRhcnWmSh1OpvlvrrOeNPwq86fx+Z003lEtROjSbl9B8rd17j9ZzTWmeSyoy8SljqjpGqyPjG6hIqYrDKdbKLUbDPMqYwM0ZBZUYWktN+de8M1O1q/YI8B7z1txJeG0dNlH9VGPAUrJc721/aaUYaYmLyrrqbckXMky4nMcj05r2anSvwLn5D9YhFI9LwCksTqP3HNqfrdJD0h0vQrYf3irmcA0qZBYfmP4Vty5/5k1KGXllPxm//L09Efaly9F1Z6idNJBf3qoR0x9p/Q8Sk2WzzM5ynLVJ5ZIJoC0idtreToSW+3Yw0cntakMLjqGKXRajFanK+4nzUk/2T0uqL01I8mXdnUd1Z1LeXOKzH+/3mehrOw84XTILWmGDyrFUrORyZh6GUTWGehpvMUyJV0B42trz1BMEuTZJWyMrgjsnNa3I8eczCWmSZDOOqLR3YqBgCNQbETvu6Hj0XFfAo94ywyHtPFLSpO7VFpAKe25AVSdASTpvInkYU5eKoYy+xu3seO46qELV9os9N66VMLWWmgYYhVFPJiksQuUXXUDUroNSJ6OCytFvhpYksvk+sX7zlzvub+ng8PjtlUqfX5mCqgrFCKl6TGwKk5iLcCeN5Bb2127+SpQ25vfyrPr3I7i8oW9PVUlj+fkbqhWpUqVOjTpWpoLKL28zYbzqSeZMvbb8OSU3UrVMt9lt7tyjqfifG1Ont6v7EujttFt/AA8CCfcTqn+HqbXtP6/c0j+J235qf1/4TU2hhqmjAL/ULf7hK24/DtRLMG/5/ktrX8RWzftOL9fvyJvVhhmRrjgb39xPP8Ts7rSozjy7FrSrQqLVFpr0IT1yDY755ppp7k2S5MTMmMmQYiEzJhxWLVR2mtf1PlLLh1nWuKi0LZPdklOjOptFGqxGIz2sLKNdd7eNvlPcLh8ZYc2WNK30J5e7LWx7/mHoJ0fk6Zn8nS7fUrS2mR8S3HMaGQysV+1kU7BftZxHT7EXxKuL5TTUA2sAQWuPHjOKdGUH5kXXCs0aOmXPLNLhcQCZHgu4S1HsvQiiEwdK29xnPeW3e1p1U8RieF4xUc7ueejx8h0t6RjCUK
lRAlSohpFqeaxC1HsGIGoB1sec89Ck7uv5spPO/uK5vStjUYXpuEqYz7wMtGi9EUyqkvlq6DML66WbwJ7ptU4c3Gn4fOWc/A1VTd5Lumu2GqCrgMPnGIbDpiKbobZgtQEopvfMURu47piyoKOK1TGnOH8jM5Z2Rr9nbVxFGoKmenWpYnH4emSjirkNWiRUAykhCrhB/b33k1ajTqRxjDjF+nJ/XY0TaOh6cUb4Um2q1KbDuOYL8mM6OHz1WzXZlxwiWm7SfJpr6Z/0dnR3C/IS7XIppc2ZJkwUaAcD0mwGWsxto5zj9R63lPcQ01GXFpU1Qx2NG9O4I3C3rffrzkB2ZKK/YHHhw398yYOj6NbYBtRbvKH3yEetvCd9tVz5GVl5Qx+ovibLbuBSvRqUKg7DrlNtCORHeDYjwmtzaZkqtPaS+voV7ltg8m2/0OrnE0KLVC2Go0UVHa3wi+ZLD8RIt4ZZPwZK71SSw8+ZfwVXEL2NpTzzb5HT4ekqIqIoCKLACwtPXwpRgsI8PVrTqtzm8suJ7pJgjyWFptgzkoTBgyYfFvTOZGK/I+I4yOrQhUWJrJ0W9zVoS1UpYN1R2guIGRrJVHwncG7u7wniuN/h3EXVoo9nwzjUbh+HV2n9H/0174gqSCbW754n8u+xe5KHHH8x95ZcP4U7ie+0Vz+x1W1B1pehHU5u0eO79/Ce5oUIUoqMVhF6oKC0xL2M6AkW9bA0kujsys+opkDm3Z9j/wBSKVWETnqXdGnzl/ssxuwahUhqYdSNQCG9t/pI3Upy2Zinf0W+ePeeb7a2O2He41pk2HNT+UiV1ejo9xc29fLPX+i2KBw9K2g6tLDkMomG/JhHkLxPxpZ55ZzPTkjB16mNZVrLiKIw4pOLpnGU53B0KAUxpvueG8U3D5fmIKim04vOfQ5JrDycfs3pFUxLHC4tjUp4ipRUsoRKlNg4yshC2IF7ZSN263G0qW0aS8Wls4p+5+8jz3PQPtG2cVoCth6RNYL92zLmLLQe4YADedAt7XGc23ym4bW1VNE3t7XxJKiwtjmPs+6OY2ljKVVqVSlSAYvm7IYZGCgpe98xB1Glp38QuqE6LipJs1hF5PStu0esRKX/ALKtIeStnPshmnC03Ra7ssLOfh1HPtF/xhfVnTJL0rS+ZBRoBq9ubP65LD4hqv7TluKWuO3NE9Cr4cs9DhMRQKsQwNxpbv5kfW6VbWC5jJSWUQnNj3Ne2ul7f4B9YNzBWuOY3m+txbj9cpmLwzWW6wdLsDbhrqVfSolr/wAwO5rcNx9Jb0J64+pQ3VPw5ehBx9XM5I14DyJnpbShGjTwl6s+ccRuPHuJS6ckYAv16cZ0tnFhDLwjPUztyKMv7zKZjBgdLfW6bp5M9TG02MosqG2v/cxhPZm8W+aNtgcQKyln+JdH9Pi9Pe8+f8W4cqF09PKW6/2e/wCFXUrmgtXtLZ/cg5zUOm4mw8PH63y4tbdUqaij3FvSVKCX9ySSBusAOHMW4TqNlkoPPd9GZNjf7B2Pa1WoO9V+THv7pyVqv7UVF9e5/Th8Wb4yuq3FOkvOyqSND0g6T0MG6LiBURXGlXJeiDc6MwNweO7cZpSuYVU3DfHz+QbwW9K9gLiqDoABUtmRv5l1AJ5Hd4EzWF1TrR0xl8Dts7uVCpF9OqNV0WDJQpKwIIppcEEEHKNCDuMgVbzYM3slKvNrlk320dlUMVTFOvTFRAQwF2UgjiGUgjeRv4zzMLipQqydN45nM4prcYPo3g6bKyYWirKbqwprmB5hrXvJZXleaalN/MxoiuhtpzpGwksUC+hQBYOfw3A89/npPWcPo6KMc+8jc2k13NiksCIvgAwDE4mrBq9p7LSrqRZuDD5HmJy1aCnv1J6VeVPlyOT2nsCqoIC3F7gpqb+H+JxSozj0LGndQlzZoatF9zqVYHyP1aaYZM6ke5O2Fs2orvWKlVyEa6X1BvY+EsLB/qqPcpeLVYqjJrmkyr/X+Z7RHy
qTyXBOf+JrqM6WuaLwv1/iYybJZMbD69f2myNWiwibJjBheny/7m6l3CZEqE7jN13Jo+hoqm1mp1npD/y01XzDrf8A2l5TcVoKc6b7M9t+GI5qf3odVgqfw25e+khwfQ29nkmCn3fXdBpk2OxsGKlUflXtH9B5n9ZDWnpicd5W8Ok+7OraU13cqhT1Pn0KNLJYZ5Sc5TlqluyTB5EXr4ZsbiBhMXQYIzZXfrsIc1RAXPYHaUEuLMwFjuAtPQYp1I04a4v3bS5ciB5RsfsexVV/vKtUdkUUiAxLWZusuQTzy6zn4xGMdDisPc2pnZbQoAPoPi18+M4qNZKDcmSEqhppKnLcss2JStJEgXgyWMTBkp07y2suHyk9U9kaSkTKaz0kUksIjM6ibGC6AIBYwmAYXWMAwssxgyWGauIIuOp3pv8A0t8pvQio1Yv1OW7jqoTXo/4LdlbNSmi6AtYEsbXud9jwElubmdSb327EPD+H0balHCWrG76/+E+05ix0owYjA0n+JFPsfUSWFxUhyZzVrG3rLzwX+/mjn9p7BYa0+0uummYX9BLS34hF7T2ffoeavuBVIZlQ3Xbr9mc8313S1WHyPNPK5mJjNkEYK+o1+vObokjzONx+GZsbRYblve3foNfEyv4h7UD3v4Ua8TB3+BF7D+X11t+s4We+qbInqIIWzoujtG1Mt+ZvZdP3nHcvzYKjiE81EuyNi08fxOpqrNdEcsVsWyuNjz96OExu0Wajj6yVUYrVodsJUFM5XRQ9gVIBBAvztxlxGVa3tkpwTT5PtnchaUnzN90U6K0sB1opO7CoVPbscoXNlUEAX+I6nunFdXkrjTqS27G8Y4NhtOkWK2NrX+rRStvFgGy2kjTdcPY1EyjTbjOiHDl1ZjUTKVKWVCzpw3SNXIkKssIo1MyrJEYLxMgrAEApALSsAxukAwOkwDCwmTDLaeihRwFvTd7Si4tc1Yz0xeFzN6MUopdti68pVWqLlJ/MmwVzzop8Qrw/dn3hxRXPLe24lTq4jLZmjia/amykrD8rfmA9jzEvba8nRfddvsVV/wAKo3ay9pd/v3RyG0Nn1KR7Y0voRuPdeX9C5p1V5GeKvLCvav8AVW3R9Ga2qDunUn1IKe7witHYdkd3+M5SBxUK17eJ/aeS4hxSFW6jGHKOVn1Z9G4BbO0cZT5vp2/6SqDBWXfqbW4WYXHyt/dO7O57WS1RZsGaxmSFLKOp2L/oJ/d/yM4K/tsor3/M/wC9CU08Xer9eS9SKPI4/wC0fHMtBaS/eV6xtauHXMaeUggNqDqSNAQTlOvAz8NpKVTU9O3R9TWozz3pDtqtRrdTTCnEUwqtihTy4utdQRe9yujBdbsbb9bS5oUIThrl7L/bnyoiZ7HsinVWhSWu2esKaio2gu9u1uAG/unm6ul1HoWFknXLcmGnL6yo6aSz1I2y9ac61AxkzJTkkYGDMqSRRBlVZIkYLwJsCsAQBAEAQC0iAY3SAR3SDBiyyv4ja+NTzHmjMJYZhp4hWJVWBK/EOI3jUeU8xOlKO7ROmmaDpP0hbDkU6aLnYqEZz/CLlh/Bcg3R2X4SdLkcL26bW0VVapPbr3x3XfHU1lPBHbpPXDUWbBsKNQhHuQK1GpnKnOhtdNxBG8HnoZfyVN6lGe63XZr7mPEZ1WadFlxFRWir8/uZwY6yKwIYAjkdRLunXS80JfUiqUoVI6ZrK9TU1cFSpC6oL8L3J8iZFecWqTWiMsv6I57bhlrbvVCG/wA/lkg06ZJlRCDZ3kLaGzyBcbtCO4ggge09Ha1XKGHzReWV0prS+ZcDmAPPW3G/EfOdqJsaXg6Po9VvSynepI8ibg/P0nHXWJZKbiEMVdXRmxM8xxS3an4i5P8Ak5YvoafpHsZsTTCriK1BlNw1Jit+5gCLjzE4bav4MsuKfvEo5NKOgNM4qniqlepVderLBgv8R6aqFYkbh2VJHdvnV/8AQn4Tpxikn9Mmvh78zsVWRWls6k
l2NmzMqz0kY9CMypTkqiYMoSb4MF4WZwC4CZBWAIAgCAIAgCAUIgGJ0gGJqcwDWbT2OtUMQFWoVyhyCdOTAEEgjS1xOWtbxn6A0mzth1VqL1wfMlwtZKubrKfCnWDAFxyJBI/NfU1lzSnBPStn0x17rsZXqcph/spIq9rEA0tdQCKpPhuHjc+E3fFMQwob/Qz4Z6FsrZyYektFCxVd2Zizam5ufPcNJU1FUrzc2t/QkWESXB4CdNHh83z2DkQjgyxu2ss6VoorCMZMqYK3CdUaCRjJkbCAixGhk8FjkbRk4vKNHjtkOl2pjMp3gfEO8d87IVe5b0byE1iezMGCxTK2ZLab1Ol+5hvE3lFTWCatSjUjpl8/sb/CbRRwNQrcVJ1B7uflOGrQ5xksopqtvOm+W3ck5pU1OFU28xeCFSZkppfhI48Kw95ByM605ZUqKprCNMmVUk6RgyhZuC60yCsAQBAEAQBAEAQBAEApaAWlZgFpSYaBaUmjiZKGnNPCj2A6uZVNDI6ubqIHVTOAOrm2AWmlGDOS00pkymR62z6bG7IpPO2vrvmybXIljWnHky07KpH8A95nXLuZ/M1P/wBEmlhVXcAJoQyk3zMwSa4NC8LGAXATILpkCAIAgCAIAgCAIAgCAIAgCAUtAFpjAKWjAFowBaAVtMgWgFLQCmWAMsAZYAtAK2gFbQCsAQBAEAQBAEAQBAEAQBAEAQBAEAQCkAQCsAQBAEApAEAGAIBUQBAEAQBAEAQBAEAQD//Z" alt="Card image cap">
{% else %}
<img class="card-img-top" src="http://www.blonkconsultants.nl/wp-content/uploads/2017/10/animal-feeds.jpg" alt="Card image cap">
{% endif %}
            <div class="card-body">
                <!-- Heading: the animal kind this product is for. -->
                <h5 class="card-title"> {{ item.kind }}'s food </h5>
                {% if not is_toy %}
                <!-- Food-only fields; toys have no expiry date or weight. -->
                <p class="card-text"> Date of validity: {{ item.date_of_validity }} </p>
                <p class="card-text"> Kilograms: {{ item.kilograms }} </p>
                {% endif %}
                <h5 class="card-title"> Price: {{ item.price }} </h5>
            </div>
</div>
</div>
{% endfor %}
</div>
</body>
</html>
| 427.090909
| 12,809
| 0.926777
|
42aa4651e79e18c349b404c6b72c2af703343f69
| 2,096
|
lua
|
Lua
|
DDR SN3/BGAnimations/ScreenLogo background/old.lua
|
MidflightDigital/ddr-supernova3
|
1fbdc1588b807ab6c2621eb318e468bc377dbe24
|
[
"MIT"
] | 10
|
2015-12-12T07:24:52.000Z
|
2022-03-27T13:43:31.000Z
|
DDR SN3/BGAnimations/ScreenLogo background/old.lua
|
MidflightDigital/ddr-supernova3
|
1fbdc1588b807ab6c2621eb318e468bc377dbe24
|
[
"MIT"
] | 18
|
2015-11-04T18:41:28.000Z
|
2019-12-15T12:34:26.000Z
|
DDR SN3/BGAnimations/ScreenLogo background/old.lua
|
MidflightDigital/ddr-supernova3
|
1fbdc1588b807ab6c2621eb318e468bc377dbe24
|
[
"MIT"
] | 3
|
2017-02-02T08:31:52.000Z
|
2021-05-17T05:59:57.000Z
|
-- StepMania ScreenLogo background: layered ActorFrames driving the
-- DDR SuperNOVA logo screen (backdrop, additive flash layers, logo pulse).
-- The cmd(...) chains are the theme DSL; exact ordering/semicolons matter.
local counter = 0; -- NOTE(review): never used below - candidate for removal
local t = Def.ActorFrame{
};
t[#t+1] = Def.ActorFrame {
	InitCommand=function(self)
		self:fov(120); -- wide field of view for the 3D-ish layering
	end;
	-- Backdrop layer: solid green-tinted background image.
	Def.ActorFrame{
		InitCommand=cmd(Center);
		LoadActor("back")..{
			InitCommand=cmd(diffuse,color("0,1,0,0.812"));
		};
	};
	-- Additive-blended effects layer: stars, spinning flash, timed sweeps.
	Def.ActorFrame{
		InitCommand=cmd(Center;blend,Blend.Add;;diffusealpha,0.6);
		LoadActor(THEME:GetPathB("","ScreenLogo background/stars"))..{
			InitCommand=cmd(diffusealpha,0.3;fadetop,0.5;fadebottom,0.5);
		};
		LoadActor(THEME:GetPathB("","ScreenLogo background/flash"))..{
			InitCommand=cmd(y,-50;x,-200;diffusealpha,0.5);
			OnCommand=cmd(spin;effectmagnitude,0,0,50); -- continuous z-spin
		};
		-- Left/right flashes fade in and out on offset timers, then requeue
		-- their own On command to loop forever.
		LoadActor(THEME:GetPathB("","ScreenLogo background/left flash"))..{
			OnCommand=cmd(blend,Blend.Add;;diffuse,color("#a8d313");diffusealpha,0;sleep,4;accelerate,0.2;diffusealpha,1;sleep,1;linear,1;diffusealpha,0;queuecommand,'On');
		};
		LoadActor(THEME:GetPathB("","ScreenLogo background/right flash"))..{
			OnCommand=cmd(diffusealpha,0;sleep,2;accelerate,0.2;diffusealpha,1;sleep,0.5;linear,1;diffusealpha,0;sleep,2;queuecommand,'On');
		};
		LoadActor(THEME:GetPathB("","ScreenLogo background/round grid"))..{
			InitCommand=cmd(diffusealpha,0.5;diffuse,color("#2e800b");zoom,1.8;blend,Blend.Add;;);
		};
		-- Horizontal sweep: scrolls down the screen, resets, loops via Queue.
		LoadActor(THEME:GetPathB("","ScreenLogo background/middle flash"))..{
			InitCommand=cmd(y,-240;CenterX;zoomx,SCREEN_WIDTH;fadetop,0.5;fadebottom,0.5);
			OnCommand=cmd(diffusealpha,0;blend,Blend.Add;;linear,2;diffusealpha,0.55;addy,SCREEN_HEIGHT;queuecommand,"Queue");
			QueueCommand=cmd(diffusealpha,0;addy,-SCREEN_HEIGHT;sleep,4;queuecommand,"On");
		};
	};
	-- Foreground: static logo, a ghost copy that pulses outward, Konami mark.
	Def.ActorFrame{
		LoadActor("ddrsn_logo")..{
			InitCommand=cmd(x,SCREEN_CENTER_X;y,SCREEN_CENTER_Y-8;zoom,0.9);
		};
		LoadActor("ddrsn_logo")..{
			InitCommand=cmd(x,SCREEN_CENTER_X;y,SCREEN_CENTER_Y-8;zoom,0.9);
			OnCommand=cmd(sleep,2;diffusealpha,0.4;zoom,0.9;linear,2;zoom,1;diffusealpha,0;queuecommand,"Queue");
			QueueCommand=cmd(sleep,3.5;queuecommand,"On");
		};
		LoadActor("ddrsn_konami")..{
			InitCommand=cmd(x,SCREEN_CENTER_X;y,SCREEN_BOTTOM-40);
		};
	};
};
return t;
| 38.109091
| 163
| 0.718989
|
3f2299e9143320b53f45fac6eaa2180f06b3c74e
| 922
|
lua
|
Lua
|
Room Path.lua
|
xVoid-xyz/Roblox-Scripts
|
7eb176fa654f2ea5fbc6bcccced1b15df7ed82c2
|
[
"BSD-3-Clause"
] | 70
|
2021-02-09T17:21:32.000Z
|
2022-03-28T12:41:42.000Z
|
Room Path.lua
|
xVoid-xyz/Roblox-Scripts
|
7eb176fa654f2ea5fbc6bcccced1b15df7ed82c2
|
[
"BSD-3-Clause"
] | 4
|
2021-08-19T22:05:58.000Z
|
2022-03-19T18:58:01.000Z
|
Room Path.lua
|
xVoid-xyz/Roblox-Scripts
|
7eb176fa654f2ea5fbc6bcccced1b15df7ed82c2
|
[
"BSD-3-Clause"
] | 325
|
2021-02-26T22:23:41.000Z
|
2022-03-31T19:36:12.000Z
|
-- Roblox script: every frame, spawn a "room" around the local player -
-- a floor, a ceiling, and two semi-transparent side walls that follow
-- the character's Torso. Runs forever; parts are never cleaned up, so
-- this grows the workspace without bound (apparently intentional).
me = game.Players.LocalPlayer.Character
me.Humanoid.WalkSpeed = "100" -- NOTE(review): string, not number - Roblox coerces, but 100 is cleaner
color = "255" -- NOTE(review): Color3.new expects components in 0-1; "255" relies on coercion/clamping - verify intended color
material = "Neon"
while true do wait() -- one room per frame, forever
b=Instance.new("Part",workspace) -- floor slab, 3 studs below torso
b.Anchored = true
b.FormFactor = "Custom"
b.Size = Vector3.new(30,0,20)
b.CFrame = me.Torso.CFrame * CFrame.new(0,-3,0)
b.Color = Color3.new(color,color,color)
b.Material = material
y=Instance.new("Part",workspace) -- ceiling slab, 23 studs above torso
y.Anchored = true
y.FormFactor = "Custom"
y.Size = Vector3.new(30,0,20)
y.CFrame = me.Torso.CFrame * CFrame.new(0,23,0)
y.Color = Color3.new(color) -- NOTE(review): single component - green/blue default to 0, unlike the floor
y.Material = material
x=Instance.new("Part",b) -- right wall, parented to the floor part
x.Size = Vector3.new(0,25,20)
x.CFrame = me.Torso.CFrame * CFrame.new(15,10,0)
x.Anchored = true
x.Transparency="0.9" -- NOTE(review): string assigned to numeric property
x.Color = Color3.new(0,0,0)
c=Instance.new("Part",b) -- left wall, mirrored at x = -15
c.Size = Vector3.new(0,25,20)
c.CFrame = me.Torso.CFrame * CFrame.new(-15,10,0)
c.Anchored = true
c.Transparency="0.9"
c.Color = Color3.new(0,0,0)
end
| 25.611111
| 50
| 0.678959
|
c3a42325338e92ecb208d88dff4aadc5c5dcb8a3
| 202
|
rb
|
Ruby
|
spec/parser/lasgn_call_spec.rb
|
marnen/rubinius
|
05b3f9789d01bada0604a7f09921c956bc9487e7
|
[
"BSD-3-Clause"
] | 1
|
2016-05-08T16:58:14.000Z
|
2016-05-08T16:58:14.000Z
|
spec/parser/lasgn_call_spec.rb
|
taf2/rubinius
|
493bfa2351fc509ca33d3bb03991c2e9c2b6dafa
|
[
"BSD-3-Clause"
] | null | null | null |
spec/parser/lasgn_call_spec.rb
|
taf2/rubinius
|
493bfa2351fc509ca33d3bb03991c2e9c2b6dafa
|
[
"BSD-3-Clause"
] | null | null | null |
def test_case
{"RawParseTree"=>[:lasgn, :c, [:call, [:lit, 2], :+, [:array, [:lit, 3]]]],
"Ruby"=>"c = (2 + 3)",
"RubyParser"=>
s(:lasgn, :c, s(:call, s(:lit, 2), :+, s(:arglist, s(:lit, 3))))}
end
| 28.857143
| 75
| 0.480198
|
c48e2c67895bfaf6a4505b02335a24157fb46692
| 7,949
|
cpp
|
C++
|
LTSketchbook/libraries/LT_PMBUS/LT_FaultLog.cpp
|
LinduinoBob/Linduino
|
a6465b549ee8daee4eec11c36cabf5487bd2a3bc
|
[
"FSFAP"
] | null | null | null |
LTSketchbook/libraries/LT_PMBUS/LT_FaultLog.cpp
|
LinduinoBob/Linduino
|
a6465b549ee8daee4eec11c36cabf5487bd2a3bc
|
[
"FSFAP"
] | null | null | null |
LTSketchbook/libraries/LT_PMBUS/LT_FaultLog.cpp
|
LinduinoBob/Linduino
|
a6465b549ee8daee4eec11c36cabf5487bd2a3bc
|
[
"FSFAP"
] | null | null | null |
/*!
LTC PMBus Support: API for a shared LTC Fault Log
@verbatim
This API is shared with Linduino and RTOS code. End users should code to this
API to enable use of the PMBus code without modifications.
@endverbatim
Copyright 2018(c) Analog Devices, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
- Neither the name of Analog Devices, Inc. nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
- The use of this software may or may not infringe the patent rights
of one or more patent holders. This license does not release you
from the requirement that you obtain separate licenses from these
patent holders to use this software.
- Use of the software either in source or binary form, must be run
on or directly connected to an Analog Devices Inc. component.
THIS SOFTWARE IS PROVIDED BY ANALOG DEVICES "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, NON-INFRINGEMENT,
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL ANALOG DEVICES BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, INTELLECTUAL PROPERTY RIGHTS, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
//! @ingroup PMBus_SMBus
//! @{
//! @defgroup LT_FaultLog LT_FaultLog: PLTC PSM Fault Log support
//! @}
/*! @file
@ingroup LT_FaultLog
Library Header File for LT_FaultLog
*/
#include <Arduino.h>
#include "LT_FaultLog.h"
LT_FaultLog::LT_FaultLog(LT_PMBus *pmbus)
{
pmbus_ = pmbus;
}
/*
* Read the status byte
*
* address: PMBUS address
*/
uint8_t
LT_FaultLog::readMfrStatusByte(uint8_t address)
{
uint8_t status_byte;
status_byte = pmbus_->smbus()->readByte(address, STATUS_MFR_SPECIFIC);
return status_byte;
}
/*
* Read the mfr status byte
*
* address: PMBUS address
*/
uint8_t
LT_FaultLog::readMfrFaultLogStatusByte(uint8_t address)
{
uint8_t status_byte;
status_byte = pmbus_->smbus()->readByte(address, MFR_FAULT_LOG_STATUS);
return status_byte;
}
/*
* Check if there is a fault log
*
* address: PMBUS address
*/
bool
LT_FaultLog::hasFaultLog(uint8_t address)
{
uint8_t status;
PsmDeviceType t = pmbus_->deviceType(address);
if (t == LTC3880 || t == LTC3886 || t == LTC3887 || t == LTM4675 || t == LTM4676|| t == LTM4676A || t == LTM4677)
{
status = readMfrStatusByte(address);
return (status & LTC3880_SMFR_FAULT_LOG) > 0;
}
else if (t == LTC3882 || t == LTC3882_1)
{
status = readMfrStatusByte(address);
return (status & LTC3882_SMFR_FAULT_LOG) > 0;
}
else if (t == LTC3883)
{
status = readMfrStatusByte(address);
return (status & LTC3883_SMFR_FAULT_LOG) > 0;
}
else if (t == LTC2974 || t == LTC2975)
{
status = readMfrFaultLogStatusByte(address);
return (status & LTC2974_SFL_EEPROM) > 0;
}
else if (t == LTC2977 || t == LTC2978 || t == LTC2980 || t == LTM2987)
{
status = readMfrFaultLogStatusByte(address);
return (status & LTC2978_SFL_EEPROM) > 0;
}
else
return false;
}
/*
* Enable fault log
*
* address: PMBUS address
*/
void
LT_FaultLog::enableFaultLog(uint8_t address)
{
uint8_t config8;
uint16_t config16;
PsmDeviceType t = pmbus_->deviceType(address);
if (
(t == LTC3880)
|| (t == LTC3882)
|| (t == LTC3882_1)
|| (t == LTC3883)
|| (t == LTC3886)
|| (t == LTM4675)
|| (t == LTM4676)
|| (t == LTM4676A)
|| (t == LTM4677)
|| (t == LTC2978)
)
{
config8 = pmbus_->smbus()->readByte(address, MFR_CONFIG_ALL);
pmbus_->smbus()->writeByte(address, MFR_CONFIG_ALL, config8 | CFGALL_EFL);
}
else if (
(t == LTC2974)
|| (t == LTC2975)
|| (t == LTC2977)
|| (t == LTC2980)
|| (t == LTM2987)
)
{
config16 = pmbus_->smbus()->readWord(address, MFR_CONFIG_ALL);
pmbus_->smbus()->writeWord(address, MFR_CONFIG_ALL, config16 | LTC2974_CFGALL_EFL);
}
}
/*
* Disable fault log
*
* address: PMBUS address
*/
void
LT_FaultLog::disableFaultLog(uint8_t address)
{
uint8_t config8;
uint16_t config16;
PsmDeviceType t = pmbus_->deviceType(address);
if (
(t == LTC3880)
|| (t == LTC3882)
|| (t == LTC3882_1)
|| (t == LTC3883)
|| (t == LTC3886)
|| (t == LTM4675)
|| (t == LTM4676)
|| (t == LTM4676A)
|| (t == LTM4677)
|| (t == LTC2978)
)
{
config8 = pmbus_->smbus()->readByte(address, MFR_CONFIG_ALL);
pmbus_->smbus()->writeByte(address, MFR_CONFIG_ALL, config8 & ~CFGALL_EFL);
}
else if (
(t == LTC2974)
|| (t == LTC2975)
|| (t == LTC2977)
|| (t == LTC2980)
|| (t == LTM2987)
)
{
config16 = pmbus_->smbus()->readWord(address, MFR_CONFIG_ALL);
pmbus_->smbus()->writeWord(address, MFR_CONFIG_ALL, config16 & ~CFGALL_EFL);
}
}
void LT_FaultLog::dumpBin(Print *printer, uint8_t *log, uint8_t size)
{
if (printer == 0)
printer = &Serial;
uint8_t *temp = log;
for (uint8_t i = 0; i < size; i++)
{
if (!(i % 16))
printer->println();
if (temp[i] < 0x10)
printer->write('0');
printer->print(temp[i], HEX);
}
printer->println();
}
/*
* Clear fault log
*
* address: PMBUS address
*/
void
LT_FaultLog::clearFaultLog(uint8_t address)
{
pmbus_->smbus()->sendByte(address, MFR_FAULT_LOG_CLEAR);
}
uint64_t
LT_FaultLog::getSharedTime200us(FaultLogTimeStamp time_stamp)
{
uint64_t num200us = ((uint64_t) time_stamp.shared_time_byte5 << 40);
num200us = num200us | ((uint64_t) time_stamp.shared_time_byte4 << 32);
num200us = num200us | ((uint32_t) time_stamp.shared_time_byte3 << 24);
num200us = num200us | ((uint32_t) time_stamp.shared_time_byte2 << 16);
num200us = num200us | ((uint32_t) time_stamp.shared_time_byte1 << 8);
num200us = num200us | (time_stamp.shared_time_byte0);
return num200us;
}
float
LT_FaultLog::getTimeInMs(FaultLogTimeStamp time_stamp)
{
double ms = getSharedTime200us(time_stamp)/5.0;
return ms;
}
uint8_t
LT_FaultLog::getRawByteVal(RawByte value)
{
return (uint8_t) value.uint8_tValue;
}
uint16_t
LT_FaultLog::getRawWordVal(RawWord value)
{
return (uint16_t) (value.lo_byte | (value.hi_byte << 8));
}
uint16_t
LT_FaultLog::getRawWordReverseVal(RawWordReverse value)
{
return (uint16_t) (value.lo_byte | (value.hi_byte << 8));
}
uint16_t
LT_FaultLog::getLin5_11WordVal(Lin5_11Word value)
{
return (uint16_t) (value.lo_byte | (value.hi_byte << 8));
}
uint16_t
LT_FaultLog::getLin5_11WordReverseVal(Lin5_11WordReverse value)
{
return (uint16_t) (value.lo_byte | (value.hi_byte << 8));
}
uint16_t
LT_FaultLog::getLin16WordVal(Lin16Word value)
{
return (uint16_t) (value.lo_byte | (value.hi_byte << 8));
}
uint16_t
LT_FaultLog::getLin16WordReverseVal(Lin16WordReverse value)
{
return (uint16_t) (value.lo_byte | (value.hi_byte << 8));
}
| 25.977124
| 116
| 0.662473
|
3f1967536a639ffcea810fb01b2f5e28ba68e1b8
| 1,180
|
php
|
PHP
|
app/Models/RequisicaoLog.php
|
MagicNow/Magic_BildSys
|
5109a77a286c7606bcff433e191ab5d349d78294
|
[
"MIT"
] | null | null | null |
app/Models/RequisicaoLog.php
|
MagicNow/Magic_BildSys
|
5109a77a286c7606bcff433e191ab5d349d78294
|
[
"MIT"
] | null | null | null |
app/Models/RequisicaoLog.php
|
MagicNow/Magic_BildSys
|
5109a77a286c7606bcff433e191ab5d349d78294
|
[
"MIT"
] | 1
|
2020-01-08T18:03:14.000Z
|
2020-01-08T18:03:14.000Z
|
<?php
namespace App\Models;
use Eloquent as Model;
use Illuminate\Database\Eloquent\SoftDeletes;
class RequisicaoLog extends Model
{
public $table = 'requisicao_log';
const CREATED_AT = 'created_at';
const UPDATED_AT = 'updated_at';
public $fillable = [
'requisicao_id',
'status_id_anterior',
'status_id_novo',
'user_id'
];
/**
* The attributes that should be casted to native types.
*
* @var array
*/
protected $casts = [
'id' => 'integer',
'requisicao_id' => 'integer',
'status_id_anterior' => 'integer',
'status_id_novo' => 'integer',
'user_id' => 'integer'
];
/**
* Validation rules
*
* @var array
*/
public static $rules = [
];
/**
* @return \Illuminate\Database\Eloquent\Relations\BelongsTo
**/
public function user()
{
return $this->belongsTo(\App\Models\User::class);
}
/**
* @return \Illuminate\Database\Eloquent\Relations\HasMany
**/
public function requisicao()
{
return $this->hasMany(\App\Models\Requisicao::class);
}
}
| 19.344262
| 64
| 0.561864
|
0a79991625e7c54cc73c10f96c59be6c02d528de
| 364
|
cs
|
C#
|
Ryujinx.HLE/HOS/Services/Hid/Types/SharedMem/Npad/SixAxisState.cs
|
tarocco/Ryujinx
|
c7113049dec71b02e603808663a0b1ebf901697b
|
[
"MIT"
] | 598
|
2018-02-04T23:12:23.000Z
|
2022-03-25T20:28:10.000Z
|
Ryujinx.HLE/HOS/Services/Hid/Types/SharedMem/Npad/SixAxisState.cs
|
tarocco/Ryujinx
|
c7113049dec71b02e603808663a0b1ebf901697b
|
[
"MIT"
] | 198
|
2018-02-08T17:24:05.000Z
|
2022-03-09T22:41:31.000Z
|
Ryujinx.HLE/HOS/Services/Hid/Types/SharedMem/Npad/SixAxisState.cs
|
tarocco/Ryujinx
|
c7113049dec71b02e603808663a0b1ebf901697b
|
[
"MIT"
] | 200
|
2018-02-04T23:28:20.000Z
|
2022-03-29T21:22:47.000Z
|
namespace Ryujinx.HLE.HOS.Services.Hid
{
unsafe struct SixAxisState
{
public ulong SampleTimestamp;
ulong _unknown1;
public ulong SampleTimestamp2;
public HidVector Accelerometer;
public HidVector Gyroscope;
HidVector unknownSensor;
public fixed float Orientation[9];
ulong _unknown2;
}
}
| 26
| 42
| 0.659341
|
8e8376919db085bf6169001f4ec9a3d8cdc7c6b3
| 256
|
js
|
JavaScript
|
_web/src/api/modular/flowable/taskTodoManage.js
|
enjoinpart/snowy
|
6a06e56fa42c0b2dcfb79aff36daf69b3e01d410
|
[
"Apache-2.0"
] | 1
|
2022-01-04T07:13:44.000Z
|
2022-01-04T07:13:44.000Z
|
_web/src/api/modular/flowable/taskTodoManage.js
|
enjoinpart/snowy
|
6a06e56fa42c0b2dcfb79aff36daf69b3e01d410
|
[
"Apache-2.0"
] | null | null | null |
_web/src/api/modular/flowable/taskTodoManage.js
|
enjoinpart/snowy
|
6a06e56fa42c0b2dcfb79aff36daf69b3e01d410
|
[
"Apache-2.0"
] | 1
|
2022-03-26T13:15:36.000Z
|
2022-03-26T13:15:36.000Z
|
import { axios } from '@/utils/request'
/**
* 待办任务
*
* @author yubaoshan
* @date 2020/8/4 23:10
*/
export function flowableTodoTaskPage (parameter) {
return axios({
url: '/flowableTodoTask/page',
method: 'get',
params: parameter
})
}
| 16
| 50
| 0.628906
|
7a92fafcbd72a684f9e222262823d9b05b8e4db1
| 839
|
swift
|
Swift
|
AudioMate/Extensions/NSUserDefaults+CustomObjects.swift
|
kant/AudioMate
|
d80430166a5e846ed699a234a9e10882525929d7
|
[
"MIT"
] | null | null | null |
AudioMate/Extensions/NSUserDefaults+CustomObjects.swift
|
kant/AudioMate
|
d80430166a5e846ed699a234a9e10882525929d7
|
[
"MIT"
] | null | null | null |
AudioMate/Extensions/NSUserDefaults+CustomObjects.swift
|
kant/AudioMate
|
d80430166a5e846ed699a234a9e10882525929d7
|
[
"MIT"
] | null | null | null |
//
// NSUserDefaults+CustomObjects.swift
// AudioMate
//
// Created by Ruben Nine on 25/04/16.
// Copyright © 2016 Ruben Nine. All rights reserved.
//
import Foundation
extension UserDefaults {
public func customObjectForKey(defaultName: String) -> NSCoding? {
guard let objectForKey = object(forKey: defaultName) as? NSData else { return nil }
if let decodedObject = try? NSKeyedUnarchiver.unarchiveTopLevelObjectWithData(objectForKey) as? NSCoding {
return decodedObject
} else {
return nil
}
}
public func setCustomObject(value: NSCoding?, forKey defaultName: String) {
guard let object = value else { return }
let encodedObject = NSKeyedArchiver.archivedData(withRootObject: object)
set(encodedObject, forKey: defaultName)
}
}
| 26.21875
| 114
| 0.678188
|
25853c9f74f44cbf29c344580ad45c00bf3260bd
| 3,453
|
cs
|
C#
|
MMO-RPG-Console-Client/Api/ModifyPlayer.cs
|
forsbergsskola-se/gp20-2021-0426-rest-gameserver-Jackoberto
|
1720c03824275b457d9f18813b0469a1da1a70d5
|
[
"CECILL-B"
] | null | null | null |
MMO-RPG-Console-Client/Api/ModifyPlayer.cs
|
forsbergsskola-se/gp20-2021-0426-rest-gameserver-Jackoberto
|
1720c03824275b457d9f18813b0469a1da1a70d5
|
[
"CECILL-B"
] | null | null | null |
MMO-RPG-Console-Client/Api/ModifyPlayer.cs
|
forsbergsskola-se/gp20-2021-0426-rest-gameserver-Jackoberto
|
1720c03824275b457d9f18813b0469a1da1a70d5
|
[
"CECILL-B"
] | null | null | null |
using System;
using System.Threading.Tasks;
namespace MMO_RPG_Console_Client.Api
{
public class ModifyPlayer
{
public ModifyPlayer(IHttpHandler httpHandler)
{
HttpHandler = httpHandler;
}
private IHttpHandler HttpHandler { get; }
public async Task<Player> Run(Player player)
{
var input = string.Empty;
while (true)
{
Console.WriteLine("Do You Wanna\n" +
"1: Change Player Score\n" +
"2: Get Player Info\n"+
"3: Add Item\n"+
"4: Logout");
input = Console.ReadLine();
switch (input)
{
case "1":
{
return await GetModifiedPlayer(player);
}
case "2":
{
Console.WriteLine(player.ToString());
break;
}
case "3":
{
return await AddItem(player);
}
case "4":
{
return null;
}
}
}
}
async Task<Player> GetModifiedPlayer(Player player)
{
ModifiedPlayer modifiedPlayer = null;
do
{
Console.WriteLine("Write a score");
var input = Console.ReadLine();
if (int.TryParse(input, out var result))
{
modifiedPlayer = new ModifiedPlayer() {Score = result};
}
} while (modifiedPlayer == null);
player = await HttpHandler.ModifyPlayer(modifiedPlayer, player.Id);
return player;
}
async Task<Player> AddItem(Player player)
{
NewItem newItem = null;
do
{
var level = 0;
ItemType type;
Console.WriteLine("Write a name for the item");
var name = Console.ReadLine();
Console.WriteLine("Write a level between 1-99");
var input = Console.ReadLine();
if (int.TryParse(input, out var inputtedLevel) && inputtedLevel is <= 99 and >= 1)
level = inputtedLevel;
else
{
Console.WriteLine("Invalid Number");
continue;
}
Console.WriteLine("Write a itemType between 0-4");
input = Console.ReadLine();
if (Enum.TryParse<ItemType>(input, out var inputtedType))
type = inputtedType;
else
{
Console.WriteLine("Invalid Type");
continue;
}
newItem = new NewItem
{
Name = name,
CreationTime = DateTime.Now,
Level = level,
Type = type
};
} while (newItem == null);
player = await HttpHandler.AddItem(newItem, player.Id);
return player;
}
}
}
| 31.972222
| 98
| 0.400232
|
0dfc34b2a59162c9a5b131896a1cdabbd2b2c1c7
| 462
|
rb
|
Ruby
|
lib/card.rb
|
shkherad/ruby-enumerable-custom
|
a44ae50a49603417697882a1a3862c5b2413694a
|
[
"MIT"
] | null | null | null |
lib/card.rb
|
shkherad/ruby-enumerable-custom
|
a44ae50a49603417697882a1a3862c5b2413694a
|
[
"MIT"
] | null | null | null |
lib/card.rb
|
shkherad/ruby-enumerable-custom
|
a44ae50a49603417697882a1a3862c5b2413694a
|
[
"MIT"
] | null | null | null |
# A simple representation of a playing card.
class Card
SUITS = %w(C D H S).freeze
RANKS = [(2..10).to_a, %w(J Q K A)].flatten
attr_reader :suit, :rank
def initialize(rank, suit)
fail ArgumentError,
"Suit: '#{suit}' not in #{SUITS}" unless SUITS.include? suit
fail ArgumentError,
"Rank: '#{rank}' not in #{RANKS}" unless RANKS.include? rank
@suit = suit
@rank = rank
end
def to_s
"#{rank}:#{suit}"
end
end
| 21
| 69
| 0.601732
|
52e70720576a51676696b2025e96483161c01a0c
| 2,743
|
rb
|
Ruby
|
test/unit/library_cloud_test.rb
|
berkmancenter/collection_shift
|
337a5499e5b234e725a632737441b1342ccd6e04
|
[
"Apache-2.0"
] | null | null | null |
test/unit/library_cloud_test.rb
|
berkmancenter/collection_shift
|
337a5499e5b234e725a632737441b1342ccd6e04
|
[
"Apache-2.0"
] | 1
|
2016-11-20T23:08:50.000Z
|
2016-11-20T23:08:50.000Z
|
test/unit/library_cloud_test.rb
|
berkmancenter/collection_shift
|
337a5499e5b234e725a632737441b1342ccd6e04
|
[
"Apache-2.0"
] | null | null | null |
require 'test_helper'
class LibraryCloudTest < ActiveSupport::TestCase
def setup
@api = LibraryCloud.new
end
test "build result" do
library = 'MUS'
coll = 'GEN'
start_num = 'ML410.T173 A3 2013'
end_num = 'ML410.V4 P58 2004'
# result = @api.pages_in_range(library, coll, start_num, end_num)
# puts result.inspect
end
test "call number to sort number" do
call_num = 'QP501 .A7'
sort_num = 9669356
assert @api.call_num_to_sort_num(call_num) == sort_num
call_num = 'ML410.T173 A3 2013'
sort_num = 6278895
assert @api.call_num_to_sort_num(call_num) == sort_num
end
test "total records" do
start_num = 'ML410.T173 A3 2013'
end_num = 'ML410.V4 P58 2004'
assert @api.total_records('MUS', start_num, end_num) >= 670
end
test "has library records" do
assert @api.has_library_records?('MUS')
assert @api.has_library_records?('WID')
assert !@api.has_library_records?('ASDF')
end
test "record call numbers" do
start_num = 'ML410.A165 D5 2006'
end_num = 'ML410.A2 M2 1982'
# start and end nums are not included because they're in HD
records = @api.records_in_range('MUS', 'HD', start_num, end_num)
call_nums = ["ML410.A2 K49 1996", "ML410.A17 H87 2005"]
output_call_nums = []
records.each do |record|
output_call_nums += @api.record_call_numbers(record)
end
assert call_nums.sort == output_call_nums.sort
end
test "normalize call num" do
input = ['ML410.T173 A3 2013', 'ML410.T1768 P8 1960']
expected_output = ['ML0410 T173 A3 02013','ML0410 T1768 P8 01960']
assert @api.normalize_call_num(input) == expected_output
assert @api.normalize_call_num('ML410.T173 A3 2013') == 'ML0410 T173 A3 02013'
end
test "filter records by range" do
start_call_num= 'ML410.A165 D5 2006'
end_call_num = 'ML410.A2 M2 1982'
filter = @api.library_and_range_filter('MUS', start_call_num, end_call_num)
records = @api.all_records(filter)
@api.add_item_data!(records)
@api.filter_records_by_range!(records, start_call_num, end_call_num)
output_call_nums = []
records.each do |record|
output_call_nums += @api.record_call_numbers(record)
end
expected_output = [
"ML410.A2 K49 1996", "ZHCL Mus1220.5.53", "ML410.A2 M2 1982",
"ML410.A17 H87 2005", "ML410.A165 D5 2006"
]
assert output_call_nums.sort == expected_output.sort
end
test "wildcardize" do
call_nums = [
'ML410.T173 A3 2013',
'ML410 .T173 A3 2013',
'ML410.T173 A3 2013',
'ML410 . T173 A3 2013'
]
call_nums.each do |call_num|
wildcarded = @api.wildcardize(call_num)
assert wildcarded == 'ML410*.*T173*A3*2013'
end
end
end
| 29.180851
| 83
| 0.672257
|
37f10fa92e6cf58eee407b1f1a5471fd630e2b2a
| 831
|
swift
|
Swift
|
Diary/Diary/Cell/HomeYearCollectionViewCell.swift
|
lilongcnc/demoSpaces_Swift
|
699a8a86c2a6b85e2c86b38cf33881d71f1cb52b
|
[
"MIT"
] | null | null | null |
Diary/Diary/Cell/HomeYearCollectionViewCell.swift
|
lilongcnc/demoSpaces_Swift
|
699a8a86c2a6b85e2c86b38cf33881d71f1cb52b
|
[
"MIT"
] | null | null | null |
Diary/Diary/Cell/HomeYearCollectionViewCell.swift
|
lilongcnc/demoSpaces_Swift
|
699a8a86c2a6b85e2c86b38cf33881d71f1cb52b
|
[
"MIT"
] | null | null | null |
//
// HomeYearCollectionViewCell.swift
// Diary
//
// Created by kevinzhow on 15/5/19.
// Copyright (c) 2015年 kevinzhow. All rights reserved.
//
import UIKit
class HomeYearCollectionViewCell: UICollectionViewCell {
var textLabel: DiaryLabel!
var textInt: Int = 0
var labelText: String = "" {
didSet {
self.textLabel.updateText(labelText)
}
}
override func awakeFromNib() {
self.textLabel = DiaryLabel(fontname: "TpldKhangXiDictTrial", labelText: labelText, fontSize: 16.0, lineHeight: 5.0)
self.backgroundColor = UIColor.orangeColor()
self.addSubview(textLabel)
}
override func layoutSubviews() {
self.textLabel.center = CGPointMake(itemWidth/2.0, 150.0/2.0)
self.textLabel.backgroundColor = UIColor.redColor()
}
}
| 26.806452
| 124
| 0.659446
|
72823f73cdd6a278d960e01ac04faaadb72111bb
| 2,794
|
cs
|
C#
|
Backend/Model/Entities/EmpresaDbContext.cs
|
Altairseven/EjemploWebAppJquery
|
206283c7541eede9ca46b4dd896f17c8ec068bd5
|
[
"MIT"
] | null | null | null |
Backend/Model/Entities/EmpresaDbContext.cs
|
Altairseven/EjemploWebAppJquery
|
206283c7541eede9ca46b4dd896f17c8ec068bd5
|
[
"MIT"
] | null | null | null |
Backend/Model/Entities/EmpresaDbContext.cs
|
Altairseven/EjemploWebAppJquery
|
206283c7541eede9ca46b4dd896f17c8ec068bd5
|
[
"MIT"
] | null | null | null |
using System;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata;
namespace Model.Entities
{
public partial class EmpresaDbContext : DbContext
{
public EmpresaDbContext()
{
}
public EmpresaDbContext(DbContextOptions<EmpresaDbContext> options)
: base(options)
{
}
public virtual DbSet<Clientes> Clientes { get; set; }
public virtual DbSet<Tipos_Documento> Tipos_Documento { get; set; }
public virtual DbSet<Usuarios> Usuarios { get; set; }
public static string ConnectionString { get; set; }
protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder) {
optionsBuilder.UseMySql(ConnectionString);
}
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.Entity<Clientes>(entity =>
{
entity.Property(e => e.ID).HasColumnType("bigint(18)");
entity.Property(e => e.Apellido).HasColumnType("varchar(45)");
entity.Property(e => e.Dir_Calle).HasColumnType("varchar(45)");
entity.Property(e => e.Dir_Dpto).HasColumnType("varchar(20)");
entity.Property(e => e.Dir_Numer).HasColumnType("varchar(20)");
entity.Property(e => e.Dir_Piso).HasColumnType("varchar(20)");
entity.Property(e => e.Email).HasColumnType("varchar(45)");
entity.Property(e => e.ID_TipoDocumento).HasColumnType("bigint(18)");
entity.Property(e => e.Nombre).HasColumnType("varchar(45)");
entity.Property(e => e.Nro_Documento).HasColumnType("varchar(20)");
entity.Property(e => e.Telefono).HasColumnType("varchar(45)");
});
modelBuilder.Entity<Tipos_Documento>(entity =>
{
entity.Property(e => e.ID).HasColumnType("bigint(18)");
entity.Property(e => e.Nombre).HasColumnType("varchar(45)");
});
modelBuilder.Entity<Usuarios>(entity =>
{
entity.Property(e => e.ID).HasColumnType("bigint(18)");
entity.Property(e => e.Apellido).HasColumnType("varchar(45)");
entity.Property(e => e.Email).HasColumnType("varchar(100)");
entity.Property(e => e.FechaAlta).HasColumnType("datetime");
entity.Property(e => e.LastLogin).HasColumnType("datetime");
entity.Property(e => e.Nombre).HasColumnType("varchar(45)");
entity.Property(e => e.Password).HasColumnType("varchar(70)");
entity.Property(e => e.Username).HasColumnType("varchar(45)");
});
}
}
}
| 34.073171
| 87
| 0.586256
|
6d0c079e7c16366b7c9cef803f9439b6c96d3247
| 1,170
|
ts
|
TypeScript
|
bot/src/analytics/datadog.ts
|
Xetera/Hifumi
|
ceeef42f8b9b88b0cad4512a4bd94f226b23c7ba
|
[
"MIT"
] | 22
|
2018-07-19T08:47:37.000Z
|
2019-07-12T17:33:20.000Z
|
bot/src/analytics/datadog.ts
|
moedevs/hifumi-bot
|
ceeef42f8b9b88b0cad4512a4bd94f226b23c7ba
|
[
"MIT"
] | 7
|
2018-07-18T12:20:45.000Z
|
2018-08-08T14:36:32.000Z
|
bot/src/analytics/datadog.ts
|
moedevs/hifumi-bot
|
ceeef42f8b9b88b0cad4512a4bd94f226b23c7ba
|
[
"MIT"
] | 4
|
2018-09-12T07:39:34.000Z
|
2019-01-28T23:53:17.000Z
|
import { BufferedMetricsLogger } from "datadog-metrics";
import { AkairoClient } from "discord-akairo";
import { countMembers, logger } from "../utils";
const { HIFUMI_DATADOG_API_KEY } = process.env;
const _dd = new BufferedMetricsLogger({
apiKey: HIFUMI_DATADOG_API_KEY || "disabled in development",
host: "hifumi",
prefix: "hifumi.",
// allows us to buffer stats and send them in bulk
flushIntervalSeconds: 15,
});
/**
* Wrapper function that prevents calling metrics in development mode
* @param func
*/
export const withDatadog = (func: (client: BufferedMetricsLogger) => void): void => {
if (HIFUMI_DATADOG_API_KEY) {
func(_dd);
}
};
export const sendAnalytics = (client: AkairoClient) => {
const perMinute = 60000;
const totalMembers = countMembers(client);
const totalServers = client.guilds.size;
const ping = client.ping;
const uptime = client.uptime / perMinute;
withDatadog((datadog) => {
logger.debug(`Sending analytics`);
datadog.gauge("bot.member.count", totalMembers);
datadog.gauge("bot.server.count", totalServers);
datadog.gauge("bot.ping", ping);
datadog.gauge("bot.uptime", uptime);
});
};
| 30
| 85
| 0.703419
|
5f025e5cbc8932d97c9223ee1e5fadb7741ba00f
| 9,272
|
ps1
|
PowerShell
|
tools/azure-diagnostics-policy-generator/Trigger-PolicyEvaluation.ps1
|
simkjels/Azure-Lighthouse-samples
|
6af72c8b843a6ea9a1a86f959d92deb0263a48cd
|
[
"MIT"
] | null | null | null |
tools/azure-diagnostics-policy-generator/Trigger-PolicyEvaluation.ps1
|
simkjels/Azure-Lighthouse-samples
|
6af72c8b843a6ea9a1a86f959d92deb0263a48cd
|
[
"MIT"
] | null | null | null |
tools/azure-diagnostics-policy-generator/Trigger-PolicyEvaluation.ps1
|
simkjels/Azure-Lighthouse-samples
|
6af72c8b843a6ea9a1a86f959d92deb0263a48cd
|
[
"MIT"
] | null | null | null |
<#PSScriptInfo
.VERSION 1.3
.GUID efd1a650-e9e6-4cd3-beca-cc0e940cc672
.AUTHOR jbritt@microsoft.com
.COMPANYNAME Microsoft
.COPYRIGHT Microsoft
.TAGS
.LICENSEURI
.PROJECTURI
https://github.com/JimGBritt/AzurePolicy/tree/master/AzureMonitor/Scripts
.ICONURI
.EXTERNALMODULEDEPENDENCIES
.REQUIREDSCRIPTS
.EXTERNALSCRIPTDEPENDENCIES
.RELEASENOTES
November 03, 2020 1.3
Fixed a bug with REST API logic
#>
<#
.SYNOPSIS
Use this script to trigger the Azure Policy Evaluation API
.DESCRIPTION
This script takes a SubscriptionID and optionally a ResourceGroup as parameters, allows you to also specify an interval
for how many seconds you want to delay before checking status of the policy evaluation (default is 20 seconds)
Based on the API documented here: https://docs.microsoft.com/en-us/azure/governance/policy/how-to/get-compliance-data#evaluation-triggers
.PARAMETER SubscriptionId
The subscriptionID of the Azure Subscription that contains the policies to evaluate
.PARAMETER ResourceGroupName
If desired, use a resourcegroup in addition to SubscriptionID to narrow in on a scope of ResourceGroup to evaluate policy compliance
.PARAMETER Interval
Specify an interval in seconds (default is 20) to check for status of trigger - loops until complete.
.PARAMETER ADO
This parameter allows you to run this script in Azure DevOps pipeline utilizing an SPN
.EXAMPLE
.\Trigger-PolicyEvaluation.ps1 -SubscriptionId "fd2323a9-2324-4d2a-90f6-7e6c2fe03512" -ResourceGroup "RGName" interval 25
Trigger evaluation against the scope of a Resource Group, with a specified subscriptionID with an interval of 25 seconds
.EXAMPLE
.\Trigger-PolicyEvaluation.ps1 -SubscriptionId "fd2323a9-2324-4d2a-90f6-7e6c2fe03512"
Trigger evaluation against the scope of a subscriptionID
.EXAMPLE
.\Trigger-PolicyEvaluation.ps1
Prompt for a subscriptionId from a menu listing of all available subscriptions within the context of the logged in user.
Trigger evaluation against the scope of a subscriptionID selected.
.EXAMPLE
.\Trigger-PolicyEvaluation.ps1 -SubscriptionId "fd2323a9-2324-4d2a-90f6-7e6c2fe03512" -ADO
Trigger evaluation against the scope of a subscriptionID while leveraging an SPN in an ADO pipeline
.NOTES
AUTHOR: Jim Britt Principal Program Manager - Azure CXP API (Azure Product Improvement)
LASTEDIT: November 03, 2020 1.3
Fixed a bug with REST API logic
October 30, 2020 1.2 - Updates
Changed REST API Token creation due to a recent breaking change I observed where the old way no longer worked.
If you have any issues with this change, please let me know here on Github (https://aka.ms/AzPolicyScripts)
August 13, 2020 1.1
Added parameter -ADO
This parameter provides the option to run this script leveraging an SPN in Azure DevOps.
Special Thanks to Nikolay Sucheninov and the VIAcode team for working to get these scripts
integrated and operational in Azure DevOps to streamline "Policy as Code" processes with version
drift detection and remediation through automation!
May 01, 2019
Initial
.LINK
This script posted to and discussed at the following locations:
https://github.com/JimGBritt/AzurePolicy/tree/master/AzureMonitor/Scripts
#>
param
(
[Parameter(Mandatory=$false)]
[ValidateSet("AzureChinaCloud","AzureCloud","AzureGermanCloud","AzureUSGovernment")]
[string]$Environment = "AzureCloud",
[Parameter(Mandatory = $False)]
[switch]$ADO = $False,
# Provide SubscriptionID to bypass subscription listing
[Parameter(Mandatory = $False)]
[guid]$SubscriptionId,
# Add a ResourceGroup name to reduce scope from entire Azure Subscription to RG
[Parameter(Mandatory = $False)]
[string]$ResourceGroupName,
# An interval in seconds to check that trigger was successful
[Parameter(Mandatory = $False)]
[int]$interval = 20
)
# Function used to build numbers in selection tables for menus
function Add-IndexNumberToArray (
[Parameter(Mandatory=$True)]
[array]$array
)
{
for($i=0; $i -lt $array.Count; $i++)
{
Add-Member -InputObject $array[$i] -Name "#" -Value ($i+1) -MemberType NoteProperty
}
$array
}
function BuildBody
(
[parameter(mandatory=$True)]
[string]$method
)
{
$BuildBody = @{
Headers = @{
Authorization = "Bearer $($token.AccessToken)"
'Content-Type' = 'application/json'
}
Method = $Method
UseBasicParsing = $true
}
$BuildBody
}
# Login to Azure - if already logged in, use existing credentials.
If($ADO){write-host "Leveraging ADO switch for SPN authentication in Azure DevOps"}
Write-Host "Authenticating to Azure..." -ForegroundColor Cyan
try
{
$AzureLogin = Get-AzSubscription
$currentContext = Get-AzContext
if($ADO){$token = $currentContext.TokenCache.ReadItems()}
else
{
$azProfile = [Microsoft.Azure.Commands.Common.Authentication.Abstractions.AzureRmProfileProvider]::Instance.Profile
$profileClient = New-Object -TypeName Microsoft.Azure.Commands.ResourceManager.Common.RMProfileClient -ArgumentList ($azProfile)
$token = $profileClient.AcquireAccessToken($currentContext.Subscription.TenantId)
}
if($Token.ExpiresOn -lt $(get-date))
{
"Logging you out due to cached token is expired for REST AUTH. Re-run script"
$null = Disconnect-AzAccount
}
}
catch
{
$null = Login-AzAccount -Environment $Environment
$AzureLogin = Get-AzSubscription
$currentContext = Get-AzContext
if($ADO){$token = $currentContext.TokenCache.ReadItems()}
else
{
$azProfile = [Microsoft.Azure.Commands.Common.Authentication.Abstractions.AzureRmProfileProvider]::Instance.Profile
$profileClient = New-Object -TypeName Microsoft.Azure.Commands.ResourceManager.Common.RMProfileClient -ArgumentList ($azProfile)
$token = $profileClient.AcquireAccessToken($currentContext.Subscription.TenantId)
}
}
If($AzureLogin -and !($SubscriptionID))
{
[array]$SubscriptionArray = Add-IndexNumberToArray (Get-AzSubscription)
[int]$SelectedSub = 0
# use the current subscription if there is only one subscription available
if ($SubscriptionArray.Count -eq 1)
{
$SelectedSub = 1
}
# Get SubscriptionID if one isn't provided
while($SelectedSub -gt $SubscriptionArray.Count -or $SelectedSub -lt 1)
{
Write-host "Please select a subscription from the list below"
$SubscriptionArray | Select-Object "#", Id, Name | Format-Table
try
{
$SelectedSub = Read-Host "Please enter a selection from 1 to $($SubscriptionArray.count)"
}
catch
{
Write-Warning -Message 'Invalid option, please try again.'
}
}
if($($SubscriptionArray[$SelectedSub - 1].Name))
{
$SubscriptionName = $($SubscriptionArray[$SelectedSub - 1].Name)
}
elseif($($SubscriptionArray[$SelectedSub - 1].SubscriptionName))
{
$SubscriptionName = $($SubscriptionArray[$SelectedSub - 1].SubscriptionName)
}
write-verbose "You Selected Azure Subscription: $SubscriptionName"
if($($SubscriptionArray[$SelectedSub - 1].SubscriptionID))
{
[guid]$SubscriptionID = $($SubscriptionArray[$SelectedSub - 1].SubscriptionID)
}
if($($SubscriptionArray[$SelectedSub - 1].ID))
{
[guid]$SubscriptionID = $($SubscriptionArray[$SelectedSub - 1].ID)
}
}
Write-Host "Selecting Azure Subscription: $($SubscriptionID.Guid) ..." -ForegroundColor Cyan
$Null = Select-AzSubscription -SubscriptionId $SubscriptionID.Guid
$PostBody = BuildBody -method "POST"
#Establish URI to gather resources
$Subscription = $(Get-AzContext).Subscription.id
If($SubscriptionId -and !($ResourceGroupName))
{
write-host "No Resourcegroup provided as a parameter ... triggering against subscription: $SubscriptionId" -ForegroundColor Yellow
$RESOURCEID = "/subscriptions/$Subscription"
}
elseif ($ResourceGroupName)
{
write-host "ResourceGroup provided ... triggering against resource group name:$ResourceGroupName" -ForegroundColor Yellow
$RESOURCEID = "/subscriptions/$Subscription/$ResourceGroup"
}
$azEnvironment = Get-AzEnvironment -Name $Environment
$PostURI = "$($azEnvironment.ResourceManagerUrl)$RESOURCEID/providers/Microsoft.PolicyInsights/policyStates/latest/triggerEvaluation?api-version=2018-07-01-preview"
try
{
$PostRAW = $(Invoke-WebRequest -uri $PostURI @PostBody).Rawcontent
Write-Host "Submitted Policy Evaluation Trigger Request" -foregroundcolor Yellow
}
catch
{
"Error"
exit
}
$PostArray = $PostRaw.Split("`n")
[string]$LocationVar = $($PostArray|Select-String -SimpleMatch "Location")
$GetURI = $($LocationVar.Split(" ",2))[1]
$GetBody = BuildBody -method "GET"
$GetResults = Invoke-WebRequest -uri $GetURI @GetBody
while($GetResults.StatusCode -ne 200)
{
$GetResults = Invoke-WebRequest -uri $GetURI @GetBody
Write-Host "Status code $($GetResults.Statuscode) returned on query. Still in progress...waiting $interval seconds to requery"
start-sleep $interval
}
Write-Host "Successfully Triggered a Policy Evaluation Request" -foregroundcolor Cyan
| 34.214022
| 164
| 0.727567
|
72da7ced3768320eb9795b1dfc9c909feb061c5a
| 952
|
cs
|
C#
|
Assets/Sources/Features/Coroutines/CoroutineSystem.cs
|
OndrejNepozitek/Roguelike-Entitas
|
6a1c6e84aab51240db4e29b2bc32ffae27dea71f
|
[
"MIT"
] | 10
|
2019-01-14T18:25:25.000Z
|
2021-12-14T07:38:58.000Z
|
Assets/Sources/Features/Coroutines/CoroutineSystem.cs
|
OndrejNepozitek/Roguelike-Entitas
|
6a1c6e84aab51240db4e29b2bc32ffae27dea71f
|
[
"MIT"
] | null | null | null |
Assets/Sources/Features/Coroutines/CoroutineSystem.cs
|
OndrejNepozitek/Roguelike-Entitas
|
6a1c6e84aab51240db4e29b2bc32ffae27dea71f
|
[
"MIT"
] | 2
|
2021-01-21T16:19:22.000Z
|
2021-08-05T00:39:28.000Z
|
namespace Assets.Sources.Features.Coroutines
{
using Entitas;
using Helpers.SystemDependencies.Attributes;
using Helpers.SystemDependencies.Phases;
/// <summary>
/// Handle coroutines. Coroutines must not create or alter actions!!!
/// </summary>
[ExecutePhase(ExecutePhase.Input)]
public sealed class CoroutineSystem : IExecuteSystem
{
private readonly IGroup<GameEntity> coroutines;
public CoroutineSystem(Contexts contexts)
{
coroutines = contexts.game.GetGroup(GameMatcher.Coroutine);
}
public void Execute()
{
foreach (var entity in coroutines.GetEntities())
{
var coroutine = entity.coroutine.Value;
if (!coroutine.MoveNext())
{
if (entity.coroutine.Callback != null)
{
entity.coroutine.Callback(entity);
}
entity.RemoveCoroutine();
// TODO: should be better
if (entity.GetComponentIndices().Length == 0)
{
entity.Destroy();
}
}
}
}
}
}
| 22.139535
| 70
| 0.681723
|
b99ea527f10dc52c33bbb2a7e3201fc9dc0e0588
| 3,784
|
dart
|
Dart
|
test/painters/fan_piece_painter_test.dart
|
mjablecnik/FlutterColorPickerWheel
|
28951c8bbcadbca031c2587c2e86d67aa94b9370
|
[
"MIT"
] | 35
|
2022-02-26T03:22:41.000Z
|
2022-03-19T01:36:48.000Z
|
test/painters/fan_piece_painter_test.dart
|
mjablecnik/FlutterColorPickerWheel
|
28951c8bbcadbca031c2587c2e86d67aa94b9370
|
[
"MIT"
] | 2
|
2022-03-08T18:22:34.000Z
|
2022-03-24T21:36:46.000Z
|
test/painters/fan_piece_painter_test.dart
|
mjablecnik/FlutterColorPickerWheel
|
28951c8bbcadbca031c2587c2e86d67aa94b9370
|
[
"MIT"
] | 3
|
2022-03-06T00:25:58.000Z
|
2022-03-12T03:24:38.000Z
|
import 'dart:math';
import 'dart:ui';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:flutter_color_picker_wheel/models/fan_piece.dart';
import 'package:flutter_color_picker_wheel/painters/fan_piece_painter.dart';
class TestCase {
final FanPiece fanPiece;
final int caseNumber;
TestCase({
required this.fanPiece,
required this.caseNumber,
});
}
class TestPainter extends CustomPainter{
final Offset point;
TestPainter({required this.point});
@override
void paint(Canvas canvas, Size size) {
canvas.drawPoints(PointMode.points, [point], Paint()..color=Colors.white..strokeWidth=5);
}
@override
bool shouldRepaint(covariant CustomPainter oldDelegate) {
return false;
}
}
void main() {
final TargetPlatformVariant platformVariant = TargetPlatformVariant.all();
final ValueVariant<TestCase> fanPieceVariant = ValueVariant<TestCase>(
<TestCase>{
TestCase(
fanPiece: FanPiece(angleStart: 0, swipe: 0.3*pi, radiusEnd: 80, radiusStart: 60, color: Colors.red, center: const Offset(155,155)),
caseNumber: 1,
),
TestCase(
fanPiece: FanPiece(angleStart: pi, swipe: 0.2*pi, radiusEnd: 90, radiusStart: 70, color: Colors.green, center: const Offset(255,255)),
caseNumber: 2,
),
TestCase(
fanPiece: FanPiece(angleStart: 2 * pi, swipe: 0.5*pi, radiusEnd: 180, radiusStart: 140, color: Colors.yellow, center: const Offset(125,125)),
caseNumber: 3,
)
}
);
testWidgets(
"Testing FanPiecePainter rendering",
(WidgetTester tester) async {
final GlobalKey target = GlobalKey();
await tester.pumpWidget(
Container(
height: 800,
width: 500,
decoration: BoxDecoration(
border: Border.all(width: 2)
),
child: Center(
child: CustomPaint(
key: target,
painter: FanPiecePainter(
fanPiece: fanPieceVariant.currentValue!.fanPiece,
),
)
)
)
);
await expectLater(find.byKey(target).first, matchesGoldenFile('snapshots/fan_piece_painter_${fanPieceVariant.currentValue!.caseNumber}.png'));
},
variant: fanPieceVariant
);
testWidgets(
"Testing FanPiecePainter hitTest",
(WidgetTester tester) async {
final GlobalKey target = GlobalKey();
await tester.pumpWidget(
Container(
height: 1000,
width: 1000,
decoration: BoxDecoration(
border: Border.all(width: 2)
),
child: Center(
child: CustomPaint(
key: target,
foregroundPainter: TestPainter(
point: const Offset(200,200)
),
painter: FanPiecePainter(
fanPiece: FanPiece(angleStart: 0, swipe: 0.3*pi, radiusEnd: 80, radiusStart: 60, color: Colors.red, center: const Offset(155,155)),
),
)
)
)
);
await expectLater(
find.byKey(target).first,
matchesGoldenFile('snapshots/fan_piece_painter_hitTest_${debugDefaultTargetPlatformOverride.toString()}.png')
);
RenderCustomPaint renderCustomPaint = target.currentContext!.findRenderObject() as RenderCustomPaint;
expect(renderCustomPaint.hitTestSelf(const Offset(200, 200)), true);
},
variant: platformVariant
);
}
| 32.62069
| 153
| 0.598573
|
1aab6fcbb3627e3d3450977a0a36d9208ef6a222
| 173
|
cs
|
C#
|
HaloEzAPI/Model/Response/Stats/Halo5/Player.cs
|
glitch100/Halo-API
|
d0591a27ef81e4b55dd7d7f79de93b6ebde262f4
|
[
"MIT"
] | 23
|
2015-11-06T14:39:46.000Z
|
2022-03-12T22:11:10.000Z
|
HaloEzAPI/Model/Response/Stats/Halo5/Player.cs
|
glitch100/HaloEzAPI
|
d0591a27ef81e4b55dd7d7f79de93b6ebde262f4
|
[
"MIT"
] | 7
|
2015-12-09T10:59:32.000Z
|
2021-06-05T08:28:22.000Z
|
HaloEzAPI/Model/Response/Stats/Halo5/Player.cs
|
glitch100/HaloEzAPI
|
d0591a27ef81e4b55dd7d7f79de93b6ebde262f4
|
[
"MIT"
] | 14
|
2015-11-10T15:59:02.000Z
|
2021-04-01T11:54:41.000Z
|
namespace HaloEzAPI.Model.Response.Stats.Halo5
{
public class Player
{
public string Gamertag { get; set; }
public string Xuid { get; set; }
}
}
| 21.625
| 47
| 0.612717
|
797ec505df88f0a471a28c6b9c88d8f95781f1d8
| 430
|
php
|
PHP
|
app/Post.php
|
DungEmmanuel/Dastu-Foundation
|
3ca47806f757587a2a364e07af697ddeb7d91d30
|
[
"MIT"
] | null | null | null |
app/Post.php
|
DungEmmanuel/Dastu-Foundation
|
3ca47806f757587a2a364e07af697ddeb7d91d30
|
[
"MIT"
] | null | null | null |
app/Post.php
|
DungEmmanuel/Dastu-Foundation
|
3ca47806f757587a2a364e07af697ddeb7d91d30
|
[
"MIT"
] | null | null | null |
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class Post extends Model
{
protected $guarded = ['id'];
public function categories()
{
return $this->belongsToMany('App\Category')->withTimestamps();
}
public function comment()
{
return $this->hasMany('App\Comment', 'post_id');
}
public function image()
{
return $this->hasMany('App\Image');
}
}
| 16.538462
| 72
| 0.602326
|
87e8a53d5ec0b64e0177f6bee9b81eec519c2578
| 1,104
|
gemspec
|
Ruby
|
maybe_ruby.gemspec
|
o-bo/maybe_ruby
|
f7664af8f756203840b168e5b5fec179001e3668
|
[
"MIT"
] | null | null | null |
maybe_ruby.gemspec
|
o-bo/maybe_ruby
|
f7664af8f756203840b168e5b5fec179001e3668
|
[
"MIT"
] | null | null | null |
maybe_ruby.gemspec
|
o-bo/maybe_ruby
|
f7664af8f756203840b168e5b5fec179001e3668
|
[
"MIT"
] | null | null | null |
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'maybe_ruby/version'
Gem::Specification.new do |spec|
spec.name = "maybe_ruby"
spec.version = MaybeRuby::VERSION
spec.authors = ["o-bo"]
spec.summary = %q{An implementation of the Maybe monad in ruby.}
spec.description = %q{MaubeRuby is a simple implementation of the Maybe monad in ruby.}
spec.homepage = "https://github.com/o-bo/maybe_ruby"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.11"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "minitest", "~> 5.0"
spec.add_development_dependency "guard"
spec.add_development_dependency "guard-minitest"
spec.add_development_dependency "terminal-notifier-guard"
end
| 39.428571
| 104
| 0.67663
|
0d338876bea499a629a13fd6e226326d6c90cabb
| 267
|
c
|
C
|
BitwiseOps_Arrays/B2/mainb2.c
|
AxoyTO/C_Programming_UNIX
|
a4a4b732a42f9bc8953e6baa512034f745ea00e4
|
[
"MIT"
] | 1
|
2020-12-13T12:51:03.000Z
|
2020-12-13T12:51:03.000Z
|
BitwiseOps_Arrays/B2/mainb2.c
|
RanyG961/C_Programming_UNIX
|
d6ba64fb1df69b4396793c730c61da0812acd23f
|
[
"MIT"
] | null | null | null |
BitwiseOps_Arrays/B2/mainb2.c
|
RanyG961/C_Programming_UNIX
|
d6ba64fb1df69b4396793c730c61da0812acd23f
|
[
"MIT"
] | 1
|
2020-12-14T10:28:13.000Z
|
2020-12-14T10:28:13.000Z
|
#include <stdio.h>
void print_simple(int n)
{
int i;
for (i=2; n!=1; i++)
{
if(n%i == 0)
{
n /= i;
if(n!=1)
printf("%d ",i);
else
printf("%d",i);
i--;
}
}
}
int main()
{
int s;
scanf("%d",&s);
print_simple(s);
return 0;
}
| 9.535714
| 27
| 0.430712
|
5dce0207b6d99ec0a88c88a57160b5a0ecc20997
| 4,507
|
cpp
|
C++
|
source/predictdepth.cpp
|
astolap/WaSP
|
2daa1963d1f3d3fb50d3b576d470f9af0f6ce463
|
[
"BSD-2-Clause"
] | 4
|
2020-03-04T10:41:26.000Z
|
2021-04-15T06:29:41.000Z
|
source/predictdepth.cpp
|
astolap/WaSP
|
2daa1963d1f3d3fb50d3b576d470f9af0f6ce463
|
[
"BSD-2-Clause"
] | 2
|
2019-01-14T15:58:47.000Z
|
2021-04-18T09:09:51.000Z
|
source/predictdepth.cpp
|
astolap/WaSP
|
2daa1963d1f3d3fb50d3b576d470f9af0f6ce463
|
[
"BSD-2-Clause"
] | 4
|
2018-07-20T14:36:42.000Z
|
2021-06-28T13:03:57.000Z
|
/*BSD 2-Clause License
* Copyright(c) 2019, Pekka Astola
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met :
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "predictdepth.hh"
#include "warping.hh"
#include "medianfilter.hh"
#include "ppm.hh"
#include "inpainting.hh"
#include "merging.hh"
#include <ctime>
#include <vector>
#include <cstdint>
using std::int32_t;
using std::uint32_t;
using std::int16_t;
using std::uint16_t;
using std::int8_t;
using std::uint8_t;
void WaSP_predict_depth(view* SAI, view *LF) {
/* forward warp depth */
if (SAI->n_depth_references > 0) {
printf("Predicting normalized disparity for view %03d_%03d\n", SAI->c, SAI->r);
uint16_t **warped_texture_views_0_N =
new uint16_t*[SAI->n_depth_references]();
uint16_t **warped_depth_views_0_N =
new uint16_t*[SAI->n_depth_references]();
float **DispTargs_0_N =
new float*[SAI->n_depth_references]();
init_warping_arrays(
SAI->n_depth_references,
warped_texture_views_0_N,
warped_depth_views_0_N,
DispTargs_0_N,
SAI->nr,
SAI->nc,
SAI->ncomp);
for (int32_t ij = 0; ij < SAI->n_depth_references; ij++) {
view *ref_view = LF + SAI->depth_references[ij];
int32_t tmp_w, tmp_r, tmp_ncomp;
aux_read16PGMPPM(
ref_view->path_out_pgm,
tmp_w,
tmp_r,
tmp_ncomp,
ref_view->depth);
ref_view->color = new uint16_t[ref_view->nr*ref_view->nc * 3]();
//aux_read16PGMPPM(ref_view->path_out_ppm, tmp_w, tmp_r, tmp_ncomp,
// ref_view->color);
warpView0_to_View1(
ref_view,
SAI,
warped_texture_views_0_N[ij],
warped_depth_views_0_N[ij],
DispTargs_0_N[ij]);
delete[](ref_view->depth);
delete[](ref_view->color);
ref_view->depth = nullptr;
ref_view->color = nullptr;
}
/* merge depth using median*/
//int32_t startt = clock();
double *hole_mask = new double[SAI->nr*SAI->nc]();
for (int32_t ij = 0; ij < SAI->nr * SAI->nc; ij++) {
hole_mask[ij] = INIT_DISPARITY_VALUE;
std::vector<uint16_t> depth_values;
for (int32_t uu = 0; uu < SAI->n_depth_references; uu++) {
uint16_t *pp = warped_depth_views_0_N[uu];
float *pf = DispTargs_0_N[uu];
if (*(pf + ij) > INIT_DISPARITY_VALUE) {
depth_values.push_back(*(pp + ij));
}
}
if (depth_values.size() > 0) {
SAI->depth[ij] = getMedian(depth_values);
hole_mask[ij] = 1.0f;
}
}
uint32_t nholes = holefilling(
SAI->depth,
SAI->nr,
SAI->nc,
INIT_DISPARITY_VALUE,
hole_mask);
delete[](hole_mask);
clean_warping_arrays(
SAI->n_depth_references,
warped_texture_views_0_N,
warped_depth_views_0_N,
DispTargs_0_N);
}
}
| 31.739437
| 87
| 0.607278
|
2c1d12a0af2d9f77092fad7033bf518c233d3155
| 197
|
lua
|
Lua
|
lua/lib/linxsh/handler/uri_handler.lua
|
xslin/behavior
|
db4816f691f4130741aed9a26d9ddbe07ab74825
|
[
"Apache-2.0"
] | null | null | null |
lua/lib/linxsh/handler/uri_handler.lua
|
xslin/behavior
|
db4816f691f4130741aed9a26d9ddbe07ab74825
|
[
"Apache-2.0"
] | null | null | null |
lua/lib/linxsh/handler/uri_handler.lua
|
xslin/behavior
|
db4816f691f4130741aed9a26d9ddbe07ab74825
|
[
"Apache-2.0"
] | null | null | null |
local utils = require "linxsh.utils.utils"
local base64 = require "linxsh.utils.Base64"
local uri = {}
--[[
解析请求资源
--]]
function uri.parseUri()
return utils.uri.getQueryAsTable()
end
return uri
| 15.153846
| 44
| 0.720812
|
6b7c2f6ec612db66ce7e46988e60ab90da038665
| 968
|
js
|
JavaScript
|
packages/h2-alpn/lib/h2/session-proxy.js
|
zentrick/alpn-agent
|
e7ad6f7b51600d23eda29fa7b542151ce76b6600
|
[
"MIT"
] | 4
|
2018-09-06T06:41:25.000Z
|
2019-11-23T23:20:21.000Z
|
packages/h2-alpn/lib/h2/session-proxy.js
|
zentrick/alpn-agent
|
e7ad6f7b51600d23eda29fa7b542151ce76b6600
|
[
"MIT"
] | 2
|
2018-09-07T17:26:53.000Z
|
2018-09-07T17:30:10.000Z
|
packages/h2-alpn/lib/h2/session-proxy.js
|
zentrick/alpn-agent
|
e7ad6f7b51600d23eda29fa7b542151ce76b6600
|
[
"MIT"
] | null | null | null |
const { H2SessionProxy } = require('@zentrick/h2-util')
const defer = require('../util/deferred')
const _backend = Symbol('backend')
class Http2SessionProxy extends H2SessionProxy {
constructor (authority, options, listener) {
super(authority, options, listener)
this[_backend] = defer()
}
_setBackend (backend) {
this._addSession(backend)
this[_backend].resolve(backend)
}
_getSession (condition = null) {
const backend = this[_backend].value
return backend != null && (condition == null || condition(backend))
? backend
: null
}
_getAvailableSession (onAvailable) {
const backend = this[_backend].value
if (backend != null) {
onAvailable(backend)
} else {
this[_backend].promise.then(onAvailable)
}
}
async _forEachSession (action) {
const backend = this[_backend].value
if (backend != null) {
await action(backend)
}
}
}
module.exports = Http2SessionProxy
| 23.047619
| 71
| 0.661157
|
b5c84a2d18b98cc972c81007756f7cf035aff9fc
| 786
|
rb
|
Ruby
|
lib/axe/shared/runnable.rb
|
krisleech/axe
|
001f9ae008f3564072d6ea7fb720686fbadeef0e
|
[
"MIT"
] | 4
|
2015-12-19T09:19:22.000Z
|
2017-05-17T14:09:37.000Z
|
lib/axe/shared/runnable.rb
|
krisleech/axe
|
001f9ae008f3564072d6ea7fb720686fbadeef0e
|
[
"MIT"
] | null | null | null |
lib/axe/shared/runnable.rb
|
krisleech/axe
|
001f9ae008f3564072d6ea7fb720686fbadeef0e
|
[
"MIT"
] | null | null | null |
# State machine mixin for making an object runnable.
#
module Axe
module Runnable
Stopped = 'stopped'.freeze
Started = 'started'.freeze
Stopping = 'stopping'.freeze
def initialize(*args)
@status = Stopped
super
end
# Returns true when consumer has been started
#
def started?
@status == Started
end
# Returns true when the consumer is in the process of stopping
#
def stopping?
@status == Stopping
end
# Returns true when the consumer is stopped
#
def stopped?
@status == Stopped
end
private
# sets a new status and logs change
#
def status(new_status, message = '')
@status = new_status
log "#{@status.to_s.capitalize} #{message}"
end
end
end
| 18.714286
| 66
| 0.613232
|
da3e2055635deea7fe5e0046a99220e02f8824b1
| 7,937
|
php
|
PHP
|
wp-content/plugins/types/vendor/toolset/toolset-common/utility/admin/notice/abstract.php
|
moraycreative/swimhere.test
|
004889f38cff8e63369337d0ce055b6efab23d8f
|
[
"MIT"
] | null | null | null |
wp-content/plugins/types/vendor/toolset/toolset-common/utility/admin/notice/abstract.php
|
moraycreative/swimhere.test
|
004889f38cff8e63369337d0ce055b6efab23d8f
|
[
"MIT"
] | null | null | null |
wp-content/plugins/types/vendor/toolset/toolset-common/utility/admin/notice/abstract.php
|
moraycreative/swimhere.test
|
004889f38cff8e63369337d0ce055b6efab23d8f
|
[
"MIT"
] | null | null | null |
<?php
/**
* Class Toolset_Admin_Notice_Abstract
*
* @since 2.3.0 First release of Toolset_Admin_Notice_Abstract
* All containing properties and methods without since tag are part of the initial release
*/
abstract class Toolset_Admin_Notice_Abstract implements Toolset_Admin_Notice_Interface {
/**
* @var string
*/
protected $id;
/**
* @var string
*/
protected $title;
/**
* @var string
*/
protected $content;
/**
* @var int
*/
protected $priority = 0;
/**
* @var Toolset_Condition_Interface[]
*/
protected $conditions;
/**
* Temporary message
* @var bool
*/
protected $is_temporary = false;
/**
* By default every of our messages is permanent dismissible
*
* @var bool
*/
protected $is_dismissible_permanent = true;
/**
* $is_dismissible_globally if the message is per user or per installation
* e.g. our toolset installer should be per installation (makes no sense to let every user install the site)
*
* @var bool
*/
protected $is_dismissible_globally = false;
/**
* template file
*/
protected $template_file;
/**
* @var Toolset_Constants
*/
protected $constants;
/**
* Notice is only for administrators
*
* This is an EXCEPTION of condition being placed directly into notice class
* Reason for Exception: We need it for all common notices (no rule without exception) and it's too easy
* missing to add the "Toolset_Condition_User_Role_Admin" condition to every new future notice.
*
* For all other conditions use the ->add_condition() concept.
*
* @var bool
*/
protected $is_only_for_administrators = true;
/**
* @var string
*/
private $similar_notices_key;
/**
* @var callable[]
*/
private $dependency_callbacks = array();
/**
* @var array|null
*/
private $template_context;
/**
* @var boolean
*/
private $is_inline = false;
/**
* Toolset_Admin_Notice constructor.
*
* @param string $id
* @param string $message
* @param Toolset_Constants|null $constants
*/
public function __construct( $id, $message = '', Toolset_Constants $constants = null ) {
if ( null === $constants ) {
$constants = new Toolset_Constants();
}
$this->constants = $constants;
if( ! function_exists( 'sanitize_title' ) ) {
// abort, called to early
throw new InvalidArgumentException(
'Toolset_Admin_Notice_Abstract Error: "sanitize_title()" does not exists. '
. 'Toolset_Admin_Notice_Abstract::create_notice() was called too early.'
);
}
if( ! is_string( $id ) ) {
// no string given
throw new InvalidArgumentException( 'Toolset_Admin_Notice_Abstract Error: $id must be a string.' );
}
if( ! empty( $message ) ) {
$this->set_content( $message );
}
$this->id = sanitize_title( $id );
// set default template file
$this->set_default_template_file();
}
/**
* @return string
*/
public function get_id() {
return $this->id;
}
/**
* @param string $title
*/
public function set_title( $title ) {
$this->title = $title;
}
/**
* @param string $key
*/
public function set_similar_notices_key( $key ) {
$this->similar_notices_key = $key;
}
/**
* @return string
*/
public function get_similar_notices_key() {
return $this->similar_notices_key;
}
/**
* @return string
*/
public function get_title() {
return $this->title;
}
/**
* @param string $content
*
* @return bool
*/
public function set_content( $content ) {
if( ! is_string( $content ) ) {
return false;
}
$this->content = $content;
}
/**
* @return string
*/
public function get_content() {
return $this->content;
}
/**
* Output of string
*/
public function render_content() {
if( is_file( $this->content ) ) {
include( $this->content );
return;
}
echo $this->content;
}
/**
* Adds a condition
*
* @param Toolset_Condition_Interface $condition
*/
public function add_condition( Toolset_Condition_Interface $condition ) {
$this->conditions[] = $condition;
}
/**
* Sets priority of the message
*
* @param int $priority
*/
public function set_priority( $priority ) {
if( is_numeric( $priority ) ) {
$this->priority = $priority;
}
}
/**
* @return int
*/
public function get_priority( ) {
return $this->priority;
}
/**
* True or false
* @param bool $bool
*/
public function set_is_dismissible_permanent( $bool ) {
$this->is_dismissible_permanent = $bool === false
? false
: true;
}
/**
* @return bool
*/
public function is_dismissible_permanent() {
return $this->is_dismissible_permanent;
}
/**
* True or false
* @param bool $bool
*/
public function set_is_dismissible_globally( $bool ) {
$this->is_dismissible_globally = $bool === false
? false
: true;
if( $this->is_dismissible_globally ) {
$this->is_dismissible_permanent = true;
}
}
/**
* @return bool
*/
public function is_dimissibile_globally() {
return $this->is_dismissible_globally;
}
/**
* Getter of is_temporary
* @return bool
*/
public function is_temporary(){
return $this->is_temporary;
}
/**
* Print Notice
*/
public function render() {
if( ! file_exists( $this->template_file ) ) {
error_log( 'Toolset_Admin_Notice_Abstract Error: Template "'. $this->template_file . '" could not be found.' );
return;
}
$this->run_dependency_callbacks();
include( $this->template_file );
}
abstract protected function set_default_template_file();
public function conditions_met() {
if( $this->get_is_only_for_administrators() && ! current_user_can( 'manage_options' ) ) {
// this notice is only for administrators
return false;
}
if( empty( $this->conditions ) ) {
// this notice has no conditions
return true;
}
foreach( $this->conditions as $condition ) {
if( ! $condition->is_met() ) {
return false;
}
}
// all conditions met
return true;
}
/**
* Dismiss notice
*/
public function dismiss() {
if( ! $this->is_dismissible_permanent() ) {
error_log( 'Notice with id "' . $this->get_id() . '" is not dismissible.' );
return;
}
Toolset_Admin_Notices_Manager::dismiss_notice_by_id( $this->get_id(), $this->is_dimissibile_globally() );
}
/**
* @return bool
*/
public function get_is_only_for_administrators() {
return $this->is_only_for_administrators;
}
/**
* @param bool $bool
*/
public function set_is_only_for_administrators( $bool ) {
$this->is_only_for_administrators = $bool === false
? false
: true;
}
public function set_template_path( $template_path ) {
if( file_exists( $template_path ) ) {
$this->template_file = $template_path;
}
}
/**
* Template path for a notice with the Toolset Robot
*/
public function set_template_toolset_robot() {
$this->template_file = TOOLSET_COMMON_PATH . '/templates/admin/notice/toolset-robot.phtml';
}
/**
* @inheritdoc
* @param callable $callback
* @since 2.8
*/
public function add_dependency_callback( $callback ) {
if( ! is_callable( $callback ) ) {
throw new InvalidArgumentException();
}
$this->dependency_callbacks[] = $callback;
}
/**
* Run all callbacks previously added via add_dependency_callback().
*
* @since 2.8
*/
public function run_dependency_callbacks() {
foreach( $this->dependency_callbacks as $callback ) {
$callback();
}
}
/**
* Set a context variable that will be accessible when rendering the notice template.
*
* @param array $context
*
* @return void
* @since 2.á
*/
public function set_template_context( $context ) {
$this->template_context = toolset_ensarr( $context );
}
/**
* Get the context variable.
*
* @return array
*/
public function get_template_context() {
return toolset_ensarr( $this->template_context );
}
/**
* Sets inline mode.
*/
public function set_inline_mode( $inline ) {
$this->is_inline = $inline;
}
}
| 18.897619
| 114
| 0.654529
|
5d31edcab161836e23e007f313e60c9d7ee1aec6
| 3,435
|
cpp
|
C++
|
src/qt/qtwebkit/Source/WebCore/page/scrolling/ScrollingThread.cpp
|
viewdy/phantomjs
|
eddb0db1d253fd0c546060a4555554c8ee08c13c
|
[
"BSD-3-Clause"
] | 1
|
2015-05-27T13:52:20.000Z
|
2015-05-27T13:52:20.000Z
|
src/qt/qtwebkit/Source/WebCore/page/scrolling/ScrollingThread.cpp
|
mrampersad/phantomjs
|
dca6f77a36699eb4e1c46f7600cca618f01b0ac3
|
[
"BSD-3-Clause"
] | null | null | null |
src/qt/qtwebkit/Source/WebCore/page/scrolling/ScrollingThread.cpp
|
mrampersad/phantomjs
|
dca6f77a36699eb4e1c46f7600cca618f01b0ac3
|
[
"BSD-3-Clause"
] | 1
|
2022-02-18T10:41:38.000Z
|
2022-02-18T10:41:38.000Z
|
/*
* Copyright (C) 2012 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "ScrollingThread.h"
#if ENABLE(THREADED_SCROLLING)
#include <wtf/MainThread.h>
namespace WebCore {
ScrollingThread::ScrollingThread()
: m_threadIdentifier(0)
{
}
bool ScrollingThread::isCurrentThread()
{
if (!shared().m_threadIdentifier)
return false;
return currentThread() == shared().m_threadIdentifier;
}
void ScrollingThread::dispatch(const Function<void()>& function)
{
shared().createThreadIfNeeded();
{
MutexLocker locker(shared().m_functionsMutex);
shared().m_functions.append(function);
}
shared().wakeUpRunLoop();
}
static void callFunctionOnMainThread(const Function<void()>* function)
{
callOnMainThread(*function);
delete function;
}
void ScrollingThread::dispatchBarrier(const Function<void()>& function)
{
dispatch(bind(callFunctionOnMainThread, new Function<void()>(function)));
}
ScrollingThread& ScrollingThread::shared()
{
DEFINE_STATIC_LOCAL(ScrollingThread, scrollingThread, ());
return scrollingThread;
}
void ScrollingThread::createThreadIfNeeded()
{
if (m_threadIdentifier)
return;
// Wait for the thread to initialize the run loop.
{
MutexLocker locker(m_initializeRunLoopConditionMutex);
m_threadIdentifier = createThread(threadCallback, this, "WebCore: Scrolling");
#if PLATFORM(MAC)
while (!m_threadRunLoop)
m_initializeRunLoopCondition.wait(m_initializeRunLoopConditionMutex);
#endif
}
}
void ScrollingThread::threadCallback(void* scrollingThread)
{
static_cast<ScrollingThread*>(scrollingThread)->threadBody();
}
void ScrollingThread::threadBody()
{
initializeRunLoop();
}
void ScrollingThread::dispatchFunctionsFromScrollingThread()
{
ASSERT(isCurrentThread());
Vector<Function<void()> > functions;
{
MutexLocker locker(m_functionsMutex);
m_functions.swap(functions);
}
for (size_t i = 0; i < functions.size(); ++i)
functions[i]();
}
} // namespace WebCore
#endif // ENABLE(THREADED_SCROLLING)
| 27.926829
| 86
| 0.726929
|
b313ed7595626274004698670f484688dba69b04
| 3,759
|
rs
|
Rust
|
gml_fmt_lib/src/statements.rs
|
CpazR/gml_fmt
|
7f9eb0944e15a06dbfc82f05e326da1952505e3e
|
[
"MIT"
] | 13
|
2020-02-23T13:52:08.000Z
|
2021-11-11T23:36:39.000Z
|
gml_fmt_lib/src/statements.rs
|
CpazR/gml_fmt
|
7f9eb0944e15a06dbfc82f05e326da1952505e3e
|
[
"MIT"
] | 30
|
2019-08-05T20:25:29.000Z
|
2020-01-11T13:41:13.000Z
|
gml_fmt_lib/src/statements.rs
|
CpazR/gml_fmt
|
7f9eb0944e15a06dbfc82f05e326da1952505e3e
|
[
"MIT"
] | 4
|
2020-07-04T08:55:45.000Z
|
2022-01-01T10:38:07.000Z
|
use super::expressions::*;
use super::lex_token::Token;
/// Statements are always heap-allocated behind their wrapper.
pub type StmtBox<'a> = Box<StatementWrapper<'a>>;

/// A delimiter-separated sequence of items (e.g. a comma list), remembering
/// whether a trailing delimiter was present in the source.
#[derive(Debug)]
pub struct DelimitedLines<'a, T> {
    pub lines: Vec<DelimitedLine<'a, T>>,
    pub has_end_delimiter: bool,
}
/// A parsed statement plus whether it was terminated by a semicolon.
#[derive(Debug)]
pub struct StatementWrapper<'a> {
    pub statement: Statement<'a>,
    pub has_semicolon: bool,
}
impl<'a> StatementWrapper<'a> {
    /// Boxes `statement` together with its semicolon flag.
    pub fn new(statement: Statement<'a>, has_semicolon: bool) -> Box<StatementWrapper<'a>> {
        Box::new(StatementWrapper {
            statement,
            has_semicolon,
        })
    }

    /// True when the wrapped statement is an expression statement.
    /// (The variant name's triple-s spelling matches the `Statement` enum.)
    pub fn hold_expr(&self) -> bool {
        // `matches!` replaces the verbose `if let … { true } else { false }`.
        matches!(self.statement, Statement::ExpresssionStatement { .. })
    }
}
/// Every statement form the parser produces. Comment/newline runs are carried
/// alongside structural tokens so the formatter can reproduce them faithfully.
/// (`CommentsAndNewlines` / `ExprBox` come from the `expressions` module.)
#[derive(Debug)]
pub enum Statement<'a> {
    /// `var a, b = 1, …` — one declaration list introduced by a var-type token.
    VariableDeclList {
        starting_var_type: Token<'a>,
        comments_after_control_word: CommentsAndNewlines<'a>,
        var_decl: DelimitedLines<'a, VariableDecl<'a>>,
    },
    /// `enum Name { members }`.
    EnumDeclaration {
        comments_after_control_word: CommentsAndNewlines<'a>,
        name: ExprBox<'a>,
        comments_after_lbrace: CommentsAndNewlines<'a>,
        members: DelimitedLines<'a, ExprBox<'a>>,
    },
    /// `if cond then [else]`; `comments_between` sits before the else branch.
    If {
        comments_after_control_word: CommentsAndNewlines<'a>,
        condition: ExprBox<'a>,
        then_branch: StmtBox<'a>,
        comments_between: CommentsAndNewlines<'a>,
        else_branch: Option<StmtBox<'a>>,
    },
    /// `while`/`repeat` loops share one shape; `token` records which keyword.
    WhileWithRepeat {
        comments_after_control_word: CommentsAndNewlines<'a>,
        token: Token<'a>,
        condition: ExprBox<'a>,
        body: StmtBox<'a>,
    },
    /// `do body until cond`.
    DoUntil {
        comments_after_control_word: CommentsAndNewlines<'a>,
        body: StmtBox<'a>,
        comments_between: CommentsAndNewlines<'a>,
        condition: ExprBox<'a>,
    },
    /// `for (init; cond; incr) body` — every clause is optional.
    For {
        comments_after_control_word: CommentsAndNewlines<'a>,
        comments_after_lparen: CommentsAndNewlines<'a>,
        initializer: Option<StmtBox<'a>>,
        comments_after_initializer: CommentsAndNewlines<'a>,
        condition: Option<ExprBox<'a>>,
        comments_after_condition: CommentsAndNewlines<'a>,
        increment: Option<ExprBox<'a>>,
        comments_after_increment: CommentsAndNewlines<'a>,
        comments_after_rparen: CommentsAndNewlines<'a>,
        body: StmtBox<'a>,
    },
    /// `switch cond { cases }`.
    Switch {
        comments_after_control_word: CommentsAndNewlines<'a>,
        condition: ExprBox<'a>,
        comments_after_lbrace: CommentsAndNewlines<'a>,
        cases: Vec<Case<'a>>,
    },
    /// A bare expression used as a statement. (Spelling with three s's is
    /// part of the public API; see StatementWrapper::hold_expr.)
    ExpresssionStatement {
        expression: ExprBox<'a>,
    },
    /// `{ statements }`.
    Block {
        comments_after_lbrace: CommentsAndNewlines<'a>,
        statements: Vec<StmtBox<'a>>,
    },
    /// `return [expr]`.
    Return {
        expression: Option<ExprBox<'a>>,
    },
    Break,
    Exit,
    /// Single-line comment token kept as its own statement.
    Comment {
        comment: Token<'a>,
    },
    /// `/* … */` comment kept as its own statement.
    MultilineComment {
        multiline_comment: Token<'a>,
    },
    RegionBegin(Token<'a>),
    RegionEnd(Token<'a>),
    Macro(Token<'a>),
    /// `#define name body…` script definition.
    Define {
        comments_after_control_word: CommentsAndNewlines<'a>,
        script_name: ExprBox<'a>,
        body: Vec<StmtBox<'a>>,
    },
}
/// One arm of a `switch`: `case expr:` or `default:` plus its statements.
#[derive(Debug)]
pub struct Case<'a> {
    pub control_word: CaseType<'a>,
    pub comments_after_control_word: CommentsAndNewlines<'a>,
    pub comments_after_colon: CommentsAndNewlines<'a>,
    pub statements: Vec<StmtBox<'a>>,
}
/// Distinguishes `case <expr>` arms from the `default` arm.
#[derive(Debug)]
pub enum CaseType<'a> {
    Case(ExprBox<'a>),
    Default,
}
/// One declaration inside a var-decl list. `say_var` — presumably a repeated
/// var-type keyword before this entry (TODO confirm against the parser),
/// with its surrounding comments in `say_var_comments`.
#[derive(Debug)]
pub struct VariableDecl<'a> {
    pub var_expr: ExprBox<'a>,
    pub say_var: Option<Token<'a>>,
    pub say_var_comments: Option<CommentsAndNewlines<'a>>,
}
/// One item of a DelimitedLines list plus the comments trailing it.
#[derive(Debug)]
pub struct DelimitedLine<'a, T> {
    pub expr: T,
    pub trailing_comment: CommentsAndNewlines<'a>,
}
| 27.23913
| 92
| 0.62091
|
4f0c1890be98c77985cf75014af8759629922546
| 52,817
|
rb
|
Ruby
|
activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
|
imajeet/rails
|
1689d9dbb228e427193bf5f32325bd8d2b87aa9e
|
[
"MIT"
] | 1
|
2021-04-13T21:30:51.000Z
|
2021-04-13T21:30:51.000Z
|
activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
|
imajeet/rails
|
1689d9dbb228e427193bf5f32325bd8d2b87aa9e
|
[
"MIT"
] | 1
|
2021-03-11T07:37:41.000Z
|
2021-03-11T07:37:41.000Z
|
activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
|
imajeet/rails
|
1689d9dbb228e427193bf5f32325bd8d2b87aa9e
|
[
"MIT"
] | null | null | null |
# frozen_string_literal: true
require_relative "../../migration/join_table"
require "active_support/core_ext/string/access"
require "digest/sha2"
module ActiveRecord
module ConnectionAdapters # :nodoc:
module SchemaStatements
include ActiveRecord::Migration::JoinTable
# Returns a hash of mappings from the abstract data types to the native
# database types. See TableDefinition#column for details on the recognized
# abstract data types.
def native_database_types
  # Empty by default; concrete adapters override with their own type map.
  {}
end
# Adapter-specific table options; nil unless an adapter overrides this.
def table_options(table_name)
  nil
end
# Returns the table comment that's stored in database metadata.
def table_comment(table_name)
  # Default: comments unsupported; adapters with comment support override.
  nil
end
# Truncates a table alias according to the limits of the current adapter.
def table_alias_for(table_name)
  # Truncate to the adapter's alias limit and make dotted names legal
  # identifiers by replacing "." with "_".
  table_name[0...table_alias_length].tr(".", "_")
end
# Returns the relation names useable to back Active Record models.
# For most adapters this means all #tables and #views.
def data_sources
  query_values(data_source_sql, "SCHEMA")
rescue NotImplementedError
  # Adapter has no single data-source query: union tables and views instead.
  tables | views
end
# Checks to see if the data source +name+ exists on the database.
#
# data_source_exists?(:ebooks)
#
def data_source_exists?(name)
  # nil/blank names short-circuit to nil (falsy) without querying.
  query_values(data_source_sql(name), "SCHEMA").any? if name.present?
rescue NotImplementedError
  # Fallback for adapters without a data-source query.
  data_sources.include?(name.to_s)
end
# Returns an array of table names defined in the database.
def tables
  # Only base tables; views are excluded by the type filter.
  query_values(data_source_sql(type: "BASE TABLE"), "SCHEMA")
end
# Checks to see if the table +table_name+ exists on the database.
#
# table_exists?(:developers)
#
def table_exists?(table_name)
  # Blank names short-circuit to nil (falsy) without querying.
  query_values(data_source_sql(table_name, type: "BASE TABLE"), "SCHEMA").any? if table_name.present?
rescue NotImplementedError
  # Fallback for adapters without a data-source query.
  tables.include?(table_name.to_s)
end
# Returns an array of view names defined in the database.
def views
  # Only views; base tables are excluded by the type filter.
  query_values(data_source_sql(type: "VIEW"), "SCHEMA")
end
# Checks to see if the view +view_name+ exists on the database.
#
# view_exists?(:ebooks)
#
def view_exists?(view_name)
  # Blank names short-circuit to nil (falsy) without querying.
  query_values(data_source_sql(view_name, type: "VIEW"), "SCHEMA").any? if view_name.present?
rescue NotImplementedError
  # Fallback for adapters without a data-source query.
  views.include?(view_name.to_s)
end
# Returns an array of indexes for the given table.
# Abstract: every concrete adapter must implement index introspection.
def indexes(table_name, name = nil)
  raise NotImplementedError, "#indexes is not implemented"
end
# Checks to see if an index exists on a table for a given index definition.
#
# # Check an index exists
# index_exists?(:suppliers, :company_id)
#
# # Check an index on multiple columns exists
# index_exists?(:suppliers, [:company_id, :company_type])
#
# # Check a unique index exists
# index_exists?(:suppliers, :company_id, unique: true)
#
# # Check an index with a custom name exists
# index_exists?(:suppliers, :company_id, name: "idx_company_id")
#
# True when +table_name+ has an index over exactly +column_name+ (a name or
# array of names), optionally also matching :unique and/or :name.
def index_exists?(table_name, column_name, options = {})
  wanted_columns = Array(column_name).map(&:to_s)
  wanted_name = options[:name] && options[:name].to_s

  indexes(table_name).any? do |index|
    next false unless index.columns == wanted_columns
    next false if options[:unique] && !index.unique
    next false if wanted_name && index.name != wanted_name
    true
  end
end
# Returns an array of +Column+ objects for the table specified by +table_name+.
def columns(table_name)
  table_name = table_name.to_s
  # column_definitions returns raw adapter rows; wrap each in a Column object.
  column_definitions(table_name).map do |field|
    new_column_from_field(table_name, field)
  end
end
# Checks to see if a column exists in a given table.
#
# # Check a column exists
# column_exists?(:suppliers, :name)
#
# # Check a column exists of a particular type
# column_exists?(:suppliers, :name, :string)
#
# # Check a column exists with a specific definition
# column_exists?(:suppliers, :name, :string, limit: 100)
# column_exists?(:suppliers, :name, :string, default: 'default')
# column_exists?(:suppliers, :name, :string, null: false)
# column_exists?(:suppliers, :tax, :decimal, precision: 8, scale: 2)
#
# True when +table_name+ has a column +column_name+, optionally also matching
# +type+ and any recognized column attribute given in +options+
# (limit/default/null/precision/scale, per column_options_keys).
def column_exists?(table_name, column_name, type = nil, options = {})
  wanted = column_name.to_s

  columns(table_name).any? do |column|
    next false unless column.name == wanted
    next false if type && column.type != type

    # Each option key that the caller supplied must match the column.
    column_options_keys.all? do |attr|
      !options.key?(attr) || column.send(attr) == options[attr]
    end
  end
end
# Returns just a table's primary key
# Returns the table's primary key: a single column name for a simple key,
# or the full array of names for a composite key.
def primary_key(table_name)
  names = primary_keys(table_name)
  names.size > 1 ? names : names.first
end
# Creates a new table with the name +table_name+. +table_name+ may either
# be a String or a Symbol.
#
# There are two ways to work with #create_table. You can use the block
# form or the regular form, like this:
#
# === Block form
#
# # create_table() passes a TableDefinition object to the block.
# # This form will not only create the table, but also columns for the
# # table.
#
# create_table(:suppliers) do |t|
# t.column :name, :string, limit: 60
# # Other fields here
# end
#
# === Block form, with shorthand
#
# # You can also use the column types as method calls, rather than calling the column method.
# create_table(:suppliers) do |t|
# t.string :name, limit: 60
# # Other fields here
# end
#
# === Regular form
#
# # Creates a table called 'suppliers' with no columns.
# create_table(:suppliers)
# # Add a column to 'suppliers'.
# add_column(:suppliers, :name, :string, {limit: 60})
#
# The +options+ hash can include the following keys:
# [<tt>:id</tt>]
# Whether to automatically add a primary key column. Defaults to true.
# Join tables for {ActiveRecord::Base.has_and_belongs_to_many}[rdoc-ref:Associations::ClassMethods#has_and_belongs_to_many] should set it to false.
#
# A Symbol can be used to specify the type of the generated primary key column.
# [<tt>:primary_key</tt>]
# The name of the primary key, if one is to be added automatically.
# Defaults to +id+. If <tt>:id</tt> is false, then this option is ignored.
#
# If an array is passed, a composite primary key will be created.
#
# Note that Active Record models will automatically detect their
# primary key. This can be avoided by using
# {self.primary_key=}[rdoc-ref:AttributeMethods::PrimaryKey::ClassMethods#primary_key=] on the model
# to define the key explicitly.
#
# [<tt>:options</tt>]
# Any extra options you want appended to the table definition.
# [<tt>:temporary</tt>]
# Make a temporary table.
# [<tt>:force</tt>]
# Set to true to drop the table before creating it.
# Set to +:cascade+ to drop dependent objects as well.
# Defaults to false.
# [<tt>:as</tt>]
# SQL to use to generate the table. When this option is used, the block is
# ignored, as are the <tt>:id</tt> and <tt>:primary_key</tt> options.
#
# ====== Add a backend specific option to the generated SQL (MySQL)
#
# create_table(:suppliers, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8')
#
# generates:
#
# CREATE TABLE suppliers (
# id int auto_increment PRIMARY KEY
# ) ENGINE=InnoDB DEFAULT CHARSET=utf8
#
# ====== Rename the primary key column
#
# create_table(:objects, primary_key: 'guid') do |t|
# t.column :name, :string, limit: 80
# end
#
# generates:
#
# CREATE TABLE objects (
# guid int auto_increment PRIMARY KEY,
# name varchar(80)
# )
#
# ====== Change the primary key column type
#
# create_table(:tags, id: :string) do |t|
# t.column :label, :string
# end
#
# generates:
#
# CREATE TABLE tags (
# id varchar PRIMARY KEY,
# label varchar
# )
#
# ====== Create a composite primary key
#
# create_table(:orders, primary_key: [:product_id, :client_id]) do |t|
# t.belongs_to :product
# t.belongs_to :client
# end
#
# generates:
#
# CREATE TABLE order (
# product_id integer NOT NULL,
# client_id integer NOT NULL
# );
#
# ALTER TABLE ONLY "orders"
# ADD CONSTRAINT orders_pkey PRIMARY KEY (product_id, client_id);
#
# ====== Do not add a primary key column
#
# create_table(:categories_suppliers, id: false) do |t|
# t.column :category_id, :integer
# t.column :supplier_id, :integer
# end
#
# generates:
#
# CREATE TABLE categories_suppliers (
# category_id int,
# supplier_id int
# )
#
# ====== Create a temporary table based on a query
#
# create_table(:long_query, temporary: true,
# as: "SELECT * FROM orders INNER JOIN line_items ON order_id=orders.id")
#
# generates:
#
# CREATE TEMPORARY TABLE long_query AS
# SELECT * FROM orders INNER JOIN line_items ON order_id=orders.id
#
# See also TableDefinition#column for details on how to create columns.
def create_table(table_name, comment: nil, **options)
  # Build the table definition object the schema creation visitor consumes.
  td = create_table_definition table_name, options[:temporary], options[:options], options[:as], comment: comment
  # Add a primary key unless disabled (id: false) or the table is CREATE…AS.
  if options[:id] != false && !options[:as]
    pk = options.fetch(:primary_key) do
      Base.get_primary_key table_name.to_s.singularize
    end
    if pk.is_a?(Array)
      # Composite primary key.
      td.primary_keys pk
    else
      # :id may carry a column type (e.g. :bigint); defaults to :primary_key.
      td.primary_key pk, options.fetch(:id, :primary_key), options
    end
  end
  # Let the caller's block add columns/indexes to the definition.
  yield td if block_given?
  if options[:force]
    # force: true drops any pre-existing table before creating.
    drop_table(table_name, **options, if_exists: true)
  end
  result = execute schema_creation.accept td
  # Adapters that can't inline indexes in CREATE TABLE add them afterwards.
  unless supports_indexes_in_create?
    td.indexes.each do |column_name, index_options|
      add_index(table_name, column_name, index_options)
    end
  end
  # Likewise, comments may need separate statements after creation.
  if supports_comments? && !supports_comments_in_create?
    change_table_comment(table_name, comment) if comment.present?
    td.columns.each do |column|
      change_column_comment(table_name, column.name, column.comment) if column.comment.present?
    end
  end
  result
end
# Creates a new join table with the name created using the lexical order of the first two
# arguments. These arguments can be a String or a Symbol.
#
# # Creates a table called 'assemblies_parts' with no id.
# create_join_table(:assemblies, :parts)
#
# You can pass an +options+ hash which can include the following keys:
# [<tt>:table_name</tt>]
# Sets the table name, overriding the default.
# [<tt>:column_options</tt>]
# Any extra options you want appended to the columns definition.
# [<tt>:options</tt>]
# Any extra options you want appended to the table definition.
# [<tt>:temporary</tt>]
# Make a temporary table.
# [<tt>:force</tt>]
# Set to true to drop the table before creating it.
# Defaults to false.
#
# Note that #create_join_table does not create any indices by default; you can use
# its block form to do so yourself:
#
# create_join_table :products, :categories do |t|
# t.index :product_id
# t.index :category_id
# end
#
# ====== Add a backend specific option to the generated SQL (MySQL)
#
# create_join_table(:assemblies, :parts, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8')
#
# generates:
#
# CREATE TABLE assemblies_parts (
# assembly_id int NOT NULL,
# part_id int NOT NULL,
# ) ENGINE=InnoDB DEFAULT CHARSET=utf8
#
def create_join_table(table_1, table_2, column_options: {}, **options)
  # Table name derives from the two names' lexical order unless overridden.
  join_table_name = find_join_table_name(table_1, table_2, options)
  # Join columns are NOT NULL and unindexed unless the caller says otherwise.
  column_options.reverse_merge!(null: false, index: false)
  t1_ref, t2_ref = [table_1, table_2].map { |t| t.to_s.singularize }
  # Join tables never get an id column (id: false is forced).
  create_table(join_table_name, options.merge!(id: false)) do |td|
    td.references t1_ref, column_options
    td.references t2_ref, column_options
    yield td if block_given?
  end
end
# Drops the join table specified by the given arguments.
# See #create_join_table for details.
#
# Although this command ignores the block if one is given, it can be helpful
# to provide one in a migration's +change+ method so it can be reverted.
# In that case, the block will be used by #create_join_table.
# Drops the join table named by the two table arguments (see
# #create_join_table for naming). +options+ may override via :table_name.
def drop_join_table(table_1, table_2, options = {})
  drop_table(find_join_table_name(table_1, table_2, options))
end
# A block for changing columns in +table+.
#
# # change_table() yields a Table instance
# change_table(:suppliers) do |t|
# t.column :name, :string, limit: 60
# # Other column alterations here
# end
#
# The +options+ hash can include the following keys:
# [<tt>:bulk</tt>]
# Set this to true to make this a bulk alter query, such as
#
# ALTER TABLE `users` ADD COLUMN age INT, ADD COLUMN birthdate DATETIME ...
#
# Defaults to false.
#
# Only supported on the MySQL adapter, ignored elsewhere.
#
# ====== Add a column
#
# change_table(:suppliers) do |t|
# t.column :name, :string, limit: 60
# end
#
# ====== Add 2 integer columns
#
# change_table(:suppliers) do |t|
# t.integer :width, :height, null: false, default: 0
# end
#
# ====== Add created_at/updated_at columns
#
# change_table(:suppliers) do |t|
# t.timestamps
# end
#
# ====== Add a foreign key column
#
# change_table(:suppliers) do |t|
# t.references :company
# end
#
# Creates a <tt>company_id(integer)</tt> column.
#
# ====== Add a polymorphic foreign key column
#
# change_table(:suppliers) do |t|
# t.belongs_to :company, polymorphic: true
# end
#
# Creates <tt>company_type(varchar)</tt> and <tt>company_id(integer)</tt> columns.
#
# ====== Remove a column
#
# change_table(:suppliers) do |t|
# t.remove :company
# end
#
# ====== Remove several columns
#
# change_table(:suppliers) do |t|
# t.remove :company_id
# t.remove :width, :height
# end
#
# ====== Remove an index
#
# change_table(:suppliers) do |t|
# t.remove_index :company_id
# end
#
# See also Table for details on all of the various column transformations.
def change_table(table_name, options = {})
  if supports_bulk_alter? && options[:bulk]
    # Record the block's operations and replay them as one bulk ALTER TABLE.
    recorder = ActiveRecord::Migration::CommandRecorder.new(self)
    yield update_table_definition(table_name, recorder)
    bulk_change_table(table_name, recorder.commands)
  else
    # Execute each operation immediately against this connection.
    yield update_table_definition(table_name, self)
  end
end
# Renames a table.
#
# rename_table('octopuses', 'octopi')
#
# Abstract: renaming is adapter-specific and must be implemented there.
def rename_table(table_name, new_name)
  raise NotImplementedError, "rename_table is not implemented"
end
# Drops a table from the database.
#
# [<tt>:force</tt>]
# Set to +:cascade+ to drop dependent objects as well.
# Defaults to false.
# [<tt>:if_exists</tt>]
# Set to +true+ to only drop the table if it exists.
# Defaults to false.
#
# Although this command ignores most +options+ and the block if one is given,
# it can be helpful to provide these in a migration's +change+ method so it can be reverted.
# In that case, +options+ and the block will be used by #create_table.
def drop_table(table_name, options = {})
  # Note: only :if_exists is honored here; :force is handled by adapters.
  execute "DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}"
end
# Add a new +type+ column named +column_name+ to +table_name+.
#
# The +type+ parameter is normally one of the migrations native types,
# which is one of the following:
# <tt>:primary_key</tt>, <tt>:string</tt>, <tt>:text</tt>,
# <tt>:integer</tt>, <tt>:bigint</tt>, <tt>:float</tt>, <tt>:decimal</tt>, <tt>:numeric</tt>,
# <tt>:datetime</tt>, <tt>:time</tt>, <tt>:date</tt>,
# <tt>:binary</tt>, <tt>:boolean</tt>.
#
# You may use a type not in this list as long as it is supported by your
# database (for example, "polygon" in MySQL), but this will not be database
# agnostic and should usually be avoided.
#
# Available options are (none of these exists by default):
# * <tt>:limit</tt> -
# Requests a maximum column length. This is the number of characters for a <tt>:string</tt> column
# and number of bytes for <tt>:text</tt>, <tt>:binary</tt> and <tt>:integer</tt> columns.
# This option is ignored by some backends.
# * <tt>:default</tt> -
# The column's default value. Use +nil+ for +NULL+.
# * <tt>:null</tt> -
# Allows or disallows +NULL+ values in the column.
# * <tt>:precision</tt> -
# Specifies the precision for the <tt>:decimal</tt> and <tt>:numeric</tt> columns.
# * <tt>:scale</tt> -
# Specifies the scale for the <tt>:decimal</tt> and <tt>:numeric</tt> columns.
# * <tt>:comment</tt> -
# Specifies the comment for the column. This option is ignored by some backends.
#
# Note: The precision is the total number of significant digits,
# and the scale is the number of digits that can be stored following
# the decimal point. For example, the number 123.45 has a precision of 5
# and a scale of 2. A decimal with a precision of 5 and a scale of 2 can
# range from -999.99 to 999.99.
#
# Please be aware of different RDBMS implementations behavior with
# <tt>:decimal</tt> columns:
# * The SQL standard says the default scale should be 0, <tt>:scale</tt> <=
# <tt>:precision</tt>, and makes no comments about the requirements of
# <tt>:precision</tt>.
# * MySQL: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..30].
# Default is (10,0).
# * PostgreSQL: <tt>:precision</tt> [1..infinity],
# <tt>:scale</tt> [0..infinity]. No default.
# * SQLite3: No restrictions on <tt>:precision</tt> and <tt>:scale</tt>,
# but the maximum supported <tt>:precision</tt> is 16. No default.
# * Oracle: <tt>:precision</tt> [1..38], <tt>:scale</tt> [-84..127].
# Default is (38,0).
# * DB2: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..62].
# Default unknown.
# * SqlServer: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
# Default (38,0).
#
# == Examples
#
# add_column(:users, :picture, :binary, limit: 2.megabytes)
# # ALTER TABLE "users" ADD "picture" blob(2097152)
#
# add_column(:articles, :status, :string, limit: 20, default: 'draft', null: false)
# # ALTER TABLE "articles" ADD "status" varchar(20) DEFAULT 'draft' NOT NULL
#
# add_column(:answers, :bill_gates_money, :decimal, precision: 15, scale: 2)
# # ALTER TABLE "answers" ADD "bill_gates_money" decimal(15,2)
#
# add_column(:measurements, :sensor_reading, :decimal, precision: 30, scale: 20)
# # ALTER TABLE "measurements" ADD "sensor_reading" decimal(30,20)
#
# # While :scale defaults to zero on most databases, it
# # probably wouldn't hurt to include it.
# add_column(:measurements, :huge_integer, :decimal, precision: 30)
# # ALTER TABLE "measurements" ADD "huge_integer" decimal(30)
#
# # Defines a column that stores an array of a type.
# add_column(:users, :skills, :text, array: true)
# # ALTER TABLE "users" ADD "skills" text[]
#
# # Defines a column with a database-specific type.
# add_column(:shapes, :triangle, 'polygon')
# # ALTER TABLE "shapes" ADD "triangle" polygon
def add_column(table_name, column_name, type, options = {})
  # Build an ALTER TABLE definition and hand it to the schema SQL visitor.
  at = create_alter_table table_name
  at.add_column(column_name, type, options)
  execute schema_creation.accept at
end
# Removes the given columns from the table definition.
#
# remove_columns(:suppliers, :qualification, :experience)
#
# Removes each of the given columns from +table_name+ (one #remove_column
# call per name). Raises ArgumentError when called without any column names.
def remove_columns(table_name, *column_names)
  if column_names.empty?
    raise ArgumentError.new("You must specify at least one column name. Example: remove_columns(:people, :first_name)")
  end
  column_names.each { |column_name| remove_column(table_name, column_name) }
end
# Removes the column from the table definition.
#
# remove_column(:suppliers, :qualification)
#
# The +type+ and +options+ parameters will be ignored if present. It can be helpful
# to provide these in a migration's +change+ method so it can be reverted.
# In that case, +type+ and +options+ will be used by #add_column.
# +type+/+options+ are accepted only so migrations can be reverted
# (#add_column uses them); they are ignored here.
def remove_column(table_name, column_name, type = nil, options = {})
  execute "ALTER TABLE #{quote_table_name(table_name)} DROP #{quote_column_name(column_name)}"
end
# Changes the column's definition according to the new options.
# See TableDefinition#column for details of the options you can use.
#
# change_column(:suppliers, :name, :string, limit: 80)
# change_column(:accounts, :description, :text)
#
# Abstract: column redefinition is adapter-specific.
def change_column(table_name, column_name, type, options = {})
  raise NotImplementedError, "change_column is not implemented"
end
# Sets a new default value for a column:
#
# change_column_default(:suppliers, :qualification, 'new')
# change_column_default(:accounts, :authorized, 1)
#
# Setting the default to +nil+ effectively drops the default:
#
# change_column_default(:users, :email, nil)
#
# Passing a hash containing +:from+ and +:to+ will make this change
# reversible in migration:
#
# change_column_default(:posts, :state, from: nil, to: "draft")
#
# Abstract: default handling is adapter-specific.
def change_column_default(table_name, column_name, default_or_changes)
  raise NotImplementedError, "change_column_default is not implemented"
end
# Sets or removes a <tt>NOT NULL</tt> constraint on a column. The +null+ flag
# indicates whether the value can be +NULL+. For example
#
# change_column_null(:users, :nickname, false)
#
# says nicknames cannot be +NULL+ (adds the constraint), whereas
#
# change_column_null(:users, :nickname, true)
#
# allows them to be +NULL+ (drops the constraint).
#
# The method accepts an optional fourth argument to replace existing
# <tt>NULL</tt>s with some other value. Use that one when enabling the
# constraint if needed, since otherwise those rows would not be valid.
#
# Please note the fourth argument does not set a column's default.
# Abstract: NOT NULL constraint handling is adapter-specific.
def change_column_null(table_name, column_name, null, default = nil)
  raise NotImplementedError, "change_column_null is not implemented"
end
# Renames a column.
#
# rename_column(:suppliers, :description, :name)
#
# Abstract: column renaming is adapter-specific.
def rename_column(table_name, column_name, new_column_name)
  raise NotImplementedError, "rename_column is not implemented"
end
# Adds a new index to the table. +column_name+ can be a single Symbol, or
# an Array of Symbols.
#
# The index will be named after the table and the column name(s), unless
# you pass <tt>:name</tt> as an option.
#
# ====== Creating a simple index
#
# add_index(:suppliers, :name)
#
# generates:
#
# CREATE INDEX suppliers_name_index ON suppliers(name)
#
# ====== Creating a unique index
#
# add_index(:accounts, [:branch_id, :party_id], unique: true)
#
# generates:
#
# CREATE UNIQUE INDEX accounts_branch_id_party_id_index ON accounts(branch_id, party_id)
#
# ====== Creating a named index
#
# add_index(:accounts, [:branch_id, :party_id], unique: true, name: 'by_branch_party')
#
# generates:
#
# CREATE UNIQUE INDEX by_branch_party ON accounts(branch_id, party_id)
#
# ====== Creating an index with specific key length
#
# add_index(:accounts, :name, name: 'by_name', length: 10)
#
# generates:
#
# CREATE INDEX by_name ON accounts(name(10))
#
# ====== Creating an index with specific key lengths for multiple keys
#
# add_index(:accounts, [:name, :surname], name: 'by_name_surname', length: {name: 10, surname: 15})
#
# generates:
#
# CREATE INDEX by_name_surname ON accounts(name(10), surname(15))
#
# Note: SQLite doesn't support index length.
#
# ====== Creating an index with a sort order (desc or asc, asc is the default)
#
# add_index(:accounts, [:branch_id, :party_id, :surname], order: {branch_id: :desc, party_id: :asc})
#
# generates:
#
# CREATE INDEX by_branch_desc_party ON accounts(branch_id DESC, party_id ASC, surname)
#
# Note: MySQL doesn't yet support index order (it accepts the syntax but ignores it).
#
# ====== Creating a partial index
#
# add_index(:accounts, [:branch_id, :party_id], unique: true, where: "active")
#
# generates:
#
# CREATE UNIQUE INDEX index_accounts_on_branch_id_and_party_id ON accounts(branch_id, party_id) WHERE active
#
# Note: Partial indexes are only supported for PostgreSQL and SQLite 3.8.0+.
#
# ====== Creating an index with a specific method
#
# add_index(:developers, :name, using: 'btree')
#
# generates:
#
# CREATE INDEX index_developers_on_name ON developers USING btree (name) -- PostgreSQL
# CREATE INDEX index_developers_on_name USING btree ON developers (name) -- MySQL
#
# Note: only supported by PostgreSQL and MySQL
#
# ====== Creating an index with a specific type
#
# add_index(:developers, :name, type: :fulltext)
#
# generates:
#
# CREATE FULLTEXT INDEX index_developers_on_name ON developers (name) -- MySQL
#
# Note: only supported by MySQL.
def add_index(table_name, column_name, options = {})
  # add_index_options normalizes the name/type/columns/option SQL fragments.
  index_name, index_type, index_columns, index_options = add_index_options(table_name, column_name, options)
  execute "CREATE #{index_type} INDEX #{quote_column_name(index_name)} ON #{quote_table_name(table_name)} (#{index_columns})#{index_options}"
end
# Removes the given index from the table.
#
# Removes the index on +branch_id+ in the +accounts+ table if exactly one such index exists.
#
# remove_index :accounts, :branch_id
#
# Removes the index on +branch_id+ in the +accounts+ table if exactly one such index exists.
#
# remove_index :accounts, column: :branch_id
#
# Removes the index on +branch_id+ and +party_id+ in the +accounts+ table if exactly one such index exists.
#
# remove_index :accounts, column: [:branch_id, :party_id]
#
# Removes the index named +by_branch_party+ in the +accounts+ table.
#
# remove_index :accounts, name: :by_branch_party
#
def remove_index(table_name, options = {})
  # Resolve the concrete index name from :name / :column options.
  index_name = index_name_for_remove(table_name, options)
  execute "DROP INDEX #{quote_column_name(index_name)} ON #{quote_table_name(table_name)}"
end
# Renames an index.
#
# Rename the +index_people_on_last_name+ index to +index_users_on_last_name+:
#
# rename_index :people, 'index_people_on_last_name', 'index_users_on_last_name'
#
def rename_index(table_name, old_name, new_name)
  validate_index_length!(table_name, new_name)
  # this is a naive implementation; some DBs may support this more efficiently (PostgreSQL, for instance)
  old_index_def = indexes(table_name).detect { |i| i.name == old_name }
  # Silently no-op when the old index does not exist.
  return unless old_index_def
  # Recreate under the new name first, then drop the old one.
  add_index(table_name, old_index_def.columns, name: new_name, unique: old_index_def.unique)
  remove_index(table_name, name: old_name)
end
# Derives an index name. With a Hash, :column builds the conventional
# "index_<table>_on_<cols joined by _and_>" name and :name is used verbatim;
# anything else is normalized via index_name_options and retried.
# Raises ArgumentError when a Hash supplies neither :column nor :name.
def index_name(table_name, options) #:nodoc:
  if Hash === options
    if options[:column]
      # Array() accepts a single name or a list; join is the idiomatic
      # spelling of the original `* '_and_'`.
      "index_#{table_name}_on_#{Array(options[:column]).join('_and_')}"
    elsif options[:name]
      options[:name]
    else
      raise ArgumentError, "You must specify the index name"
    end
  else
    index_name(table_name, index_name_options(options))
  end
end
# Verifies the existence of an index with a given name.
def index_name_exists?(table_name, index_name, default = nil)
  # The +default+ argument is vestigial and warns when supplied.
  unless default.nil?
    ActiveSupport::Deprecation.warn(<<-MSG.squish)
      Passing default to #index_name_exists? is deprecated without replacement.
    MSG
  end
  index_name = index_name.to_s
  # Returns the matching IndexDefinition (truthy) or nil.
  indexes(table_name).detect { |i| i.name == index_name }
end
# Adds a reference. The reference column is an integer by default,
# the <tt>:type</tt> option can be used to specify a different type.
# Optionally adds a +_type+ column, if <tt>:polymorphic</tt> option is provided.
# #add_reference and #add_belongs_to are acceptable.
#
# The +options+ hash can include the following keys:
# [<tt>:type</tt>]
# The reference column type. Defaults to +:integer+.
# [<tt>:index</tt>]
# Add an appropriate index. Defaults to true.
# See #add_index for usage of this option.
# [<tt>:foreign_key</tt>]
# Add an appropriate foreign key constraint. Defaults to false.
# [<tt>:polymorphic</tt>]
# Whether an additional +_type+ column should be added. Defaults to false.
# [<tt>:null</tt>]
# Whether the column allows nulls. Defaults to true.
#
# ====== Create a user_id integer column
#
# add_reference(:products, :user)
#
# ====== Create a user_id string column
#
# add_reference(:products, :user, type: :string)
#
# ====== Create supplier_id, supplier_type columns and appropriate index
#
# add_reference(:products, :supplier, polymorphic: true, index: true)
#
# ====== Create a supplier_id column with a unique index
#
# add_reference(:products, :supplier, index: { unique: true })
#
# ====== Create a supplier_id column with a named index
#
# add_reference(:products, :supplier, index: { name: "my_supplier_index" })
#
# ====== Create a supplier_id column and appropriate foreign key
#
# add_reference(:products, :supplier, foreign_key: true)
#
# ====== Create a supplier_id column and a foreign key to the firms table
#
# add_reference(:products, :supplier, foreign_key: {to_table: :firms})
#
def add_reference(table_name, ref_name, **options)
  # ReferenceDefinition encapsulates column/index/foreign-key creation.
  ReferenceDefinition.new(ref_name, options).add_to(update_table_definition(table_name, self))
end
alias :add_belongs_to :add_reference
# Removes the reference(s). Also removes a +type+ column if one exists.
# #remove_reference and #remove_belongs_to are acceptable.
#
# ====== Remove the reference
#
# remove_reference(:products, :user, index: true)
#
# ====== Remove polymorphic reference
#
# remove_reference(:products, :supplier, polymorphic: true)
#
# ====== Remove the reference with a foreign key
#
# remove_reference(:products, :user, index: true, foreign_key: true)
#
def remove_reference(table_name, ref_name, foreign_key: false, polymorphic: false, **options)
  if foreign_key
    # Target table defaults to the pluralized reference name.
    reference_name = Base.pluralize_table_names ? ref_name.to_s.pluralize : ref_name
    if foreign_key.is_a?(Hash)
      # foreign_key: {...} passes options straight through.
      foreign_key_options = foreign_key
    else
      # foreign_key: true infers the target table.
      foreign_key_options = { to_table: reference_name }
    end
    foreign_key_options[:column] ||= "#{ref_name}_id"
    # The FK constraint must be dropped before its column can be removed.
    remove_foreign_key(table_name, **foreign_key_options)
  end
  remove_column(table_name, "#{ref_name}_id")
  remove_column(table_name, "#{ref_name}_type") if polymorphic
end
alias :remove_belongs_to :remove_reference
# Returns an array of foreign keys for the given table.
# The foreign keys are represented as ForeignKeyDefinition objects.
#
# Abstract hook: concrete adapters that support foreign keys override
# this; the base implementation always raises.
def foreign_keys(table_name)
  raise NotImplementedError, "foreign_keys is not implemented"
end
# Adds a new foreign key. +from_table+ is the table with the key column,
# +to_table+ contains the referenced primary key.
#
# The constraint is named <tt>fk_rails_<identifier></tt> unless a custom
# <tt>:name</tt> is given; +identifier+ is a deterministic 10-character
# digest of +from_table+ and +column+.
#
# ====== Creating a simple foreign key
#
#   add_foreign_key :articles, :authors
#
# ====== Creating a foreign key on a specific column
#
#   add_foreign_key :articles, :users, column: :author_id, primary_key: "lng_id"
#
# ====== Creating a cascading foreign key
#
#   add_foreign_key :articles, :authors, on_delete: :cascade
#
# The +options+ hash can include the following keys:
# [<tt>:column</tt>]
#   The foreign key column name on +from_table+. Defaults to <tt>to_table.singularize + "_id"</tt>
# [<tt>:primary_key</tt>]
#   The primary key column name on +to_table+. Defaults to +id+.
# [<tt>:name</tt>]
#   The constraint name. Defaults to <tt>fk_rails_<identifier></tt>.
# [<tt>:on_delete</tt>]
#   Action that happens <tt>ON DELETE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
# [<tt>:on_update</tt>]
#   Action that happens <tt>ON UPDATE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
def add_foreign_key(from_table, to_table, options = {})
  # Silently no-op on adapters without foreign key support.
  return unless supports_foreign_keys?

  fk_options = foreign_key_options(from_table, to_table, options)
  alter = create_alter_table from_table
  alter.add_foreign_key to_table, fk_options
  execute schema_creation.accept(alter)
end
# Removes the given foreign key from the table. Any option parameters provided
# will be used to re-add the foreign key in case of a migration rollback.
# It is recommended that you provide any options used when creating the foreign
# key so that the migration can be reverted properly.
#
#   remove_foreign_key :accounts, :branches          # by target table
#   remove_foreign_key :accounts, column: :owner_id  # by column
#   remove_foreign_key :accounts, name: :special_fk_name  # by constraint name
#
# The +options+ hash accepts the same keys as SchemaStatements#add_foreign_key.
def remove_foreign_key(from_table, options_or_to_table = {})
  # Silently no-op on adapters without foreign key support.
  return unless supports_foreign_keys?

  # foreign_key_for! raises ArgumentError when no matching key exists.
  constraint_name = foreign_key_for!(from_table, options_or_to_table).name
  alter = create_alter_table from_table
  alter.drop_foreign_key constraint_name
  execute schema_creation.accept(alter)
end
# Checks to see if a foreign key exists on a table for a given foreign key definition.
#
#   foreign_key_exists?(:accounts, :branches)          # by target table
#   foreign_key_exists?(:accounts, column: :owner_id)  # by column
#   foreign_key_exists?(:accounts, name: "special_fk_name")  # by name
#
def foreign_key_exists?(from_table, options_or_to_table = {})
  # foreign_key_for returns nil when no key matches (or FKs are unsupported).
  !foreign_key_for(from_table, options_or_to_table).nil?
end
# Derives the conventional foreign key column name for +table_name+,
# stripping any configured table name prefix/suffix first
# (e.g. "admin_users" with prefix "admin_" => "user_id").
def foreign_key_column_for(table_name) # :nodoc:
  raw_name = table_name.to_s
  match = /#{Base.table_name_prefix}(.+)#{Base.table_name_suffix}/.match(raw_name)
  base_name = match ? match[1] : raw_name
  "#{base_name.singularize}_id"
end
# Normalizes foreign key +options+, filling in the default :column and
# :name without mutating the caller's hash.
def foreign_key_options(from_table, to_table, options) # :nodoc:
  normalized = options.dup
  normalized[:column] ||= foreign_key_column_for(to_table)
  normalized[:name] ||= foreign_key_name(from_table, normalized)
  normalized
end
# Returns the SQL that inserts every known migration version into the
# schema_migrations table, or nil when there are no versions.
def dump_schema_information #:nodoc:
  versions = ActiveRecord::SchemaMigration.all_versions
  insert_versions_sql(versions) if versions.any?
end

# Creates the schema_migrations table. Deprecated below in favor of
# calling ActiveRecord::SchemaMigration.create_table directly.
def initialize_schema_migrations_table # :nodoc:
  ActiveRecord::SchemaMigration.create_table
end
deprecate :initialize_schema_migrations_table

# Creates the internal metadata table. Deprecated below in favor of
# calling ActiveRecord::InternalMetadata.create_table directly.
def initialize_internal_metadata_table # :nodoc:
  ActiveRecord::InternalMetadata.create_table
end
deprecate :initialize_internal_metadata_table

# Column options applied to string primary keys of internal tables;
# adapters may override (e.g. to add a limit or collation).
def internal_string_options_for_primary_key # :nodoc:
  { primary_key: true }
end
# Marks the schema as migrated up to +version+: records +version+ itself
# plus every on-disk migration older than it that is not yet recorded.
# Raises if two migration files share the same version number.
def assume_migrated_upto_version(version, migrations_paths)
  migrations_paths = Array(migrations_paths)
  version = version.to_i
  sm_table = quote_table_name(ActiveRecord::SchemaMigration.table_name)

  # Versions already recorded in the schema_migrations table.
  migrated = ActiveRecord::SchemaMigration.all_versions.map(&:to_i)
  # Versions present as migration files on disk.
  versions = ActiveRecord::Migrator.migration_files(migrations_paths).map do |file|
    ActiveRecord::Migrator.parse_migration_filename(file).first.to_i
  end

  # Record the target version itself if it is missing.
  unless migrated.include?(version)
    execute "INSERT INTO #{sm_table} (version) VALUES (#{quote(version)})"
  end

  # Back-fill every older on-disk version that was never recorded.
  inserting = (versions - migrated).select { |v| v < version }
  if inserting.any?
    if (duplicate = inserting.detect { |v| inserting.count(v) > 1 })
      raise "Duplicate migration #{duplicate}. Please renumber your migrations to resolve the conflict."
    end
    # Prefer one multi-row INSERT when the adapter supports it.
    if supports_multi_insert?
      execute insert_versions_sql(inserting)
    else
      inserting.each do |v|
        execute insert_versions_sql(v)
      end
    end
  end
end
# Maps an abstract column +type+ (e.g. :string, :decimal) to the adapter's
# native SQL type string, applying :limit, :precision and :scale where the
# type supports them. Unknown types fall through as their literal name.
def type_to_sql(type, limit: nil, precision: nil, scale: nil, **) # :nodoc:
  type = type.to_sym if type
  if native = native_database_types[type]
    # Native entry is either a bare string or a Hash with :name (+defaults).
    column_type_sql = (native.is_a?(Hash) ? native[:name] : native).dup
    if type == :decimal # ignore limit, use precision and scale
      scale ||= native[:scale]
      # NOTE: `precision ||= ...` inside the condition deliberately both
      # assigns the default and tests for presence.
      if precision ||= native[:precision]
        if scale
          column_type_sql << "(#{precision},#{scale})"
        else
          column_type_sql << "(#{precision})"
        end
      elsif scale
        raise ArgumentError, "Error adding decimal column: precision cannot be empty if scale is specified"
      end
    elsif [:datetime, :timestamp, :time, :interval].include?(type) && precision ||= native[:precision]
      # Sub-second precision for temporal types is constrained to 0..6.
      if (0..6) === precision
        column_type_sql << "(#{precision})"
      else
        raise(ActiveRecordError, "No #{native[:name]} type has precision of #{precision}. The allowed range of precision is from 0 to 6")
      end
    elsif (type != :primary_key) && (limit ||= native.is_a?(Hash) && native[:limit])
      # Primary keys never take a limit; other types fall back to the native default.
      column_type_sql << "(#{limit})"
    end
    column_type_sql
  else
    # Unknown type: emit it verbatim and let the database complain if invalid.
    type.to_s
  end
end
# Given a set of columns and an ORDER BY clause, returns the columns for a SELECT DISTINCT.
# PostgreSQL, MySQL, and Oracle override this for custom DISTINCT syntax - they
# require the order columns appear in the SELECT.
#
#   columns_for_distinct("posts.id", ["posts.created_at desc"])
#
# Base implementation: the order columns are irrelevant, so +columns+ is
# returned unchanged.
def columns_for_distinct(columns, orders) # :nodoc:
  columns
end
# Adds timestamps (+created_at+ and +updated_at+) columns to +table_name+.
# Additional options (like +:null+) are forwarded to #add_column.
# Columns default to NOT NULL unless the caller says otherwise.
#
#   add_timestamps(:suppliers, null: true)
#
def add_timestamps(table_name, options = {})
  # Fix: dup before writing so the caller's options hash is not mutated
  # (previously `options[:null] = false` leaked back to the caller).
  options = options.dup
  options[:null] = false if options[:null].nil?

  add_column table_name, :created_at, :datetime, options
  add_column table_name, :updated_at, :datetime, options
end
# Removes the timestamp columns (+created_at+ and +updated_at+) from the table definition.
#
#   remove_timestamps(:suppliers)
#
def remove_timestamps(table_name, options = {})
  # Drop updated_at first, then created_at, matching the historical order.
  [:updated_at, :created_at].each do |column|
    remove_column table_name, column
  end
end
# Wraps +table_name+ in a Table object bound to connection +base+,
# the DSL object used by change_table-style operations.
def update_table_definition(table_name, base) #:nodoc:
  Table.new(table_name, base)
end
# Validates index +options+ and resolves everything needed to build a
# CREATE INDEX statement. Returns the tuple
# [index_name, index_type, index_columns, index_options, algorithm, using, comment].
def add_index_options(table_name, column_name, comment: nil, **options) # :nodoc:
  column_names = index_column_names(column_name)

  options.assert_valid_keys(:unique, :order, :name, :where, :length, :internal, :using, :algorithm, :type)

  # Explicit :type wins; otherwise :unique selects "UNIQUE".
  index_type = options[:type].to_s if options.key?(:type)
  index_type ||= options[:unique] ? "UNIQUE" : ""
  # Explicit :name wins; otherwise derive one from table and columns.
  index_name = options[:name].to_s if options.key?(:name)
  index_name ||= index_name(table_name, column_names)

  if options.key?(:algorithm)
    # Unknown algorithms fail fast with the list of valid ones.
    algorithm = index_algorithms.fetch(options[:algorithm]) {
      raise ArgumentError.new("Algorithm must be one of the following: #{index_algorithms.keys.map(&:inspect).join(', ')}")
    }
  end

  using = "USING #{options[:using]}" if options[:using].present?

  # :where becomes a partial-index predicate only where supported.
  if supports_partial_index?
    index_options = options[:where] ? " WHERE #{options[:where]}" : ""
  end

  validate_index_length!(table_name, index_name, options.fetch(:internal, false))

  if data_source_exists?(table_name) && index_name_exists?(table_name, index_name)
    raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' already exists"
  end

  index_columns = quoted_columns_for_index(column_names, options).join(", ")

  [index_name, index_type, index_columns, index_options, algorithm, using, comment]
end
# Reports whether +options+ carries a usable :default. An explicit
# `null: false` combined with a nil default does not count.
def options_include_default?(options)
  return false unless options.include?(:default)

  nil_default_on_not_null = options[:null] == false && options[:default].nil?
  !nil_default_on_not_null
end
# Changes the comment for a table or removes it if +nil+.
#
# Abstract hook: only adapters with comment support (e.g. PostgreSQL,
# MySQL) override this.
def change_table_comment(table_name, comment)
  raise NotImplementedError, "#{self.class} does not support changing table comments"
end

# Changes the comment for a column or removes it if +nil+.
#
# Abstract hook, see #change_table_comment.
def change_column_comment(table_name, column_name, comment) #:nodoc:
  raise NotImplementedError, "#{self.class} does not support changing column comments"
end

# Builds the SchemaDumper used by rake db:schema:dump for this connection.
def create_schema_dumper(options) # :nodoc:
  SchemaDumper.create(self, options)
end

private

# Option keys that #add_column-style methods recognize as column options.
def column_options_keys
  [:limit, :precision, :scale, :default, :null, :collation, :comment]
end
# Appends sort direction (" ASC"/" DESC") to the already-quoted column
# strings in +quoted_columns+, driven by options[:order]. A Hash gives
# per-column directions; a String applies one direction to every column.
# Mutates the hash values in place and returns the hash.
def add_index_sort_order(quoted_columns, **options)
  direction = options[:order]
  case direction
  when Hash
    per_column = direction.symbolize_keys
    quoted_columns.each do |name, column|
      column << " #{per_column[name].upcase}" if per_column[name].present?
    end
  when String
    quoted_columns.each do |_name, column|
      column << " #{direction.upcase}" if direction.present?
    end
  end
  quoted_columns
end
# Overridden by the MySQL adapter for supporting index lengths
#
# Applies per-column index options (currently sort order) to the
# quoted column strings, when the adapter supports them.
def add_options_for_index_columns(quoted_columns, **options)
  if supports_index_sort_order?
    quoted_columns = add_index_sort_order(quoted_columns, options)
  end
  quoted_columns
end
# Quotes each index column name and applies per-column options.
# A raw String (an expression such as "lower(name)") is passed through
# unquoted as a single-element array.
def quoted_columns_for_index(column_names, **options)
  return [column_names] if column_names.is_a?(String)

  quoted = column_names.each_with_object({}) do |name, acc|
    # dup: downstream helpers append to these strings in place.
    acc[name.to_sym] = quote_column_name(name).dup
  end
  add_options_for_index_columns(quoted, options).values
end
# Resolves which index name remove_index should drop, given either a
# name, a column list, or an options hash. Raises ArgumentError when the
# request is ambiguous (multiple matches) or matches nothing.
def index_name_for_remove(table_name, options = {})
  # Fast path: an explicit, unambiguous :name needs no lookup.
  return options[:name] if can_remove_index_by_name?(options)

  checks = []

  if options.is_a?(Hash)
    checks << lambda { |i| i.name == options[:name].to_s } if options.key?(:name)
    column_names = index_column_names(options[:column])
  else
    # Bare column(s) were passed instead of an options hash.
    column_names = index_column_names(options)
  end

  if column_names.present?
    # Compare via generated names so expression indexes normalize the same way.
    checks << lambda { |i| index_name(table_name, i.columns) == index_name(table_name, column_names) }
  end

  raise ArgumentError, "No name or columns specified" if checks.none?

  matching_indexes = indexes(table_name).select { |i| checks.all? { |check| check[i] } }

  if matching_indexes.count > 1
    raise ArgumentError, "Multiple indexes found on #{table_name} columns #{column_names}. " \
                         "Specify an index name from #{matching_indexes.map(&:name).join(', ')}"
  elsif matching_indexes.none?
    raise ArgumentError, "No indexes found on #{table_name} with the options provided."
  else
    matching_indexes.first.name
  end
end
# After a table rename, renames any index whose name was auto-generated
# from the old table name so it matches the new table name. Indexes with
# custom names are left alone.
def rename_table_indexes(table_name, new_name)
  indexes(new_name).each do |index|
    generated_index_name = index_name(table_name, column: index.columns)
    # Only auto-generated names are rewritten; a mismatch means the user
    # named the index explicitly.
    if generated_index_name == index.name
      rename_index new_name, generated_index_name, index_name(new_name, column: index.columns)
    end
  end
end

# After a column rename, renames any index whose name was auto-generated
# from the old column name. +indexes+ already reflects the new column
# name, so the old name is reconstructed to detect generated names.
def rename_column_indexes(table_name, column_name, new_column_name)
  column_name, new_column_name = column_name.to_s, new_column_name.to_s
  indexes(table_name).each do |index|
    next unless index.columns.include?(new_column_name)
    # Rebuild the column list as it was before the rename.
    old_columns = index.columns.dup
    old_columns[old_columns.index(new_column_name)] = column_name
    generated_index_name = index_name(table_name, column: old_columns)
    if generated_index_name == index.name
      rename_index table_name, generated_index_name, index_name(table_name, column: index.columns)
    end
  end
end
# Visitor that turns schema definition objects into SQL for this adapter.
def schema_creation
  SchemaCreation.new(self)
end

# Factory for TableDefinition; adapters override to supply their own subclass.
def create_table_definition(*args)
  TableDefinition.new(*args)
end

# Builds an AlterTable wrapper around a fresh table definition for +name+.
def create_alter_table(name)
  AlterTable.new create_table_definition(name)
end
# Builds a SqlTypeMetadata for +sql_type+ by looking up the cast type and
# copying its type/limit/precision/scale.
def fetch_type_metadata(sql_type)
  cast_type = lookup_cast_type(sql_type)
  SqlTypeMetadata.new(
    sql_type: sql_type,
    type: cast_type.type,
    limit: cast_type.limit,
    precision: cast_type.precision,
    scale: cast_type.scale,
  )
end
# Normalizes an index column specification. A String containing non-word
# characters is an SQL expression (e.g. "lower(name)") and is passed
# through untouched; anything else becomes an Array of column names.
def index_column_names(column_names)
  return column_names if column_names.is_a?(String) && column_names.match?(/\W/)

  Array(column_names)
end
# Builds the { column: ... } options used when generating an index name.
# An expression string is flattened to its word characters joined by "_"
# so the generated name stays a valid identifier.
def index_name_options(column_names)
  if column_names.is_a?(String) && column_names.match?(/\W/)
    { column: column_names.scan(/\w+/).join("_") }
  else
    { column: column_names }
  end
end
# Returns the constraint name for a foreign key: the explicit :name when
# given, otherwise "fk_rails_" plus a deterministic 10-character digest
# of "<table>_<column>_fk".
def foreign_key_name(table_name, options)
  return options[:name] if options.key?(:name)

  identifier = "#{table_name}_#{options.fetch(:column)}_fk"
  digest = Digest::SHA256.hexdigest(identifier)[0, 10]
  "fk_rails_#{digest}"
end
# Finds the first foreign key on +from_table+ matching the given target
# table or options, or nil when none matches (or FKs are unsupported).
def foreign_key_for(from_table, options_or_to_table = {})
  return unless supports_foreign_keys?
  foreign_keys(from_table).detect { |fk| fk.defined_for? options_or_to_table }
end
# Like #foreign_key_for but raises ArgumentError when no matching
# foreign key exists.
def foreign_key_for!(from_table, options_or_to_table = {})
  fk = foreign_key_for(from_table, options_or_to_table)
  return fk if fk

  raise ArgumentError, "Table '#{from_table}' has no foreign key for #{options_or_to_table}"
end
# Maps a SQL referential action keyword to its symbolic form.
# Unknown specifiers map to nil.
def extract_foreign_key_action(specifier)
  {
    "CASCADE"  => :cascade,
    "SET NULL" => :nullify,
    "RESTRICT" => :restrict,
  }[specifier]
end
# Ensures +new_name+ fits within the adapter's index-name length limit.
# Internal (Rails-generated) indexes are checked against
# index_name_length; user-supplied names against allowed_index_name_length.
#
# Fix: the error message previously always reported
# allowed_index_name_length even when +internal+ selected the other
# limit; it now reports the limit that was actually applied.
def validate_index_length!(table_name, new_name, internal = false)
  max_index_length = internal ? index_name_length : allowed_index_name_length

  if new_name.length > max_index_length
    raise ArgumentError, "Index name '#{new_name}' on table '#{table_name}' is too long; the limit is #{max_index_length} characters"
  end
end
# change_column_default accepts either a plain default or a
# { from: ..., to: ... } hash; this extracts the effective new default.
def extract_new_default_value(default_or_changes)
  is_change_hash = default_or_changes.is_a?(Hash) &&
                   default_or_changes.key?(:from) &&
                   default_or_changes.key?(:to)
  is_change_hash ? default_or_changes[:to] : default_or_changes
end
# True when +options+ pins down an index purely by :name (plus the
# inert :algorithm key), so no column lookup is needed to remove it.
def can_remove_index_by_name?(options)
  return false unless options.is_a?(Hash) && options.key?(:name)

  (options.keys - [:name, :algorithm]).empty?
end
# Builds the INSERT statement(s) recording migration +versions+ in the
# schema_migrations table: one multi-row INSERT for an Array, a single
# one-row INSERT otherwise.
def insert_versions_sql(versions)
  sm_table = quote_table_name(ActiveRecord::SchemaMigration.table_name)

  unless versions.is_a?(Array)
    return "INSERT INTO #{sm_table} (version) VALUES (#{quote(versions)});"
  end

  rows = versions.map { |v| "(#{quote(v)})" }.join(",\n")
  "INSERT INTO #{sm_table} (version) VALUES\n#{rows};\n\n"
end
# Abstract: returns the SQL used to list data sources (tables/views),
# optionally filtered by +name+ and +type+. Adapters must override.
def data_source_sql(name = nil, type: nil)
  raise NotImplementedError
end

# Abstract: returns the quoted schema/name scope used by data_source_sql.
# Adapters must override.
def quoted_scope(name = nil, type: nil)
  raise NotImplementedError
end
end
end
end
| 38.245474
| 155
| 0.613893
|
e6cedcc65b6a68a84288dddfc5451bac50f7cced
| 12,662
|
dart
|
Dart
|
lib/core/domain/entities/user/user.freezed.dart
|
definitelyme/washryte
|
e2f43c4867488821ab46681c15ad2cb2f1bf2762
|
[
"Apache-2.0"
] | null | null | null |
lib/core/domain/entities/user/user.freezed.dart
|
definitelyme/washryte
|
e2f43c4867488821ab46681c15ad2cb2f1bf2762
|
[
"Apache-2.0"
] | null | null | null |
lib/core/domain/entities/user/user.freezed.dart
|
definitelyme/washryte
|
e2f43c4867488821ab46681c15ad2cb2f1bf2762
|
[
"Apache-2.0"
] | null | null | null |
// coverage:ignore-file
// GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: unused_element, deprecated_member_use, deprecated_member_use_from_same_package, use_function_type_syntax_for_parameters, unnecessary_const, avoid_init_to_null, invalid_override_different_default_values_named, prefer_expression_function_bodies, annotate_overrides, invalid_annotation_target
part of user.dart;
// **************************************************************************
// FreezedGenerator
// **************************************************************************
T _$identity<T>(T value) => value;
final _privateConstructorUsedError = UnsupportedError(
'It seems like you constructed your class using `MyClass._()`. This constructor is only meant to be used by freezed and you are not supposed to need it nor use it.\nPlease check the documentation here for more informations: https://github.com/rrousselGit/freezed#custom-getters-and-methods');
/// @nodoc
class _$UserTearOff {
const _$UserTearOff();
_User call(
{required UniqueId<String?> uid,
required DisplayName firstName,
required DisplayName lastName,
required EmailAddress email,
required Phone phone,
required Password password,
required MediaField photo,
AuthProvider provider = AuthProvider.regular,
DateTime? createdAt,
DateTime? updatedAt,
DateTime? deletedAt}) {
return _User(
uid: uid,
firstName: firstName,
lastName: lastName,
email: email,
phone: phone,
password: password,
photo: photo,
provider: provider,
createdAt: createdAt,
updatedAt: updatedAt,
deletedAt: deletedAt,
);
}
}
/// @nodoc
const $User = _$UserTearOff();
/// @nodoc
mixin _$User {
UniqueId<String?> get uid => throw _privateConstructorUsedError;
DisplayName get firstName => throw _privateConstructorUsedError;
DisplayName get lastName => throw _privateConstructorUsedError;
EmailAddress get email => throw _privateConstructorUsedError;
Phone get phone => throw _privateConstructorUsedError;
Password get password => throw _privateConstructorUsedError;
MediaField get photo => throw _privateConstructorUsedError;
AuthProvider get provider => throw _privateConstructorUsedError;
DateTime? get createdAt => throw _privateConstructorUsedError;
DateTime? get updatedAt => throw _privateConstructorUsedError;
DateTime? get deletedAt => throw _privateConstructorUsedError;
@JsonKey(ignore: true)
$UserCopyWith<User> get copyWith => throw _privateConstructorUsedError;
}
/// @nodoc
abstract class $UserCopyWith<$Res> {
factory $UserCopyWith(User value, $Res Function(User) then) =
_$UserCopyWithImpl<$Res>;
$Res call(
{UniqueId<String?> uid,
DisplayName firstName,
DisplayName lastName,
EmailAddress email,
Phone phone,
Password password,
MediaField photo,
AuthProvider provider,
DateTime? createdAt,
DateTime? updatedAt,
DateTime? deletedAt});
}
/// @nodoc
class _$UserCopyWithImpl<$Res> implements $UserCopyWith<$Res> {
_$UserCopyWithImpl(this._value, this._then);
final User _value;
// ignore: unused_field
final $Res Function(User) _then;
@override
$Res call({
Object? uid = freezed,
Object? firstName = freezed,
Object? lastName = freezed,
Object? email = freezed,
Object? phone = freezed,
Object? password = freezed,
Object? photo = freezed,
Object? provider = freezed,
Object? createdAt = freezed,
Object? updatedAt = freezed,
Object? deletedAt = freezed,
}) {
return _then(_value.copyWith(
uid: uid == freezed
? _value.uid
: uid // ignore: cast_nullable_to_non_nullable
as UniqueId<String?>,
firstName: firstName == freezed
? _value.firstName
: firstName // ignore: cast_nullable_to_non_nullable
as DisplayName,
lastName: lastName == freezed
? _value.lastName
: lastName // ignore: cast_nullable_to_non_nullable
as DisplayName,
email: email == freezed
? _value.email
: email // ignore: cast_nullable_to_non_nullable
as EmailAddress,
phone: phone == freezed
? _value.phone
: phone // ignore: cast_nullable_to_non_nullable
as Phone,
password: password == freezed
? _value.password
: password // ignore: cast_nullable_to_non_nullable
as Password,
photo: photo == freezed
? _value.photo
: photo // ignore: cast_nullable_to_non_nullable
as MediaField,
provider: provider == freezed
? _value.provider
: provider // ignore: cast_nullable_to_non_nullable
as AuthProvider,
createdAt: createdAt == freezed
? _value.createdAt
: createdAt // ignore: cast_nullable_to_non_nullable
as DateTime?,
updatedAt: updatedAt == freezed
? _value.updatedAt
: updatedAt // ignore: cast_nullable_to_non_nullable
as DateTime?,
deletedAt: deletedAt == freezed
? _value.deletedAt
: deletedAt // ignore: cast_nullable_to_non_nullable
as DateTime?,
));
}
}
/// @nodoc
abstract class _$UserCopyWith<$Res> implements $UserCopyWith<$Res> {
factory _$UserCopyWith(_User value, $Res Function(_User) then) =
__$UserCopyWithImpl<$Res>;
@override
$Res call(
{UniqueId<String?> uid,
DisplayName firstName,
DisplayName lastName,
EmailAddress email,
Phone phone,
Password password,
MediaField photo,
AuthProvider provider,
DateTime? createdAt,
DateTime? updatedAt,
DateTime? deletedAt});
}
/// @nodoc
class __$UserCopyWithImpl<$Res> extends _$UserCopyWithImpl<$Res>
implements _$UserCopyWith<$Res> {
__$UserCopyWithImpl(_User _value, $Res Function(_User) _then)
: super(_value, (v) => _then(v as _User));
@override
_User get _value => super._value as _User;
@override
$Res call({
Object? uid = freezed,
Object? firstName = freezed,
Object? lastName = freezed,
Object? email = freezed,
Object? phone = freezed,
Object? password = freezed,
Object? photo = freezed,
Object? provider = freezed,
Object? createdAt = freezed,
Object? updatedAt = freezed,
Object? deletedAt = freezed,
}) {
return _then(_User(
uid: uid == freezed
? _value.uid
: uid // ignore: cast_nullable_to_non_nullable
as UniqueId<String?>,
firstName: firstName == freezed
? _value.firstName
: firstName // ignore: cast_nullable_to_non_nullable
as DisplayName,
lastName: lastName == freezed
? _value.lastName
: lastName // ignore: cast_nullable_to_non_nullable
as DisplayName,
email: email == freezed
? _value.email
: email // ignore: cast_nullable_to_non_nullable
as EmailAddress,
phone: phone == freezed
? _value.phone
: phone // ignore: cast_nullable_to_non_nullable
as Phone,
password: password == freezed
? _value.password
: password // ignore: cast_nullable_to_non_nullable
as Password,
photo: photo == freezed
? _value.photo
: photo // ignore: cast_nullable_to_non_nullable
as MediaField,
provider: provider == freezed
? _value.provider
: provider // ignore: cast_nullable_to_non_nullable
as AuthProvider,
createdAt: createdAt == freezed
? _value.createdAt
: createdAt // ignore: cast_nullable_to_non_nullable
as DateTime?,
updatedAt: updatedAt == freezed
? _value.updatedAt
: updatedAt // ignore: cast_nullable_to_non_nullable
as DateTime?,
deletedAt: deletedAt == freezed
? _value.deletedAt
: deletedAt // ignore: cast_nullable_to_non_nullable
as DateTime?,
));
}
}
/// @nodoc
class _$_User extends _User with DiagnosticableTreeMixin {
const _$_User(
{required this.uid,
required this.firstName,
required this.lastName,
required this.email,
required this.phone,
required this.password,
required this.photo,
this.provider = AuthProvider.regular,
this.createdAt,
this.updatedAt,
this.deletedAt})
: super._();
@override
final UniqueId<String?> uid;
@override
final DisplayName firstName;
@override
final DisplayName lastName;
@override
final EmailAddress email;
@override
final Phone phone;
@override
final Password password;
@override
final MediaField photo;
@JsonKey(defaultValue: AuthProvider.regular)
@override
final AuthProvider provider;
@override
final DateTime? createdAt;
@override
final DateTime? updatedAt;
@override
final DateTime? deletedAt;
@override
String toString({DiagnosticLevel minLevel = DiagnosticLevel.info}) {
return 'User(uid: $uid, firstName: $firstName, lastName: $lastName, email: $email, phone: $phone, password: $password, photo: $photo, provider: $provider, createdAt: $createdAt, updatedAt: $updatedAt, deletedAt: $deletedAt)';
}
@override
void debugFillProperties(DiagnosticPropertiesBuilder properties) {
super.debugFillProperties(properties);
properties
..add(DiagnosticsProperty('type', 'User'))
..add(DiagnosticsProperty('uid', uid))
..add(DiagnosticsProperty('firstName', firstName))
..add(DiagnosticsProperty('lastName', lastName))
..add(DiagnosticsProperty('email', email))
..add(DiagnosticsProperty('phone', phone))
..add(DiagnosticsProperty('password', password))
..add(DiagnosticsProperty('photo', photo))
..add(DiagnosticsProperty('provider', provider))
..add(DiagnosticsProperty('createdAt', createdAt))
..add(DiagnosticsProperty('updatedAt', updatedAt))
..add(DiagnosticsProperty('deletedAt', deletedAt));
}
@override
bool operator ==(dynamic other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _User &&
(identical(other.uid, uid) || other.uid == uid) &&
(identical(other.firstName, firstName) ||
other.firstName == firstName) &&
(identical(other.lastName, lastName) ||
other.lastName == lastName) &&
(identical(other.email, email) || other.email == email) &&
(identical(other.phone, phone) || other.phone == phone) &&
(identical(other.password, password) ||
other.password == password) &&
(identical(other.photo, photo) || other.photo == photo) &&
(identical(other.provider, provider) ||
other.provider == provider) &&
(identical(other.createdAt, createdAt) ||
other.createdAt == createdAt) &&
(identical(other.updatedAt, updatedAt) ||
other.updatedAt == updatedAt) &&
(identical(other.deletedAt, deletedAt) ||
other.deletedAt == deletedAt));
}
@override
int get hashCode => Object.hash(runtimeType, uid, firstName, lastName, email,
phone, password, photo, provider, createdAt, updatedAt, deletedAt);
@JsonKey(ignore: true)
@override
_$UserCopyWith<_User> get copyWith =>
__$UserCopyWithImpl<_User>(this, _$identity);
}
abstract class _User extends User {
const factory _User(
{required UniqueId<String?> uid,
required DisplayName firstName,
required DisplayName lastName,
required EmailAddress email,
required Phone phone,
required Password password,
required MediaField photo,
AuthProvider provider,
DateTime? createdAt,
DateTime? updatedAt,
DateTime? deletedAt}) = _$_User;
const _User._() : super._();
@override
UniqueId<String?> get uid;
@override
DisplayName get firstName;
@override
DisplayName get lastName;
@override
EmailAddress get email;
@override
Phone get phone;
@override
Password get password;
@override
MediaField get photo;
@override
AuthProvider get provider;
@override
DateTime? get createdAt;
@override
DateTime? get updatedAt;
@override
DateTime? get deletedAt;
@override
@JsonKey(ignore: true)
_$UserCopyWith<_User> get copyWith => throw _privateConstructorUsedError;
}
| 32.550129
| 309
| 0.64737
|
a3c6bc54edd065a8fe229cbb9738535265503d0b
| 1,777
|
java
|
Java
|
src/java/org/malbino/orion/enums/Modalidad.java
|
malbino/orion
|
c4ab517a3b2c9f33af8ae70133794b2727c466f9
|
[
"MIT"
] | null | null | null |
src/java/org/malbino/orion/enums/Modalidad.java
|
malbino/orion
|
c4ab517a3b2c9f33af8ae70133794b2727c466f9
|
[
"MIT"
] | null | null | null |
src/java/org/malbino/orion/enums/Modalidad.java
|
malbino/orion
|
c4ab517a3b2c9f33af8ae70133794b2727c466f9
|
[
"MIT"
] | null | null | null |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.malbino.orion.enums;
import java.util.Arrays;
/**
*
* @author Martin
*/
/**
 * Enrollment modality for an academic record: a student is either a
 * regular entrant or arrives via convalidation, transfer or migration.
 *
 * @author Martin
 */
public enum Modalidad {

    REGULAR("REGULAR", "R", true),
    CONVALIDACION("CONVALIDACIÓN", "C", false),
    TRASPASO("TRASPASO", "T", false),
    MIGRACION("MIGRACIÓN", "M", false);

    // Display name shown to users.
    private String nombre;
    // One-letter code used in reports.
    private String abreviatura;
    // Whether this modality counts as a regular enrollment.
    private Boolean regular;

    private Modalidad(String nombre, String abreviatura, Boolean regular) {
        this.nombre = nombre;
        this.abreviatura = abreviatura;
        this.regular = regular;
    }

    /**
     * @return the nombre
     */
    public String getNombre() {
        return nombre;
    }

    /**
     * @param nombre the nombre to set
     */
    public void setNombre(String nombre) {
        this.nombre = nombre;
    }

    /**
     * @return the abreviatura
     */
    public String getAbreviatura() {
        return abreviatura;
    }

    /**
     * @param abreviatura the abreviatura to set
     */
    public void setAbreviatura(String abreviatura) {
        this.abreviatura = abreviatura;
    }

    /**
     * @return the regular
     */
    public Boolean getRegular() {
        return regular;
    }

    /**
     * @param regular the regular to set
     */
    public void setRegular(Boolean regular) {
        this.regular = regular;
    }

    @Override
    public String toString() {
        return nombre;
    }

    /**
     * Returns the modalities whose {@code regular} flag matches the given value.
     *
     * Fix: the parameter was previously ignored and the method always
     * returned the non-regular modalities regardless of the argument.
     *
     * @param regular true for regular modalities, false for the rest
     * @return matching modalities, in declaration order
     */
    public static Modalidad[] values(Boolean regular) {
        return Arrays.stream(Modalidad.values())
                .filter(modalidad -> modalidad.regular.equals(regular))
                .toArray(Modalidad[]::new);
    }
}
| 21.938272
| 115
| 0.604952
|
08f0191f118185c79f9e4217ea1fd90a1164e694
| 424
|
swift
|
Swift
|
emu/emu/Debug/DebugTileMapViewController.swift
|
donnellyk/emu
|
6a6dd091369866a7bd9b7a143c93b92be43b49ee
|
[
"Apache-2.0"
] | 1
|
2020-04-29T00:15:26.000Z
|
2020-04-29T00:15:26.000Z
|
emu/emu/Debug/DebugTileMapViewController.swift
|
donnellyk/emu
|
6a6dd091369866a7bd9b7a143c93b92be43b49ee
|
[
"Apache-2.0"
] | null | null | null |
emu/emu/Debug/DebugTileMapViewController.swift
|
donnellyk/emu
|
6a6dd091369866a7bd9b7a143c93b92be43b49ee
|
[
"Apache-2.0"
] | null | null | null |
import Cocoa
/// Debug view controller that shows the emulator's tile map as an image.
class DebugTileMapViewController: ImageViewController {
  /// Serial queue so tile-map renders happen off the main thread, one at a time.
  var queue: DispatchQueue = DispatchQueue(label: "Render")
}
extension DebugTileMapViewController {
  /// Converts the canvas to an NSImage on the render queue, then publishes
  /// it to `image` on the main thread.
  func display(_ canvas: BitmapCanvas) {
    queue.async {
      let rep = canvas.bitmapImageRep
      let rendered = NSImage(cgImage: rep.cgImage!, size: rep.size)
      DispatchQueue.main.async {
        self.image = rendered
      }
    }
  }
}
| 22.315789
| 70
| 0.695755
|
2b8d921ed3716cf5e7572f1af4d22d4b789da508
| 311
|
rb
|
Ruby
|
db/migrate/20120805130540_create_centres.rb
|
JordanHatch/rs-hacks
|
72cb358d05b823f56c0c391eda32881b89db4bb5
|
[
"MIT"
] | 1
|
2015-08-03T21:00:04.000Z
|
2015-08-03T21:00:04.000Z
|
db/migrate/20120805130540_create_centres.rb
|
youngrewired/yrs-hacks
|
4d4eaa9be042574db6dd8887ea4ad8f54546ab2d
|
[
"MIT"
] | 2
|
2021-05-18T18:54:59.000Z
|
2021-11-04T19:13:24.000Z
|
db/migrate/20120805130540_create_centres.rb
|
youngrewired/yrs-hacks
|
4d4eaa9be042574db6dd8887ea4ad8f54546ab2d
|
[
"MIT"
] | 1
|
2015-07-04T19:13:17.000Z
|
2015-07-04T19:13:17.000Z
|
# Adds centres (per-event venues), flags events that use them, and links
# projects to a centre.
class CreateCentres < ActiveRecord::Migration
  def change
    create_table :centres do |t|
      t.string :name
      t.string :slug
      t.references :event

      t.timestamps
    end

    add_column :events, :use_centres, :boolean, default: false
    add_column :projects, :centre_id, :integer
  end
end
| 22.214286
| 65
| 0.66881
|
4d11716e36aa63a5f4d5e77162789067379e0665
| 760
|
asm
|
Assembly
|
programs/oeis/100/A100214.asm
|
neoneye/loda
|
afe9559fb53ee12e3040da54bd6aa47283e0d9ec
|
[
"Apache-2.0"
] | 22
|
2018-02-06T19:19:31.000Z
|
2022-01-17T21:53:31.000Z
|
programs/oeis/100/A100214.asm
|
neoneye/loda
|
afe9559fb53ee12e3040da54bd6aa47283e0d9ec
|
[
"Apache-2.0"
] | 41
|
2021-02-22T19:00:34.000Z
|
2021-08-28T10:47:47.000Z
|
programs/oeis/100/A100214.asm
|
neoneye/loda
|
afe9559fb53ee12e3040da54bd6aa47283e0d9ec
|
[
"Apache-2.0"
] | 5
|
2021-02-24T21:14:16.000Z
|
2021-08-09T19:48:05.000Z
|
; A100214: a(n) = 4*n^3 + 4.
; 4,8,36,112,260,504,868,1376,2052,2920,4004,5328,6916,8792,10980,13504,16388,19656,23332,27440,32004,37048,42596,48672,55300,62504,70308,78736,87812,97560,108004,119168,131076,143752,157220,171504,186628,202616,219492,237280,256004,275688,296356,318032,340740,364504,389348,415296,442372,470600,500004,530608,562436,595512,629860,665504,702468,740776,780452,821520,864004,907928,953316,1000192,1048580,1098504,1149988,1203056,1257732,1314040,1372004,1431648,1492996,1556072,1620900,1687504,1755908,1826136,1898212,1972160,2048004,2125768,2205476,2287152,2370820,2456504,2544228,2634016,2725892,2819880,2916004,3014288,3114756,3217432,3322340,3429504,3538948,3650696,3764772,3881200
pow $0,3 ; $0 = n^3
add $0,1 ; $0 = n^3 + 1
gcd $1,$0 ; $1 starts at 0 and gcd(0, x) = x, so $1 = n^3 + 1
mul $1,4 ; $1 = 4*n^3 + 4
mov $0,$1 ; output a(n) in $0
| 84.444444
| 682
| 0.814474
|
2ff35089a6ef2faae60035d74db690ac433be79f
| 1,330
|
py
|
Python
|
literate/commands/commands.py
|
debrouwere/python-literate
|
0d9e187c3e7adf6dda76ffddfac4c09bd01ff53d
|
[
"MIT"
] | 27
|
2015-03-23T12:20:10.000Z
|
2021-12-29T00:57:36.000Z
|
literate/commands/commands.py
|
debrouwere/python-literate
|
0d9e187c3e7adf6dda76ffddfac4c09bd01ff53d
|
[
"MIT"
] | null | null | null |
literate/commands/commands.py
|
debrouwere/python-literate
|
0d9e187c3e7adf6dda76ffddfac4c09bd01ff53d
|
[
"MIT"
] | 1
|
2018-01-27T16:08:37.000Z
|
2018-01-27T16:08:37.000Z
|
import fs
from literate import parser, templates, package
def run(options):
    """Read and parse each literate source file listed in ``options.src``."""
    for src_path in options.src:
        contents = fs.File(src_path).read()
        parser.run(contents)
def untangle(options):
    """Extract plain source code from a literate document (not implemented yet)."""
    raise NotImplementedError()
def weave(options):
src = fs.File(options.src)
dest = fs.Directory(options.dest)
# find literate python files
if src.is_file:
documents = [src]
else:
# TODO: specify whether we should search recursively or not
# recursive=options.recursive
# TODO: support for Literate Python with docstrings
documents = fs.Directory(src.path).find('*.pylit') # '*.py.md', '.md.py'
package.create(dest)
# weave literate python
for doc in documents:
raw = doc.read()
blocks = parser.weave(raw, evaluate=options.evaluate)
# template=None
# TODO: template should be handled differently depending on whether
# it's a Jinja template (.html) or an executable that we should
# pass the context (w/ simplify=True above)
html = templates.document(doc.name, blocks, prose=options.prose, capture=options.capture)
filename = doc.name + '.html'
f = fs.File(dest.path, filename)
f.write(html)
# documents are groups of blocks
#literate.package(documents, options.dest)
| 32.439024
| 97
| 0.650376
|
c8f949aeced9eb8efeb463cb13e79466344d0270
| 78
|
go
|
Go
|
2015_08_11/panqueca.go
|
samisafatli/dojo
|
3751f8413e70da84e928e037193e8cb03f6b3e65
|
[
"MIT"
] | 114
|
2015-03-10T22:17:42.000Z
|
2022-03-09T17:49:48.000Z
|
2015_08_11/panqueca.go
|
samisafatli/dojo
|
3751f8413e70da84e928e037193e8cb03f6b3e65
|
[
"MIT"
] | 9
|
2018-09-04T12:49:59.000Z
|
2019-11-17T21:29:51.000Z
|
2015_08_11/panqueca.go
|
samisafatli/dojo
|
3751f8413e70da84e928e037193e8cb03f6b3e65
|
[
"MIT"
] | 39
|
2015-01-29T01:20:56.000Z
|
2022-02-17T16:26:25.000Z
|
package main
func Gostosura(g []int64) (float64, error) {
return 0.0, nil
}
| 13
| 44
| 0.679487
|
20e0214c89190cdd169156bb60ba558629342d6e
| 1,131
|
py
|
Python
|
upelis_settings.py
|
saknis/upelis
|
32b571f3596c561a53645c652ab4ae75b452f7a4
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
upelis_settings.py
|
saknis/upelis
|
32b571f3596c561a53645c652ab4ae75b452f7a4
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
upelis_settings.py
|
saknis/upelis
|
32b571f3596c561a53645c652ab4ae75b452f7a4
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import locale
import gettext
DEBUG = True
MAILSENDER="upelis@upe.lt"
MAILRCPTTO="upelis@upe.lt"
CURLOCALE = 'en_US'
#current_locale = 'lt_LT'
LANGUAGES={'ru': 'ru_RU', 'en': 'en_US', 'lt': 'lt_LT', 'ua': 'ua_UA', 'it': 'it_IT', 'lv': 'lv_LV', 'by': 'by_BY', 'pl': 'pl_PL', 'de': 'de_DE'}
LANGUAGESNR={'lt': 1, 'ru': 2, 'en': 3, 'lv': 4, 'pl': 5, 'by': 6, 'ua': 7, 'de': 8, 'it': 9}
LANGUAGESSORT = sorted(LANGUAGESNR.iteritems(), key=lambda (k,v):(v,k))
LOCALEPATH = 'lang/'
FILEEXT='html'
LANG='lt'
LANGDEF=LANG
TITAUTH = "Vardenis Pavardenis"
#_titauth = "Nerijus Terebas"
VERSION='1.6.3'
DYNABOPT="&font_size=14&button_filename=cloud.png&font_file=Ubuntu-B.ttf&left_width=10&right_width=10"
DYNABFONT="Ubuntu-B.ttf"
CMSNAME="Upelis"
CMSPATH="upelis"
CMSTRANS="upelis"
LANGHTML = "<li><a href=\"/"+CMSPATH+"-%s-%s.%s\"><img src=\"/static/images/flag/%s.gif\" border=\"0\" alt=\"%s\" /></a></li>"
RSSTITLE="Nerijaus Terebo puslapis"
SITE1A='upelis.org'
SITE1B='www.upelis.org'
SITE2A='nerij.us'
SITE2B='www.nerij.us'
SITEDOWN='http://www.upelis.org/static/upelis.zip'
| 32.314286
| 146
| 0.64191
|
afa3e161a4f50fca44e6d864479bdec101755f51
| 649
|
py
|
Python
|
scripts/euler_from_quaternion.py
|
gbmxdwmssj/path_planner
|
53e1d743dc8f780b677a69ea232ee0b9aeacaf5f
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/euler_from_quaternion.py
|
gbmxdwmssj/path_planner
|
53e1d743dc8f780b677a69ea232ee0b9aeacaf5f
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/euler_from_quaternion.py
|
gbmxdwmssj/path_planner
|
53e1d743dc8f780b677a69ea232ee0b9aeacaf5f
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python2
import rospy
import tf
from hybrid_astar.srv import *
def eulerFromQuaternion(req):
# print('I get a quaternion!')
quaternion = (req.quaternion.x,
req.quaternion.y,
req.quaternion.z,
req.quaternion.w)
euler = tf.transformations.euler_from_quaternion(quaternion) # rad
# print('I compute a euler!')
return EulerFromQuaternionResponse(euler[0], euler[1], euler[2]) # rad
rospy.init_node('euler_quaternion_server', anonymous=True)
s = rospy.Service('/euler_from_quaternion', EulerFromQuaternion, eulerFromQuaternion)
print('Ready to calculate euler from quaternion.')
rospy.spin()
| 29.5
| 85
| 0.725732
|
db991fb7e111a4a1714752e6100d6c4de0d83e67
| 3,324
|
php
|
PHP
|
app/Http/Controllers/PostController.php
|
hajer1998/laravel
|
9efc30d55647d0340ad7dc23859ce6a0b358ff46
|
[
"MIT"
] | 2
|
2021-04-24T12:11:39.000Z
|
2021-04-24T12:11:44.000Z
|
app/Http/Controllers/PostController.php
|
hajer1998/laravel
|
9efc30d55647d0340ad7dc23859ce6a0b358ff46
|
[
"MIT"
] | null | null | null |
app/Http/Controllers/PostController.php
|
hajer1998/laravel
|
9efc30d55647d0340ad7dc23859ce6a0b358ff46
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Http\Controllers;
use App\Events\PostLiked;
use App\Models\User;
use App\Repositories\PostRepository;
use Illuminate\Http\Request;
use Illuminate\Http\Response;
class PostController
{
/**
* @var PostRepository
*/
private $repository;
public function __construct(PostRepository $repository)
{
$this->repository = $repository;
}
/**
* create a post
*/
public function create(Request $request)
{
try {
$post = $this->repository->create(
$request->get('body'),
$request->get('imageLink'),
$request->get('user_id')
);
} catch (\Throwable $exception) {
throw new \Exception('post not created');
}
return response()->json($post->toArray());
}
/**
*Edit post
*/
public function edit(Request $request, $id)
{
try {
$post = $this->repository->editPost($id, $request->get('body'));
} catch (\Throwable $exception) {
return \response()->json([
'success' => false,
'message' => 'Post not found'
], 404);
}
}
/**
* delete a post
*/
public function delete($id)
{
try {
$this->repository->deletePost($id);
} catch (\Throwable $exception) {
return \response()->json([
'success' => false,
'message' => 'Post not found'
], 404);
}
return response(null, 204);
}
public function like($id, Request $request)
{
try {
$post = $this->repository->markLike(
$id,
$request->get('user_id')
);
if ($post->user_id != $request->get('user_id')) {
PostLiked::dispatch(
$id,
User::firstWhere('_id', $request->get('user_id'))->name,
$request->get('user_id'),
$post->user_id
);
}
return response(null, 204);
} catch (\Throwable $exception){
return response()->json('post not liked', Response::HTTP_INTERNAL_SERVER_ERROR);
}
}
public function unlike($id, Request $request)
{
try {
$this->repository->dislike($id, $request->get('user_id'));
} catch (\Throwable $exception){
throw new \Exception('unlike not marked');
}
return response(null,204);
}
/**
* listing Posts
*/
public function listing(Request $request)
{
$posts= $this->repository->listingPosts($request->get('user_id'));
if (empty($posts)) {
return \response()->json([
'success' => false,
'message' => 'Post not found'
], 404);
}
return response()->json([
'success' => true,
'data' => $posts
]);
}
/**
* getPost
*/
public function get($id)
{
try {
$post = $this->repository->get($id);
} catch (\Exception $exception) {
exit('post not found');
}
return response()->json($post->toArray());
}
}
| 23.574468
| 92
| 0.474128
|
e8fc97cd31fc8f6681147665b3d8501aa3f327c3
| 865
|
rb
|
Ruby
|
spec/hosts/single_plugin_spec.rb
|
ccin2p3/puppet-collectd
|
47f74e4f66236f0e60ce467dd72e0a46f76b8db1
|
[
"Apache-2.0"
] | null | null | null |
spec/hosts/single_plugin_spec.rb
|
ccin2p3/puppet-collectd
|
47f74e4f66236f0e60ce467dd72e0a46f76b8db1
|
[
"Apache-2.0"
] | null | null | null |
spec/hosts/single_plugin_spec.rb
|
ccin2p3/puppet-collectd
|
47f74e4f66236f0e60ce467dd72e0a46f76b8db1
|
[
"Apache-2.0"
] | null | null | null |
require 'spec_helper'
describe 'single_plugin' do
['Debian', 'RedHat'].each do |osfamily|
let :facts do
{ :osfamily => osfamily, :concat_basedir => 'dir' }
end
describe "should load a plugin on #{osfamily}" do
it { should contain_concat__fragment('collectd loadplugin vmem').with(
:content => /LoadPlugin.+vmem/,
:target => '/etc/collectd/loadplugins.conf'
) }
end
describe "should configure a plugin on #{osfamily}" do
it { should contain_file('/etc/collectd/plugins/configure_vmem.conf') \
.with_content(/^<Plugin.+vmem/) }
it { should contain_file('/etc/collectd/plugins/configure_vmem.conf') \
.with_content(/Verbose false/) }
it { should contain_file('/etc/collectd/plugins/configure_vmem.conf') \
.with_content(/^<\/Plugin>/) }
end
end
end
| 27.903226
| 77
| 0.63237
|
799ac255d148832c5d14aa7c92b5b9acb6b29c2c
| 577
|
php
|
PHP
|
addnewproduct.php
|
fakhruddinVhora/Royal-Decor
|
28e522252aa25f28ff9d000b0dc14323a3613dc0
|
[
"MIT"
] | 1
|
2019-11-15T17:09:48.000Z
|
2019-11-15T17:09:48.000Z
|
addnewproduct.php
|
fakhruddinVhora/Royal-Decor
|
28e522252aa25f28ff9d000b0dc14323a3613dc0
|
[
"MIT"
] | null | null | null |
addnewproduct.php
|
fakhruddinVhora/Royal-Decor
|
28e522252aa25f28ff9d000b0dc14323a3613dc0
|
[
"MIT"
] | null | null | null |
<?php
if (isset($_POST['submit'])) {
include 'db_connect.php';
$name = mysqli_real_escape_string($conn, $_POST['name']);
$name = preg_replace("/[\s]/", "-",$name);
$credit = mysqli_real_escape_string($conn, $_POST['credit']);
$sql = "INSERT INTO product (name, credits) VALUES ('$name', '$credit')";
if (mysqli_query($conn, $sql)) {
echo "<script>alert('New Product added successfully!'); location.href='newproduct.php';</script>";
} else {
echo "Error: " . $sql . "" . mysqli_error($conn);
}
} else {
header("Location: newproduct.php");
}
?>
| 28.85
| 105
| 0.613518
|
da92bba5be48dc7aada9f6b59e24ef0a6539e720
| 332
|
php
|
PHP
|
api/modules/test/controllers/TestController.php
|
donallin823/ks-yii2
|
f85fcb40d1325a7ec7282478cd6de2b9532b1d80
|
[
"MIT"
] | null | null | null |
api/modules/test/controllers/TestController.php
|
donallin823/ks-yii2
|
f85fcb40d1325a7ec7282478cd6de2b9532b1d80
|
[
"MIT"
] | null | null | null |
api/modules/test/controllers/TestController.php
|
donallin823/ks-yii2
|
f85fcb40d1325a7ec7282478cd6de2b9532b1d80
|
[
"MIT"
] | null | null | null |
<?php
/**
* User: donallin
*/
namespace api\modules\test\controllers;
use api\controllers\ApiCoreController;
use common\components\KsComponent;
use common\components\KsUtils;
use Yii;
class TestController extends ApiCoreController
{
public function actionIndex()
{
echo 'hello test';
}
}
| 16.6
| 47
| 0.680723
|
be3c4701134adfdc9786d668b0bfd7e2607bf143
| 1,608
|
ts
|
TypeScript
|
resources/frontend/angular/src/app/core/components/your-account/seller-account/seller-account.component.ts
|
zeuros/homy-care
|
6ad3520d2a117a25c6ca8bbf09c4d6b5d847252f
|
[
"MIT"
] | null | null | null |
resources/frontend/angular/src/app/core/components/your-account/seller-account/seller-account.component.ts
|
zeuros/homy-care
|
6ad3520d2a117a25c6ca8bbf09c4d6b5d847252f
|
[
"MIT"
] | null | null | null |
resources/frontend/angular/src/app/core/components/your-account/seller-account/seller-account.component.ts
|
zeuros/homy-care
|
6ad3520d2a117a25c6ca8bbf09c4d6b5d847252f
|
[
"MIT"
] | null | null | null |
import {Component, OnInit} from '@angular/core';
import {FormBuilder, FormGroup, Validators} from "@angular/forms";
import {DateAdapter} from "@angular/material/core";
import {
faCalendarAlt,
faEnvelope,
faEyeSlash,
faFemale,
faLocationArrow,
faMale,
faPhone
} from '@fortawesome/free-solid-svg-icons';
export const MOBILE_PATTERN = /[0-9\+\-\ ]/;
export const PASSWORD_PATTERN = /(?=.*\d)(?=.*[a-z])(?=.*[A-Z]).{8,}/;
@Component({
selector: 'app-seller-account',
templateUrl: './seller-account.component.html',
styleUrls: ['./seller-account.component.scss'],
})
export class SellerAccountComponent implements OnInit {
minDate: Date;
maxDate: Date = new Date();
sellerForm: FormGroup;
constructor(
private fb: FormBuilder,
private dateAdapter: DateAdapter<Date>
) {
this.dateAdapter.setLocale('fr');
const currentYear = new Date().getFullYear();
this.minDate = new Date(currentYear - 100, 0, 1);
this.sellerForm = this.fb.group({
sex: [],
firstName: [],
lastName: [],
birthDate: [],
siren: [],
address: [],
telephone: ['', [Validators.pattern(MOBILE_PATTERN)]],
email: [],
password: ['', [Validators.pattern(PASSWORD_PATTERN)]],
passwordConfirm: ['', [Validators.pattern(PASSWORD_PATTERN)]],
autoEntrepreneur: [true],
generalConditions: [false],
});
}
ngOnInit(): void {
}
faEyeSlash = faEyeSlash;
faEnvelope = faEnvelope;
faPhone = faPhone;
faLocationArrow = faLocationArrow;
faMale = faMale;
faFemale = faFemale;
faCalendarAlt = faCalendarAlt;
}
| 24.363636
| 70
| 0.651119
|
01f19025767134f59d071780ba078f401c6290b8
| 646
|
kt
|
Kotlin
|
winter-junit5/src/main/kotlin/io/jentz/winter/junit5/WInject.kt
|
beyama/winter
|
51b49c6da3bad69b549b343093322a86f0615703
|
[
"Apache-2.0"
] | 13
|
2017-10-23T08:47:53.000Z
|
2021-08-13T14:10:53.000Z
|
winter-junit5/src/main/kotlin/io/jentz/winter/junit5/WInject.kt
|
beyama/winter
|
51b49c6da3bad69b549b343093322a86f0615703
|
[
"Apache-2.0"
] | 2
|
2018-11-09T15:50:53.000Z
|
2021-02-12T09:05:44.000Z
|
winter-junit5/src/main/kotlin/io/jentz/winter/junit5/WInject.kt
|
beyama/winter
|
51b49c6da3bad69b549b343093322a86f0615703
|
[
"Apache-2.0"
] | 2
|
2018-11-08T13:53:03.000Z
|
2019-10-17T14:43:26.000Z
|
package io.jentz.winter.junit5
import kotlin.annotation.AnnotationRetention.RUNTIME
import kotlin.annotation.AnnotationTarget.*
/**
* This annotation can be used to inject values into JUnit5 methods and constructors by using
* the test graph to resolve them.
*
* The problem with Javax Inject is, that it does not allow value parameter targets.
*
* Example in a test using one of the Winter JUnit5 extensions:
* ```
* @Test fun myTest(@WInject service: Service) {
* // do something with service
* }
*
* ```
*
*/
@Target(VALUE_PARAMETER)
@Retention(RUNTIME)
@MustBeDocumented
annotation class WInject(val qualifier: String = "")
| 25.84
| 93
| 0.73839
|