text
stringlengths
2
99.9k
meta
dict
//MAP CONVERTED BY dmm2tgm.py THIS HEADER COMMENT PREVENTS RECONVERSION, DO NOT REMOVE "a" = ( /turf/template_noop, /area/template_noop) "b" = ( /obj/structure/shuttle/engine/propulsion{ dir = 1 }, /turf/template_noop, /area/ruin/powered/shuttle/medium_3) "c" = ( /turf/closed/wall/mineral/plastitanium, /area/ruin/powered/shuttle/medium_3) "d" = ( /obj/structure/shuttle/engine/heater{ dir = 1 }, /turf/open/floor/plating/airless, /area/ruin/powered/shuttle/medium_3) "h" = ( /turf/closed/wall/mineral/titanium, /area/ruin/powered/shuttle/medium_3) "j" = ( /obj/machinery/door/airlock/external/glass, /turf/open/floor/plating, /area/ruin/powered/shuttle/medium_3) "k" = ( /obj/machinery/power/smes/magical, /turf/open/floor/plasteel, /area/ruin/powered/shuttle/medium_3) "l" = ( /turf/open/floor/plasteel, /area/ruin/powered/shuttle/medium_3) "m" = ( /turf/open/floor/plating, /area/ruin/powered/shuttle/medium_3) "n" = ( /obj/structure/shuttle/engine/propulsion{ dir = 8 }, /turf/template_noop, /area/ruin/powered/shuttle/medium_3) "o" = ( /obj/structure/shuttle/engine/heater{ dir = 8 }, /turf/open/floor/plating/airless, /area/ruin/powered/shuttle/medium_3) "p" = ( /obj/machinery/door/airlock/public/glass, /turf/open/floor/plasteel, /area/ruin/powered/shuttle/medium_3) "q" = ( /obj/structure/shuttle/engine/heater{ dir = 4 }, /turf/open/floor/plating/airless, /area/ruin/powered/shuttle/medium_3) "r" = ( /obj/structure/shuttle/engine/propulsion{ dir = 4 }, /turf/template_noop, /area/ruin/powered/shuttle/medium_3) "t" = ( /obj/effect/spawner/structure/window/reinforced, /turf/open/floor/plating, /area/ruin/powered/shuttle/medium_3) "v" = ( /obj/structure/table, /obj/effect/turf_decal/tile/yellow{ dir = 1 }, /obj/effect/turf_decal/tile/yellow, /obj/effect/turf_decal/tile/yellow{ dir = 4 }, /obj/effect/turf_decal/tile/yellow{ dir = 8 }, /turf/open/floor/plasteel/dark, /area/ruin/powered/shuttle/medium_3) "w" = ( /obj/effect/turf_decal/tile/yellow{ dir = 1 }, 
/obj/effect/turf_decal/tile/yellow, /obj/effect/turf_decal/tile/yellow{ dir = 4 }, /obj/effect/turf_decal/tile/yellow{ dir = 8 }, /turf/open/floor/plasteel/dark, /area/ruin/powered/shuttle/medium_3) "x" = ( /obj/structure/chair/comfy{ dir = 8 }, /obj/effect/turf_decal/tile/yellow{ dir = 1 }, /obj/effect/turf_decal/tile/yellow, /obj/effect/turf_decal/tile/yellow{ dir = 4 }, /obj/effect/turf_decal/tile/yellow{ dir = 8 }, /turf/open/floor/plasteel/dark, /area/ruin/powered/shuttle/medium_3) "z" = ( /obj/machinery/sleeper, /obj/effect/turf_decal/tile/blue{ dir = 1 }, /obj/effect/turf_decal/tile/blue, /obj/effect/turf_decal/tile/blue{ dir = 4 }, /obj/effect/turf_decal/tile/blue{ dir = 8 }, /turf/open/floor/plasteel/dark, /area/ruin/powered/shuttle/medium_3) "A" = ( /turf/open/floor/plasteel/dark, /area/ruin/powered/shuttle/medium_3) "C" = ( /obj/effect/turf_decal/tile/blue{ dir = 1 }, /obj/effect/turf_decal/tile/blue, /obj/effect/turf_decal/tile/blue{ dir = 4 }, /obj/effect/turf_decal/tile/blue{ dir = 8 }, /turf/open/floor/plasteel/dark, /area/ruin/powered/shuttle/medium_3) "D" = ( /obj/structure/table, /obj/item/storage/firstaid, /obj/effect/turf_decal/tile/blue{ dir = 1 }, /obj/effect/turf_decal/tile/blue, /obj/effect/turf_decal/tile/blue{ dir = 4 }, /obj/effect/turf_decal/tile/blue{ dir = 8 }, /turf/open/floor/plasteel/dark, /area/ruin/powered/shuttle/medium_3) "E" = ( /obj/structure/shuttle/engine/heater, /turf/open/floor/plating/airless, /area/ruin/powered/shuttle/medium_3) "F" = ( /obj/structure/shuttle/engine/propulsion, /turf/template_noop, /area/ruin/powered/shuttle/medium_3) "G" = ( /turf/closed/wall/mineral/titanium/nodiagonal, /area/ruin/powered/shuttle/medium_3) (1,1,1) = {" a a a a a a a a n a a a a n a a a a a a a a "} (2,1,1) = {" a a a a a a n c o a a a a o c n a a a a a a "} (3,1,1) = {" a a a a a c o c c c a a c c c o c a a a a a "} (4,1,1) = {" a a a a c c c c c c a a c c c c c c a a a a "} (5,1,1) = {" a a a c c h h c c c j j c c c h h c c a a a "} 
(6,1,1) = {" a a c c h k l G h G m m G h G l k h c c a a "} (7,1,1) = {" a b d c h l l l l p l l p l l l l h c E F a "} (8,1,1) = {" a c c c c G l l l G t t G l l l G c c c c a "} (9,1,1) = {" b d c c c h l l G z C C D G l l h c c c E F "} (10,1,1) = {" a a c c c G p G v G C C G A G p G c c c a a "} (11,1,1) = {" a a a a j m l t w w l l A A t l m j a a a a "} (12,1,1) = {" a a a a j m l t x w l l A A t l m j a a a a "} (13,1,1) = {" a a c c c G p G w G A A G A G p G c c c a a "} (14,1,1) = {" b d c c c h l l G A A A A G l l h c c c E F "} (15,1,1) = {" a c c c c G l l l G t t G l l l G c c c c a "} (16,1,1) = {" a b d c h l l l l p l l p l l l l h c E F a "} (17,1,1) = {" a a c c h k l G h G m m G h G l k h c c a a "} (18,1,1) = {" a a a c c h h c c c j j c c c h h c c a a a "} (19,1,1) = {" a a a a c c c c c c a a c c c c c c a a a a "} (20,1,1) = {" a a a a a c q c c c a a c c c q c a a a a a "} (21,1,1) = {" a a a a a a r c q a a a a q c r a a a a a a "} (22,1,1) = {" a a a a a a a a r a a a a r a a a a a a a a "}
{ "pile_set_name": "Github" }
//===----------------------------------------------------------------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // <random> // template<class UIntType, size_t w, size_t s, size_t r> // class subtract_with_carry_engine; // void discard(unsigned long long z); #include <random> #include <cassert> #include "test_macros.h" void test1() { std::ranlux24_base e1; std::ranlux24_base e2 = e1; assert(e1 == e2); e1.discard(3); assert(e1 != e2); (void)e2(); (void)e2(); (void)e2(); assert(e1 == e2); } void test2() { std::ranlux48_base e1; std::ranlux48_base e2 = e1; assert(e1 == e2); e1.discard(3); assert(e1 != e2); (void)e2(); (void)e2(); (void)e2(); assert(e1 == e2); } int main(int, char**) { test1(); test2(); return 0; }
{ "pile_set_name": "Github" }
#region License
// 
//     MIT License
//
//     CoiniumServ - Crypto Currency Mining Pool Server Software
//     Copyright (C) 2013 - 2017, CoiniumServ Project
//     Hüseyin Uslu, shalafiraistlin at gmail dot com
//     https://github.com/bonesoul/CoiniumServ
// 
//     Permission is hereby granted, free of charge, to any person obtaining a copy
//     of this software and associated documentation files (the "Software"), to deal
//     in the Software without restriction, including without limitation the rights
//     to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
//     copies of the Software, and to permit persons to whom the Software is
//     furnished to do so, subject to the following conditions:
// 
//     The above copyright notice and this permission notice shall be included in all
//     copies or substantial portions of the Software.
// 
//     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
//     IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//     AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
//     LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
//     OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
//     SOFTWARE.
// 
#endregion

using CoiniumServ.Configuration;

namespace CoiniumServ.Jobs.Manager
{
    /// <summary>
    /// Configuration contract consumed by the job manager: controls how often the
    /// coin daemon is polled for new blocks and when a job is re-broadcast to miners.
    /// </summary>
    public interface IJobConfig : IConfig
    {
        /// <summary>
        /// Timeout in milliseconds between polls of the coin daemon for a new block.
        /// </summary>
        int BlockRefreshInterval { get; }

        /// <summary>
        /// If no new blocks are found within this many seconds, a new job will be
        /// created and broadcast to connected miners.
        /// </summary>
        int RebroadcastTimeout { get; }
    }
}
{ "pile_set_name": "Github" }
// WriteLog.cpp: implementation of the CWriteLog class. // ////////////////////////////////////////////////////////////////////// #include "stdafx.h" #include "LogModule.h" #include "WriteLog.h" #include <afxtempl.h> #include "DBConnetParamRegConfig.h" #define DELETE_USELESSFILE_NUM 500 #ifdef _DEBUG #undef THIS_FILE static char THIS_FILE[] = __FILE__; #define new DEBUG_NEW #endif ////////////////////////////////////////////////////////////////////// // Construction/Destruction ////////////////////////////////////////////////////////////////////// extern volatile BOOL m_gTagThreadEnd; extern CRITICAL_SECTION m_gLogCriticalSection; // extern _ConnectionPtr m_pConnection; extern CList<_asc_DataLog, _asc_DataLog> m_dataLogList; extern CString vartostr(const _variant_t& var); extern short vartoi(const _variant_t& var); extern HANDLE g_readEvent; extern HANDLE g_writeEvent; ////////////////////////////////////////////////////////////////////// // Construction/Destruction ////////////////////////////////////////////////////////////////////// CWriteLog::CWriteLog() { m_pRecordsetSelect = NULL; sql = _T(""); exist = FALSE; m_strTimeStamp = _T(""); m_CountAfterDel = 0; } CWriteLog::~CWriteLog() { m_pRecordsetSelect->Close(); } void CWriteLog::initWriteLog() { setConnect(); } void CWriteLog::writeLog() { int i = 0; int nCount = 0; DWORD emlLength = 0; char* pemlBuf = NULL; CString strEmlWholePathFile = "";//eml的全路径文件名 _asc_DataLog datalog; while (1) { if (m_gTagThreadEnd) { //是否终止线程 return; } AfxMessageBox("start w thread"); EnterCriticalSection(&m_gLogCriticalSection); nCount = m_dataLogList.GetCount(); LeaveCriticalSection(&m_gLogCriticalSection); if (nCount != 0) { AfxMessageBox("list is not null"); for (i = 0; i < nCount; i++) { EnterCriticalSection(&m_gLogCriticalSection); datalog = m_dataLogList.RemoveHead(); LeaveCriticalSection(&m_gLogCriticalSection); // COleDateTime oletime = datalog.TimeStamp; //get emlbuf and strMd5 strEmlWholePathFile = datalog.emlwholeDir + 
"\\" + datalog.emlPathNm; CFile feml; AfxMessageBox(strEmlWholePathFile); if (!feml.Open(strEmlWholePathFile, CFile::modeRead, NULL)) { AfxMessageBox("open file failure"); continue; } else { emlLength = feml.GetLength(); pemlBuf = new char[emlLength]; if (pemlBuf == NULL || feml.Read(pemlBuf, emlLength) != emlLength) { if (pemlBuf != NULL) delete[] pemlBuf; feml.Close(); continue; } feml.Close(); } //add to DB // ColeDateToStr(oletime,m_strTimeStamp); CString strMailSampleSql = ""; CString strLogDataSql = "";// ,m_strTimeStamp strMailSampleSql.Format("INSERT INTO MailSample (emlPathNm) VALUES('%s') ", datalog.emlPathNm); datalog.emlPathNm = datalog.LogFileNm; datalog.emlPathNm.Replace("log", "eml"); strLogDataSql.Format("insert into DataLog values('%s','%s','%s','%d','%s','%d',\ '%d','%d','%s','%s','%d','%s',\ '%s','%s','%s','%s','%s')", \ datalog.LogFileNm, datalog.strTimeStamp, datalog.Rule_ID, (long)datalog.Category_ID, datalog.Client_IP, (long)datalog.IP_Zone, \ (long)datalog.Size, (long)datalog.Body_Size, datalog.emlPathNm, \ datalog.Condition, (long)datalog.Action, datalog.Sender, \ datalog.Subject, datalog.Receiver, datalog.CC, datalog.BCC, datalog.Log_CMT); try { m_pConnection->BeginTrans(); AfxMessageBox(strLogDataSql); m_pConnection->Execute(_bstr_t(strLogDataSql), NULL, adCmdText); AfxMessageBox("table dataspl suc"); AfxMessageBox(strMailSampleSql); m_pConnection->Execute(_bstr_t(strMailSampleSql), NULL, adCmdText); AfxMessageBox("table MailSample suc"); CString strGetLastOfSample = ""; strGetLastOfSample.Format("select * from MailSample where emlPathNm = '%s'", datalog.emlPathNm); //max(emlID) try { m_pRecordsetSelect->Close(); } catch (...) 
{ } m_pRecordsetSelect->Open(_variant_t(LPCTSTR(strGetLastOfSample)), _variant_t((IDispatch*)m_pConnection, true), adOpenStatic, adLockOptimistic, adCmdText); // BOOL bAppend = AppendChunk("emlContent", strEmlWholePathFile); FieldPtr pField = m_pRecordsetSelect->GetFields()->GetItem("emlContent"); BOOL bAppend = AppendChunk(pField, pemlBuf, emlLength); // m_pRecordsetSelect->GetFields()->GetItem("emlContent")->AppendChunk(_variant_t(testPstr)); if (bAppend) { AfxMessageBox("append success"); } else { AfxMessageBox("append failue"); } m_pRecordsetSelect->Update(); m_pConnection->CommitTrans(); AfxMessageBox("add"); } catch (...) { AfxMessageBox("add failure"); m_pConnection->RollbackTrans(); } //清理过期文件 ++m_CountAfterDel; if (m_CountAfterDel == DELETE_USELESSFILE_NUM) { lastDeleteOutOfTimeFile(datalog.emlwholeDir, datalog.LogFileNm); } //release memory delete[] pemlBuf; //delete the current file try { CString delFile = datalog.emlwholeDir + "\\" + datalog.LogFileNm; DeleteFile(delFile);//delete log file delFile += ".sig"; DeleteFile(delFile);//delete sig file DeleteFile(strEmlWholePathFile); } catch (...) 
{ } } } } } void CWriteLog::lastDeleteOutOfTimeFile(CString& strDir, CString& fileName) { CString str = strDir; CString strFileName = ""; CString strFilePath = ""; str = str + "\\*.*"; CFileFind file_finder; BOOL bl = FALSE; bl = file_finder.FindFile(str); while (bl) { bl = file_finder.FindNextFile(); strFileName = file_finder.GetFileName(); strFilePath = file_finder.GetFilePath(); // AfxMessageBox(strFilePath); if (!file_finder.IsDirectory()) { if (!compareTwoLogFileTime(fileName, strFileName)) { TRACE("-----------------%s--%s---\n", fileName, strFileName); // AfxMessageBox(strFilePath); DeleteFile(strFilePath); } else break;//因估计后面的文件都是没有过期的,所以强行退出,可以省时 } } } //filename1比filename2早就返回TURE,filename2就不应该delete BOOL CWriteLog::compareTwoLogFileTime(CString& filename1, CString& filename2) { CString str1 = ""; CString str2 = ""; int index1 = filename1.Find('.'); int index2 = filename2.Find('.'); if (index2 == -1 || index1 == -1) { return TRUE; } str1 = filename1.Mid((index1 + 1), 4); str2 = filename2.Mid((index2 + 1), 4); if (atoi(str1) < atoi(str2)) { return TRUE; } else { if (atoi(str1) > atoi(str2)) { return FALSE; } else { str1 = filename1.Mid((index1 + 5), 2); str2 = filename2.Mid((index2 + 5), 2); if (atoi(str1) < atoi(str2)) { return TRUE; } else { if (atoi(str1) > atoi(str2)) { return FALSE; } else { str1 = filename1.Mid((index1 + 7), 2); str2 = filename2.Mid((index2 + 7), 2); if (atoi(str1) < atoi(str2)) { return TRUE; } else { if (atoi(str1) > atoi(str2)) { return FALSE; } else { str1 = filename1.Mid((index1 + 9), 2); str2 = filename2.Mid((index2 + 9), 2); if (atoi(str1) < atoi(str2)) { return TRUE; } else { if (atoi(str1) > atoi(str2)) { return FALSE; } else { str1 = filename1.Mid((index1 + 11), 2); str2 = filename2.Mid((index2 + 11), 2); if (atoi(str1) < atoi(str2)) { return TRUE; } else { if (atoi(str1) > atoi(str2)) { return FALSE; } else { str1 = filename1.Mid((index1 + 13), 2); str2 = filename2.Mid((index2 + 13), 2); if (atoi(str1) < atoi(str2)) { 
return TRUE; } else { if (atoi(str1) > atoi(str2)) { return FALSE; } else { //时间相同时 return FALSE; } } } } } } } } } } } } } void CWriteLog::setConnect() { CoInitialize(NULL); //注册表操作 CDBConnetParamRegConfig m_DBConnetParamRegConfig; m_DBConnetParamRegConfig.Load(); CString connectStr = ""; connectStr.Format("driver={SQL Server};Server=%s;DATABASE=%s;UID=%s;PWD=%s", m_DBConnetParamRegConfig.srvName, m_DBConnetParamRegConfig.DBName, m_DBConnetParamRegConfig.loginNm, m_DBConnetParamRegConfig.pwd); HRESULT hr; try { hr = m_pConnection.CreateInstance("ADODB.Connection");///创建Connection对象 if (SUCCEEDED(hr)) { hr = m_pConnection->Open((_bstr_t)connectStr, "", "", adModeUnknown); ///连接数据库 } } catch (_com_error e) { ///捕捉异常 CString errormessage; errormessage.Format("连接数据库失败!\r\n错误信息:%s", e.ErrorMessage()); } } BOOL CWriteLog::AppendChunk(FieldPtr pField, LPVOID lpData, UINT nBytes) { SAFEARRAY FAR *pSafeArray = NULL; SAFEARRAYBOUND rgsabound[1]; try { rgsabound[0].lLbound = 0; rgsabound[0].cElements = nBytes; pSafeArray = SafeArrayCreate(VT_UI1, 1, rgsabound); for (long i = 0; i < (long)nBytes; i++) { UCHAR &chData = ((UCHAR*)lpData)[i]; HRESULT hr = SafeArrayPutElement(pSafeArray, &i, &chData); if (FAILED(hr)) return FALSE; } _variant_t varChunk; varChunk.vt = VT_ARRAY | VT_UI1; varChunk.parray = pSafeArray; return (pField->AppendChunk(varChunk) == S_OK); } catch (_com_error &e) { TRACE(_T("Warning: AppendChunk 方法发生异常. 错误信息: %s; 文件: %s; 行: %d\n"), e.ErrorMessage(), __FILE__, __LINE__); return FALSE; } }
{ "pile_set_name": "Github" }
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""add inspection_started_at and inspection_finished_at

Revision ID: 1e1d5ace7dc6
Revises: 3ae36a5f5131
Create Date: 2015-02-26 10:46:46.861927

"""

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '1e1d5ace7dc6'
down_revision = '3ae36a5f5131'


def upgrade():
    """Add nullable inspection timestamp columns to the ``nodes`` table."""
    for column_name in ('inspection_started_at', 'inspection_finished_at'):
        op.add_column('nodes',
                      sa.Column(column_name, sa.DateTime(), nullable=True))
{ "pile_set_name": "Github" }
// Fast Approximate Anti-Aliasing (FXAA). Samples the 3x3 luma
// neighborhood around tex_coord, estimates the local edge direction,
// and blends along it to soften aliased edges. Returns the filtered color.
vec3 fxaa(sampler2D sampler, vec2 tex_coord) {
    // Tuning constants: maximum search span (in texels) and the
    // reduction terms that damp the blur in low-contrast areas.
    float FXAA_SPAN_MAX = 8.0;
    float FXAA_REDUCE_MUL = 1.0 / FXAA_SPAN_MAX;
    float FXAA_REDUCE_MIN = 1.0 / (FXAA_SPAN_MAX * 16.0);

    vec2 texture_size = textureSize(sampler, 0).xy;

    // Sample the four diagonal neighbors and the center texel.
    vec3 rgbNW = texture(sampler, tex_coord+(vec2(-1.0,-1.0)/texture_size)).xyz;
    vec3 rgbNE = texture(sampler, tex_coord+(vec2(1.0,-1.0)/texture_size)).xyz;
    vec3 rgbSW = texture(sampler, tex_coord+(vec2(-1.0,1.0)/texture_size)).xyz;
    vec3 rgbSE = texture(sampler, tex_coord+(vec2(1.0,1.0)/texture_size)).xyz;
    vec3 rgbM = texture(sampler, tex_coord).xyz;

    // Perceptual luma weights (Rec. 601 coefficients).
    vec3 luma=vec3(0.299, 0.587, 0.114);
    float lumaNW = dot(rgbNW, luma);
    float lumaNE = dot(rgbNE, luma);
    float lumaSW = dot(rgbSW, luma);
    float lumaSE = dot(rgbSE, luma);
    float lumaM = dot(rgbM, luma);

    // Local contrast bounds across the neighborhood.
    float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE)));
    float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE)));

    // Edge direction estimated from the luma gradients.
    vec2 dir;
    dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE));
    dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE));

    // Scale the direction so low-contrast regions get little blur,
    // and clamp the step to +/- FXAA_SPAN_MAX texels.
    float dirReduce = max(
        (lumaNW + lumaNE + lumaSW + lumaSE) * (0.25 * FXAA_REDUCE_MUL),
        FXAA_REDUCE_MIN);
    float rcpDirMin = 1.0/(min(abs(dir.x), abs(dir.y)) + dirReduce);
    dir = min(vec2( FXAA_SPAN_MAX, FXAA_SPAN_MAX),
              max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX),
                  dir * rcpDirMin)) / texture_size;

    // Two-tap blend along the edge (inner samples)...
    vec3 rgbA = (0.5) * (
        texture(sampler, tex_coord.xy + dir * (1.0/3.0 - 0.5)).xyz +
        texture(sampler, tex_coord.xy + dir * (2.0/3.0 - 0.5)).xyz);
    // ...and a wider four-tap blend (adds the outer samples).
    vec3 rgbB = rgbA * (0.5) + (0.25) * (
        texture(sampler, tex_coord.xy + dir * (0.0/3.0 - 0.5)).xyz +
        texture(sampler, tex_coord.xy + dir * (3.0/3.0 - 0.5)).xyz);
    float lumaB = dot(rgbB, luma);

    // If the wide blend strays outside the local contrast range it has
    // sampled across the edge — fall back to the narrow blend.
    vec3 final = vec3(0.0);
    if((lumaB < lumaMin) || (lumaB > lumaMax)) {
        final = rgbA;
    } else {
        final = rgbB;
    }
    return final;
}
{ "pile_set_name": "Github" }
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package kubelet import ( "fmt" "sync" "github.com/golang/groupcache/lru" "k8s.io/apimachinery/pkg/types" kubecontainer "k8s.io/kubernetes/pkg/kubelet/container" ) // ReasonCache stores the failure reason of the latest container start // in a string, keyed by <pod_UID>_<container_name>. The goal is to // propagate this reason to the container status. This endeavor is // "best-effort" for two reasons: // 1. The cache is not persisted. // 2. We use an LRU cache to avoid extra garbage collection work. This // means that some entries may be recycled before a pod has been // deleted. // TODO(random-liu): Use more reliable cache which could collect garbage of failed pod. // TODO(random-liu): Move reason cache to somewhere better. type ReasonCache struct { lock sync.Mutex cache *lru.Cache } // reasonInfo is the cached item in ReasonCache type reasonInfo struct { reason error message string } // maxReasonCacheEntries is the cache entry number in lru cache. 1000 is a proper number // for our 100 pods per node target. If we support more pods per node in the future, we // may want to increase the number. 
const maxReasonCacheEntries = 1000 func NewReasonCache() *ReasonCache { return &ReasonCache{cache: lru.New(maxReasonCacheEntries)} } func (c *ReasonCache) composeKey(uid types.UID, name string) string { return fmt.Sprintf("%s_%s", uid, name) } // add adds error reason into the cache func (c *ReasonCache) add(uid types.UID, name string, reason error, message string) { c.lock.Lock() defer c.lock.Unlock() c.cache.Add(c.composeKey(uid, name), reasonInfo{reason, message}) } // Update updates the reason cache with the SyncPodResult. Only SyncResult with // StartContainer action will change the cache. func (c *ReasonCache) Update(uid types.UID, result kubecontainer.PodSyncResult) { for _, r := range result.SyncResults { if r.Action != kubecontainer.StartContainer { continue } name := r.Target.(string) if r.Error != nil { c.add(uid, name, r.Error, r.Message) } else { c.Remove(uid, name) } } } // Remove removes error reason from the cache func (c *ReasonCache) Remove(uid types.UID, name string) { c.lock.Lock() defer c.lock.Unlock() c.cache.Remove(c.composeKey(uid, name)) } // Get gets error reason from the cache. The return values are error reason, error message and // whether an error reason is found in the cache. If no error reason is found, empty string will // be returned for error reason and error message. func (c *ReasonCache) Get(uid types.UID, name string) (error, string, bool) { c.lock.Lock() defer c.lock.Unlock() value, ok := c.cache.Get(c.composeKey(uid, name)) if !ok { return nil, "", ok } info := value.(reasonInfo) return info.reason, info.message, ok }
{ "pile_set_name": "Github" }
// Copyright 2020 The Prometheus Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package procfs import ( "reflect" "testing" ) func TestParseCgroupString(t *testing.T) { tests := []struct { name string s string shouldErr bool cgroup *Cgroup }{ { name: "cgroups-v1 simple line", s: "10:rdma:/", shouldErr: false, cgroup: &Cgroup{ HierarchyID: 10, Controllers: []string{"rdma"}, Path: "/", }, }, { name: "cgroups-v1 multi-hier line", s: "3:cpu,cpuacct:/user.slice/user-1000.slice/session-10.scope", shouldErr: false, cgroup: &Cgroup{ HierarchyID: 3, Controllers: []string{"cpu", "cpuacct"}, Path: "/user.slice/user-1000.slice/session-10.scope", }, }, { name: "cgroup-v2 line", s: "0::/user.slice/user-1000.slice/user@1000.service/gnome-terminal-server.service", shouldErr: false, cgroup: &Cgroup{ HierarchyID: 0, Controllers: nil, Path: "/user.slice/user-1000.slice/user@1000.service/gnome-terminal-server.service", }, }, { name: "extra fields (such as those added by later kernel versions)", s: "0::/:foobar", shouldErr: false, cgroup: &Cgroup{ HierarchyID: 0, Controllers: nil, Path: "/", }, }, { name: "bad hierarchy ID field", s: "a:cpu:/", shouldErr: true, cgroup: nil, }, } for i, test := range tests { t.Logf("[%02d] test %q", i, test.name) cgroup, err := parseCgroupString(test.s) if test.shouldErr && err == nil { t.Errorf("%s: expected an error, but none occurred", test.name) } if !test.shouldErr && err != nil { t.Errorf("%s: unexpected error: %v", test.name, err) } if want, 
have := test.cgroup, cgroup; !reflect.DeepEqual(want, have) { t.Errorf("cgroup:\nwant:\n%+v\nhave:\n%+v", want, have) } } }
{ "pile_set_name": "Github" }
////
DO NOT EDIT THIS FILE. IT WAS GENERATED.
Manual changes to this file will be lost when it is generated again.
Edit the files in the src/main/asciidoc/ directory instead.
////

== Quick Start

Unresolved directive in openwhisk-readme.adoc - include::./adapters/openwhisk-quick-start.adoc[]

== Examples

The following examples are built based on the details and explanations above, on how to deploy Spring Cloud Functions onto https://openwhisk.apache.org/[OpenWhisk]

* https://github.com/redhat-developer-demos/ow-scf-fruiteason[Spring Cloud Function POF Example]. This example shows how to use Spring Cloud Functions by defining a simple Plain Old Function (POF).
* https://github.com/redhat-developer-demos/ow-scf-greeter[Spring Cloud Function Application Example]. This example shows how to use Spring Cloud Functions with a complete Spring Boot Application that has functions defined by extending `java.util.function.Function` interfaces.

The base Docker images used for the above examples are available https://github.com/redhat-developer-demos/openwhisk-scf-docker[here].
{ "pile_set_name": "Github" }
/* Spacing wrapper for a single form row. */
.form {
  margin: 5px 0;
}

/* Dark-themed text input; composes the global `form-control` class
   (CSS Modules), then overrides its colors for the dark background. */
.input {
  composes: form-control from global;
  height: 30px;
  color: #bbb;
  background-color: #626262;
  border: 1px solid #333;

  /* Switch to a light scheme while the field has focus. */
  &:focus {
    color: #666;
    background-color: white;
  }
}
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 43de5838f293c0c4b9ed1bd014bb898a TextureImporter: fileIDToRecycleName: {} externalObjects: {} serializedVersion: 7 mipmaps: mipMapMode: 0 enableMipMap: 1 sRGBTexture: 1 linearTexture: 0 fadeOut: 0 borderMipMap: 0 mipMapsPreserveCoverage: 0 alphaTestReferenceValue: 0.5 mipMapFadeDistanceStart: 1 mipMapFadeDistanceEnd: 3 bumpmap: convertToNormalMap: 0 externalNormalMap: 0 heightScale: 0.25 normalMapFilter: 0 isReadable: 0 streamingMipmaps: 0 streamingMipmapsPriority: 0 grayScaleToAlpha: 0 generateCubemap: 6 cubemapConvolution: 0 seamlessCubemap: 0 textureFormat: 1 maxTextureSize: 2048 textureSettings: serializedVersion: 2 filterMode: -1 aniso: -1 mipBias: -100 wrapU: 1 wrapV: 1 wrapW: 1 nPOTScale: 1 lightmap: 0 compressionQuality: 50 spriteMode: 0 spriteExtrude: 1 spriteMeshType: 1 alignment: 0 spritePivot: {x: 0.5, y: 0.5} spritePixelsToUnits: 100 spriteBorder: {x: 0, y: 0, z: 0, w: 0} spriteGenerateFallbackPhysicsShape: 1 alphaUsage: 1 alphaIsTransparency: 0 spriteTessellationDetail: -1 textureType: 0 textureShape: 1 singleChannelComponent: 0 maxTextureSizeSet: 0 compressionQualitySet: 0 textureFormatSet: 0 platformSettings: - serializedVersion: 2 buildTarget: DefaultTexturePlatform maxTextureSize: 2048 resizeAlgorithm: 0 textureFormat: -1 textureCompression: 1 compressionQuality: 50 crunchedCompression: 0 allowsAlphaSplitting: 0 overridden: 0 androidETC2FallbackOverride: 0 - serializedVersion: 2 buildTarget: Standalone maxTextureSize: 2048 resizeAlgorithm: 0 textureFormat: -1 textureCompression: 1 compressionQuality: 50 crunchedCompression: 0 allowsAlphaSplitting: 0 overridden: 0 androidETC2FallbackOverride: 0 spriteSheet: serializedVersion: 2 sprites: [] outline: [] physicsShape: [] bones: [] spriteID: vertices: [] indices: edges: [] weights: [] spritePackingTag: pSDRemoveMatte: 0 pSDShowRemoveMatteOption: 0 userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
let postcss = require('postcss') let IMPORTANT = /\s*!important\s*$/i let UNITLESS = { 'box-flex': true, 'box-flex-group': true, 'column-count': true, 'flex': true, 'flex-grow': true, 'flex-positive': true, 'flex-shrink': true, 'flex-negative': true, 'font-weight': true, 'line-clamp': true, 'line-height': true, 'opacity': true, 'order': true, 'orphans': true, 'tab-size': true, 'widows': true, 'z-index': true, 'zoom': true, 'fill-opacity': true, 'stroke-dashoffset': true, 'stroke-opacity': true, 'stroke-width': true } function dashify (str) { return str .replace(/([A-Z])/g, '-$1') .replace(/^ms-/, '-ms-') .toLowerCase() } function decl (parent, name, value) { if (value === false || value === null) return name = dashify(name) if (typeof value === 'number') { if (value === 0 || UNITLESS[name]) { value = value.toString() } else { value += 'px' } } if (name === 'css-float') name = 'float' if (IMPORTANT.test(value)) { value = value.replace(IMPORTANT, '') parent.push(postcss.decl({ prop: name, value, important: true })) } else { parent.push(postcss.decl({ prop: name, value })) } } function atRule (parent, parts, value) { let node = postcss.atRule({ name: parts[1], params: parts[3] || '' }) if (typeof value === 'object') { node.nodes = [] parse(value, node) } parent.push(node) } function parse (obj, parent) { let name, value, node for (name in obj) { value = obj[name] if (value === null || typeof value === 'undefined') { continue } else if (name[0] === '@') { let parts = name.match(/@(\S+)(\s+([\W\w]*)\s*)?/) if (Array.isArray(value)) { for (let i of value) { atRule(parent, parts, i) } } else { atRule(parent, parts, value) } } else if (Array.isArray(value)) { for (let i of value) { decl(parent, name, i) } } else if (typeof value === 'object') { node = postcss.rule({ selector: name }) parse(value, node) parent.push(node) } else { decl(parent, name, value) } } } module.exports = function (obj) { let root = postcss.root() parse(obj, root) return root }
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: f9e8a256b2e009743bcefbdabbb05382 timeCreated: 1504768179 licenseType: Store NativeFormatImporter: mainObjectFileID: 2100000 userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
// Colors $mdb-color-lighten-5: #d0d6e2 !default; $mdb-color-lighten-4: #b1bace !default; $mdb-color-lighten-3: #929fba !default; $mdb-color-lighten-2: #7283a7 !default; $mdb-color-lighten-1: #59698d !default; $mdb-color-base: #45526e !default; $mdb-color-darken-1: #3b465e !default; $mdb-color-darken-2: #2e3951 !default; $mdb-color-darken-3: #1c2a48 !default; $mdb-color-darken-4: #1c2331 !default; $red-lighten-5: #ffebee !default; $red-lighten-4: #ffcdd2 !default; $red-lighten-3: #ef9a9a !default; $red-lighten-2: #e57373 !default; $red-lighten-1: #ef5350 !default; $red-base: #f44336 !default; $red-darken-1: #e53935 !default; $red-darken-2: #d32f2f !default; $red-darken-3: #c62828 !default; $red-darken-4: #b71c1c !default; $red-accent-1: #ff8a80 !default; $red-accent-2: #ff5252 !default; $red-accent-3: #ff1744 !default; $red-accent-4: #d50000 !default; $pink-lighten-5: #fce4ec !default; $pink-lighten-4: #f8bbd0 !default; $pink-lighten-3: #f48fb1 !default; $pink-lighten-2: #f06292 !default; $pink-lighten-1: #ec407a !default; $pink-base: #e91e63 !default; $pink-darken-1: #d81b60 !default; $pink-darken-2: #c2185b !default; $pink-darken-3: #ad1457 !default; $pink-darken-4: #880e4f !default; $pink-accent-1: #ff80ab !default; $pink-accent-2: #ff4081 !default; $pink-accent-3: #f50057 !default; $pink-accent-4: #c51162 !default; $purple-lighten-5: #f3e5f5 !default; $purple-lighten-4: #e1bee7 !default; $purple-lighten-3: #ce93d8 !default; $purple-lighten-2: #ba68c8 !default; $purple-lighten-1: #ab47bc !default; $purple-base: #9c27b0 !default; $purple-darken-1: #8e24aa !default; $purple-darken-2: #7b1fa2 !default; $purple-darken-3: #6a1b9a !default; $purple-darken-4: #4a148c !default; $purple-accent-1: #ea80fc !default; $purple-accent-2: #e040fb !default; $purple-accent-3: #d500f9 !default; $purple-accent-4: #a0f !default; $deep-purple-lighten-5: #ede7f6 !default; $deep-purple-lighten-4: #d1c4e9 !default; $deep-purple-lighten-3: #b39ddb !default; $deep-purple-lighten-2: 
#9575cd !default; $deep-purple-lighten-1: #7e57c2 !default; $deep-purple-base: #673ab7 !default; $deep-purple-darken-1: #5e35b1 !default; $deep-purple-darken-2: #512da8 !default; $deep-purple-darken-3: #4527a0 !default; $deep-purple-darken-4: #311b92 !default; $deep-purple-accent-1: #b388ff !default; $deep-purple-accent-2: #7c4dff !default; $deep-purple-accent-3: #651fff !default; $deep-purple-accent-4: #6200ea !default; $indigo-lighten-5: #e8eaf6 !default; $indigo-lighten-4: #c5cae9 !default; $indigo-lighten-3: #9fa8da !default; $indigo-lighten-2: #7986cb !default; $indigo-lighten-1: #5c6bc0 !default; $indigo-base: #3f51b5 !default; $indigo-darken-1: #3949ab !default; $indigo-darken-2: #303f9f !default; $indigo-darken-3: #283593 !default; $indigo-darken-4: #1a237e !default; $indigo-accent-1: #8c9eff !default; $indigo-accent-2: #536dfe !default; $indigo-accent-3: #3d5afe !default; $indigo-accent-4: #304ffe !default; $blue-lighten-5: #e3f2fd !default; $blue-lighten-4: #bbdefb !default; $blue-lighten-3: #90caf9 !default; $blue-lighten-2: #64b5f6 !default; $blue-lighten-1: #42a5f5 !default; $blue-base: #2196f3 !default; $blue-darken-1: #1e88e5 !default; $blue-darken-2: #1976d2 !default; $blue-darken-3: #1565c0 !default; $blue-darken-4: #0d47a1 !default; $blue-accent-1: #82b1ff !default; $blue-accent-2: #448aff !default; $blue-accent-3: #2979ff !default; $blue-accent-4: #2962ff !default; $light-blue-lighten-5: #e1f5fe !default; $light-blue-lighten-4: #b3e5fc !default; $light-blue-lighten-3: #81d4fa !default; $light-blue-lighten-2: #4fc3f7 !default; $light-blue-lighten-1: #29b6f6 !default; $light-blue-base: #03a9f4 !default; $light-blue-darken-1: #039be5 !default; $light-blue-darken-2: #0288d1 !default; $light-blue-darken-3: #0277bd !default; $light-blue-darken-4: #01579b !default; $light-blue-accent-1: #80d8ff !default; $light-blue-accent-2: #40c4ff !default; $light-blue-accent-3: #00b0ff !default; $light-blue-accent-4: #0091ea !default; $cyan-lighten-5: #e0f7fa 
!default; $cyan-lighten-4: #b2ebf2 !default; $cyan-lighten-3: #80deea !default; $cyan-lighten-2: #4dd0e1 !default; $cyan-lighten-1: #26c6da !default; $cyan-base: #00bcd4 !default; $cyan-darken-1: #00acc1 !default; $cyan-darken-2: #0097a7 !default; $cyan-darken-3: #00838f !default; $cyan-darken-4: #006064 !default; $cyan-accent-1: #84ffff !default; $cyan-accent-2: #18ffff !default; $cyan-accent-3: #00e5ff !default; $cyan-accent-4: #00b8d4 !default; $teal-lighten-5: #e0f2f1 !default; $teal-lighten-4: #b2dfdb !default; $teal-lighten-3: #80cbc4 !default; $teal-lighten-2: #4db6ac !default; $teal-lighten-1: #26a69a !default; $teal-base: #009688 !default; $teal-darken-1: #00897b !default; $teal-darken-2: #00796b !default; $teal-darken-3: #00695c !default; $teal-darken-4: #004d40 !default; $teal-accent-1: #a7ffeb !default; $teal-accent-2: #64ffda !default; $teal-accent-3: #1de9b6 !default; $teal-accent-4: #00bfa5 !default; $green-lighten-5: #e8f5e9 !default; $green-lighten-4: #c8e6c9 !default; $green-lighten-3: #a5d6a7 !default; $green-lighten-2: #81c784 !default; $green-lighten-1: #66bb6a !default; $green-base: #4caf50 !default; $green-darken-1: #43a047 !default; $green-darken-2: #388e3c !default; $green-darken-3: #2e7d32 !default; $green-darken-4: #1b5e20 !default; $green-accent-1: #b9f6ca !default; $green-accent-2: #69f0ae !default; $green-accent-3: #00e676 !default; $green-accent-4: #00c853 !default; $light-green-lighten-5: #f1f8e9 !default; $light-green-lighten-4: #dcedc8 !default; $light-green-lighten-3: #c5e1a5 !default; $light-green-lighten-2: #aed581 !default; $light-green-lighten-1: #9ccc65 !default; $light-green-base: #8bc34a !default; $light-green-darken-1: #7cb342 !default; $light-green-darken-2: #689f38 !default; $light-green-darken-3: #558b2f !default; $light-green-darken-4: #33691e !default; $light-green-accent-1: #ccff90 !default; $light-green-accent-2: #b2ff59 !default; $light-green-accent-3: #76ff03 !default; $light-green-accent-4: #64dd17 !default; 
$lime-lighten-5: #f9fbe7 !default; $lime-lighten-4: #f0f4c3 !default; $lime-lighten-3: #e6ee9c !default; $lime-lighten-2: #dce775 !default; $lime-lighten-1: #d4e157 !default; $lime-base: #cddc39 !default; $lime-darken-1: #c0ca33 !default; $lime-darken-2: #afb42b !default; $lime-darken-3: #9e9d24 !default; $lime-darken-4: #827717 !default; $lime-accent-1: #f4ff81 !default; $lime-accent-2: #eeff41 !default; $lime-accent-3: #c6ff00 !default; $lime-accent-4: #aeea00 !default; $yellow-lighten-5: #fffde7 !default; $yellow-lighten-4: #fff9c4 !default; $yellow-lighten-3: #fff59d !default; $yellow-lighten-2: #fff176 !default; $yellow-lighten-1: #ffee58 !default; $yellow-base: #ffeb3b !default; $yellow-darken-1: #fdd835 !default; $yellow-darken-2: #fbc02d !default; $yellow-darken-3: #f9a825 !default; $yellow-darken-4: #f57f17 !default; $yellow-accent-1: #ffff8d !default; $yellow-accent-2: #ff0 !default; $yellow-accent-3: #ffea00 !default; $yellow-accent-4: #ffd600 !default; $amber-lighten-5: #fff8e1 !default; $amber-lighten-4: #ffecb3 !default; $amber-lighten-3: #ffe082 !default; $amber-lighten-2: #ffd54f !default; $amber-lighten-1: #ffca28 !default; $amber-base: #ffc107 !default; $amber-darken-1: #ffb300 !default; $amber-darken-2: #ffa000 !default; $amber-darken-3: #ff8f00 !default; $amber-darken-4: #ff6f00 !default; $amber-accent-1: #ffe57f !default; $amber-accent-2: #ffd740 !default; $amber-accent-3: #ffc400 !default; $amber-accent-4: #ffab00 !default; $orange-lighten-5: #fff3e0 !default; $orange-lighten-4: #ffe0b2 !default; $orange-lighten-3: #ffcc80 !default; $orange-lighten-2: #ffb74d !default; $orange-lighten-1: #ffa726 !default; $orange-base: #ff9800 !default; $orange-darken-1: #fb8c00 !default; $orange-darken-2: #f57c00 !default; $orange-darken-3: #ef6c00 !default; $orange-darken-4: #e65100 !default; $orange-accent-1: #ffd180 !default; $orange-accent-2: #ffab40 !default; $orange-accent-3: #ff9100 !default; $orange-accent-4: #ff6d00 !default; $deep-orange-lighten-5: 
#fbe9e7 !default; $deep-orange-lighten-4: #ffccbc !default; $deep-orange-lighten-3: #ffab91 !default; $deep-orange-lighten-2: #ff8a65 !default; $deep-orange-lighten-1: #ff7043 !default; $deep-orange-base: #ff5722 !default; $deep-orange-darken-1: #f4511e !default; $deep-orange-darken-2: #e64a19 !default; $deep-orange-darken-3: #d84315 !default; $deep-orange-darken-4: #bf360c !default; $deep-orange-accent-1: #ff9e80 !default; $deep-orange-accent-2: #ff6e40 !default; $deep-orange-accent-3: #ff3d00 !default; $deep-orange-accent-4: #dd2c00 !default; $brown-lighten-5: #efebe9 !default; $brown-lighten-4: #d7ccc8 !default; $brown-lighten-3: #bcaaa4 !default; $brown-lighten-2: #a1887f !default; $brown-lighten-1: #8d6e63 !default; $brown-base: #795548 !default; $brown-darken-1: #6d4c41 !default; $brown-darken-2: #5d4037 !default; $brown-darken-3: #4e342e !default; $brown-darken-4: #3e2723 !default; $blue-grey-lighten-5: #eceff1 !default; $blue-grey-lighten-4: #cfd8dc !default; $blue-grey-lighten-3: #b0bec5 !default; $blue-grey-lighten-2: #90a4ae !default; $blue-grey-lighten-1: #78909c !default; $blue-grey-base: #607d8b !default; $blue-grey-darken-1: #546e7a !default; $blue-grey-darken-2: #455a64 !default; $blue-grey-darken-3: #37474f !default; $blue-grey-darken-4: #263238 !default; $grey-lighten-5: #fafafa !default; $grey-lighten-4: #f5f5f5 !default; $grey-lighten-3: #eee !default; $grey-lighten-2: #e0e0e0 !default; $grey-lighten-1: #bdbdbd !default; $grey-base: #9e9e9e !default; $grey-darken-1: #757575 !default; $grey-darken-2: #616161 !default; $grey-darken-3: #424242 !default; $grey-darken-4: #212121 !default; $black-base: #000 !default; $white-base: #fff !default; $foggy-grey: #4f4f4f !default; $mdb-colors-1: () !default; $mdb-colors-1: map-merge( ( "mdb-color": ( "lighten-5": $mdb-color-lighten-5, "lighten-4": $mdb-color-lighten-4, "lighten-3": $mdb-color-lighten-3, "lighten-2": $mdb-color-lighten-2, "lighten-1": $mdb-color-lighten-1, "base": $mdb-color-base, 
"darken-1": $mdb-color-darken-1, "darken-2": $mdb-color-darken-2, "darken-3": $mdb-color-darken-3, "darken-4": $mdb-color-darken-4 ), "red": ( "lighten-5": $red-lighten-5, "lighten-4": $red-lighten-4, "lighten-3": $red-lighten-3, "lighten-2": $red-lighten-2, "lighten-1": $red-lighten-1, "base": $red-base, "darken-1": $red-darken-1, "darken-2": $red-darken-2, "darken-3": $red-darken-3, "darken-4": $red-darken-4, "accent-1": $red-accent-1, "accent-2": $red-accent-2, "accent-3": $red-accent-3, "accent-4": $red-accent-4 ), "pink": ( "lighten-5": $pink-lighten-5, "lighten-4": $pink-lighten-4, "lighten-3": $pink-lighten-3, "lighten-2": $pink-lighten-2, "lighten-1": $pink-lighten-1, "base": $pink-base, "darken-1": $pink-darken-1, "darken-2": $pink-darken-2, "darken-3": $pink-darken-3, "darken-4": $pink-darken-4, "accent-1": $pink-accent-1, "accent-2": $pink-accent-2, "accent-3": $pink-accent-3, "accent-4": $pink-accent-4 ), "purple": ( "lighten-5": $purple-lighten-5, "lighten-4": $purple-lighten-4, "lighten-3": $purple-lighten-3, "lighten-2": $purple-lighten-2, "lighten-1": $purple-lighten-1, "base": $purple-base, "darken-1": $purple-darken-1, "darken-2": $purple-darken-2, "darken-3": $purple-darken-3, "darken-4": $purple-darken-4, "accent-1": $purple-accent-1, "accent-2": $purple-accent-2, "accent-3": $purple-accent-3, "accent-4": $purple-accent-4 ), "deep-purple": ( "lighten-5": $deep-purple-lighten-5, "lighten-4": $deep-purple-lighten-4, "lighten-3": $deep-purple-lighten-3, "lighten-2": $deep-purple-lighten-2, "lighten-1": $deep-purple-lighten-1, "base": $deep-purple-base, "darken-1": $deep-purple-darken-1, "darken-2": $deep-purple-darken-2, "darken-3": $deep-purple-darken-3, "darken-4": $deep-purple-darken-4, "accent-1": $deep-purple-accent-1, "accent-2": $deep-purple-accent-2, "accent-3": $deep-purple-accent-3, "accent-4": $deep-purple-accent-4 ), "indigo": ( "lighten-5": $indigo-lighten-5, "lighten-4": $indigo-lighten-4, "lighten-3": $indigo-lighten-3, "lighten-2": 
$indigo-lighten-2, "lighten-1": $indigo-lighten-1, "base": $indigo-base, "darken-1": $indigo-darken-1, "darken-2": $indigo-darken-2, "darken-3": $indigo-darken-3, "darken-4": $indigo-darken-4, "accent-1": $indigo-accent-1, "accent-2": $indigo-accent-2, "accent-3": $indigo-accent-3, "accent-4": $indigo-accent-4 ), "blue": ( "lighten-5": $blue-lighten-5, "lighten-4": $blue-lighten-4, "lighten-3": $blue-lighten-3, "lighten-2": $blue-lighten-2, "lighten-1": $blue-lighten-1, "base": $blue-base, "darken-1": $blue-darken-1, "darken-2": $blue-darken-2, "darken-3": $blue-darken-3, "darken-4": $blue-darken-4, "accent-1": $blue-accent-1, "accent-2": $blue-accent-2, "accent-3": $blue-accent-3, "accent-4": $blue-accent-4 ), "light-blue": ( "lighten-5": $light-blue-lighten-5, "lighten-4": $light-blue-lighten-4, "lighten-3": $light-blue-lighten-3, "lighten-2": $light-blue-lighten-2, "lighten-1": $light-blue-lighten-1, "base": $light-blue-base, "darken-1": $light-blue-darken-1, "darken-2": $light-blue-darken-2, "darken-3": $light-blue-darken-3, "darken-4": $light-blue-darken-4, "accent-1": $light-blue-accent-1, "accent-2": $light-blue-accent-2, "accent-3": $light-blue-accent-3, "accent-4": $light-blue-accent-4 ), "cyan": ( "lighten-5": $cyan-lighten-5, "lighten-4": $cyan-lighten-4, "lighten-3": $cyan-lighten-3, "lighten-2": $cyan-lighten-2, "lighten-1": $cyan-lighten-1, "base": $cyan-base, "darken-1": $cyan-darken-1, "darken-2": $cyan-darken-2, "darken-3": $cyan-darken-3, "darken-4": $cyan-darken-4, "accent-1": $cyan-accent-1, "accent-2": $cyan-accent-2, "accent-3": $cyan-accent-3, "accent-4": $cyan-accent-4 ), "teal": ( "lighten-5": $teal-lighten-5, "lighten-4": $teal-lighten-4, "lighten-3": $teal-lighten-3, "lighten-2": $teal-lighten-2, "lighten-1": $teal-lighten-1, "base": $teal-base, "darken-1": $teal-darken-1, "darken-2": $teal-darken-2, "darken-3": $teal-darken-3, "darken-4": $teal-darken-4, "accent-1": $teal-accent-1, "accent-2": $teal-accent-2, "accent-3": $teal-accent-3, 
"accent-4": $teal-accent-4 ), "green": ( "lighten-5": $green-lighten-5, "lighten-4": $green-lighten-4, "lighten-3": $green-lighten-3, "lighten-2": $green-lighten-2, "lighten-1": $green-lighten-1, "base": $green-base, "darken-1": $green-darken-1, "darken-2": $green-darken-2, "darken-3": $green-darken-3, "darken-4": $green-darken-4, "accent-1": $green-accent-1, "accent-2": $green-accent-2, "accent-3": $green-accent-3, "accent-4": $green-accent-4 ), "light-green": ( "lighten-5": $light-green-lighten-5, "lighten-4": $light-green-lighten-4, "lighten-3": $light-green-lighten-3, "lighten-2": $light-green-lighten-2, "lighten-1": $light-green-lighten-1, "base": $light-green-base, "darken-1": $light-green-darken-1, "darken-2": $light-green-darken-2, "darken-3": $light-green-darken-3, "darken-4": $light-green-darken-4, "accent-1": $light-green-accent-1, "accent-2": $light-green-accent-2, "accent-3": $light-green-accent-3, "accent-4": $light-green-accent-4 ), "lime": ( "lighten-5": $lime-lighten-5, "lighten-4": $lime-lighten-4, "lighten-3": $lime-lighten-3, "lighten-2": $lime-lighten-2, "lighten-1": $lime-lighten-1, "base": $lime-base, "darken-1": $lime-darken-1, "darken-2": $lime-darken-2, "darken-3": $lime-darken-3, "darken-4": $lime-darken-4, "accent-1": $lime-accent-1, "accent-2": $lime-accent-2, "accent-3": $lime-accent-3, "accent-4": $lime-accent-4 ), "yellow": ( "lighten-5": $yellow-lighten-5, "lighten-4": $yellow-lighten-4, "lighten-3": $yellow-lighten-3, "lighten-2": $yellow-lighten-2, "lighten-1": $yellow-lighten-1, "base": $yellow-base, "darken-1": $yellow-darken-1, "darken-2": $yellow-darken-2, "darken-3": $yellow-darken-3, "darken-4": $yellow-darken-4, "accent-1": $yellow-accent-1, "accent-2": $yellow-accent-2, "accent-3": $yellow-accent-3, "accent-4": $yellow-accent-4 ), "amber": ( "lighten-5": $amber-lighten-5, "lighten-4": $amber-lighten-4, "lighten-3": $amber-lighten-3, "lighten-2": $amber-lighten-2, "lighten-1": $amber-lighten-1, "base": $amber-base, 
"darken-1": $amber-darken-1, "darken-2": $amber-darken-2, "darken-3": $amber-darken-3, "darken-4": $amber-darken-4, "accent-1": $amber-accent-1, "accent-2": $amber-accent-2, "accent-3": $amber-accent-3, "accent-4": $amber-accent-4 ), "orange": ( "lighten-5": $orange-lighten-5, "lighten-4": $orange-lighten-4, "lighten-3": $orange-lighten-3, "lighten-2": $orange-lighten-2, "lighten-1": $orange-lighten-1, "base": $orange-base, "darken-1": $orange-darken-1, "darken-2": $orange-darken-2, "darken-3": $orange-darken-3, "darken-4": $orange-darken-4, "accent-1": $orange-accent-1, "accent-2": $orange-accent-2, "accent-3": $orange-accent-3, "accent-4": $orange-accent-4 ), "deep-orange": ( "lighten-5": $deep-orange-lighten-5, "lighten-4": $deep-orange-lighten-4, "lighten-3": $deep-orange-lighten-3, "lighten-2": $deep-orange-lighten-2, "lighten-1": $deep-orange-lighten-1, "base": $deep-orange-base, "darken-1": $deep-orange-darken-1, "darken-2": $deep-orange-darken-2, "darken-3": $deep-orange-darken-3, "darken-4": $deep-orange-darken-4, "accent-1": $deep-orange-accent-1, "accent-2": $deep-orange-accent-2, "accent-3": $deep-orange-accent-3, "accent-4": $deep-orange-accent-4 ), "brown": ( "lighten-5": $brown-lighten-5, "lighten-4": $brown-lighten-4, "lighten-3": $brown-lighten-3, "lighten-2": $brown-lighten-2, "lighten-1": $brown-lighten-1, "base": $brown-base, "darken-1": $brown-darken-1, "darken-2": $brown-darken-2, "darken-3": $brown-darken-3, "darken-4": $brown-darken-4 ), "blue-grey": ( "lighten-5": $blue-grey-lighten-5, "lighten-4": $blue-grey-lighten-4, "lighten-3": $blue-grey-lighten-3, "lighten-2": $blue-grey-lighten-2, "lighten-1": $blue-grey-lighten-1, "base": $blue-grey-base, "darken-1": $blue-grey-darken-1, "darken-2": $blue-grey-darken-2, "darken-3": $blue-grey-darken-3, "darken-4": $blue-grey-darken-4 ), "grey": ( "lighten-5": $grey-lighten-5, "lighten-4": $grey-lighten-4, "lighten-3": $grey-lighten-3, "lighten-2": $grey-lighten-2, "lighten-1": $grey-lighten-1, 
"base": $grey-base, "darken-1": $grey-darken-1, "darken-2": $grey-darken-2, "darken-3": $grey-darken-3, "darken-4": $grey-darken-4 ), "black": ( "base": $black-base ), "white": ( "base": $white-base ) ), $mdb-colors-1 ); // Full palette of colors $enable_full_palette: true !default; // Stylish rgba colors $stylish-rgba: ( "rgba-stylish-slight": rgba(62, 69, 81, .1), "rgba-stylish-light": rgba(62, 69, 81, .3), "rgba-stylish-strong": rgba(62, 69, 81, .7), ); // Material colors $primary-color: #4285f4 !default; $primary-color-dark: #0d47a1 !default; $secondary-color: #a6c !default; $secondary-color-dark: #93c !default; $default-color: #2bbbad !default; $default-color-dark: #00695c !default; $info-color: #33b5e5 !default; $info-color-dark: #09c !default; $success-color: #00c851 !default; $success-color-dark: #007e33 !default; $warning-color: #fb3 !default; $warning-color-dark: #f80 !default; $danger-color: #ff3547 !default; $danger-color-dark: #c00 !default; $elegant-color: #2e2e2e !default; $elegant-color-dark: #212121 !default; $stylish-color: #4b515d !default; $stylish-color-dark: #3e4551 !default; $unique-color: #3f729b !default; $unique-color-dark: #1c2331 !default; $special-color: #37474f !default; $special-color-dark: #263238 !default; $white: #fff; $black: #000; $error-color: $red-base !default; $material-colors: () !default; $material-colors: map-merge( ( "primary-color": $primary-color, "primary-color-dark": $primary-color-dark, "secondary-color": $secondary-color, "secondary-color-dark": $secondary-color-dark, "default-color": $default-color, "default-color-dark": $default-color-dark, "info-color": $info-color, "info-color-dark": $info-color-dark, "success-color": $success-color, "success-color-dark": $success-color-dark, "warning-color": $warning-color, "warning-color-dark": $warning-color-dark, "danger-color": $danger-color, "danger-color-dark": $danger-color-dark, "elegant-color": $elegant-color, "elegant-color-dark": $elegant-color-dark, "stylish-color": 
$stylish-color, "stylish-color-dark": $stylish-color-dark, "unique-color": $unique-color, "unique-color-dark": $unique-color-dark, "special-color": $special-color, "special-color-dark": $special-color-dark ), $material-colors ); // Social colors $fb-color: #3b5998 !default; $tw-color: #55acee !default; $gplus-color: #dd4b39 !default; $yt-color: #ed302f !default; $li-color: #0082ca !default; $pin-color: #c61118 !default; $ins-color: #2e5e86 !default; $git-color: #333 !default; $comm-color: #30cfc0 !default; $vk-color: #4c75a3 !default; $drib-color: #ec4a89 !default; $so-color: #ffac44 !default; $slack-color: #56b68b !default; $email-color: #4b515d !default; $redd-color: #ff4500 !default; $twitch-color: #6441a4 !default; $discord-color: #7289da !default; $whatsapp-color:#25d366 !default; $social-colors: () !default; $social-colors: map-merge( ( "fb": $fb-color, "tw": $tw-color, "gplus": $gplus-color, "yt": $yt-color, "li": $li-color, "pin": $pin-color, "ins": $ins-color, "git": $git-color, "comm": $comm-color, "vk": $vk-color, "dribbble": $drib-color, "so": $so-color, "slack": $slack-color, "email": $email-color, "reddit": $redd-color, "twitch": $twitch-color, "discord": $discord-color, "whatsapp": $whatsapp-color ), $social-colors ); // MDB buttons colors $mdb-colors: () !default; $mdb-colors: map-merge( ( "primary": $primary-color, "danger": $danger-color, "warning": $warning-color, "success": $success-color, "info": $info-color, "default": $default-color, "secondary": $secondary-color, "elegant": $elegant-color, "unique": $pink-darken-4, "dark-green": $green-darken-2, "mdb-color": $mdb-color-lighten-1, "red": $red-darken-2, "pink": $pink-lighten-1, "purple": $purple-darken-1, "deep-purple": $deep-purple-darken-2, "indigo": $indigo-base, "blue": $blue-darken-2, "light-blue": $blue-accent-1, "cyan": $cyan-base, "teal": $teal-darken-2, "green": $green-darken-2, "light-green": $light-green-base, "lime": $lime-darken-2, "yellow": $yellow-darken-2, "amber": 
$amber-darken-2, "orange": $orange-darken-2, "deep-orange": $deep-orange-lighten-1, "brown": $brown-base, "grey": $grey-darken-2, "blue-grey": $blue-grey-lighten-1, "dark": $grey-darken-4, "light": $grey-lighten-2, "white": $white-base, "black": $black-base ), $mdb-colors ); // Basic colors $basic: () !default; $basic: map-merge( ( "primary": $primary-color, "danger": $danger-color, "warning": $warning-color, "success": $success-color, "info": $info-color ), $basic ); $basic-mdb-colors: () !default; $basic-mdb-colors: map-merge( ( "primary": $primary-color, "danger": $danger-color, "warning": $warning-color, "success": $success-color, "info": $info-color, "default": $default-color, "secondary": $secondary-color, "dark": $grey-darken-4, "light": $grey-lighten-2 ), $basic-mdb-colors ); $pagination-colors: () !default; $pagination-colors: map-merge( ( "blue": $primary-color, "red": $danger-color, "teal": $default-color, "dark-grey": $special-color, "dark": $elegant-color, "blue-grey": $unique-color, "amber": $amber-darken-4, "purple": $deep-purple-darken-1 ), $pagination-colors ); $ctbc: () !default; $ctbc: map-merge( ( "tabs-cyan": $yellow-base, "tabs-orange": $red-darken-1, "tabs-grey": $white-base, "tabs-pink": $deep-purple-base, "tabs-green": $blue-darken-3, "tabs-primary": $white-base ), $ctbc ); $switch-color-bg: $secondary-color !default; $switch-color-checked-lever-bg: desaturate(lighten($secondary-color, 25%), 25%) !default; $switch-color-unchecked-bg: #f1f1f1 !default; $switch-color-unchecked-lever-bg: #818181 !default; $switch-colors: () !default; $switch-colors: map-merge( ( "bg": $switch-color-bg, "checked-lever-bg": $switch-color-checked-lever-bg, "unchecked-bg": $switch-color-unchecked-bg, "unchecked-lever-bg": $switch-color-unchecked-lever-bg, ), $switch-colors ); $dropdown-colors: () !default; $dropdown-colors: map-merge( ( "primary" : $primary-color, "danger" : $danger-color-dark, "default" : $default-color, "secondary": $secondary-color, "success" : 
$success-color, "info" : $info-color, "warning" : $warning-color, "dark" : map-get($mdb-colors, "elegant"), "ins" : map-get($social-colors, "ins") ), $dropdown-colors ); // Gradients $gradients: () !default; $gradients: map-merge( ( "purple": ( "start": #ff6ec4, "end": #7873f5 ), "peach": ( "start": #ffd86f, "end": #fc6262 ), "aqua": ( "start": #2096ff, "end": #05ffa3 ), "blue": ( "start": #45cafc, "end": $indigo-darken-2 ), ), $gradients ); // Gradients RGBA Version $gradients-rgba: () !default; $gradients-rgba: map-merge( ( "purple": ( "start": rgba(255, 110, 196, .9), "end": rgba(120, 115, 245, .9) ), "peach": ( "start": rgba(255, 216, 111, .9), "end": rgba(252, 98, 98, .9) ), "aqua": ( "start": rgba(32, 150, 255, .9), "end": rgba(5, 255, 163, .9) ), "blue": ( "start": rgba(69, 202, 252, .9), "end": rgba(48, 63, 159, .9) ), ), $gradients-rgba ); $note: () !default; $note: map-merge( ( "primary": ( "bgc": #dfeefd, "border-color": #176ac4 ), "secondary": ( "bgc": #e2e3e5, "border-color": #58595a ), "success": ( "bgc": #e2f0e5, "border-color": #49a75f ), "danger": ( "bgc": #fae7e8, "border-color": #e45460 ), "warning": ( "bgc": #faf4e0, "border-color": #c2a442 ), "info": ( "bgc": #e4f2f5, "border-color": #2492a5 ), "light": ( "bgc": #fefefe, "border-color": #0f0f0f ) ), $note );
{ "pile_set_name": "Github" }
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// SWIG interface definition wrapping pxr/usd/sdf/layerOffset.h so the
// SdfLayerOffset class (and vectors of it) are usable from the target
// scripting language.
%module sdfLayerOffset

// Verbatim C++ emitted into the generated wrapper so it can compile
// against the real SdfLayerOffset declaration.
%{
#include "pxr/usd/sdf/layerOffset.h"
%}

// Pull in SWIG's std::vector support before instantiating the template below.
%include "std_vector.i"

namespace std {
    // Expose std::vector<SdfLayerOffset> to the target language under the
    // name SdfLayerOffsetVector.
    %template(SdfLayerOffsetVector) vector<SdfLayerOffset>;
}

// C++-side alias matching the wrapped template instantiation above, so
// signatures using SdfLayerOffsetVector map onto the %template type.
typedef std::vector<SdfLayerOffset> SdfLayerOffsetVector;

// Wrap everything declared in the header itself.
%include "pxr/usd/sdf/layerOffset.h"
{ "pile_set_name": "Github" }
# frozen_string_literal: true module JekyllAssetPipeline class TestConverter < JekyllAssetPipeline::Converter def self.filetype '.foo' end def convert 'converted' end end class TestCompressor < JekyllAssetPipeline::Compressor def self.filetype '.foo' end def compress 'compressed' end end class TestTemplate < JekyllAssetPipeline::Template def self.filetype '.foo' end def self.priority 1 end def html 'test_template_html' end end end
{ "pile_set_name": "Github" }
#!/bin/bash
# Thin wrapper: run `make run` in this script's own directory, forwarding
# all command-line arguments to the Makefile via RUNFLAGS.
#
# -e: exit on first error; -u: error on unset variables; -o pipefail:
# a pipeline fails if any stage fails. ${SHELLFLAGS} is deliberately
# unquoted so an environment-supplied value can inject extra `set` flags
# (it expands to nothing when unset/empty; expansion happens before `set`
# runs, so -u does not trip on it).
set -eu -o pipefail ${SHELLFLAGS}

# Quote "$(dirname "$0")" so the script still works when its path
# contains spaces (the original unquoted form word-split such paths).
make -C "$(dirname "$0")" run RUNFLAGS="$*"
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2015-2017, Arm Limited and affiliates.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// CppUTest runner for the coap_security_handler unit tests. Each TEST
// below delegates to a test_* function (declared in
// test_coap_security_handler.h) that returns true on success.
#include "CppUTest/TestHarness.h"
#include "test_coap_security_handler.h"
#include "mbedtls_stub.h"
#include "nsdynmemLIB_stub.h"

TEST_GROUP(coap_security_handler)
{
    // Runs before every TEST: reset the dynamic-memory and mbedtls stubs
    // to a known state so tests do not leak configuration into each other.
    void setup() {
        nsdynmemlib_stub.returnCounter = 0;
        mbedtls_stub.useCounter = false;
    }

    // Runs after every TEST; nothing to clean up — stubs are re-armed in setup().
    void teardown() {
    }
};

// Creation/destruction of the security handler.
TEST(coap_security_handler, test_thread_security_create)
{
    CHECK(test_thread_security_create());
}

TEST(coap_security_handler, test_thread_security_destroy)
{
    CHECK(test_thread_security_destroy());
}

// Connection lifecycle: initial connect and continued handshake.
TEST(coap_security_handler, test_coap_security_handler_connect)
{
    CHECK(test_coap_security_handler_connect());
}

TEST(coap_security_handler, test_coap_security_handler_continue_connecting)
{
    CHECK(test_coap_security_handler_continue_connecting());
}

// Data path: send and read over the secured connection.
TEST(coap_security_handler, test_coap_security_handler_send_message)
{
    CHECK(test_coap_security_handler_send_message());
}

// Close-notify alert path.
TEST(coap_security_handler, test_thread_security_send_close_alert)
{
    CHECK(test_thread_security_send_close_alert());
}

TEST(coap_security_handler, test_coap_security_handler_read)
{
    CHECK(test_coap_security_handler_read());
}
{ "pile_set_name": "Github" }
/* Sortable-table widget. Uses CSS system colors (Window, ButtonFace,
 * ThreeDShadow, ...) so the table picks up the native OS theme. */
.sort-table {
  font: Icon;
  border: 1px Solid ThreeDShadow;
  background: Window;
  color: WindowText;
}

/* Header row rendered like a native button strip. */
.sort-table thead {
  background: ButtonFace;
}

.sort-table td {
  padding: 2px 5px;
}

/* Header cells get a raised 3D-button border (light top/left, dark
 * bottom/right) and a default cursor since they act as sort buttons. */
.sort-table thead td {
  border: 1px solid;
  border-color: ButtonHighlight ButtonShadow ButtonShadow ButtonHighlight;
  cursor: default;
}

/* While pressed: invert the border colors and shift the padding so the
 * cell contents appear to sink, mimicking a depressed button. */
.sort-table thead td:active {
  border-color: ButtonShadow ButtonHighlight ButtonHighlight ButtonShadow;
  padding: 3px 4px 1px 6px;
}

/* Columns flagged as non-sortable (_sortType=None) keep the raised look
 * and normal padding even when clicked. */
.sort-table thead td[_sortType=None]:active {
  border-color: ButtonHighlight ButtonShadow ButtonShadow ButtonHighlight;
  padding: 2px 5px;
}

/* Small arrow glyph shown next to the active sort column's label. */
.sort-arrow {
  width: 11px;
  height: 11px;
  background-position: center center;
  background-repeat: no-repeat;
  margin: 0 2px;
}

.sort-arrow.descending {
  background-image: url("../images/downsimple.png");
}

.sort-arrow.ascending {
  background-image: url("../images/upsimple.png");
}
{ "pile_set_name": "Github" }
// Place all the styles related to the galaxies controller here. // They will automatically be included in application.css. // You can use Sass (SCSS) here: http://sass-lang.com/
{ "pile_set_name": "Github" }
// DATA_TEMPLATE: empty_table
// Legacy DataTables unit test for the fnHeaderCallback option, run against
// an Ajax (deferred-render) data source. Relies on the oTest/oSession
// harness provided by the test runner.
oTest.fnStart( "fnHeaderCallback" );

$(document).ready( function () {
	/* Check the default */
	var oTable = $('#example').dataTable( {
		"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
		"bDeferRender": true
	} );
	var oSettings = oTable.fnSettings();
	var mPass, bInit;

	oTest.fnWaitTest(
		"Default should be null",
		null,
		function () { return oSettings.fnHeaderCallback == null; }
	);

	oTest.fnWaitTest(
		"Five arguments passed",
		function () {
			oSession.fnRestore();
			mPass = -1;
			bInit = false;
			$('#example').dataTable( {
				"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
				"bDeferRender": true,
				"fnHeaderCallback": function ( ) {
					mPass = arguments.length;
				},
				"fnInitComplete": function () {
					bInit = true;
				}
			} );
		},
		function () { return mPass == 5 && bInit; }
	);

	/* The header callback is called once for the init and then when the data is added */
	oTest.fnWaitTest(
		"fnHeaderCallback called once per draw",
		function () {
			oSession.fnRestore();
			mPass = 0;
			bInit = false;
			$('#example').dataTable( {
				"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
				"bDeferRender": true,
				"fnHeaderCallback": function ( nHead, aasData, iStart, iEnd, aiDisplay ) {
					mPass++;
				},
				"fnInitComplete": function () {
					bInit = true;
				}
			} );
		},
		function () { return mPass == 2 && bInit; }
	);

	oTest.fnWaitTest(
		"fnRowCallback called on paging (i.e. another draw)",
		function () { $('#example_next').click(); },
		function () { return mPass == 3; }
	);

	oTest.fnWaitTest(
		"fnRowCallback allows us to alter row information",
		function () {
			oSession.fnRestore();
			$('#example').dataTable( {
				"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
				"bDeferRender": true,
				"fnHeaderCallback": function ( nHead, aasData, iStart, iEnd, aiDisplay ) {
					nHead.getElementsByTagName('th')[0].innerHTML = "Displaying "+(iEnd-iStart)+" records";
				}
			} );
		},
		function () { return $('#example thead th:eq(0)').html() == "Displaying 10 records"; }
	);

	oTest.fnWaitTest(
		"iStart correct on first page",
		function () {
			oSession.fnRestore();
			mPass = true;
			$('#example').dataTable( {
				"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
				"bDeferRender": true,
				"fnHeaderCallback": function ( nHead, aasData, iStart, iEnd, aiDisplay ) {
					if ( iStart != 0 )
					{
						mPass = false;
					}
				}
			} );
		},
		function () { return mPass; }
	);

	oTest.fnWaitTest(
		"iStart correct on second page",
		function () {
			oSession.fnRestore();
			mPass = false;
			$('#example').dataTable( {
				"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
				"bDeferRender": true,
				"fnHeaderCallback": function ( nHead, aasData, iStart, iEnd, aiDisplay ) {
					if ( iStart == 10 )
					{
						mPass = true;
					}
				},
				"fnInitComplete": function () {
					$('#example_next').click();
				}
			} );
		},
		function () { return mPass; }
	);

	oTest.fnWaitTest(
		"iEnd correct on second page",
		function () {
			oSession.fnRestore();
			mPass = false;
			$('#example').dataTable( {
				"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
				"bDeferRender": true,
				"fnHeaderCallback": function ( nHead, aasData, iStart, iEnd, aiDisplay ) {
					if ( iEnd == 20 )
					{
						mPass = true;
					}
				},
				"fnInitComplete": function () {
					$('#example_next').click();
				}
			} );
		},
		function () { return mPass; }
	);

	oTest.fnWaitTest(
		"aiDisplay length is full data when not filtered",
		function () {
			oSession.fnRestore();
			mPass = false;
			$('#example').dataTable( {
				"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
				"bDeferRender": true,
				"fnHeaderCallback": function ( nHead, aasData, iStart, iEnd, aiDisplay ) {
					if ( aiDisplay.length == 57 )
					{
						mPass = true;
					}
				}
			} );
		},
		function () { return mPass; }
	);

	oTest.fnWaitTest(
		"aiDisplay length is 9 when filtering on 'Mozilla'",
		function () {
			oSession.fnRestore();
			mPass = false;
			oTable = $('#example').dataTable( {
				"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
				"bDeferRender": true,
				"fnHeaderCallback": function ( nHead, aasData, iStart, iEnd, aiDisplay ) {
					if ( aiDisplay.length == 9 )
					{
						mPass = true;
					}
				}
			} );
			oTable.fnFilter( "Mozilla" );
		},
		function () { return mPass; }
	);

	oTest.fnComplete();
} );
{ "pile_set_name": "Github" }
/*
 * Mount handle
 *
 * Copyright (C) 2011-2020, Omar Choudary <choudary.omar@gmail.com>,
 *                          Joachim Metz <joachim.metz@gmail.com>
 *
 * Refer to AUTHORS for acknowledgements.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program.  If not, see <https://www.gnu.org/licenses/>.
 */

#include <common.h>
#include <memory.h>
#include <narrow_string.h>
#include <system_string.h>
#include <types.h>
#include <wide_string.h>

#include "fvdetools_libbfio.h"
#include "fvdetools_libcerror.h"
#include "fvdetools_libcpath.h"
#include "fvdetools_libfvde.h"
#include "fvdetools_libuna.h"
#include "mount_file_entry.h"
#include "mount_file_system.h"
#include "mount_handle.h"

#if !defined( LIBFVDE_HAVE_BFIO )

extern \
int libfvde_volume_open_file_io_handle(
     libfvde_volume_t *volume,
     libbfio_handle_t *file_io_handle,
     int access_flags,
     libfvde_error_t **error );

#endif /* !defined( LIBFVDE_HAVE_BFIO ) */

/* Copies a string of a decimal value to a 64-bit value
 * An optional leading '-' or '+' sign is supported; a negative value is
 * stored as its two's complement in the unsigned 64-bit output.
 * Returns 1 if successful or -1 on error
 */
int mount_handle_system_string_copy_from_64_bit_in_decimal(
     const system_character_t *string,
     size_t string_size,
     uint64_t *value_64bit,
     libcerror_error_t **error )
{
	static char *function              = "mount_handle_system_string_copy_from_64_bit_in_decimal";
	system_character_t character_value = 0;
	size_t string_index                = 0;
	uint8_t maximum_string_index       = 20;
	int8_t sign                        = 1;

	if( string == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid string.", function );

		return( -1 );
	}
	if( string_size > (size_t) SSIZE_MAX )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_VALUE_EXCEEDS_MAXIMUM,
		 "%s: invalid string size value exceeds maximum.", function );

		return( -1 );
	}
	if( value_64bit == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid value 64-bit.", function );

		return( -1 );
	}
	*value_64bit = 0;

	/* A sign character consumes one position, so extend the maximum
	 * number of characters accordingly.
	 */
	if( string[ string_index ] == (system_character_t) '-' )
	{
		string_index++;
		maximum_string_index++;

		sign = -1;
	}
	else if( string[ string_index ] == (system_character_t) '+' )
	{
		string_index++;
		maximum_string_index++;
	}
	while( string_index < string_size )
	{
		/* Stop at the end-of-string character */
		if( string[ string_index ] == 0 )
		{
			break;
		}
		if( string_index > (size_t) maximum_string_index )
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
			 LIBCERROR_ARGUMENT_ERROR_VALUE_TOO_LARGE,
			 "%s: string too large.", function );

			return( -1 );
		}
		*value_64bit *= 10;

		if( ( string[ string_index ] >= (system_character_t) '0' )
		 && ( string[ string_index ] <= (system_character_t) '9' ) )
		{
			character_value = (system_character_t) ( string[ string_index ] - (system_character_t) '0' );
		}
		else
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
			 LIBCERROR_RUNTIME_ERROR_UNSUPPORTED_VALUE,
			 "%s: unsupported character value: %" PRIc_SYSTEM " at index: %d.",
			 function, string[ string_index ], string_index );

			return( -1 );
		}
		*value_64bit += character_value;

		string_index++;
	}
	if( sign == -1 )
	{
		/* Multiplying by (uint64_t) -1 == UINT64_MAX negates the value
		 * in two's complement arithmetic.
		 */
		*value_64bit *= (uint64_t) -1;
	}
	return( 1 );
}

/* Creates a mount handle
 * Make sure the value mount_handle is referencing, is set to NULL
 * Returns 1 if successful or -1 on error
 */
int mount_handle_initialize(
     mount_handle_t **mount_handle,
     libcerror_error_t **error )
{
	static char *function = "mount_handle_initialize";

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( *mount_handle != NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_VALUE_ALREADY_SET,
		 "%s: invalid mount handle value already set.", function );

		return( -1 );
	}
	*mount_handle = memory_allocate_structure(
	                 mount_handle_t );

	if( *mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_MEMORY,
		 LIBCERROR_MEMORY_ERROR_INSUFFICIENT,
		 "%s: unable to create mount handle.", function );

		goto on_error;
	}
	if( memory_set(
	     *mount_handle,
	     0,
	     sizeof( mount_handle_t ) ) == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_MEMORY,
		 LIBCERROR_MEMORY_ERROR_SET_FAILED,
		 "%s: unable to clear mount handle.", function );

		goto on_error;
	}
	if( mount_file_system_initialize(
	     &( ( *mount_handle )->file_system ),
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_INITIALIZE_FAILED,
		 "%s: unable to initialize file system.", function );

		goto on_error;
	}
	return( 1 );

on_error:
	if( *mount_handle != NULL )
	{
		memory_free(
		 *mount_handle );

		*mount_handle = NULL;
	}
	return( -1 );
}

/* Frees a mount handle
 * The key data is wiped before the handle memory is released.
 * Returns 1 if successful or -1 on error
 */
int mount_handle_free(
     mount_handle_t **mount_handle,
     libcerror_error_t **error )
{
	static char *function = "mount_handle_free";
	int result            = 1;

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( *mount_handle != NULL )
	{
		if( mount_file_system_free(
		     &( ( *mount_handle )->file_system ),
		     error ) != 1 )
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
			 LIBCERROR_RUNTIME_ERROR_FINALIZE_FAILED,
			 "%s: unable to free file system.", function );

			result = -1;
		}
		/* Scrub the key material so it does not linger in freed memory */
		if( memory_set(
		     ( *mount_handle )->key_data,
		     0,
		     16 ) == NULL )
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_MEMORY,
			 LIBCERROR_MEMORY_ERROR_SET_FAILED,
			 "%s: unable to clear key data.", function );

			result = -1;
		}
		memory_free(
		 *mount_handle );

		*mount_handle = NULL;
	}
	return( result );
}

/* Signals the mount handle to abort
 * Returns 1 if successful or -1 on error
 */
int mount_handle_signal_abort(
     mount_handle_t *mount_handle,
     libcerror_error_t **error )
{
	static char *function = "mount_handle_signal_abort";

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( mount_file_system_signal_abort(
	     mount_handle->file_system,
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_SET_FAILED,
		 "%s: unable to signal file system to abort.", function );

		return( -1 );
	}
	return( 1 );
}

/* Sets the encrypted root plist file path
 * The string is referenced, not copied; it must remain valid for the
 * lifetime of the mount handle.
 * Returns 1 if successful or -1 on error
 */
int mount_handle_set_encrypted_root_plist(
     mount_handle_t *mount_handle,
     const system_character_t *string,
     libcerror_error_t **error )
{
	static char *function = "mount_handle_set_encrypted_root_plist";

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( string == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid string.", function );

		return( -1 );
	}
	mount_handle->encrypted_root_plist_path = string;

	return( 1 );
}

/* Sets the keys
 * Expects a base16 (hexadecimal) string of exactly 32 characters,
 * decoded into a 16-byte key.
 * Returns 1 if successful or -1 on error
 */
int mount_handle_set_keys(
     mount_handle_t *mount_handle,
     const system_character_t *string,
     libcerror_error_t **error )
{
	static char *function  = "mount_handle_set_keys";
	size_t string_length   = 0;
	uint32_t base16_variant = 0;

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	string_length = system_string_length(
	                 string );

	if( memory_set(
	     mount_handle->key_data,
	     0,
	     16 ) == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_MEMORY,
		 LIBCERROR_MEMORY_ERROR_SET_FAILED,
		 "%s: unable to clear key data.", function );

		goto on_error;
	}
	base16_variant = LIBUNA_BASE16_VARIANT_RFC4648;

#if defined( HAVE_WIDE_SYSTEM_CHARACTER )
	/* Wide system strings are UTF-16; select the matching byte order */
	if( _BYTE_STREAM_HOST_IS_ENDIAN_BIG )
	{
		base16_variant |= LIBUNA_BASE16_VARIANT_ENCODING_UTF16_BIG_ENDIAN;
	}
	else
	{
		base16_variant |= LIBUNA_BASE16_VARIANT_ENCODING_UTF16_LITTLE_ENDIAN;
	}
#endif
	if( string_length != 32 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_UNSUPPORTED_VALUE,
		 "%s: unsupported string length.", function );

		goto on_error;
	}
	if( libuna_base16_stream_copy_to_byte_stream(
	     (uint8_t *) string,
	     string_length,
	     mount_handle->key_data,
	     16,
	     base16_variant,
	     0,
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_COPY_FAILED,
		 "%s: unable to copy key data.", function );

		goto on_error;
	}
	mount_handle->key_size = 16;

	return( 1 );

on_error:
	/* Do not leave partially decoded key material behind */
	memory_set(
	 mount_handle->key_data,
	 0,
	 16 );

	mount_handle->key_size = 0;

	return( -1 );
}

/* Sets the volume offset
 * The string contains a decimal value; an optional '-' sign is supported.
 * Returns 1 if successful or -1 on error
 */
int mount_handle_set_offset(
     mount_handle_t *mount_handle,
     const system_character_t *string,
     libcerror_error_t **error )
{
	static char *function = "mount_handle_set_offset";
	size_t string_length  = 0;
	uint64_t value_64bit  = 0;

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	string_length = system_string_length(
	                 string );

	/* string_length + 1 includes the end-of-string character */
	if( mount_handle_system_string_copy_from_64_bit_in_decimal(
	     string,
	     string_length + 1,
	     &value_64bit,
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_COPY_FAILED,
		 "%s: unable to copy string to 64-bit decimal.", function );

		return( -1 );
	}
	mount_handle->volume_offset = (off64_t) value_64bit;

	return( 1 );
}

/* Sets the password
 * The string is referenced, not copied.
 * Returns 1 if successful or -1 on error
 */
int mount_handle_set_password(
     mount_handle_t *mount_handle,
     const system_character_t *string,
     libcerror_error_t **error )
{
	static char *function = "mount_handle_set_password";
	size_t string_length  = 0;

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( string == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid string.", function );

		return( -1 );
	}
	string_length = system_string_length(
	                 string );

	mount_handle->password        = string;
	mount_handle->password_length = string_length;

	return( 1 );
}

/* Sets the recovery password
 * The string is referenced, not copied.
 * Returns 1 if successful or -1 on error
 */
int mount_handle_set_recovery_password(
     mount_handle_t *mount_handle,
     const system_character_t *string,
     libcerror_error_t **error )
{
	static char *function = "mount_handle_set_recovery_password";
	size_t string_length  = 0;

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( string == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid string.", function );

		return( -1 );
	}
	string_length = system_string_length(
	                 string );

	mount_handle->recovery_password        = string;
	mount_handle->recovery_password_length = string_length;

	return( 1 );
}

/* Sets the path prefix
 * Returns 1 if successful or -1 on error
 */
int mount_handle_set_path_prefix(
     mount_handle_t *mount_handle,
     const system_character_t *path_prefix,
     size_t path_prefix_size,
     libcerror_error_t **error )
{
	static char *function = "mount_handle_set_path_prefix";

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( mount_file_system_set_path_prefix(
	     mount_handle->file_system,
	     path_prefix,
	     path_prefix_size,
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_SET_FAILED,
		 "%s: unable to set path prefix.", function );

		return( -1 );
	}
	return( 1 );
}

/* Opens the mount handle
 * Opens the volume read-only at the configured volume offset, applying
 * the encrypted root plist, keys and (recovery) password when set.
 * Returns 1 if successful, 0 if not or -1 on error
 */
int mount_handle_open(
     mount_handle_t *mount_handle,
     const system_character_t *filename,
     libcerror_error_t **error )
{
	libbfio_handle_t *file_io_handle = NULL;
	libfvde_volume_t *fvde_volume    = NULL;
	static char *function            = "mount_handle_open";
	size_t filename_length           = 0;
	int result                       = 0;

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( filename == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid filename.", function );

		return( -1 );
	}
	filename_length = system_string_length(
	                   filename );

	if( libbfio_file_range_initialize(
	     &file_io_handle,
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_INITIALIZE_FAILED,
		 "%s: unable to initialize file IO handle.", function );

		goto on_error;
	}
#if defined( HAVE_WIDE_SYSTEM_CHARACTER )
	if( libbfio_file_range_set_name_wide(
	     file_io_handle,
	     filename,
	     filename_length,
	     error ) != 1 )
#else
	if( libbfio_file_range_set_name(
	     file_io_handle,
	     filename,
	     filename_length,
	     error ) != 1 )
#endif
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_IO,
		 LIBCERROR_IO_ERROR_OPEN_FAILED,
		 "%s: unable to set file range name.", function );

		goto on_error;
	}
	/* A range size of 0 means: until the end of the file */
	if( libbfio_file_range_set(
	     file_io_handle,
	     mount_handle->volume_offset,
	     0,
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_IO,
		 LIBCERROR_IO_ERROR_OPEN_FAILED,
		 "%s: unable to set file range offset.", function );

		goto on_error;
	}
	if( libfvde_volume_initialize(
	     &fvde_volume,
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_INITIALIZE_FAILED,
		 "%s: unable to initialize volume.", function );

		goto on_error;
	}
	if( mount_handle->encrypted_root_plist_path != NULL )
	{
#if defined( HAVE_WIDE_SYSTEM_CHARACTER )
		if( libfvde_volume_read_encrypted_root_plist_wide(
		     fvde_volume,
		     mount_handle->encrypted_root_plist_path,
		     error ) != 1 )
#else
		if( libfvde_volume_read_encrypted_root_plist(
		     fvde_volume,
		     mount_handle->encrypted_root_plist_path,
		     error ) != 1 )
#endif
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_IO,
			 LIBCERROR_IO_ERROR_READ_FAILED,
			 "%s: unable to read encrypted root plist.", function );

			goto on_error;
		}
	}
	if( mount_handle->key_size > 0 )
	{
		if( libfvde_volume_set_keys(
		     fvde_volume,
		     mount_handle->key_data,
		     mount_handle->key_size,
		     error ) != 1 )
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
			 LIBCERROR_RUNTIME_ERROR_SET_FAILED,
			 "%s: unable to set keys.", function );

			goto on_error;
		}
	}
	if( mount_handle->password != NULL )
	{
#if defined( HAVE_WIDE_SYSTEM_CHARACTER )
		if( libfvde_volume_set_utf16_password(
		     fvde_volume,
		     (uint16_t *) mount_handle->password,
		     mount_handle->password_length,
		     error ) != 1 )
#else
		if( libfvde_volume_set_utf8_password(
		     fvde_volume,
		     (uint8_t *) mount_handle->password,
		     mount_handle->password_length,
		     error ) != 1 )
#endif
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
			 LIBCERROR_RUNTIME_ERROR_SET_FAILED,
			 "%s: unable to set password.", function );

			goto on_error;
		}
	}
	if( mount_handle->recovery_password != NULL )
	{
#if defined( HAVE_WIDE_SYSTEM_CHARACTER )
		if( libfvde_volume_set_utf16_recovery_password(
		     fvde_volume,
		     (uint16_t *) mount_handle->recovery_password,
		     mount_handle->recovery_password_length,
		     error ) != 1 )
#else
		if( libfvde_volume_set_utf8_recovery_password(
		     fvde_volume,
		     (uint8_t *) mount_handle->recovery_password,
		     mount_handle->recovery_password_length,
		     error ) != 1 )
#endif
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
			 LIBCERROR_RUNTIME_ERROR_SET_FAILED,
			 "%s: unable to set recovery password.", function );

			goto on_error;
		}
	}
	result = libfvde_volume_open_file_io_handle(
	          fvde_volume,
	          file_io_handle,
	          LIBFVDE_OPEN_READ,
	          error );

	if( result == -1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_IO,
		 LIBCERROR_IO_ERROR_OPEN_FAILED,
		 "%s: unable to open volume.", function );

		goto on_error;
	}
	result = libfvde_volume_is_locked(
	          fvde_volume,
	          error );

	if( result == -1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_GET_FAILED,
		 "%s: unable to determine if volume is locked.", function );

		goto on_error;
	}
	mount_handle->is_locked = result;

	if( mount_file_system_append_volume(
	     mount_handle->file_system,
	     fvde_volume,
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_APPEND_FAILED,
		 "%s: unable to append volume to file system.", function );

		goto on_error;
	}
	mount_handle->file_io_handle = file_io_handle;

	return( 1 );

on_error:
	if( fvde_volume != NULL )
	{
		libfvde_volume_free(
		 &fvde_volume,
		 NULL );
	}
	if( file_io_handle != NULL )
	{
		libbfio_handle_free(
		 &file_io_handle,
		 NULL );
	}
	return( -1 );
}

/* Closes the mount handle
 * Returns the 0 if succesful or -1 on error
 */
int mount_handle_close(
     mount_handle_t *mount_handle,
     libcerror_error_t **error )
{
	libfvde_volume_t *fvde_volume = NULL;
	static char *function         = "mount_handle_close";
	int number_of_volumes         = 0;
	int volume_index              = 0;

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( mount_file_system_get_number_of_volumes(
	     mount_handle->file_system,
	     &number_of_volumes,
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_GET_FAILED,
		 "%s: unable to retrieve number of volumes.", function );

		goto on_error;
	}
	/* Iterate down to and including index 0; the previous condition
	 * (volume_index > 0) skipped the first volume and leaked it.
	 */
	for( volume_index = number_of_volumes - 1;
	     volume_index >= 0;
	     volume_index-- )
	{
		if( mount_file_system_get_volume_by_index(
		     mount_handle->file_system,
		     volume_index,
		     &fvde_volume,
		     error ) != 1 )
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
			 LIBCERROR_RUNTIME_ERROR_GET_FAILED,
			 "%s: unable to retrieve volume: %d.", function, volume_index );

			goto on_error;
		}
		/* TODO remove fvde_volume from file system */

		if( libfvde_volume_close(
		     fvde_volume,
		     error ) != 0 )
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_IO,
			 LIBCERROR_IO_ERROR_CLOSE_FAILED,
			 "%s: unable to close volume: %d.", function, volume_index );

			goto on_error;
		}
		if( libfvde_volume_free(
		     &fvde_volume,
		     error ) != 1 )
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
			 LIBCERROR_RUNTIME_ERROR_FINALIZE_FAILED,
			 "%s: unable to free volume: %d.", function, volume_index );

			goto on_error;
		}
	}
	if( libbfio_handle_close(
	     mount_handle->file_io_handle,
	     error ) != 0 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_FINALIZE_FAILED,
		 "%s: unable to close file IO handle.", function );

		goto on_error;
	}
	if( libbfio_handle_free(
	     &( mount_handle->file_io_handle ),
	     error ) != 1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_FINALIZE_FAILED,
		 "%s: unable to free file IO handle.", function );

		goto on_error;
	}
	return( 0 );

on_error:
	if( fvde_volume != NULL )
	{
		libfvde_volume_free(
		 &fvde_volume,
		 NULL );
	}
	return( -1 );
}

/* Determine if the mount handle is locked
 * Returns 1 if locked, 0 if not or -1 on error
 */
int mount_handle_is_locked(
     mount_handle_t *mount_handle,
     libcerror_error_t **error )
{
	static char *function = "mount_handle_is_locked";

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	return( mount_handle->is_locked );
}

/* Retrieves a file entry for a specific path
 * Returns 1 if successful, 0 if no such file entry or -1 on error
 */
int mount_handle_get_file_entry_by_path(
     mount_handle_t *mount_handle,
     const system_character_t *path,
     mount_file_entry_t **file_entry,
     libcerror_error_t **error )
{
	libfvde_volume_t *fvde_volume      = NULL;
	const system_character_t *filename = NULL;
	static char *function              = "mount_handle_get_file_entry_by_path";
	size_t filename_length             = 0;
	size_t path_index                  = 0;
	size_t path_length                 = 0;
	int result                         = 0;

	if( mount_handle == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid mount handle.", function );

		return( -1 );
	}
	if( path == NULL )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_ARGUMENTS,
		 LIBCERROR_ARGUMENT_ERROR_INVALID_VALUE,
		 "%s: invalid path.", function );

		return( -1 );
	}
	path_length = system_string_length(
	               path );

	if( path_length == 0 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_VALUE_OUT_OF_BOUNDS,
		 "%s: invalid path length value out of bounds.", function );

		goto on_error;
	}
	/* Strip a single trailing path separator */
	if( ( path_length >= 2 )
	 && ( path[ path_length - 1 ] == LIBCPATH_SEPARATOR ) )
	{
		path_length--;
	}
	/* Find the last path separator to isolate the filename component */
	path_index = path_length;

	while( path_index > 0 )
	{
		if( path[ path_index ] == LIBCPATH_SEPARATOR )
		{
			break;
		}
		path_index--;
	}
	/* Ignore the name of the root item */
	if( path_length == 0 )
	{
		filename        = _SYSTEM_STRING( "" );
		filename_length = 0;
	}
	else
	{
		filename        = &( path[ path_index + 1 ] );
		filename_length = path_length - ( path_index + 1 );
	}
	result = mount_file_system_get_volume_by_path(
	          mount_handle->file_system,
	          path,
	          path_length,
	          &fvde_volume,
	          error );

	if( result == -1 )
	{
		libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
		 LIBCERROR_RUNTIME_ERROR_GET_FAILED,
		 "%s: unable to retrieve volume.", function );

		goto on_error;
	}
	else if( result != 0 )
	{
		if( mount_file_entry_initialize(
		     file_entry,
		     mount_handle->file_system,
		     filename,
		     filename_length,
		     fvde_volume,
		     error ) != 1 )
		{
			libcerror_error_set( error, LIBCERROR_ERROR_DOMAIN_RUNTIME,
			 LIBCERROR_RUNTIME_ERROR_INITIALIZE_FAILED,
			 "%s: unable to initialize file entry.", function );

			goto on_error;
		}
	}
	return( result );

on_error:
	return( -1 );
}
{ "pile_set_name": "Github" }
// ** I18N

// Calendar pt-BR language
// Author: Fernando Dourado, <fernando.dourado@ig.com.br>
// Encoding: any
// Distributed under the same terms as the calendar itself.

// For translators: please use UTF-8 if possible.  We strongly believe that
// Unicode is the answer to a real internationalized world.  Also please
// include your contact information in the header, as can be seen above.

// full day names
// NOTE: the calendar expects 8 entries: the first day of week is repeated
// at the end, which is why "Domingo" appears twice.
Calendar._DN = new Array
("Domingo",
 "Segunda",
 "Terça",
 "Quarta",
 "Quinta",
 "Sexta",
 "Sábado", // FIX: was misspelled "Sabádo"
 "Domingo");

// Please note that the following array of short day names (and the same goes
// for short month names, _SMN) isn't absolutely necessary.  We give it here
// for exemplification on how one can customize the short day names, but if
// they are simply the first N letters of the full name you can simply say:
//
//   Calendar._SDN_len = N; // short day name length
//   Calendar._SMN_len = N; // short month name length
//
// If N = 3 then this is not needed either since we assume a value of 3 if not
// present, to be compatible with translation files that were written before
// this feature.

// short day names
// [No changes using default values]

// full month names
Calendar._MN = new Array
("Janeiro",
 "Fevereiro",
 "Março",
 "Abril",
 "Maio",
 "Junho",
 "Julho",
 "Agosto",
 "Setembro",
 "Outubro",
 "Novembro",
 "Dezembro");

// short month names
// [No changes using default values]

// tooltips
Calendar._TT = {};
Calendar._TT["INFO"] = "Sobre o calendário";

Calendar._TT["ABOUT"] =
"DHTML Date/Time Selector\n" +
"(c) dynarch.com 2002-2005 / Author: Mihai Bazon\n" + // don't translate this ;-)
"For latest version visit: http://www.dynarch.com/projects/calendar/\n" +
"Distributed under GNU LGPL. See http://gnu.org/licenses/lgpl.html for details."
+ "\n\n" +
"Translate to portuguese Brazil (pt-BR) by Fernando Dourado (fernando.dourado@ig.com.br)\n" +
"Tradução para o português Brasil (pt-BR) por Fernando Dourado (fernando.dourado@ig.com.br)"
+ "\n\n" +
"Selecionar data:\n" +
"- Use as teclas \xab, \xbb para selecionar o ano\n" +
"- Use as teclas " + String.fromCharCode(0x2039) + ", " + String.fromCharCode(0x203a) + " para selecionar o mês\n" +
"- Clique e segure com o mouse em qualquer botão para selecionar rapidamente.";

Calendar._TT["ABOUT_TIME"] = "\n\n" +
"Selecionar hora:\n" +
"- Clique em qualquer uma das partes da hora para aumentar\n" +
"- ou Shift-clique para diminuir\n" +
"- ou clique e arraste para selecionar rapidamente.";

Calendar._TT["PREV_YEAR"] = "Ano anterior (clique e segure para menu)";
Calendar._TT["PREV_MONTH"] = "Mês anterior (clique e segure para menu)";
Calendar._TT["GO_TODAY"] = "Ir para a data atual";
Calendar._TT["NEXT_MONTH"] = "Próximo mês (clique e segure para menu)";
Calendar._TT["NEXT_YEAR"] = "Próximo ano (clique e segure para menu)";
Calendar._TT["SEL_DATE"] = "Selecione uma data";
Calendar._TT["DRAG_TO_MOVE"] = "Clique e segure para mover";
Calendar._TT["PART_TODAY"] = " (hoje)";

// the following is to inform that "%s" is to be the first day of week
// %s will be replaced with the day name.
Calendar._TT["DAY_FIRST"] = "Exibir %s primeiro";

// This may be locale-dependent.  It specifies the week-end days, as an array
// of comma-separated numbers.  The numbers are from 0 to 6: 0 means Sunday, 1
// means Monday, etc.
Calendar._TT["WEEKEND"] = "0,6";

Calendar._TT["CLOSE"] = "Fechar";
Calendar._TT["TODAY"] = "Hoje";
Calendar._TT["TIME_PART"] = "(Shift-)Clique ou arraste para mudar o valor";

// date formats
Calendar._TT["DEF_DATE_FORMAT"] = "%d/%m/%Y";
Calendar._TT["TT_DATE_FORMAT"] = "%d de %B de %Y";

Calendar._TT["WK"] = "sem";
Calendar._TT["TIME"] = "Hora:";
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_202) on Fri Jul 24 11:53:36 GMT 2020 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>SpinFunctionInterpreter (Eclipse RDF4J 3.3.0 API)</title> <meta name="date" content="2020-07-24"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="SpinFunctionInterpreter (Eclipse RDF4J 3.3.0 API)"; } } catch(err) { } //--> var methods = {"i0":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; var tableTab = "tableTab"; var activeTableTab = "activeTableTab"; </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="class-use/SpinFunctionInterpreter.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/eclipse/rdf4j/sail/spin/ConstraintViolationException.html" 
title="class in org.eclipse.rdf4j.sail.spin"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../../org/eclipse/rdf4j/sail/spin/SpinInferencing.html" title="class in org.eclipse.rdf4j.sail.spin"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/eclipse/rdf4j/sail/spin/SpinFunctionInterpreter.html" target="_top">Frames</a></li> <li><a href="SpinFunctionInterpreter.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <!-- ======== START OF CLASS DATA ======== --> <div class="header"> <div class="subTitle">org.eclipse.rdf4j.sail.spin</div> <h2 title="Class SpinFunctionInterpreter" class="title">Class SpinFunctionInterpreter</h2> </div> <div class="contentContainer"> <ul class="inheritance"> <li><a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a></li> <li> <ul class="inheritance"> <li>org.eclipse.rdf4j.sail.spin.SpinFunctionInterpreter</li> </ul> </li> </ul> <div class="description"> <ul 
class="blockList"> <li class="blockList"> <dl> <dt>All Implemented Interfaces:</dt> <dd><a href="../../../../../org/eclipse/rdf4j/query/algebra/evaluation/QueryOptimizer.html" title="interface in org.eclipse.rdf4j.query.algebra.evaluation">QueryOptimizer</a></dd> </dl> <hr> <br> <pre>public class <span class="typeNameLabel">SpinFunctionInterpreter</span> extends <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a> implements <a href="../../../../../org/eclipse/rdf4j/query/algebra/evaluation/QueryOptimizer.html" title="interface in org.eclipse.rdf4j.query.algebra.evaluation">QueryOptimizer</a></pre> <div class="block">QueryOptimizer that adds support for SPIN functions.</div> </li> </ul> </div> <div class="summary"> <ul class="blockList"> <li class="blockList"> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor.summary"> <!-- --> </a> <h3>Constructor Summary</h3> <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation"> <caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colOne" scope="col">Constructor and Description</th> </tr> <tr class="altColor"> <td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/eclipse/rdf4j/sail/spin/SpinFunctionInterpreter.html#SpinFunctionInterpreter-org.eclipse.rdf4j.spin.SpinParser-org.eclipse.rdf4j.query.algebra.evaluation.TripleSource-org.eclipse.rdf4j.query.algebra.evaluation.function.FunctionRegistry-">SpinFunctionInterpreter</a></span>(<a href="../../../../../org/eclipse/rdf4j/spin/SpinParser.html" title="class in org.eclipse.rdf4j.spin">SpinParser</a>&nbsp;parser, <a href="../../../../../org/eclipse/rdf4j/query/algebra/evaluation/TripleSource.html" title="interface in 
org.eclipse.rdf4j.query.algebra.evaluation">TripleSource</a>&nbsp;tripleSource, <a href="../../../../../org/eclipse/rdf4j/query/algebra/evaluation/function/FunctionRegistry.html" title="class in org.eclipse.rdf4j.query.algebra.evaluation.function">FunctionRegistry</a>&nbsp;functionRegistry)</code>&nbsp;</td> </tr> </table> </li> </ul> <!-- ========== METHOD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="method.summary"> <!-- --> </a> <h3>Method Summary</h3> <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation"> <caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tr id="i0" class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/eclipse/rdf4j/sail/spin/SpinFunctionInterpreter.html#optimize-org.eclipse.rdf4j.query.algebra.TupleExpr-org.eclipse.rdf4j.query.Dataset-org.eclipse.rdf4j.query.BindingSet-">optimize</a></span>(<a href="../../../../../org/eclipse/rdf4j/query/algebra/TupleExpr.html" title="interface in org.eclipse.rdf4j.query.algebra">TupleExpr</a>&nbsp;tupleExpr, <a href="../../../../../org/eclipse/rdf4j/query/Dataset.html" title="interface in org.eclipse.rdf4j.query">Dataset</a>&nbsp;dataset, <a href="../../../../../org/eclipse/rdf4j/query/BindingSet.html" title="interface in org.eclipse.rdf4j.query">BindingSet</a>&nbsp;bindings)</code>&nbsp;</td> </tr> </table> <ul class="blockList"> <li 
class="blockList"><a name="methods.inherited.from.class.java.lang.Object"> <!-- --> </a> <h3>Methods inherited from class&nbsp;java.lang.<a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></h3> <code><a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#clone--" title="class or interface in java.lang">clone</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#finalize--" title="class or interface in java.lang">finalize</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#getClass--" title="class or interface in java.lang">getClass</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#notify--" title="class or interface in java.lang">notify</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#notifyAll--" title="class or interface in java.lang">notifyAll</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait--" title="class or interface in java.lang">wait</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait-long-" title="class or interface in java.lang">wait</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait-long-int-" title="class or 
interface in java.lang">wait</a></code></li> </ul> </li> </ul> </li> </ul> </div> <div class="details"> <ul class="blockList"> <li class="blockList"> <!-- ========= CONSTRUCTOR DETAIL ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor.detail"> <!-- --> </a> <h3>Constructor Detail</h3> <a name="SpinFunctionInterpreter-org.eclipse.rdf4j.spin.SpinParser-org.eclipse.rdf4j.query.algebra.evaluation.TripleSource-org.eclipse.rdf4j.query.algebra.evaluation.function.FunctionRegistry-"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>SpinFunctionInterpreter</h4> <pre>public&nbsp;SpinFunctionInterpreter(<a href="../../../../../org/eclipse/rdf4j/spin/SpinParser.html" title="class in org.eclipse.rdf4j.spin">SpinParser</a>&nbsp;parser, <a href="../../../../../org/eclipse/rdf4j/query/algebra/evaluation/TripleSource.html" title="interface in org.eclipse.rdf4j.query.algebra.evaluation">TripleSource</a>&nbsp;tripleSource, <a href="../../../../../org/eclipse/rdf4j/query/algebra/evaluation/function/FunctionRegistry.html" title="class in org.eclipse.rdf4j.query.algebra.evaluation.function">FunctionRegistry</a>&nbsp;functionRegistry)</pre> </li> </ul> </li> </ul> <!-- ============ METHOD DETAIL ========== --> <ul class="blockList"> <li class="blockList"><a name="method.detail"> <!-- --> </a> <h3>Method Detail</h3> <a name="optimize-org.eclipse.rdf4j.query.algebra.TupleExpr-org.eclipse.rdf4j.query.Dataset-org.eclipse.rdf4j.query.BindingSet-"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>optimize</h4> <pre>public&nbsp;void&nbsp;optimize(<a href="../../../../../org/eclipse/rdf4j/query/algebra/TupleExpr.html" title="interface in org.eclipse.rdf4j.query.algebra">TupleExpr</a>&nbsp;tupleExpr, <a href="../../../../../org/eclipse/rdf4j/query/Dataset.html" title="interface in org.eclipse.rdf4j.query">Dataset</a>&nbsp;dataset, <a href="../../../../../org/eclipse/rdf4j/query/BindingSet.html" title="interface in 
org.eclipse.rdf4j.query">BindingSet</a>&nbsp;bindings)</pre> <dl> <dt><span class="overrideSpecifyLabel">Specified by:</span></dt> <dd><code><a href="../../../../../org/eclipse/rdf4j/query/algebra/evaluation/QueryOptimizer.html#optimize-org.eclipse.rdf4j.query.algebra.TupleExpr-org.eclipse.rdf4j.query.Dataset-org.eclipse.rdf4j.query.BindingSet-">optimize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/eclipse/rdf4j/query/algebra/evaluation/QueryOptimizer.html" title="interface in org.eclipse.rdf4j.query.algebra.evaluation">QueryOptimizer</a></code></dd> </dl> </li> </ul> </li> </ul> </li> </ul> </div> </div> <!-- ========= END OF CLASS DATA ========= --> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="class-use/SpinFunctionInterpreter.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/eclipse/rdf4j/sail/spin/ConstraintViolationException.html" title="class in org.eclipse.rdf4j.sail.spin"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../../org/eclipse/rdf4j/sail/spin/SpinInferencing.html" title="class in org.eclipse.rdf4j.sail.spin"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a 
href="../../../../../index.html?org/eclipse/rdf4j/sail/spin/SpinFunctionInterpreter.html" target="_top">Frames</a></li> <li><a href="SpinFunctionInterpreter.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2015-2020 <a href="https://www.eclipse.org/">Eclipse Foundation</a>. All Rights Reserved.</small></p> </body> </html>
{ "pile_set_name": "Github" }
<!doctype html> <html> <head> <title>Paramedic</title> <meta name="apple-mobile-web-app-capable" content="yes"> <meta name="apple-mobile-web-app-status-bar-style" content="black"> <link rel="apple-touch-icon" href="img/apple-touch-icon.png"> <link rel="stylesheet" href="css/libs/style.css"> <link rel="stylesheet" href="css/libs/cubism.css"> <link rel="stylesheet" href="css/app.css"> <!--[if lt IE 9]><script src="http://html5shiv.googlecode.com/svn/trunk/html5.js"></script><![endif]--> </head> <body> <header class="clearfix"> <script type="text/x-handlebars"> {{#with App.cluster}} <section {{bindAttr class=":cluster_name status"}}> <span class="label">Cluster Name</span> <h1>{{cluster_name}}</h1> <p> <span class="label">Status</span> <span {{bindAttr class=":status status"}}>{{status}}</span> <span class="label">Nodes</span> {{number_of_nodes}} <span class="label">Docs</span> {{#bind docs_count}}{{number_with_delimiter docs_count}}{{/bind}} </p> </section> <section class="shards"> <p><span class="label">Shards</span></p> <p><span class="label darker">Primary</span> {{active_primary_shards}}</p> <p><span class="label darker">Relocating</span> {{relocating_shards}}</p> </section> <section class="shards"> <p><span class="label">&nbsp;</span></p> <p><span class="label darker">Initializing</span> {{initializing_shards}}</p> <p><span class="label darker">Unassigned</span> {{unassigned_shards}}</p> </section> {{#with App }} <section {{bindAttr class=":endpoint refresh_allowed:polling-active refreshing:polling-in-progress"}}> {{/with}} <p title="Change ElasticSearch URL"> <span class="label">URL</span> {{view Ember.TextField valueBinding="App.elasticsearch_url" id="elasticsearch_url"}} </p> <p class="refresh"> <span class="icon-refresh"></span> <span class="refresh-label">Refresh every</span> <span class="refresh-controls"> {{view Ember.Select contentBinding="App.refresh_intervals" selectionBinding="App.refresh_interval" optionLabelPath="content.label" 
optionValuePath="content.value"}} <button {{action "toggle" target="App.toggleRefreshAllowedButton"}}>{{App.toggleRefreshAllowedButton.text}}</button> </span> </p> <p class="alerts"> {{view Ember.Checkbox id="sound-enabled" checkedBinding="App.sounds_enabled"}} <label for="sound-enabled" class="dimmed">Sounds?</label> </p> </section> {{/with}} </script> </header> <section id="cubism"> <script type="text/x-handlebars"> <h2 class="label clear"> Stats <small><a {{action "toggle" target="App.toggleChart"}}>{{App.toggleChart.text}}</a></small> </h2> </script> <div id="chart"></div> </section> <section id="nodes-wrapper"> <script type="text/x-handlebars"> <h2 class="label clear"> Nodes <small><a {{action "toggle" target="App.toggleNodes"}}>{{App.toggleNodes.text}}</a></small> </h2> </script> <script type="text/x-handlebars"> {{#unless App.nodes.hidden}} <div id="nodes" class="clearfix"> {{#each App.nodes}} <div {{bindAttr class=":node master"}}> <h3><span {{bindAttr class="master:icon-star"}}></span> {{name}}</h3> <div class="meta"> <p><span class="label">ID: </span>{{id}}</p> <p><span class="label">IP: </span>{{http_address}}</p> <p><span class="label">Host: </span>{{hostname}}</p> <p><span class="label">Load: </span>{{load}}</p> <p><span class="label">Size: </span>{{disk}}</p> <p><span class="label">Docs: </span>{{#bind docs}}{{number_with_delimiter docs}}{{/bind}}</p> <p><span class="label">Heap: </span>{{jvm_heap_used}} <small class="dimmed" title="Heap max">/{{jvm_heap_max}}</small></p> </div> </div> {{/each}} </div> {{/unless}} </script> <br> </section> <section id="indices-wrapper"> <script type="text/x-handlebars"> <h2 class="label clear"> Indices <small><a {{action "toggle" target="App.toggleIndices"}}>{{App.toggleIndices.text}}</a></small> </h2> </script> <script type="text/x-handlebars"> {{#unless App.indices.hidden}} <div id="indices"> {{#each index in App.indices.sorted}} {{#with index}} <div {{bindAttr class=":index :clearfix state show_detail:expanded"}}> 
<div class="basic-info clearfix"> <h3><a {{bindAttr href="url"}} title="Browse Index">{{name}}</a></h3> <div class="buttons"> {{#unless closed}} <button {{action "showDetail" target="App.indices"}}> {{#if show_detail}}Hide details{{else}}Show details{{/if}} </button> {{/unless}} </div> <div class="shards"> {{#each shards}} <div {{bindAttr class=":shard primary state recovery.stage" title="state"}}> {{name}} </div> {{/each}} </div> <div class="meta"> <p> {{settings.number_of_shards}} shards / {{settings.number_of_replicas}} replicas / {{#bind docs}}{{number_with_delimiter docs}}{{/bind}} docs / {{size}} / {{indexing.index_time}} indexing / {{search.query_time}} querying / {{state}} </p> </div> </div> <!-- Shard Allocation --> {{#if show_detail}} <div class="extra-info shards clearfix"> {{#unless show_detail_loaded}} <div class="loading">Waiting for data...</div> {{/unless}} {{#each nodes}} <div class="node"> <h3>{{name}} <small>{{hostname}}</small></h3> <div class="clearfix"> {{#each shards}} <div {{bindAttr class=":shard primary state recovery.stage" title="state"}}> <h3>{{name}}</h3> <div class="meta"> <p>{{state}}/{{recovery.stage}}</p> <p> {{recovery.time}} {{recovery.size}} </p> </div> </div> {{/each}} </div> </div> {{/each}} </div> {{/if}} </div> {{/with}} {{/each}} </div> {{/unless}} </script> </section> <audio id="alert-green" src="audio/alert-green.mp3"></audio> <audio id="alert-yellow" src="audio/alert-yellow.mp3"></audio> <audio id="alert-red" src="audio/alert-red.mp3"></audio> <script src="js/libs/jquery-1.7.2.min.js"></script> <!-- <script src="js/libs/ember-0.9.8.min.js"></script> --> <script src="js/libs/ember-0.9.8.min.js"></script> <script src="js/libs/colorbrewer.min.js"></script> <script src="js/libs/d3.v2.min.js"></script> <script src="js/libs/cubism.v1.js"></script> <script src="js/libs/cubism.elasticsearch.js"></script> <script src="js/app.js"></script> <script src="js/cubism.js"></script> </body> </html>
{ "pile_set_name": "Github" }
//
//     Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//

#import <objc/NSObject.h>

@protocol OS_dispatch_queue, OS_os_log;

// Helper that exposes shared GCD dispatch queues and an os_log handle for the
// framework.  The +sharedHelper accessor suggests a singleton.
//
// NOTE(review): this header was reconstructed by class-dump from a compiled
// binary; the comments below describe only what the declarations show, and
// QoS/attribute guesses derived from the names should be verified against
// the implementation.
@interface STYFrameworkHelper : NSObject
{
    // Backing ivars for the queue/log properties below.
    // Names suggest: a serial utility-QoS queue, a concurrent utility-QoS
    // queue, and a concurrent background-QoS queue -- TODO confirm.
    NSObject<OS_dispatch_queue> *_sharedSerialQueueAtUtility;
    NSObject<OS_dispatch_queue> *_sharedConcurrentQueueAtUtility;
    NSObject<OS_dispatch_queue> *_sharedConcurrentQueueAtBackground;
    NSObject<OS_os_log> *_logHandle;
}

// Shared instance accessor (returns id in the dumped interface).
+ (id)sharedHelper;
// ARC-synthesized destructor emitted by class-dump; not hand-written code.
- (void).cxx_destruct;
@property(retain) NSObject<OS_os_log> *logHandle; // @synthesize logHandle=_logHandle;
@property(retain) NSObject<OS_dispatch_queue> *sharedConcurrentQueueAtBackground; // @synthesize sharedConcurrentQueueAtBackground=_sharedConcurrentQueueAtBackground;
@property(retain) NSObject<OS_dispatch_queue> *sharedConcurrentQueueAtUtility; // @synthesize sharedConcurrentQueueAtUtility=_sharedConcurrentQueueAtUtility;
@property(retain) NSObject<OS_dispatch_queue> *sharedSerialQueueAtUtility; // @synthesize sharedSerialQueueAtUtility=_sharedSerialQueueAtUtility;
// Name suggests the os_log subsystem identifier used for signposts -- implementation not visible here.
- (id)subsystemForSignposts;
@end
{ "pile_set_name": "Github" }
# eVars Schema ``` https://ns.adobe.com/experience/analytics/evars ``` Custom eVars. | [Abstract](../../../../abstract.md) | [Extensible](../../../../extensions.md) | [Status](../../../../status.md) | [Identifiable](../../../../id.md) | [Custom Properties](../../../../extensions.md) | [Additional Properties](../../../../extensions.md) | Defined In | |-------------------------------------|-----------------------------------------|---------------------------------|-----------------------------------|------------------------------------------------|----------------------------------------------------|------------| | Can be instantiated | Yes | Experimental | No | Forbidden | Permitted | [adobe/experience/analytics/evars.schema.json](adobe/experience/analytics/evars.schema.json) | ## Schema Hierarchy * eVars `https://ns.adobe.com/experience/analytics/evars` * [Extensibility base schema](../../../datatypes/extensible.schema.md) `https://ns.adobe.com/xdm/common/extensible` # eVars Properties | Property | Type | Required | Defined by | |----------|------|----------|------------| | [xdm:eVar1](#xdmevar1) | `string` | Optional | eVars (this schema) | | [xdm:eVar10](#xdmevar10) | `string` | Optional | eVars (this schema) | | [xdm:eVar100](#xdmevar100) | `string` | Optional | eVars (this schema) | | [xdm:eVar101](#xdmevar101) | `string` | Optional | eVars (this schema) | | [xdm:eVar102](#xdmevar102) | `string` | Optional | eVars (this schema) | | [xdm:eVar103](#xdmevar103) | `string` | Optional | eVars (this schema) | | [xdm:eVar104](#xdmevar104) | `string` | Optional | eVars (this schema) | | [xdm:eVar105](#xdmevar105) | `string` | Optional | eVars (this schema) | | [xdm:eVar106](#xdmevar106) | `string` | Optional | eVars (this schema) | | [xdm:eVar107](#xdmevar107) | `string` | Optional | eVars (this schema) | | [xdm:eVar108](#xdmevar108) | `string` | Optional | eVars (this schema) | | [xdm:eVar109](#xdmevar109) | `string` | Optional | eVars (this schema) | | 
[xdm:eVar11](#xdmevar11) | `string` | Optional | eVars (this schema) | | [xdm:eVar110](#xdmevar110) | `string` | Optional | eVars (this schema) | | [xdm:eVar111](#xdmevar111) | `string` | Optional | eVars (this schema) | | [xdm:eVar112](#xdmevar112) | `string` | Optional | eVars (this schema) | | [xdm:eVar113](#xdmevar113) | `string` | Optional | eVars (this schema) | | [xdm:eVar114](#xdmevar114) | `string` | Optional | eVars (this schema) | | [xdm:eVar115](#xdmevar115) | `string` | Optional | eVars (this schema) | | [xdm:eVar116](#xdmevar116) | `string` | Optional | eVars (this schema) | | [xdm:eVar117](#xdmevar117) | `string` | Optional | eVars (this schema) | | [xdm:eVar118](#xdmevar118) | `string` | Optional | eVars (this schema) | | [xdm:eVar119](#xdmevar119) | `string` | Optional | eVars (this schema) | | [xdm:eVar12](#xdmevar12) | `string` | Optional | eVars (this schema) | | [xdm:eVar120](#xdmevar120) | `string` | Optional | eVars (this schema) | | [xdm:eVar121](#xdmevar121) | `string` | Optional | eVars (this schema) | | [xdm:eVar122](#xdmevar122) | `string` | Optional | eVars (this schema) | | [xdm:eVar123](#xdmevar123) | `string` | Optional | eVars (this schema) | | [xdm:eVar124](#xdmevar124) | `string` | Optional | eVars (this schema) | | [xdm:eVar125](#xdmevar125) | `string` | Optional | eVars (this schema) | | [xdm:eVar126](#xdmevar126) | `string` | Optional | eVars (this schema) | | [xdm:eVar127](#xdmevar127) | `string` | Optional | eVars (this schema) | | [xdm:eVar128](#xdmevar128) | `string` | Optional | eVars (this schema) | | [xdm:eVar129](#xdmevar129) | `string` | Optional | eVars (this schema) | | [xdm:eVar13](#xdmevar13) | `string` | Optional | eVars (this schema) | | [xdm:eVar130](#xdmevar130) | `string` | Optional | eVars (this schema) | | [xdm:eVar131](#xdmevar131) | `string` | Optional | eVars (this schema) | | [xdm:eVar132](#xdmevar132) | `string` | Optional | eVars (this schema) | | [xdm:eVar133](#xdmevar133) | `string` | Optional | 
eVars (this schema) | | [xdm:eVar134](#xdmevar134) | `string` | Optional | eVars (this schema) | | [xdm:eVar135](#xdmevar135) | `string` | Optional | eVars (this schema) | | [xdm:eVar136](#xdmevar136) | `string` | Optional | eVars (this schema) | | [xdm:eVar137](#xdmevar137) | `string` | Optional | eVars (this schema) | | [xdm:eVar138](#xdmevar138) | `string` | Optional | eVars (this schema) | | [xdm:eVar139](#xdmevar139) | `string` | Optional | eVars (this schema) | | [xdm:eVar14](#xdmevar14) | `string` | Optional | eVars (this schema) | | [xdm:eVar140](#xdmevar140) | `string` | Optional | eVars (this schema) | | [xdm:eVar141](#xdmevar141) | `string` | Optional | eVars (this schema) | | [xdm:eVar142](#xdmevar142) | `string` | Optional | eVars (this schema) | | [xdm:eVar143](#xdmevar143) | `string` | Optional | eVars (this schema) | | [xdm:eVar144](#xdmevar144) | `string` | Optional | eVars (this schema) | | [xdm:eVar145](#xdmevar145) | `string` | Optional | eVars (this schema) | | [xdm:eVar146](#xdmevar146) | `string` | Optional | eVars (this schema) | | [xdm:eVar147](#xdmevar147) | `string` | Optional | eVars (this schema) | | [xdm:eVar148](#xdmevar148) | `string` | Optional | eVars (this schema) | | [xdm:eVar149](#xdmevar149) | `string` | Optional | eVars (this schema) | | [xdm:eVar15](#xdmevar15) | `string` | Optional | eVars (this schema) | | [xdm:eVar150](#xdmevar150) | `string` | Optional | eVars (this schema) | | [xdm:eVar151](#xdmevar151) | `string` | Optional | eVars (this schema) | | [xdm:eVar152](#xdmevar152) | `string` | Optional | eVars (this schema) | | [xdm:eVar153](#xdmevar153) | `string` | Optional | eVars (this schema) | | [xdm:eVar154](#xdmevar154) | `string` | Optional | eVars (this schema) | | [xdm:eVar155](#xdmevar155) | `string` | Optional | eVars (this schema) | | [xdm:eVar156](#xdmevar156) | `string` | Optional | eVars (this schema) | | [xdm:eVar157](#xdmevar157) | `string` | Optional | eVars (this schema) | | [xdm:eVar158](#xdmevar158) | 
`string` | Optional | eVars (this schema) | | [xdm:eVar159](#xdmevar159) | `string` | Optional | eVars (this schema) | | [xdm:eVar16](#xdmevar16) | `string` | Optional | eVars (this schema) | | [xdm:eVar160](#xdmevar160) | `string` | Optional | eVars (this schema) | | [xdm:eVar161](#xdmevar161) | `string` | Optional | eVars (this schema) | | [xdm:eVar162](#xdmevar162) | `string` | Optional | eVars (this schema) | | [xdm:eVar163](#xdmevar163) | `string` | Optional | eVars (this schema) | | [xdm:eVar164](#xdmevar164) | `string` | Optional | eVars (this schema) | | [xdm:eVar165](#xdmevar165) | `string` | Optional | eVars (this schema) | | [xdm:eVar166](#xdmevar166) | `string` | Optional | eVars (this schema) | | [xdm:eVar167](#xdmevar167) | `string` | Optional | eVars (this schema) | | [xdm:eVar168](#xdmevar168) | `string` | Optional | eVars (this schema) | | [xdm:eVar169](#xdmevar169) | `string` | Optional | eVars (this schema) | | [xdm:eVar17](#xdmevar17) | `string` | Optional | eVars (this schema) | | [xdm:eVar170](#xdmevar170) | `string` | Optional | eVars (this schema) | | [xdm:eVar171](#xdmevar171) | `string` | Optional | eVars (this schema) | | [xdm:eVar172](#xdmevar172) | `string` | Optional | eVars (this schema) | | [xdm:eVar173](#xdmevar173) | `string` | Optional | eVars (this schema) | | [xdm:eVar174](#xdmevar174) | `string` | Optional | eVars (this schema) | | [xdm:eVar175](#xdmevar175) | `string` | Optional | eVars (this schema) | | [xdm:eVar176](#xdmevar176) | `string` | Optional | eVars (this schema) | | [xdm:eVar177](#xdmevar177) | `string` | Optional | eVars (this schema) | | [xdm:eVar178](#xdmevar178) | `string` | Optional | eVars (this schema) | | [xdm:eVar179](#xdmevar179) | `string` | Optional | eVars (this schema) | | [xdm:eVar18](#xdmevar18) | `string` | Optional | eVars (this schema) | | [xdm:eVar180](#xdmevar180) | `string` | Optional | eVars (this schema) | | [xdm:eVar181](#xdmevar181) | `string` | Optional | eVars (this schema) | | 
[xdm:eVar182](#xdmevar182) | `string` | Optional | eVars (this schema) | | [xdm:eVar183](#xdmevar183) | `string` | Optional | eVars (this schema) | | [xdm:eVar184](#xdmevar184) | `string` | Optional | eVars (this schema) | | [xdm:eVar185](#xdmevar185) | `string` | Optional | eVars (this schema) | | [xdm:eVar186](#xdmevar186) | `string` | Optional | eVars (this schema) | | [xdm:eVar187](#xdmevar187) | `string` | Optional | eVars (this schema) | | [xdm:eVar188](#xdmevar188) | `string` | Optional | eVars (this schema) | | [xdm:eVar189](#xdmevar189) | `string` | Optional | eVars (this schema) | | [xdm:eVar19](#xdmevar19) | `string` | Optional | eVars (this schema) | | [xdm:eVar190](#xdmevar190) | `string` | Optional | eVars (this schema) | | [xdm:eVar191](#xdmevar191) | `string` | Optional | eVars (this schema) | | [xdm:eVar192](#xdmevar192) | `string` | Optional | eVars (this schema) | | [xdm:eVar193](#xdmevar193) | `string` | Optional | eVars (this schema) | | [xdm:eVar194](#xdmevar194) | `string` | Optional | eVars (this schema) | | [xdm:eVar195](#xdmevar195) | `string` | Optional | eVars (this schema) | | [xdm:eVar196](#xdmevar196) | `string` | Optional | eVars (this schema) | | [xdm:eVar197](#xdmevar197) | `string` | Optional | eVars (this schema) | | [xdm:eVar198](#xdmevar198) | `string` | Optional | eVars (this schema) | | [xdm:eVar199](#xdmevar199) | `string` | Optional | eVars (this schema) | | [xdm:eVar2](#xdmevar2) | `string` | Optional | eVars (this schema) | | [xdm:eVar20](#xdmevar20) | `string` | Optional | eVars (this schema) | | [xdm:eVar200](#xdmevar200) | `string` | Optional | eVars (this schema) | | [xdm:eVar201](#xdmevar201) | `string` | Optional | eVars (this schema) | | [xdm:eVar202](#xdmevar202) | `string` | Optional | eVars (this schema) | | [xdm:eVar203](#xdmevar203) | `string` | Optional | eVars (this schema) | | [xdm:eVar204](#xdmevar204) | `string` | Optional | eVars (this schema) | | [xdm:eVar205](#xdmevar205) | `string` | Optional | eVars 
(this schema) | | [xdm:eVar206](#xdmevar206) | `string` | Optional | eVars (this schema) | | [xdm:eVar207](#xdmevar207) | `string` | Optional | eVars (this schema) | | [xdm:eVar208](#xdmevar208) | `string` | Optional | eVars (this schema) | | [xdm:eVar209](#xdmevar209) | `string` | Optional | eVars (this schema) | | [xdm:eVar21](#xdmevar21) | `string` | Optional | eVars (this schema) | | [xdm:eVar210](#xdmevar210) | `string` | Optional | eVars (this schema) | | [xdm:eVar211](#xdmevar211) | `string` | Optional | eVars (this schema) | | [xdm:eVar212](#xdmevar212) | `string` | Optional | eVars (this schema) | | [xdm:eVar213](#xdmevar213) | `string` | Optional | eVars (this schema) | | [xdm:eVar214](#xdmevar214) | `string` | Optional | eVars (this schema) | | [xdm:eVar215](#xdmevar215) | `string` | Optional | eVars (this schema) | | [xdm:eVar216](#xdmevar216) | `string` | Optional | eVars (this schema) | | [xdm:eVar217](#xdmevar217) | `string` | Optional | eVars (this schema) | | [xdm:eVar218](#xdmevar218) | `string` | Optional | eVars (this schema) | | [xdm:eVar219](#xdmevar219) | `string` | Optional | eVars (this schema) | | [xdm:eVar22](#xdmevar22) | `string` | Optional | eVars (this schema) | | [xdm:eVar220](#xdmevar220) | `string` | Optional | eVars (this schema) | | [xdm:eVar221](#xdmevar221) | `string` | Optional | eVars (this schema) | | [xdm:eVar222](#xdmevar222) | `string` | Optional | eVars (this schema) | | [xdm:eVar223](#xdmevar223) | `string` | Optional | eVars (this schema) | | [xdm:eVar224](#xdmevar224) | `string` | Optional | eVars (this schema) | | [xdm:eVar225](#xdmevar225) | `string` | Optional | eVars (this schema) | | [xdm:eVar226](#xdmevar226) | `string` | Optional | eVars (this schema) | | [xdm:eVar227](#xdmevar227) | `string` | Optional | eVars (this schema) | | [xdm:eVar228](#xdmevar228) | `string` | Optional | eVars (this schema) | | [xdm:eVar229](#xdmevar229) | `string` | Optional | eVars (this schema) | | [xdm:eVar23](#xdmevar23) | `string` 
| Optional | eVars (this schema) | | [xdm:eVar230](#xdmevar230) | `string` | Optional | eVars (this schema) | | [xdm:eVar231](#xdmevar231) | `string` | Optional | eVars (this schema) | | [xdm:eVar232](#xdmevar232) | `string` | Optional | eVars (this schema) | | [xdm:eVar233](#xdmevar233) | `string` | Optional | eVars (this schema) | | [xdm:eVar234](#xdmevar234) | `string` | Optional | eVars (this schema) | | [xdm:eVar235](#xdmevar235) | `string` | Optional | eVars (this schema) | | [xdm:eVar236](#xdmevar236) | `string` | Optional | eVars (this schema) | | [xdm:eVar237](#xdmevar237) | `string` | Optional | eVars (this schema) | | [xdm:eVar238](#xdmevar238) | `string` | Optional | eVars (this schema) | | [xdm:eVar239](#xdmevar239) | `string` | Optional | eVars (this schema) | | [xdm:eVar24](#xdmevar24) | `string` | Optional | eVars (this schema) | | [xdm:eVar240](#xdmevar240) | `string` | Optional | eVars (this schema) | | [xdm:eVar241](#xdmevar241) | `string` | Optional | eVars (this schema) | | [xdm:eVar242](#xdmevar242) | `string` | Optional | eVars (this schema) | | [xdm:eVar243](#xdmevar243) | `string` | Optional | eVars (this schema) | | [xdm:eVar244](#xdmevar244) | `string` | Optional | eVars (this schema) | | [xdm:eVar245](#xdmevar245) | `string` | Optional | eVars (this schema) | | [xdm:eVar246](#xdmevar246) | `string` | Optional | eVars (this schema) | | [xdm:eVar247](#xdmevar247) | `string` | Optional | eVars (this schema) | | [xdm:eVar248](#xdmevar248) | `string` | Optional | eVars (this schema) | | [xdm:eVar249](#xdmevar249) | `string` | Optional | eVars (this schema) | | [xdm:eVar25](#xdmevar25) | `string` | Optional | eVars (this schema) | | [xdm:eVar250](#xdmevar250) | `string` | Optional | eVars (this schema) | | [xdm:eVar26](#xdmevar26) | `string` | Optional | eVars (this schema) | | [xdm:eVar27](#xdmevar27) | `string` | Optional | eVars (this schema) | | [xdm:eVar28](#xdmevar28) | `string` | Optional | eVars (this schema) | | 
[xdm:eVar29](#xdmevar29) | `string` | Optional | eVars (this schema) | | [xdm:eVar3](#xdmevar3) | `string` | Optional | eVars (this schema) | | [xdm:eVar30](#xdmevar30) | `string` | Optional | eVars (this schema) | | [xdm:eVar31](#xdmevar31) | `string` | Optional | eVars (this schema) | | [xdm:eVar32](#xdmevar32) | `string` | Optional | eVars (this schema) | | [xdm:eVar33](#xdmevar33) | `string` | Optional | eVars (this schema) | | [xdm:eVar34](#xdmevar34) | `string` | Optional | eVars (this schema) | | [xdm:eVar35](#xdmevar35) | `string` | Optional | eVars (this schema) | | [xdm:eVar36](#xdmevar36) | `string` | Optional | eVars (this schema) | | [xdm:eVar37](#xdmevar37) | `string` | Optional | eVars (this schema) | | [xdm:eVar38](#xdmevar38) | `string` | Optional | eVars (this schema) | | [xdm:eVar39](#xdmevar39) | `string` | Optional | eVars (this schema) | | [xdm:eVar4](#xdmevar4) | `string` | Optional | eVars (this schema) | | [xdm:eVar40](#xdmevar40) | `string` | Optional | eVars (this schema) | | [xdm:eVar41](#xdmevar41) | `string` | Optional | eVars (this schema) | | [xdm:eVar42](#xdmevar42) | `string` | Optional | eVars (this schema) | | [xdm:eVar43](#xdmevar43) | `string` | Optional | eVars (this schema) | | [xdm:eVar44](#xdmevar44) | `string` | Optional | eVars (this schema) | | [xdm:eVar45](#xdmevar45) | `string` | Optional | eVars (this schema) | | [xdm:eVar46](#xdmevar46) | `string` | Optional | eVars (this schema) | | [xdm:eVar47](#xdmevar47) | `string` | Optional | eVars (this schema) | | [xdm:eVar48](#xdmevar48) | `string` | Optional | eVars (this schema) | | [xdm:eVar49](#xdmevar49) | `string` | Optional | eVars (this schema) | | [xdm:eVar5](#xdmevar5) | `string` | Optional | eVars (this schema) | | [xdm:eVar50](#xdmevar50) | `string` | Optional | eVars (this schema) | | [xdm:eVar51](#xdmevar51) | `string` | Optional | eVars (this schema) | | [xdm:eVar52](#xdmevar52) | `string` | Optional | eVars (this schema) | | [xdm:eVar53](#xdmevar53) | 
`string` | Optional | eVars (this schema) | | [xdm:eVar54](#xdmevar54) | `string` | Optional | eVars (this schema) | | [xdm:eVar55](#xdmevar55) | `string` | Optional | eVars (this schema) | | [xdm:eVar56](#xdmevar56) | `string` | Optional | eVars (this schema) | | [xdm:eVar57](#xdmevar57) | `string` | Optional | eVars (this schema) | | [xdm:eVar58](#xdmevar58) | `string` | Optional | eVars (this schema) | | [xdm:eVar59](#xdmevar59) | `string` | Optional | eVars (this schema) | | [xdm:eVar6](#xdmevar6) | `string` | Optional | eVars (this schema) | | [xdm:eVar60](#xdmevar60) | `string` | Optional | eVars (this schema) | | [xdm:eVar61](#xdmevar61) | `string` | Optional | eVars (this schema) | | [xdm:eVar62](#xdmevar62) | `string` | Optional | eVars (this schema) | | [xdm:eVar63](#xdmevar63) | `string` | Optional | eVars (this schema) | | [xdm:eVar64](#xdmevar64) | `string` | Optional | eVars (this schema) | | [xdm:eVar65](#xdmevar65) | `string` | Optional | eVars (this schema) | | [xdm:eVar66](#xdmevar66) | `string` | Optional | eVars (this schema) | | [xdm:eVar67](#xdmevar67) | `string` | Optional | eVars (this schema) | | [xdm:eVar68](#xdmevar68) | `string` | Optional | eVars (this schema) | | [xdm:eVar69](#xdmevar69) | `string` | Optional | eVars (this schema) | | [xdm:eVar7](#xdmevar7) | `string` | Optional | eVars (this schema) | | [xdm:eVar70](#xdmevar70) | `string` | Optional | eVars (this schema) | | [xdm:eVar71](#xdmevar71) | `string` | Optional | eVars (this schema) | | [xdm:eVar72](#xdmevar72) | `string` | Optional | eVars (this schema) | | [xdm:eVar73](#xdmevar73) | `string` | Optional | eVars (this schema) | | [xdm:eVar74](#xdmevar74) | `string` | Optional | eVars (this schema) | | [xdm:eVar75](#xdmevar75) | `string` | Optional | eVars (this schema) | | [xdm:eVar76](#xdmevar76) | `string` | Optional | eVars (this schema) | | [xdm:eVar77](#xdmevar77) | `string` | Optional | eVars (this schema) | | [xdm:eVar78](#xdmevar78) | `string` | Optional | eVars 
(this schema) | | [xdm:eVar79](#xdmevar79) | `string` | Optional | eVars (this schema) | | [xdm:eVar8](#xdmevar8) | `string` | Optional | eVars (this schema) | | [xdm:eVar80](#xdmevar80) | `string` | Optional | eVars (this schema) | | [xdm:eVar81](#xdmevar81) | `string` | Optional | eVars (this schema) | | [xdm:eVar82](#xdmevar82) | `string` | Optional | eVars (this schema) | | [xdm:eVar83](#xdmevar83) | `string` | Optional | eVars (this schema) | | [xdm:eVar84](#xdmevar84) | `string` | Optional | eVars (this schema) | | [xdm:eVar85](#xdmevar85) | `string` | Optional | eVars (this schema) | | [xdm:eVar86](#xdmevar86) | `string` | Optional | eVars (this schema) | | [xdm:eVar87](#xdmevar87) | `string` | Optional | eVars (this schema) | | [xdm:eVar88](#xdmevar88) | `string` | Optional | eVars (this schema) | | [xdm:eVar89](#xdmevar89) | `string` | Optional | eVars (this schema) | | [xdm:eVar9](#xdmevar9) | `string` | Optional | eVars (this schema) | | [xdm:eVar90](#xdmevar90) | `string` | Optional | eVars (this schema) | | [xdm:eVar91](#xdmevar91) | `string` | Optional | eVars (this schema) | | [xdm:eVar92](#xdmevar92) | `string` | Optional | eVars (this schema) | | [xdm:eVar93](#xdmevar93) | `string` | Optional | eVars (this schema) | | [xdm:eVar94](#xdmevar94) | `string` | Optional | eVars (this schema) | | [xdm:eVar95](#xdmevar95) | `string` | Optional | eVars (this schema) | | [xdm:eVar96](#xdmevar96) | `string` | Optional | eVars (this schema) | | [xdm:eVar97](#xdmevar97) | `string` | Optional | eVars (this schema) | | [xdm:eVar98](#xdmevar98) | `string` | Optional | eVars (this schema) | | [xdm:eVar99](#xdmevar99) | `string` | Optional | eVars (this schema) | | `*` | any | Additional | this schema *allows* additional properties | ## xdm:eVar1 ### eVar1 Custom conversion variable 1. `xdm:eVar1` * is optional * type: `string` * defined in this schema ### xdm:eVar1 Type `string` ## xdm:eVar10 ### eVar10 Custom conversion variable 10. 
`xdm:eVar10` * is optional * type: `string` * defined in this schema ### xdm:eVar10 Type `string` ## xdm:eVar100 ### eVar100 Custom conversion variable 100. `xdm:eVar100` * is optional * type: `string` * defined in this schema ### xdm:eVar100 Type `string` ## xdm:eVar101 ### eVar101 Custom conversion variable 101. `xdm:eVar101` * is optional * type: `string` * defined in this schema ### xdm:eVar101 Type `string` ## xdm:eVar102 ### eVar102 Custom conversion variable 102. `xdm:eVar102` * is optional * type: `string` * defined in this schema ### xdm:eVar102 Type `string` ## xdm:eVar103 ### eVar103 Custom conversion variable 103. `xdm:eVar103` * is optional * type: `string` * defined in this schema ### xdm:eVar103 Type `string` ## xdm:eVar104 ### eVar104 Custom conversion variable 104. `xdm:eVar104` * is optional * type: `string` * defined in this schema ### xdm:eVar104 Type `string` ## xdm:eVar105 ### eVar105 Custom conversion variable 105. `xdm:eVar105` * is optional * type: `string` * defined in this schema ### xdm:eVar105 Type `string` ## xdm:eVar106 ### eVar106 Custom conversion variable 106. `xdm:eVar106` * is optional * type: `string` * defined in this schema ### xdm:eVar106 Type `string` ## xdm:eVar107 ### eVar107 Custom conversion variable 107. `xdm:eVar107` * is optional * type: `string` * defined in this schema ### xdm:eVar107 Type `string` ## xdm:eVar108 ### eVar108 Custom conversion variable 108. `xdm:eVar108` * is optional * type: `string` * defined in this schema ### xdm:eVar108 Type `string` ## xdm:eVar109 ### eVar109 Custom conversion variable 109. `xdm:eVar109` * is optional * type: `string` * defined in this schema ### xdm:eVar109 Type `string` ## xdm:eVar11 ### eVar11 Custom conversion variable 11. `xdm:eVar11` * is optional * type: `string` * defined in this schema ### xdm:eVar11 Type `string` ## xdm:eVar110 ### eVar110 Custom conversion variable 110. 
`xdm:eVar110` * is optional * type: `string` * defined in this schema ### xdm:eVar110 Type `string` ## xdm:eVar111 ### eVar111 Custom conversion variable 111. `xdm:eVar111` * is optional * type: `string` * defined in this schema ### xdm:eVar111 Type `string` ## xdm:eVar112 ### eVar112 Custom conversion variable 112. `xdm:eVar112` * is optional * type: `string` * defined in this schema ### xdm:eVar112 Type `string` ## xdm:eVar113 ### eVar113 Custom conversion variable 113. `xdm:eVar113` * is optional * type: `string` * defined in this schema ### xdm:eVar113 Type `string` ## xdm:eVar114 ### eVar114 Custom conversion variable 114. `xdm:eVar114` * is optional * type: `string` * defined in this schema ### xdm:eVar114 Type `string` ## xdm:eVar115 ### eVar115 Custom conversion variable 115. `xdm:eVar115` * is optional * type: `string` * defined in this schema ### xdm:eVar115 Type `string` ## xdm:eVar116 ### eVar116 Custom conversion variable 116. `xdm:eVar116` * is optional * type: `string` * defined in this schema ### xdm:eVar116 Type `string` ## xdm:eVar117 ### eVar117 Custom conversion variable 117. `xdm:eVar117` * is optional * type: `string` * defined in this schema ### xdm:eVar117 Type `string` ## xdm:eVar118 ### eVar118 Custom conversion variable 118. `xdm:eVar118` * is optional * type: `string` * defined in this schema ### xdm:eVar118 Type `string` ## xdm:eVar119 ### eVar119 Custom conversion variable 119. `xdm:eVar119` * is optional * type: `string` * defined in this schema ### xdm:eVar119 Type `string` ## xdm:eVar12 ### eVar12 Custom conversion variable 12. `xdm:eVar12` * is optional * type: `string` * defined in this schema ### xdm:eVar12 Type `string` ## xdm:eVar120 ### eVar120 Custom conversion variable 120. `xdm:eVar120` * is optional * type: `string` * defined in this schema ### xdm:eVar120 Type `string` ## xdm:eVar121 ### eVar121 Custom conversion variable 121. 
`xdm:eVar121` * is optional * type: `string` * defined in this schema ### xdm:eVar121 Type `string` ## xdm:eVar122 ### eVar122 Custom conversion variable 122. `xdm:eVar122` * is optional * type: `string` * defined in this schema ### xdm:eVar122 Type `string` ## xdm:eVar123 ### eVar123 Custom conversion variable 123. `xdm:eVar123` * is optional * type: `string` * defined in this schema ### xdm:eVar123 Type `string` ## xdm:eVar124 ### eVar124 Custom conversion variable 124. `xdm:eVar124` * is optional * type: `string` * defined in this schema ### xdm:eVar124 Type `string` ## xdm:eVar125 ### eVar125 Custom conversion variable 125. `xdm:eVar125` * is optional * type: `string` * defined in this schema ### xdm:eVar125 Type `string` ## xdm:eVar126 ### eVar126 Custom conversion variable 126. `xdm:eVar126` * is optional * type: `string` * defined in this schema ### xdm:eVar126 Type `string` ## xdm:eVar127 ### eVar127 Custom conversion variable 127. `xdm:eVar127` * is optional * type: `string` * defined in this schema ### xdm:eVar127 Type `string` ## xdm:eVar128 ### eVar128 Custom conversion variable 128. `xdm:eVar128` * is optional * type: `string` * defined in this schema ### xdm:eVar128 Type `string` ## xdm:eVar129 ### eVar129 Custom conversion variable 129. `xdm:eVar129` * is optional * type: `string` * defined in this schema ### xdm:eVar129 Type `string` ## xdm:eVar13 ### eVar13 Custom conversion variable 13. `xdm:eVar13` * is optional * type: `string` * defined in this schema ### xdm:eVar13 Type `string` ## xdm:eVar130 ### eVar130 Custom conversion variable 130. `xdm:eVar130` * is optional * type: `string` * defined in this schema ### xdm:eVar130 Type `string` ## xdm:eVar131 ### eVar131 Custom conversion variable 131. `xdm:eVar131` * is optional * type: `string` * defined in this schema ### xdm:eVar131 Type `string` ## xdm:eVar132 ### eVar132 Custom conversion variable 132. 
`xdm:eVar132` * is optional * type: `string` * defined in this schema ### xdm:eVar132 Type `string` ## xdm:eVar133 ### eVar133 Custom conversion variable 133. `xdm:eVar133` * is optional * type: `string` * defined in this schema ### xdm:eVar133 Type `string` ## xdm:eVar134 ### eVar134 Custom conversion variable 134. `xdm:eVar134` * is optional * type: `string` * defined in this schema ### xdm:eVar134 Type `string` ## xdm:eVar135 ### eVar135 Custom conversion variable 135. `xdm:eVar135` * is optional * type: `string` * defined in this schema ### xdm:eVar135 Type `string` ## xdm:eVar136 ### eVar136 Custom conversion variable 136. `xdm:eVar136` * is optional * type: `string` * defined in this schema ### xdm:eVar136 Type `string` ## xdm:eVar137 ### eVar137 Custom conversion variable 137. `xdm:eVar137` * is optional * type: `string` * defined in this schema ### xdm:eVar137 Type `string` ## xdm:eVar138 ### eVar138 Custom conversion variable 138. `xdm:eVar138` * is optional * type: `string` * defined in this schema ### xdm:eVar138 Type `string` ## xdm:eVar139 ### eVar139 Custom conversion variable 139. `xdm:eVar139` * is optional * type: `string` * defined in this schema ### xdm:eVar139 Type `string` ## xdm:eVar14 ### eVar14 Custom conversion variable 14. `xdm:eVar14` * is optional * type: `string` * defined in this schema ### xdm:eVar14 Type `string` ## xdm:eVar140 ### eVar140 Custom conversion variable 140. `xdm:eVar140` * is optional * type: `string` * defined in this schema ### xdm:eVar140 Type `string` ## xdm:eVar141 ### eVar141 Custom conversion variable 141. `xdm:eVar141` * is optional * type: `string` * defined in this schema ### xdm:eVar141 Type `string` ## xdm:eVar142 ### eVar142 Custom conversion variable 142. `xdm:eVar142` * is optional * type: `string` * defined in this schema ### xdm:eVar142 Type `string` ## xdm:eVar143 ### eVar143 Custom conversion variable 143. 
`xdm:eVar143` * is optional * type: `string` * defined in this schema ### xdm:eVar143 Type `string` ## xdm:eVar144 ### eVar144 Custom conversion variable 144. `xdm:eVar144` * is optional * type: `string` * defined in this schema ### xdm:eVar144 Type `string` ## xdm:eVar145 ### eVar145 Custom conversion variable 145. `xdm:eVar145` * is optional * type: `string` * defined in this schema ### xdm:eVar145 Type `string` ## xdm:eVar146 ### eVar146 Custom conversion variable 146. `xdm:eVar146` * is optional * type: `string` * defined in this schema ### xdm:eVar146 Type `string` ## xdm:eVar147 ### eVar147 Custom conversion variable 147. `xdm:eVar147` * is optional * type: `string` * defined in this schema ### xdm:eVar147 Type `string` ## xdm:eVar148 ### eVar148 Custom conversion variable 148. `xdm:eVar148` * is optional * type: `string` * defined in this schema ### xdm:eVar148 Type `string` ## xdm:eVar149 ### eVar149 Custom conversion variable 149. `xdm:eVar149` * is optional * type: `string` * defined in this schema ### xdm:eVar149 Type `string` ## xdm:eVar15 ### eVar15 Custom conversion variable 15. `xdm:eVar15` * is optional * type: `string` * defined in this schema ### xdm:eVar15 Type `string` ## xdm:eVar150 ### eVar150 Custom conversion variable 150. `xdm:eVar150` * is optional * type: `string` * defined in this schema ### xdm:eVar150 Type `string` ## xdm:eVar151 ### eVar151 Custom conversion variable 151. `xdm:eVar151` * is optional * type: `string` * defined in this schema ### xdm:eVar151 Type `string` ## xdm:eVar152 ### eVar152 Custom conversion variable 152. `xdm:eVar152` * is optional * type: `string` * defined in this schema ### xdm:eVar152 Type `string` ## xdm:eVar153 ### eVar153 Custom conversion variable 153. `xdm:eVar153` * is optional * type: `string` * defined in this schema ### xdm:eVar153 Type `string` ## xdm:eVar154 ### eVar154 Custom conversion variable 154. 
`xdm:eVar154` * is optional * type: `string` * defined in this schema ### xdm:eVar154 Type `string` ## xdm:eVar155 ### eVar155 Custom conversion variable 155. `xdm:eVar155` * is optional * type: `string` * defined in this schema ### xdm:eVar155 Type `string` ## xdm:eVar156 ### eVar156 Custom conversion variable 156. `xdm:eVar156` * is optional * type: `string` * defined in this schema ### xdm:eVar156 Type `string` ## xdm:eVar157 ### eVar157 Custom conversion variable 157. `xdm:eVar157` * is optional * type: `string` * defined in this schema ### xdm:eVar157 Type `string` ## xdm:eVar158 ### eVar158 Custom conversion variable 158. `xdm:eVar158` * is optional * type: `string` * defined in this schema ### xdm:eVar158 Type `string` ## xdm:eVar159 ### eVar159 Custom conversion variable 159. `xdm:eVar159` * is optional * type: `string` * defined in this schema ### xdm:eVar159 Type `string` ## xdm:eVar16 ### eVar16 Custom conversion variable 16. `xdm:eVar16` * is optional * type: `string` * defined in this schema ### xdm:eVar16 Type `string` ## xdm:eVar160 ### eVar160 Custom conversion variable 160. `xdm:eVar160` * is optional * type: `string` * defined in this schema ### xdm:eVar160 Type `string` ## xdm:eVar161 ### eVar161 Custom conversion variable 161. `xdm:eVar161` * is optional * type: `string` * defined in this schema ### xdm:eVar161 Type `string` ## xdm:eVar162 ### eVar162 Custom conversion variable 162. `xdm:eVar162` * is optional * type: `string` * defined in this schema ### xdm:eVar162 Type `string` ## xdm:eVar163 ### eVar163 Custom conversion variable 163. `xdm:eVar163` * is optional * type: `string` * defined in this schema ### xdm:eVar163 Type `string` ## xdm:eVar164 ### eVar164 Custom conversion variable 164. `xdm:eVar164` * is optional * type: `string` * defined in this schema ### xdm:eVar164 Type `string` ## xdm:eVar165 ### eVar165 Custom conversion variable 165. 
`xdm:eVar165` * is optional * type: `string` * defined in this schema ### xdm:eVar165 Type `string` ## xdm:eVar166 ### eVar166 Custom conversion variable 166. `xdm:eVar166` * is optional * type: `string` * defined in this schema ### xdm:eVar166 Type `string` ## xdm:eVar167 ### eVar167 Custom conversion variable 167. `xdm:eVar167` * is optional * type: `string` * defined in this schema ### xdm:eVar167 Type `string` ## xdm:eVar168 ### eVar168 Custom conversion variable 168. `xdm:eVar168` * is optional * type: `string` * defined in this schema ### xdm:eVar168 Type `string` ## xdm:eVar169 ### eVar169 Custom conversion variable 169. `xdm:eVar169` * is optional * type: `string` * defined in this schema ### xdm:eVar169 Type `string` ## xdm:eVar17 ### eVar17 Custom conversion variable 17. `xdm:eVar17` * is optional * type: `string` * defined in this schema ### xdm:eVar17 Type `string` ## xdm:eVar170 ### eVar170 Custom conversion variable 170. `xdm:eVar170` * is optional * type: `string` * defined in this schema ### xdm:eVar170 Type `string` ## xdm:eVar171 ### eVar171 Custom conversion variable 171. `xdm:eVar171` * is optional * type: `string` * defined in this schema ### xdm:eVar171 Type `string` ## xdm:eVar172 ### eVar172 Custom conversion variable 172. `xdm:eVar172` * is optional * type: `string` * defined in this schema ### xdm:eVar172 Type `string` ## xdm:eVar173 ### eVar173 Custom conversion variable 173. `xdm:eVar173` * is optional * type: `string` * defined in this schema ### xdm:eVar173 Type `string` ## xdm:eVar174 ### eVar174 Custom conversion variable 174. `xdm:eVar174` * is optional * type: `string` * defined in this schema ### xdm:eVar174 Type `string` ## xdm:eVar175 ### eVar175 Custom conversion variable 175. `xdm:eVar175` * is optional * type: `string` * defined in this schema ### xdm:eVar175 Type `string` ## xdm:eVar176 ### eVar176 Custom conversion variable 176. 
`xdm:eVar176` * is optional * type: `string` * defined in this schema ### xdm:eVar176 Type `string` ## xdm:eVar177 ### eVar177 Custom conversion variable 177. `xdm:eVar177` * is optional * type: `string` * defined in this schema ### xdm:eVar177 Type `string` ## xdm:eVar178 ### eVar178 Custom conversion variable 178. `xdm:eVar178` * is optional * type: `string` * defined in this schema ### xdm:eVar178 Type `string` ## xdm:eVar179 ### eVar179 Custom conversion variable 179. `xdm:eVar179` * is optional * type: `string` * defined in this schema ### xdm:eVar179 Type `string` ## xdm:eVar18 ### eVar18 Custom conversion variable 18. `xdm:eVar18` * is optional * type: `string` * defined in this schema ### xdm:eVar18 Type `string` ## xdm:eVar180 ### eVar180 Custom conversion variable 180. `xdm:eVar180` * is optional * type: `string` * defined in this schema ### xdm:eVar180 Type `string` ## xdm:eVar181 ### eVar181 Custom conversion variable 181. `xdm:eVar181` * is optional * type: `string` * defined in this schema ### xdm:eVar181 Type `string` ## xdm:eVar182 ### eVar182 Custom conversion variable 182. `xdm:eVar182` * is optional * type: `string` * defined in this schema ### xdm:eVar182 Type `string` ## xdm:eVar183 ### eVar183 Custom conversion variable 183. `xdm:eVar183` * is optional * type: `string` * defined in this schema ### xdm:eVar183 Type `string` ## xdm:eVar184 ### eVar184 Custom conversion variable 184. `xdm:eVar184` * is optional * type: `string` * defined in this schema ### xdm:eVar184 Type `string` ## xdm:eVar185 ### eVar185 Custom conversion variable 185. `xdm:eVar185` * is optional * type: `string` * defined in this schema ### xdm:eVar185 Type `string` ## xdm:eVar186 ### eVar186 Custom conversion variable 186. `xdm:eVar186` * is optional * type: `string` * defined in this schema ### xdm:eVar186 Type `string` ## xdm:eVar187 ### eVar187 Custom conversion variable 187. 
`xdm:eVar187` * is optional * type: `string` * defined in this schema ### xdm:eVar187 Type `string` ## xdm:eVar188 ### eVar188 Custom conversion variable 188. `xdm:eVar188` * is optional * type: `string` * defined in this schema ### xdm:eVar188 Type `string` ## xdm:eVar189 ### eVar189 Custom conversion variable 189. `xdm:eVar189` * is optional * type: `string` * defined in this schema ### xdm:eVar189 Type `string` ## xdm:eVar19 ### eVar19 Custom conversion variable 19. `xdm:eVar19` * is optional * type: `string` * defined in this schema ### xdm:eVar19 Type `string` ## xdm:eVar190 ### eVar190 Custom conversion variable 190. `xdm:eVar190` * is optional * type: `string` * defined in this schema ### xdm:eVar190 Type `string` ## xdm:eVar191 ### eVar191 Custom conversion variable 191. `xdm:eVar191` * is optional * type: `string` * defined in this schema ### xdm:eVar191 Type `string` ## xdm:eVar192 ### eVar192 Custom conversion variable 192. `xdm:eVar192` * is optional * type: `string` * defined in this schema ### xdm:eVar192 Type `string` ## xdm:eVar193 ### eVar193 Custom conversion variable 193. `xdm:eVar193` * is optional * type: `string` * defined in this schema ### xdm:eVar193 Type `string` ## xdm:eVar194 ### eVar194 Custom conversion variable 194. `xdm:eVar194` * is optional * type: `string` * defined in this schema ### xdm:eVar194 Type `string` ## xdm:eVar195 ### eVar195 Custom conversion variable 195. `xdm:eVar195` * is optional * type: `string` * defined in this schema ### xdm:eVar195 Type `string` ## xdm:eVar196 ### eVar196 Custom conversion variable 196. `xdm:eVar196` * is optional * type: `string` * defined in this schema ### xdm:eVar196 Type `string` ## xdm:eVar197 ### eVar197 Custom conversion variable 197. `xdm:eVar197` * is optional * type: `string` * defined in this schema ### xdm:eVar197 Type `string` ## xdm:eVar198 ### eVar198 Custom conversion variable 198. 
`xdm:eVar198` * is optional * type: `string` * defined in this schema ### xdm:eVar198 Type `string` ## xdm:eVar199 ### eVar199 Custom conversion variable 199. `xdm:eVar199` * is optional * type: `string` * defined in this schema ### xdm:eVar199 Type `string` ## xdm:eVar2 ### eVar2 Custom conversion variable 2. `xdm:eVar2` * is optional * type: `string` * defined in this schema ### xdm:eVar2 Type `string` ## xdm:eVar20 ### eVar20 Custom conversion variable 20. `xdm:eVar20` * is optional * type: `string` * defined in this schema ### xdm:eVar20 Type `string` ## xdm:eVar200 ### eVar200 Custom conversion variable 200. `xdm:eVar200` * is optional * type: `string` * defined in this schema ### xdm:eVar200 Type `string` ## xdm:eVar201 ### eVar201 Custom conversion variable 201. `xdm:eVar201` * is optional * type: `string` * defined in this schema ### xdm:eVar201 Type `string` ## xdm:eVar202 ### eVar202 Custom conversion variable 202. `xdm:eVar202` * is optional * type: `string` * defined in this schema ### xdm:eVar202 Type `string` ## xdm:eVar203 ### eVar203 Custom conversion variable 203. `xdm:eVar203` * is optional * type: `string` * defined in this schema ### xdm:eVar203 Type `string` ## xdm:eVar204 ### eVar204 Custom conversion variable 204. `xdm:eVar204` * is optional * type: `string` * defined in this schema ### xdm:eVar204 Type `string` ## xdm:eVar205 ### eVar205 Custom conversion variable 205. `xdm:eVar205` * is optional * type: `string` * defined in this schema ### xdm:eVar205 Type `string` ## xdm:eVar206 ### eVar206 Custom conversion variable 206. `xdm:eVar206` * is optional * type: `string` * defined in this schema ### xdm:eVar206 Type `string` ## xdm:eVar207 ### eVar207 Custom conversion variable 207. `xdm:eVar207` * is optional * type: `string` * defined in this schema ### xdm:eVar207 Type `string` ## xdm:eVar208 ### eVar208 Custom conversion variable 208. 
`xdm:eVar208` * is optional * type: `string` * defined in this schema ### xdm:eVar208 Type `string` ## xdm:eVar209 ### eVar209 Custom conversion variable 209. `xdm:eVar209` * is optional * type: `string` * defined in this schema ### xdm:eVar209 Type `string` ## xdm:eVar21 ### eVar21 Custom conversion variable 21. `xdm:eVar21` * is optional * type: `string` * defined in this schema ### xdm:eVar21 Type `string` ## xdm:eVar210 ### eVar210 Custom conversion variable 210. `xdm:eVar210` * is optional * type: `string` * defined in this schema ### xdm:eVar210 Type `string` ## xdm:eVar211 ### eVar211 Custom conversion variable 211. `xdm:eVar211` * is optional * type: `string` * defined in this schema ### xdm:eVar211 Type `string` ## xdm:eVar212 ### eVar212 Custom conversion variable 212. `xdm:eVar212` * is optional * type: `string` * defined in this schema ### xdm:eVar212 Type `string` ## xdm:eVar213 ### eVar213 Custom conversion variable 213. `xdm:eVar213` * is optional * type: `string` * defined in this schema ### xdm:eVar213 Type `string` ## xdm:eVar214 ### eVar214 Custom conversion variable 214. `xdm:eVar214` * is optional * type: `string` * defined in this schema ### xdm:eVar214 Type `string` ## xdm:eVar215 ### eVar215 Custom conversion variable 215. `xdm:eVar215` * is optional * type: `string` * defined in this schema ### xdm:eVar215 Type `string` ## xdm:eVar216 ### eVar216 Custom conversion variable 216. `xdm:eVar216` * is optional * type: `string` * defined in this schema ### xdm:eVar216 Type `string` ## xdm:eVar217 ### eVar217 Custom conversion variable 217. `xdm:eVar217` * is optional * type: `string` * defined in this schema ### xdm:eVar217 Type `string` ## xdm:eVar218 ### eVar218 Custom conversion variable 218. `xdm:eVar218` * is optional * type: `string` * defined in this schema ### xdm:eVar218 Type `string` ## xdm:eVar219 ### eVar219 Custom conversion variable 219. 
`xdm:eVar219` * is optional * type: `string` * defined in this schema ### xdm:eVar219 Type `string` ## xdm:eVar22 ### eVar22 Custom conversion variable 22. `xdm:eVar22` * is optional * type: `string` * defined in this schema ### xdm:eVar22 Type `string` ## xdm:eVar220 ### eVar220 Custom conversion variable 220. `xdm:eVar220` * is optional * type: `string` * defined in this schema ### xdm:eVar220 Type `string` ## xdm:eVar221 ### eVar221 Custom conversion variable 221. `xdm:eVar221` * is optional * type: `string` * defined in this schema ### xdm:eVar221 Type `string` ## xdm:eVar222 ### eVar222 Custom conversion variable 222. `xdm:eVar222` * is optional * type: `string` * defined in this schema ### xdm:eVar222 Type `string` ## xdm:eVar223 ### eVar223 Custom conversion variable 223. `xdm:eVar223` * is optional * type: `string` * defined in this schema ### xdm:eVar223 Type `string` ## xdm:eVar224 ### eVar224 Custom conversion variable 224. `xdm:eVar224` * is optional * type: `string` * defined in this schema ### xdm:eVar224 Type `string` ## xdm:eVar225 ### eVar225 Custom conversion variable 225. `xdm:eVar225` * is optional * type: `string` * defined in this schema ### xdm:eVar225 Type `string` ## xdm:eVar226 ### eVar226 Custom conversion variable 226. `xdm:eVar226` * is optional * type: `string` * defined in this schema ### xdm:eVar226 Type `string` ## xdm:eVar227 ### eVar227 Custom conversion variable 227. `xdm:eVar227` * is optional * type: `string` * defined in this schema ### xdm:eVar227 Type `string` ## xdm:eVar228 ### eVar228 Custom conversion variable 228. `xdm:eVar228` * is optional * type: `string` * defined in this schema ### xdm:eVar228 Type `string` ## xdm:eVar229 ### eVar229 Custom conversion variable 229. `xdm:eVar229` * is optional * type: `string` * defined in this schema ### xdm:eVar229 Type `string` ## xdm:eVar23 ### eVar23 Custom conversion variable 23. 
`xdm:eVar23` * is optional * type: `string` * defined in this schema ### xdm:eVar23 Type `string` ## xdm:eVar230 ### eVar230 Custom conversion variable 230. `xdm:eVar230` * is optional * type: `string` * defined in this schema ### xdm:eVar230 Type `string` ## xdm:eVar231 ### eVar231 Custom conversion variable 231. `xdm:eVar231` * is optional * type: `string` * defined in this schema ### xdm:eVar231 Type `string` ## xdm:eVar232 ### eVar232 Custom conversion variable 232. `xdm:eVar232` * is optional * type: `string` * defined in this schema ### xdm:eVar232 Type `string` ## xdm:eVar233 ### eVar233 Custom conversion variable 233. `xdm:eVar233` * is optional * type: `string` * defined in this schema ### xdm:eVar233 Type `string` ## xdm:eVar234 ### eVar234 Custom conversion variable 234. `xdm:eVar234` * is optional * type: `string` * defined in this schema ### xdm:eVar234 Type `string` ## xdm:eVar235 ### eVar235 Custom conversion variable 235. `xdm:eVar235` * is optional * type: `string` * defined in this schema ### xdm:eVar235 Type `string` ## xdm:eVar236 ### eVar236 Custom conversion variable 236. `xdm:eVar236` * is optional * type: `string` * defined in this schema ### xdm:eVar236 Type `string` ## xdm:eVar237 ### eVar237 Custom conversion variable 237. `xdm:eVar237` * is optional * type: `string` * defined in this schema ### xdm:eVar237 Type `string` ## xdm:eVar238 ### eVar238 Custom conversion variable 238. `xdm:eVar238` * is optional * type: `string` * defined in this schema ### xdm:eVar238 Type `string` ## xdm:eVar239 ### eVar239 Custom conversion variable 239. `xdm:eVar239` * is optional * type: `string` * defined in this schema ### xdm:eVar239 Type `string` ## xdm:eVar24 ### eVar24 Custom conversion variable 24. `xdm:eVar24` * is optional * type: `string` * defined in this schema ### xdm:eVar24 Type `string` ## xdm:eVar240 ### eVar240 Custom conversion variable 240. 
`xdm:eVar240` * is optional * type: `string` * defined in this schema ### xdm:eVar240 Type `string` ## xdm:eVar241 ### eVar241 Custom conversion variable 241. `xdm:eVar241` * is optional * type: `string` * defined in this schema ### xdm:eVar241 Type `string` ## xdm:eVar242 ### eVar242 Custom conversion variable 242. `xdm:eVar242` * is optional * type: `string` * defined in this schema ### xdm:eVar242 Type `string` ## xdm:eVar243 ### eVar243 Custom conversion variable 243. `xdm:eVar243` * is optional * type: `string` * defined in this schema ### xdm:eVar243 Type `string` ## xdm:eVar244 ### eVar244 Custom conversion variable 244. `xdm:eVar244` * is optional * type: `string` * defined in this schema ### xdm:eVar244 Type `string` ## xdm:eVar245 ### eVar245 Custom conversion variable 245. `xdm:eVar245` * is optional * type: `string` * defined in this schema ### xdm:eVar245 Type `string` ## xdm:eVar246 ### eVar246 Custom conversion variable 246. `xdm:eVar246` * is optional * type: `string` * defined in this schema ### xdm:eVar246 Type `string` ## xdm:eVar247 ### eVar247 Custom conversion variable 247. `xdm:eVar247` * is optional * type: `string` * defined in this schema ### xdm:eVar247 Type `string` ## xdm:eVar248 ### eVar248 Custom conversion variable 248. `xdm:eVar248` * is optional * type: `string` * defined in this schema ### xdm:eVar248 Type `string` ## xdm:eVar249 ### eVar249 Custom conversion variable 249. `xdm:eVar249` * is optional * type: `string` * defined in this schema ### xdm:eVar249 Type `string` ## xdm:eVar25 ### eVar25 Custom conversion variable 25. `xdm:eVar25` * is optional * type: `string` * defined in this schema ### xdm:eVar25 Type `string` ## xdm:eVar250 ### eVar250 Custom conversion variable 250. `xdm:eVar250` * is optional * type: `string` * defined in this schema ### xdm:eVar250 Type `string` ## xdm:eVar26 ### eVar26 Custom conversion variable 26. 
`xdm:eVar26` * is optional * type: `string` * defined in this schema ### xdm:eVar26 Type `string` ## xdm:eVar27 ### eVar27 Custom conversion variable 27. `xdm:eVar27` * is optional * type: `string` * defined in this schema ### xdm:eVar27 Type `string` ## xdm:eVar28 ### eVar28 Custom conversion variable 28. `xdm:eVar28` * is optional * type: `string` * defined in this schema ### xdm:eVar28 Type `string` ## xdm:eVar29 ### eVar29 Custom conversion variable 29. `xdm:eVar29` * is optional * type: `string` * defined in this schema ### xdm:eVar29 Type `string` ## xdm:eVar3 ### eVar3 Custom conversion variable 3. `xdm:eVar3` * is optional * type: `string` * defined in this schema ### xdm:eVar3 Type `string` ## xdm:eVar30 ### eVar30 Custom conversion variable 30. `xdm:eVar30` * is optional * type: `string` * defined in this schema ### xdm:eVar30 Type `string` ## xdm:eVar31 ### eVar31 Custom conversion variable 31. `xdm:eVar31` * is optional * type: `string` * defined in this schema ### xdm:eVar31 Type `string` ## xdm:eVar32 ### eVar32 Custom conversion variable 32. `xdm:eVar32` * is optional * type: `string` * defined in this schema ### xdm:eVar32 Type `string` ## xdm:eVar33 ### eVar33 Custom conversion variable 33. `xdm:eVar33` * is optional * type: `string` * defined in this schema ### xdm:eVar33 Type `string` ## xdm:eVar34 ### eVar34 Custom conversion variable 34. `xdm:eVar34` * is optional * type: `string` * defined in this schema ### xdm:eVar34 Type `string` ## xdm:eVar35 ### eVar35 Custom conversion variable 35. `xdm:eVar35` * is optional * type: `string` * defined in this schema ### xdm:eVar35 Type `string` ## xdm:eVar36 ### eVar36 Custom conversion variable 36. `xdm:eVar36` * is optional * type: `string` * defined in this schema ### xdm:eVar36 Type `string` ## xdm:eVar37 ### eVar37 Custom conversion variable 37. `xdm:eVar37` * is optional * type: `string` * defined in this schema ### xdm:eVar37 Type `string` ## xdm:eVar38 ### eVar38 Custom conversion variable 38. 
`xdm:eVar38` * is optional * type: `string` * defined in this schema ### xdm:eVar38 Type `string` ## xdm:eVar39 ### eVar39 Custom conversion variable 39. `xdm:eVar39` * is optional * type: `string` * defined in this schema ### xdm:eVar39 Type `string` ## xdm:eVar4 ### eVar4 Custom conversion variable 4. `xdm:eVar4` * is optional * type: `string` * defined in this schema ### xdm:eVar4 Type `string` ## xdm:eVar40 ### eVar40 Custom conversion variable 40. `xdm:eVar40` * is optional * type: `string` * defined in this schema ### xdm:eVar40 Type `string` ## xdm:eVar41 ### eVar41 Custom conversion variable 41. `xdm:eVar41` * is optional * type: `string` * defined in this schema ### xdm:eVar41 Type `string` ## xdm:eVar42 ### eVar42 Custom conversion variable 42. `xdm:eVar42` * is optional * type: `string` * defined in this schema ### xdm:eVar42 Type `string` ## xdm:eVar43 ### eVar43 Custom conversion variable 43. `xdm:eVar43` * is optional * type: `string` * defined in this schema ### xdm:eVar43 Type `string` ## xdm:eVar44 ### eVar44 Custom conversion variable 44. `xdm:eVar44` * is optional * type: `string` * defined in this schema ### xdm:eVar44 Type `string` ## xdm:eVar45 ### eVar45 Custom conversion variable 45. `xdm:eVar45` * is optional * type: `string` * defined in this schema ### xdm:eVar45 Type `string` ## xdm:eVar46 ### eVar46 Custom conversion variable 46. `xdm:eVar46` * is optional * type: `string` * defined in this schema ### xdm:eVar46 Type `string` ## xdm:eVar47 ### eVar47 Custom conversion variable 47. `xdm:eVar47` * is optional * type: `string` * defined in this schema ### xdm:eVar47 Type `string` ## xdm:eVar48 ### eVar48 Custom conversion variable 48. `xdm:eVar48` * is optional * type: `string` * defined in this schema ### xdm:eVar48 Type `string` ## xdm:eVar49 ### eVar49 Custom conversion variable 49. `xdm:eVar49` * is optional * type: `string` * defined in this schema ### xdm:eVar49 Type `string` ## xdm:eVar5 ### eVar5 Custom conversion variable 5. 
`xdm:eVar5` * is optional * type: `string` * defined in this schema ### xdm:eVar5 Type `string` ## xdm:eVar50 ### eVar50 Custom conversion variable 50. `xdm:eVar50` * is optional * type: `string` * defined in this schema ### xdm:eVar50 Type `string` ## xdm:eVar51 ### eVar51 Custom conversion variable 51. `xdm:eVar51` * is optional * type: `string` * defined in this schema ### xdm:eVar51 Type `string` ## xdm:eVar52 ### eVar52 Custom conversion variable 52. `xdm:eVar52` * is optional * type: `string` * defined in this schema ### xdm:eVar52 Type `string` ## xdm:eVar53 ### eVar53 Custom conversion variable 53. `xdm:eVar53` * is optional * type: `string` * defined in this schema ### xdm:eVar53 Type `string` ## xdm:eVar54 ### eVar54 Custom conversion variable 54. `xdm:eVar54` * is optional * type: `string` * defined in this schema ### xdm:eVar54 Type `string` ## xdm:eVar55 ### eVar55 Custom conversion variable 55. `xdm:eVar55` * is optional * type: `string` * defined in this schema ### xdm:eVar55 Type `string` ## xdm:eVar56 ### eVar56 Custom conversion variable 56. `xdm:eVar56` * is optional * type: `string` * defined in this schema ### xdm:eVar56 Type `string` ## xdm:eVar57 ### eVar57 Custom conversion variable 57. `xdm:eVar57` * is optional * type: `string` * defined in this schema ### xdm:eVar57 Type `string` ## xdm:eVar58 ### eVar58 Custom conversion variable 58. `xdm:eVar58` * is optional * type: `string` * defined in this schema ### xdm:eVar58 Type `string` ## xdm:eVar59 ### eVar59 Custom conversion variable 59. `xdm:eVar59` * is optional * type: `string` * defined in this schema ### xdm:eVar59 Type `string` ## xdm:eVar6 ### eVar6 Custom conversion variable 6. `xdm:eVar6` * is optional * type: `string` * defined in this schema ### xdm:eVar6 Type `string` ## xdm:eVar60 ### eVar60 Custom conversion variable 60. `xdm:eVar60` * is optional * type: `string` * defined in this schema ### xdm:eVar60 Type `string` ## xdm:eVar61 ### eVar61 Custom conversion variable 61. 
`xdm:eVar61` * is optional * type: `string` * defined in this schema ### xdm:eVar61 Type `string` ## xdm:eVar62 ### eVar62 Custom conversion variable 62. `xdm:eVar62` * is optional * type: `string` * defined in this schema ### xdm:eVar62 Type `string` ## xdm:eVar63 ### eVar63 Custom conversion variable 63. `xdm:eVar63` * is optional * type: `string` * defined in this schema ### xdm:eVar63 Type `string` ## xdm:eVar64 ### eVar64 Custom conversion variable 64. `xdm:eVar64` * is optional * type: `string` * defined in this schema ### xdm:eVar64 Type `string` ## xdm:eVar65 ### eVar65 Custom conversion variable 65. `xdm:eVar65` * is optional * type: `string` * defined in this schema ### xdm:eVar65 Type `string` ## xdm:eVar66 ### eVar66 Custom conversion variable 66. `xdm:eVar66` * is optional * type: `string` * defined in this schema ### xdm:eVar66 Type `string` ## xdm:eVar67 ### eVar67 Custom conversion variable 67. `xdm:eVar67` * is optional * type: `string` * defined in this schema ### xdm:eVar67 Type `string` ## xdm:eVar68 ### eVar68 Custom conversion variable 68. `xdm:eVar68` * is optional * type: `string` * defined in this schema ### xdm:eVar68 Type `string` ## xdm:eVar69 ### eVar69 Custom conversion variable 69. `xdm:eVar69` * is optional * type: `string` * defined in this schema ### xdm:eVar69 Type `string` ## xdm:eVar7 ### eVar7 Custom conversion variable 7. `xdm:eVar7` * is optional * type: `string` * defined in this schema ### xdm:eVar7 Type `string` ## xdm:eVar70 ### eVar70 Custom conversion variable 70. `xdm:eVar70` * is optional * type: `string` * defined in this schema ### xdm:eVar70 Type `string` ## xdm:eVar71 ### eVar71 Custom conversion variable 71. `xdm:eVar71` * is optional * type: `string` * defined in this schema ### xdm:eVar71 Type `string` ## xdm:eVar72 ### eVar72 Custom conversion variable 72. `xdm:eVar72` * is optional * type: `string` * defined in this schema ### xdm:eVar72 Type `string` ## xdm:eVar73 ### eVar73 Custom conversion variable 73. 
`xdm:eVar73` * is optional * type: `string` * defined in this schema ### xdm:eVar73 Type `string` ## xdm:eVar74 ### eVar74 Custom conversion variable 74. `xdm:eVar74` * is optional * type: `string` * defined in this schema ### xdm:eVar74 Type `string` ## xdm:eVar75 ### eVar75 Custom conversion variable 75. `xdm:eVar75` * is optional * type: `string` * defined in this schema ### xdm:eVar75 Type `string` ## xdm:eVar76 ### eVar76 Custom conversion variable 76. `xdm:eVar76` * is optional * type: `string` * defined in this schema ### xdm:eVar76 Type `string` ## xdm:eVar77 ### eVar77 Custom conversion variable 77. `xdm:eVar77` * is optional * type: `string` * defined in this schema ### xdm:eVar77 Type `string` ## xdm:eVar78 ### eVar78 Custom conversion variable 78. `xdm:eVar78` * is optional * type: `string` * defined in this schema ### xdm:eVar78 Type `string` ## xdm:eVar79 ### eVar79 Custom conversion variable 79. `xdm:eVar79` * is optional * type: `string` * defined in this schema ### xdm:eVar79 Type `string` ## xdm:eVar8 ### eVar8 Custom conversion variable 8. `xdm:eVar8` * is optional * type: `string` * defined in this schema ### xdm:eVar8 Type `string` ## xdm:eVar80 ### eVar80 Custom conversion variable 80. `xdm:eVar80` * is optional * type: `string` * defined in this schema ### xdm:eVar80 Type `string` ## xdm:eVar81 ### eVar81 Custom conversion variable 81. `xdm:eVar81` * is optional * type: `string` * defined in this schema ### xdm:eVar81 Type `string` ## xdm:eVar82 ### eVar82 Custom conversion variable 82. `xdm:eVar82` * is optional * type: `string` * defined in this schema ### xdm:eVar82 Type `string` ## xdm:eVar83 ### eVar83 Custom conversion variable 83. `xdm:eVar83` * is optional * type: `string` * defined in this schema ### xdm:eVar83 Type `string` ## xdm:eVar84 ### eVar84 Custom conversion variable 84. `xdm:eVar84` * is optional * type: `string` * defined in this schema ### xdm:eVar84 Type `string` ## xdm:eVar85 ### eVar85 Custom conversion variable 85. 
`xdm:eVar85` * is optional * type: `string` * defined in this schema ### xdm:eVar85 Type `string` ## xdm:eVar86 ### eVar86 Custom conversion variable 86. `xdm:eVar86` * is optional * type: `string` * defined in this schema ### xdm:eVar86 Type `string` ## xdm:eVar87 ### eVar87 Custom conversion variable 87. `xdm:eVar87` * is optional * type: `string` * defined in this schema ### xdm:eVar87 Type `string` ## xdm:eVar88 ### eVar88 Custom conversion variable 88. `xdm:eVar88` * is optional * type: `string` * defined in this schema ### xdm:eVar88 Type `string` ## xdm:eVar89 ### eVar89 Custom conversion variable 89. `xdm:eVar89` * is optional * type: `string` * defined in this schema ### xdm:eVar89 Type `string` ## xdm:eVar9 ### eVar9 Custom conversion variable 9. `xdm:eVar9` * is optional * type: `string` * defined in this schema ### xdm:eVar9 Type `string` ## xdm:eVar90 ### eVar90 Custom conversion variable 90. `xdm:eVar90` * is optional * type: `string` * defined in this schema ### xdm:eVar90 Type `string` ## xdm:eVar91 ### eVar91 Custom conversion variable 91. `xdm:eVar91` * is optional * type: `string` * defined in this schema ### xdm:eVar91 Type `string` ## xdm:eVar92 ### eVar92 Custom conversion variable 92. `xdm:eVar92` * is optional * type: `string` * defined in this schema ### xdm:eVar92 Type `string` ## xdm:eVar93 ### eVar93 Custom conversion variable 93. `xdm:eVar93` * is optional * type: `string` * defined in this schema ### xdm:eVar93 Type `string` ## xdm:eVar94 ### eVar94 Custom conversion variable 94. `xdm:eVar94` * is optional * type: `string` * defined in this schema ### xdm:eVar94 Type `string` ## xdm:eVar95 ### eVar95 Custom conversion variable 95. `xdm:eVar95` * is optional * type: `string` * defined in this schema ### xdm:eVar95 Type `string` ## xdm:eVar96 ### eVar96 Custom conversion variable 96. `xdm:eVar96` * is optional * type: `string` * defined in this schema ### xdm:eVar96 Type `string` ## xdm:eVar97 ### eVar97 Custom conversion variable 97. 
`xdm:eVar97` * is optional * type: `string` * defined in this schema ### xdm:eVar97 Type `string` ## xdm:eVar98 ### eVar98 Custom conversion variable 98. `xdm:eVar98` * is optional * type: `string` * defined in this schema ### xdm:eVar98 Type `string` ## xdm:eVar99 ### eVar99 Custom conversion variable 99. `xdm:eVar99` * is optional * type: `string` * defined in this schema ### xdm:eVar99 Type `string`
{ "pile_set_name": "Github" }
package netwatch;

import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

import javax.swing.JOptionPane;

/** Main program for the NetWatch program: watch the network status
 * on a bunch of machines (i.e., in a classroom or lab). Currently only
 * for RMI, but can be extended for TCP socket, CORBA ORB, etc.
 * @author Ian F. Darwin, http://www.darwinsys.com/
 * Copyright (c) 2000, Ian F. Darwin. See LEGAL.NOTICE for licensing.
 */
// tag::main[]
public class NetWatch {

	public static void main(String[] argv) {
		Properties p = null;

		// Load the properties BEFORE constructing the frame.
		// BUG FIX: the frame used to be constructed first, so its
		// constructor always received a null Properties reference even
		// when NetWatch.properties existed and loaded successfully.
		try {
			FileInputStream is = new FileInputStream("NetWatch.properties");
			p = new Properties();
			p.load(is);
			is.close();
		} catch (IOException e) {
			// No frame exists yet; a null parent centers the dialog on screen.
			JOptionPane.showMessageDialog(null, e.toString(),
				"Properties error", JOptionPane.ERROR_MESSAGE);
		}

		NetFrame f = new NetFrame("Network Watcher", p);

		// NOW CONSTRUCT PANELS, ONE FOR EACH HOST.
		// If arguments, use them as hostnames.
		if (argv.length != 0) {
			for (int i = 0; i < argv.length; i++) {
				f.addHost(argv[i], p);
			}
		// No arguments. Can we use properties?
		} else if (p != null && p.size() > 0) {
			// Optional numeric host range: netwatch.net + netwatch.start..end.
			// Guarded so a properties file that only lists nethostN entries no
			// longer dies with NumberFormatException on the missing keys.
			String net = p.getProperty("netwatch.net");
			String startProp = p.getProperty("netwatch.start");
			String endProp = p.getProperty("netwatch.end");
			if (net != null && startProp != null && endProp != null) {
				int start = Integer.parseInt(startProp);
				int end = Integer.parseInt(endProp);
				for (int i = start; i <= end; i++) {
					f.addHost(net + "." + i, p);
				}
			}
			// Explicitly listed hosts: nethost0, nethost1, ... until a gap.
			for (int i = 0; ; i++) {
				String nextHost = p.getProperty("nethost" + i);
				if (nextHost == null)
					break;
				f.addHost(nextHost, p);
			}
		// None of the above. Fall back to localhost
		} else {
			f.addHost("localhost", p);
		}

		// All done. Pack the Frame and show it.
		f.pack();
		// UtilGUI.centre(f);
		f.setVisible(true);
		f.addWindowListener(new WindowAdapter() {
			public void windowClosing(WindowEvent e) {
				System.exit(0);
			}
		});
	}
}
// end::main[]
{ "pile_set_name": "Github" }
/*
 * Copyright 2013-2020 Software Radio Systems Limited
 *
 * This file is part of srsLTE.
 *
 * srsLTE is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of
 * the License, or (at your option) any later version.
 *
 * srsLTE is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * A copy of the GNU Affero General Public License can be found in
 * the LICENSE file in the top-level directory of this distribution
 * and at http://www.gnu.org/licenses/.
 *
 */

#ifndef SRSENB_SCHEDULER_UE_H
#define SRSENB_SCHEDULER_UE_H

#include "scheduler_common.h"
#include "srslte/common/log.h"
#include "srslte/mac/pdu.h"
#include <map>
#include <vector>

#include "scheduler_harq.h"
#include <deque>

namespace srsenb {

/// Per-carrier scheduling state of a single UE: channel-state reports
/// (CQI/RI/PMI), the HARQ entity, MCS limits, and helpers for computing
/// TBS / required-PRB allocations on this carrier.
struct sched_ue_carrier {
  /// Enough HARQ processes to span the full FDD UL+DL HARQ round-trip delay.
  const static int SCHED_MAX_HARQ_PROC = FDD_HARQ_DELAY_UL_MS + FDD_HARQ_DELAY_DL_MS;

  sched_ue_carrier(const sched_interface::ue_cfg_t& cfg_,
                   const sched_cell_params_t&       cell_cfg_,
                   uint16_t                         rnti_,
                   uint32_t                         ue_cc_idx);
  void reset();
  void set_cfg(const sched_interface::ue_cfg_t& cfg); ///< reconfigure ue carrier

  /// Aggregation level for a DCI of nof_bits (presumably CQI-driven -- see .cc).
  uint32_t get_aggr_level(uint32_t nof_bits);

  // TBS/MCS selection helpers; `mcs` is an output parameter. See the .cc for
  // the exact return-value / error convention.
  int alloc_tbs(uint32_t nof_prb, uint32_t nof_re, uint32_t req_bytes, bool is_ul, int* mcs);
  int alloc_tbs_dl(uint32_t nof_prb, uint32_t nof_re, uint32_t req_bytes, int* mcs);
  int alloc_tbs_ul(uint32_t nof_prb, uint32_t nof_re, uint32_t req_bytes, int* mcs);

  // Minimum number of PRBs needed to fit req_bytes in DL / UL.
  int      get_required_prb_dl(uint32_t req_bytes, uint32_t nof_ctrl_symbols);
  uint32_t get_required_prb_ul(uint32_t req_bytes);

  const sched_cell_params_t* get_cell_cfg() const { return cell_params; }
  bool                       is_active() const { return active; }
  void                       set_dl_cqi(uint32_t tti_tx_dl, uint32_t dl_cqi);

  harq_entity harq_ent; ///< HARQ state for this carrier

  // Latest channel-state reports; each value is paired with a *_tti field
  // carrying the TTI it was set at (update rules live in the .cc).
  uint32_t dl_ri      = 0;
  uint32_t dl_ri_tti  = 0;
  uint32_t dl_pmi     = 0;
  uint32_t dl_pmi_tti = 0;
  uint32_t dl_cqi     = 1;
  uint32_t dl_cqi_tti = 0;
  uint32_t ul_cqi     = 1;
  uint32_t ul_cqi_tti = 0;
  bool     dl_cqi_rx  = false; ///< set once a DL CQI report has been received

  // Scheduler-imposed caps and fixed overrides for MCS / aggregation level.
  uint32_t max_mcs_dl = 28, max_mcs_dl_alt = 27, max_mcs_ul = 28;
  uint32_t max_aggr_level = 3;
  int      fixed_mcs_ul = 0, fixed_mcs_dl = 0;

  // Allowed DCI locations per CFI and per subframe
  std::array<std::array<sched_dci_cce_t, 10>, 3> dci_locations = {};

private:
  // config
  srslte::log_ref                  log_h;
  const sched_interface::ue_cfg_t* cfg         = nullptr;
  const sched_cell_params_t*       cell_params = nullptr;
  uint16_t                         rnti;
  uint32_t                         ue_cc_idx = 0;
  bool                             active    = false;
};

/** This class is designed to be thread-safe because it is called from workers through scheduler thread and from
 * higher layers and mac threads.
 */
class sched_ue
{
public:
  /*************************************************************
   *
   * FAPI-like Interface
   *
   ************************************************************/
  sched_ue();
  void reset();
  void phy_config_enabled(uint32_t tti, bool enabled);
  void init(uint16_t rnti, const std::vector<sched_cell_params_t>& cell_list_params_);
  void set_cfg(const sched_interface::ue_cfg_t& cfg);

  // Bearer (logical channel) configuration
  void set_bearer_cfg(uint32_t lc_id, srsenb::sched_interface::ue_bearer_cfg_t* cfg);
  void rem_bearer(uint32_t lc_id);

  // Buffer-state / power-headroom updates fed in from upper layers
  void dl_buffer_state(uint8_t lc_id, uint32_t tx_queue, uint32_t retx_queue);
  void ul_buffer_state(uint8_t lc_id, uint32_t bsr, bool set_value = true);
  void ul_phr(int phr);
  void mac_buffer_state(uint32_t ce_code, uint32_t nof_cmds);
  void ul_recv_len(uint32_t lcid, uint32_t len);

  // Channel-state and HARQ feedback, keyed by eNB carrier index (enb_cc_idx)
  void set_ul_cqi(uint32_t tti, uint32_t enb_cc_idx, uint32_t cqi, uint32_t ul_ch_code);
  void set_dl_ri(uint32_t tti, uint32_t enb_cc_idx, uint32_t ri);
  // NOTE(review): parameter is named "ri" but, per the function name, carries
  // the PMI -- verify against the .cc and consider renaming there.
  void set_dl_pmi(uint32_t tti, uint32_t enb_cc_idx, uint32_t ri);
  void set_dl_cqi(uint32_t tti, uint32_t enb_cc_idx, uint32_t cqi);
  int  set_ack_info(uint32_t tti, uint32_t enb_cc_idx, uint32_t tb_idx, bool ack);
  void set_ul_crc(srslte::tti_point tti_rx, uint32_t enb_cc_idx, bool crc_res);

  /*******************************************************
   * Custom functions
   *******************************************************/
  // TPC (transmit power control) adjustments -- see next_tpc_* members below.
  void tpc_inc();
  void tpc_dec();

  const dl_harq_proc& get_dl_harq(uint32_t idx, uint32_t cc_idx) const;
  uint16_t            get_rnti() const { return rnti; }
  /// Presumably maps an eNB carrier index to (found, UE carrier index) -- see .cc.
  std::pair<bool, uint32_t>        get_cell_index(uint32_t enb_cc_idx) const;
  const sched_interface::ue_cfg_t& get_ue_cfg() const { return cfg; }
  uint32_t                         get_aggr_level(uint32_t ue_cc_idx, uint32_t nof_bits);

  /*******************************************************
   * Functions used by scheduler metric objects
   *******************************************************/
  uint32_t                      get_required_prb_ul(uint32_t cc_idx, uint32_t req_bytes);
  rbg_range_t                   get_required_dl_rbgs(uint32_t ue_cc_idx);
  std::pair<uint32_t, uint32_t> get_requested_dl_bytes(uint32_t ue_cc_idx);
  uint32_t                      get_pending_dl_new_data();
  uint32_t                      get_pending_ul_new_data(uint32_t tti);
  uint32_t                      get_pending_ul_old_data(uint32_t cc_idx);
  uint32_t                      get_pending_dl_new_data_total();

  // HARQ process accessors; ownership stays with the carrier's harq_entity.
  dl_harq_proc* get_pending_dl_harq(uint32_t tti_tx_dl, uint32_t cc_idx);
  dl_harq_proc* get_empty_dl_harq(uint32_t tti_tx_dl, uint32_t cc_idx);
  ul_harq_proc* get_ul_harq(uint32_t tti, uint32_t ue_cc_idx);

  /*******************************************************
   * Functions used by the scheduler carrier object
   *******************************************************/
  void finish_tti(const tti_params_t& tti_params, uint32_t enb_cc_idx);

  /*******************************************************
   * Functions used by the scheduler object
   *******************************************************/
  // Scheduling Request flag
  void set_sr();
  void unset_sr();

  // DCI generation: fill `data` for the given HARQ pid / carrier / allocation.
  int generate_dl_dci_format(uint32_t                          pid,
                             sched_interface::dl_sched_data_t* data,
                             uint32_t                          tti,
                             uint32_t                          ue_cc_idx,
                             uint32_t                          cfi,
                             const rbgmask_t&                  user_mask);
  int generate_format0(sched_interface::ul_sched_data_t* data,
                       uint32_t                          tti,
                       uint32_t                          cc_idx,
                       ul_harq_proc::ul_alloc_t          alloc,
                       bool                              needs_pdcch,
                       srslte_dci_location_t             cce_range,
                       int                               explicit_mcs = -1);

  srslte_dci_format_t get_dci_format();
  sched_dci_cce_t*    get_locations(uint32_t enb_cc_idx, uint32_t current_cfi, uint32_t sf_idx);
  sched_ue_carrier*   get_ue_carrier(uint32_t enb_cc_idx);

  bool     needs_cqi(uint32_t tti, uint32_t cc_idx, bool will_send = false);
  uint32_t get_max_retx();

  bool pucch_sr_collision(uint32_t current_tti, uint32_t n_cce);

  /// CQI -> TBS mapping helper; `mcs` is an output parameter (see .cc).
  static int cqi_to_tbs(uint32_t  cqi,
                        uint32_t  nof_prb,
                        uint32_t  nof_re,
                        uint32_t  max_mcs,
                        uint32_t  max_Qm,
                        bool      use_tbs_index_alt,
                        bool      is_ul,
                        uint32_t* mcs);

private:
  /// Per-logical-channel state: configuration plus DL tx/retx queue sizes and UL BSR.
  struct ue_bearer_t {
    sched_interface::ue_bearer_cfg_t cfg      = {};
    int                              buf_tx   = 0;
    int                              buf_retx = 0;
    int                              bsr      = 0;
  };

  // "unlocked" variants are the non-synchronized internals of the public API.
  void set_bearer_cfg_unlocked(uint32_t lc_id, const sched_interface::ue_bearer_cfg_t& cfg_);

  bool is_sr_triggered();

  // MAC PDU assembly helpers
  int      alloc_rlc_pdu(sched_interface::dl_sched_pdu_t* mac_sdu, int rem_tbs);
  uint32_t allocate_mac_sdus(sched_interface::dl_sched_data_t* data, uint32_t total_tbs, uint32_t tbidx);
  uint32_t allocate_mac_ces(sched_interface::dl_sched_data_t* data, uint32_t total_tbs, uint32_t ue_cc_idx);
  std::pair<int, int> allocate_new_dl_mac_pdu(sched_interface::dl_sched_data_t* data,
                                              dl_harq_proc*                     h,
                                              const rbgmask_t&                  user_mask,
                                              uint32_t                          tti_tx_dl,
                                              uint32_t                          ue_cc_idx,
                                              uint32_t                          cfi,
                                              uint32_t                          tb,
                                              const char*                       dci_format);

  std::pair<int, int> compute_mcs_and_tbs(uint32_t               ue_cc_idx,
                                          uint32_t               tti_tx_dl,
                                          uint32_t               nof_alloc_prbs,
                                          uint32_t               cfi,
                                          const srslte_dci_dl_t& dci);

  static bool bearer_is_ul(const ue_bearer_t* lch);
  static bool bearer_is_dl(const ue_bearer_t* lch);

  uint32_t get_pending_ul_old_data_unlocked(uint32_t cc_idx);
  uint32_t get_pending_ul_new_data_unlocked(uint32_t tti);
  bool     needs_cqi_unlocked(uint32_t tti, uint32_t cc_idx, bool will_send = false);

  // Per-DCI-format generators dispatched by generate_dl_dci_format().
  int generate_format1(uint32_t                          pid,
                       sched_interface::dl_sched_data_t* data,
                       uint32_t                          tti,
                       uint32_t                          cc_idx,
                       uint32_t                          cfi,
                       const rbgmask_t&                  user_mask);
  int generate_format2a(uint32_t                          pid,
                        sched_interface::dl_sched_data_t* data,
                        uint32_t                          tti,
                        uint32_t                          cc_idx,
                        uint32_t                          cfi,
                        const rbgmask_t&                  user_mask);
  int generate_format2(uint32_t                          pid,
                       sched_interface::dl_sched_data_t* data,
                       uint32_t                          tti,
                       uint32_t                          cc_idx,
                       uint32_t                          cfi,
                       const rbgmask_t&                  user_mask);

  /* Args */
  sched_interface::ue_cfg_t                cfg  = {};
  srslte_cell_t                            cell = {};
  srslte::log_ref                          log_h;
  const std::vector<sched_cell_params_t>*  cell_params_list = nullptr;
  const sched_cell_params_t*               main_cc_params   = nullptr;

  /* Buffer states */
  bool                                             sr  = false;
  std::array<ue_bearer_t, sched_interface::MAX_LC> lch = {};
  int                                              power_headroom  = 0;
  uint32_t                                         cqi_request_tti = 0;
  uint16_t                                         rnti            = 0;
  uint32_t                                         max_msg3retx    = 0;

  /* User State */
  int  next_tpc_pusch               = 0;
  int  next_tpc_pucch               = 0;
  bool phy_config_dedicated_enabled = false;

  std::vector<sched_ue_carrier> carriers; ///< map of UE CellIndex to carrier configuration

  // Control Element Command queue
  using ce_cmd = srslte::dl_sch_lcid;
  std::deque<ce_cmd> pending_ces;
};

} // namespace srsenb

#endif // SRSENB_SCHEDULER_UE_H
{ "pile_set_name": "Github" }
/*! AutoFill 2.0.0 * ©2008-2015 SpryMedia Ltd - datatables.net/license */ /** * @summary AutoFill * @description Add Excel like click and drag auto-fill options to DataTables * @version 2.0.0 * @file dataTables.autoFill.js * @author SpryMedia Ltd (www.sprymedia.co.uk) * @contact www.sprymedia.co.uk/contact * @copyright Copyright 2010-2015 SpryMedia Ltd. * * This source file is free software, available under the following license: * MIT license - http://datatables.net/license/mit * * This source file is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details. * * For details please refer to: http://www.datatables.net */ (function( window, document, undefined ) { var factory = function( $, DataTable ) { "use strict"; var _instance = 0; /** * AutoFill provides Excel like auto-fill features for a DataTable * * @class AutoFill * @constructor * @param {object} oTD DataTables settings object * @param {object} oConfig Configuration object for AutoFill */ var AutoFill = function( dt, opts ) { if ( ! DataTable.versionCheck || ! 
DataTable.versionCheck( '1.10.8' ) ) { throw( "Warning: AutoFill requires DataTables 1.10.8 or greater"); } // User and defaults configuration object this.c = $.extend( true, {}, DataTable.defaults.autoFill, AutoFill.defaults, opts ); /** * @namespace Settings object which contains customisable information for AutoFill instance */ this.s = { /** @type {DataTable.Api} DataTables' API instance */ dt: new DataTable.Api( dt ), /** @type {String} Unique namespace for events attached to the document */ namespace: '.autoFill'+(_instance++), /** @type {Object} Cached dimension information for use in the mouse move event handler */ scroll: {}, /** @type {integer} Interval object used for smooth scrolling */ scrollInterval: null }; /** * @namespace Common and useful DOM elements for the class instance */ this.dom = { /** @type {jQuery} AutoFill handle */ handle: $('<div class="dt-autofill-handle"/>'), /** * @type {Object} Selected cells outline - Need to use 4 elements, * otherwise the mouse over if you back into the selected rectangle * will be over that element, rather than the cells! 
*/ select: { top: $('<div class="dt-autofill-select top"/>'), right: $('<div class="dt-autofill-select right"/>'), bottom: $('<div class="dt-autofill-select bottom"/>'), left: $('<div class="dt-autofill-select left"/>') }, /** @type {jQuery} Fill type chooser background */ background: $('<div class="dt-autofill-background"/>'), /** @type {jQuery} Fill type chooser */ list: $('<div class="dt-autofill-list">'+this.s.dt.i18n('autoFill.info', '')+'<ul/></div>'), /** @type {jQuery} DataTables scrolling container */ dtScroll: null }; /* Constructor logic */ this._constructor(); }; AutoFill.prototype = { /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Constructor */ /** * Initialise the RowReorder instance * * @private */ _constructor: function () { var that = this; var dt = this.s.dt; var dtScroll = $('div.dataTables_scrollBody', this.s.dt.table().container()); if ( dtScroll.length ) { this.dom.dtScroll = dtScroll; // Need to scroll container to be the offset parent if ( dtScroll.css('position') === 'static' ) { dtScroll.css( 'position', 'relative' ); } } this._focusListener(); this.dom.handle.on( 'mousedown', function (e) { that._mousedown( e ); return false; } ); dt.on( 'destroy.autoFill', function () { dt.off( '.autoFill' ); $(dt.table().body()).off( that.s.namespace ); $(document.body).off( that.s.namespace ); } ); }, /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Private methods */ /** * Display the AutoFill drag handle by appending it to a table cell. This * is the opposite of the _detach method. * * @param {node} node TD/TH cell to insert the handle into * @private */ _attach: function ( node ) { var dt = this.s.dt; var idx = dt.cell( node ).index(); if ( ! idx || dt.columns( this.c.columns ).indexes().indexOf( idx.column ) === -1 ) { this._detach(); return; } this.dom.attachedTo = node; this.dom.handle.appendTo( node ); }, /** * Determine can the fill type should be. 
This can be automatic, or ask the * end user. * * @param {array} cells Information about the selected cells from the key * up function * @private */ _actionSelector: function ( cells ) { var that = this; var dt = this.s.dt; var actions = AutoFill.actions; var available = []; // "Ask" each plug-in if it wants to handle this data $.each( actions, function ( key, action ) { if ( action.available( dt, cells ) ) { available.push( key ); } } ); if ( available.length === 1 && this.c.alwaysAsk === false ) { // Only one action available - enact it immediately var result = actions[ available[0] ].execute( dt, cells ); this._update( result, cells ); } else { // Multiple actions available - ask the end user what they want to do var list = this.dom.list.children('ul').empty(); // Add a cancel option available.push( 'cancel' ); $.each( available, function ( i, name ) { list.append( $('<li/>') .append( '<div class="dt-autofill-question">'+ actions[ name ].option( dt, cells )+ '<div>' ) .append( $('<div class="dt-autofill-button">' ) .append( $('<button class="'+AutoFill.classes.btn+'">'+dt.i18n('autoFill.button', '&gt;')+'</button>') .on( 'click', function () { var result = actions[ name ].execute( dt, cells, $(this).closest('li') ); that._update( result, cells ); that.dom.background.remove(); that.dom.list.remove(); } ) ) ) ); } ); this.dom.background.appendTo( 'body' ); this.dom.list.appendTo( 'body' ); this.dom.list.css( 'margin-top', this.dom.list.outerHeight()/2 * -1 ); } }, /** * Remove the AutoFill handle from the document * * @private */ _detach: function () { this.dom.attachedTo = null; this.dom.handle.detach(); }, /** * Draw the selection outline by calculating the range between the start * and end cells, then placing the highlighting elements to draw a rectangle * * @param {node} target End cell * @param {object} e Originating event * @private */ _drawSelection: function ( target, e ) { // Calculate boundary for start cell to this one var dt = this.s.dt; var start = 
this.s.start; var startCell = $(this.dom.start); var endCell = $(target); var end = { row: endCell.parent().index(), column: endCell.index() }; // Be sure that is a DataTables controlled cell if ( ! dt.cell( endCell ).any() ) { return; } // if target is not in the columns available - do nothing if ( dt.columns( this.c.columns ).indexes().indexOf( end.column ) === -1 ) { return; } this.s.end = end; var top, bottom, left, right, height, width; top = start.row < end.row ? startCell : endCell; bottom = start.row < end.row ? endCell : startCell; left = start.column < end.column ? startCell : endCell; right = start.column < end.column ? endCell : startCell; top = top.position().top; left = left.position().left; height = bottom.position().top + bottom.outerHeight() - top; width = right.position().left + right.outerWidth() - left; var dtScroll = this.dom.dtScroll; if ( dtScroll ) { top += dtScroll.scrollTop(); left += dtScroll.scrollLeft(); } var select = this.dom.select; select.top.css( { top: top, left: left, width: width } ); select.left.css( { top: top, left: left, height: height } ); select.bottom.css( { top: top + height, left: left, width: width } ); select.right.css( { top: top, left: left + width, height: height } ); }, /** * Use the Editor API to perform an update based on the new data for the * cells * * @param {array} cells Information about the selected cells from the key * up function * @private */ _editor: function ( cells ) { var dt = this.s.dt; var editor = this.c.editor; if ( ! editor ) { return; } // Build the object structure for Editor's multi-row editing var idValues = {}; var nodes = []; for ( var i=0, ien=cells.length ; i<ien ; i++ ) { for ( var j=0, jen=cells[i].length ; j<jen ; j++ ) { var cell = cells[i][j]; // Determine the field name for the cell being edited var col = dt.settings()[0].aoColumns[ cell.index.column ]; var dataSrc = col.editField !== undefined ? col.editField : col.mData; if ( ! 
dataSrc ) { throw 'Could not automatically determine field name. '+ 'Please see https://datatables.net/tn/11'; } if ( ! idValues[ dataSrc ] ) { idValues[ dataSrc ] = {}; } var id = dt.row( cell.index.row ).id(); idValues[ dataSrc ][ id ] = cell.set; // Keep a list of cells so we can activate the bubble editing // with them nodes.push( cell.index ); } } // Perform the edit using bubble editing as it allows us to specify // the cells to be edited, rather than using full rows editor .bubble( nodes, false ) .multiSet( idValues ) .submit(); }, /** * Emit an event on the DataTable for listeners * * @param {string} name Event name * @param {array} args Event arguments * @private */ _emitEvent: function ( name, args ) { this.s.dt.iterator( 'table', function ( ctx, i ) { $(ctx.nTable).triggerHandler( name+'.dt', args ); } ); }, /** * Attach suitable listeners (based on the configuration) that will attach * and detach the AutoFill handle in the document. * * @private */ _focusListener: function () { var that = this; var dt = this.s.dt; var namespace = this.s.namespace; var focus = this.c.focus !== null ? this.c.focus : dt.settings()[0].keytable ? 'focus' : 'hover'; // All event listeners attached here are removed in the `destroy` // callback in the constructor if ( focus === 'focus' ) { dt .on( 'key-focus.autoFill', function ( e, dt, cell ) { that._attach( cell.node() ); } ) .on( 'key-blur.autoFill', function ( e, dt, cell ) { that._detach(); } ); } else if ( focus === 'click' ) { $(dt.table().body()).on( 'click'+namespace, 'td, th', function (e) { that._attach( this ); } ); $(document.body).on( 'click'+namespace, function (e) { if ( ! 
$(e.target).parents().filter( dt.table().body() ).length ) { that._detach(); } } ); } else { $(dt.table().body()) .on( 'mouseenter'+namespace, 'td, th', function (e) { that._attach( this ); } ) .on( 'mouseleave'+namespace, function (e) { that._detach(); } ); } }, /** * Start mouse drag - selects the start cell * * @param {object} e Mouse down event * @private */ _mousedown: function ( e ) { var that = this; var dt = this.s.dt; this.dom.start = this.dom.attachedTo; this.s.start = { row: $(this.dom.start).parent().index(), column: $(this.dom.start).index() }; $(document.body) .on( 'mousemove.autoFill', function (e) { that._mousemove( e ); } ) .on( 'mouseup.autoFill', function (e) { that._mouseup( e ); } ); var select = this.dom.select; var offsetParent = $(this.s.dt.table().body()).offsetParent(); select.top.appendTo( offsetParent ); select.left.appendTo( offsetParent ); select.right.appendTo( offsetParent ); select.bottom.appendTo( offsetParent ); this._drawSelection( this.dom.start, e ); this.dom.handle.css( 'display', 'none' ); // Cache scrolling information so mouse move doesn't need to read. // This assumes that the window and DT scroller will not change size // during an AutoFill drag, which I think is a fair assumption var scrollWrapper = this.dom.dtScroll; this.s.scroll = { windowHeight: $(window).height(), windowWidth: $(window).width(), dtTop: scrollWrapper ? scrollWrapper.offset().top : null, dtLeft: scrollWrapper ? scrollWrapper.offset().left : null, dtHeight: scrollWrapper ? scrollWrapper.outerHeight() : null, dtWidth: scrollWrapper ? 
scrollWrapper.outerWidth() : null }; }, /** * Mouse drag - selects the end cell and update the selection display for * the end user * * @param {object} e Mouse move event * @private */ _mousemove: function ( e ) { var that = this; var dt = this.s.dt; var name = e.target.nodeName.toLowerCase(); if ( name !== 'td' && name !== 'th' ) { return; } this._drawSelection( e.target, e ); this._shiftScroll( e ); }, /** * End mouse drag - perform the update actions * * @param {object} e Mouse up event * @private */ _mouseup: function ( e ) { $(document.body).off( '.autoFill' ); var dt = this.s.dt; var select = this.dom.select; select.top.remove(); select.left.remove(); select.right.remove(); select.bottom.remove(); this.dom.handle.css( 'display', 'block' ); // Display complete - now do something useful with the selection! var start = this.s.start; var end = this.s.end; // Haven't selected multiple cells, so nothing to do if ( start.row === end.row && start.column === end.column ) { return; } // Build a matrix representation of the selected rows var rows = this._range( start.row, end.row ); var columns = this._range( start.column, end.column ); var selected = []; // Can't use Array.prototype.map as IE8 doesn't support it // Can't use $.map as jQuery flattens 2D arrays // Need to use a good old fashioned for loop for ( var rowIdx=0 ; rowIdx<rows.length ; rowIdx++ ) { selected.push( $.map( columns, function (column) { var cell = dt.cell( ':eq('+rows[rowIdx]+')', column+':visible', {page:'current'} ); return { cell: cell, data: cell.data(), index: cell.index() }; } ) ); } this._actionSelector( selected ); }, /** * Create an array with a range of numbers defined by the start and end * parameters passed in (inclusive!). 
* * @param {integer} start Start * @param {integer} end End * @private */ _range: function ( start, end ) { var out = []; var i; if ( start <= end ) { for ( i=start ; i<=end ; i++ ) { out.push( i ); } } else { for ( i=start ; i>=end ; i-- ) { out.push( i ); } } return out; }, /** * Move the window and DataTables scrolling during a drag to scroll new * content into view. This is done by proximity to the edge of the scrolling * container of the mouse - for example near the top edge of the window * should scroll up. This is a little complicated as there are two elements * that can be scrolled - the window and the DataTables scrolling view port * (if scrollX and / or scrollY is enabled). * * @param {object} e Mouse move event object * @private */ _shiftScroll: function ( e ) { var that = this; var dt = this.s.dt; var scroll = this.s.scroll; var runInterval = false; var scrollSpeed = 5; var buffer = 65; var windowY = e.pageY - document.body.scrollTop, windowX = e.pageX - document.body.scrollLeft, windowVert, windowHoriz, dtVert, dtHoriz; // Window calculations - based on the mouse position in the window, // regardless of scrolling if ( windowY < buffer ) { windowVert = scrollSpeed * -1; } else if ( windowY > scroll.windowHeight - buffer ) { windowVert = scrollSpeed; } if ( windowX < buffer ) { windowHoriz = scrollSpeed * -1; } else if ( windowX > scroll.windowWidth - buffer ) { windowHoriz = scrollSpeed; } // DataTables scrolling calculations - based on the table's position in // the document and the mouse position on the page if ( scroll.dtTop !== null && e.pageY < scroll.dtTop + buffer ) { dtVert = scrollSpeed * -1; } else if ( scroll.dtTop !== null && e.pageY > scroll.dtTop + scroll.dtHeight - buffer ) { dtVert = scrollSpeed; } if ( scroll.dtLeft !== null && e.pageX < scroll.dtLeft + buffer ) { dtHoriz = scrollSpeed * -1; } else if ( scroll.dtLeft !== null && e.pageX > scroll.dtLeft + scroll.dtWidth - buffer ) { dtHoriz = scrollSpeed; } // This is where it gets 
interesting. We want to continue scrolling // without requiring a mouse move, so we need an interval to be // triggered. The interval should continue until it is no longer needed, // but it must also use the latest scroll commands (for example consider // that the mouse might move from scrolling up to scrolling left, all // with the same interval running. We use the `scroll` object to "pass" // this information to the interval. Can't use local variables as they // wouldn't be the ones that are used by an already existing interval! if ( windowVert || windowHoriz || dtVert || dtHoriz ) { scroll.windowVert = windowVert; scroll.windowHoriz = windowHoriz; scroll.dtVert = dtVert; scroll.dtHoriz = dtHoriz; runInterval = true; } else if ( this.s.scrollInterval ) { // Don't need to scroll - remove any existing timer clearInterval( this.s.scrollInterval ); this.s.scrollInterval = null; } // If we need to run the interval to scroll and there is no existing // interval (if there is an existing one, it will continue to run) if ( ! this.s.scrollInterval && runInterval ) { this.s.scrollInterval = setInterval( function () { // Don't need to worry about setting scroll <0 or beyond the // scroll bound as the browser will just reject that. if ( scroll.windowVert ) { document.body.scrollTop += scroll.windowVert; } if ( scroll.windowHoriz ) { document.body.scrollLeft += scroll.windowHoriz; } // DataTables scrolling if ( scroll.dtVert || scroll.dtHoriz ) { var scroller = that.dom.dtScroll[0]; if ( scroll.dtVert ) { scroller.scrollTop += scroll.dtVert; } if ( scroll.dtHoriz ) { scroller.scrollLeft += scroll.dtHoriz; } } }, 20 ); } }, /** * Update the DataTable after the user has selected what they want to do * * @param {false|undefined} result Return from the `execute` method - can * be false internally to do nothing. This is not documented for plug-ins * and is used only by the cancel option. 
* @param {array} cells Information about the selected cells from the key * up function, argumented with the set values * @private */ _update: function ( result, cells ) { // Do nothing on `false` return from an execute function if ( result === false ) { return; } var dt = this.s.dt; var cell; // Potentially allow modifications to the cells matrix this._emitEvent( 'preAutoFill', [ dt, cells ] ); this._editor( cells ); // Automatic updates are not performed if `update` is null and the // `editor` parameter is passed in - the reason being that Editor will // update the data once submitted var update = this.c.update !== null ? this.c.update : this.c.editor ? false : true; if ( update ) { for ( var i=0, ien=cells.length ; i<ien ; i++ ) { for ( var j=0, jen=cells[i].length ; j<jen ; j++ ) { cell = cells[i][j]; cell.cell.data( cell.set ); } } dt.draw(false); } this._emitEvent( 'autoFill', [ dt, cells ] ); } }; /** * AutoFill actions. The options here determine how AutoFill will fill the data * in the table when the user has selected a range of cells. Please see the * documentation on the DataTables site for full details on how to create plug- * ins. 
* * @type {Object} */ AutoFill.actions = { increment: { available: function ( dt, cells ) { return $.isNumeric( cells[0][0].data ); }, option: function ( dt, cells ) { return dt.i18n( 'autoFill.increment', 'Increment / decrement each cell by: <input type="number" value="1">' ); }, execute: function ( dt, cells, node ) { var value = cells[0][0].data * 1; var increment = $('input', node).val() * 1; for ( var i=0, ien=cells.length ; i<ien ; i++ ) { for ( var j=0, jen=cells[i].length ; j<jen ; j++ ) { cells[i][j].set = value; value += increment; } } } }, fill: { available: function ( dt, cells ) { return true; }, option: function ( dt, cells ) { return dt.i18n('autoFill.fill', 'Fill all cells with <i>'+cells[0][0].data+'</i>' ); }, execute: function ( dt, cells, node ) { var value = cells[0][0].data; for ( var i=0, ien=cells.length ; i<ien ; i++ ) { for ( var j=0, jen=cells[i].length ; j<jen ; j++ ) { cells[i][j].set = value; } } } }, fillHorizontal: { available: function ( dt, cells ) { return cells.length > 1 && cells[0].length > 1; }, option: function ( dt, cells ) { return dt.i18n('autoFill.fillHorizontal', 'Fill cells horizontally' ); }, execute: function ( dt, cells, node ) { for ( var i=0, ien=cells.length ; i<ien ; i++ ) { for ( var j=0, jen=cells[i].length ; j<jen ; j++ ) { cells[i][j].set = cells[i][0].data; } } } }, fillVertical: { available: function ( dt, cells ) { return cells.length > 1 && cells[0].length > 1; }, option: function ( dt, cells ) { return dt.i18n('autoFill.fillVertical', 'Fill cells vertically' ); }, execute: function ( dt, cells, node ) { for ( var i=0, ien=cells.length ; i<ien ; i++ ) { for ( var j=0, jen=cells[i].length ; j<jen ; j++ ) { cells[i][j].set = cells[0][j].data; } } } }, // Special type that does not make itself available, but is added // automatically by AutoFill if a multi-choice list is shown. 
This allows // sensible code reuse cancel: { available: function () { return false; }, option: function ( dt ) { return dt.i18n('autoFill.cancel', 'Cancel' ); }, execute: function () { return false; } } }; /** * AutoFill version * * @static * @type String */ AutoFill.version = '2.0.0'; /** * AutoFill defaults * * @namespace */ AutoFill.defaults = { /** @type {Boolean} Ask user what they want to do, even for a single option */ alwaysAsk: false, /** @type {string|null} What will trigger a focus */ focus: null, // focus, click, hover /** @type {column-selector} Columns to provide auto fill for */ columns: '', // all /** @type {boolean|null} Update the cells after a drag */ update: null, // false is editor given, true otherwise /** @type {DataTable.Editor} Editor instance for automatic submission */ editor: null }; /** * Classes used by AutoFill that are configurable * * @namespace */ AutoFill.classes = { /** @type {String} Class used by the selection button */ btn: 'btn' }; // Attach a listener to the document which listens for DataTables initialisation // events so we can automatically initialise $(document).on( 'init.dt.autofill', function (e, settings, json) { if ( e.namespace !== 'dt' ) { return; } var init = settings.oInit.autoFill; var defaults = DataTable.defaults.autoFill; if ( init || defaults ) { var opts = $.extend( {}, init, defaults ); if ( init !== false ) { new AutoFill( settings, opts ); } } } ); // Alias for access DataTable.AutoFill = AutoFill; DataTable.AutoFill = AutoFill; return AutoFill; }; // Define as an AMD module if possible if ( typeof define === 'function' && define.amd ) { define( ['jquery', 'datatables'], factory ); } else if ( typeof exports === 'object' ) { // Node/CommonJS factory( require('jquery'), require('datatables') ); } else if ( jQuery && !jQuery.fn.dataTable.AutoFill ) { // Otherwise simply initialise as normal, stopping multiple evaluation factory( jQuery, jQuery.fn.dataTable ); } }(window, document));
{ "pile_set_name": "Github" }
import React, { Component } from 'react'; import { MDBContainer, MDBBtn, MDBModal, MDBModalBody, MDBModalHeader, MDBModalFooter, MDBInput, MDBIcon } from 'mdbreact'; import DocsLink from '../components/docsLink'; import SectionContainer from '../components/sectionContainer'; class ModalFormPage extends Component { state = { modal1: false, modal2: false, modal3: false, modal4: false, modal5: false }; toggle = nr => () => { const modalNumber = `modal${nr}`; this.setState({ [modalNumber]: !this.state[modalNumber] }); }; render() { const { modal1, modal2, modal3, modal4, modal5, modal6 } = this.state; return ( <MDBContainer> <DocsLink title='Modal Form' href='https://mdbootstrap.com/docs/react/modals/basic/' /> <SectionContainer header='Simple modal login' flexCenter> <MDBBtn rounded onClick={this.toggle(1)}> Launch Modal Login Form </MDBBtn> <MDBModal isOpen={modal1} toggle={this.toggle(1)}> <MDBModalHeader className='text-center' titleClass='w-100 font-weight-bold' toggle={this.toggle(1)} > Sign in </MDBModalHeader> <MDBModalBody> <form className='mx-3 grey-text'> <MDBInput label='Type your email' icon='envelope' group type='email' validate error='wrong' success='right' /> <MDBInput label='Type your password' icon='lock' group type='password' validate /> </form> </MDBModalBody> <MDBModalFooter className='justify-content-center'> <MDBBtn onClick={this.toggle(1)}>Login</MDBBtn> </MDBModalFooter> </MDBModal> </SectionContainer> <SectionContainer header='Simple modal register' flexCenter> <MDBBtn rounded onClick={this.toggle(2)}> Launch Modal Register Form </MDBBtn> <MDBModal isOpen={modal2} toggle={this.toggle(2)}> <MDBModalHeader className='text-center' titleClass='w-100 font-weight-bold' toggle={this.toggle(2)} > Sign in </MDBModalHeader> <MDBModalBody> <form className='mx-3 grey-text'> <MDBInput label='Your name' icon='user' group type='text' validate error='wrong' success='right' /> <MDBInput label='Your email' icon='envelope' group type='email' validate 
error='wrong' success='right' /> <MDBInput label='Your password' icon='lock' group type='password' validate /> </form> </MDBModalBody> <MDBModalFooter className='justify-content-center'> <MDBBtn color='deep-orange' onClick={this.toggle(2)}> SIGN UP </MDBBtn> </MDBModalFooter> </MDBModal> </SectionContainer> <SectionContainer header='Simple modal subscription' flexCenter> <MDBBtn rounded onClick={this.toggle(3)}> Launch Modal subscription Form </MDBBtn> <MDBModal isOpen={modal3} toggle={this.toggle(3)}> <MDBModalHeader className='text-center' titleClass='w-100 font-weight-bold' toggle={this.toggle(3)} > Subscribe </MDBModalHeader> <MDBModalBody> <form className='mx-3 grey-text'> <MDBInput label='Your name' icon='user' group type='text' validate /> <MDBInput label='Your email' icon='envelope' group type='email' validate error='wrong' success='right' /> </form> </MDBModalBody> <MDBModalFooter className='justify-content-center'> <MDBBtn color='indigo' onClick={this.toggle(3)}> Send <MDBIcon icon='paper-plane' className='ml-2' /> </MDBBtn> </MDBModalFooter> </MDBModal> </SectionContainer> <SectionContainer header='Simple modal contact' flexCenter> <MDBBtn rounded onClick={this.toggle(4)}> Launch Modal Contact Form </MDBBtn> <MDBModal isOpen={modal4} toggle={this.toggle(4)}> <MDBModalHeader className='text-center' titleClass='w-100 font-weight-bold' toggle={this.toggle(4)} > Write to us </MDBModalHeader> <MDBModalBody> <form className='mx-3 grey-text'> <MDBInput label='Your name' icon='user' group type='text' validate /> <MDBInput label='Your email' icon='envelope' group type='email' validate error='wrong' success='right' /> <MDBInput label='Your Subject' icon='tag' group type='text' /> <MDBInput type='textarea' rows='2' label='Your message' icon='pencil' /> </form> </MDBModalBody> <MDBModalFooter className='justify-content-center'> <MDBBtn color='unique' onClick={this.toggle(4)}> Send <MDBIcon icon='paper-plane' className='ml-2' /> </MDBBtn> </MDBModalFooter> 
</MDBModal> </SectionContainer> <SectionContainer header='Modal with avatar' flexCenter> <MDBBtn rounded onClick={this.toggle(5)}> Launch Modal Login With Avatar </MDBBtn> <MDBModal size='sm' cascading className='modal-avatar' isOpen={modal5} toggle={this.toggle(5)} > <MDBModalHeader className='mx-auto'> <img src='https://mdbootstrap.com/img/Photos/Avatars/img%20%281%29.jpg' alt='avatar' className='rounded-circle img-responsive' /> </MDBModalHeader> <MDBModalBody className='text-center mb-1'> <h5 className='mt-1 mb-2'>Maria Doe</h5> <form className='mx-3 grey-text'> <MDBInput label='Enter password' group type='password' validate /> </form> </MDBModalBody> <MDBModalFooter className='justify-content-center'> <MDBBtn color='cyan' onClick={this.toggle(5)}> Login <MDBIcon icon='sign-in-alt' className='ml-2' /> </MDBBtn> </MDBModalFooter> </MDBModal> </SectionContainer> <SectionContainer header='Subscription modal with orange header' flexCenter > <MDBBtn rounded onClick={this.toggle(6)}> Launch Modal Subscription </MDBBtn> <MDBModal className='modal-notify modal-warning white-text' isOpen={modal6} toggle={this.toggle(6)} > <MDBModalHeader className='text-center' titleClass='w-100 font-weight-bold' toggle={this.toggle(6)} > Subscribe </MDBModalHeader> <MDBModalBody> <form className='mx-3 grey-text'> <MDBInput label='Your name' icon='user' iconClass='grey-text' group type='text' validate error='wrong' success='right' /> <MDBInput label='Your email' icon='envelope' iconClass='grey-text' group type='email' validate error='wrong' success='right' /> </form> </MDBModalBody> <MDBModalFooter className='justify-content-center'> <MDBBtn color='warning' outline onClick={this.toggle(6)}> Send <MDBIcon icon='paper-plane' className='ml-2' /> </MDBBtn> </MDBModalFooter> </MDBModal> </SectionContainer> </MDBContainer> ); } } export default ModalFormPage;
{ "pile_set_name": "Github" }
/*
Copyright The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Code generated by client-gen. DO NOT EDIT.
// (Documentation-only review pass; regenerate via client-gen to change code.)

package v1

import (
	v1 "k8s.io/api/apps/v1"
	meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	types "k8s.io/apimachinery/pkg/types"
	watch "k8s.io/apimachinery/pkg/watch"
	scheme "k8s.io/client-go/kubernetes/scheme"
	rest "k8s.io/client-go/rest"
)

// StatefulSetsGetter has a method to return a StatefulSetInterface.
// A group's client should implement this interface.
type StatefulSetsGetter interface {
	// StatefulSets returns a client scoped to the given namespace for
	// working with StatefulSet resources.
	StatefulSets(namespace string) StatefulSetInterface
}

// StatefulSetInterface has methods to work with StatefulSet resources.
type StatefulSetInterface interface {
	Create(*v1.StatefulSet) (*v1.StatefulSet, error)
	Update(*v1.StatefulSet) (*v1.StatefulSet, error)
	UpdateStatus(*v1.StatefulSet) (*v1.StatefulSet, error)
	Delete(name string, options *meta_v1.DeleteOptions) error
	DeleteCollection(options *meta_v1.DeleteOptions, listOptions meta_v1.ListOptions) error
	Get(name string, options meta_v1.GetOptions) (*v1.StatefulSet, error)
	List(opts meta_v1.ListOptions) (*v1.StatefulSetList, error)
	Watch(opts meta_v1.ListOptions) (watch.Interface, error)
	Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1.StatefulSet, err error)
	// StatefulSetExpansion carries hand-written extension methods
	// (declared elsewhere) so they survive regeneration.
	StatefulSetExpansion
}

// statefulSets implements StatefulSetInterface
type statefulSets struct {
	client rest.Interface // REST client shared with the parent AppsV1Client
	ns     string         // namespace scoping every request issued by this client
}

// newStatefulSets returns a StatefulSets
func newStatefulSets(c *AppsV1Client, namespace string) *statefulSets {
	return &statefulSets{
		client: c.RESTClient(),
		ns:     namespace,
	}
}

// Get takes name of the statefulSet, and returns the corresponding statefulSet object, and an error if there is any.
func (c *statefulSets) Get(name string, options meta_v1.GetOptions) (result *v1.StatefulSet, err error) {
	result = &v1.StatefulSet{}
	err = c.client.Get().
		Namespace(c.ns).
		Resource("statefulsets").
		Name(name).
		VersionedParams(&options, scheme.ParameterCodec).
		Do().
		Into(result)
	return
}

// List takes label and field selectors, and returns the list of StatefulSets that match those selectors.
func (c *statefulSets) List(opts meta_v1.ListOptions) (result *v1.StatefulSetList, err error) {
	result = &v1.StatefulSetList{}
	err = c.client.Get().
		Namespace(c.ns).
		Resource("statefulsets").
		VersionedParams(&opts, scheme.ParameterCodec).
		Do().
		Into(result)
	return
}

// Watch returns a watch.Interface that watches the requested statefulSets.
func (c *statefulSets) Watch(opts meta_v1.ListOptions) (watch.Interface, error) {
	// Force the watch flag so the server upgrades this GET to a watch stream.
	opts.Watch = true
	return c.client.Get().
		Namespace(c.ns).
		Resource("statefulsets").
		VersionedParams(&opts, scheme.ParameterCodec).
		Watch()
}

// Create takes the representation of a statefulSet and creates it.  Returns the server's representation of the statefulSet, and an error, if there is any.
func (c *statefulSets) Create(statefulSet *v1.StatefulSet) (result *v1.StatefulSet, err error) {
	result = &v1.StatefulSet{}
	err = c.client.Post().
		Namespace(c.ns).
		Resource("statefulsets").
		Body(statefulSet).
		Do().
		Into(result)
	return
}

// Update takes the representation of a statefulSet and updates it. Returns the server's representation of the statefulSet, and an error, if there is any.
func (c *statefulSets) Update(statefulSet *v1.StatefulSet) (result *v1.StatefulSet, err error) {
	result = &v1.StatefulSet{}
	err = c.client.Put().
		Namespace(c.ns).
		Resource("statefulsets").
		Name(statefulSet.Name).
		Body(statefulSet).
		Do().
		Into(result)
	return
}

// UpdateStatus was generated because the type contains a Status member.
// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus().
func (c *statefulSets) UpdateStatus(statefulSet *v1.StatefulSet) (result *v1.StatefulSet, err error) {
	result = &v1.StatefulSet{}
	err = c.client.Put().
		Namespace(c.ns).
		Resource("statefulsets").
		Name(statefulSet.Name).
		SubResource("status").
		Body(statefulSet).
		Do().
		Into(result)
	return
}

// Delete takes name of the statefulSet and deletes it. Returns an error if one occurs.
func (c *statefulSets) Delete(name string, options *meta_v1.DeleteOptions) error {
	return c.client.Delete().
		Namespace(c.ns).
		Resource("statefulsets").
		Name(name).
		Body(options).
		Do().
		Error()
}

// DeleteCollection deletes a collection of objects.
func (c *statefulSets) DeleteCollection(options *meta_v1.DeleteOptions, listOptions meta_v1.ListOptions) error {
	return c.client.Delete().
		Namespace(c.ns).
		Resource("statefulsets").
		VersionedParams(&listOptions, scheme.ParameterCodec).
		Body(options).
		Do().
		Error()
}

// Patch applies the patch and returns the patched statefulSet.
func (c *statefulSets) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1.StatefulSet, err error) {
	result = &v1.StatefulSet{}
	// NOTE(review): the generated chain calls SubResource before Name;
	// rest.Request assembles the path segments itself — confirm ordering
	// semantics against the client-go rest.Request docs if modifying.
	err = c.client.Patch(pt).
		Namespace(c.ns).
		Resource("statefulsets").
		SubResource(subresources...).
		Name(name).
		Body(data).
		Do().
		Into(result)
	return
}
{ "pile_set_name": "Github" }
{ "main": "js/tether", "directories": { "lib": "dist" }, "shim": { "js/tether": { "exports": "Tether" } } }
{ "pile_set_name": "Github" }
--- Title: Verordnung über die Berechnung, Zahlung, Weiterleitung, Abrechnung und Prüfung des Gesamtsozialversicherungsbeitrages jurabk: BeitrVV layout: default origslug: beitrvv slug: beitrvv --- # Verordnung über die Berechnung, Zahlung, Weiterleitung, Abrechnung und Prüfung des Gesamtsozialversicherungsbeitrages (BeitrVV) Ausfertigungsdatum : 2006-05-03 Fundstelle : BGBl I: 2006, 1138 Zuletzt geändert durch : Art. 9 G v. 5.12.2012 I 2474 ## Eingangsformel Auf Grund der §§ 28n und 28p Abs. 9 des Vierten Buches Sozialgesetzbuch - Gemeinsame Vorschriften für die Sozialversicherung - in der Fassung der Bekanntmachung vom 23. Januar 2006 (BGBl. I S. 86, 466), in Verbindung mit § 1 des Zuständigkeitsanpassungsgesetzes vom 16. August 2002 (BGBl. I S. 3165) und dem Organisationserlass vom 22\. November 2005 (BGBl. I S. 3197), verordnet das Bundesministerium für Arbeit und Soziales: ## Erster Abschnitt - Berechnung des Gesamtsozialversicherungsbeitrages und der Beitragsbemessungsgrenzen ### § 1 Berechnungsgrundsätze (1) Der Gesamtsozialversicherungsbeitrag und die Beitragsbemessungsgrenzen werden je Kalendermonat für die Kalendertage berechnet, an denen eine versicherungspflichtige Beschäftigung besteht (Sozialversicherungstage); ein voller Kalendermonat wird mit 30 Sozialversicherungstagen angesetzt. Berechnungsbasis ist das aus der Beschäftigung erzielte Arbeitsentgelt bis zur monatlichen Beitragsbemessungsgrenze. (2) Die Rechengänge werden ohne Rundung der einzelnen Zwischenergebnisse durchgeführt. Das Gesamtergebnis wird auf zwei Dezimalstellen berechnet; die zweite Dezimalstelle wird um 1 erhöht, wenn sich in der dritten Dezimalstelle eine der Zahlen 5 bis 9 ergibt. ### § 2 Berechnungsvorgang (1) Beiträge, die der Arbeitgeber und der Beschäftigte je zur Hälfte tragen, werden durch Anwendung des halben Beitragssatzes auf das Arbeitsentgelt und anschließender Verdoppelung des gerundeten Ergebnisses berechnet. 
Auf Beiträge, die der Arbeitgeber allein trägt, kann Satz 1 entsprechend angewandt werden. Werden Beiträge vom Arbeitgeber und vom Beschäftigten nicht je zur Hälfte getragen, ergibt sich der Beitrag aus der Summe der getrennt berechneten gerundeten Anteile. Beiträge, die vom Beschäftigten allein zu tragen sind, werden durch Anwendung des für diese Beiträge geltenden Beitragssatzes oder Beitragszuschlags auf das Arbeitsentgelt berechnet; Satz 3 zweiter Halbsatz gilt entsprechend. Wird die Mindestbeitragsbemessungsgrundlage des § 163 Abs. 8 des Sechsten Buches Sozialgesetzbuch nicht überschritten, wird der Beitragssatz auf die Mindestbeitragsbemessungsgrundlage angewandt und der vom Arbeitgeber zu tragende Beitragsanteil berechnet und gerundet; durch Abzug des Arbeitgeberanteils vom Beitrag ergibt sich der Beitragsanteil des Beschäftigten. (2) In den Fällen der Gleitzone wird der vom Arbeitgeber zu zahlende Beitrag durch Anwendung des halben Beitragssatzes auf die beitragspflichtige Einnahme und anschließender Verdoppelung des gerundeten Ergebnisses berechnet. Der vom Arbeitgeber zu tragende Beitragsanteil wird durch Anwendung des halben sich aus der Summe des Beitragssatzes zur gesetzlichen Rentenversicherung, der gesetzlichen Pflegeversicherung, zur Arbeitsförderung und des halben um den vom Arbeitnehmer allein zu tragenden Beitragsanteil reduzierten Beitragssatzes in der gesetzlichen Krankenversicherung ergebenden Beitragssatzes auf das der Beschäftigung zugrunde liegende Arbeitsentgelt berechnet und gerundet. Der Abzug des Arbeitgeberanteils von dem nach Satz 1 errechneten Beitrag ergibt den Beitragsanteil des Beschäftigten. Bei Entgelten bis zu 450 Euro ergibt sich die beitragspflichtige Einnahme durch Anwendung des Faktors F (§ 163 Abs. 10 des Sechsten Buches Sozialgesetzbuch) auf das der Beschäftigung zugrunde liegende Arbeitsentgelt. 
Vom Beschäftigten allein zu tragende Beitragsanteile werden durch Anwendung des maßgebenden Beitragssatzes oder Beitragszuschlags auf die beitragspflichtige Einnahme berechnet und gerundet. ## Zweiter Abschnitt - Zahlungen des Arbeitgebers ### § 3 Tag der Zahlung, Zahlungsmittel (1) Die Zahlungen der Arbeitgeber oder sonstiger Zahlungspflichtiger sind an die zuständige Einzugsstelle zu leisten. Als Tag der Zahlung gilt 1. bei Barzahlung der Tag des Geldeingangs, 2. bei Zahlung durch Scheck, bei Überweisung oder Einzahlung auf ein Konto der Einzugsstelle der Tag der Wertstellung zugunsten der Einzugsstelle, bei rückwirkender Wertstellung das Datum des elektronischen Kontoauszuges des Geldinstituts der Einzugsstelle, 3. bei Vorliegen einer Einzugsermächtigung der Tag der Fälligkeit. Abweichend von Satz 1 und 2 tritt in den Fällen des § 28f Abs. 4 des Vierten Buches Sozialgesetzbuch an die Stelle der Einzugsstelle die beauftragte Stelle. (2) Zahlungen in fremder Währung und durch Wechsel sind nicht zugelassen. (3) Die nach § 28e Abs. 1 Satz 2 des Vierten Buches Sozialgesetzbuch als gezahlt geltenden Beiträge sind auf einem bei den von der Beitragszahlung freigestellten Leistungsträgern zu führenden Sachbuchkonto bei den 1. Kranken- und Pflegekassen am Tag der Fälligkeit nach der Satzung, 2. Trägern der Rentenversicherung und der Bundesagentur für Arbeit am Tag der Fälligkeit in Einnahme zu buchen. Ist eine Krankenkasse der Arbeitgeber, ist der für die Pflegekasse bestimmte Anteil am Gesamtsozialversicherungsbeitrag auf dem entsprechenden Sachbuchkonto der Pflegekasse zu buchen. 
### § 4 Reihenfolge der Tilgung Schuldet der Arbeitgeber oder ein sonstiger Zahlungspflichtiger Auslagen der Einzugsstelle, Gebühren, Gesamtsozialversicherungsbeiträge, Säumniszuschläge, Zinsen, Geldbußen oder Zwangsgelder, kann er bei der Zahlung bestimmen, welche Schuld getilgt werden soll; der Arbeitgeber kann hinsichtlich der Beiträge bestimmen, dass vorrangig die Arbeitnehmeranteile getilgt werden sollen. Trifft der Arbeitgeber keine Bestimmung, werden die Schulden in der genannten Reihenfolge getilgt. Innerhalb der gleichen Schuldenart werden die einzelnen Schulden nach ihrer Fälligkeit, bei gleichzeitiger Fälligkeit anteilmäßig getilgt. ## Dritter Abschnitt - Weiterleitung und Abrechnung durch die Einzugsstelle ### § 5 Weiterleitung (1) Die Einzugsstelle erteilt an jedem Arbeitstag Aufträge zur Überweisung der nach § 28k Abs. 1 des Vierten Buches Sozialgesetzbuch weiterzuleitenden Beiträge. Die Einzugsstelle ist verpflichtet, 1. die vertraglichen Vereinbarungen mit ihrem Geldinstitut so zu gestalten, dass die Beiträge dem Konto der Einzugsstelle an dem Tag gutgeschrieben werden, an dem sie dem Geldinstitut gutgeschrieben werden, 2. die Beiträge am Tag der Gutschrift auf ihrem Konto an die Träger der Rentenversicherung, Pflegeversicherung, den Gesundheitsfonds und die Bundesagentur für Arbeit durch Überweisung weiterzuleiten, 3. die Buchungen auf ihrem Konto bei dem Geldinstitut elektronisch so abzufragen, dass die dort gutgeschriebenen Beiträge taggleich vor Bankannahmeschluss weitergeleitet werden können. Werden die Beiträge vom Arbeitgeber im Wege des Lastschriftverfahrens eingezogen oder durch Scheck gezahlt, sind die Beiträge am Tag der Wertstellung auf dem Konto der Einzugsstelle in die Beiträge nach Satz 2 Nr. 3 einzubeziehen. 
Einzugsstellen mit dezentralem Beitragseinzug leiten die Beiträge zentral weiter; als Tag der Gutschrift im Sinne des Satzes 2 gilt der Tag der Gutschrift bei der Nebenstelle, als Tag der Wertstellung im Sinne des Satzes 3 gilt der Tag der Wertstellung bei der Nebenstelle. Ergibt sich am Monatsende eine Unter- oder Überzahlung, ist diese innerhalb einer Woche auszugleichen. Die Einzugsstelle kann mit den Zahlungsempfängern ein Verfahren über die Avise zu erwartender Zahlungen vereinbaren. (2) Die Einzugsstelle hat für die Weiterleitung der Beiträge zur sozialen Pflegeversicherung ein von Absatz 1 Satz 1 abweichendes Verfahren anzuwenden, wenn es für die Pflegekasse wirtschaftlicher als das Überweisungsverfahren ist. (3) Der Zahlungsempfänger kann bestimmen, auf welches seiner Konten die Einzugsstelle zu überweisen hat. Die Bundesagentur für Arbeit bestimmt, an welche ihrer Dienststellen weiterzuleiten ist. Auf Verlangen des Zahlungsempfängers sind die Überweisungen beschleunigt, z. B. durch Blitzgiro oder telegrafisch, vorzunehmen; die anfallenden Gebühren behalten die Einzugsstellen ein. (4) In den Fällen des § 28f Abs. 4 des Vierten Buches Sozialgesetzbuch tritt an die Stelle der Einzugsstelle im Sinne der Absätze 1 bis 3 die beauftragte Stelle. ### § 6 Abrechnung (1) Die Einzugsstelle hat dem Zahlungsempfänger bis zum Zwanzigsten des Monats eine Abrechnung für den Vormonat einzureichen. (2) Für die Abrechnung ist der von den Spitzenverbänden der Pflegekassen, dem Bundesversicherungsamt als Träger des Gesundheitsfonds, dem Spitzenverband Bund der Krankenkassen, den Trägern der allgemeinen Rentenversicherung, der Deutschen Rentenversicherung Knappschaft-Bahn-See als Träger der knappschaftlichen Rentenversicherung und der Bundesagentur für Arbeit vereinbarte Datensatz (Monatsabrechnung) zu verwenden. ### § 6a Weiterleitung und Abrechnung sonstiger Beiträge (1) Die §§ 5 und 6 gelten entsprechend für Beitragszahlungen und Beitragsweiterleitungen nach § 252 Abs. 
2 des Fünften Buches Sozialgesetzbuch. (2) Die Krankenkasse hat dem Bundesversicherungsamt als Verwalter des Gesundheitsfonds die für die Erstellung der Abrechnung nach Absatz 1 erforderlichen Datengrundlagen auf Anforderung vorzulegen. Das Bundesversicherungsamt bestimmt das Nähere über die Datenlieferungen nach Anhörung des Spitzenverbandes Bund der Krankenkassen. ## Vierter Abschnitt - Prüfung beim Arbeitgeber ### § 7 Grundsätze (1) Die Prüfung nach § 28p des Vierten Buches Sozialgesetzbuch erfolgt grundsätzlich nach vorheriger Ankündigung durch die Versicherungsträger. Die Ankündigung soll möglichst einen Monat, sie muss jedoch spätestens 14 Tage vor der Prüfung erfolgen. Mit Zustimmung des Arbeitgebers kann von Satz 2 abgewichen werden. In den Fällen des § 98 Abs. 1 Satz 4 des Zehnten Buches Sozialgesetzbuch kann die Prüfung ohne Ankündigung durchgeführt werden. Der Prüfer oder die Prüferin des Versicherungsträgers hat sich auszuweisen. (2) Für die Prüfung dürfen auf Kosten des Versicherungsträgers schriftliche Unterlagen des Arbeitgebers vervielfältigt und elektronische Unterlagen gespeichert werden, soweit es für die Aufgabenerfüllung erforderlich ist. Der Arbeitgeber oder der Auftragnehmer nach § 28p Abs. 6 des Vierten Buches Sozialgesetzbuch hat einen zur Durchführung der Prüfung geeigneten Raum oder Arbeitsplatz sowie die erforderlichen Hilfsmittel kostenlos zur Verfügung zu stellen; Kosten oder Verdienstausfall, die durch die Prüfung entstehen, werden nicht erstattet. (3) (weggefallen) (4) Das Ergebnis der Prüfung ist dem Arbeitgeber schriftlich mitzuteilen; die Mitteilung soll innerhalb von zwei Monaten nach Abschluss der Prüfung dem Arbeitgeber zugehen. Die Mitteilung ist vom Arbeitgeber bis zur nächsten Prüfung aufzubewahren. In den Fällen des § 28p Abs. 1a Satz 4 des Vierten Buches Sozialgesetzbuch sind der Künstlersozialkasse die Prüfberichte und Prüfbescheide zu übersenden. Für das Ergebnis der Prüfung nach § 166 Abs. 
2 des Siebten Buches gelten die Sätze 1 und 2 entsprechend. Die Feststellungen zu den Arbeitsentgelten, die bei der Berechnung der Beiträge nach dem Siebten Buch Sozialgesetzbuch zu berücksichtigen sind, und deren Zuordnung zu den Gefahrtarifstellen sind den zuständigen Unfallversicherungsträgern zu übersenden. ### § 8 Entgeltunterlagen (1) Der Arbeitgeber hat in den Entgeltunterlagen folgende Angaben über den Beschäftigten aufzunehmen: 1. den Familien- und Vornamen und gegebenenfalls das betriebliche Ordnungsmerkmal, 2. das Geburtsdatum, 3. bei Ausländern aus Staaten außerhalb des Europäischen Wirtschaftsraums die Staatsangehörigkeit und den Aufenthaltstitel, 4. die Anschrift, 5. den Beginn und das Ende der Beschäftigung, 6. den Beginn und das Ende der Altersteilzeitarbeit, 7. das Wertguthaben aus flexibler Arbeitszeit einschließlich der Änderungen (Zu- und Abgänge), den Abrechnungsmonat der ersten Gutschrift sowie den Abrechnungsmonat für jede Änderung und einen Nachweis über die getroffenen Vorkehrungen zum Insolvenzschutz; bei auf Dritte übertragenen Wertguthaben sind diese beim Dritten zu kennzeichnen, 8. die Beschäftigungsart, 9. die für die Versicherungsfreiheit oder die Befreiung von der Versicherungspflicht maßgebenden Angaben, 10. das Arbeitsentgelt nach § 14 des Vierten Buches Sozialgesetzbuch, seine Zusammensetzung und zeitliche Zuordnung, ausgenommen sind Sachbezüge und Belegschaftsrabatte, soweit für sie eine Aufzeichnungspflicht nach dem Einkommensteuergesetz nicht besteht, 11. das beitragspflichtige Arbeitsentgelt bis zur Beitragsbemessungsgrenze der Rentenversicherung, seine Zusammensetzung und zeitliche Zuordnung, 12. den Betrag nach § 3 Abs. 1 Nr. 1 Buchstabe b des Altersteilzeitgesetzes, 13. den Beitragsgruppenschlüssel, 14. die Einzugsstelle für den Gesamtsozialversicherungsbeitrag, 15. den vom Beschäftigten zu tragenden Anteil am Gesamtsozialversicherungsbeitrag, nach Beitragsgruppen getrennt, 16. 
die für die Erstattung von Meldungen erforderlichen Daten, soweit sie in den Nummern 1 bis 14 nicht enthalten sind, 17. bei Entsendung Eigenart und zeitliche Begrenzung der Beschäftigung, 18. gezahltes Kurzarbeitergeld und die hierauf entfallenden beitragspflichtigen Einnahmen. Bestehen die Entgeltunterlagen aus mehreren Teilen, sind diese Teile durch ein betriebliches Ordnungsmerkmal zu verbinden. Die Angaben nach Satz 1 Nr. 10 bis 15 und 18 sind für jeden Entgeltabrechnungszeitraum erforderlich. Die Beträge nach Satz 1 Nr. 11 und 12 sind für die Meldungen zu summieren. Berichtigungen zu den Angaben nach Satz 1 Nr. 10 bis 15 und 18 oder Stornierungen sind besonders kenntlich zu machen. Die Angaben nach Satz 1 Nr. 8, 9 und 14 können verschlüsselt werden. (2) Folgende Unterlagen sind zu den Entgeltunterlagen zu nehmen: 1. Unterlagen, aus denen die nach Absatz 1 Satz 1 Nr. 3, 9 und 17 erforderlichen Angaben ersichtlich sind, 2. die für den Arbeitgeber bestimmte Bescheinigung nach § 175 Abs. 2 des Fünften Buches Sozialgesetzbuch, 3. die Daten der erstatteten Meldungen, 3a. die Daten der von den Krankenkassen übermittelten Meldungen, die Auswirkungen auf die Beitragsberechnung des Arbeitgebers haben, 4. die Erklärung des geringfügig Beschäftigten gegenüber dem Arbeitgeber, dass auf Versicherungsfreiheit in der Rentenversicherung verzichtet wird, 4a. der Antrag auf Befreiung von der Versicherungspflicht nach § 6 Absatz 1b des Sechsten Buches Sozialgesetzbuch, auf dem der Tag des Eingangs beim Arbeitgeber dokumentiert ist, 5. die Erklärung des Beschäftigten gegenüber dem Arbeitgeber, dass auf die Anwendung der Gleitzonenberechnung in der Rentenversicherung verzichtet wird, 5a. die schriftliche Erklärung des Arbeitnehmers gegenüber dem Arbeitgeber, dass die Gleitzonenregelung in der gesetzlichen Rentenversicherung nach § 276b Absatz 2 des Sechsten Buches Sozialgesetzbuch Anwendung finden soll, 6. die Niederschrift nach § 2 des Nachweisgesetzes, 7. 
die Erklärung des kurzfristig geringfügigen Beschäftigten über weitere kurzfristige Beschäftigungen im Kalenderjahr oder die Erklärung des geringfügig entlohnten Beschäftigten über weitere Beschäftigungen sowie in beiden Fällen die Bestätigung, dass die Aufnahme weiterer Beschäftigungen dem Arbeitgeber anzuzeigen sind, 8. eine Kopie des Antrags nach § 7a Abs. 1 des Vierten Buches Sozialgesetzbuch mit den von der Deutschen Rentenversicherung Bund für ihre Entscheidung benötigten Unterlagen sowie deren Bescheid nach § 7a Abs. 2 des Vierten Buches Sozialgesetzbuch, 9. den Bescheid der zuständigen Einzugsstelle über die Feststellung der Versicherungspflicht nach § 28h Abs. 2 des Vierten Buches Sozialgesetzbuch, 10. die Entscheidung der Finanzbehörden, dass die vom Arbeitgeber getragenen oder übernommenen Studiengebühren für ein Studium des Beschäftigten steuerrechtlich kein Arbeitslohn sind, 11. den Nachweis der Elterneigenschaft nach § 55 Abs. 3 des Elften Buches Sozialgesetzbuch, 12. die Erklärung über den Auszahlungsverzicht von zustehenden Entgeltansprüchen, 13. die Aufzeichnungen nach § 19 Absatz 1 des Arbeitnehmer- Entsendegesetzes. ### § 9 Beitragsabrechnung (1) Der Arbeitgeber hat zur Prüfung der Vollständigkeit der Entgeltabrechnung für jeden Abrechnungszeitraum ein Verzeichnis aller Beschäftigten in der Sortierfolge der Entgeltunterlagen mit den folgenden Angaben und nach Einzugsstellen getrennt zu erfassen und lesbar zur Verfügung zu stellen: 1. dem Familien- und Vornamen und gegebenenfalls dem betrieblichen Ordnungsmerkmal, 2. dem beitragspflichtigen Arbeitsentgelt bis zur Beitragsbemessungsgrenze der Rentenversicherung, 3. dem Betrag nach § 3 Abs. 1 Nr. 1 Buchstabe b des Altersteilzeitgesetzes, 4. dem Beitragsgruppenschlüssel, 5. den Sozialversicherungstagen, 6. dem Gesamtsozialversicherungsbeitrag, nach Arbeitgeber- und Arbeitnehmeranteilen je Beitragsgruppe getrennt, 7. 
dem gezahlten Kurzarbeitergeld und die hierauf entfallenden beitragspflichtigen Einnahmen, 8. den beitragspflichtigen Sonn-, Feiertags- und Nachtzuschlägen, 9. den Umlagesätzen nach dem Aufwendungsausgleichsgesetz und das umlagepflichtige Arbeitsentgelt, 10. den Parametern zur Berechnung der voraussichtlichen Höhe der Beitragsschuld. Die Beträge nach Satz 1 Nr. 7 sind zu summieren und die hierauf entfallenden Beiträge zur Kranken-, Pflege- und Rentenversicherung anzugeben; die Beträge nach Satz 1 Nr. 6 sind nach Beitragsgruppen zu summieren; aus den Einzelsummen ist die Gesamtsumme aller Beiträge zu bilden. Berichtigungen oder Stornierungen sind besonders zu kennzeichnen. (2) Im Beitragsnachweis nach Absatz 1 sind Beschäftigte mit den Angaben nach Absatz 1 Satz 1 Nr. 1 und dem erzielten Arbeitsentgelt nach § 14 des Vierten Buches Sozialgesetzbuch gesondert zu erfassen, für die Beiträge nicht oder nach den Vorschriften der Gleitzone (§ 20 Abs. 2 des Vierten Buches Sozialgesetzbuch) gezahlt werden. Sind Beitragsnachweise für mehrere Einzugsstellen zu erstellen, hat die Erfassung nach Satz 1 gesondert zu erfolgen. (3) Berechnet die Einzugsstelle die Beiträge, hat ihr der Arbeitgeber die für die Berechnung der Beiträge notwendigen Angaben mitzuteilen. (4) Im Beitragsnachweis sind die als gezahlt geltenden Beiträge nach § 28e Abs. 1 Satz 2 des Vierten Buches Sozialgesetzbuch nicht aufzunehmen. (5) Entgeltunterlagen können auf maschinell verwertbaren Datenträgern geführt werden. § 8 gilt entsprechend. Werden Entgeltunterlagen auf Datenträgern geführt, sind die Daten in der Aufbewahrungsfrist jederzeit verfügbar und unverzüglich lesbar vorzuhalten. § 147 Abs. 5 und 6 der Abgabenordnung gilt entsprechend. ### § 10 Mitwirkung (1) Der Arbeitgeber hat die Aufzeichnungen nach den §§ 8 und 9 so zu führen, dass bei einer Prüfung innerhalb angemessener Zeit ein Überblick über die formelle und sachliche Richtigkeit der Entgeltabrechnung des Arbeitgebers gewährleistet ist. 
Der Arbeitgeber muss die dafür erforderlichen Darstellungsprogramme sowie Maschinenzeiten und sonstigen Hilfsmittel, z. B. Personal, Bildschirme, Lesegeräte, bereitstellen. Die Angaben sind vollständig, richtig, in zeitlicher Folge und geordnet vorzunehmen. Auf Verlangen sind Fälle, die manuell abgerechnet worden sind oder in denen das beitragspflichtige Arbeitsentgelt manuell vorgegeben worden ist, vorzulegen. (2) Der Arbeitgeber ist verpflichtet, Bescheide und Prüfberichte der Finanzbehörden vorzulegen. Für die Prüfung gilt verpflichtend, diese Unterlagen einzusehen und eine versicherungs- und beitragsrechtliche Auswertung vorzunehmen. § 31 Abs. 2 der Abgabenordnung bleibt unberührt. (3) Bei Abrechnungsverfahren, die mit Hilfe automatischer Einrichtungen durchgeführt werden, hat der Arbeitgeber ein ordnungsmäßiges Verfahren zu gewährleisten. Eine Prüfung einzelner Geschäftsvorfälle wie auch des Abrechnungsverfahrens insgesamt muss möglich sein. (4) Das Abrechnungsverfahren ist einschließlich der Änderungen seit der letzten Prüfung zu dokumentieren. Aus der dazu erforderlichen Verfahrensdokumentation müssen Aufbau und Ablauf des Abrechnungsverfahrens vollständig ersichtlich sein, insbesondere 1. die Verarbeitungsregeln einschließlich Kontrollen und Abstimmverfahren, 2. die Fehlerbehandlung, 3. die Sicherung der ordnungsgemäßen Programmanwendung und 4. die Organisation der manuellen Vor- oder Nachbehandlung von Daten. Änderungen des Abrechnungsverfahrens sind in der Dokumentation so zu vermerken, dass die zeitliche Abgrenzung einzelner Verfahrensversionen ersichtlich ist. (5) Bei der Prüfung von Programmen hat der Arbeitgeber die erforderlichen Testaufgaben auszuführen und das Testergebnis den Prüfern zu übergeben. Bei der Prüfung durch Testaufgaben sind nur gemeinsame Testaufgaben zu verwenden. Der Arbeitgeber kann eine Änderung der Testaufgaben verlangen, soweit dies durch betriebliche Gegebenheiten begründet ist. 
Eine Dokumentation der Programmprüfung ist bis zur nächsten Prüfung aufzubewahren und zur Prüfung vorzulegen. Verfahren oder Verfahrensteile, die bereits geprüft, nicht beanstandet und später nicht geändert worden sind, sind nicht erneut zu prüfen. Bei bereits geprüften Verfahren oder Verfahrensteilen, die später geändert worden sind, kann die Prüfung auf Änderungen beschränkt werden. Weist der Arbeitgeber nach, dass die Testaufgaben im Rahmen einer Systemprüfung bereits erfolgreich geprüft wurden, ist auf eine Prüfung beim Arbeitgeber zu verzichten. (6) Der Arbeitgeber hat unverzüglich die bei der Prüfung festgestellten Mängel zu beheben und Vorkehrungen zu treffen, dass die festgestellten Mängel sich nicht wiederholen. Dem Arbeitgeber kann dafür eine Frist gesetzt und darüber hinaus die Auflage erteilt werden, dem prüfenden Sozialversicherungsträger die ordnungsmäßige Mängelbeseitigung und die getroffenen Vorkehrungen nachzuweisen. ### § 11 Umfang (1) Die Prüfung der Aufzeichnungen nach den §§ 8 und 9 kann auf Stichproben beschränkt werden. Die für eine Prüfung verlangten Unterlagen nach § 8 Abs. 2 und § 9 sind unverzüglich vorzulegen oder als lesbare Reproduktionen herzustellen. (2) Die Prüfung kann sich beim Arbeitgeber über den Bereich der Entgeltabrechnung jedoch nicht über den Bereich des Rechnungswesens hinaus erstrecken. Der Arbeitgeber hat Unterlagen, die der Aufgabenerfüllung der Prüfung dienen, insbesondere zur Klärung, ob ein versicherungs- oder beitragspflichtiges Beschäftigungsverhältnis vorliegt oder nicht, auf Verlangen vorzulegen. ### § 12 Prüfung bei Steuerberatern oder bei anderen Stellen Für die Prüfung bei den in § 28p Abs. 6 des Vierten Buches Sozialgesetzbuch genannten Stellen gelten die §§ 7 bis 11, soweit sie solche Aufgaben vom Arbeitgeber übernommen haben, entsprechend. Beendet der Arbeitgeber die Beauftragung einer Stelle nach Satz 1 während der Prüfung, bleibt das Recht auf Prüfung für den zu prüfenden Zeitraum bestehen. 
Das Ergebnis der Prüfung ist auch dem Arbeitgeber schriftlich mitzuteilen; die Mitteilung soll innerhalb von zwei Monaten nach Abschluss der Prüfung dem Arbeitgeber zugehen. Das Recht auf Prüfung beim Arbeitgeber oder in den Räumen des Versicherungsträgers bleibt unberührt. ### § 13 Prüfung in den Räumen des Versicherungsträgers (1) Für die Prüfung beim Versicherungsträger gelten § 7 Abs. 1, 2 Satz 1, Abs. 3 und 4 sowie die §§ 8 bis 11 entsprechend. (2) Entfällt das Wahlrecht des Arbeitgebers nach § 98 Abs. 1 Satz 4 des Zehnten Buches Sozialgesetzbuch, gelten die Vorschriften der §§ 7 bis 11. ### § 13a Prüfung der Entrichtung der Künstlersozialabgabe Die Vorschriften dieses Abschnitts finden für die Prüfung der Entrichtung der Künstlersozialabgabe entsprechende Anwendung; § 1 Abs. 2, § 2 Abs. 1 Nr. 2 und Abs. 2 und die §§ 7 und 8 der KSVG- Beitragsüberwachungsverordnung sowie § 27 Abs. 1 Satz 3 und 4 des Künstlersozialversicherungsgesetzes gelten ergänzend. Den Zeitpunkt der Prüfung bestimmt der Versicherungsträger. ## Fünfter Abschnitt - Datei der Arbeitgeber ### § 14 Inhalt der Datei (1) Die bei der Deutschen Rentenversicherung Bund maschinell geführte Datei (§ 28p Abs. 8 Satz 1 des Vierten Buches Sozialgesetzbuch) enthält über jeden der Beitragsüberwachung unterliegenden Arbeitgeber die für die Übersichten nach § 28p Abs. 7 des Vierten Buches Sozialgesetzbuch erforderlichen Daten sowie folgende Angaben: 1. die Betriebsnummern und Gemeindeschlüssel der zu prüfenden Stellen (Betriebsstätten des Arbeitgebers sowie andere Stellen, auf die sich die Prüfung nach § 28p Abs. 6 des Vierten Buches Sozialgesetzbuch erstreckt), 2. deren Namen, Anschriften, Telefon- und Telefaxanschluss, E-Mail- Adresse, 3. das Datum, bis zu dem der Arbeitgeber zuletzt geprüft wurde, 4. das Datum der geplanten nächsten Prüfung, 5. 
Angaben für besondere Behandlung: 5.1 Verlangen der zu prüfenden Stelle nach einem besonderen Prüfrhythmus, 5.2 Verlangen der Einzugsstellen nach alsbaldiger Prüfung und den Grund dafür, 6. die Bezeichnung der für Meldungen und Beitragsnachweise verwendeten EDV-Programme oder Ausfüllhilfen, 7. die Anzahl der pflichtversicherten Beschäftigten im Prüfzeitraum, 8. die Anzahl der geringfügig Beschäftigten im Prüfzeitraum, 9. die Bereichsnummer des für die Prüfung zuständigen Trägers der Rentenversicherung (§ 28p Abs. 2 Satz 2 des Vierten Buches Sozialgesetzbuch) sowie die Angabe "Trägerfirma einer Betriebskrankenkasse", 10. die Betriebsnummern anderer Arbeitgeber, für die der Arbeitgeber abrechnet, 11. den Wirtschaftszweig/die Branche des Arbeitgebers, 11a. die Wirtschafts-Identifikationsnummer (§ 139c der Abgabenordnung) des Arbeitgebers, sofern diese noch nicht zugeteilt wurde, die Steuernummer des Arbeitgebers, und das zuständige Finanzamt, 12. die Anzahl der aktuell Beschäftigten, 13. die Betriebsnummern der Einzugsstellen, an die Beiträge im Prüfzeitraum abzuführen waren, 14. den Inhalt der Bescheide nach § 28p Abs. 1 Satz 5 und Abs. 1a Satz 3 des Vierten Buches Sozialgesetzbuch und der Mitteilung an den Unfallversicherungsträger über die Prüfung nach § 166 Abs. 2 des Siebten Buches Sozialgesetzbuch, 15. aus den Mitteilungen der Behörden der Zollverwaltung über Prüfungen nach § 2 des Schwarzarbeitsbekämpfungsgesetzes: 1. Datum und Aufbewahrungsort der Mitteilung, 2. Name der meldenden Stelle, 3. aus dem Inhalt der Mitteilung: 3.1 Meldepflichtverletzung (§ 28a des Vierten Buches Sozialgesetzbuch), 3.2 fehlende Entgeltunterlagen, 3.3 Verdacht der prüfenden Stelle auf Beitragshinterziehung, Verstöße gegen das Arbeitnehmer-Entsendegesetz, 16. Informationen über gegen frühere Bescheide eingelegte Rechtsbehelfe und Rechtsmittel sowie über sozialgerichtliche Verfahren, 17. 
die Angabe, dass der Arbeitgeber seine Bereitschaft zur Teilnahme an einer Sammel- oder Vorlageprüfung erklärt hat, 18. die Tatsache und der Grund der Nichteinsichtnahme in die Bescheide und Prüfberichte der Finanzbehörden, 19. die Angabe, dass Beschäftigte Entgeltzahlungen durch Dritte erhalten, 20. die Angabe, ob der Arbeitgeber hinsichtlich der Melde- und Abgabepflicht nach dem Künstlersozialversicherungsgesetz zu prüfen ist, sowie die Kennzeichnung des Verfahrensstandes. (2) Die Angaben nach Absatz 1 dürfen nur von dem zuständigen Träger der Rentenversicherung und der Datenstelle der Träger der Rentenversicherung und für Abfragen nach § 28q Abs. 5 Satz 2 des Vierten Buches Sozialgesetzbuch verarbeitet und genutzt werden. (3) Die Angaben nach Absatz 1 Nr. 1 und 2 und der Inhalt der Bescheide nach § 28p Abs. 1 Satz 5 des Vierten Buches Sozialgesetzbuch, soweit dieser nach Einzugsstellen gegliedert ist, dürfen für die Prüfungen nach § 28q Abs. 1 Satz 1 und 2 des Vierten Buches Sozialgesetzbuch verarbeitet und genutzt werden. ## Sechster Abschnitt - Schlussvorschriften ### § 15 Inkrafttreten, Außerkrafttreten Diese Verordnung tritt am 1. Juli 2006 in Kraft. ### Schlussformel Der Bundesrat hat zugestimmt.
{ "pile_set_name": "Github" }
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:generate stringer -type=Kind
//go:generate go run gen.go gen_common.go gen_trieval.go

// Package width provides functionality for handling different widths in text.
//
// Wide characters behave like ideographs; they tend to allow line breaks after
// each character and remain upright in vertical text layout. Narrow characters
// are kept together in words or runs that are rotated sideways in vertical text
// layout.
//
// For more information, see http://unicode.org/reports/tr11/.
package width // import "golang.org/x/text/width"

import (
	"unicode/utf8"

	"golang.org/x/text/transform"
)

// TODO
// 1) Reduce table size by compressing blocks.
// 2) API proposition for computing display length
// (approximation, fixed pitch only).
// 3) Implement display length.

// Kind indicates the type of width property as defined in http://unicode.org/reports/tr11/.
type Kind int

const (
	// Neutral characters do not occur in legacy East Asian character sets.
	Neutral Kind = iota

	// EastAsianAmbiguous characters that can be sometimes wide and sometimes
	// narrow and require additional information not contained in the character
	// code to further resolve their width.
	EastAsianAmbiguous

	// EastAsianWide characters are wide in their usual form. They occur only in
	// the context of East Asian typography. These runes may have explicit
	// halfwidth counterparts.
	EastAsianWide

	// EastAsianNarrow characters are narrow in their usual form. They often
	// have fullwidth counterparts.
	EastAsianNarrow

	// Note: there exist Narrow runes that do not have fullwidth or wide
	// counterparts, despite what the definition says (e.g. U+27E6).

	// EastAsianFullwidth characters have a compatibility decomposition of type
	// wide that map to a narrow counterpart.
	EastAsianFullwidth

	// EastAsianHalfwidth characters have a compatibility decomposition of type
	// narrow that map to a wide or ambiguous counterpart, plus U+20A9 ₩ WON
	// SIGN.
	EastAsianHalfwidth

	// Note: there exist runes that have a halfwidth counterparts but that are
	// classified as Ambiguous, rather than wide (e.g. U+2190).
)

// TODO: the generated tries need to return size 1 for invalid runes for the
// width to be computed correctly (each byte should render width 1)

// trie is the rune-property lookup table produced by gen.go; the argument
// presumably selects a table version — confirm against the generated tables.
var trie = newWidthTrie(0)

// Lookup reports the Properties of the first rune in b and the number of bytes
// of its UTF-8 encoding.
func Lookup(b []byte) (p Properties, size int) {
	v, sz := trie.lookup(b)
	// b[sz-1] is the final byte of the rune's UTF-8 encoding; it is retained
	// so that variant runes can be reconstructed later (see Folded/Narrow/Wide).
	return Properties{elem(v), b[sz-1]}, sz
}

// LookupString reports the Properties of the first rune in s and the number of
// bytes of its UTF-8 encoding.
func LookupString(s string) (p Properties, size int) {
	v, sz := trie.lookupString(s)
	return Properties{elem(v), s[sz-1]}, sz
}

// LookupRune reports the Properties of rune r.
func LookupRune(r rune) Properties {
	var buf [4]byte
	n := utf8.EncodeRune(buf[:], r)
	v, _ := trie.lookup(buf[:n])
	last := byte(r)
	if r >= utf8.RuneSelf {
		// For multi-byte runes, synthesize the final UTF-8 continuation byte
		// (0x80 | low six bits of r) so the stored byte matches what Lookup
		// would have captured from an encoded buffer.
		last = 0x80 + byte(r&0x3f)
	}
	return Properties{elem(v), last}
}

// Properties provides access to width properties of a rune.
type Properties struct {
	elem elem // packed kind/mapping value from the trie
	last byte // last byte of the rune's UTF-8 encoding (see Lookup)
}

func (e elem) kind() Kind {
	// The Kind is stored in the high bits of elem; typeShift is defined in the
	// generated tables.
	return Kind(e >> typeShift)
}

// Kind returns the Kind of a rune as defined in Unicode TR #11.
// See http://unicode.org/reports/tr11/ for more details.
func (p Properties) Kind() Kind {
	return p.elem.kind()
}

// Folded returns the folded variant of a rune or 0 if the rune is canonical.
func (p Properties) Folded() rune {
	if p.elem&tagNeedsFold != 0 {
		// inverseData holds a length-prefixed UTF-8 template; the byte at
		// index buf[0] is XOR-patched with the rune's last byte to recover the
		// variant. buf appears to be copied by value here, so the patch does
		// not mutate the table — confirm inverseData's element type.
		buf := inverseData[byte(p.elem)]
		buf[buf[0]] ^= p.last
		r, _ := utf8.DecodeRune(buf[1 : 1+buf[0]])
		return r
	}
	return 0
}

// Narrow returns the narrow variant of a rune or 0 if the rune is already
// narrow or doesn't have a narrow variant.
func (p Properties) Narrow() rune {
	// byte(p.elem) != 0 means an inverse mapping exists for this rune.
	if k := p.elem.kind(); byte(p.elem) != 0 && (k == EastAsianFullwidth || k == EastAsianWide || k == EastAsianAmbiguous) {
		buf := inverseData[byte(p.elem)]
		buf[buf[0]] ^= p.last
		r, _ := utf8.DecodeRune(buf[1 : 1+buf[0]])
		return r
	}
	return 0
}

// Wide returns the wide variant of a rune or 0 if the rune is already
// wide or doesn't have a wide variant.
func (p Properties) Wide() rune {
	if k := p.elem.kind(); byte(p.elem) != 0 && (k == EastAsianHalfwidth || k == EastAsianNarrow) {
		buf := inverseData[byte(p.elem)]
		buf[buf[0]] ^= p.last
		r, _ := utf8.DecodeRune(buf[1 : 1+buf[0]])
		return r
	}
	return 0
}

// TODO for Properties:
// - Add Fullwidth/Halfwidth or Inverted methods for computing variants
// mapping.
// - Add width information (including information on non-spacing runes).

// Transformer implements the transform.Transformer interface.
// It is a thin value wrapper that delegates every call to the wrapped
// SpanningTransformer.
type Transformer struct {
	t transform.SpanningTransformer
}

// Reset implements the transform.Transformer interface.
func (t Transformer) Reset() { t.t.Reset() }

// Transform implements the transform.Transformer interface.
func (t Transformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	return t.t.Transform(dst, src, atEOF)
}

// Span implements the transform.SpanningTransformer interface.
func (t Transformer) Span(src []byte, atEOF bool) (n int, err error) {
	return t.t.Span(src, atEOF)
}

// Bytes returns a new byte slice with the result of applying t to b.
func (t Transformer) Bytes(b []byte) []byte {
	b, _, _ = transform.Bytes(t, b)
	return b
}

// String returns a string with the result of applying t to s.
func (t Transformer) String(s string) string {
	s, _, _ = transform.String(t, s)
	return s
}

var (
	// Fold is a transform that maps all runes to their canonical width.
	//
	// Note that the NFKC and NFKD transforms in golang.org/x/text/unicode/norm
	// provide a more generic folding mechanism.
	Fold Transformer = Transformer{foldTransform{}}

	// Widen is a transform that maps runes to their wide variant, if
	// available.
	Widen Transformer = Transformer{wideTransform{}}

	// Narrow is a transform that maps runes to their narrow variant, if
	// available.
	Narrow Transformer = Transformer{narrowTransform{}}
)

// TODO: Consider the following options:
// - Treat Ambiguous runes that have a halfwidth counterpart as wide, or some
// generalized variant of this.
// - Consider a wide Won character to be the default width (or some generalized
// variant of this).
// - Filter the set of characters that gets converted (the preferred approach is
// to allow applying filters to transforms).
{ "pile_set_name": "Github" }
Starting TesterTest::Test with 2 test cases... INFO Benchmarking a debug build. BENCH [44] 0.00 ± 0.00 ns benchmarkDefault()@9x1000000000 (wall time) BENCH [46] 348.36 kB benchmarkOnce()@1x1 Finished TesterTest::Test with 0 errors out of 0 checks.
{ "pile_set_name": "Github" }
# Ship the example scripts inside source distributions.
recursive-include examples *.py
# Docs are currently excluded from the sdist; uncomment to re-enable.
#recursive-include doc *.rst *.txt *.py Makefile *.png
# Top-level packaging, license, readme, and test-configuration files.
include MANIFEST.in LICENSE README.rst pytest.ini tox.ini
{ "pile_set_name": "Github" }
<?php
/**
 * Copyright 2009-2014, Cake Development Corporation (http://cakedc.com)
 *
 * Licensed under The MIT License
 * Redistributions of files must retain the above copyright notice.
 *
 * @copyright Copyright 2009-2014, Cake Development Corporation (http://cakedc.com)
 * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
 */
// Tags plugin "index" view: renders a paginated table of Tag records with
// per-row View/Edit/Delete actions and an "add" link in the sidebar.
// Expects $tags (array of rows shaped as $tag['Tag'][field]) plus the
// Paginator and Html helpers on $this.
?>
<div class="tags index">
<h2><?php echo __d('tags', 'Tags');?></h2>
<p>
<?php
// Pagination summary line, e.g. "Page 1 of 3, showing 20 records out of 45 total ...".
echo $this->Paginator->counter(array(
'format' => __d('tags', 'Page %page% of %pages%, showing %current% records out of %count% total, starting on record %start%, ending on %end%')
));
?></p>
<table cellpadding="0" cellspacing="0">
<tr>
	<?php // Sortable column headers for each Tag field. ?>
	<th><?php echo $this->Paginator->sort('id');?></th>
	<th><?php echo $this->Paginator->sort('identifier');?></th>
	<th><?php echo $this->Paginator->sort('name');?></th>
	<th><?php echo $this->Paginator->sort('keyname');?></th>
	<th><?php echo $this->Paginator->sort('created');?></th>
	<th><?php echo $this->Paginator->sort('modified');?></th>
	<th class="actions"><?php echo __d('tags', 'Actions');?></th>
</tr>
<?php
$i = 0;
foreach ($tags as $tag):
	// Zebra-striping: every other row gets the "altrow" class.
	$class = null;
	if ($i++ % 2 == 0) {
		$class = ' class="altrow"';
	}
?>
<tr<?php echo $class;?>>
	<td>
		<?php echo $tag['Tag']['id']; ?>
	</td>
	<td>
		<?php echo $tag['Tag']['identifier']; ?>
	</td>
	<td>
		<?php echo $tag['Tag']['name']; ?>
	</td>
	<td>
		<?php echo $tag['Tag']['keyname']; ?>
	</td>
	<td>
		<?php echo $tag['Tag']['created']; ?>
	</td>
	<td>
		<?php echo $tag['Tag']['modified']; ?>
	</td>
	<td class="actions">
		<?php // View is addressed by keyname; Edit/Delete by numeric id. ?>
		<?php echo $this->Html->link(__d('tags', 'View'), array('action' => 'view', $tag['Tag']['keyname'])); ?>
		<?php echo $this->Html->link(__d('tags', 'Edit'), array('action' => 'edit', $tag['Tag']['id'])); ?>
		<?php // Final argument is the JS confirmation message shown before deleting. ?>
		<?php echo $this->Html->link(__d('tags', 'Delete'), array('action' => 'delete', $tag['Tag']['id']), null, sprintf(__d('tags', 'Are you sure you want to delete # %s?'), $tag['Tag']['id'])); ?>
	</td>
</tr>
<?php endforeach; ?>
</table>
<div class="paging">
	<?php
	// Prev/next links render with class "disabled" when unavailable.
	echo $this->Paginator->prev('<< '.__d('tags', 'previous'), array(), null, array('class'=>'disabled'));?>
 | 	<?php echo $this->Paginator->numbers();?>
	<?php echo $this->Paginator->next(__d('tags', 'next').' >>', array(), null, array('class' => 'disabled'));?>
</div>
</div>
<div class="actions">
	<ul>
		<li><?php echo $this->Html->link(sprintf(__d('tags', 'New %s'), __d('tags', 'Tag')), array('action' => 'add')); ?></li>
	</ul>
</div>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Maven module "cloud-config-server": a Spring Cloud Config server that
  serves centralized external configuration. Inherits group/version from
  the "cloud-config" parent POM.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>cloud-config</artifactId>
        <groupId>com.easy</groupId>
        <version>1.0.0</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.easy.cloudConfigServer</groupId>
    <artifactId>cloud-config-server</artifactId>
    <properties>
        <!-- UTF-8 everywhere; build targets Java 8 with the Greenwich.SR2
             Spring Cloud release train. -->
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <java.version>1.8</java.version>
        <spring-cloud.version>Greenwich.SR2</spring-cloud.version>
    </properties>
    <dependencies>
        <!-- Embedded Config Server (@EnableConfigServer support); version is
             managed by the BOM imported below. -->
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-config-server</artifactId>
        </dependency>
    </dependencies>
    <dependencyManagement>
        <dependencies>
            <!-- Import the Spring Cloud BOM so dependency versions stay
                 consistent across the release train. -->
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-dependencies</artifactId>
                <version>${spring-cloud.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <build>
        <plugins>
            <!-- Repackage the module as an executable Spring Boot jar. -->
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
{ "pile_set_name": "Github" }
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

// File System.ServiceModel.ComIntegration.ICatalog2.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;

// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108

namespace System.ServiceModel.ComIntegration
{
  // Contract-only surface of the COM+ catalog administration interface used by
  // WCF's COM integration layer.  NOTE(review): this file is auto-generated
  // (see header) — do not hand-edit behavior; signatures appear to mirror the
  // COMAdmin ICOMAdminCatalog2 automation interface — confirm against the
  // COMAdmin documentation before relying on parameter semantics.
  internal partial interface ICatalog2
  {
    #region Methods and constructors
    void AliasComponent(string bstrSrcApplicationIDOrName, string bstrCLSIDOrProgID, string bstrDestApplicationIDOrName, string bstrNewProgId, string bstrNewClsid);

    bool AreApplicationInstancesPaused(Object pVarApplicationInstanceID);

    void BackupREGDB(string bstrBackupFilePath);

    Object Connect(string connectStr);

    void CopyApplications(string bstrSourcePartitionIDOrName, Object pVarApplicationID, string bstrDestinationPartitionIDOrName);

    void CopyComponents(string bstrSourceApplicationIDOrName, Object pVarCLSIDOrProgID, string bstrDestinationApplicationIDOrName);

    void CreateServiceForApplication(string bstrApplicationIDOrName, string bstrServiceName, string bstrStartType, string bstrErrorControl, string bstrDependencies, string bstrRunAs, string bstrPassword, bool bDesktopOk);

    void CurrentPartition(string bstrPartitionIDOrName);

    string CurrentPartitionID();

    string CurrentPartitionName();

    void DeleteServiceForApplication(string bstrApplicationIDOrName);

    string DumpApplicationInstance(string bstrApplicationInstanceID, string bstrDirectory, int lMaxImages);

    void ExportApplication(string bstrApplIdOrName, string bstrApplicationFile, int lOptions);

    void ExportPartition(string bstrPartitionIDOrName, string bstrPartitionFileName, int lOptions);

    void FlushPartitionCache();

    string GetApplicationInstanceIDFromProcessID(int lProcessID);

    Object GetCollection(string bstrCollName);

    Object GetCollectionByQuery(string collName, ref Object[] aQuery);

    Object GetCollectionByQuery2(string bstrCollectionName, Object pVarQueryStrings);

    int GetComponentVersionCount(string bstrCLSIDOrProgID);

    void GetEventClassesForIID(string bstrIID, out Object[] varCLSIDS, out Object[] varProgIDs, out Object[] varDescriptions);

    void GetMultipleComponentsInfo(string bstrApplIdOrName, Object varFileNames, out Object[] varCLSIDS, out Object[] varClassNames, out Object[] varFileFlags, out Object[] varComponentFlags);

    string GetPartitionID(string bstrApplicationIDOrName);

    string GetPartitionName(string bstrApplicationIDOrName);

    string GlobalPartitionID();

    void ImportComponent(string bstrApplIdOrName, string bstrCLSIDOrProgId);

    void ImportComponents(string bstrApplicationIDOrName, Object pVarCLSIDOrProgID, Object pVarComponentType);

    void ImportUnconfiguredComponents(string bstrApplicationIDOrName, Object pVarCLSIDOrProgID, Object pVarComponentType);

    void InstallApplication(string bstrApplicationFile, string bstrDestinationDirectory, int lOptions, string bstrUserId, string bstrPassword, string bstrRSN);

    void InstallComponent(string bstrApplIdOrName, string bstrDLL, string bstrTLB, string bstrPSDLL);

    void InstallEventClass(string bstrApplIdOrName, string bstrDLL, string bstrTLB, string bstrPSDLL);

    void InstallMultipleComponents(string bstrApplIdOrName, ref Object[] fileNames, ref Object[] CLSIDS);

    void InstallMultipleEventClasses(string bstrApplIdOrName, ref Object[] fileNames, ref Object[] CLSIDS);

    void InstallPartition(string bstrFileName, string bstrDestDirectory, int lOptions, string bstrUserID, string bstrPassword, string bstrRSN);

    bool Is64BitCatalogServer();

    bool IsApplicationInstanceDumpSupported();

    Object IsSafeToDelete(string bstrDllName);

    int MajorVersion();

    int MinorVersion();

    void MoveComponents(string bstrSourceApplicationIDOrName, Object pVarCLSIDOrProgID, string bstrDestinationApplicationIDOrName);

    void PauseApplicationInstances(Object pVarApplicationInstanceID);

    void PromoteUnconfiguredComponents(string bstrApplicationIDOrName, Object pVarCLSIDOrProgID, Object pVarComponentType);

    void QueryApplicationFile(string bstrApplicationFile, out string bstrApplicationName, out string bstrApplicationDescription, out bool bHasUsers, out bool bIsProxy, out Object[] varFileNames);

    Object QueryApplicationFile2(string bstrApplicationFile);

    void RecycleApplicationInstances(Object pVarApplicationInstanceID, int lReasonCode);

    void RefreshComponents();

    void RefreshRouter();

    void Reserved1();

    void Reserved2();

    void RestoreREGDB(string bstrBackupFilePath);

    void ResumeApplicationInstances(Object pVarApplicationInstanceID);

    int ServiceCheck(int lService);

    void ShutdownApplication(string bstrApplIdOrName);

    void ShutdownApplicationInstances(Object pVarApplicationInstanceID);

    void StartApplication(string bstrApplIdOrName);

    void StartRouter();

    void StopRouter();
    #endregion
  }
}
{ "pile_set_name": "Github" }
//
//  CALayer+Anim.m
//  LMJJDNC
//
//  Created by lmj on 16/1/6.
//  Copyright (c) 2016 lmj. All rights reserved.
//

#import "CALayer+Anim.h"

@implementation CALayer (Anim)

// Shake animation: keyframe-animates transform.rotation.z through the given
// rotation values, adds the animation to the receiver under the key
// "rotation", and returns it.
- (CAAnimation *)anim_shake:(NSArray *)rotations duration:(NSTimeInterval)duration repeatCount:(NSUInteger)repeatCount
{
    // Create the keyframe animation
    CAKeyframeAnimation *kfa = [CAKeyframeAnimation animationWithKeyPath:@"transform.rotation.z"];
    // Rotation values, one per keyframe
    kfa.values = rotations;
    // Total duration
    kfa.duration = duration;
    // Number of repetitions
    kfa.repeatCount = repeatCount;
    // Remove the animation when it completes
    kfa.removedOnCompletion = YES;
    // Attach it to the layer
    [self addAnimation:kfa forKey:@"rotation"];
    return kfa;
}

// Flip animation: rotates the layer from 0 to M_PI_2 around the chosen axis
// (x, y or z), optionally auto-reversing. The animation is added under the
// key "reversAnim" and returned.
// NOTE(review): the timingFuncName parameter is currently unused in this
// implementation — confirm whether a timing function was meant to be applied.
- (CAAnimation *)anim_revers:(AnimReverDirection)direction duration:(NSTimeInterval)duration isReverse:(BOOL)isReverse repeatCount:(NSUInteger)repeatCount timingFuncName:(NSString *)timingFuncName
{
    NSString *key = @"reversAnim";
    // Cancel any previous run of this animation
    if ([self animationForKey:key] != nil) {
        [self removeAnimationForKey:key];
    }
    // Map the direction enum to the key-path axis suffix
    NSString *directionStr = nil;
    if (AnimReverDirectionX == direction) {
        directionStr = @"x";
    }
    if (AnimReverDirectionY == direction) {
        directionStr = @"y";
    }
    if (AnimReverDirectionZ == direction) {
        directionStr = @"z";
    }
    // Create the basic animation on transform.rotation.<axis>
    CABasicAnimation *reverAnim = [CABasicAnimation animationWithKeyPath:[NSString stringWithFormat:@"transform.rotation.%@",directionStr]];
    // Start value
    reverAnim.fromValue = @(0);
    // End value (quarter turn)
    reverAnim.toValue = @(M_PI_2);
    // Duration
    reverAnim.duration = duration;
    // Auto-reverse back to the start value
    reverAnim.autoreverses = isReverse;
    // Remove the animation when it completes
    reverAnim.removedOnCompletion = YES;
    // Number of repetitions
    reverAnim.repeatCount = repeatCount;
    // Attach it to the layer
    [self addAnimation:reverAnim forKey:key];
    return reverAnim;
}

@end
{ "pile_set_name": "Github" }
/*
  nonrandomopentest.c -- test nonrandomopen.so
  Copyright (C) 2017-2019 Dieter Baron and Thomas Klausner

  This file is part of ckmame, a program to check rom sets for MAME.
  The authors can be contacted at <ckmame@nih.at>

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions
  are met:
  1. Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
  2. Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in
     the documentation and/or other materials provided with the
     distribution.
  3. The name of the author may not be used to endorse or promote
     products derived from this software without specific prior
     written permission.

  THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS
  OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
  ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
  GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
  IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
  OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
  IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "zipint.h"

#include <stdio.h>
#include <stdlib.h>

/*
 * Test driver: when built with crypto support, verifies that the
 * (interposed, deterministic) zip_secure_random fills the buffer with
 * all-zero bytes.  Exits 0 on success, 1 on failure.
 */
int
main(int argc, const char *argv[]) {
    zip_uint8_t buf[1024];
    size_t i; /* was `int`: comparing a signed index against sizeof(buf)
		 is a signed/unsigned mismatch */

    /* this test takes no command-line arguments */
    (void)argc;
    (void)argv;

#ifdef HAVE_CRYPTO
    if (!zip_secure_random(buf, sizeof(buf))) {
	fprintf(stderr, "zip_secure_random returned false\n");
	exit(1);
    }

    for (i = 0; i < sizeof(buf); i++) {
	if (buf[i] != 0) {
	    fprintf(stderr, "non-zero byte found\n");
	    exit(1);
	}
    }
#else
    /* silence unused-variable warnings when crypto support is disabled */
    (void)buf;
    (void)i;
#endif

    exit(0);
}
{ "pile_set_name": "Github" }
/** @addtogroup coreSystem
 *
 *  @{
 */
#include <Core/ModelicaDefine.h>
#include <Core/Modelica.h>
#include <Core/System/FactoryExport.h>
#include <Core/System/NonLinearAlgLoopDefaultImplementation.h>
#include <cstring> // memset/memcpy — include what we use

/*bool BOOST_EXTENSION_EXPORT_DECL mycompare ( mytuple lhs, mytuple rhs) { return lhs.ele1 < rhs.ele1; }*/

/// Default state: no equations, no buffers allocated yet.
NonLinearAlgLoopDefaultImplementation::NonLinearAlgLoopDefaultImplementation()
    : _dimAEq(0)
    , _res(NULL)
    , _AData(NULL)
    , _Ax(NULL)
    , _x0(NULL)
    , _firstcall(true)
{
    // These members are returned by getDimZeroFunc()/getUseSparseFormat()
    // but were never set here; give them deterministic defaults
    // (subclasses may overwrite them later).
    _dimZeroFunc = 0;
    _useSparseFormat = false;
}

NonLinearAlgLoopDefaultImplementation::~NonLinearAlgLoopDefaultImplementation()
{
    // Both buffers are allocated with new[] in initialize(), so both must be
    // released with delete[].  BUGFIX: the original used scalar `delete _x0`,
    // which is undefined behavior for an array allocation.
    if (_res)
        delete[] _res;
    if (_x0)
        delete[] _x0;
}

/// Provide number (dimension) of variables according to data type
int NonLinearAlgLoopDefaultImplementation::getDimReal() const
{
    return _dimAEq;
}

/// Number of zero functions (event indicators) of this loop.
int NonLinearAlgLoopDefaultImplementation::getDimZeroFunc() const
{
    return _dimZeroFunc;
}

/// (Re-)initialize the system of equations: (re)allocates the residual and
/// start-value buffers for _dimAEq unknowns.
/// @throws ModelicaSimulationError if no equations are defined (_dimAEq == 0).
void NonLinearAlgLoopDefaultImplementation::initialize()
{
    if (_dimAEq == 0)
        throw ModelicaSimulationError(ALGLOOP_EQ_SYSTEM, "AlgLoop::initialize(): No constraint defined.");

    if (_res)
        delete[] _res;
    _res = new double[_dimAEq];
    memset(_res, 0, _dimAEq * sizeof(double));

    if (_x0)
        delete[] _x0;
    _x0 = new double[_dimAEq];
    // Zero-initialize so getRealStartValues() never copies indeterminate
    // memory if a subclass forgets to fill in start values.
    memset(_x0, 0, _dimAEq * sizeof(double));
}

/// Copy the current residual vector (length getDimReal()) into res.
void NonLinearAlgLoopDefaultImplementation::getRHS(double* res) const
{
    memcpy(res, _res, sizeof(double) * _dimAEq);
}

bool NonLinearAlgLoopDefaultImplementation::getUseSparseFormat()
{
    return _useSparseFormat;
}

void NonLinearAlgLoopDefaultImplementation::setUseSparseFormat(bool value)
{
    _useSparseFormat = value;
}

/// Copy the start values (length getDimReal()) into vars.
void NonLinearAlgLoopDefaultImplementation::getRealStartValues(double* vars) const
{
    memcpy(vars, _x0, sizeof(double) * _dimAEq);
}

//void NonLinearAlgLoopDefaultImplementation::getSparseAdata(double* data, int nonzeros)
//{
//  memcpy(data, _AData, sizeof(double) * nonzeros);
//}

/** @} */ // end of coreSystem
{ "pile_set_name": "Github" }
/*
 * Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

#import "SimpleDBResponse.h"
#import "SimpleDBInvalidParameterValueException.h"
#import "SimpleDBNoSuchDomainException.h"
#import "SimpleDBAttributeDoesNotExistException.h"
#import "SimpleDBMissingParameterException.h"

/**
 * Delete Attributes
 *
 * Response type for the SimpleDB DeleteAttributes operation.  It declares no
 * members of its own — all behavior is inherited from SimpleDBResponse.  The
 * imported exception headers presumably correspond to the errors this
 * operation can surface (invalid parameter value, no such domain, attribute
 * does not exist, missing parameter) — confirm against the SimpleDB API docs.
 */
@interface SimpleDBDeleteAttributesResponse:SimpleDBResponse
{
}

@end
{ "pile_set_name": "Github" }
// Copyright (c) 2014-present, Facebook, Inc. All rights reserved.
//
// You are hereby granted a non-exclusive, worldwide, royalty-free license to use,
// copy, modify, and distribute this software in source code or binary form for use
// in connection with the web services and APIs provided by Facebook.
//
// As with any software that integrates with the Facebook platform, your use of
// this software is subject to the Facebook Developer Principles and Policies
// [http://developers.facebook.com/policy/]. This copyright notice shall be
// included in all copies or substantial portions of the software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/**
 Flags to indicate support for newer bridge options beyond the initial 20130410 implementation.
 Combinable bit flags (NS_OPTIONS); FBSDKShareBridgeOptionsDefault means none set.
 */
typedef NS_OPTIONS(NSUInteger, FBSDKShareBridgeOptions)
{
  FBSDKShareBridgeOptionsDefault       = 0,
  FBSDKShareBridgeOptionsPhotoAsset    = 1 << 0,
  FBSDKShareBridgeOptionsPhotoImageURL = 1 << 1, // if set, a web-based URL is required; asset, image, and imageURL.isFileURL not allowed
  FBSDKShareBridgeOptionsVideoAsset    = 1 << 2,
  FBSDKShareBridgeOptionsVideoData     = 1 << 3,
  FBSDKShareBridgeOptionsWebHashtag    = 1 << 4, // if set, pass the hashtag as a string value, not an array of one string
} NS_SWIFT_NAME(ShareBridgeOptions);

/**
 A base interface for validation of content and media.
 Adopted by share-content types so the bridge can validate them before sending
 (presumably — adopters are declared elsewhere in the SDK).
 */
NS_SWIFT_NAME(SharingValidation)
@protocol FBSDKSharingValidation

/**
 Asks the receiver to validate that its content or media values are valid.
 - Parameter errorRef: Optional, will receive an FBSDKShareError if the values are not valid.
 - Returns: YES if the receiver's values are valid; otherwise NO
 */
- (BOOL)validateWithOptions:(FBSDKShareBridgeOptions)bridgeOptions
                      error:(NSError *__autoreleasing *)errorRef;

@end

NS_ASSUME_NONNULL_END
{ "pile_set_name": "Github" }
/** @file fileindex.h  Index for looking up files of specific type.
 *
 * @authors Copyright (c) 2014-2017 Jaakko Keränen <jaakko.keranen@iki.fi>
 *
 * @par License
 * LGPL: http://www.gnu.org/licenses/lgpl.html
 *
 * <small>This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or (at your
 * option) any later version. This program is distributed in the hope that it
 * will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
 * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
 * General Public License for more details. You should have received a copy of
 * the GNU Lesser General Public License along with this program; if not, see:
 * http://www.gnu.org/licenses</small>
 */

#ifndef LIBDENG2_FILEINDEX_H
#define LIBDENG2_FILEINDEX_H

#include "../File"

#include <map>
#include <list>
#include <utility>

namespace de {

class File;
class Folder;
class Package;

/**
 * Indexes files for quick access.
 *
 * @ingroup fs
 */
class DENG2_PUBLIC FileIndex
{
public:
    // Multimap keyed by String (presumably a file name / partial path — see
    // the implementation); several files may share the same key.
    typedef std::multimap<String, File *> Index;
    typedef std::pair<Index::iterator, Index::iterator> IndexRange;
    typedef std::pair<Index::const_iterator, Index::const_iterator> ConstIndexRange;
    // Result list for the findPartialPath() family of lookups.
    typedef std::list<File *> FoundFiles;

    class DENG2_PUBLIC IPredicate
    {
    public:
        virtual ~IPredicate() {}

        /**
         * Determines if a file should be included in the index.
         * @param file  File.
         *
         * @return @c true to index file, @c false to ignore.
         */
        virtual bool shouldIncludeInIndex(File const &file) const = 0;
    };

    // Observers notified when files are added to / removed from the index.
    DENG2_DEFINE_AUDIENCE2(Addition, void fileAdded  (File const &, FileIndex const &))
    DENG2_DEFINE_AUDIENCE2(Removal,  void fileRemoved(File const &, FileIndex const &))

public:
    FileIndex();

    /**
     * Sets the predicate that determines whether a file should be included in the
     * index.
     *
     * @param predicate  Predicate for inclusion. Must exist while the index is being
     *                   used.
     */
    void setPredicate(IPredicate const &predicate);

    /**
     * Adds a file to the index if the predicate permits.
     *
     * @param file  File.
     *
     * @return @c true, if the file was added to the index.
     */
    bool maybeAdd(File const &file);

    /**
     * Removes a file from the index, if it has been indexed. If not, nothing is done.
     *
     * @param file  File.
     */
    void remove(File const &file);

    /// Number of files currently in the index.
    int size() const;

    enum Behavior {
        FindInEntireIndex,        ///< Consider every indexed file.
        FindOnlyInLoadedPackages  ///< Restrict results to files in loaded packages.
    };

    /// Finds partial paths anywhere in the index (subject to @a behavior).
    void findPartialPath(String const &path, FoundFiles &found,
                         Behavior behavior = FindInEntireIndex) const;

    /**
     * Finds partial paths that reside somewhere inside a specific folder
     * or one of its subfolders.
     *
     * @param rootFolder  Folder under which to confine the search.
     * @param path        Partial path to locate.
     * @param found       All matching files.
     * @param behavior    Search behavior.
     */
    void findPartialPath(Folder const &rootFolder, String const &path,
                         FoundFiles &found,
                         Behavior behavior = FindInEntireIndex) const;

    /**
     * Finds partial paths that reside in a specific package.
     *
     * @param packageId  Package whose contents to search.
     * @param path       Partial path to find.
     * @param found      All matching files.
     */
    void findPartialPath(String const &packageId, String const &path,
                         FoundFiles &found) const;

    /**
     * Finds all instances of a (partial) path within the index. The results are sorted
     * so that they are in the same order as the packages in which the files are located.
     * Files that are not in any package appear before files that belong to a package
     * (when using FindInEntireIndex behavior).
     *
     * If package A has been loaded before package B, files from A are sorted before B
     * in the resulting list of files.
     *
     * @param path      Partial path to find.
     * @param found     Found files.
     * @param behavior  Behavior for finding: which results to filter out.
     *
     * @return Number of files found.
     */
    int findPartialPathInPackageOrder(String const &path, FoundFiles &found,
                                      Behavior behavior = FindOnlyInLoadedPackages) const;

    /// Prints the index contents (diagnostic aid).
    void print() const;

    /// All indexed files as a flat list.
    QList<File *> files() const;

protected:
    // C++ iterator (not thread-safe):
    typedef Index::const_iterator const_iterator;
    Index::const_iterator begin() const;
    Index::const_iterator end() const;

private:
    DENG2_PRIVATE(d)
};

} // namespace de

#endif // LIBDENG2_FILEINDEX_H
{ "pile_set_name": "Github" }
package net.neoremind.mycode.argorithm.leetcode;

import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;

import org.junit.Test;

/**
 * Find the kth largest element in an unsorted array.
 * Note that it is the kth largest element in the sorted order, not the kth distinct element.
 * <p/>
 * For example,
 * Given [3,2,1,5,6,4] and k = 2, return 5.
 * <p/>
 * Note:
 * You may assume k is always valid, 1 ≤ k ≤ array's length.
 * This implementation beat 99.82% of Java submissions on LeetCode.
 * Quick Select.
 * Time: average O(N), worst case O(N^2).
 * Space: O(1).
 * Same idea as quicksort: pick a pivot and partition the array so that elements
 * greater than the pivot end up on its left and smaller ones on its right, using
 * two pointers that swap out-of-place elements to the opposite side; after one
 * round the pivot is swapped back onto the partition point. If the partition
 * index equals k-1, the pivot is the kth largest. If the partition index is
 * greater, repeat the search in the left half; if smaller, in the right half.
 *
 * @author zhangxu
 */
public class KthLargestElementInAnArray2 {

    public int findKthLargest(int[] nums, int k) {
        // Handle two special cases first.
        // k == length: the kth largest is simply the minimum element.
        if (nums.length == k) {
            int min = 0;
            for (int i = 1; i < nums.length; i++) {
                if (nums[i] < nums[min]) {
                    min = i;
                }
            }
            return nums[min];
        }
        // k == 1: the kth largest is the maximum element.
        if (k == 1) {
            int max = 0;
            for (int i = 1; i < nums.length; i++) {
                if (nums[i] > nums[max]) {
                    max = i;
                }
            }
            return nums[max];
        }
        // Partition the array once: everything left of `mid` is greater than
        // nums[mid], everything right of it is smaller.
        int mid = quickSelect(nums, 0, nums.length - 1);
        int right = nums.length - 1;
        int left = 0;
        while (true) {
            if (mid == k - 1) { // pivot landing at index k-1 means nums[mid] is the kth largest
                break;
            } else if (mid < k - 1) {
                left = mid + 1; // narrow the search to the right part
                mid = quickSelect(nums, mid + 1, right);
            } else {
                right = mid - 1; // narrow the search to the left part
                mid = quickSelect(nums, left, mid - 1);
            }
        }
        return nums[mid];
    }

    /**
     * Quick-select partition step, implemented with reference to
     * {@link net.neoremind.mycode.argorithm.sort.QuickSort}.
     * Partitions in descending order: larger elements left of the pivot.
     *
     * @param nums the array
     * @param low  left boundary (pivot is taken from here)
     * @param high right boundary
     * @return the final index of the pivot
     */
    private int quickSelect(int[] nums, int low, int high) {
        int pivot = nums[low];
        int i = low;
        int j = high + 1;
        while (true) {
            while (nums[++i] > pivot) {
                if (i == high) {
                    break;
                }
            }
            while (nums[--j] < pivot) {
                if (j == low) {
                    break;
                }
            }
            if (i >= j) {
                break;
            }
            swap(nums, i, j);
        }
        swap(nums, low, j);
        return j;
    }

    // Exchanges two array elements in place.
    private void swap(int[] array, int from, int to) {
        int tmp = array[from];
        array[from] = array[to];
        array[to] = tmp;
    }

    // Variant of findKthLargest that drives the same partition step with an
    // explicit left/right loop condition instead of `while (true)`.
    public int findKthLargest2(int[] nums, int k) {
        // Handle two special cases first.
        // k == length: the kth largest is simply the minimum element.
        if (nums.length == k) {
            int min = 0;
            for (int i = 1; i < nums.length; i++) {
                if (nums[i] < nums[min]) {
                    min = i;
                }
            }
            return nums[min];
        }
        // k == 1: the kth largest is the maximum element.
        if (k == 1) {
            int max = 0;
            for (int i = 1; i < nums.length; i++) {
                if (nums[i] > nums[max]) {
                    max = i;
                }
            }
            return nums[max];
        }
        int right = nums.length - 1;
        int left = 0;
        while (left <= right) {
            int mid = quickSelect(nums, left, right);
            if (mid == k - 1) {
                return nums[mid];
            } else if (mid < k - 1) {
                left = mid + 1;
            } else {
                right = mid - 1;
            }
        }
        throw new RuntimeException("not here");
    }

    @Test
    public void test() {
        int[] nums = new int[]{3, 2, 1, 5, 6, 4};
        int k = findKthLargest(nums, 6);
        System.out.println(k);
        assertThat(k, is(1));
        k = findKthLargest2(nums, 6);
        System.out.println(k);
        assertThat(k, is(1));
        k = findKthLargest(nums, 5);
        System.out.println(k);
        assertThat(k, is(2));
        k = findKthLargest(nums, 4);
        System.out.println(k);
        assertThat(k, is(3));
        k = findKthLargest(nums, 3);
        System.out.println(k);
        assertThat(k, is(4));
        k = findKthLargest(nums, 2);
        System.out.println(k);
        assertThat(k, is(5));
        k = findKthLargest(nums, 1);
        System.out.println(k);
        assertThat(k, is(6));
        nums = new int[]{5, 2, 4, 1, 3, 6, 0};
        k = findKthLargest(nums, 4);
        System.out.println(k);
        assertThat(k, is(3));
        nums = new int[]{3, 3, 3, 3, 3, 3, 3, 3, 3};
        k = findKthLargest(nums, 8);
        System.out.println(k);
        assertThat(k, is(3));
        nums = new int[]{-1, 2, 0};
        k = findKthLargest(nums, 2);
        System.out.println(k);
        assertThat(k, is(0));
    }
}
{ "pile_set_name": "Github" }
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==============================================================================
#          \file   aug.py
#        \author   chenghuige
#          \date   2019-07-21 21:53:11.255995
#   \Description   imgaug-based image augmentation pipeline: `seq` applies
#                  flips, mild affine warps, and a random subset (0-5) of
#                  photometric / noise / blur augmenters to each image.
# ==============================================================================

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import sys
import os

from imgaug import augmenters as iaa
import imgaug as ia

# Wrap an augmenter so it is only applied to ~50% of images.
sometimes = lambda aug: iaa.Sometimes(0.5, aug)

seq = iaa.Sequential(
    [
        # apply the following augmenters to most images
        iaa.Fliplr(0.5),  # horizontally flip 50% of all images
        iaa.Flipud(0.2),  # vertically flip 20% of all images
        sometimes(iaa.Affine(
            scale={"x": (0.9, 1.1), "y": (0.9, 1.1)},  # scale images to 90-110% of their size, individually per axis
            translate_percent={"x": (-0.1, 0.1), "y": (-0.1, 0.1)},  # translate by -10 to +10 percent (per axis)
            rotate=(-10, 10),  # rotate by -10 to +10 degrees
            shear=(-5, 5),  # shear by -5 to +5 degrees
            order=[0, 1],  # use nearest neighbour or bilinear interpolation (fast)
            cval=(0, 255),  # if mode is constant, use a cval between 0 and 255
            mode=ia.ALL  # use any of scikit-image's warping modes (see 2nd image from the top for examples)
        )),
        # execute 0 to 5 of the following (less important) augmenters per image
        # don't execute all of them, as that would often be way too strong
        iaa.SomeOf((0, 5),
            [
                sometimes(iaa.Superpixels(p_replace=(0, 1.0), n_segments=(20, 200))),  # convert images into their superpixel representation
                iaa.OneOf([
                    iaa.GaussianBlur((0, 1.0)),  # blur images with a sigma between 0 and 1.0
                    iaa.AverageBlur(k=(3, 5)),  # blur image using local means with kernel sizes between 3 and 5
                    iaa.MedianBlur(k=(3, 5)),  # blur image using local medians with kernel sizes between 3 and 5
                ]),
                iaa.Sharpen(alpha=(0, 1.0), lightness=(0.9, 1.1)),  # sharpen images
                iaa.Emboss(alpha=(0, 1.0), strength=(0, 2.0)),  # emboss images
                # search either for all edges or for directed edges,
                # blend the result with the original image using a blobby mask
                iaa.SimplexNoiseAlpha(iaa.OneOf([
                    iaa.EdgeDetect(alpha=(0.5, 1.0)),
                    iaa.DirectedEdgeDetect(alpha=(0.5, 1.0), direction=(0.0, 1.0)),
                ])),
                iaa.AdditiveGaussianNoise(loc=0, scale=(0.0, 0.01*255), per_channel=0.5),  # add gaussian noise to images
                iaa.OneOf([
                    iaa.Dropout((0.01, 0.05), per_channel=0.5),  # randomly remove 1-5% of the pixels
                    iaa.CoarseDropout((0.01, 0.03), size_percent=(0.01, 0.02), per_channel=0.2),
                ]),
                iaa.Invert(0.01, per_channel=True),  # invert color channels (rarely)
                iaa.Add((-2, 2), per_channel=0.5),  # change brightness of images (by -2 to 2 of original value)
                iaa.AddToHueAndSaturation((-1, 1)),  # change hue and saturation
                # either change the brightness of the whole image (sometimes
                # per channel) or change the brightness of subareas
                iaa.OneOf([
                    iaa.Multiply((0.9, 1.1), per_channel=0.5),
                    iaa.FrequencyNoiseAlpha(
                        exponent=(-1, 0),
                        first=iaa.Multiply((0.9, 1.1), per_channel=True),
                        second=iaa.ContrastNormalization((0.9, 1.1))
                    )
                ]),
                sometimes(iaa.ElasticTransformation(alpha=(0.5, 3.5), sigma=0.25)),  # move pixels locally around (with random strengths)
                sometimes(iaa.PiecewiseAffine(scale=(0.01, 0.05))),  # sometimes move parts of the image around
                sometimes(iaa.PerspectiveTransform(scale=(0.01, 0.1)))
            ],
            random_order=True
        )
    ],
    random_order=True)
{ "pile_set_name": "Github" }
/*************************************************************************** graph.cpp - description ------------------- begin : Thu Oct 2 2003 copyright : (C) 2003 by Michael Margraf email : michael.margraf@alumni.tu-berlin.de ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ #include "graph.h" #include <stdlib.h> #include <iostream> #include <QPainter> #include <QDebug> class Diagram; Graph::Graph(Diagram const* d, const QString& _Line) : Element(), Style(GRAPHSTYLE_SOLID), diagram(d) { Type = isGraph; Var = _Line; countY = 0; // no points in graph Thick = numMode = 0; Color = 0x0000ff; // blue Precision = 3; isSelected = false; yAxisNo = 0; // left y axis cPointsY = 0; gy=NULL; } Graph::~Graph() { if(cPointsY != 0) delete[] cPointsY; } // --------------------------------------------------------------------- void Graph::createMarkerText() const { for(auto pm : Markers) { pm->createText(); } } // --------------------------------------------------------------------- void Graph::paint(ViewPainter *p, int x0, int y0) { if(!ScrPoints.size()) return; if(isSelected) { p->Painter->setPen(QPen(Qt::darkGray,Thick*p->PrintScale+4)); paintLines(p, x0, y0); p->Painter->setPen(QPen(Qt::white, Thick*p->PrintScale, Qt::SolidLine)); paintLines(p, x0, y0); return; } // **** not selected **** p->Painter->setPen(QPen(QColor(Color), Thick*p->PrintScale, Qt::SolidLine)); paintLines(p, x0, y0); } // --------------------------------------------------------------------- void Graph::paintLines(ViewPainter *p, int x0, int y0) { switch(Style) { case 
GRAPHSTYLE_STAR: drawStarSymbols(x0, y0, p); break; case GRAPHSTYLE_CIRCLE: drawCircleSymbols(x0, y0, p); break; case GRAPHSTYLE_ARROW: drawArrowSymbols(x0, y0, p); break; default: drawLines(x0, y0, p); } } // --------------------------------------------------------------------- /*paint function for phasor diagram*/ void Graph::paintvect(ViewPainter *p, int x0, int y0) { if(!ScrPoints.size()) return; if(isSelected) { p->Painter->setPen(QPen(Qt::darkGray,Thick*p->PrintScale+4)); drawvect(x0, y0, p); p->Painter->setPen(QPen(Qt::white, Thick*p->PrintScale, Qt::SolidLine)); drawvect(x0, y0, p); return; } // **** not selected **** p->Painter->setPen(QPen(QColor(Color), Thick*p->PrintScale, Qt::SolidLine)); drawvect(x0, y0, p); } // --------------------------------------------------------------------- QString Graph::save() { QString s = "\t<\""+Var+"\" "+Color.name()+ " "+QString::number(Thick)+" "+QString::number(Precision)+ " "+QString::number(numMode)+" "+QString::number(Style)+ " "+QString::number(yAxisNo)+">"; foreach(Marker *pm, Markers) s += "\n\t "+pm->save(); return s; } // --------------------------------------------------------------------- bool Graph::load(const QString& _s) { bool ok; QString s = _s; if(s.at(0) != '<') return false; if(s.at(s.length()-1) != '>') return false; s = s.mid(1, s.length()-2); // cut off start and end character Var = s.section('"',1,1); // Var // Var can include a Dataset name, which can contain spaces // remove the Var string so subsequent parsing of the other fields does not fail in this case s = s.section('"', 2); // keep everything after the closing quotes QString n; n = s.section(' ',1,1); // Color Color.setNamedColor(n); if(!Color.isValid()) return false; n = s.section(' ',2,2); // Thick Thick = n.toInt(&ok); if(!ok) return false; n = s.section(' ',3,3); // Precision Precision = n.toInt(&ok); if(!ok) return false; n = s.section(' ',4,4); // numMode numMode = n.toInt(&ok); if(!ok) return false; n = s.section(' ',5,5); // Style 
int st = n.toInt(&ok); if(!ok) return false; Style = toGraphStyle(st); if(Style==GRAPHSTYLE_INVALID) return false; n = s.section(' ',6,6); // yAxisNo if(n.isEmpty()) return true; // backward compatible yAxisNo = n.toInt(&ok); if(!ok) return false; return true; } // ----------------------------------------------------------------------- /*! * Checks if the coordinates x/y point to the graph. returns the number of the * branch of the graph, -1 upon a miss. * * x/y are relative to diagram cx/cy. 5 is the precision the user must point * onto the graph. * * FIXME: should return reference to hit sample point or some context. */ int Graph::getSelected(int x, int y) { auto pp = ScrPoints.begin(); if(pp == ScrPoints.end()) return -1; int A, z=0; int dx, dx2, x1; int dy, dy2, y1; int countX = cPointsX.at(0)->count; if(pp->isStrokeEnd()) { if(pp->isBranchEnd()) z++; pp++; if(pp->isBranchEnd()) { if(pp->isGraphEnd()) return -1; // not even one point ? z++; pp++; if(pp->isGraphEnd()) return -1; // not even one point ? } } if(Style >= GRAPHSTYLE_STAR || gy!=NULL) { // for graph symbols while(!pp->isGraphEnd()) { if(!pp->isStrokeEnd()) { dx = x - int((pp)->getScrX()); dy = y - int((pp++)->getScrY()); if(dx < -5) continue; if(dx > 5) continue; if(dy < -5) continue; if(dy > 5) continue; return z*countX; // points on graph symbol } else { z++; // next branch pp++; } } return -1; } // for graph lines while(!pp->isGraphEnd()) { while(!pp->isBranchEnd()) { x1 = int(pp->getScrX()); y1 = int((pp++)->getScrY()); dx = x - x1; dy = y - y1; if(pp->isPt()){ dx2 = int(pp->getScrX()); }else if(pp->isBranchEnd()) { break; }else if(pp->isStrokeEnd()) { pp++; dx2 = int(pp->getScrX()); // go on as graph can also be selected between strokes if(pp->isBranchEnd()) break; } if(dx < -5) { if(x < dx2-5) continue; } // point between x coordinates ? else { if(x > 5) if(x > dx2+5) continue; } dy2 = int(pp->getScrY()); if(dy < -5) { if(y < dy2-5) continue; } // point between y coordinates ? 
else { if(y > 5) if(y > dy2+5) continue; } dx2 -= x1; dy2 -= y1; A = dx2*dy - dx*dy2; // calculate the rectangle area spanned A *= A; // avoid the need for square root A -= 25*(dx2*dx2 + dy2*dy2); // substract selectable area if(A <= 0) return z*countX; // lies x/y onto the graph line ? } pp++; z++; } return -1; } // ----------------------------------------------------------------------- /*it's a select function for phasordiagram that with the 2 points of the vector creates a linear equation and find if the point is in that equation*/ int Graph::getSelectedP(int x, int y) { float f1,f2,f3,f4; float xn,yn; float d1,b1; auto pp = ScrPoints.begin(); if(pp == ScrPoints.end()) return -1; if(pp->isStrokeEnd()) pp++; while(!pp->isGraphEnd()) { if(!pp->isBranchEnd()) { f1 = pp->getScrX(); f2 = (pp++)->getScrY(); f3 = pp->getScrX(); f4 = (pp++)->getScrY(); if((f1 > f3 - 5) && (f1 < f3 + 5)) { xn = f1; yn = y; } else { if((f2 > f4 - 5) && (f2 < f4 + 5)) { xn = x; yn = f2; } else { d1 = (f4 - f2) / (f3 - f1); b1 = f4 - d1 * f3 ; xn = (float(y) - b1) / d1; yn = d1 * float(x) + b1; } } if(((f1 >= f3) && (xn >= f3) && (xn <= f1)) || ((f3 >= f1) && (xn >= f1) && (xn <= f3))) if(((f2 >= f4) && (yn >= f4) && (yn <= f2)) || ((f4 >= f2) && (yn >= f2) && (yn <= f4))) if((y >= int(yn) - 5) && (y <= int(yn) + 5) && (x >= int(xn) - 5) && (x <= int(xn) + 5)) return 1; } else pp++; } return -1; } // ----------------------------------------------------------------------- // Creates a new graph and copies all the properties into it. Graph* Graph::sameNewOne() { Graph *pg = new Graph(diagram, Var); pg->Color = Color; pg->Thick = Thick; pg->Style = Style; pg->Precision = Precision; pg->numMode = numMode; pg->yAxisNo = yAxisNo; foreach(Marker *pm, Markers) pg->Markers.append(pm->sameNewOne(pg)); return pg; } /*! 
* find a sample point close to VarPos, snap to it, and return data at VarPos */ std::pair<double,double> Graph::findSample(std::vector<double>& VarPos) const { DataX const* pD; unsigned nVarPos=0; unsigned n=0; unsigned m=1; for(unsigned ii=0; (pD=axis(ii)); ++ii) { double* pp = pD->Points; double v = VarPos[nVarPos]; for(unsigned i=pD->count; i>1; i--) { // find appropiate marker position if(fabs(v-(*pp)) < fabs(v-(*(pp+1)))) break; pp++; n += m; } m *= pD->count; VarPos[nVarPos++] = *pp; } return std::pair<double,double>(cPointsY[2*n], cPointsY[2*n+1]); } // ----------------------------------------------------------------------- // meaning of the values in a graph "Points" list #define STROKEEND -2 #define BRANCHEND -10 #define GRAPHEND -100 // ----------------------------------------------------------------------- // screen points pseudo iterator implementation. void Graph::ScrPt::setStrokeEnd() { ScrX = STROKEEND; } void Graph::ScrPt::setBranchEnd() { ScrX = BRANCHEND; } void Graph::ScrPt::setGraphEnd() { ScrX = GRAPHEND; } bool Graph::ScrPt::isPt() const{return ScrX>=0.;} bool Graph::ScrPt::isStrokeEnd() const{return ScrX<=STROKEEND;} bool Graph::ScrPt::isBranchEnd() const{return ScrX<=BRANCHEND;} bool Graph::ScrPt::isGraphEnd() const{return ScrX<=GRAPHEND;} /*! * set screen coordinate for graph sampling point * these must be nonnegative, but sometimes aren't, * eg. between calcCoordinate and clip. * (negative values are reserved for control.) */ void Graph::ScrPt::setScrX(float x) { if(x<0){ std::cerr << "dangerous: negative screen coordinate" << x; } if(ScrX<0){ std::cerr << "dangerous: (maybe) overwriting control token" << x; } ScrX = x; } void Graph::ScrPt::setScrY(float x) { if(x<0){ // need to investigate... 
qDebug() << "setting negative screen coordinate" << x << "at" << ScrX; } ScrY = x; } void Graph::ScrPt::setScr(float x, float y) { setScrX(x); setScrY(y); } void Graph::ScrPt::setIndep(double x) { assert(ScrX>=0); indep = x; } void Graph::ScrPt::setDep(double x) { assert(ScrX>=0); dep = x; } float Graph::ScrPt::getScrX() const { if(ScrX<0){ std::cerr << "dangerous: returning negative screen coordinate" << ScrX; } return ScrX; } float Graph::ScrPt::getScrY() const { return ScrY; } double Graph::ScrPt::getIndep() const { assert(ScrX>=0); return indep; } double Graph::ScrPt::getDep() const { assert(ScrX>=0); return dep; } // vim:ts=8:sw=2:et
{ "pile_set_name": "Github" }
// [AsmJit] // Machine Code Generation for C++. // // [License] // Zlib - See LICENSE.md file in the package. #ifndef _ASMJIT_X86_X86ASSEMBLER_H #define _ASMJIT_X86_X86ASSEMBLER_H #include "../core/assembler.h" #include "../x86/x86emitter.h" #include "../x86/x86operand.h" ASMJIT_BEGIN_SUB_NAMESPACE(x86) //! \addtogroup asmjit_x86 //! \{ // ============================================================================ // [asmjit::Assembler] // ============================================================================ //! Assembler (X86). //! //! Emits X86 machine-code into buffers managed by `CodeHolder`. class ASMJIT_VIRTAPI Assembler : public BaseAssembler, public EmitterImplicitT<Assembler> { public: ASMJIT_NONCOPYABLE(Assembler) typedef BaseAssembler Base; //! \name Construction & Destruction //! \{ ASMJIT_API explicit Assembler(CodeHolder* code = nullptr) noexcept; ASMJIT_API virtual ~Assembler() noexcept; //! \} //! \cond INTERNAL //! \name Internal //! \{ // NOTE: x86::Assembler uses _privateData to store 'address-override' bit that // is used to decide whether to emit address-override (67H) prefix based on // the memory BASE+INDEX registers. It's either `kX86MemInfo_67H_X86` or // `kX86MemInfo_67H_X64`. inline uint32_t _addressOverrideMask() const noexcept { return _privateData; } inline void _setAddressOverrideMask(uint32_t m) noexcept { _privateData = m; } //! \} //! \endcond //! \cond INTERNAL //! \name Emit //! \{ using BaseEmitter::_emit; ASMJIT_API Error _emit(uint32_t instId, const Operand_& o0, const Operand_& o1, const Operand_& o2, const Operand_& o3) override; //! \} //! \endcond //! \name Align //! \{ ASMJIT_API Error align(uint32_t alignMode, uint32_t alignment) override; //! \} //! \name Events //! \{ ASMJIT_API Error onAttach(CodeHolder* code) noexcept override; ASMJIT_API Error onDetach(CodeHolder* code) noexcept override; //! \} }; //! \} ASMJIT_END_SUB_NAMESPACE #endif // _ASMJIT_X86_X86ASSEMBLER_H
{ "pile_set_name": "Github" }
OC.L10N.register( "lib", { "Cannot write into \"config\" directory!" : "Negalima rašyti į \"config\" aplanką!", "This can usually be fixed by giving the webserver write access to the config directory" : "Tai gali būti ištaisyta suteikiant web serveriui rašymo teises į config aplanką", "See %s" : "Žiūrėk %s", "Sample configuration detected" : "Aptiktas konfigūracijos pavyzdys", "PHP %s or higher is required." : "Reikalinga PHP %s arba aukštesnė.", "Following databases are supported: %s" : "Palaikomos duomenų bazės: %s", "Unknown filetype" : "Nežinomas failo tipas", "Invalid image" : "Netinkamas paveikslėlis", "today" : "šiandien", "yesterday" : "vakar", "last month" : "praeitą mėnesį", "_%n month ago_::_%n months ago_" : ["Prieš %n mėnesį","Prieš %n mėnesius","Prieš %n mėnesių","Prieš %n mėnesių"], "last year" : "praeitais metais", "_%n hour ago_::_%n hours ago_" : ["Prieš %n valandą","Prieš %n valandas","Prieš %n valandų","Prieš %n valandų"], "_%n minute ago_::_%n minutes ago_" : ["prieš %n min.","Prieš % minutes","Prieš %n minučių","Prieš %n minučių"], "seconds ago" : "prieš sekundę", "None" : "Nieko", "Username" : "Prisijungimo vardas", "Password" : "Slaptažodis", "Empty filename is not allowed" : "Tuščias failo pavadinimas neleidžiamas", "File name is a reserved word" : "Failo pavadinimas negalimas, žodis rezervuotas", "File name contains at least one invalid character" : "Failo vardas sudarytas iš neleistinų simbolių", "__language_name__" : "Lietuvių", "App directory already exists" : "Programos aplankas jau egzistuoja", "Can't create app folder. Please fix permissions. %s" : "Nepavyksta sukurti aplanko. Prašome pataisyti leidimus. 
%s", "No source specified when installing app" : "Nenurodytas šaltinis diegiant programą", "No href specified when installing app from http" : "Nenurodytas href diegiant programą iš http", "No path specified when installing app from local file" : "Nenurodytas kelias diegiant programą iš vietinio failo", "Archives of type %s are not supported" : "%s tipo archyvai nepalaikomi", "Failed to open archive when installing app" : "Nepavyko atverti archyvo diegiant programą", "App does not provide an info.xml file" : "Programa nepateikia info.xml failo", "App can't be installed because it is not compatible with this version of ownCloud" : "Programa negali būti įdiegta, nes yra nesuderinama su šia ownCloud versija", "App can't be installed because it contains the <shipped>true</shipped> tag which is not allowed for non shipped apps" : "Programa negali būti įdiegta, nes turi <shipped>true</shipped> žymę, kuri yra neleistina ne kartu platinamoms programoms", "Apps" : "Programos", "General" : "Bendras", "Storage" : "Saugojimas", "Security" : "Saugumas", "Encryption" : "Šifravimas", "Sharing" : "Dalijimasis", "Search" : "Ieškoti", "%s enter the database username." : "%s įrašykite duombazės naudotojo vardą.", "%s enter the database name." : "%s įrašykite duombazės pavadinimą.", "Oracle connection could not be established" : "Nepavyko sukurti Oracle ryšio", "Oracle username and/or password not valid" : "Neteisingas Oracle naudotojo vardas ir/arba slaptažodis", "DB Error: \"%s\"" : "DB klaida: \"%s\"", "Offending command was: \"%s\"" : "Vykdyta komanda buvo: \"%s\"", "You need to enter either an existing account or the administrator." : "Turite prisijungti su egzistuojančia paskyra arba su administratoriumi.", "Offending command was: \"%s\", name: %s, password: %s" : "Vykdyta komanda buvo: \"%s\", name: %s, password: %s", "PostgreSQL username and/or password not valid" : "Neteisingas PostgreSQL naudotojo vardas ir/arba slaptažodis", "Set an admin username." 
: "Nustatyti administratoriaus naudotojo vardą.", "Set an admin password." : "Nustatyti administratoriaus slaptažodį.", "%s shared »%s« with you" : "%s pasidalino »%s« su tavimi", "%s via %s" : "%s per %s", "Could not find category \"%s\"" : "Nepavyko rasti kategorijos „%s“", "A valid username must be provided" : "Vartotojo vardas turi būti tinkamas", "A valid password must be provided" : "Slaptažodis turi būti tinkamas", "Settings" : "Parinktys", "Users" : "Vartotojai", "A safe home for all your data" : "Saugus namai jūsų visiems duomenims", "Imprint" : "Imprint", "Application is not enabled" : "Programa neįjungta", "Authentication error" : "Autentikacijos klaida", "Token expired. Please reload page." : "Sesija baigėsi. Prašome perkrauti puslapį.", "Unknown user" : "Neatpažintas naudotojas", "PostgreSQL >= 9 required" : "Reikalinga PostgreSQL >= 9 versija", "Storage is temporarily not available" : "Saugykla yra laikinai neprieinama" }, "nplurals=4; plural=(n % 10 == 1 && (n % 100 > 19 || n % 100 < 11) ? 0 : (n % 10 >= 2 && n % 10 <=9) && (n % 100 > 19 || n % 100 < 11) ? 1 : n % 1 != 0 ? 2: 3);");
{ "pile_set_name": "Github" }
/**
 * Copyright (C) 2018-present MongoDB, Inc.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the Server Side Public License, version 1,
 * as published by MongoDB, Inc.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * Server Side Public License for more details.
 *
 * You should have received a copy of the Server Side Public License
 * along with this program. If not, see
 * <http://www.mongodb.com/licensing/server-side-public-license>.
 *
 * As a special exception, the copyright holders give permission to link the
 * code of portions of this program with the OpenSSL library under certain
 * conditions as described in each individual source file and distribute
 * linked combinations including the program with the OpenSSL library. You
 * must comply with the Server Side Public License in all respects for
 * all of the code used other than as permitted herein. If you modify file(s)
 * with this exception, you may extend this exception to your version of the
 * file(s), but you are not obligated to do so. If you do not wish to do so,
 * delete this exception statement from your version. If you delete this
 * exception statement from all source files in the program, then also delete
 * it in the license file.
 */

#include "mongo/platform/basic.h"

#include "mongo/db/commands/write_commands/write_commands_common.h"

#include <algorithm>
#include <memory>
#include <string>
#include <vector>

#include "mongo/db/auth/action_set.h"
#include "mongo/db/auth/action_type.h"
#include "mongo/db/auth/privilege.h"
#include "mongo/db/catalog/document_validation.h"
#include "mongo/db/ops/write_ops.h"
#include "mongo/util/assert_util.h"

namespace mongo {
namespace auth {
namespace {

/**
 * Extracts the namespace being indexed from a raw BSON write command.
 *
 * Requires the batch to contain exactly one document with a non-empty "ns"
 * field; uasserts FailedToParse otherwise.
 *
 * NOTE(review): not referenced anywhere in this file; presumably kept for the
 * TODO below — confirm before removing.
 *
 * TODO: Remove when we have parsing hooked before authorization.
 */
NamespaceString _getIndexedNss(const std::vector<BSONObj>& documents) {
    uassert(ErrorCodes::FailedToParse, "index write batch is empty", !documents.empty());

    std::string ns = documents.front()["ns"].str();
    uassert(ErrorCodes::FailedToParse,
            "index write batch contains an invalid index descriptor",
            !ns.empty());

    uassert(ErrorCodes::FailedToParse,
            "index write batches may only contain a single index descriptor",
            documents.size() == 1);

    return NamespaceString(std::move(ns));
}

// The three fillPrivileges() overloads below accumulate into `actions` the
// ActionTypes required by the given write op. None of them touch the
// `privileges` vector; the caller (checkAuthorizationImpl) folds `actions`
// into a single Privilege itself.

// Inserts only ever require the 'insert' action.
void fillPrivileges(const write_ops::Insert& op,
                    std::vector<Privilege>* privileges,
                    ActionSet* actions) {
    actions->addAction(ActionType::insert);
}

// Updates require 'update'; if any statement in the batch is an upsert it may
// create a document, so 'insert' is required as well.
void fillPrivileges(const write_ops::Update& op,
                    std::vector<Privilege>* privileges,
                    ActionSet* actions) {
    actions->addAction(ActionType::update);

    // Upsert also requires insert privs
    const auto& updates = op.getUpdates();
    if (std::any_of(updates.begin(), updates.end(), [](auto&& x) { return x.getUpsert(); })) {
        actions->addAction(ActionType::insert);
    }
}

// Deletes only ever require the 'remove' action.
void fillPrivileges(const write_ops::Delete& op,
                    std::vector<Privilege>* privileges,
                    ActionSet* actions) {
    actions->addAction(ActionType::remove);
}

/**
 * Builds the privilege set needed to run `op` (op-specific actions, plus
 * bypassDocumentValidation when requested) scoped to the op's exact
 * namespace, then uasserts Unauthorized unless `authzSession` holds all of
 * them.
 */
template <typename Op>
void checkAuthorizationImpl(AuthorizationSession* authzSession,
                            bool withDocumentValidationBypass,
                            const Op& op) {
    std::vector<Privilege> privileges;
    ActionSet actions;
    if (withDocumentValidationBypass) {
        actions.addAction(ActionType::bypassDocumentValidation);
    }
    fillPrivileges(op, &privileges, &actions);
    if (!actions.empty()) {
        privileges.push_back(
            Privilege(ResourcePattern::forExactNamespace(op.getNamespace()), actions));
    }
    uassert(ErrorCodes::Unauthorized,
            "unauthorized",
            authzSession->isAuthorizedForPrivileges(privileges));
}

}  // namespace

// Public entry points: authorize one write command of each flavor. Each
// simply dispatches to the shared checkAuthorizationImpl() above.

void checkAuthForInsertCommand(AuthorizationSession* authzSession,
                               bool withDocumentValidationBypass,
                               const write_ops::Insert& op) {
    checkAuthorizationImpl(authzSession, withDocumentValidationBypass, op);
}

void checkAuthForUpdateCommand(AuthorizationSession* authzSession,
                               bool withDocumentValidationBypass,
                               const write_ops::Update& op) {
    checkAuthorizationImpl(authzSession, withDocumentValidationBypass, op);
}

void checkAuthForDeleteCommand(AuthorizationSession* authzSession,
                               bool withDocumentValidationBypass,
                               const write_ops::Delete& op) {
    checkAuthorizationImpl(authzSession, withDocumentValidationBypass, op);
}

}  // namespace auth
}  // namespace mongo
{ "pile_set_name": "Github" }
# Dominion autonomy level ("Together for Victory" expansion).
# A largely self-governing subject: presumably the highest autonomy tier
# before full independence — verify against the game's 00_autonomy.txt.
autonomy_state = {
	id = autonomy_dominion

	# The overlord cannot push the subject's freedom below this fraction.
	min_freedom_level = 0.75

	# Diplomatic restrictions while at this level.
	rule = {
		can_not_declare_war = yes        # the dominion cannot start its own wars
		can_decline_call_to_war = no     # but must answer the overlord's call
	}

	# Economic effects of this autonomy level.
	modifier = {
		autonomy_manpower_share = 0.0          # overlord receives no manpower share
		extra_trade_to_overlord_factor = 0.25  # extra trade routed to the overlord
		overlord_trade_cost_factor = -0.25     # overlord buys from the dominion cheaper
	}

	# AI weights: the subject never pushes for more autonomy from here...
	ai_subject_wants_higher = {
		factor = 0.0
	}

	# ...while the overlord always prefers to pull it back down.
	ai_overlord_wants_lower = {
		factor = 1.0
	}

	# The overlord AI does not garrison dominion territory.
	ai_overlord_wants_garrison = {
		always = no
	}

	# Level only exists with the Together for Victory DLC enabled.
	allowed = {
		has_dlc = "Together for Victory"
	}

	# No extra conditions beyond the engine defaults for gaining this level.
	can_take_level = {
	}

	# Empty if/limit: no extra conditions for losing the level either —
	# NOTE(review): looks like a leftover stub; confirm it is intentional.
	can_lose_level = {
		if = {
			limit = {
			}
		}
	}
}
{ "pile_set_name": "Github" }
// $Id$ // Author: Yves Lafon <ylafon@w3.org> // // (c) COPYRIGHT MIT, ERCIM and Keio University, 2013. // Please first read the full copyright statement in file COPYRIGHT.html package org.w3c.css.properties.css; import org.w3c.css.parser.CssStyle; import org.w3c.css.properties.css3.Css3Style; import org.w3c.css.util.ApplContext; import org.w3c.css.util.InvalidParamException; import org.w3c.css.values.CssExpression; import org.w3c.css.values.CssValue; /** * @since CSS3 */ public class CssVoiceDuration extends CssProperty { public CssValue value; /** * Create a new CssVoiceDuration */ public CssVoiceDuration() { } /** * Creates a new CssVoiceDuration * * @param expression The expression for this property * @throws org.w3c.css.util.InvalidParamException * Expressions are incorrect */ public CssVoiceDuration(ApplContext ac, CssExpression expression, boolean check) throws InvalidParamException { throw new InvalidParamException("value", expression.getValue().toString(), getPropertyName(), ac); } public CssVoiceDuration(ApplContext ac, CssExpression expression) throws InvalidParamException { this(ac, expression, false); } /** * Returns the value of this property */ public Object get() { return value; } /** * Returns the name of this property */ public final String getPropertyName() { return "voice-duration"; } /** * Returns true if this property is "softly" inherited * e.g. his value is equals to inherit */ public boolean isSoftlyInherited() { return value.equals(inherit); } /** * Returns a string representation of the object. */ public String toString() { return value.toString(); } /** * Add this property to the CssStyle. * * @param style The CssStyle */ public void addToStyle(ApplContext ac, CssStyle style) { Css3Style s = (Css3Style) style; if (s.cssVoiceDuration != null) { style.addRedefinitionWarning(ac, this); } s.cssVoiceDuration = this; } /** * Compares two properties for equality. * * @param property The other property. 
*/ public boolean equals(CssProperty property) { return (property instanceof CssVoiceDuration && value.equals(((CssVoiceDuration) property).value)); } /** * Get this property in the style. * * @param style The style where the property is * @param resolve if true, resolve the style to find this property */ public CssProperty getPropertyInStyle(CssStyle style, boolean resolve) { if (resolve) { return ((Css3Style) style).getVoiceDuration(); } else { return ((Css3Style) style).cssVoiceDuration; } } }
{ "pile_set_name": "Github" }
<!doctype html> <html class="default no-js"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <title>SignalingSession | amazon-chime-sdk-js</title> <meta name="description" content="Documentation for amazon-chime-sdk-js"> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="stylesheet" href="../assets/css/main.css"> </head> <body> <header> <div class="tsd-page-toolbar"> <div class="container"> <div class="table-wrap"> <div class="table-cell" id="tsd-search" data-index="../assets/js/search.json" data-base=".."> <div class="field"> <label for="tsd-search-field" class="tsd-widget search no-caption">Search</label> <input id="tsd-search-field" type="text" /> </div> <ul class="results"> <li class="state loading">Preparing search index...</li> <li class="state failure">The search index is not available</li> </ul> <a href="../index.html" class="title">amazon-chime-sdk-js</a> </div> <div class="table-cell" id="tsd-widgets"> <div id="tsd-filter"> <a href="#" class="tsd-widget options no-caption" data-toggle="options">Options</a> <div class="tsd-filter-group"> <div class="tsd-select" id="tsd-filter-visibility"> <span class="tsd-select-label">All</span> <ul class="tsd-select-list"> <li data-value="public">Public</li> <li data-value="protected">Public/Protected</li> <li data-value="private" class="selected">All</li> </ul> </div> <input type="checkbox" id="tsd-filter-inherited" checked /> <label class="tsd-widget" for="tsd-filter-inherited">Inherited</label> <input type="checkbox" id="tsd-filter-externals" checked /> <label class="tsd-widget" for="tsd-filter-externals">Externals</label> <input type="checkbox" id="tsd-filter-only-exported" /> <label class="tsd-widget" for="tsd-filter-only-exported">Only exported</label> </div> </div> <a href="#" class="tsd-widget menu no-caption" data-toggle="menu">Menu</a> </div> </div> </div> </div> <div class="tsd-page-title"> <div class="container"> <ul class="tsd-breadcrumb"> <li> 
<a href="../globals.html">Globals</a> </li> <li> <a href="signalingsession.html">SignalingSession</a> </li> </ul> <h1>Interface SignalingSession</h1> </div> </div> </header> <div class="container container-main"> <div class="row"> <div class="col-8 col-content"> <section class="tsd-panel tsd-hierarchy"> <h3>Hierarchy</h3> <ul class="tsd-hierarchy"> <li> <span class="target">SignalingSession</span> </li> </ul> </section> <section class="tsd-panel"> <h3>Implemented by</h3> <ul class="tsd-hierarchy"> <li><a href="../classes/defaultsignalingsession.html" class="tsd-signature-type">DefaultSignalingSession</a></li> </ul> </section> <section class="tsd-panel-group tsd-index-group"> <h2>Index</h2> <section class="tsd-panel tsd-index-panel"> <div class="tsd-index-content"> <section class="tsd-index-section "> <h3>Methods</h3> <ul class="tsd-index-list"> <li class="tsd-kind-method tsd-parent-kind-interface"><a href="signalingsession.html#close" class="tsd-kind-icon">close</a></li> <li class="tsd-kind-method tsd-parent-kind-interface"><a href="signalingsession.html#open" class="tsd-kind-icon">open</a></li> <li class="tsd-kind-method tsd-parent-kind-interface"><a href="signalingsession.html#registerobserver" class="tsd-kind-icon">register<wbr>Observer</a></li> <li class="tsd-kind-method tsd-parent-kind-interface"><a href="signalingsession.html#unregisterobserver" class="tsd-kind-icon">unregister<wbr>Observer</a></li> </ul> </section> </div> </section> </section> <section class="tsd-panel-group tsd-member-group "> <h2>Methods</h2> <section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-interface"> <a name="close" class="tsd-anchor"></a> <h3>close</h3> <ul class="tsd-signatures tsd-kind-method tsd-parent-kind-interface"> <li class="tsd-signature tsd-kind-icon">close<span class="tsd-signature-symbol">(</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span 
class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">void</span><span class="tsd-signature-symbol">&gt;</span></li> </ul> <ul class="tsd-descriptions"> <li class="tsd-description"> <aside class="tsd-sources"> <ul> <li>Defined in <a href="https://github.com/aws/amazon-chime-sdk-js/blob/master/src/screenviewing/signalingsession/SignalingSession.ts#L9">src/screenviewing/signalingsession/SignalingSession.ts:9</a></li> </ul> </aside> <h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">void</span><span class="tsd-signature-symbol">&gt;</span></h4> </li> </ul> </section> <section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-interface"> <a name="open" class="tsd-anchor"></a> <h3>open</h3> <ul class="tsd-signatures tsd-kind-method tsd-parent-kind-interface"> <li class="tsd-signature tsd-kind-icon">open<span class="tsd-signature-symbol">(</span>connectionRequest<span class="tsd-signature-symbol">: </span><a href="../classes/screenviewingsessionconnectionrequest.html" class="tsd-signature-type">ScreenViewingSessionConnectionRequest</a><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">void</span><span class="tsd-signature-symbol">&gt;</span></li> </ul> <ul class="tsd-descriptions"> <li class="tsd-description"> <aside class="tsd-sources"> <ul> <li>Defined in <a href="https://github.com/aws/amazon-chime-sdk-js/blob/master/src/screenviewing/signalingsession/SignalingSession.ts#L8">src/screenviewing/signalingsession/SignalingSession.ts:8</a></li> </ul> </aside> <h4 class="tsd-parameters-title">Parameters</h4> <ul class="tsd-parameters"> <li> <h5>connectionRequest: <a href="../classes/screenviewingsessionconnectionrequest.html" 
class="tsd-signature-type">ScreenViewingSessionConnectionRequest</a></h5> </li> </ul> <h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">void</span><span class="tsd-signature-symbol">&gt;</span></h4> </li> </ul> </section> <section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-interface"> <a name="registerobserver" class="tsd-anchor"></a> <h3>register<wbr>Observer</h3> <ul class="tsd-signatures tsd-kind-method tsd-parent-kind-interface"> <li class="tsd-signature tsd-kind-icon">register<wbr>Observer<span class="tsd-signature-symbol">(</span>observer<span class="tsd-signature-symbol">: </span><a href="screenobserver.html" class="tsd-signature-type">ScreenObserver</a><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">void</span></li> </ul> <ul class="tsd-descriptions"> <li class="tsd-description"> <aside class="tsd-sources"> <ul> <li>Defined in <a href="https://github.com/aws/amazon-chime-sdk-js/blob/master/src/screenviewing/signalingsession/SignalingSession.ts#L10">src/screenviewing/signalingsession/SignalingSession.ts:10</a></li> </ul> </aside> <h4 class="tsd-parameters-title">Parameters</h4> <ul class="tsd-parameters"> <li> <h5>observer: <a href="screenobserver.html" class="tsd-signature-type">ScreenObserver</a></h5> </li> </ul> <h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">void</span></h4> </li> </ul> </section> <section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-interface"> <a name="unregisterobserver" class="tsd-anchor"></a> <h3>unregister<wbr>Observer</h3> <ul class="tsd-signatures tsd-kind-method tsd-parent-kind-interface"> <li class="tsd-signature tsd-kind-icon">unregister<wbr>Observer<span class="tsd-signature-symbol">(</span>observer<span class="tsd-signature-symbol">: </span><a href="screenobserver.html" 
class="tsd-signature-type">ScreenObserver</a><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">void</span></li> </ul> <ul class="tsd-descriptions"> <li class="tsd-description"> <aside class="tsd-sources"> <ul> <li>Defined in <a href="https://github.com/aws/amazon-chime-sdk-js/blob/master/src/screenviewing/signalingsession/SignalingSession.ts#L11">src/screenviewing/signalingsession/SignalingSession.ts:11</a></li> </ul> </aside> <h4 class="tsd-parameters-title">Parameters</h4> <ul class="tsd-parameters"> <li> <h5>observer: <a href="screenobserver.html" class="tsd-signature-type">ScreenObserver</a></h5> </li> </ul> <h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">void</span></h4> </li> </ul> </section> </section> </div> <div class="col-4 col-menu menu-sticky-wrap menu-highlight"> <nav class="tsd-navigation primary"> <ul> <li class="globals "> <a href="../globals.html"><em>Globals</em></a> </li> </ul> </nav> <nav class="tsd-navigation secondary menu-sticky"> <ul class="before-current"> </ul> <ul class="current"> <li class="current tsd-kind-interface"> <a href="signalingsession.html" class="tsd-kind-icon">Signaling<wbr>Session</a> <ul> <li class=" tsd-kind-method tsd-parent-kind-interface"> <a href="signalingsession.html#close" class="tsd-kind-icon">close</a> </li> <li class=" tsd-kind-method tsd-parent-kind-interface"> <a href="signalingsession.html#open" class="tsd-kind-icon">open</a> </li> <li class=" tsd-kind-method tsd-parent-kind-interface"> <a href="signalingsession.html#registerobserver" class="tsd-kind-icon">register<wbr>Observer</a> </li> <li class=" tsd-kind-method tsd-parent-kind-interface"> <a href="signalingsession.html#unregisterobserver" class="tsd-kind-icon">unregister<wbr>Observer</a> </li> </ul> </li> </ul> <ul class="after-current"> </ul> </nav> </div> </div> </div> <footer class="with-border-bottom"> <div class="container"> <h2>Legend</h2> <div 
class="tsd-legend-group"> <ul class="tsd-legend"> <li class="tsd-kind-constructor tsd-parent-kind-class"><span class="tsd-kind-icon">Constructor</span></li> <li class="tsd-kind-property tsd-parent-kind-class"><span class="tsd-kind-icon">Property</span></li> <li class="tsd-kind-method tsd-parent-kind-class"><span class="tsd-kind-icon">Method</span></li> <li class="tsd-kind-accessor tsd-parent-kind-class"><span class="tsd-kind-icon">Accessor</span></li> </ul> <ul class="tsd-legend"> <li class="tsd-kind-property tsd-parent-kind-interface"><span class="tsd-kind-icon">Property</span></li> <li class="tsd-kind-method tsd-parent-kind-interface"><span class="tsd-kind-icon">Method</span></li> </ul> <ul class="tsd-legend"> <li class="tsd-kind-property tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited property</span></li> <li class="tsd-kind-method tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited method</span></li> </ul> <ul class="tsd-legend"> <li class="tsd-kind-property tsd-parent-kind-class tsd-is-protected"><span class="tsd-kind-icon">Protected property</span></li> <li class="tsd-kind-method tsd-parent-kind-class tsd-is-protected"><span class="tsd-kind-icon">Protected method</span></li> </ul> <ul class="tsd-legend"> <li class="tsd-kind-property tsd-parent-kind-class tsd-is-private"><span class="tsd-kind-icon">Private property</span></li> <li class="tsd-kind-method tsd-parent-kind-class tsd-is-private"><span class="tsd-kind-icon">Private method</span></li> </ul> <ul class="tsd-legend"> <li class="tsd-kind-property tsd-parent-kind-class tsd-is-static"><span class="tsd-kind-icon">Static property</span></li> <li class="tsd-kind-method tsd-parent-kind-class tsd-is-static"><span class="tsd-kind-icon">Static method</span></li> </ul> </div> </div> </footer> <div class="container tsd-generator"> <p>Generated using <a href="https://typedoc.org/" target="_blank">TypeDoc</a></p> </div> <div class="overlay"></div> <script 
src="../assets/js/main.js"></script> </body> </html>
{ "pile_set_name": "Github" }
/* The contents of this file are subject to the Netscape Public
 * License Version 1.1 (the "License"); you may not use this file
 * except in compliance with the License.  You may obtain a copy of
 * the License at http://www.mozilla.org/NPL/
 *
 * Software distributed under the License is distributed on an "AS
 * IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * rights and limitations under the License.
 *
 * The Original Code is Mozilla Communicator client code, released March
 * 31, 1998.
 *
 * The Initial Developer of the Original Code is Netscape Communications
 * Corporation. Portions created by Netscape are
 * Copyright (C) 1998 Netscape Communications Corporation. All
 * Rights Reserved.
 *
 * Contributor(s):
 *
 */
/**
 *  File Name:     7.2.js
 *  ECMA Section:  7.2 Line Terminators
 *  Description:   - readability
 *                 - separate tokens
 *                 - may occur between any two tokens
 *                 - cannot occur within any token, not even a string
 *                 - affect the process of automatic semicolon insertion.
 *
 *  white space characters are:
 *  unicode   name             formal name   string representation
 *  \u000A    line feed        <LF>          \n
 *  \u000D    carriage return  <CR>          \r
 *
 *  this test uses onerror to capture line numbers.  because we use
 *  onerror, we can only have one test case per file.
 *
 *  Author:   christine@netscape.com
 *  Date:     11 september 1997
 */

// Globals consumed by the shared ECMA test harness; startTest(),
// writeHeaderToLog(), TestCase, writeTestCaseResult() and stopTest()
// are all defined by that harness, not in this file.
var SECTION = "7.2-5";
var VERSION = "ECMA_1";
startTest();
var TITLE = "Line Terminators";

writeHeaderToLog( SECTION + " "+ TITLE);

var testcases = getTestCases();

test();

// Evaluates a string containing a bare <CR>; the intended outcome is a
// runtime error (caught by the harness's onerror hook), so reaching the
// code after eval() means the test FAILED.
function test() {
    // this is line 27
    a = "\rb";
    eval( a );

    // if we get this far, the test failed.
    // NOTE(review): `tc` is not declared in this file — presumably a global
    // test-case index maintained by the harness/onerror handler; confirm
    // against the shared shell before reusing this pattern.
    testcases[tc].passed = writeTestCaseResult(
        "failure on line" + testcases[tc].expect,
        testcases[tc].actual,
        testcases[tc].description +" = "+ testcases[tc].actual );

    testcases[tc].passed = false;
    testcases[tc].reason = "test should have caused runtime error ";

    passed = false;
    stopTest();

    // all tests must return a boolean value
    return ( testcases );
}

// Builds the single test case this file is allowed to carry (one case per
// file because line numbers are captured via onerror).
function getTestCases() {
    var array = new Array();
    var item = 0;
    array[0] = new TestCase( "7.2",    "<cr>a",    "error",   "");
    return ( array );
}
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <artifactId>resteasy-jaxrs-all</artifactId> <groupId>org.jboss.resteasy</groupId> <version>4.6.0-SNAPSHOT</version> <relativePath>../pom.xml</relativePath> </parent> <artifactId>resteasy-client</artifactId> <name>RESTEasy JAX-RS Client</name> <dependencies> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <scope>test</scope> </dependency> <dependency> <groupId>org.jboss.resteasy</groupId> <artifactId>resteasy-client-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.jboss.resteasy</groupId> <artifactId>resteasy-core-spi</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.jboss.resteasy</groupId> <artifactId>resteasy-core</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.jboss.logging</groupId> <artifactId>jboss-logging</artifactId> </dependency> <dependency> <groupId>org.jboss.logging</groupId> <artifactId>jboss-logging-annotations</artifactId> </dependency> <dependency> <groupId>org.jboss.logging</groupId> <artifactId>jboss-logging-processor</artifactId> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpasyncclient</artifactId> <optional>true</optional> </dependency> <dependency> <groupId>org.eclipse.jetty</groupId> <artifactId>jetty-client</artifactId> <optional>true</optional> </dependency> <dependency> 
<groupId>org.jboss.spec.javax.ws.rs</groupId> <artifactId>jboss-jaxrs-api_2.1_spec</artifactId> </dependency> <dependency> <groupId>org.glassfish</groupId> <artifactId>jakarta.json</artifactId> <scope>provided</scope> </dependency> </dependencies> <profiles> <profile> <id>i18n</id> <activation> <property> <name>i18n</name> </property> </activation> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-resources-plugin</artifactId> <executions> <execution> <id>copy-resources</id> <phase>initialize</phase> <goals> <goal>copy-resources</goal> </goals> <configuration> <outputDirectory> ${basedir}/src/main/resources/org/jboss/resteasy/client/jaxrs/i18n </outputDirectory> <resources> <resource> <directory>${basedir}/src/test/resources/i18n</directory> <includes> <include>*</include> </includes> </resource> </resources> <overwrite>true</overwrite> </configuration> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <executions> <execution> <id>i18</id> <phase>test</phase> <goals> <goal>test</goal> </goals> <configuration> <skip>false</skip> <reuseForks>false</reuseForks> <includes> <include>**/I18nTestMessages_*.java</include> </includes> </configuration> </execution> </executions> </plugin> </plugins> </build> </profile> </profiles> </project>
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- """ Testing Mail Attach collector """ import os import unittest import unittest.mock as mock import intelmq.lib.test as test from intelmq.bots.collectors.mail.collector_mail_attach import MailAttachCollectorBot from intelmq.lib.utils import base64_encode if os.getenv('INTELMQ_TEST_EXOTIC'): from .lib import MockedZipImbox, MockedBadAttachmentImbox REPORT_FOOBARZIP = { '__type': 'Report', 'extra.email_from': 'wagner@cert.at', 'extra.email_message_id': '<07ce0153-060b-f48d-73d9-d92a20b3b3aa@cert.at>', 'extra.email_subject': 'foobar zip', 'feed.accuracy': 100.0, 'feed.name': 'IMAP Feed', 'raw': base64_encode('bar text\n'), 'extra.file_name': 'foobar', } @test.skip_exotic() class TestMailAttachCollectorBot(test.BotTestCase, unittest.TestCase): """ Test MailAttachCollectorBot """ @classmethod def set_bot(cls): cls.bot_reference = MailAttachCollectorBot cls.sysconfig = {'mail_host': None, 'mail_user': None, 'mail_password': None, 'mail_ssl': None, 'folder': None, 'subject_regex': None, 'attach_regex': '.*zip', 'name': 'IMAP Feed', } def test_extract_files(self): with mock.patch('imbox.Imbox', new=MockedZipImbox): self.run_bot(parameters={'extract_files': True}) self.assertMessageEqual(0, REPORT_FOOBARZIP) def test_attach_unzip(self): self.allowed_warning_count = 1 with mock.patch('imbox.Imbox', new=MockedZipImbox): self.run_bot(parameters={'attach_unzip': True}) self.assertMessageEqual(0, REPORT_FOOBARZIP) def test_attach_no_filename(self): """ https://github.com/certtools/intelmq/issues/1538 """ with mock.patch('imbox.Imbox', new=MockedBadAttachmentImbox): self.run_bot() self.assertOutputQueueLen(0) if __name__ == '__main__': # pragma: no cover unittest.main()
{ "pile_set_name": "Github" }
/*************************************************************************************************
  Required Notice: Copyright (C) EPPlus Software AB.
  This software is licensed under PolyForm Noncommercial License 1.0.0
  and may only be used for noncommercial purposes
  https://polyformproject.org/licenses/noncommercial/1.0.0/

  A commercial license to use this software can be purchased at https://epplussoftware.com
 *************************************************************************************************
  Date               Author                       Change
 *************************************************************************************************
  01/27/2020         EPPlus Software AB       Initial release EPPlus 5
 *************************************************************************************************/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using OfficeOpenXml.FormulaParsing.Excel.Functions.Metadata;
using OfficeOpenXml.FormulaParsing.ExpressionGraph;

namespace OfficeOpenXml.FormulaParsing.Excel.Functions.Math
{
    [FunctionMetadata(
        Category = ExcelFunctionCategory.Statistical,
        EPPlusVersion = "4",
        Description = "Returns the statistical rank of a given value, within a supplied array of values")]
    internal class Rank : RankFunctionBase
    {
        // When true, ties are ranked by their average position (RANK.AVG
        // behaviour); when false, the first matching position is used (RANK).
        private readonly bool _isAvg;

        public Rank() : this(false)
        {
        }

        public Rank(bool isAvg)
        {
            _isAvg = isAvg;
        }

        /// <summary>
        /// Computes the rank of the first argument within the range supplied
        /// as the second argument. An optional third argument selects
        /// ascending sort order (default is descending).
        /// </summary>
        /// <param name="arguments">number, ref, [order]</param>
        /// <param name="context">Parsing context supplied by the engine.</param>
        /// <returns>The rank as a decimal, or #N/A when the number is not present in the range.</returns>
        public override CompileResult Execute(IEnumerable<FunctionArgument> arguments, ParsingContext context)
        {
            ValidateArguments(arguments, 2);

            var target = ArgToDecimal(arguments, 0);
            var rangeArgument = arguments.ElementAt(1);

            // Third argument (sort order) is optional; default is descending.
            var ascending = false;
            if (arguments.Count() > 2)
            {
                ascending = ArgToBool(arguments, 2);
            }

            var values = GetNumbersFromRange(rangeArgument, ascending);

            // IndexOf yields -1 when absent, making rank 0 — caught below.
            double rank = values.IndexOf(target) + 1;
            if (_isAvg)
            {
                // Average the first and last positions of equal values.
                double highestRank = values.LastIndexOf(target) + 1;
                rank += (highestRank - rank) / 2d;
            }

            if (rank <= 0 || rank > values.Count)
            {
                return new CompileResult(ExcelErrorValue.Create(eErrorType.NA), DataType.ExcelError);
            }
            return CreateResult(rank, DataType.Decimal);
        }
    }
}
{ "pile_set_name": "Github" }
/*------------------------------------------------------------------------- * * relnode.c * Relation-node lookup/construction routines * * Portions Copyright (c) 1996-2012, PostgreSQL Global Development Group * Portions Copyright (c) 1994, Regents of the University of California * * * IDENTIFICATION * src/backend/optimizer/util/relnode.c * *------------------------------------------------------------------------- */ #include "postgres.h" #include "optimizer/cost.h" #include "optimizer/pathnode.h" #include "optimizer/paths.h" #include "optimizer/placeholder.h" #include "optimizer/plancat.h" #include "optimizer/restrictinfo.h" #include "utils/hsearch.h" typedef struct JoinHashEntry { Relids join_relids; /* hash key --- MUST BE FIRST */ RelOptInfo *join_rel; } JoinHashEntry; static void build_joinrel_tlist(PlannerInfo *root, RelOptInfo *joinrel, RelOptInfo *input_rel); static List *build_joinrel_restrictlist(PlannerInfo *root, RelOptInfo *joinrel, RelOptInfo *outer_rel, RelOptInfo *inner_rel); static void build_joinrel_joinlist(RelOptInfo *joinrel, RelOptInfo *outer_rel, RelOptInfo *inner_rel); static List *subbuild_joinrel_restrictlist(RelOptInfo *joinrel, List *joininfo_list, List *new_restrictlist); static List *subbuild_joinrel_joinlist(RelOptInfo *joinrel, List *joininfo_list, List *new_joininfo); /* * setup_simple_rel_arrays * Prepare the arrays we use for quickly accessing base relations. 
*/ void setup_simple_rel_arrays(PlannerInfo *root) { Index rti; ListCell *lc; /* Arrays are accessed using RT indexes (1..N) */ root->simple_rel_array_size = list_length(root->parse->rtable) + 1; /* simple_rel_array is initialized to all NULLs */ root->simple_rel_array = (RelOptInfo **) palloc0(root->simple_rel_array_size * sizeof(RelOptInfo *)); /* simple_rte_array is an array equivalent of the rtable list */ root->simple_rte_array = (RangeTblEntry **) palloc0(root->simple_rel_array_size * sizeof(RangeTblEntry *)); rti = 1; foreach(lc, root->parse->rtable) { RangeTblEntry *rte = (RangeTblEntry *) lfirst(lc); root->simple_rte_array[rti++] = rte; } } /* * build_simple_rel * Construct a new RelOptInfo for a base relation or 'other' relation. */ RelOptInfo * build_simple_rel(PlannerInfo *root, int relid, RelOptKind reloptkind) { RelOptInfo *rel; RangeTblEntry *rte; /* Rel should not exist already */ Assert(relid > 0 && relid < root->simple_rel_array_size); if (root->simple_rel_array[relid] != NULL) elog(ERROR, "rel %d already exists", relid); /* Fetch RTE for relation */ rte = root->simple_rte_array[relid]; Assert(rte != NULL); rel = makeNode(RelOptInfo); rel->reloptkind = reloptkind; rel->relids = bms_make_singleton(relid); rel->rows = 0; rel->width = 0; rel->reltargetlist = NIL; rel->pathlist = NIL; rel->ppilist = NIL; rel->cheapest_startup_path = NULL; rel->cheapest_total_path = NULL; rel->cheapest_unique_path = NULL; rel->cheapest_parameterized_paths = NIL; rel->relid = relid; rel->rtekind = rte->rtekind; /* min_attr, max_attr, attr_needed, attr_widths are set below */ rel->indexlist = NIL; rel->pages = 0; rel->tuples = 0; rel->allvisfrac = 0; rel->subplan = NULL; rel->subroot = NULL; rel->fdwroutine = NULL; rel->fdw_private = NULL; rel->baserestrictinfo = NIL; rel->baserestrictcost.startup = 0; rel->baserestrictcost.per_tuple = 0; rel->joininfo = NIL; rel->has_eclass_joins = false; /* NEW FOR RECATHON */ rel->recommender = rte->recommender; /* Check type of 
rtable entry */ switch (rte->rtekind) { case RTE_RELATION: /* Table --- retrieve statistics from the system catalogs */ get_relation_info(root, rte->relid, rte->inh, rel); break; case RTE_SUBQUERY: case RTE_FUNCTION: case RTE_VALUES: case RTE_CTE: /* * Subquery, function, or values list --- set up attr range and * arrays * * Note: 0 is included in range to support whole-row Vars */ rel->min_attr = 0; rel->max_attr = list_length(rte->eref->colnames); rel->attr_needed = (Relids *) palloc0((rel->max_attr - rel->min_attr + 1) * sizeof(Relids)); rel->attr_widths = (int32 *) palloc0((rel->max_attr - rel->min_attr + 1) * sizeof(int32)); break; default: elog(ERROR, "unrecognized RTE kind: %d", (int) rte->rtekind); break; } /* Save the finished struct in the query's simple_rel_array */ root->simple_rel_array[relid] = rel; /* * If this rel is an appendrel parent, recurse to build "other rel" * RelOptInfos for its children. They are "other rels" because they are * not in the main join tree, but we will need RelOptInfos to plan access * to them. */ if (rte->inh) { ListCell *l; foreach(l, root->append_rel_list) { AppendRelInfo *appinfo = (AppendRelInfo *) lfirst(l); /* append_rel_list contains all append rels; ignore others */ if (appinfo->parent_relid != relid) continue; (void) build_simple_rel(root, appinfo->child_relid, RELOPT_OTHER_MEMBER_REL); } } return rel; } /* * find_base_rel * Find a base or other relation entry, which must already exist. */ RelOptInfo * find_base_rel(PlannerInfo *root, int relid) { RelOptInfo *rel; Assert(relid > 0); if (relid < root->simple_rel_array_size) { rel = root->simple_rel_array[relid]; if (rel) return rel; } elog(ERROR, "no relation entry for relid %d", relid); return NULL; /* keep compiler quiet */ } /* * build_join_rel_hash * Construct the auxiliary hash table for join relations. 
*/ static void build_join_rel_hash(PlannerInfo *root) { HTAB *hashtab; HASHCTL hash_ctl; ListCell *l; /* Create the hash table */ MemSet(&hash_ctl, 0, sizeof(hash_ctl)); hash_ctl.keysize = sizeof(Relids); hash_ctl.entrysize = sizeof(JoinHashEntry); hash_ctl.hash = bitmap_hash; hash_ctl.match = bitmap_match; hash_ctl.hcxt = CurrentMemoryContext; hashtab = hash_create("JoinRelHashTable", 256L, &hash_ctl, HASH_ELEM | HASH_FUNCTION | HASH_COMPARE | HASH_CONTEXT); /* Insert all the already-existing joinrels */ foreach(l, root->join_rel_list) { RelOptInfo *rel = (RelOptInfo *) lfirst(l); JoinHashEntry *hentry; bool found; hentry = (JoinHashEntry *) hash_search(hashtab, &(rel->relids), HASH_ENTER, &found); Assert(!found); hentry->join_rel = rel; } root->join_rel_hash = hashtab; } /* * find_join_rel * Returns relation entry corresponding to 'relids' (a set of RT indexes), * or NULL if none exists. This is for join relations. */ RelOptInfo * find_join_rel(PlannerInfo *root, Relids relids) { /* * Switch to using hash lookup when list grows "too long". The threshold * is arbitrary and is known only here. */ if (!root->join_rel_hash && list_length(root->join_rel_list) > 32) build_join_rel_hash(root); /* * Use either hashtable lookup or linear search, as appropriate. * * Note: the seemingly redundant hashkey variable is used to avoid taking * the address of relids; unless the compiler is exceedingly smart, doing * so would force relids out of a register and thus probably slow down the * list-search case. 
*/ if (root->join_rel_hash) { Relids hashkey = relids; JoinHashEntry *hentry; hentry = (JoinHashEntry *) hash_search(root->join_rel_hash, &hashkey, HASH_FIND, NULL); if (hentry) return hentry->join_rel; } else { ListCell *l; foreach(l, root->join_rel_list) { RelOptInfo *rel = (RelOptInfo *) lfirst(l); if (bms_equal(rel->relids, relids)) return rel; } } return NULL; } /* * build_join_rel * Returns relation entry corresponding to the union of two given rels, * creating a new relation entry if none already exists. * * 'joinrelids' is the Relids set that uniquely identifies the join * 'outer_rel' and 'inner_rel' are relation nodes for the relations to be * joined * 'sjinfo': join context info * 'restrictlist_ptr': result variable. If not NULL, *restrictlist_ptr * receives the list of RestrictInfo nodes that apply to this * particular pair of joinable relations. * * restrictlist_ptr makes the routine's API a little grotty, but it saves * duplicated calculation of the restrictlist... */ RelOptInfo * build_join_rel(PlannerInfo *root, Relids joinrelids, RelOptInfo *outer_rel, RelOptInfo *inner_rel, SpecialJoinInfo *sjinfo, List **restrictlist_ptr) { RelOptInfo *joinrel; List *restrictlist; /* * See if we already have a joinrel for this set of base rels. */ joinrel = find_join_rel(root, joinrelids); if (joinrel) { /* * Yes, so we only need to figure the restrictlist for this particular * pair of component relations. */ if (restrictlist_ptr) *restrictlist_ptr = build_joinrel_restrictlist(root, joinrel, outer_rel, inner_rel); return joinrel; } /* * Nope, so make one. 
*/ joinrel = makeNode(RelOptInfo); joinrel->reloptkind = RELOPT_JOINREL; joinrel->relids = bms_copy(joinrelids); joinrel->rows = 0; joinrel->width = 0; joinrel->reltargetlist = NIL; joinrel->pathlist = NIL; joinrel->ppilist = NIL; joinrel->cheapest_startup_path = NULL; joinrel->cheapest_total_path = NULL; joinrel->cheapest_unique_path = NULL; joinrel->cheapest_parameterized_paths = NIL; joinrel->relid = 0; /* indicates not a baserel */ joinrel->rtekind = RTE_JOIN; joinrel->min_attr = 0; joinrel->max_attr = 0; joinrel->attr_needed = NULL; joinrel->attr_widths = NULL; joinrel->indexlist = NIL; joinrel->pages = 0; joinrel->tuples = 0; joinrel->allvisfrac = 0; joinrel->subplan = NULL; joinrel->subroot = NULL; joinrel->fdwroutine = NULL; joinrel->fdw_private = NULL; joinrel->baserestrictinfo = NIL; joinrel->baserestrictcost.startup = 0; joinrel->baserestrictcost.per_tuple = 0; joinrel->joininfo = NIL; joinrel->has_eclass_joins = false; /* * Create a new tlist containing just the vars that need to be output from * this join (ie, are needed for higher joinclauses or final output). * * NOTE: the tlist order for a join rel will depend on which pair of outer * and inner rels we first try to build it from. But the contents should * be the same regardless. */ build_joinrel_tlist(root, joinrel, outer_rel); build_joinrel_tlist(root, joinrel, inner_rel); add_placeholders_to_joinrel(root, joinrel); /* * Construct restrict and join clause lists for the new joinrel. (The * caller might or might not need the restrictlist, but I need it anyway * for set_joinrel_size_estimates().) */ restrictlist = build_joinrel_restrictlist(root, joinrel, outer_rel, inner_rel); if (restrictlist_ptr) *restrictlist_ptr = restrictlist; build_joinrel_joinlist(joinrel, outer_rel, inner_rel); /* * This is also the right place to check whether the joinrel has any * pending EquivalenceClass joins. 
*/ joinrel->has_eclass_joins = has_relevant_eclass_joinclause(root, joinrel); /* * Set estimates of the joinrel's size. */ set_joinrel_size_estimates(root, joinrel, outer_rel, inner_rel, sjinfo, restrictlist); /* * Add the joinrel to the query's joinrel list, and store it into the * auxiliary hashtable if there is one. NB: GEQO requires us to append * the new joinrel to the end of the list! */ root->join_rel_list = lappend(root->join_rel_list, joinrel); if (root->join_rel_hash) { JoinHashEntry *hentry; bool found; hentry = (JoinHashEntry *) hash_search(root->join_rel_hash, &(joinrel->relids), HASH_ENTER, &found); Assert(!found); hentry->join_rel = joinrel; } /* * Also, if dynamic-programming join search is active, add the new joinrel * to the appropriate sublist. Note: you might think the Assert on number * of members should be for equality, but some of the level 1 rels might * have been joinrels already, so we can only assert <=. */ if (root->join_rel_level) { Assert(root->join_cur_level > 0); Assert(root->join_cur_level <= bms_num_members(joinrel->relids)); root->join_rel_level[root->join_cur_level] = lappend(root->join_rel_level[root->join_cur_level], joinrel); } return joinrel; } /* * build_joinrel_tlist * Builds a join relation's target list from an input relation. * (This is invoked twice to handle the two input relations.) * * The join's targetlist includes all Vars of its member relations that * will still be needed above the join. This subroutine adds all such * Vars from the specified input rel's tlist to the join rel's tlist. * * We also compute the expected width of the join's output, making use * of data that was cached at the baserel level by set_rel_width(). 
*/ static void build_joinrel_tlist(PlannerInfo *root, RelOptInfo *joinrel, RelOptInfo *input_rel) { Relids relids = joinrel->relids; ListCell *vars; foreach(vars, input_rel->reltargetlist) { Node *origvar = (Node *) lfirst(vars); Var *var; RelOptInfo *baserel; int ndx; /* * Ignore PlaceHolderVars in the input tlists; we'll make our own * decisions about whether to copy them. */ if (IsA(origvar, PlaceHolderVar)) continue; /* * We can't run into any child RowExprs here, but we could find a * whole-row Var with a ConvertRowtypeExpr atop it. */ var = (Var *) origvar; while (!IsA(var, Var)) { if (IsA(var, ConvertRowtypeExpr)) var = (Var *) ((ConvertRowtypeExpr *) var)->arg; else elog(ERROR, "unexpected node type in reltargetlist: %d", (int) nodeTag(var)); } /* Get the Var's original base rel */ baserel = find_base_rel(root, var->varno); /* Is it still needed above this joinrel? */ ndx = var->varattno - baserel->min_attr; if (bms_nonempty_difference(baserel->attr_needed[ndx], relids)) { /* Yup, add it to the output */ joinrel->reltargetlist = lappend(joinrel->reltargetlist, origvar); joinrel->width += baserel->attr_widths[ndx]; } } } /* * build_joinrel_restrictlist * build_joinrel_joinlist * These routines build lists of restriction and join clauses for a * join relation from the joininfo lists of the relations it joins. * * These routines are separate because the restriction list must be * built afresh for each pair of input sub-relations we consider, whereas * the join list need only be computed once for any join RelOptInfo. * The join list is fully determined by the set of rels making up the * joinrel, so we should get the same results (up to ordering) from any * candidate pair of sub-relations. But the restriction list is whatever * is not handled in the sub-relations, so it depends on which * sub-relations are considered. 
* * If a join clause from an input relation refers to base rels still not * present in the joinrel, then it is still a join clause for the joinrel; * we put it into the joininfo list for the joinrel. Otherwise, * the clause is now a restrict clause for the joined relation, and we * return it to the caller of build_joinrel_restrictlist() to be stored in * join paths made from this pair of sub-relations. (It will not need to * be considered further up the join tree.) * * In many case we will find the same RestrictInfos in both input * relations' joinlists, so be careful to eliminate duplicates. * Pointer equality should be a sufficient test for dups, since all * the various joinlist entries ultimately refer to RestrictInfos * pushed into them by distribute_restrictinfo_to_rels(). * * 'joinrel' is a join relation node * 'outer_rel' and 'inner_rel' are a pair of relations that can be joined * to form joinrel. * * build_joinrel_restrictlist() returns a list of relevant restrictinfos, * whereas build_joinrel_joinlist() stores its results in the joinrel's * joininfo list. One or the other must accept each given clause! * * NB: Formerly, we made deep(!) copies of each input RestrictInfo to pass * up to the join relation. I believe this is no longer necessary, because * RestrictInfo nodes are no longer context-dependent. Instead, just include * the original nodes in the lists made for the join relation. */ static List * build_joinrel_restrictlist(PlannerInfo *root, RelOptInfo *joinrel, RelOptInfo *outer_rel, RelOptInfo *inner_rel) { List *result; /* * Collect all the clauses that syntactically belong at this level, * eliminating any duplicates (important since we will see many of the * same clauses arriving from both input relations). */ result = subbuild_joinrel_restrictlist(joinrel, outer_rel->joininfo, NIL); result = subbuild_joinrel_restrictlist(joinrel, inner_rel->joininfo, result); /* * Add on any clauses derived from EquivalenceClasses. 
These cannot be * redundant with the clauses in the joininfo lists, so don't bother * checking. */ result = list_concat(result, generate_join_implied_equalities(root, joinrel->relids, outer_rel->relids, inner_rel)); return result; } static void build_joinrel_joinlist(RelOptInfo *joinrel, RelOptInfo *outer_rel, RelOptInfo *inner_rel) { List *result; /* * Collect all the clauses that syntactically belong above this level, * eliminating any duplicates (important since we will see many of the * same clauses arriving from both input relations). */ result = subbuild_joinrel_joinlist(joinrel, outer_rel->joininfo, NIL); result = subbuild_joinrel_joinlist(joinrel, inner_rel->joininfo, result); joinrel->joininfo = result; } static List * subbuild_joinrel_restrictlist(RelOptInfo *joinrel, List *joininfo_list, List *new_restrictlist) { ListCell *l; foreach(l, joininfo_list) { RestrictInfo *rinfo = (RestrictInfo *) lfirst(l); if (bms_is_subset(rinfo->required_relids, joinrel->relids)) { /* * This clause becomes a restriction clause for the joinrel, since * it refers to no outside rels. Add it to the list, being * careful to eliminate duplicates. (Since RestrictInfo nodes in * different joinlists will have been multiply-linked rather than * copied, pointer equality should be a sufficient test.) */ new_restrictlist = list_append_unique_ptr(new_restrictlist, rinfo); } else { /* * This clause is still a join clause at this level, so we ignore * it in this routine. */ } } return new_restrictlist; } static List * subbuild_joinrel_joinlist(RelOptInfo *joinrel, List *joininfo_list, List *new_joininfo) { ListCell *l; foreach(l, joininfo_list) { RestrictInfo *rinfo = (RestrictInfo *) lfirst(l); if (bms_is_subset(rinfo->required_relids, joinrel->relids)) { /* * This clause becomes a restriction clause for the joinrel, since * it refers to no outside rels. So we can ignore it in this * routine. 
*/ } else { /* * This clause is still a join clause at this level, so add it to * the new joininfo list, being careful to eliminate duplicates. * (Since RestrictInfo nodes in different joinlists will have been * multiply-linked rather than copied, pointer equality should be * a sufficient test.) */ new_joininfo = list_append_unique_ptr(new_joininfo, rinfo); } } return new_joininfo; } /* * find_childrel_appendrelinfo * Get the AppendRelInfo associated with an appendrel child rel. * * This search could be eliminated by storing a link in child RelOptInfos, * but for now it doesn't seem performance-critical. */ AppendRelInfo * find_childrel_appendrelinfo(PlannerInfo *root, RelOptInfo *rel) { Index relid = rel->relid; ListCell *lc; /* Should only be called on child rels */ Assert(rel->reloptkind == RELOPT_OTHER_MEMBER_REL); foreach(lc, root->append_rel_list) { AppendRelInfo *appinfo = (AppendRelInfo *) lfirst(lc); if (appinfo->child_relid == relid) return appinfo; } /* should have found the entry ... */ elog(ERROR, "child rel %d not found in append_rel_list", relid); return NULL; /* not reached */ } /* * get_baserel_parampathinfo * Get the ParamPathInfo for a parameterized path for a base relation, * constructing one if we don't have one already. * * This centralizes estimating the rowcounts for parameterized paths. * We need to cache those to be sure we use the same rowcount for all paths * of the same parameterization for a given rel. This is also a convenient * place to determine which movable join clauses the parameterized path will * be responsible for evaluating. 
*/ ParamPathInfo * get_baserel_parampathinfo(PlannerInfo *root, RelOptInfo *baserel, Relids required_outer) { ParamPathInfo *ppi; Relids joinrelids; List *pclauses; double rows; ListCell *lc; /* Unparameterized paths have no ParamPathInfo */ if (bms_is_empty(required_outer)) return NULL; Assert(!bms_overlap(baserel->relids, required_outer)); /* If we already have a PPI for this parameterization, just return it */ foreach(lc, baserel->ppilist) { ppi = (ParamPathInfo *) lfirst(lc); if (bms_equal(ppi->ppi_req_outer, required_outer)) return ppi; } /* * Identify all joinclauses that are movable to this base rel given this * parameterization. */ joinrelids = bms_union(baserel->relids, required_outer); pclauses = NIL; foreach(lc, baserel->joininfo) { RestrictInfo *rinfo = (RestrictInfo *) lfirst(lc); if (join_clause_is_movable_into(rinfo, baserel->relids, joinrelids)) pclauses = lappend(pclauses, rinfo); } /* * Add in joinclauses generated by EquivalenceClasses, too. (These * necessarily satisfy join_clause_is_movable_into.) */ pclauses = list_concat(pclauses, generate_join_implied_equalities(root, joinrelids, required_outer, baserel)); /* Estimate the number of rows returned by the parameterized scan */ rows = get_parameterized_baserel_size(root, baserel, pclauses); /* And now we can build the ParamPathInfo */ ppi = makeNode(ParamPathInfo); ppi->ppi_req_outer = required_outer; ppi->ppi_rows = rows; ppi->ppi_clauses = pclauses; baserel->ppilist = lappend(baserel->ppilist, ppi); return ppi; } /* * get_joinrel_parampathinfo * Get the ParamPathInfo for a parameterized path for a join relation, * constructing one if we don't have one already. * * This centralizes estimating the rowcounts for parameterized paths. * We need to cache those to be sure we use the same rowcount for all paths * of the same parameterization for a given rel. This is also a convenient * place to determine which movable join clauses the parameterized path will * be responsible for evaluating. 
 *
 * outer_path and inner_path are a pair of input paths that can be used to
 * construct the join, and restrict_clauses is the list of regular join
 * clauses (including clauses derived from EquivalenceClasses) that must be
 * applied at the join node when using these inputs.
 *
 * Unlike the situation for base rels, the set of movable join clauses to be
 * enforced at a join varies with the selected pair of input paths, so we
 * must calculate that and pass it back, even if we already have a matching
 * ParamPathInfo.  We handle this by adding any clauses moved down to this
 * join to *restrict_clauses, which is an in/out parameter.  (The addition
 * is done in such a way as to not modify the passed-in List structure.)
 *
 * Note: when considering a nestloop join, the caller must have removed from
 * restrict_clauses any movable clauses that are themselves scheduled to be
 * pushed into the right-hand path.  We do not do that here since it's
 * unnecessary for other join types.
 */
ParamPathInfo *
get_joinrel_parampathinfo(PlannerInfo *root, RelOptInfo *joinrel,
						  Path *outer_path,
						  Path *inner_path,
						  SpecialJoinInfo *sjinfo,
						  Relids required_outer,
						  List **restrict_clauses)
{
	ParamPathInfo *ppi;
	Relids		join_and_req;
	Relids		outer_and_req;
	Relids		inner_and_req;
	List	   *pclauses;
	List	   *eclauses;
	double		rows;
	ListCell   *lc;

	/* Unparameterized paths have no ParamPathInfo or extra join clauses */
	if (bms_is_empty(required_outer))
		return NULL;

	/* The join rel can never be parameterized by its own member rels */
	Assert(!bms_overlap(joinrel->relids, required_outer));

	/*
	 * Identify all joinclauses that are movable to this join rel given this
	 * parameterization.  These are the clauses that are movable into this
	 * join, but not movable into either input path.  Treat an unparameterized
	 * input path as not accepting parameterized clauses (because it won't,
	 * per the shortcut exit above), even though the joinclause movement rules
	 * might allow the same clauses to be moved into a parameterized path for
	 * that rel.
	 */
	join_and_req = bms_union(joinrel->relids, required_outer);
	if (outer_path->param_info)
		outer_and_req = bms_union(outer_path->parent->relids,
								  PATH_REQ_OUTER(outer_path));
	else
		outer_and_req = NULL;	/* outer path does not accept parameters */
	if (inner_path->param_info)
		inner_and_req = bms_union(inner_path->parent->relids,
								  PATH_REQ_OUTER(inner_path));
	else
		inner_and_req = NULL;	/* inner path does not accept parameters */

	pclauses = NIL;
	foreach(lc, joinrel->joininfo)
	{
		RestrictInfo *rinfo = (RestrictInfo *) lfirst(lc);

		/* keep only clauses movable here but into neither input */
		if (join_clause_is_movable_into(rinfo,
										joinrel->relids,
										join_and_req) &&
			!join_clause_is_movable_into(rinfo,
										 outer_path->parent->relids,
										 outer_and_req) &&
			!join_clause_is_movable_into(rinfo,
										 inner_path->parent->relids,
										 inner_and_req))
			pclauses = lappend(pclauses, rinfo);
	}

	/* Consider joinclauses generated by EquivalenceClasses, too */
	eclauses = generate_join_implied_equalities(root,
												join_and_req,
												required_outer,
												joinrel);
	/* We only want ones that aren't movable to lower levels */
	foreach(lc, eclauses)
	{
		RestrictInfo *rinfo = (RestrictInfo *) lfirst(lc);

		/* EC-generated clauses are movable into this join by construction */
		Assert(join_clause_is_movable_into(rinfo,
										   joinrel->relids,
										   join_and_req));
		if (!join_clause_is_movable_into(rinfo,
										 outer_path->parent->relids,
										 outer_and_req) &&
			!join_clause_is_movable_into(rinfo,
										 inner_path->parent->relids,
										 inner_and_req))
			pclauses = lappend(pclauses, rinfo);
	}

	/*
	 * Now, attach the identified moved-down clauses to the caller's
	 * restrict_clauses list.  By using list_concat in this order, we leave
	 * the original list structure of restrict_clauses undamaged.
	 */
	*restrict_clauses = list_concat(pclauses, *restrict_clauses);

	/* If we already have a PPI for this parameterization, just return it */
	foreach(lc, joinrel->ppilist)
	{
		ppi = (ParamPathInfo *) lfirst(lc);
		if (bms_equal(ppi->ppi_req_outer, required_outer))
			return ppi;
	}

	/* Estimate the number of rows returned by the parameterized join */
	rows = get_parameterized_joinrel_size(root, joinrel,
										  outer_path->rows,
										  inner_path->rows,
										  sjinfo,
										  *restrict_clauses);

	/*
	 * And now we can build the ParamPathInfo.  No point in saving the
	 * input-pair-dependent clause list, though.
	 *
	 * Note: in GEQO mode, we'll be called in a temporary memory context, but
	 * the joinrel structure is there too, so no problem.
	 */
	ppi = makeNode(ParamPathInfo);
	ppi->ppi_req_outer = required_outer;
	ppi->ppi_rows = rows;
	ppi->ppi_clauses = NIL;
	joinrel->ppilist = lappend(joinrel->ppilist, ppi);

	return ppi;
}

/*
 * get_appendrel_parampathinfo
 *		Get the ParamPathInfo for a parameterized path for an append relation.
 *
 * For an append relation, the rowcount estimate will just be the sum of
 * the estimates for its children.  However, we still need a ParamPathInfo
 * to flag the fact that the path requires parameters.  So this just creates
 * a suitable struct with zero ppi_rows (and no ppi_clauses either, since
 * the Append node isn't responsible for checking quals).
*/ ParamPathInfo * get_appendrel_parampathinfo(RelOptInfo *appendrel, Relids required_outer) { ParamPathInfo *ppi; ListCell *lc; /* Unparameterized paths have no ParamPathInfo */ if (bms_is_empty(required_outer)) return NULL; Assert(!bms_overlap(appendrel->relids, required_outer)); /* If we already have a PPI for this parameterization, just return it */ foreach(lc, appendrel->ppilist) { ppi = (ParamPathInfo *) lfirst(lc); if (bms_equal(ppi->ppi_req_outer, required_outer)) return ppi; } /* Else build the ParamPathInfo */ ppi = makeNode(ParamPathInfo); ppi->ppi_req_outer = required_outer; ppi->ppi_rows = 0; ppi->ppi_clauses = NIL; appendrel->ppilist = lappend(appendrel->ppilist, ppi); return ppi; }
{ "pile_set_name": "Github" }
# NOTE(review): appears to be a bors-ng (merge bot) configuration — confirm
# against the repository's CI setup.

# Number of approving reviews required before a merge is attempted.
required_approvals = 1

# PRs carrying any of these labels are blocked from merging.
block_labels = ["wip"]

# Delete the source branch once its PR has been merged.
delete_merged_branches = true

# CI status checks that must all pass before merging — one build
# per supported STM32 family.
status = [
	"Check (stm32f0)",
	"Check (stm32f1)",
	"Check (stm32f2)",
	"Check (stm32f3)",
	"Check (stm32f4)",
	"Check (stm32f7)",
	"Check (stm32h7)",
	"Check (stm32l0)",
	"Check (stm32l1)",
	"Check (stm32l4)",
	"Check (stm32l5)",
	"Check (stm32g0)",
	"Check (stm32g4)",
	"Check (stm32mp1)",
	"Check (stm32wl)",
]
{ "pile_set_name": "Github" }
(Statements (Assignment (Identifier) { (Float) ->(Float) }))
{ "pile_set_name": "Github" }
/* Product card: white tile that grows and lifts on hover. */
.product {
  background: #FFF;
  padding: 15px;
  margin-bottom: 15px;
  transition: all 200ms ease-in; /* animates the hover transform/shadow */
}

/* Hover state: scale up, lift, and cast a drop shadow. */
.product:hover {
  transform: scale(1.1) translateY(-15px);
  box-shadow: 7px 9px 18px -2px rgba(61,61,61,1);
}

/* Whole card is a clickable link; keep the text color on hover and
   suppress the underline. */
.product-link {
  display: block;
  color: #3D3D3D;
}

.product-link:hover {
  color: #3D3D3D;
  text-decoration: none;
}

/* Product image fills the card width; the inner img is centered. */
.product .product__image {
  width: 100%;
}

.product__image img {
  display: block;
  margin-left: auto;
  margin-right: auto;
}

/* Description block: muted small text, tightened heading margins. */
.product__description {
  width: 100%;
}

.product__description small {
  color: #808080;
}

.product__description h4 {
  margin: 3px 0 5px 0;
}

/* Price + cart row: flex layout with the price taking the free space. */
.product__price-cart {
  display: flex;
  margin-top: 10px;
}

.product__price-cart p {
  flex-grow: 2;
  font-size: 20px;
  font-weight: bold;
}
{ "pile_set_name": "Github" }
// Copyright 2008 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following // disclaimer in the documentation and/or other materials provided // with the distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include "cctest.h" #include <stdio.h> #include <stdlib.h> #include <string.h> CcTest* CcTest::last_ = NULL; CcTest::CcTest(TestFunction* callback, const char* test_file, const char* test_name, const char* test_dependency, bool test_is_enabled) : callback_(callback), name_(test_name), dependency_(test_dependency), prev_(last_) { // Find the base name of this test (const_cast required on Windows). 
  // (continuation of the CcTest constructor, started above)
  // Locate the last path separator; try Unix '/' first, then Windows '\\'.
  char *basename = strrchr(const_cast<char *>(test_file), '/');
  if (!basename) {
    basename = strrchr(const_cast<char *>(test_file), '\\');
  }
  if (!basename) {
    // No separator at all: the whole argument is the file name.
    basename = strdup(test_file);
  } else {
    basename = strdup(basename + 1);
  }
  // Drop the extension, if there is one.
  char *extension = strrchr(basename, '.');
  if (extension) *extension = 0;
  // Install this test in the list of tests
  // NOTE(review): basename is strdup'd and never freed — presumably
  // acceptable for a test harness that lives for the whole process.
  file_ = basename;
  enabled_ = test_is_enabled;
  // NOTE(review): prev_ was already set via the initializer list; this
  // re-assignment is redundant but harmless.
  prev_ = last_;
  last_ = this;
}

// Recursively prints the registered tests oldest-first (the registration
// list is a LIFO stack, so we recurse before printing).
static void PrintTestList(CcTest* current) {
  if (current == NULL) return;
  PrintTestList(current->prev());
  if (current->dependency() != NULL) {
    printf("%s/%s<%s\n", current->file(), current->name(),
           current->dependency());
  } else {
    printf("%s/%s<\n", current->file(), current->name());
  }
}

// Test driver: with --list prints all tests; otherwise each argument is
// either "file/name" (run one exact test) or a bare token matched against
// both file and test names (run all matches).
int main(int argc, char* argv[]) {
  int tests_run = 0;
  bool print_run_count = true;
  for (int i = 1; i < argc; i++) {
    char* arg = argv[i];
    if (strcmp(arg, "--list") == 0) {
      PrintTestList(CcTest::last());
      print_run_count = false;
    } else {
      // Copy the argument since we may carve it up with a NUL below.
      char* arg_copy = strdup(arg);
      char* testname = strchr(arg_copy, '/');
      if (testname) {
        // Split the string in two by nulling the slash and then run
        // exact matches.
        *testname = 0;
        char* file = arg_copy;
        char* name = testname + 1;
        CcTest* test = CcTest::last();
        while (test != NULL) {
          if (test->enabled()
              && strcmp(test->file(), file) == 0
              && strcmp(test->name(), name) == 0) {
            test->Run();
            tests_run++;
          }
          test = test->prev();
        }
      } else {
        // Run all tests with the specified file or test name.
        char* file_or_name = arg_copy;
        CcTest* test = CcTest::last();
        while (test != NULL) {
          if (test->enabled()
              && (strcmp(test->file(), file_or_name) == 0
                  || strcmp(test->name(), file_or_name) == 0)) {
            test->Run();
            tests_run++;
          }
          test = test->prev();
        }
      }
      free(arg_copy);
    }
  }
  // Only report a count when it is interesting (not exactly one test).
  if (print_run_count && tests_run != 1)
    printf("Ran %i tests.\n", tests_run);
  return 0;
}
{ "pile_set_name": "Github" }
/******************************************************************************* * Copyright 2020 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ #ifndef GPU_OCL_ZERO_PAD_REF_ZERO_PAD_HPP #define GPU_OCL_ZERO_PAD_REF_ZERO_PAD_HPP #include "gpu/gpu_primitive.hpp" #include "gpu/gpu_resource.hpp" #include "gpu/gpu_zero_pad_pd.hpp" #include "gpu/primitive_conf.hpp" namespace dnnl { namespace impl { namespace gpu { namespace ocl { struct ref_zero_pad_t : public gpu_primitive_t { struct pd_t : public gpu_zero_pad_pd_t { using gpu_zero_pad_pd_t::gpu_zero_pad_pd_t; DECLARE_COMMON_PD_T("ocl:ref:any", ref_zero_pad_t); status_t init(engine_t *engine) { return status::success; } }; ref_zero_pad_t(const pd_t *apd) : gpu_primitive_t(apd) {}; status_t init(engine_t *engine) override { compute::kernel_ctx_t kernel_ctx; create_kernel(engine, &kernel_, "ref_zero_pad", kernel_ctx); if (!kernel_) return status::runtime_error; return status::success; } status_t execute(const exec_ctx_t &ctx) const override; private: const pd_t *pd() const { return (const pd_t *)primitive_t::pd().get(); } compute::kernel_t kernel_; }; } // namespace ocl } // namespace gpu } // namespace impl } // namespace dnnl #endif
{ "pile_set_name": "Github" }
[DECRYPT] COUNT = 0 KEY = 00000000000000000000000000000000 IV = f34481ec3cc627bacd5dc3fb08f273e6 CIPHERTEXT = 0336763e966d92595a567cc9ce537f5e PLAINTEXT = 00000000000000000000000000000000 COUNT = 1 KEY = 00000000000000000000000000000000 IV = 9798c4640bad75c7c3227db910174e72 CIPHERTEXT = a9a1631bf4996954ebc093957b234589 PLAINTEXT = 00000000000000000000000000000000 COUNT = 2 KEY = 00000000000000000000000000000000 IV = 96ab5c2ff612d9dfaae8c31f30c42168 CIPHERTEXT = ff4f8391a6a40ca5b25d23bedd44a597 PLAINTEXT = 00000000000000000000000000000000 COUNT = 3 KEY = 00000000000000000000000000000000 IV = 6a118a874519e64e9963798a503f1d35 CIPHERTEXT = dc43be40be0e53712f7e2bf5ca707209 PLAINTEXT = 00000000000000000000000000000000 COUNT = 4 KEY = 00000000000000000000000000000000 IV = cb9fceec81286ca3e989bd979b0cb284 CIPHERTEXT = 92beedab1895a94faa69b632e5cc47ce PLAINTEXT = 00000000000000000000000000000000 COUNT = 5 KEY = 00000000000000000000000000000000 IV = b26aeb1874e47ca8358ff22378f09144 CIPHERTEXT = 459264f4798f6a78bacb89c15ed3d601 PLAINTEXT = 00000000000000000000000000000000 COUNT = 6 KEY = 00000000000000000000000000000000 IV = 58c8e00b2631686d54eab84b91f0aca1 CIPHERTEXT = 08a4e2efec8a8e3312ca7460b9040bbf PLAINTEXT = 00000000000000000000000000000000
{ "pile_set_name": "Github" }
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ de.tudarmstadt.ukp.inception.externalsearch.config.ExternalSearchAutoConfiguration
{ "pile_set_name": "Github" }
package io.joynr.androidhelloworldconsumer; import android.app.Application; import com.google.inject.Injector; import com.google.inject.Module; import java.util.Properties; import io.joynr.messaging.MessagingPropertyKeys; import io.joynr.messaging.websocket.WebsocketModule; import io.joynr.runtime.JoynrInjectorFactory; import io.joynr.runtime.JoynrRuntime; import io.joynr.runtime.LibjoynrWebSocketRuntimeModule; /** * This class extends from Application instead of JoynrApplication, because it is the standard way * of creating an application on Android and makes it easier to interact with the GUI. */ public class HelloWorldConsumerApplication extends Application { public JoynrRuntime runtime; public JoynrRuntime getRuntime() { return runtime; } @Override public void onCreate() { super.onCreate(); final String host = "localhost"; final int port = 4242; final String STATIC_PERSISTENCE_FILE = "consumer-joynr.properties"; final Properties joynrConfig = new Properties(); joynrConfig.setProperty(WebsocketModule.PROPERTY_WEBSOCKET_MESSAGING_HOST, host); joynrConfig.setProperty(WebsocketModule.PROPERTY_WEBSOCKET_MESSAGING_PORT, "" + port); joynrConfig.setProperty(WebsocketModule.PROPERTY_WEBSOCKET_MESSAGING_PROTOCOL, "ws"); joynrConfig.setProperty(WebsocketModule.PROPERTY_WEBSOCKET_MESSAGING_PATH, ""); joynrConfig.setProperty(MessagingPropertyKeys.PERSISTENCE_FILE, getApplicationContext().getCacheDir() + "/" + STATIC_PERSISTENCE_FILE); final Module runtimeModule = new LibjoynrWebSocketRuntimeModule(); final Injector joynrInjector = new JoynrInjectorFactory(joynrConfig, runtimeModule).createChildInjector(); runtime = joynrInjector.getInstance(JoynrRuntime.class); } }
{ "pile_set_name": "Github" }
// Package pgx is a PostgreSQL database driver. /* pgx provides lower level access to PostgreSQL than the standard database/sql It remains as similar to the database/sql interface as possible while providing better speed and access to PostgreSQL specific features. Import github.com/jack/pgx/stdlib to use pgx as a database/sql compatible driver. Query Interface pgx implements Query and Scan in the familiar database/sql style. var sum int32 // Send the query to the server. The returned rows MUST be closed // before conn can be used again. rows, err := conn.Query("select generate_series(1,$1)", 10) if err != nil { return err } // rows.Close is called by rows.Next when all rows are read // or an error occurs in Next or Scan. So it may optionally be // omitted if nothing in the rows.Next loop can panic. It is // safe to close rows multiple times. defer rows.Close() // Iterate through the result set for rows.Next() { var n int32 err = rows.Scan(&n) if err != nil { return err } sum += n } // Any errors encountered by rows.Next or rows.Scan will be returned here if rows.Err() != nil { return err } // No errors found - do something with sum pgx also implements QueryRow in the same style as database/sql. var name string var weight int64 err := conn.QueryRow("select name, weight from widgets where id=$1", 42).Scan(&name, &weight) if err != nil { return err } Use exec to execute a query that does not return a result set. commandTag, err := conn.Exec("delete from widgets where id=$1", 42) if err != nil { return err } if commandTag.RowsAffected() != 1 { return errors.New("No row found to delete") } Connection Pool Connection pool usage is explicit and configurable. In pgx, a connection can be created and managed directly, or a connection pool with a configurable maximum connections can be used. Also, the connection pool offers an after connect hook that allows every connection to be automatically setup before being made available in the connection pool. 
This is especially useful to ensure all connections have the same prepared statements available or to change any other connection settings. It delegates Query, QueryRow, Exec, and Begin functions to an automatically checked out and released connection so you can avoid manually acquiring and releasing connections when you do not need that level of control. var name string var weight int64 err := pool.QueryRow("select name, weight from widgets where id=$1", 42).Scan(&name, &weight) if err != nil { return err } Transactions Transactions are started by calling Begin or BeginIso. The BeginIso variant creates a transaction with a specified isolation level. tx, err := conn.Begin() if err != nil { return err } // Rollback is safe to call even if the tx is already closed, so if // the tx commits successfully, this is a no-op defer tx.Rollback() _, err = tx.Exec("insert into foo(id) values (1)") if err != nil { return err } err = tx.Commit() if err != nil { return err } Listen and Notify pgx can listen to the PostgreSQL notification system with the WaitForNotification function. It takes a maximum time to wait for a notification. err := conn.Listen("channelname") if err != nil { return nil } if notification, err := conn.WaitForNotification(time.Second); err != nil { // do something with notification } Null Mapping pgx can map nulls in two ways. The first is Null* types that have a data field and a valid field. They work in a similar fashion to database/sql. The second is to use a pointer to a pointer. var foo pgx.NullString var bar *string err := conn.QueryRow("select foo, bar from widgets where id=$1", 42).Scan(&a, &b) if err != nil { return err } Array Mapping pgx maps between int16, int32, int64, float32, float64, and string Go slices and the equivalent PostgreSQL array type. Go slices of native types do not support nulls, so if a PostgreSQL array that contains a slice is read into a native Go slice an error will occur. 
Hstore Mapping pgx includes an Hstore type and a NullHstore type. Hstore is simply a map[string]string and is preferred when the hstore contains no nulls. NullHstore follows the Null* pattern and supports null values. JSON and JSONB Mapping pgx includes built-in support to marshal and unmarshal between Go types and the PostgreSQL JSON and JSONB. Inet and Cidr Mapping pgx encodes from net.IPNet to and from inet and cidr PostgreSQL types. In addition, as a convenience pgx will encode from a net.IP; it will assume a /32 netmask for IPv4 and a /128 for IPv6. Custom Type Support pgx includes support for the common data types like integers, floats, strings, dates, and times that have direct mappings between Go and SQL. Support can be added for additional types like point, hstore, numeric, etc. that do not have direct mappings in Go by the types implementing Scanner and Encoder. Custom types can support text or binary formats. Binary format can provide a large performance increase. The natural place for deciding the format for a value would be in Scanner as it is responsible for decoding the returned data. However, that is impossible as the query has already been sent by the time the Scanner is invoked. The solution to this is the global DefaultTypeFormats. If a custom type prefers binary format it should register it there. pgx.DefaultTypeFormats["point"] = pgx.BinaryFormatCode Note that the type is referred to by name, not by OID. This is because custom PostgreSQL types like hstore will have different OIDs on different servers. When pgx establishes a connection it queries the pg_type table for all types. It then matches the names in DefaultTypeFormats with the returned OIDs and stores it in Conn.PgTypes. See example_custom_type_test.go for an example of a custom type for the PostgreSQL point type. pgx also includes support for custom types implementing the database/sql.Scanner and database/sql/driver.Valuer interfaces. 
Raw Bytes Mapping []byte passed as arguments to Query, QueryRow, and Exec are passed unmodified to PostgreSQL. In like manner, a *[]byte passed to Scan will be filled with the raw bytes returned by PostgreSQL. This can be especially useful for reading varchar, text, json, and jsonb values directly into a []byte and avoiding the type conversion from string. TLS The pgx ConnConfig struct has a TLSConfig field. If this field is nil, then TLS will be disabled. If it is present, then it will be used to configure the TLS connection. This allows total configuration of the TLS connection. Logging pgx defines a simple logger interface. Connections optionally accept a logger that satisfies this interface. The log15 package (http://gopkg.in/inconshreveable/log15.v2) satisfies this interface and it is simple to define adapters for other loggers. Set LogLevel to control logging verbosity. */ package pgx
{ "pile_set_name": "Github" }
<?php /* Copyright (c) 1998-2013 ILIAS open source, Extended GPL, see docs/LICENSE */ require_once 'Modules/Test/classes/class.ilTestQuestionSetConfig.php'; /** * class that manages/holds the data for a question set configuration for continues tests * * @author Björn Heyser <bheyser@databay.de> * @version $Id$ * * @package Modules/Test */ class ilTestRandomQuestionSetConfig extends ilTestQuestionSetConfig { const QUESTION_AMOUNT_CONFIG_MODE_PER_TEST = 'TEST'; const QUESTION_AMOUNT_CONFIG_MODE_PER_POOL = 'POOL'; /** * @var boolean */ private $requirePoolsWithHomogeneousScoredQuestions = null; /** * @var string */ private $questionAmountConfigurationMode = null; /** * @var integer */ private $questionAmountPerTest = null; /** * @var integer */ private $lastQuestionSyncTimestamp = null; //fau: fixRandomTestBuildable - variable for messages private $buildableMessages = array(); // fau. /** * @param ilTree $tree * @param ilDBInterface $db * @param ilPluginAdmin $pluginAdmin * @param ilObjTest $testOBJ */ public function __construct(ilTree $tree, ilDBInterface $db, ilPluginAdmin $pluginAdmin, ilObjTest $testOBJ) { parent::__construct($tree, $db, $pluginAdmin, $testOBJ); } /** * @param boolean $requirePoolsWithHomogeneousScoredQuestions */ public function setPoolsWithHomogeneousScoredQuestionsRequired($requirePoolsWithHomogeneousScoredQuestions) { $this->requirePoolsWithHomogeneousScoredQuestions = $requirePoolsWithHomogeneousScoredQuestions; } /** * @return boolean */ public function arePoolsWithHomogeneousScoredQuestionsRequired() { return $this->requirePoolsWithHomogeneousScoredQuestions; } /** * @param string $questionAmountConfigurationMode */ public function setQuestionAmountConfigurationMode($questionAmountConfigurationMode) { $this->questionAmountConfigurationMode = $questionAmountConfigurationMode; } /** * @return string */ public function getQuestionAmountConfigurationMode() { return $this->questionAmountConfigurationMode; } /** * @return boolean */ public 
function isQuestionAmountConfigurationModePerPool() { return $this->getQuestionAmountConfigurationMode() == self::QUESTION_AMOUNT_CONFIG_MODE_PER_POOL; } /** * @return boolean */ public function isQuestionAmountConfigurationModePerTest() { return $this->getQuestionAmountConfigurationMode() == self::QUESTION_AMOUNT_CONFIG_MODE_PER_TEST; } public function isValidQuestionAmountConfigurationMode($amountMode) { switch ($amountMode) { case self::QUESTION_AMOUNT_CONFIG_MODE_PER_POOL: case self::QUESTION_AMOUNT_CONFIG_MODE_PER_TEST: return true; } return false; } /** * @param integer $questionAmountPerTest */ public function setQuestionAmountPerTest($questionAmountPerTest) { $this->questionAmountPerTest = $questionAmountPerTest; } /** * @return integer */ public function getQuestionAmountPerTest() { return $this->questionAmountPerTest; } /** * @param integer $lastQuestionSyncTimestamp */ public function setLastQuestionSyncTimestamp($lastQuestionSyncTimestamp) { $this->lastQuestionSyncTimestamp = $lastQuestionSyncTimestamp; } /** * @return integer */ public function getLastQuestionSyncTimestamp() { return $this->lastQuestionSyncTimestamp; } //fau: fixRandomTestBuildable - function to get messages public function getBuildableMessages() { return $this->buildableMessages; } // fau. 
// ----------------------------------------------------------------------------------------------------------------- /** * initialises the current object instance with values * from matching properties within the passed array * * @param array $dataArray */ public function initFromArray($dataArray) { foreach ($dataArray as $field => $value) { switch ($field) { case 'req_pools_homo_scored': $this->setPoolsWithHomogeneousScoredQuestionsRequired($value); break; case 'quest_amount_cfg_mode': $this->setQuestionAmountConfigurationMode($value); break; case 'quest_amount_per_test': $this->setQuestionAmountPerTest($value); break; case 'quest_sync_timestamp': $this->setLastQuestionSyncTimestamp($value); break; } } } /** * loads the question set config for current test from the database * * @return boolean */ public function loadFromDb() { $res = $this->db->queryF( "SELECT * FROM tst_rnd_quest_set_cfg WHERE test_fi = %s", array('integer'), array($this->testOBJ->getTestId()) ); while ($row = $this->db->fetchAssoc($res)) { $this->initFromArray($row); return true; } return false; } /** * saves the question set config for current test to the database * * @return boolean */ public function saveToDb() { if ($this->dbRecordExists($this->testOBJ->getTestId())) { $this->updateDbRecord($this->testOBJ->getTestId()); } else { $this->insertDbRecord($this->testOBJ->getTestId()); } } /** * saves the question set config for test with given id to the database * * @param $testId */ public function cloneToDbForTestId($testId) { $this->insertDbRecord($testId); } /** * deletes the question set config for current test from the database */ public function deleteFromDb() { $this->db->manipulateF( "DELETE FROM tst_rnd_quest_set_cfg WHERE test_fi = %s", array('integer'), array($this->testOBJ->getTestId()) ); } // ----------------------------------------------------------------------------------------------------------------- /** * checks wether a question set config for current test exists in the 
database * * @param $testId * @return boolean */ private function dbRecordExists($testId) { $res = $this->db->queryF( "SELECT COUNT(*) cnt FROM tst_rnd_quest_set_cfg WHERE test_fi = %s", array('integer'), array($testId) ); $row = $this->db->fetchAssoc($res); return (bool) $row['cnt']; } /** * updates the record in the database that corresponds * to the question set config for the current test * * @param $testId */ private function updateDbRecord($testId) { $this->db->update( 'tst_rnd_quest_set_cfg', array( 'req_pools_homo_scored' => array('integer', (int) $this->arePoolsWithHomogeneousScoredQuestionsRequired()), 'quest_amount_cfg_mode' => array('text', $this->getQuestionAmountConfigurationMode()), 'quest_amount_per_test' => array('integer', (int) $this->getQuestionAmountPerTest()), 'quest_sync_timestamp' => array('integer', (int) $this->getLastQuestionSyncTimestamp()) ), array( 'test_fi' => array('integer', $testId) ) ); } /** * inserts a new record for the question set config * for the current test into the database * * @param $testId */ private function insertDbRecord($testId) { $this->db->insert('tst_rnd_quest_set_cfg', array( 'test_fi' => array('integer', $testId), 'req_pools_homo_scored' => array('integer', (int) $this->arePoolsWithHomogeneousScoredQuestionsRequired()), 'quest_amount_cfg_mode' => array('text', $this->getQuestionAmountConfigurationMode()), 'quest_amount_per_test' => array('integer', (int) $this->getQuestionAmountPerTest()), 'quest_sync_timestamp' => array('integer', (int) $this->getLastQuestionSyncTimestamp()) )); } // ----------------------------------------------------------------------------------------------------------------- public function isQuestionSetConfigured() { // fau: delayCopyRandomQuestions - question set is not configured if date of last synchronisation is empty if ($this->getLastQuestionSyncTimestamp() == 0) { return false; } // fau. 
        // (remainder of isQuestionSetConfigured(): all structural checks
        // below must pass for the question set to count as configured)
        if (!$this->isQuestionAmountConfigComplete()) {
            return false;
        }

        if (!$this->hasSourcePoolDefinitions()) {
            return false;
        }

        if (!$this->isQuestionSetBuildable()) {
            return false;
        }

        return true;
    }

    /**
     * Checks that a question amount is configured everywhere it is needed:
     * in per-pool mode every source pool definition must request at least
     * one question, otherwise the per-test amount must be at least one.
     *
     * @return boolean
     */
    public function isQuestionAmountConfigComplete()
    {
        if ($this->isQuestionAmountConfigurationModePerPool()) {
            $sourcePoolDefinitionList = $this->buildSourcePoolDefinitionList($this->testOBJ);
            $sourcePoolDefinitionList->loadDefinitions();

            foreach ($sourcePoolDefinitionList as $definition) {
                /** @var ilTestRandomQuestionSetSourcePoolDefinition $definition */
                if ($definition->getQuestionAmount() < 1) {
                    return false;
                }
            }
        } elseif ($this->getQuestionAmountPerTest() < 1) {
            return false;
        }

        return true;
    }

    /**
     * Whether at least one source pool definition has been saved for this test.
     *
     * @return boolean
     */
    public function hasSourcePoolDefinitions()
    {
        $sourcePoolDefinitionList = $this->buildSourcePoolDefinitionList($this->testOBJ);

        return $sourcePoolDefinitionList->savedDefinitionsExist();
    }

    /**
     * Asks the question set builder whether a valid question set can be built
     * from the current definitions; any check messages are captured in
     * $this->buildableMessages (fau: fixRandomTestBuildable).
     *
     * @return boolean
     */
    public function isQuestionSetBuildable()
    {
        $sourcePoolDefinitionList = $this->buildSourcePoolDefinitionList($this->testOBJ);
        $sourcePoolDefinitionList->loadDefinitions();

        require_once 'Modules/Test/classes/class.ilTestRandomQuestionSetStagingPoolQuestionList.php';
        $stagingPoolQuestionList = new ilTestRandomQuestionSetStagingPoolQuestionList($this->db, $this->pluginAdmin);

        require_once 'Modules/Test/classes/class.ilTestRandomQuestionSetBuilder.php';
        $questionSetBuilder = ilTestRandomQuestionSetBuilder::getInstance($this->db, $this->testOBJ, $this, $sourcePoolDefinitionList, $stagingPoolQuestionList);

        //fau: fixRandomTestBuildable - get messages if set is not buildable
        $buildable = $questionSetBuilder->checkBuildable();
        $this->buildableMessages = $questionSetBuilder->getCheckMessages();
        return $buildable;
        // fau.
// Tail of a method whose head lies before this chunk: delegates the final
// buildability decision to the question-set builder constructed above.
return $questionSetBuilder->checkBuildable();
}

/**
 * Checks whether any question-set related data exists for this test:
 * either a persisted config record or saved source pool definitions.
 *
 * @return bool
 */
public function doesQuestionSetRelatedDataExist()
{
    if ($this->dbRecordExists($this->testOBJ->getTestId())) {
        return true;
    }

    $sourcePoolDefinitionList = $this->buildSourcePoolDefinitionList($this->testOBJ);

    if ($sourcePoolDefinitionList->savedDefinitionsExist()) {
        return true;
    }

    return false;
}

/**
 * Removes every piece of question-set related data for this test:
 * the source pool definitions, the staged question pool, the
 * taxonomy-filter test settings and finally the config db record itself.
 */
public function removeQuestionSetRelatedData()
{
    $sourcePoolDefinitionList = $this->buildSourcePoolDefinitionList($this->testOBJ);
    $sourcePoolDefinitionList->deleteDefinitions();

    require_once 'Modules/Test/classes/class.ilTestRandomQuestionSetStagingPoolBuilder.php';
    $stagingPool = new ilTestRandomQuestionSetStagingPoolBuilder(
        $this->db,
        $this->testOBJ
    );
    $stagingPool->reset();

    $this->resetQuestionSetRelatedTestSettings();

    $this->deleteFromDb();
}

/**
 * Resets the test settings that only make sense together with a random
 * question set (currently the result taxonomy filter) and persists them.
 */
public function resetQuestionSetRelatedTestSettings()
{
    $this->testOBJ->setResultFilterTaxIds(array());
    $this->testOBJ->saveToDb(true);
}

/**
 * Clones all question set config related data to a cloned/copied test:
 * the general config record, the source pool definitions (selection
 * rules) and a freshly rebuilt question stage.
 *
 * @param ilObjTest $cloneTestOBJ
 */
public function cloneQuestionSetRelatedData(ilObjTest $cloneTestOBJ)
{
    // clone general config
    $this->loadFromDb();
    $this->cloneToDbForTestId($cloneTestOBJ->getTestId());

    // clone source pool definitions (selection rules)
    $sourcePoolDefinitionList = $this->buildSourcePoolDefinitionList($this->testOBJ);
    $sourcePoolDefinitionList->loadDefinitions();
    $definitionIdMap = $sourcePoolDefinitionList->cloneDefinitionsForTestId($cloneTestOBJ->getTestId());
    $this->registerClonedSourcePoolDefinitionIdMapping($cloneTestOBJ, $definitionIdMap);

    // build new question stage for cloned test
    $sourcePoolDefinitionList = $this->buildSourcePoolDefinitionList($cloneTestOBJ);
    $stagingPool = $this->buildStagingPoolBuilder($cloneTestOBJ);

    $sourcePoolDefinitionList->loadDefinitions();
    $stagingPool->rebuild($sourcePoolDefinitionList);
    $sourcePoolDefinitionList->saveDefinitions();

    $this->updateLastQuestionSyncTimestampForTestId($cloneTestOBJ->getTestId(), time());
}

/**
 * Records, in the copy wizard options, the mapping from each original
 * source pool definition id to its clone, keyed by ref-id, and logs
 * every registered pair.
 *
 * @param ilObjTest $cloneTestOBJ
 * @param array     $definitionIdMap original definition id => cloned definition id
 */
private function registerClonedSourcePoolDefinitionIdMapping(ilObjTest $cloneTestOBJ, $definitionIdMap)
{
    global $DIC;
    $ilLog = $DIC['ilLog'];

    require_once 'Services/CopyWizard/classes/class.ilCopyWizardOptions.php';
    $cwo = ilCopyWizardOptions::_getInstance($cloneTestOBJ->getTmpCopyWizardCopyId());

    foreach ($definitionIdMap as $originalDefinitionId => $cloneDefinitionId) {
        $originalKey = $this->testOBJ->getRefId() . '_rndSelDef_' . $originalDefinitionId;
        $mappedKey = $cloneTestOBJ->getRefId() . '_rndSelDef_' . $cloneDefinitionId;
        $cwo->appendMapping($originalKey, $mappedKey);
        $ilLog->write(__METHOD__ . ": Added random selection definition id mapping $originalKey <-> $mappedKey");
    }
}

/**
 * Builds a source pool definition list (plus its factory) for the given
 * test object.
 *
 * @param ilObjTest $testOBJ
 * @return ilTestRandomQuestionSetSourcePoolDefinitionList
 */
private function buildSourcePoolDefinitionList(ilObjTest $testOBJ)
{
    require_once 'Modules/Test/classes/class.ilTestRandomQuestionSetSourcePoolDefinitionFactory.php';
    $sourcePoolDefinitionFactory = new ilTestRandomQuestionSetSourcePoolDefinitionFactory(
        $this->db,
        $testOBJ
    );

    require_once 'Modules/Test/classes/class.ilTestRandomQuestionSetSourcePoolDefinitionList.php';
    $sourcePoolDefinitionList = new ilTestRandomQuestionSetSourcePoolDefinitionList(
        $this->db,
        $testOBJ,
        $sourcePoolDefinitionFactory
    );

    return $sourcePoolDefinitionList;
}

/**
 * Builds a staging pool builder for the given test object.
 *
 * @param ilObjTest $testOBJ
 * @return ilTestRandomQuestionSetStagingPoolBuilder
 */
private function buildStagingPoolBuilder(ilObjTest $testOBJ)
{
    require_once 'Modules/Test/classes/class.ilTestRandomQuestionSetStagingPoolBuilder.php';
    $stagingPool = new ilTestRandomQuestionSetStagingPoolBuilder($this->db, $testOBJ);

    return $stagingPool;
}

// -----------------------------------------------------------------------------------------------------------------

/**
 * Stores the timestamp of the last question synchronisation for the
 * given test id directly in the config table.
 *
 * @param int $testId
 * @param int $timestamp unix timestamp
 */
public function updateLastQuestionSyncTimestampForTestId($testId, $timestamp)
{
    $this->db->update(
        'tst_rnd_quest_set_cfg',
        array(
            'quest_sync_timestamp' => array('integer', (int) $timestamp)
        ),
        array(
            'test_fi' => array('integer', $testId)
        )
    );
}

/**
 * Random question sets support filtering results by taxonomy.
 *
 * @return bool always true for this question set type
 */
public function isResultTaxonomyFilterSupported()
{
    return true;
}

// -----------------------------------------------------------------------------------------------------------------

/**
 * Returns the question pools selectable as sources for this test.
 *
 * @return array
 */
public function getSelectableQuestionPools()
{
    return $this->testOBJ->getAvailableQuestionpools(
        true,
        $this->arePoolsWithHomogeneousScoredQuestionsRequired(),
        false,
        true,
        true
    );
}

/**
 * @return bool true when at least one selectable question pool exists
 */
public function doesSelectableQuestionPoolsExist()
{
    return (bool) count($this->getSelectableQuestionPools());
}

// -----------------------------------------------------------------------------------------------------------------

/**
 * @return bool true when the test still uses the broken old-style random
 *              question set data
 */
public function areDepenciesBroken()
{
    return (bool) $this->testOBJ->isTestFinalBroken();
}

/**
 * @param ilLanguage $lng
 * @return string translated user message for the broken-dependencies state
 */
public function getDepenciesBrokenMessage(ilLanguage $lng)
{
    return $lng->txt('tst_old_style_rnd_quest_set_broken');
}

/**
 * Whitelist of GUI classes/commands that remain usable while the
 * question set dependencies are broken.
 *
 * @param string $nextClass
 * @param string $cmd
 * @return bool
 */
public function isValidRequestOnBrokenQuestionSetDepencies($nextClass, $cmd)
{
    //vd($nextClass, $cmd);

    switch ($nextClass) {
        case 'ilobjectmetadatagui':
        case 'ilpermissiongui':

            return true;

        case 'ilobjtestgui':
        case '':

            $cmds = array(
                'infoScreen',
                'participants',
                'npSetFilter',
                'npResetFilter',
                //'deleteAllUserResults',
                'confirmDeleteAllUserResults',
                //'deleteSingleUserResults',
                'confirmDeleteSelectedUserData',
                'cancelDeleteSelectedUserData'
            );

            if (in_array($cmd, $cmds)) {
                return true;
            }

            break;
    }

    return false;
}

/**
 * @return string[] tab ids to hide while the dependencies are broken
 */
public function getHiddenTabsOnBrokenDepencies()
{
    return array(
        'assQuestions',
        'settings',
        'manscoring',
        'scoringadjust',
        'statistics',
        'history',
        'export'
    );
}

// -----------------------------------------------------------------------------------------------------------------

/**
 * Builds an HTML string of links to all source question pools of this
 * test, joined by commas and keyed (deduplicated) by pool id.
 *
 * @return string
 */
public function getCommaSeparatedSourceQuestionPoolLinks()
{
    $definitionList = $this->buildSourcePoolDefinitionList($this->testOBJ);
    $definitionList->loadDefinitions();

    $poolTitles = array();

    foreach ($definitionList as $definition) {
        /* @var ilTestRandomQuestionSetSourcePoolDefinition $definition */

        $refId = current(ilObject::_getAllReferences($definition->getPoolId()));
        $href = ilLink::_getLink($refId, 'qpl');
        $title = $definition->getPoolTitle();

        $poolTitles[$definition->getPoolId()] = "<a href=\"$href\" alt=\"$title\">$title</a>";
    }

    return implode(', ', $poolTitles);
}
}
{ "pile_set_name": "Github" }
#!/usr/bin/perl
# Copyright (C) 2002-2013 Xiph.org Foundation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Translates ARM RVCT assembly (read line-by-line from the argument files
# or stdin) into GNU as syntax on stdout.
#
# Options:
#   -n       parse only; suppress the translated output
#   --apple  target Apple's toolchain: prefix symbols with "_" and skip
#            ELF-only directives (.type/.size/.arch/...)

my $bigend;  # little/big endian
my $nxstack;
my $apple = 0;
my $symprefix = "";

$nxstack = 0;

eval 'exec /usr/local/bin/perl -S $0 ${1+"$@"}'
    if $running_under_some_shell;

while ($ARGV[0] =~ /^-/) {
    $_ = shift;
    last if /^--$/;
    if (/^-n$/) {
        $nflag++;
        next;
    }
    if (/^--apple$/) {
        $apple = 1;
        $symprefix = "_";
        next;
    }
    # Fix: use "\n" (newline) -- the previous "\\n" printed a literal
    # backslash-n at the end of the error message.
    die "I don't recognize this switch: $_\n";
}
$printit++ unless $nflag;

$\ = "\n";      # automatically add newline on print
$n=0;

$thumb = 0;     # ARM mode by default, not Thumb.
@proc_stack = ();

printf (" .syntax unified\n");

LINE:
while (<>) {

    # For ADRLs we need to add a new line after the substituted one.
    $addPadding = 0;

    # First, we do not dare to touch *anything* inside double quotes, do we?
    # Second, if you want a dollar character in the string,
    # insert two of them -- that's how ARM C and assembler treat strings.
    s/^([A-Za-z_]\w*)[ \t]+DCB[ \t]*\"/$1: .ascii \"/ && do { s/\$\$/\$/g; next };
    s/\bDCB\b[ \t]*\"/.ascii \"/ && do { s/\$\$/\$/g; next };
    s/^(\S+)\s+RN\s+(\S+)/$1 .req r$2/ && do { s/\$\$/\$/g; next };
    # If there's nothing on a line but a comment, don't try to apply any further
    # substitutions (this is a cheap hack to avoid mucking up the license header)
    s/^([ \t]*);/$1@/ && do { s/\$\$/\$/g; next };
    # If substituted -- leave immediately !

    s/@/,:/;
    s/;/@/;
    while ( /@.*'/ ) {
        s/(@.*)'/$1/g;
    }
    s/\{FALSE\}/0/g;
    s/\{TRUE\}/1/g;
    s/\{(\w\w\w\w+)\}/$1/g;
    s/\bINCLUDE[ \t]*([^ \t\n]+)/.include \"$1\"/;
    s/\bGET[ \t]*([^ \t\n]+)/.include \"${ my $x=$1; $x =~ s|\.s|-gnu.S|; \$x }\"/;
    s/\bIMPORT\b/.extern/;
    s/\bEXPORT\b\s*/.global $symprefix/;
    s/^(\s+)\[/$1IF/;
    s/^(\s+)\|/$1ELSE/;
    s/^(\s+)\]/$1ENDIF/;
    s/IF *:DEF:/ .ifdef/;
    s/IF *:LNOT: *:DEF:/ .ifndef/;
    s/ELSE/ .else/;
    s/ENDIF/ .endif/;

    if( /\bIF\b/ ) {
        s/\bIF\b/ .if/;
        s/=/==/;
    }

    if ( $n == 2) {
        s/\$/\\/g;
    }
    if ($n == 1) {
        s/\$//g;
        s/label//g;
        $n = 2;
    }
    if ( /MACRO/ ) {
        s/MACRO *\n/.macro/;
        $n=1;
    }
    if ( /\bMEND\b/ ) {
        s/\bMEND\b/.endm/;
        $n=0;
    }

    # ".rdata" doesn't work in 'as' version 2.13.2, as it is ".rodata" there.
    #
    if ( /\bAREA\b/ ) {
        my $align;
        $align = "2";
        if ( /ALIGN=(\d+)/ ) {
            $align = $1;
        }
        if ( /CODE/ ) {
            $nxstack = 1;
        }
        s/^(.+)CODE(.+)READONLY(.*)/ .text/;
        s/^(.+)DATA(.+)READONLY(.*)/ .section .rdata/;
        s/^(.+)\|\|\.data\|\|(.+)/ .data/;
        s/^(.+)\|\|\.bss\|\|(.+)/ .bss/;
        s/$/; .p2align $align/;
        # Enable NEON instructions but don't produce a binary that requires
        # ARMv7. RVCT does not have equivalent directives, so we just do this
        # for all CODE areas.
        if ( /.text/ ) {
            # Separating .arch, .fpu, etc., by semicolons does not work (gas
            # thinks the semicolon is part of the arch name, even when there's
            # whitespace separating them). Sadly this means our line numbers
            # won't match the original source file (we could use the .line
            # directive, which is documented to be obsolete, but then gdb will
            # show the wrong line in the translated source file).
            s/$/; .arch armv7-a\n .fpu neon\n .object_arch armv4t/ unless ($apple);
        }
    }

    s/\|\|\.constdata\$(\d+)\|\|/.L_CONST$1/;       # ||.constdata$3||
    s/\|\|\.bss\$(\d+)\|\|/.L_BSS$1/;               # ||.bss$2||
    s/\|\|\.data\$(\d+)\|\|/.L_DATA$1/;             # ||.data$2||
    s/\|\|([a-zA-Z0-9_]+)\@([a-zA-Z0-9_]+)\|\|/@ $&/;
    s/^(\s+)\%(\s)/ .space $1/;

    s/\|(.+)\.(\d+)\|/\.$1_$2/;                     # |L80.123| -> .L80_123

    s/\bCODE32\b/.code 32/ && do {$thumb = 0};
    s/\bCODE16\b/.code 16/ && do {$thumb = 1};
    if (/\bPROC\b/) {
        my $prefix;
        my $proc;
        /^([A-Za-z_\.]\w+)\b/;
        $proc = $1;
        $prefix = "";
        if ($proc) {
            $prefix = $prefix.sprintf("\t.type\t%s, %%function; ",$proc) unless ($apple);
            # Make sure $prefix isn't empty here (for the $apple case).
            # We handle mangling the label here, make sure it doesn't match
            # the label handling below (if $prefix would be empty).
            # Fix: the previous code unconditionally reset $prefix to "; ",
            # discarding the .type directive built just above (a dead store);
            # only fall back when $prefix is still empty (the $apple case).
            $prefix = "; " if ($prefix eq "");
            push(@proc_stack, $proc);
            s/^[A-Za-z_\.]\w+/$symprefix$&:/;
        }
        $prefix = $prefix."\t.thumb_func; " if ($thumb);
        s/\bPROC\b/@ $&/;
        $_ = $prefix.$_;
    }
    s/^(\s*)(S|Q|SH|U|UQ|UH)ASX\b/$1$2ADDSUBX/;
    s/^(\s*)(S|Q|SH|U|UQ|UH)SAX\b/$1$2SUBADDX/;
    if (/\bENDP\b/) {
        my $proc;
        s/\bENDP\b/@ $&/;
        $proc = pop(@proc_stack);
        $_ = "\t.size $proc, .-$proc".$_ if ($proc && !$apple);
    }
    s/\bSUBT\b/@ $&/;
    s/\bDATA\b/@ $&/;   # DATA directive is deprecated -- Asm guide, p.7-25
    s/\bKEEP\b/@ $&/;
    s/\bEXPORTAS\b/@ $&/;
    s/\|\|(.)+\bEQU\b/@ $&/;
    s/\|\|([\w\$]+)\|\|/$1/;
    s/\bENTRY\b/@ $&/;
    s/\bASSERT\b/@ $&/;
    s/\bGBLL\b/@ $&/;
    s/\bGBLA\b/@ $&/;
    s/^\W+OPT\b/@ $&/;
    s/:OR:/|/g;
    s/:SHL:/<</g;
    s/:SHR:/>>/g;
    s/:AND:/&/g;
    s/:LAND:/&&/g;
    s/CPSR/cpsr/;
    s/SPSR/spsr/;
    s/ALIGN$/.balign 4/;
    s/ALIGN\s+([0-9x]+)$/.balign $1/;
    s/psr_cxsf/psr_all/;
    s/LTORG/.ltorg/;
    s/^([A-Za-z_]\w*)[ \t]+EQU/ .set $1,/;
    s/^([A-Za-z_]\w*)[ \t]+SETL/ .set $1,/;
    s/^([A-Za-z_]\w*)[ \t]+SETA/ .set $1,/;
    s/^([A-Za-z_]\w*)[ \t]+\*/ .set $1,/;

    # {PC} + 0xdeadfeed  -->  . + 0xdeadfeed
    s/\{PC\} \+/ \. +/;

    # Single hex constant on the line !
    #
    # >>> NOTE <<<
    #   Double-precision floats in gcc are always mixed-endian, which means
    #   bytes in two words are little-endian, but words are big-endian.
    #   So, 0x0000deadfeed0000 would be stored as 0x0000dead at low address
    #   and 0xfeed0000 at high address.
    #
    s/\bDCFD\b[ \t]+0x([a-fA-F0-9]{8})([a-fA-F0-9]{8})/.long 0x$1, 0x$2/;
    # Only decimal constants on the line, no hex !
    s/\bDCFD\b[ \t]+([0-9\.\-]+)/.double $1/;

    # Single hex constant on the line !
    # s/\bDCFS\b[ \t]+0x([a-f0-9]{8})([a-f0-9]{8})/.long 0x$1, 0x$2/;
    # Only decimal constants on the line, no hex !
    # s/\bDCFS\b[ \t]+([0-9\.\-]+)/.double $1/;
    s/\bDCFS[ \t]+0x/.word 0x/;
    s/\bDCFS\b/.float/;

    s/^([A-Za-z_]\w*)[ \t]+DCD/$1 .word/;
    s/\bDCD\b/.word/;
    s/^([A-Za-z_]\w*)[ \t]+DCW/$1 .short/;
    s/\bDCW\b/.short/;
    s/^([A-Za-z_]\w*)[ \t]+DCB/$1 .byte/;
    s/\bDCB\b/.byte/;
    s/^([A-Za-z_]\w*)[ \t]+\%/.comm $1,/;
    s/^[A-Za-z_\.]\w+/$&:/;
    s/^(\d+)/$1:/;
    s/\%(\d+)/$1b_or_f/;
    s/\%[Bb](\d+)/$1b/;
    s/\%[Ff](\d+)/$1f/;
    s/\%[Ff][Tt](\d+)/$1f/;
    s/&([\dA-Fa-f]+)/0x$1/;

    # Convert 2_xxxx binary literals to hex, nibble by nibble.
    if ( /\b2_[01]+\b/ ) {
        s/\b2_([01]+)\b/conv$1&&&&/g;
        while ( /[01][01][01][01]&&&&/ ) {
            s/0000&&&&/&&&&0/g;
            s/0001&&&&/&&&&1/g;
            s/0010&&&&/&&&&2/g;
            s/0011&&&&/&&&&3/g;
            s/0100&&&&/&&&&4/g;
            s/0101&&&&/&&&&5/g;
            s/0110&&&&/&&&&6/g;
            s/0111&&&&/&&&&7/g;
            s/1000&&&&/&&&&8/g;
            s/1001&&&&/&&&&9/g;
            s/1010&&&&/&&&&A/g;
            s/1011&&&&/&&&&B/g;
            s/1100&&&&/&&&&C/g;
            s/1101&&&&/&&&&D/g;
            s/1110&&&&/&&&&E/g;
            s/1111&&&&/&&&&F/g;
        }
        s/000&&&&/&&&&0/g;
        s/001&&&&/&&&&1/g;
        s/010&&&&/&&&&2/g;
        s/011&&&&/&&&&3/g;
        s/100&&&&/&&&&4/g;
        s/101&&&&/&&&&5/g;
        s/110&&&&/&&&&6/g;
        s/111&&&&/&&&&7/g;
        s/00&&&&/&&&&0/g;
        s/01&&&&/&&&&1/g;
        s/10&&&&/&&&&2/g;
        s/11&&&&/&&&&3/g;
        s/0&&&&/&&&&0/g;
        s/1&&&&/&&&&1/g;
        s/conv&&&&/0x/g;
    }

    if ( /commandline/) {
        if( /-bigend/) {
            $bigend=1;
        }
    }

    if ( /\bDCDU\b/ ) {
        my $cmd=$_;
        my $value;
        my $prefix;
        my $w1;
        my $w2;
        my $w3;
        my $w4;

        s/\s+DCDU\b/@ $&/;

        $cmd =~ /\bDCDU\b\s+0x(\d+)/;
        $value = $1;
        $value =~ /(\w\w)(\w\w)(\w\w)(\w\w)/;
        $w1 = $1;
        $w2 = $2;
        $w3 = $3;
        $w4 = $4;

        if( $bigend ne "") {
            # big endian
            $prefix = "\t.byte\t0x".$w1.";".
                      "\t.byte\t0x".$w2.";".
                      "\t.byte\t0x".$w3.";".
                      "\t.byte\t0x".$w4."; ";
        } else {
            # little endian
            $prefix = "\t.byte\t0x".$w4.";".
                      "\t.byte\t0x".$w3.";".
                      "\t.byte\t0x".$w2.";".
                      "\t.byte\t0x".$w1."; ";
        }
        $_=$prefix.$_;
    }

    if ( /\badrl\b/i ) {
        s/\badrl\s+(\w+)\s*,\s*(\w+)/ldr $1,=$2/i;
        $addPadding = 1;
    }
    s/\bEND\b/@ END/;
} continue {
    printf ("%s", $_) if $printit;
    if ($addPadding != 0) {
        printf (" mov r0,r0\n");
        $addPadding = 0;
    }
}

# If we had a code section, mark that this object doesn't need an executable
# stack.
if ($nxstack && !$apple) {
    printf (" .section\t.note.GNU-stack,\"\",\%\%progbits\n");
}
{ "pile_set_name": "Github" }
/* # shell sort 希尔排序 #核心——通过设置下标增量来将数据分组,然后用插入排序法处理每个分组,再将下标增量减半。 接下来循环调用此过程直到下标增量为0。 又叫缩小增量排序算法,以科学家 DL.Shell 名字命名。 简直美爆了!小步长增量来插入排序时可以利用大步长增量的有序性, 利用了插入排序适合小数据量和基本有序的特点,充分发挥插入排序的优点, 使得整体用来处理大数据量的排序非常不错——虽然比快排还差点。 # runtime - Worst:O(n^2) O(n^1.5) O(n((log^2)n))跟增量大小有关 - Best:O(n) - Average:O(n^(1~2)) # stability 不稳定 */ package shell /* 排序起始点与步长为变量而非常量0和1的时候乃是更为一般化的实现。 */ func insertion(arr []int, start, gap int) { length := len(arr) for traverseVal := start + gap; traverseVal < length; traverseVal += gap { backup := arr[traverseVal] trackVal := traverseVal - gap for trackVal >= 0 && backup < arr[trackVal] { arr[trackVal + gap] = arr[trackVal] trackVal -= gap } arr[trackVal + gap] = backup } } func Sort(arr []int) []int { //设定步长增量 gap := len(arr)/2 //结束条件 for gap > 0 { for pos := 0; pos<gap; pos ++{ insertion(arr, pos, gap) } gap /= 2 } return arr }
{ "pile_set_name": "Github" }
// Submodule type shim: re-exports the `keyBy` declaration from the package
// root so that `import keyBy = require("<pkg>/keyBy")` type-checks.
// `export =` mirrors the CommonJS single-export shape of the runtime module.
// NOTE(review): presumably part of the lodash typings layout (one shim file
// per method) — confirm against the surrounding package.
import { keyBy } from "../index";
export = keyBy;
{ "pile_set_name": "Github" }
# binarydist Package binarydist implements binary diff and patch as described on <http://www.daemonology.net/bsdiff/>. It reads and writes files compatible with the tools there. Documentation at <http://go.pkgdoc.org/github.com/kr/binarydist>.
{ "pile_set_name": "Github" }
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build s390x,linux

// s390x-specific syscall plumbing. The //sys and //sysnb lines below are
// directives consumed by the mksyscall code generator — do not edit their
// wording or spacing casually; they define the generated wrappers.
package unix

import (
	"unsafe"
)

//sys	dup2(oldfd int, newfd int) (err error)
//sysnb	EpollCreate(size int) (fd int, err error)
//sys	EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error)
//sys	Fadvise(fd int, offset int64, length int64, advice int) (err error) = SYS_FADVISE64
//sys	Fchown(fd int, uid int, gid int) (err error)
//sys	Fstat(fd int, stat *Stat_t) (err error)
//sys	Fstatat(dirfd int, path string, stat *Stat_t, flags int) (err error) = SYS_NEWFSTATAT
//sys	Fstatfs(fd int, buf *Statfs_t) (err error)
//sys	Ftruncate(fd int, length int64) (err error)
//sysnb	Getegid() (egid int)
//sysnb	Geteuid() (euid int)
//sysnb	Getgid() (gid int)
//sysnb	Getrlimit(resource int, rlim *Rlimit) (err error)
//sysnb	Getuid() (uid int)
//sysnb	InotifyInit() (fd int, err error)
//sys	Lchown(path string, uid int, gid int) (err error)
//sys	Lstat(path string, stat *Stat_t) (err error)
//sys	Pause() (err error)
//sys	Pread(fd int, p []byte, offset int64) (n int, err error) = SYS_PREAD64
//sys	Pwrite(fd int, p []byte, offset int64) (n int, err error) = SYS_PWRITE64
//sys	Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error)
//sys	Seek(fd int, offset int64, whence int) (off int64, err error) = SYS_LSEEK
//sys	Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error)
//sys	sendfile(outfd int, infd int, offset *int64, count int) (written int, err error)
//sys	setfsgid(gid int) (prev int, err error)
//sys	setfsuid(uid int) (prev int, err error)
//sysnb	Setregid(rgid int, egid int) (err error)
//sysnb	Setresgid(rgid int, egid int, sgid int) (err error)
//sysnb	Setresuid(ruid int, euid int, suid int) (err error)
//sysnb	Setrlimit(resource int, rlim *Rlimit) (err error)
//sysnb	Setreuid(ruid int, euid int) (err error)
//sys	Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int64, err error)
//sys	Stat(path string, stat *Stat_t) (err error)
//sys	Statfs(path string, buf *Statfs_t) (err error)
//sys	SyncFileRange(fd int, off int64, n int64, flags int) (err error)
//sys	Truncate(path string, length int64) (err error)
//sys	Ustat(dev int, ubuf *Ustat_t) (err error)
//sysnb	getgroups(n int, list *_Gid_t) (nn int, err error)
//sysnb	setgroups(n int, list *_Gid_t) (err error)
//sys	futimesat(dirfd int, path string, times *[2]Timeval) (err error)
//sysnb	Gettimeofday(tv *Timeval) (err error)

// Time returns the current time in seconds since the epoch, obtained via
// Gettimeofday. If t is non-nil the value is also stored through it.
func Time(t *Time_t) (tt Time_t, err error) {
	var tv Timeval
	err = Gettimeofday(&tv)
	if err != nil {
		return 0, err
	}
	if t != nil {
		*t = Time_t(tv.Sec)
	}
	return Time_t(tv.Sec), nil
}

//sys	Utime(path string, buf *Utimbuf) (err error)
//sys	utimes(path string, times *[2]Timeval) (err error)

// setTimespec builds a Timespec from separate seconds/nanoseconds values.
func setTimespec(sec, nsec int64) Timespec {
	return Timespec{Sec: sec, Nsec: nsec}
}

// setTimeval builds a Timeval from separate seconds/microseconds values.
func setTimeval(sec, usec int64) Timeval {
	return Timeval{Sec: sec, Usec: usec}
}

//sysnb	pipe2(p *[2]_C_int, flags int) (err error)

// Pipe creates a pipe and stores the read/write fds in p[0] and p[1].
// p must have exactly two elements.
func Pipe(p []int) (err error) {
	if len(p) != 2 {
		return EINVAL
	}
	var pp [2]_C_int
	err = pipe2(&pp, 0) // pipe2 is the same as pipe when flags are set to 0.
	p[0] = int(pp[0])
	p[1] = int(pp[1])
	return
}

// Pipe2 is Pipe with the caller-supplied pipe2(2) flags applied.
func Pipe2(p []int, flags int) (err error) {
	if len(p) != 2 {
		return EINVAL
	}
	var pp [2]_C_int
	err = pipe2(&pp, flags)
	p[0] = int(pp[0])
	p[1] = int(pp[1])
	return
}

// Ioperm is not available on this port; it always returns ENOSYS.
func Ioperm(from int, num int, on int) (err error) {
	return ENOSYS
}

// Iopl is not available on this port; it always returns ENOSYS.
func Iopl(level int) (err error) {
	return ENOSYS
}

// PC returns the program counter from the PSW portion of the ptrace regs.
func (r *PtraceRegs) PC() uint64 { return r.Psw.Addr }

// SetPC sets the program counter in the PSW portion of the ptrace regs.
func (r *PtraceRegs) SetPC(pc uint64) { r.Psw.Addr = pc }

func (iov *Iovec) SetLen(length int) {
	iov.Len = uint64(length)
}

func (msghdr *Msghdr) SetControllen(length int) {
	msghdr.Controllen = uint64(length)
}

func (msghdr *Msghdr) SetIovlen(length int) {
	msghdr.Iovlen = uint64(length)
}

func (cmsg *Cmsghdr) SetLen(length int) {
	cmsg.Len = uint64(length)
}

// Linux on s390x uses the old mmap interface, which requires arguments to be passed in a struct.
// mmap2 also requires arguments to be passed in a struct; it is currently not exposed in <asm/unistd.h>.
func mmap(addr uintptr, length uintptr, prot int, flags int, fd int, offset int64) (xaddr uintptr, err error) {
	mmap_args := [6]uintptr{addr, length, uintptr(prot), uintptr(flags), uintptr(fd), uintptr(offset)}
	r0, _, e1 := Syscall(SYS_MMAP, uintptr(unsafe.Pointer(&mmap_args[0])), 0, 0)
	xaddr = uintptr(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// On s390x Linux, all the socket calls go through an extra indirection.
// The arguments to the underlying system call (SYS_SOCKETCALL) are the
// number below and a pointer to an array of uintptr.
// Sub-call numbers for SYS_SOCKETCALL; see linux/net.h.
const (
	netSocket      = 1
	netBind        = 2
	netConnect     = 3
	netListen      = 4
	netAccept      = 5
	netGetSockName = 6
	netGetPeerName = 7
	netSocketPair  = 8
	netSend        = 9
	netRecv        = 10
	netSendTo      = 11
	netRecvFrom    = 12
	netShutdown    = 13
	netSetSockOpt  = 14
	netGetSockOpt  = 15
	netSendMsg     = 16
	netRecvMsg     = 17
	netAccept4     = 18
	netRecvMMsg    = 19
	netSendMMsg    = 20
)

// Each wrapper below packs its arguments into a uintptr array and invokes
// SYS_SOCKETCALL with the matching sub-call number from the const block.

func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (int, error) {
	args := [3]uintptr{uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))}
	fd, _, err := Syscall(SYS_SOCKETCALL, netAccept, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return 0, err
	}
	return int(fd), nil
}

func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (int, error) {
	args := [4]uintptr{uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags)}
	fd, _, err := Syscall(SYS_SOCKETCALL, netAccept4, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return 0, err
	}
	return int(fd), nil
}

func getsockname(s int, rsa *RawSockaddrAny, addrlen *_Socklen) error {
	args := [3]uintptr{uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))}
	_, _, err := RawSyscall(SYS_SOCKETCALL, netGetSockName, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

func getpeername(s int, rsa *RawSockaddrAny, addrlen *_Socklen) error {
	args := [3]uintptr{uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))}
	_, _, err := RawSyscall(SYS_SOCKETCALL, netGetPeerName, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

func socketpair(domain int, typ int, flags int, fd *[2]int32) error {
	args := [4]uintptr{uintptr(domain), uintptr(typ), uintptr(flags), uintptr(unsafe.Pointer(fd))}
	_, _, err := RawSyscall(SYS_SOCKETCALL, netSocketPair, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

func bind(s int, addr unsafe.Pointer, addrlen _Socklen) error {
	args := [3]uintptr{uintptr(s), uintptr(addr), uintptr(addrlen)}
	_, _, err := Syscall(SYS_SOCKETCALL, netBind, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

func connect(s int, addr unsafe.Pointer, addrlen _Socklen) error {
	args := [3]uintptr{uintptr(s), uintptr(addr), uintptr(addrlen)}
	_, _, err := Syscall(SYS_SOCKETCALL, netConnect, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

func socket(domain int, typ int, proto int) (int, error) {
	args := [3]uintptr{uintptr(domain), uintptr(typ), uintptr(proto)}
	fd, _, err := RawSyscall(SYS_SOCKETCALL, netSocket, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return 0, err
	}
	return int(fd), nil
}

func getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) error {
	args := [5]uintptr{uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen))}
	_, _, err := Syscall(SYS_SOCKETCALL, netGetSockOpt, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

func setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) error {
	args := [4]uintptr{uintptr(s), uintptr(level), uintptr(name), uintptr(val)}
	_, _, err := Syscall(SYS_SOCKETCALL, netSetSockOpt, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

func recvfrom(s int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (int, error) {
	// An empty buffer yields a nil base pointer; the kernel accepts that
	// with a zero length.
	var base uintptr
	if len(p) > 0 {
		base = uintptr(unsafe.Pointer(&p[0]))
	}
	args := [6]uintptr{uintptr(s), base, uintptr(len(p)), uintptr(flags), uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(fromlen))}
	n, _, err := Syscall(SYS_SOCKETCALL, netRecvFrom, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return 0, err
	}
	return int(n), nil
}

func sendto(s int, p []byte, flags int, to unsafe.Pointer, addrlen _Socklen) error {
	var base uintptr
	if len(p) > 0 {
		base = uintptr(unsafe.Pointer(&p[0]))
	}
	args := [6]uintptr{uintptr(s), base, uintptr(len(p)), uintptr(flags), uintptr(to), uintptr(addrlen)}
	_, _, err := Syscall(SYS_SOCKETCALL, netSendTo, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

func recvmsg(s int, msg *Msghdr, flags int) (int, error) {
	args := [3]uintptr{uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)}
	n, _, err := Syscall(SYS_SOCKETCALL, netRecvMsg, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return 0, err
	}
	return int(n), nil
}

func sendmsg(s int, msg *Msghdr, flags int) (int, error) {
	args := [3]uintptr{uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)}
	n, _, err := Syscall(SYS_SOCKETCALL, netSendMsg, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return 0, err
	}
	return int(n), nil
}

func Listen(s int, n int) error {
	args := [2]uintptr{uintptr(s), uintptr(n)}
	_, _, err := Syscall(SYS_SOCKETCALL, netListen, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

func Shutdown(s, how int) error {
	args := [2]uintptr{uintptr(s), uintptr(how)}
	_, _, err := Syscall(SYS_SOCKETCALL, netShutdown, uintptr(unsafe.Pointer(&args)), 0)
	if err != 0 {
		return err
	}
	return nil
}

//sys	poll(fds *PollFd, nfds int, timeout int) (n int, err error)

// Poll wraps poll(2); a nil fds pointer is passed when the slice is empty.
func Poll(fds []PollFd, timeout int) (n int, err error) {
	if len(fds) == 0 {
		return poll(nil, 0, timeout)
	}
	return poll(&fds[0], len(fds), timeout)
}

//sys	kexecFileLoad(kernelFd int, initrdFd int, cmdlineLen int, cmdline string, flags int) (err error)

func KexecFileLoad(kernelFd int, initrdFd int, cmdline string, flags int) error {
	cmdlineLen := len(cmdline)
	if cmdlineLen > 0 {
		// Account for the additional NULL byte added by
		// BytePtrFromString in kexecFileLoad. The kexec_file_load
		// syscall expects a NULL-terminated string.
		cmdlineLen++
	}
	return kexecFileLoad(kernelFd, initrdFd, cmdlineLen, cmdline, flags)
}
{ "pile_set_name": "Github" }
{ "symbol": "MVL", "name": "Mass Vehicle Ledger Token", "type": "ERC20", "address": "0xA849EaaE994fb86Afa73382e9Bd88c2B6b18Dc71", "ens_address": "", "decimals": 18, "website": "", "logo": { "src": "", "width": "", "height": "", "ipfs_hash": "" }, "support": { "email": "", "url": "" }, "social": { "blog": "", "chat": "", "facebook": "", "forum": "", "github": "", "gitter": "", "instagram": "", "linkedin": "", "reddit": "", "slack": "", "telegram": "", "twitter": "", "youtube": "" } }
{ "pile_set_name": "Github" }
<?php

/**
 * Fixture with a single deliberately empty test method: it performs no
 * work and throws nothing, so it can serve as the "does not throw"
 * counterpart in exception-expectation scenarios.
 */
class ThrowNoExceptionTestCase extends PHPUnit_Framework_TestCase
{
    // Intentionally empty: must complete without raising any exception.
    public function test()
    {
    }
}
{ "pile_set_name": "Github" }
// +build linux // Copyright 2016-2017 Kinvolk // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package elf import ( "fmt" "io/ioutil" "regexp" "strconv" "strings" "syscall" ) var versionRegex = regexp.MustCompile(`^(\d+)\.(\d+).(\d+).*$`) // KernelVersionFromReleaseString converts a release string with format // 4.4.2[-1] to a kernel version number in LINUX_VERSION_CODE format. // That is, for kernel "a.b.c", the version number will be (a<<16 + b<<8 + c) func KernelVersionFromReleaseString(releaseString string) (uint32, error) { versionParts := versionRegex.FindStringSubmatch(releaseString) if len(versionParts) != 4 { return 0, fmt.Errorf("got invalid release version %q (expected format '4.3.2-1')", releaseString) } major, err := strconv.Atoi(versionParts[1]) if err != nil { return 0, err } minor, err := strconv.Atoi(versionParts[2]) if err != nil { return 0, err } patch, err := strconv.Atoi(versionParts[3]) if err != nil { return 0, err } out := major*256*256 + minor*256 + patch return uint32(out), nil } func currentVersionUname() (uint32, error) { var buf syscall.Utsname if err := syscall.Uname(&buf); err != nil { return 0, err } releaseString := strings.Trim(utsnameStr(buf.Release[:]), "\x00") return KernelVersionFromReleaseString(releaseString) } func currentVersionUbuntu() (uint32, error) { procVersion, err := ioutil.ReadFile("/proc/version_signature") if err != nil { return 0, err } var u1, u2, releaseString string _, err = fmt.Sscanf(string(procVersion), "%s %s 
%s", &u1, &u2, &releaseString) if err != nil { return 0, err } return KernelVersionFromReleaseString(releaseString) } var debianVersionRegex = regexp.MustCompile(`.* SMP Debian (\d+\.\d+.\d+-\d+) .*`) func currentVersionDebian() (uint32, error) { procVersion, err := ioutil.ReadFile("/proc/version") if err != nil { return 0, err } match := debianVersionRegex.FindStringSubmatch(string(procVersion)) if len(match) != 2 { return 0, fmt.Errorf("failed to get kernel version from /proc/version: %s", procVersion) } return KernelVersionFromReleaseString(match[1]) } // CurrentKernelVersion returns the current kernel version in // LINUX_VERSION_CODE format (see KernelVersionFromReleaseString()) func CurrentKernelVersion() (uint32, error) { // We need extra checks for Debian and Ubuntu as they modify // the kernel version patch number for compatibilty with // out-of-tree modules. Linux perf tools do the same for Ubuntu // systems: https://github.com/torvalds/linux/commit/d18acd15c // // See also: // https://kernel-handbook.alioth.debian.org/ch-versions.html // https://wiki.ubuntu.com/Kernel/FAQ version, err := currentVersionUbuntu() if err == nil { return version, nil } version, err = currentVersionDebian() if err == nil { return version, nil } return currentVersionUname() }
{ "pile_set_name": "Github" }
Docker for OMI
======

These DockerFiles enable building and running omi in a container for some Linux distributions we support. There are two sets: release and nightly.

This requires an up-to-date version of Docker, such as 1.12. It also expects you to be able to run Docker without `sudo`. Please follow [Docker's official instructions][install] to install `docker` correctly.

[install]: https://docs.docker.com/engine/installation/

Build Release OMI docker
-------

The release containers derive from the official distribution image, such as `centos:7`, then install dependencies, and finally install the omi package from the Microsoft Repo. The commands to build the release OMI Docker images are as follows:

```
git clone https://github.com/Microsoft/omi
cd omi/docker
docker build --build-arg rootpassword=newpassword -t "microsoft/omi:ubuntu16.04" release/ubuntu16.04/
docker build --build-arg rootpassword=newpassword -t "microsoft/omi:ubuntu14.04" release/ubuntu14.04/
docker build --build-arg rootpassword=newpassword -t "microsoft/omi:centos7" release/centos7/
```

Build Nightly OMI docker
-------

The nightly containers derive from the official distribution image, such as `centos:7`, then install dependencies, and finally build the latest omi code and install it. The commands to build the nightly OMI Docker images are as follows:

```
git clone https://github.com/Microsoft/omi
cd omi/docker
docker build --build-arg fork=Microsoft --build-arg branch=master --build-arg rootpassword=newpassword -t "microsoft/omi:ubuntu16.04" nightly/ubuntu16.04/
docker build --build-arg fork=Microsoft --build-arg branch=master --build-arg rootpassword=newpassword -t "microsoft/omi:ubuntu14.04" nightly/ubuntu14.04/
docker build --build-arg fork=Microsoft --build-arg branch=master --build-arg rootpassword=newpassword -t "microsoft/omi:centos7" nightly/centos7/
```

Build Command Usage
-------

Variables following --build-arg:

* `fork`: the fork to clone in nightly builds (default: `Microsoft`)
  * Set `fork=JumpingYang001` 
after --build-arg to clone https://github.com/JumpingYang001/omi` * `branch`: the branch to checkout in nightly builds (default: `master`) * Set `branch=feature-A` after --build-arg to checkout the `feature-A` branch * `rootpassword`: container's root password to be set in nightly or release builds (default: `newpassword`) * Set `rootpassword=Pa$$word` after --build-arg to set `Pa$$word` as container's root password Variable following with -t: * `microsoft/omi:ubuntu16.04`: the image tag you want to build out for the omi docker * `microsoft/omi:ubuntu14.04`: the image tag you want to build out for the omi docker * `microsoft/omi:centos7`: the image tag you want to build out for the omi docker Run OMI docker ------- You can run below commands to start up a omi docker container: ``` docker run -p 1001:5985 -p 1002:5986 microsoft/omi:ubuntu16.04 docker run -p 1003:5985 -p 1004:5986 microsoft/omi:ubuntu14.04 docker run -p 1005:5985 -p 1006:5986 microsoft/omi:centos7 ``` Enter docker container to check any issues ------- ``` docker exec -it <container_id|container_name> /bin/bash ``` How to Collect Docker Container Performance Metrics using OMI/OMI provider inside Docker Container ------- - To collect containers’ performance metrics (including or not including the container runs OMI/OMI provider), you can use [ContainerStatisticsProvider], or you can implement a similar provider. If you wish to use [ContainerStatisticsProvider], this ships with [OMS]. You can either install [OMS] itself, or you can extract [ContainerStatisticsProvider] from the [OMS] bundle and install that provider independently. - To write your own OMI docker provider, you can use either pseudo-files or the docker API. For details about docker API, you can refer to [Docker API]. For the different options for collecting docker performance metrics, you can refer to [How to collect docker metrics]. 
- OMI/OMI provider can collect containers’ (including or not including the container runs OMI/OMI provider) performance metrics inside a normal container or a privileged container depending on your host platform. If your host platform is RHEL or CentOS, you need to run OMI/OMI provider in a privileged container. If you use docker API, you need to run a privileged container with --privileged=true, or you need to mount /cgroup with --volume=/cgroup:/cgroup:ro if you use pseudo-files. For other host platforms, you can run OMI/OMI provider in a normal container to collect all containers’ performance metrics. - Multiple OMI instances can run in different containers if they are mapped to different ports on the host system. [OMS]: https://github.com/Microsoft/OMS-Agent-for-Linux [Docker API]: https://docs.docker.com/engine/api/get-started/ [How to collect docker metrics]:https://www.datadoghq.com/blog/how-to-collect-docker-metrics [ContainerStatisticsProvider]: https://github.com/Microsoft/Docker-Provider/blob/master/source/code/providers/Container_ContainerStatistics_Class_Provider.cpp How to Collect Virtual Machine Host Performance Metrics using OMI/OMI provider inside Docker Container ------- - OMI/OMI provider can collect virtual machine host's performance metrics inside a normal container. - To be able to collect host performance metrics, you can use [SCXProvider], or you can implement a similar provider. If you wish to use [SCXProvider], this ships with [OMS]. You can either install [OMS] itself, or you can extract [SCXProvider] from the [OMS] bundle and install that provider independently. [OMS]: https://github.com/Microsoft/OMS-Agent-for-Linux [SCXProvider]: https://github.com/Microsoft/SCXcore
{ "pile_set_name": "Github" }
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <mesos/type_utils.hpp>

#include <process/collect.hpp>
#include <process/delay.hpp>
#include <process/process.hpp>
#include <process/reap.hpp>

#ifdef __WINDOWS__
#include <process/windows/jobobject.hpp>
#endif // __WINDOWS__

#include <stout/unreachable.hpp>

#include <stout/os/killtree.hpp>

#ifdef __linux__
#include "linux/systemd.hpp"
#endif // __linux__

#include "mesos/resources.hpp"

#include "slave/containerizer/mesos/launcher.hpp"

using namespace process;

using std::map;
using std::string;
using std::vector;

using mesos::slave::ContainerState;

namespace mesos {
namespace internal {
namespace slave {

// Factory for the subprocess launcher. This launcher requires no
// flag-driven configuration, so `flags` is accepted only for interface
// parity with the other launchers and is intentionally unused.
Try<Launcher*> SubprocessLauncher::create(const Flags& flags)
{
  return new SubprocessLauncher();
}


// Rebuilds the containerId -> pid bookkeeping from checkpointed
// container state after an agent restart. Fails if two containers
// claim the same pid, since the launcher could not then tell their
// process trees apart.
Future<hashset<ContainerID>> SubprocessLauncher::recover(
    const vector<ContainerState>& states)
{
  foreach (const ContainerState& state, states) {
    const ContainerID& containerId = state.container_id();
    pid_t pid = static_cast<pid_t>(state.pid());

    if (pids.contains_value(pid)) {
      // This should (almost) never occur. There is the possibility
      // that a new executor is launched with the same pid as one that
      // just exited (highly unlikely) and the slave dies after the
      // new executor is launched but before it hears about the
      // termination of the earlier executor (also unlikely).
      // Regardless, the launcher can't do anything sensible so this
      // is considered an error.
      return Failure("Detected duplicate pid " + stringify(pid) +
                     " for container " + stringify(containerId));
    }

    pids.put(containerId, pid);
  }

  // NOTE(review): returns an empty set — this launcher never reports
  // orphaned containers; presumably orphan cleanup happens elsewhere.
  return hashset<ContainerID>();
}


// Launches `path argv...` as a plain child process for `containerId`
// and records its pid. Namespace isolation is not supported by this
// launcher, so any non-zero namespace request is rejected up front.
// Returns the child's pid on success.
Try<pid_t> SubprocessLauncher::fork(
    const ContainerID& containerId,
    const string& path,
    const vector<string>& argv,
    const mesos::slave::ContainerIO& containerIO,
    const flags::FlagsBase* flags,
    const Option<map<string, string>>& environment,
    const Option<int>& enterNamespaces,
    const Option<int>& cloneNamespaces,
    const vector<int_fd>& whitelistFds)
{
  if (enterNamespaces.isSome() && enterNamespaces.get() != 0) {
    return Error("Subprocess launcher does not support entering namespaces");
  }

  if (cloneNamespaces.isSome() && cloneNamespaces.get() != 0) {
    return Error("Subprocess launcher does not support cloning namespaces");
  }

  // One process tree per container: refuse a second fork.
  if (pids.contains(containerId)) {
    return Error("Process has already been forked for container " +
                 stringify(containerId));
  }

  vector<process::Subprocess::ParentHook> parentHooks;

#ifdef __linux__
  // If we are on systemd, then extend the life of the child. Any
  // grandchildren's lives will also be extended.
  if (systemd::enabled()) {
    parentHooks.emplace_back(Subprocess::ParentHook(
        &systemd::mesos::extendLifetime));
  }
#elif defined(__WINDOWS__)
  // If we are on Windows, then ensure the child is placed inside a
  // new job object.
  parentHooks.emplace_back(Subprocess::ParentHook::CREATE_JOB());
#endif // __linux__

  vector<Subprocess::ChildHook> childHooks;
#ifndef __WINDOWS__
  // Detach the child into its own session so the whole session can be
  // killed as a unit in `destroy()`.
  childHooks.push_back(Subprocess::ChildHook::SETSID());
#endif // __WINDOWS__

  Try<Subprocess> child = subprocess(
      path,
      argv,
      containerIO.in,
      containerIO.out,
      containerIO.err,
      flags,
      environment,
      None(),
      parentHooks,
      childHooks,
      whitelistFds);

  if (child.isError()) {
    return Error("Failed to fork a child process: " + child.error());
  }

  LOG(INFO) << "Forked child with pid '" << child->pid()
            << "' for container '" << containerId << "'";

  // Store the pid (session id and process group id).
  pids.put(containerId, child->pid());

  return child->pid();
}


// Forward declaration.
Future<Nothing> _destroy(const Future<Option<int>>& future);


// Kills the container's whole process tree (SIGKILL to session and
// process group) and completes once the child has been reaped.
// Destroying an unknown container is a no-op, not an error.
Future<Nothing> SubprocessLauncher::destroy(const ContainerID& containerId)
{
  LOG(INFO) << "Asked to destroy container " << containerId;

  if (!pids.contains(containerId)) {
    LOG(WARNING) << "Ignored destroy for unknown container " << containerId;
    return Nothing();
  }

  pid_t pid = pids.at(containerId);

  // Kill all processes in the session and process group.
  os::killtree(pid, SIGKILL, true, true);

  pids.erase(containerId);

  // The child process may not have been waited on yet so we'll delay
  // completing destroy until we're sure it has been reaped.
  return process::reap(pid)
    .then(lambda::bind(&_destroy, lambda::_1));
}


// Continuation of `destroy()`: converts a failed/discarded reap into a
// Failure; a ready reap (regardless of exit status) means destroy is done.
Future<Nothing> _destroy(const Future<Option<int>>& future)
{
  if (future.isReady()) {
    return Nothing();
  } else {
    return Failure("Failed to kill all processes: " +
                   (future.isFailed() ? future.failure() : "unknown error"));
  }
}


// Reports the container's executor pid from the launcher's bookkeeping;
// fails if the container is not (or no longer) tracked.
Future<ContainerStatus> SubprocessLauncher::status(
    const ContainerID& containerId)
{
  if (!pids.contains(containerId)) {
    return Failure("Container does not exist!");
  }

  ContainerStatus status;
  status.set_executor_pid(pids[containerId]);

  return status;
}

} // namespace slave {
} // namespace internal {
} // namespace mesos {
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 */

#include <linux/pm_qos.h>
#include <linux/sort.h>

#include "intel_engine_heartbeat.h"
#include "intel_engine_pm.h"
#include "intel_gpu_commands.h"
#include "intel_gt_clock_utils.h"
#include "intel_gt_pm.h"
#include "intel_rc6.h"
#include "selftest_engine_heartbeat.h"
#include "selftest_rps.h"
#include "selftests/igt_flush_test.h"
#include "selftests/igt_spinner.h"
#include "selftests/librapl.h"

/* Try to isolate the impact of cstates from determining frequency response */
#define CPU_LATENCY 0 /* -1 to disable pm_qos, 0 to disable cstates */

/* No-op worker: substituted for rps->work so the tests fully control RPS. */
static void dummy_rps_work(struct work_struct *wrk)
{
}

/* qsort comparator for u64 values (used by sort() below). */
static int cmp_u64(const void *A, const void *B)
{
	const u64 *a = A, *b = B;

	if (*a < *b)
		return -1;
	else if (*a > *b)
		return 1;
	else
		return 0;
}

/* qsort comparator for u32 values. */
static int cmp_u32(const void *A, const void *B)
{
	const u32 *a = A, *b = B;

	if (*a < *b)
		return -1;
	else if (*a > *b)
		return 1;
	else
		return 0;
}

/*
 * Build a self-looping batch that increments a GPR counter as fast as the
 * command streamer will run it. If @srm, each iteration also writes the
 * counter to the last dword of the object so the CPU can read it
 * (*counter); writing MI_BATCH_BUFFER_END to *cancel terminates the loop.
 */
static struct i915_vma *
create_spin_counter(struct intel_engine_cs *engine,
		    struct i915_address_space *vm,
		    bool srm,
		    u32 **cancel,
		    u32 **counter)
{
	enum {
		COUNT,
		INC,
		__NGPR__,
	};
#define CS_GPR(x) GEN8_RING_CS_GPR(engine->mmio_base, x)
	struct drm_i915_gem_object *obj;
	struct i915_vma *vma;
	unsigned long end;
	u32 *base, *cs;
	int loop, i;
	int err;

	obj = i915_gem_object_create_internal(vm->i915, 64 << 10);
	if (IS_ERR(obj))
		return ERR_CAST(obj);

	/* Index of the last u32 in the object; reserved for the SRM target. */
	end = obj->base.size / sizeof(u32) - 1;

	vma = i915_vma_instance(obj, vm, NULL);
	if (IS_ERR(vma)) {
		i915_gem_object_put(obj);
		return vma;
	}

	err = i915_vma_pin(vma, 0, 0, PIN_USER);
	if (err) {
		i915_vma_put(vma);
		return ERR_PTR(err);
	}

	base = i915_gem_object_pin_map(obj, I915_MAP_WC);
	if (IS_ERR(base)) {
		i915_gem_object_put(obj);
		return ERR_CAST(base);
	}
	cs = base;

	/* Zero both GPRs (COUNT, INC), 64b each. */
	*cs++ = MI_LOAD_REGISTER_IMM(__NGPR__ * 2);
	for (i = 0; i < __NGPR__; i++) {
		*cs++ = i915_mmio_reg_offset(CS_GPR(i));
		*cs++ = 0;
		*cs++ = i915_mmio_reg_offset(CS_GPR(i)) + 4;
		*cs++ = 0;
	}

	/* INC = 1: the per-iteration increment. */
	*cs++ = MI_LOAD_REGISTER_IMM(1);
	*cs++ = i915_mmio_reg_offset(CS_GPR(INC));
	*cs++ = 1;

	/* Loop re-entry point (also where *cancel is patched in). */
	loop = cs - base;

	/* Unroll the loop to avoid MI_BB_START stalls impacting measurements */
	for (i = 0; i < 1024; i++) {
		*cs++ = MI_MATH(4);
		*cs++ = MI_MATH_LOAD(MI_MATH_REG_SRCA, MI_MATH_REG(COUNT));
		*cs++ = MI_MATH_LOAD(MI_MATH_REG_SRCB, MI_MATH_REG(INC));
		*cs++ = MI_MATH_ADD;
		*cs++ = MI_MATH_STORE(MI_MATH_REG(COUNT), MI_MATH_REG_ACCU);

		if (srm) {
			*cs++ = MI_STORE_REGISTER_MEM_GEN8;
			*cs++ = i915_mmio_reg_offset(CS_GPR(COUNT));
			*cs++ = lower_32_bits(vma->node.start + end * sizeof(*cs));
			*cs++ = upper_32_bits(vma->node.start + end * sizeof(*cs));
		}
	}

	/* Jump back to the loop entry — runs until *cancel is overwritten. */
	*cs++ = MI_BATCH_BUFFER_START_GEN8;
	*cs++ = lower_32_bits(vma->node.start + loop * sizeof(*cs));
	*cs++ = upper_32_bits(vma->node.start + loop * sizeof(*cs));
	GEM_BUG_ON(cs - base > end);

	i915_gem_object_flush_map(obj);

	*cancel = base + loop;
	*counter = srm ? memset32(base + end, 0, 1) : NULL;
	return vma;
}

/*
 * Poll the actual GPU frequency until it reaches @freq, stabilises
 * (no change over the last 64 samples), or @timeout_ms expires.
 * Returns the last frequency observed.
 */
static u8 wait_for_freq(struct intel_rps *rps, u8 freq, int timeout_ms)
{
	u8 history[64], i;
	unsigned long end;
	int sleep;

	i = 0;
	memset(history, freq, sizeof(history));
	sleep = 20;

	/* The PCU does not change instantly, but drifts towards the goal? */
	end = jiffies + msecs_to_jiffies(timeout_ms);
	do {
		u8 act;

		act = read_cagf(rps);
		if (time_after(jiffies, end))
			return act;

		/* Target acquired */
		if (act == freq)
			return act;

		/* Any change within the last N samples? */
		if (!memchr_inv(history, act, sizeof(history)))
			return act;

		history[i] = act;
		i = (i + 1) % ARRAY_SIZE(history);

		/* Exponential backoff, capped relative to the timeout. */
		usleep_range(sleep, 2 * sleep);
		sleep *= 2;
		if (sleep > timeout_ms * 20)
			sleep = timeout_ms * 20;
	} while (1);
}

/* Request @freq under rps->lock, then wait for the HW to settle on it. */
static u8 rps_set_check(struct intel_rps *rps, u8 freq)
{
	mutex_lock(&rps->lock);
	GEM_BUG_ON(!intel_rps_is_active(rps));
	intel_rps_set(rps, freq);
	GEM_BUG_ON(rps->last_freq != freq);
	mutex_unlock(&rps->lock);

	return wait_for_freq(rps, freq, 50);
}

/* Dump the platform frequency-limit registers for failure diagnosis. */
static void show_pstate_limits(struct intel_rps *rps)
{
	struct drm_i915_private *i915 = rps_to_i915(rps);

	if (IS_BROXTON(i915)) {
		pr_info("P_STATE_CAP[%x]: 0x%08x\n",
			i915_mmio_reg_offset(BXT_RP_STATE_CAP),
			intel_uncore_read(rps_to_uncore(rps),
					  BXT_RP_STATE_CAP));
	} else if (IS_GEN(i915, 9)) {
		pr_info("P_STATE_LIMITS[%x]: 0x%08x\n",
			i915_mmio_reg_offset(GEN9_RP_STATE_LIMITS),
			intel_uncore_read(rps_to_uncore(rps),
					  GEN9_RP_STATE_LIMITS));
	}
}

/*
 * Verify that the RPS evaluation-interval counter ticks at the expected
 * GT clock rate by comparing counted C0 cycles against walltime while a
 * spinner keeps the engine busy.
 */
int live_rps_clock_interval(void *arg)
{
	struct intel_gt *gt = arg;
	struct intel_rps *rps = &gt->rps;
	void (*saved_work)(struct work_struct *wrk);
	struct intel_engine_cs *engine;
	enum intel_engine_id id;
	struct igt_spinner spin;
	int err = 0;

	if (!intel_rps_is_enabled(rps))
		return 0;

	if (igt_spinner_init(&spin, gt))
		return -ENOMEM;

	/* Take over the RPS worker so nothing reclocks behind our back. */
	intel_gt_pm_wait_for_idle(gt);
	saved_work = rps->work.func;
	rps->work.func = dummy_rps_work;

	intel_gt_pm_get(gt);
	intel_rps_disable(&gt->rps);

	intel_gt_check_clock_frequency(gt);

	for_each_engine(engine, gt, id) {
		struct i915_request *rq;
		u32 cycles;
		u64 dt;

		if (!intel_engine_can_store_dword(engine))
			continue;

		st_engine_heartbeat_disable(engine);

		rq = igt_spinner_create_request(&spin,
						engine->kernel_context,
						MI_NOOP);
		if (IS_ERR(rq)) {
			st_engine_heartbeat_enable(engine);
			err = PTR_ERR(rq);
			break;
		}

		i915_request_add(rq);

		if (!igt_wait_for_spinner(&spin, rq)) {
			pr_err("%s: RPS spinner did not start\n",
			       engine->name);
			igt_spinner_end(&spin);
			st_engine_heartbeat_enable(engine);
			intel_gt_set_wedged(engine->gt);
			err = -EIO;
			break;
		}

		intel_uncore_forcewake_get(gt->uncore, FORCEWAKE_ALL);

		intel_uncore_write_fw(gt->uncore, GEN6_RP_CUR_UP_EI, 0);

		/* Set the evaluation interval to infinity! */
		intel_uncore_write_fw(gt->uncore, GEN6_RP_UP_EI, 0xffffffff);
		intel_uncore_write_fw(gt->uncore,
				      GEN6_RP_UP_THRESHOLD, 0xffffffff);

		intel_uncore_write_fw(gt->uncore, GEN6_RP_CONTROL,
				      GEN6_RP_ENABLE | GEN6_RP_UP_BUSY_AVG);

		if (wait_for(intel_uncore_read_fw(gt->uncore,
						  GEN6_RP_CUR_UP_EI),
			     10)) {
			/* Just skip the test; assume lack of HW support */
			pr_notice("%s: rps evaluation interval not ticking\n",
				  engine->name);
			err = -ENODEV;
		} else {
			ktime_t dt_[5];
			u32 cycles_[5];
			int i;

			/* 5 samples of (walltime, EI-cycles) over ~1ms each. */
			for (i = 0; i < 5; i++) {
				preempt_disable();

				dt_[i] = ktime_get();
				cycles_[i] = -intel_uncore_read_fw(gt->uncore, GEN6_RP_CUR_UP_EI);

				udelay(1000);

				dt_[i] = ktime_sub(ktime_get(), dt_[i]);
				cycles_[i] += intel_uncore_read_fw(gt->uncore, GEN6_RP_CUR_UP_EI);

				preempt_enable();
			}

			/* Use the median of both cycle/dt; close enough */
			sort(cycles_, 5, sizeof(*cycles_), cmp_u32, NULL);
			cycles = (cycles_[1] + 2 * cycles_[2] + cycles_[3]) / 4;
			sort(dt_, 5, sizeof(*dt_), cmp_u64, NULL);
			dt = div_u64(dt_[1] + 2 * dt_[2] + dt_[3], 4);
		}

		intel_uncore_write_fw(gt->uncore, GEN6_RP_CONTROL, 0);
		intel_uncore_forcewake_put(gt->uncore, FORCEWAKE_ALL);

		igt_spinner_end(&spin);
		st_engine_heartbeat_enable(engine);

		if (err == 0) {
			u64 time = intel_gt_pm_interval_to_ns(gt, cycles);
			u32 expected =
				intel_gt_ns_to_pm_interval(gt, dt);

			pr_info("%s: rps counted %d C0 cycles [%lldns] in %lldns [%d cycles], using GT clock frequency of %uKHz\n",
				engine->name, cycles, time, dt, expected,
				gt->clock_frequency / 1000);

			/* Accept up to 25% mismatch in either direction. */
			if (10 * time < 8 * dt ||
			    8 * time > 10 * dt) {
				pr_err("%s: rps clock time does not match walltime!\n",
				       engine->name);
				err = -EINVAL;
			}

			if (10 * expected < 8 * cycles ||
			    8 * expected > 10 * cycles) {
				pr_err("%s: walltime does not match rps clock ticks!\n",
				       engine->name);
				err = -EINVAL;
			}
		}

		if (igt_flush_test(gt->i915))
			err = -EIO;

		break; /* once is enough */
	}

	intel_rps_enable(&gt->rps);
	intel_gt_pm_put(gt);

	igt_spinner_fini(&spin);

	intel_gt_pm_wait_for_idle(gt);
	rps->work.func = saved_work;

	if (err == -ENODEV) /* skipped, don't report a fail */
		err = 0;

	return err;
}

/*
 * Step through the frequency range while busy and verify the HW actually
 * runs at each requested frequency (modulo PCU throttling).
 */
int live_rps_control(void *arg)
{
	struct intel_gt *gt = arg;
	struct intel_rps *rps = &gt->rps;
	void (*saved_work)(struct work_struct *wrk);
	struct intel_engine_cs *engine;
	enum intel_engine_id id;
	struct igt_spinner spin;
	int err = 0;

	/*
	 * Check that the actual frequency matches our requested frequency,
	 * to verify our control mechanism. We have to be careful that the
	 * PCU may throttle the GPU in which case the actual frequency used
	 * will be lower than requested.
	 */

	if (!intel_rps_is_enabled(rps))
		return 0;

	if (IS_CHERRYVIEW(gt->i915)) /* XXX fragile PCU */
		return 0;

	if (igt_spinner_init(&spin, gt))
		return -ENOMEM;

	intel_gt_pm_wait_for_idle(gt);
	saved_work = rps->work.func;
	rps->work.func = dummy_rps_work;

	intel_gt_pm_get(gt);
	for_each_engine(engine, gt, id) {
		struct i915_request *rq;
		ktime_t min_dt, max_dt;
		int f, limit;
		int min, max;

		if (!intel_engine_can_store_dword(engine))
			continue;

		st_engine_heartbeat_disable(engine);

		rq = igt_spinner_create_request(&spin,
						engine->kernel_context,
						MI_NOOP);
		if (IS_ERR(rq)) {
			err = PTR_ERR(rq);
			break;
		}

		i915_request_add(rq);

		if (!igt_wait_for_spinner(&spin, rq)) {
			pr_err("%s: RPS spinner did not start\n",
			       engine->name);
			igt_spinner_end(&spin);
			st_engine_heartbeat_enable(engine);
			intel_gt_set_wedged(engine->gt);
			err = -EIO;
			break;
		}

		if (rps_set_check(rps, rps->min_freq) != rps->min_freq) {
			pr_err("%s: could not set minimum frequency [%x], only %x!\n",
			       engine->name, rps->min_freq, read_cagf(rps));
			igt_spinner_end(&spin);
			st_engine_heartbeat_enable(engine);
			show_pstate_limits(rps);
			err = -EINVAL;
			break;
		}

		/* Climb until the PCU refuses to grant a higher frequency. */
		for (f = rps->min_freq + 1; f < rps->max_freq; f++) {
			if (rps_set_check(rps, f) < f)
				break;
		}

		limit = rps_set_check(rps, f);

		if (rps_set_check(rps, rps->min_freq) != rps->min_freq) {
			pr_err("%s: could not restore minimum frequency [%x], only %x!\n",
			       engine->name, rps->min_freq, read_cagf(rps));
			igt_spinner_end(&spin);
			st_engine_heartbeat_enable(engine);
			show_pstate_limits(rps);
			err = -EINVAL;
			break;
		}

		/* Time the min->limit and limit->min transitions. */
		max_dt = ktime_get();
		max = rps_set_check(rps, limit);
		max_dt = ktime_sub(ktime_get(), max_dt);

		min_dt = ktime_get();
		min = rps_set_check(rps, rps->min_freq);
		min_dt = ktime_sub(ktime_get(), min_dt);

		igt_spinner_end(&spin);
		st_engine_heartbeat_enable(engine);

		pr_info("%s: range:[%x:%uMHz, %x:%uMHz] limit:[%x:%uMHz], %x:%x response %lluns:%lluns\n",
			engine->name,
			rps->min_freq, intel_gpu_freq(rps, rps->min_freq),
			rps->max_freq, intel_gpu_freq(rps, rps->max_freq),
			limit, intel_gpu_freq(rps, limit),
			min, max, ktime_to_ns(min_dt), ktime_to_ns(max_dt));

		if (limit == rps->min_freq) {
			pr_err("%s: GPU throttled to minimum!\n",
			       engine->name);
			show_pstate_limits(rps);
			err = -ENODEV;
			break;
		}

		if (igt_flush_test(gt->i915)) {
			err = -EIO;
			break;
		}
	}
	intel_gt_pm_put(gt);

	igt_spinner_fini(&spin);

	intel_gt_pm_wait_for_idle(gt);
	rps->work.func = saved_work;

	return err;
}

/* Dump the PCU's GPU/CPU/ring frequency table for failure diagnosis. */
static void show_pcu_config(struct intel_rps *rps)
{
	struct drm_i915_private *i915 = rps_to_i915(rps);
	unsigned int max_gpu_freq, min_gpu_freq;
	intel_wakeref_t wakeref;
	int gpu_freq;

	if (!HAS_LLC(i915))
		return;

	min_gpu_freq = rps->min_freq;
	max_gpu_freq = rps->max_freq;
	if (INTEL_GEN(i915) >= 9) {
		/* Convert GT frequency to 50 HZ units */
		min_gpu_freq /= GEN9_FREQ_SCALER;
		max_gpu_freq /= GEN9_FREQ_SCALER;
	}

	wakeref = intel_runtime_pm_get(rps_to_uncore(rps)->rpm);

	pr_info("%5s %5s %5s\n", "GPU", "eCPU", "eRing");
	for (gpu_freq = min_gpu_freq; gpu_freq <= max_gpu_freq; gpu_freq++) {
		int ia_freq = gpu_freq;

		sandybridge_pcode_read(i915,
				       GEN6_PCODE_READ_MIN_FREQ_TABLE,
				       &ia_freq, NULL);

		pr_info("%5d %5d %5d\n",
			gpu_freq * 50,
			((ia_freq >> 0) & 0xff) * 100,
			((ia_freq >> 8) & 0xff) * 100);
	}

	intel_runtime_pm_put(rps_to_uncore(rps)->rpm, wakeref);
}

/* Sample the memory-visible counter over ~duration_ms; returns counts/s. */
static u64 __measure_frequency(u32 *cntr, int duration_ms)
{
	u64 dc, dt;

	dt = ktime_get();
	dc = READ_ONCE(*cntr);
	usleep_range(1000 * duration_ms, 2000 * duration_ms);
	dc = READ_ONCE(*cntr) - dc;
	dt = ktime_get() - dt;

	return div64_u64(1000 * 1000 * dc, dt);
}

/*
 * Set *freq, take 5 counter-rate samples and return their triangle-
 * filtered median; *freq is updated to the average achieved frequency.
 */
static u64 measure_frequency_at(struct intel_rps *rps, u32 *cntr, int *freq)
{
	u64 x[5];
	int i;

	*freq = rps_set_check(rps, *freq);
	for (i = 0; i < 5; i++)
		x[i] = __measure_frequency(cntr, 2);
	*freq = (*freq + read_cagf(rps)) / 2;

	/* A simple triangle filter for better result stability */
	sort(x, 5, sizeof(*x), cmp_u64, NULL);
	return div_u64(x[1] + 2 * x[2] + x[3], 4);
}

/* As __measure_frequency, but reading the CS_GPR register via mmio. */
static u64 __measure_cs_frequency(struct intel_engine_cs *engine,
				  int duration_ms)
{
	u64 dc, dt;

	dt = ktime_get();
	dc = intel_uncore_read_fw(engine->uncore, CS_GPR(0));
	usleep_range(1000 * duration_ms, 2000 * duration_ms);
	dc = intel_uncore_read_fw(engine->uncore, CS_GPR(0)) - dc;
	dt = ktime_get() - dt;

	return div64_u64(1000 * 1000 * dc, dt);
}

/* Register-read variant of measure_frequency_at(). */
static u64 measure_cs_frequency_at(struct intel_rps *rps,
				   struct intel_engine_cs *engine,
				   int *freq)
{
	u64 x[5];
	int i;

	*freq = rps_set_check(rps, *freq);
	for (i = 0; i < 5; i++)
		x[i] = __measure_cs_frequency(engine, 2);
	*freq = (*freq + read_cagf(rps)) / 2;

	/* A simple triangle filter for better result stability */
	sort(x, 5, sizeof(*x), cmp_u64, NULL);
	return div_u64(x[1] + 2 * x[2] + x[3], 4);
}

/* True if x/y lies strictly within the ratio window (f_n/f_d, f_d/f_n). */
static bool scaled_within(u64 x, u64 y, u32 f_n, u32 f_d)
{
	return f_d * x > f_n * y && f_n * x < f_d * y;
}

/*
 * Verify the command streamer's execution rate (via mmio GPR reads)
 * scales with the requested RPS frequency.
 */
int live_rps_frequency_cs(void *arg)
{
	void (*saved_work)(struct work_struct *wrk);
	struct intel_gt *gt = arg;
	struct intel_rps *rps = &gt->rps;
	struct intel_engine_cs *engine;
	struct pm_qos_request qos;
	enum intel_engine_id id;
	int err = 0;

	/*
	 * The premise is that the GPU does change frequency at our behest.
	 * Let's check there is a correspondence between the requested
	 * frequency, the actual frequency, and the observed clock rate.
	 */

	if (!intel_rps_is_enabled(rps))
		return 0;

	if (INTEL_GEN(gt->i915) < 8) /* for CS simplicity */
		return 0;

	if (CPU_LATENCY >= 0)
		cpu_latency_qos_add_request(&qos, CPU_LATENCY);

	intel_gt_pm_wait_for_idle(gt);
	saved_work = rps->work.func;
	rps->work.func = dummy_rps_work;

	for_each_engine(engine, gt, id) {
		struct i915_request *rq;
		struct i915_vma *vma;
		u32 *cancel, *cntr;
		struct {
			u64 count;
			int freq;
		} min, max;

		st_engine_heartbeat_disable(engine);

		vma = create_spin_counter(engine,
					  engine->kernel_context->vm, false,
					  &cancel, &cntr);
		if (IS_ERR(vma)) {
			err = PTR_ERR(vma);
			st_engine_heartbeat_enable(engine);
			break;
		}

		rq = intel_engine_create_kernel_request(engine);
		if (IS_ERR(rq)) {
			err = PTR_ERR(rq);
			goto err_vma;
		}

		i915_vma_lock(vma);
		err = i915_request_await_object(rq, vma->obj, false);
		if (!err)
			err = i915_vma_move_to_active(vma, rq, 0);
		if (!err)
			err = rq->engine->emit_bb_start(rq,
							vma->node.start,
							PAGE_SIZE, 0);
		i915_vma_unlock(vma);
		i915_request_add(rq);
		if (err)
			goto err_vma;

		/*
		 * NOTE(review): err is not set on this timeout before the
		 * goto — the engine is skipped without failing the test.
		 */
		if (wait_for(intel_uncore_read(engine->uncore, CS_GPR(0)),
			     10)) {
			pr_err("%s: timed loop did not start\n",
			       engine->name);
			goto err_vma;
		}

		min.freq = rps->min_freq;
		min.count = measure_cs_frequency_at(rps, engine, &min.freq);

		max.freq = rps->max_freq;
		max.count = measure_cs_frequency_at(rps, engine, &max.freq);

		pr_info("%s: min:%lluKHz @ %uMHz, max:%lluKHz @ %uMHz [%d%%]\n",
			engine->name,
			min.count, intel_gpu_freq(rps, min.freq),
			max.count, intel_gpu_freq(rps, max.freq),
			(int)DIV64_U64_ROUND_CLOSEST(100 * min.freq * max.count,
						     max.freq * min.count));

		if (!scaled_within(max.freq * min.count,
				   min.freq * max.count,
				   2, 3)) {
			int f;

			pr_err("%s: CS did not scale with frequency! scaled min:%llu, max:%llu\n",
			       engine->name,
			       max.freq * min.count,
			       min.freq * max.count);
			show_pcu_config(rps);

			/* Walk the range to show where scaling breaks down. */
			for (f = min.freq + 1; f <= rps->max_freq; f++) {
				int act = f;
				u64 count;

				count = measure_cs_frequency_at(rps,
								engine,
								&act);
				if (act < f)
					break;

				pr_info("%s: %x:%uMHz: %lluKHz [%d%%]\n",
					engine->name,
					act, intel_gpu_freq(rps, act), count,
					(int)DIV64_U64_ROUND_CLOSEST(100 * min.freq * count,
								     act * min.count));

				f = act; /* may skip ahead [pcu granularity] */
			}

			err = -EINVAL;
		}

err_vma:
		*cancel = MI_BATCH_BUFFER_END;
		i915_gem_object_flush_map(vma->obj);
		i915_gem_object_unpin_map(vma->obj);
		i915_vma_unpin(vma);
		i915_vma_put(vma);

		st_engine_heartbeat_enable(engine);
		if (igt_flush_test(gt->i915))
			err = -EIO;
		if (err)
			break;
	}

	intel_gt_pm_wait_for_idle(gt);
	rps->work.func = saved_work;

	if (CPU_LATENCY >= 0)
		cpu_latency_qos_remove_request(&qos);

	return err;
}

/*
 * As live_rps_frequency_cs, but using the memory-visible SRM counter,
 * with a tighter (1:2) tolerance on the scaling.
 */
int live_rps_frequency_srm(void *arg)
{
	void (*saved_work)(struct work_struct *wrk);
	struct intel_gt *gt = arg;
	struct intel_rps *rps = &gt->rps;
	struct intel_engine_cs *engine;
	struct pm_qos_request qos;
	enum intel_engine_id id;
	int err = 0;

	/*
	 * The premise is that the GPU does change frequency at our behest.
	 * Let's check there is a correspondence between the requested
	 * frequency, the actual frequency, and the observed clock rate.
	 */

	if (!intel_rps_is_enabled(rps))
		return 0;

	if (INTEL_GEN(gt->i915) < 8) /* for CS simplicity */
		return 0;

	if (CPU_LATENCY >= 0)
		cpu_latency_qos_add_request(&qos, CPU_LATENCY);

	intel_gt_pm_wait_for_idle(gt);
	saved_work = rps->work.func;
	rps->work.func = dummy_rps_work;

	for_each_engine(engine, gt, id) {
		struct i915_request *rq;
		struct i915_vma *vma;
		u32 *cancel, *cntr;
		struct {
			u64 count;
			int freq;
		} min, max;

		st_engine_heartbeat_disable(engine);

		vma = create_spin_counter(engine,
					  engine->kernel_context->vm, true,
					  &cancel, &cntr);
		if (IS_ERR(vma)) {
			err = PTR_ERR(vma);
			st_engine_heartbeat_enable(engine);
			break;
		}

		rq = intel_engine_create_kernel_request(engine);
		if (IS_ERR(rq)) {
			err = PTR_ERR(rq);
			goto err_vma;
		}

		i915_vma_lock(vma);
		err = i915_request_await_object(rq, vma->obj, false);
		if (!err)
			err = i915_vma_move_to_active(vma, rq, 0);
		if (!err)
			err = rq->engine->emit_bb_start(rq,
							vma->node.start,
							PAGE_SIZE, 0);
		i915_vma_unlock(vma);
		i915_request_add(rq);
		if (err)
			goto err_vma;

		/* NOTE(review): as in the CS variant, err is left unset here. */
		if (wait_for(READ_ONCE(*cntr), 10)) {
			pr_err("%s: timed loop did not start\n",
			       engine->name);
			goto err_vma;
		}

		min.freq = rps->min_freq;
		min.count = measure_frequency_at(rps, cntr, &min.freq);

		max.freq = rps->max_freq;
		max.count = measure_frequency_at(rps, cntr, &max.freq);

		pr_info("%s: min:%lluKHz @ %uMHz, max:%lluKHz @ %uMHz [%d%%]\n",
			engine->name,
			min.count, intel_gpu_freq(rps, min.freq),
			max.count, intel_gpu_freq(rps, max.freq),
			(int)DIV64_U64_ROUND_CLOSEST(100 * min.freq * max.count,
						     max.freq * min.count));

		if (!scaled_within(max.freq * min.count,
				   min.freq * max.count,
				   1, 2)) {
			int f;

			pr_err("%s: CS did not scale with frequency! scaled min:%llu, max:%llu\n",
			       engine->name,
			       max.freq * min.count,
			       min.freq * max.count);
			show_pcu_config(rps);

			for (f = min.freq + 1; f <= rps->max_freq; f++) {
				int act = f;
				u64 count;

				count = measure_frequency_at(rps, cntr, &act);
				if (act < f)
					break;

				pr_info("%s: %x:%uMHz: %lluKHz [%d%%]\n",
					engine->name,
					act, intel_gpu_freq(rps, act), count,
					(int)DIV64_U64_ROUND_CLOSEST(100 * min.freq * count,
								     act * min.count));

				f = act; /* may skip ahead [pcu granularity] */
			}

			err = -EINVAL;
		}

err_vma:
		*cancel = MI_BATCH_BUFFER_END;
		i915_gem_object_flush_map(vma->obj);
		i915_gem_object_unpin_map(vma->obj);
		i915_vma_unpin(vma);
		i915_vma_put(vma);

		st_engine_heartbeat_enable(engine);
		if (igt_flush_test(gt->i915))
			err = -EIO;
		if (err)
			break;
	}

	intel_gt_pm_wait_for_idle(gt);
	rps->work.func = saved_work;

	if (CPU_LATENCY >= 0)
		cpu_latency_qos_remove_request(&qos);

	return err;
}

/*
 * Let a full evaluation interval elapse with a clean interrupt slate so
 * any RPS interrupt observed afterwards belongs to the interval we slept.
 */
static void sleep_for_ei(struct intel_rps *rps, int timeout_us)
{
	/* Flush any previous EI */
	usleep_range(timeout_us, 2 * timeout_us);

	/* Reset the interrupt status */
	rps_disable_interrupts(rps);
	GEM_BUG_ON(rps->pm_iir);
	rps_enable_interrupts(rps);

	/* And then wait for the timeout, for real this time */
	usleep_range(2 * timeout_us, 3 * timeout_us);
}

/*
 * Run a spinner at min frequency for one evaluation interval and check
 * that the HW raised an UP-threshold interrupt.
 */
static int __rps_up_interrupt(struct intel_rps *rps,
			      struct intel_engine_cs *engine,
			      struct igt_spinner *spin)
{
	struct intel_uncore *uncore = engine->uncore;
	struct i915_request *rq;
	u32 timeout;

	if (!intel_engine_can_store_dword(engine))
		return 0;

	rps_set_check(rps, rps->min_freq);

	rq = igt_spinner_create_request(spin, engine->kernel_context, MI_NOOP);
	if (IS_ERR(rq))
		return PTR_ERR(rq);

	i915_request_get(rq);
	i915_request_add(rq);

	if (!igt_wait_for_spinner(spin, rq)) {
		pr_err("%s: RPS spinner did not start\n",
		       engine->name);
		i915_request_put(rq);
		intel_gt_set_wedged(engine->gt);
		return -EIO;
	}

	if (!intel_rps_is_active(rps)) {
		pr_err("%s: RPS not enabled on starting spinner\n",
		       engine->name);
		igt_spinner_end(spin);
		i915_request_put(rq);
		return -EINVAL;
	}

	if (!(rps->pm_events & GEN6_PM_RP_UP_THRESHOLD)) {
		pr_err("%s: RPS did not register UP interrupt\n",
		       engine->name);
		i915_request_put(rq);
		return -EINVAL;
	}

	if (rps->last_freq != rps->min_freq) {
		pr_err("%s: RPS did not program min frequency\n",
		       engine->name);
		i915_request_put(rq);
		return -EINVAL;
	}

	/* Sleep for one full UP evaluation interval (converted to us). */
	timeout = intel_uncore_read(uncore, GEN6_RP_UP_EI);
	timeout = intel_gt_pm_interval_to_ns(engine->gt, timeout);
	timeout = DIV_ROUND_UP(timeout, 1000);

	sleep_for_ei(rps, timeout);
	GEM_BUG_ON(i915_request_completed(rq));

	igt_spinner_end(spin);
	i915_request_put(rq);

	if (rps->cur_freq != rps->min_freq) {
		pr_err("%s: Frequency unexpectedly changed [up], now %d!\n",
		       engine->name, intel_rps_read_actual_frequency(rps));
		return -EINVAL;
	}

	if (!(rps->pm_iir & GEN6_PM_RP_UP_THRESHOLD)) {
		pr_err("%s: UP interrupt not recorded for spinner, pm_iir:%x, prev_up:%x, up_threshold:%x, up_ei:%x\n",
		       engine->name, rps->pm_iir,
		       intel_uncore_read(uncore, GEN6_RP_PREV_UP),
		       intel_uncore_read(uncore, GEN6_RP_UP_THRESHOLD),
		       intel_uncore_read(uncore, GEN6_RP_UP_EI));
		return -EINVAL;
	}

	return 0;
}

/*
 * Hold the GPU idle at max frequency for one evaluation interval and
 * check that the HW raised a DOWN-threshold (or timeout) interrupt.
 */
static int __rps_down_interrupt(struct intel_rps *rps,
				struct intel_engine_cs *engine)
{
	struct intel_uncore *uncore = engine->uncore;
	u32 timeout;

	rps_set_check(rps, rps->max_freq);

	if (!(rps->pm_events & GEN6_PM_RP_DOWN_THRESHOLD)) {
		pr_err("%s: RPS did not register DOWN interrupt\n",
		       engine->name);
		return -EINVAL;
	}

	if (rps->last_freq != rps->max_freq) {
		pr_err("%s: RPS did not program max frequency\n",
		       engine->name);
		return -EINVAL;
	}

	timeout = intel_uncore_read(uncore, GEN6_RP_DOWN_EI);
	timeout = intel_gt_pm_interval_to_ns(engine->gt, timeout);
	timeout = DIV_ROUND_UP(timeout, 1000);

	sleep_for_ei(rps, timeout);

	if (rps->cur_freq != rps->max_freq) {
		pr_err("%s: Frequency unexpectedly changed [down], now %d!\n",
		       engine->name,
		       intel_rps_read_actual_frequency(rps));
		return -EINVAL;
	}

	if (!(rps->pm_iir & (GEN6_PM_RP_DOWN_THRESHOLD |
			     GEN6_PM_RP_DOWN_TIMEOUT))) {
		pr_err("%s: DOWN interrupt not recorded for idle, pm_iir:%x, prev_down:%x, down_threshold:%x, down_ei:%x [prev_up:%x, up_threshold:%x, up_ei:%x]\n",
		       engine->name, rps->pm_iir,
		       intel_uncore_read(uncore, GEN6_RP_PREV_DOWN),
		       intel_uncore_read(uncore, GEN6_RP_DOWN_THRESHOLD),
		       intel_uncore_read(uncore, GEN6_RP_DOWN_EI),
		       intel_uncore_read(uncore, GEN6_RP_PREV_UP),
		       intel_uncore_read(uncore, GEN6_RP_UP_THRESHOLD),
		       intel_uncore_read(uncore, GEN6_RP_UP_EI));
		return -EINVAL;
	}

	return 0;
}

/* Verify UP/DOWN RPS interrupts are delivered under busy/idle load. */
int live_rps_interrupt(void *arg)
{
	struct intel_gt *gt = arg;
	struct intel_rps *rps = &gt->rps;
	void (*saved_work)(struct work_struct *wrk);
	struct intel_engine_cs *engine;
	enum intel_engine_id id;
	struct igt_spinner spin;
	u32 pm_events;
	int err = 0;

	/*
	 * First, let's check whether or not we are receiving interrupts.
	 */

	if (!intel_rps_has_interrupts(rps))
		return 0;

	intel_gt_pm_get(gt);
	pm_events = rps->pm_events;
	intel_gt_pm_put(gt);
	if (!pm_events) {
		pr_err("No RPS PM events registered, but RPS is enabled?\n");
		return -ENODEV;
	}

	if (igt_spinner_init(&spin, gt))
		return -ENOMEM;

	intel_gt_pm_wait_for_idle(gt);
	saved_work = rps->work.func;
	rps->work.func = dummy_rps_work;

	for_each_engine(engine, gt, id) {
		/* Keep the engine busy with a spinner; expect an UP! */
		if (pm_events & GEN6_PM_RP_UP_THRESHOLD) {
			intel_gt_pm_wait_for_idle(engine->gt);
			GEM_BUG_ON(intel_rps_is_active(rps));

			st_engine_heartbeat_disable(engine);

			err = __rps_up_interrupt(rps, engine, &spin);

			st_engine_heartbeat_enable(engine);
			if (err)
				goto out;

			intel_gt_pm_wait_for_idle(engine->gt);
		}

		/* Keep the engine awake but idle and check for DOWN */
		if (pm_events & GEN6_PM_RP_DOWN_THRESHOLD) {
			st_engine_heartbeat_disable(engine);
			intel_rc6_disable(&gt->rc6);

			err = __rps_down_interrupt(rps, engine);

			intel_rc6_enable(&gt->rc6);
			st_engine_heartbeat_enable(engine);
			if (err)
				goto out;
		}
	}

out:
	if (igt_flush_test(gt->i915))
		err = -EIO;

	igt_spinner_fini(&spin);

	intel_gt_pm_wait_for_idle(gt);
	rps->work.func = saved_work;

	return err;
}

/* Sample RAPL energy over ~duration_ms; returns average power (uJ/s). */
static u64 __measure_power(int duration_ms)
{
	u64 dE, dt;

	dt = ktime_get();
	dE = librapl_energy_uJ();
	usleep_range(1000 * duration_ms, 2000 * duration_ms);
	dE = librapl_energy_uJ() - dE;
	dt = ktime_get() - dt;

	return div64_u64(1000 * 1000 * dE, dt);
}

/* Set *freq and return the triangle-filtered median of 5 power samples. */
static u64 measure_power_at(struct intel_rps *rps, int *freq)
{
	u64 x[5];
	int i;

	*freq = rps_set_check(rps, *freq);
	for (i = 0; i < 5; i++)
		x[i] = __measure_power(5);
	*freq = (*freq + read_cagf(rps)) / 2;

	/* A simple triangle filter for better result stability */
	sort(x, 5, sizeof(*x), cmp_u64, NULL);
	return div_u64(x[1] + 2 * x[2] + x[3], 4);
}

/* Check that running at min frequency draws less power than at max. */
int live_rps_power(void *arg)
{
	struct intel_gt *gt = arg;
	struct intel_rps *rps = &gt->rps;
	void (*saved_work)(struct work_struct *wrk);
	struct intel_engine_cs *engine;
	enum intel_engine_id id;
	struct igt_spinner spin;
	int err = 0;

	/*
	 * Our fundamental assumption is that running at lower frequency
	 * actually saves power. Let's see if our RAPL measurement supports
	 * that theory.
	 */

	if (!intel_rps_is_enabled(rps))
		return 0;

	if (!librapl_energy_uJ())
		return 0;

	if (igt_spinner_init(&spin, gt))
		return -ENOMEM;

	intel_gt_pm_wait_for_idle(gt);
	saved_work = rps->work.func;
	rps->work.func = dummy_rps_work;

	for_each_engine(engine, gt, id) {
		struct i915_request *rq;
		struct {
			u64 power;
			int freq;
		} min, max;

		if (!intel_engine_can_store_dword(engine))
			continue;

		st_engine_heartbeat_disable(engine);

		rq = igt_spinner_create_request(&spin,
						engine->kernel_context,
						MI_NOOP);
		if (IS_ERR(rq)) {
			st_engine_heartbeat_enable(engine);
			err = PTR_ERR(rq);
			break;
		}

		i915_request_add(rq);

		if (!igt_wait_for_spinner(&spin, rq)) {
			pr_err("%s: RPS spinner did not start\n",
			       engine->name);
			igt_spinner_end(&spin);
			st_engine_heartbeat_enable(engine);
			intel_gt_set_wedged(engine->gt);
			err = -EIO;
			break;
		}

		max.freq = rps->max_freq;
		max.power = measure_power_at(rps, &max.freq);

		min.freq = rps->min_freq;
		min.power = measure_power_at(rps, &min.freq);

		igt_spinner_end(&spin);
		st_engine_heartbeat_enable(engine);

		pr_info("%s: min:%llumW @ %uMHz, max:%llumW @ %uMHz\n",
			engine->name,
			min.power, intel_gpu_freq(rps, min.freq),
			max.power, intel_gpu_freq(rps, max.freq));

		/* Skip (not fail) if the achieved range was too narrow. */
		if (10 * min.freq >= 9 * max.freq) {
			pr_notice("Could not control frequency, ran at [%d:%uMHz, %d:%uMhz]\n",
				  min.freq, intel_gpu_freq(rps, min.freq),
				  max.freq, intel_gpu_freq(rps, max.freq));
			continue;
		}

		/* Require at least ~10% power saving at min frequency. */
		if (11 * min.power > 10 * max.power) {
			pr_err("%s: did not conserve power when setting lower frequency!\n",
			       engine->name);
			err = -EINVAL;
			break;
		}

		if (igt_flush_test(gt->i915)) {
			err = -EIO;
			break;
		}
	}

	igt_spinner_fini(&spin);

	intel_gt_pm_wait_for_idle(gt);
	rps->work.func = saved_work;

	return err;
}

/*
 * End-to-end check: with the real RPS worker in place, the GPU should
 * upclock on its own while busy and downclock again once idle.
 */
int live_rps_dynamic(void *arg)
{
	struct intel_gt *gt = arg;
	struct intel_rps *rps = &gt->rps;
	struct intel_engine_cs *engine;
	enum intel_engine_id id;
	struct igt_spinner spin;
	int err = 0;

	/*
	 * We've looked at the basics, and have established that we
	 * can change the clock frequency and that the HW will generate
	 * interrupts based on load. Now we check how we integrate those
	 * moving parts into dynamic reclocking based on load.
	 */

	if (!intel_rps_is_enabled(rps))
		return 0;

	if (igt_spinner_init(&spin, gt))
		return -ENOMEM;

	if (intel_rps_has_interrupts(rps))
		pr_info("RPS has interrupt support\n");
	if (intel_rps_uses_timer(rps))
		pr_info("RPS has timer support\n");

	for_each_engine(engine, gt, id) {
		struct i915_request *rq;
		struct {
			ktime_t dt;
			u8 freq;
		} min, max;

		if (!intel_engine_can_store_dword(engine))
			continue;

		intel_gt_pm_wait_for_idle(gt);
		GEM_BUG_ON(intel_rps_is_active(rps));
		rps->cur_freq = rps->min_freq;

		intel_engine_pm_get(engine);
		intel_rc6_disable(&gt->rc6);
		GEM_BUG_ON(rps->last_freq != rps->min_freq);

		rq = igt_spinner_create_request(&spin,
						engine->kernel_context,
						MI_NOOP);
		if (IS_ERR(rq)) {
			err = PTR_ERR(rq);
			goto err;
		}

		i915_request_add(rq);

		/* Busy: expect autonomous upclocking towards max. */
		max.dt = ktime_get();
		max.freq = wait_for_freq(rps, rps->max_freq, 500);
		max.dt = ktime_sub(ktime_get(), max.dt);

		igt_spinner_end(&spin);

		/* Idle: expect autonomous downclocking towards min. */
		min.dt = ktime_get();
		min.freq = wait_for_freq(rps, rps->min_freq, 2000);
		min.dt = ktime_sub(ktime_get(), min.dt);

		pr_info("%s: dynamically reclocked to %u:%uMHz while busy in %lluns, and %u:%uMHz while idle in %lluns\n",
			engine->name,
			max.freq, intel_gpu_freq(rps, max.freq),
			ktime_to_ns(max.dt),
			min.freq, intel_gpu_freq(rps, min.freq),
			ktime_to_ns(min.dt));
		if (min.freq >= max.freq) {
			pr_err("%s: dynamic reclocking of spinner failed\n!",
			       engine->name);
			err = -EINVAL;
		}

err:
		intel_rc6_enable(&gt->rc6);
		intel_engine_pm_put(engine);

		if (igt_flush_test(gt->i915))
			err = -EIO;
		if (err)
			break;
	}

	igt_spinner_fini(&spin);

	return err;
}
{ "pile_set_name": "Github" }
<vector xmlns:android="http://schemas.android.com/apk/res/android" android:width="108dp" android:height="108dp" android:viewportWidth="108" android:viewportHeight="108"> <group android:scaleX="0.23625" android:scaleY="0.23625" android:translateX="23.76" android:translateY="23.76"> <path android:pathData="m175.751,234.454v12.13c0,2.761 -2.239,5 -5,5h-85.5c-2.761,0 -5,-2.239 -5,-5v-12.13c1.4,0.09 2.81,0.14 4.23,0.14h87.04c1.42,0 2.83,-0.05 4.23,-0.14z" android:fillColor="#878787"/> <path android:pathData="m180.984,51.083c0.821,7.446 7.691,10.864 16.16,7.995 6.301,5.932 14.873,7.125 19.399,2.599s3.333,-13.098 -2.599,-19.399c2.889,-8.529 -0.606,-15.345 -7.995,-16.16 1.075,-5.164 0.196,-9.897 -2.901,-12.995 -6.254,-6.254 -19.172,-3.476 -28.853,6.205s-12.459,22.599 -6.205,28.853c3.097,3.099 7.83,3.978 12.994,2.902z" android:fillColor="#13bf6d"/> <path android:pathData="m79.693,107.256c7.97,-0.879 11.629,-8.232 8.558,-17.297 6.35,-6.745 7.627,-15.92 2.782,-20.764 -4.845,-4.845 -14.019,-3.568 -20.764,2.782 -9.129,-3.093 -16.425,0.648 -17.297,8.558 -5.528,-1.151 -10.594,-0.21 -13.91,3.105 -6.694,6.694 -3.721,20.521 6.642,30.884s24.19,13.336 30.884,6.642c3.315,-3.316 4.256,-8.382 3.105,-13.91z" android:fillColor="#13bf6d"/> <path android:pathData="m69.265,34.579c5.525,5.525 14.482,5.525 20.007,0l-20.007,-20.007c-5.525,5.525 -5.525,14.482 0,20.007z" android:fillColor="#fc5d3d"/> <path android:pathData="M112.998,85.912a6.038,6.038 121.7,1 0,11.752 -2.778a6.038,6.038 121.7,1 0,-11.752 2.778z" android:fillColor="#13bf6d"/> <path android:pathData="M149.239,71.783a6.038,6.038 121.7,1 0,11.752 -2.778a6.038,6.038 121.7,1 0,-11.752 2.778z" android:fillColor="#fc5d3d"/> <path android:pathData="m113.533,9.231c-6.981,6.981 -5.731,19.564 -0.332,24.963 4.285,4.285 10.121,2.957 15.839,-1.276 2.897,-2.131 6.047,-5.28 8.18,-8.18 4.233,-5.718 5.562,-11.553 1.276,-15.839 -5.4,-5.399 -17.982,-6.649 -24.963,0.332z" android:fillColor="#f0ab20"/> <path 
android:pathData="m147.001,42.699c-3.022,3.022 -8.004,2.742 -10.668,-0.612l-7.294,-9.17c2.897,-2.131 6.047,-5.28 8.18,-8.18l9.17,7.294c3.354,2.664 3.634,7.646 0.612,10.668z" android:fillColor="#f2e1d6"/> <path android:pathData="m205.877,75.311c11.354,11.354 11.341,29.758 0,41.099 -11.354,11.354 -29.745,11.354 -41.099,0l8.57,-8.57 23.959,-23.959z" android:fillColor="#fff"/> <path android:pathData="m197.307,83.881c6.619,6.619 6.611,17.348 0,23.959 -6.619,6.619 -17.34,6.619 -23.959,0z" android:fillColor="#f0c020"/> <path android:pathData="m197.307,83.881c6.619,6.619 6.611,17.348 0,23.959 -6.619,6.619 -17.34,6.619 -23.959,0z" android:fillColor="#f0c020"/> <path android:pathData="m224.711,170.774h-190c0,-13.61 8.79,-26.74 24.65,-26.74 8.87,0 16.62,4.69 20.96,11.72 4,-6.43 11.6,-9.84 19.29,-8.15 -3.37,-13.66 7.02,-26.44 20.68,-26.44 10.39,0 19.03,7.44 20.92,17.27 9.68,-4.69 21.15,0.01 24.87,9.97 6.13,-7.25 17.81,-5.14 21.02,3.8 15,-12.09 37.61,-0.9 37.61,18.57z" android:fillColor="#f2e1d6"/> <path android:pathData="m230.111,170.774h-5.4,-190 -8.82c-2.92,0 -5.24,2.5 -4.99,5.41 2.62,31.33 27.87,56.21 59.35,58.27 1.4,0.09 2.81,0.14 4.23,0.14h87.04c1.42,0 2.83,-0.05 4.23,-0.14 31.389,-2.048 56.711,-26.852 59.35,-58.26 0.25,-2.91 -2.06,-5.42 -4.99,-5.42z" android:fillColor="#72c1e8"/> <path android:pathData="m171.521,225.094h-70.697c-1.381,0 -2.5,-1.119 -2.5,-2.5s1.119,-2.5 2.5,-2.5h70.697c22.941,0 42.671,-15.584 47.979,-37.898 0.319,-1.342 1.668,-2.176 3.011,-1.854 1.344,0.319 2.174,1.667 1.854,3.011 -5.847,24.576 -27.578,41.741 -52.844,41.741zM90.824,225.094h-6.344c-1.381,0 -2.5,-1.119 -2.5,-2.5s1.119,-2.5 2.5,-2.5h6.343c1.381,0 2.5,1.119 2.5,2.5s-1.118,2.5 -2.499,2.5z" android:fillColor="#c5e5ff"/> <path android:fillColor="#FF000000" android:pathData="m216.743,41.668c2.32,-8.563 -1.183,-15.465 -7.961,-17.515 0.528,-5.193 -0.845,-9.674 -3.967,-12.797 -7.341,-7.34 -21.569,-4.615 -32.389,6.206 -10.821,10.82 -13.547,25.047 -6.205,32.388 3.122,3.123 7.605,4.494 12.796,3.967 
2.059,6.813 8.99,10.271 17.517,7.96 7.217,6.03 16.561,6.786 21.776,1.568 5.217,-5.216 4.463,-14.557 -1.567,-21.777zM214.775,59.91c-3.503,3.504 -10.643,2.314 -15.917,-2.652 -0.678,-0.637 -1.646,-0.843 -2.516,-0.547 -6.201,2.101 -12.169,0.496 -12.873,-5.901 -0.165,-1.482 -1.572,-2.474 -2.995,-2.173 -4.53,0.942 -8.341,0.156 -10.718,-2.222 -4.818,-4.818 -2.689,-15.182 4.505,-23.482l4.776,4.776 -0.355,15.107c-0.033,1.4 1.091,2.559 2.5,2.559 1.354,0 2.467,-1.081 2.498,-2.441l0.243,-10.338 10.947,10.944 -0.122,8.189c-0.021,1.394 1.104,2.537 2.501,2.537 1.363,0 2.479,-1.095 2.499,-2.463l0.05,-3.336 8.152,8.152c0.977,0.977 2.559,0.977 3.535,0 0.977,-0.976 0.977,-2.559 0,-3.535l-8.153,-8.153 3.808,-0.057c1.381,-0.021 2.483,-1.156 2.463,-2.537 -0.022,-1.449 -1.259,-2.512 -2.537,-2.462l-8.661,0.129 -10.945,-10.945 10.581,-0.249c1.381,-0.033 2.474,-1.178 2.44,-2.558 -0.031,-1.38 -1.172,-2.458 -2.558,-2.441l-15.351,0.361 -4.776,-4.776c8.3,-7.194 18.665,-9.324 23.483,-4.505 2.377,2.377 3.165,6.183 2.222,10.718 -0.302,1.45 0.714,2.834 2.173,2.995 6.162,0.68 8.108,6.361 5.901,12.873 -0.296,0.875 -0.086,1.843 0.548,2.516 4.966,5.274 6.155,12.414 2.652,15.917z"/> <path android:fillColor="#FF000000" android:pathData="m78.355,122.934c3.333,-3.332 4.781,-8.132 4.18,-13.699 7.358,-2.142 11.015,-9.58 8.518,-18.664 6.45,-7.667 7.283,-17.608 1.748,-23.143s-15.475,-4.703 -23.144,1.748c-9.155,-2.519 -16.538,1.225 -18.664,8.518 -5.565,-0.6 -10.368,0.848 -13.699,4.18 -7.788,7.788 -4.871,22.907 6.642,34.419 11.509,11.508 26.629,14.431 34.419,6.641zM40.83,85.408c2.98,-2.981 7.523,-3.282 11.632,-2.425 1.444,0.299 2.833,-0.708 2.995,-2.174 0.763,-6.921 7.2,-8.772 14.01,-6.464 0.876,0.296 1.843,0.085 2.516,-0.547 5.715,-5.379 13.467,-6.65 17.283,-2.835 3.816,3.816 2.544,11.569 -2.835,17.283 -0.633,0.673 -0.844,1.641 -0.547,2.516 2.281,6.734 0.511,13.24 -6.464,14.01 -1.466,0.162 -2.474,1.552 -2.174,2.995 1.023,4.914 0.162,9.045 -2.425,11.632 -5.249,5.249 -16.543,2.926 
-25.521,-4.934l5.302,-5.302c0.06,0.001 16.245,0.383 16.305,0.383 1.354,0 2.466,-1.081 2.499,-2.441 0.032,-1.38 -1.061,-2.525 -2.441,-2.558l-11.476,-0.27 12.059,-12.059c0.009,0 8.87,0.132 8.878,0.132 1.363,0 2.479,-1.095 2.499,-2.463 0.021,-1.38 -1.082,-2.516 -2.462,-2.537l-3.987,-0.059 9.021,-9.021c0.977,-0.976 0.977,-2.559 0,-3.535 -0.976,-0.977 -2.56,-0.977 -3.535,0l-9.021,9.021 -0.067,-4.492c-0.02,-1.368 -1.135,-2.463 -2.499,-2.463 -0.013,0 -0.025,0 -0.038,0 -1.381,0.021 -2.483,1.156 -2.462,2.537l0.139,9.345 -12.061,12.057 -0.276,-11.736c-0.033,-1.38 -1.189,-2.461 -2.558,-2.441 -1.38,0.033 -2.473,1.178 -2.441,2.558l0.388,16.506 -5.302,5.302c-7.859,-8.977 -10.183,-20.272 -4.934,-25.521z"/> <path android:fillColor="#FF000000" android:pathData="m67.498,12.804c-6.506,6.505 -6.507,17.036 0,23.542 6.49,6.49 17.05,6.491 23.542,0 0.978,-0.979 0.975,-2.562 0,-3.536l-20.007,-20.005c-0.976,-0.977 -2.56,-0.976 -3.535,-0.001zM71.033,32.812c-3.994,-3.995 -4.432,-10.074 -1.579,-14.515l16.091,16.091c-4.505,2.883 -10.578,2.357 -14.512,-1.576z"/> <path android:fillColor="#FF000000" android:pathData="m110.344,84.574c0,4.708 3.831,8.539 8.539,8.539s8.539,-3.831 8.539,-8.539 -3.831,-8.539 -8.539,-8.539 -8.539,3.831 -8.539,8.539zM122.421,84.574c0,1.951 -1.587,3.539 -3.539,3.539s-3.539,-1.587 -3.539,-3.539 1.587,-3.539 3.539,-3.539 3.539,1.588 3.539,3.539z"/> <path android:fillColor="#FF000000" android:pathData="m163.675,70.449c0,-4.708 -3.831,-8.539 -8.539,-8.539s-8.538,3.831 -8.538,8.539 3.83,8.539 8.538,8.539 8.539,-3.831 8.539,-8.539zM151.598,70.449c0,-1.951 1.587,-3.539 3.538,-3.539s3.539,1.587 3.539,3.539 -1.588,3.539 -3.539,3.539 -3.538,-1.588 -3.538,-3.539z"/> <path android:fillColor="#FF000000" android:pathData="m134.375,43.642c3.59,4.52 10.314,4.905 14.394,0.826 4.078,-4.079 3.69,-10.807 -0.823,-14.392l-7.352,-5.848c5.594,-8.893 2.289,-14.475 -0.331,-17.096 -6.332,-6.333 -20.491,-7.676 -28.498,0.332 -7.985,7.985 -6.711,22.119 -0.332,28.498 4.912,4.914 11.234,4.007 
17.091,0.325zM145.233,40.932c-1.965,1.963 -5.199,1.795 -6.944,-0.401l-5.737,-7.214c1.702,-1.451 3.542,-3.277 5.066,-5.067l7.216,5.74c2.19,1.74 2.366,4.974 0.399,6.942zM114.968,32.426c-4.649,-4.649 -5.425,-15.67 0.332,-21.428 5.729,-5.728 16.718,-5.04 21.427,-0.332 2.973,2.974 2.463,7.207 -1.522,12.589 -1.972,2.681 -4.975,5.682 -7.653,7.652 -5.376,3.981 -9.61,4.492 -12.584,1.519z"/> <path android:fillColor="#FF000000" android:pathData="m120.913,21.479 l-0.904,-0.182c-1.358,-0.271 -2.672,0.605 -2.944,1.958 -0.272,1.354 0.605,2.671 1.958,2.944l0.904,0.182c1.366,0.272 2.674,-0.615 2.944,-1.958 0.272,-1.353 -0.605,-2.671 -1.958,-2.944z"/> <path android:fillColor="#FF000000" android:pathData="m128.725,18.569c1.354,-0.272 2.23,-1.59 1.958,-2.944l-0.182,-0.904c-0.272,-1.354 -1.584,-2.229 -2.944,-1.958 -1.354,0.272 -2.23,1.59 -1.958,2.944l0.182,0.904c0.271,1.345 1.579,2.23 2.944,1.958z"/> <path android:fillColor="#FF000000" android:pathData="m149.878,102.873 l1.974,-1.974c0.977,-0.976 0.977,-2.559 0,-3.535 -0.977,-0.977 -2.559,-0.977 -3.535,0l-1.974,1.974c-0.977,0.976 -0.977,2.559 0,3.535 0.976,0.976 2.558,0.976 3.535,0z"/> <path android:fillColor="#FF000000" android:pathData="m52.246,52.848 l1.974,1.974c0.976,0.977 2.56,0.977 3.535,0 0.977,-0.976 0.977,-2.559 0,-3.535l-1.974,-1.974c-0.976,-0.977 -2.56,-0.977 -3.535,0 -0.976,0.976 -0.976,2.559 0,3.535z"/> <path android:fillColor="#FF000000" android:pathData="m216.8,98.22c0.615,-9.754 -2.817,-18.346 -9.16,-24.68 -0.939,-0.94 -2.58,-0.94 -3.53,0l-41.1,41.1c-0.974,0.975 -0.975,2.566 0,3.54 11.74,11.74 31.549,13.115 45.13,-0.51 0.96,-1 0.92,-2.59 -0.08,-3.54 -0.96,-0.92 -2.609,-0.88 -3.529,0.08 -9.353,9.97 -25.341,11.083 -36.131,2.12l5.078,-5.079c7.75,5.973 18.71,5.243 25.597,-1.643 6.974,-6.973 7.574,-17.932 1.641,-25.602l5.075,-5.076c4.23,5.091 6.545,11.843 6.02,18.91 -0.102,1.373 0.919,2.576 2.311,2.68 1.398,0.1 2.578,-0.94 2.678,-2.3zM197.155,87.568c3.92,5.569 3.474,13.415 -1.616,18.503 -5.027,5.028 -12.867,5.566 
-18.5,1.617 17.364,-17.369 13.218,-13.222 20.116,-20.12z"/> <path android:fillColor="#FF000000" android:pathData="m212.899,105.4c-1.229,-0.55 -2.76,0.04 -3.3,1.26 -0.57,1.31 0.034,2.757 1.26,3.3 1.243,0.564 2.746,0.009 3.311,-1.26 0.524,-1.185 0.059,-2.712 -1.271,-3.3z"/> <path android:fillColor="#FF000000" android:pathData="m24.06,198.14c0.55,1.21 2.08,1.8 3.31,1.24 1.25,-0.57 1.81,-2.05 1.24,-3.31 -4.998,-11.021 -4.902,-19.13 -5.23,-20.13 0,-1.682 1.204,-2.661 2.51,-2.67h204.22c1.671,0.012 2.702,1.591 2.5,2.72 -2.65,31.478 -29.044,56.1 -61.09,56.1h-87.04c-17.477,0 -33.722,-7.287 -45.34,-20.04 -0.89,-0.98 -2.55,-1.05 -3.53,-0.16 -1.02,0.93 -1.09,2.51 -0.17,3.53 0.01,0 0.01,0.01 0.01,0.01 10.634,11.666 25.418,19.608 42.301,21.31v9.845c0,4.136 3.364,7.5 7.5,7.5h85.5c4.136,0 7.5,-3.364 7.5,-7.5v-9.841c31.656,-3.207 56.683,-28.586 59.339,-60.333 0.387,-4.35 -3.08,-8.129 -7.48,-8.14h-3.016c-1.751,-19.002 -23.211,-29.735 -39.044,-19.81 -4.442,-7.121 -14.201,-9.175 -21.17,-4.18 -4.822,-8.205 -14.787,-12.132 -23.96,-9.21 -3.097,-9.451 -11.944,-16.389 -22.63,-16.41 -2.55,0 -5.06,0.4 -7.46,1.19 -1.31,0.44 -2.02,1.85 -1.59,3.16 0.42,1.28 1.86,2.02 3.15,1.59 1.9,-0.62 3.88,-0.94 5.91,-0.94 8.99,0 16.76,6.41 18.46,15.25 0.298,1.6 2.071,2.494 3.54,1.77 8.636,-4.158 18.349,0.322 21.44,8.6 0.67,1.797 3.016,2.19 4.25,0.74 4.935,-5.82 14.207,-4.052 16.76,3.03 0.551,1.556 2.524,2.208 3.92,1.1 12.786,-10.279 31.599,-1.625 33.385,14.12h-184.73c1.042,-11.867 8.996,-21.701 22.035,-21.74 7.71,0 14.75,3.94 18.83,10.54 0.974,1.568 3.268,1.58 4.25,0 3.569,-5.719 10.167,-8.462 16.63,-7.02 1.779,0.386 3.41,-1.213 2.97,-3.04 -0.833,-3.352 -0.78,-6.877 0.34,-10.35 0.438,-1.335 -0.312,-2.74 -1.6,-3.15 -1.29,-0.43 -2.73,0.32 -3.15,1.6 -1.028,3.15 -1.35,6.32 -1.05,9.58 -6.564,-0.224 -12.285,2.463 -16.2,6.84 -5.18,-6.35 -12.84,-10 -21.02,-10h-0.07c-15.472,0 -25.905,11.882 -26.985,26.74h-6.415c-4.444,0.01 -7.889,3.857 -7.48,8.15 0,-0.001 0.447,10.226 5.65,21.719zM173.251,246.584c0,1.378 -1.121,2.5 
-2.5,2.5h-85.5c-1.378,0 -2.5,-1.122 -2.5,-2.5v-9.513c0.585,0.016 1.163,0.023 1.73,0.023h87.04c0.567,0 1.145,-0.008 1.73,-0.023z"/> <path android:fillColor="#FF000000" android:pathData="m29.79,203.76c-1.159,0.766 -1.479,2.306 -0.71,3.47 0.761,1.156 2.32,1.46 3.46,0.71 1.15,-0.76 1.47,-2.31 0.71,-3.46 -0.73,-1.12 -2.34,-1.45 -3.46,-0.72z"/> <path android:fillColor="#FF000000" android:pathData="m106.95,125.67c-0.95,-0.95 -2.6,-0.94 -3.54,0 -0.97,0.98 -0.97,2.57 0.01,3.54 0.956,0.956 2.535,0.995 3.54,-0.01 0.97,-0.97 0.96,-2.56 -0.01,-3.53z"/> <path android:fillColor="#FF000000" android:pathData="m121.383,141.75v-2.003c0,-1.381 -1.119,-2.5 -2.5,-2.5s-2.5,1.119 -2.5,2.5v2.003c0,1.381 1.119,2.5 2.5,2.5s2.5,-1.119 2.5,-2.5z"/> <path android:fillColor="#FF000000" android:pathData="m150.587,151.344c0.977,-0.976 0.977,-2.559 0,-3.535l-1.417,-1.417c-0.977,-0.977 -2.559,-0.977 -3.535,0 -0.977,0.976 -0.977,2.559 0,3.535l1.417,1.417c0.976,0.977 2.558,0.977 3.535,0z"/> <path android:fillColor="#FF000000" android:pathData="m109.777,156.326 l1.417,-1.417c0.976,-0.977 0.976,-2.56 0,-3.536 -0.977,-0.976 -2.559,-0.976 -3.536,0l-1.417,1.417c-0.976,0.977 -0.976,2.56 0,3.536 0.978,0.976 2.56,0.976 3.536,0z"/> <path android:fillColor="#FF000000" android:pathData="m56.899,152.566c-0.976,0.977 -0.976,2.56 0,3.536l1.417,1.417c0.977,0.976 2.559,0.976 3.536,0 0.976,-0.977 0.976,-2.56 0,-3.536l-1.417,-1.417c-0.977,-0.976 -2.56,-0.976 -3.536,0z"/> <path android:fillColor="#FF000000" android:pathData="m159.261,157.519 l1.417,-1.417c0.977,-0.976 0.977,-2.559 0.001,-3.536 -0.977,-0.977 -2.56,-0.976 -3.536,0l-1.417,1.417c-0.977,0.976 -0.977,2.559 -0.001,3.536 0.976,0.976 2.559,0.976 3.536,0z"/> <path android:fillColor="#FF000000" android:pathData="m206.16,161.959 l1.417,-1.417c0.977,-0.976 0.977,-2.559 0,-3.535 -0.977,-0.977 -2.559,-0.977 -3.535,0l-1.417,1.417c-0.977,0.976 -0.977,2.559 0,3.535 0.977,0.977 2.559,0.977 3.535,0z"/> </group> </vector>
{ "pile_set_name": "Github" }
<?php

// Fixture pairing an isset() guard with an is_string() check in the elseif
// branch. Only the $a1 case tests the *same* expression in both; the other
// cases deliberately mismatch the guarded expression, presumably so a static
// analyser's isset()-based type narrowing can be exercised on expressions it
// did NOT narrow — TODO confirm against this suite's expected-output file.
// (There is intentionally no $a5 case in the original fixture.)

// Guard and check agree: $a1->config['program'] in both.
if(!isset($a1->config['program'])) { /**/ } elseif(is_string($a1->config['program'])) { /**/ } else { /**/ }

// Guard is on config['program']; check is on the whole ->config property.
if(!isset($a2->config['program'])) { /**/ } elseif(is_string($a2->config)) { /**/ } else { /**/ }

// Guard is on ->config['program']; check is on a different property ->nope.
if(!isset($a3->config['program'])) { /**/ } elseif(is_string($a3->nope)) { /**/ } else { /**/ }

// Same offset, different property: ->nope['program'] vs ->config['program'].
if(!isset($a4->config['program'])) { /**/ } elseif(is_string($a4->nope['program'])) { /**/ } else { /**/ }

// Different variables entirely: guard on $a6, check on $a7.
if(!isset($a6->config['program'])) { /**/ } elseif(is_string($a7->config['program'])) { /**/ } else { /**/ }

// Different variables again: guard on $a7, check on $a8.
if(!isset($a7->config['program'])) { /**/ } elseif(is_string($a8->config['program'])) { /**/ } else { /**/ }

?>
{ "pile_set_name": "Github" }
資源の有効な利用の促進に関する法律 (平成三年四月二十六日法律第四十八号)最終改正:平成二五年五月三一日法律第二五号(最終改正までの未施行法令)平成二十五年五月三十一日法律第二十五号(未施行)   第一章 総則(第一条・第二条)  第二章 基本方針等(第三条―第九条)  第三章 特定省資源業種(第十条―第十四条)  第四章 特定再利用業種(第十五条―第十七条)  第五章 指定省資源化製品(第十八条―第二十条)  第六章 指定再利用促進製品(第二十一条―第二十三条)  第七章 指定表示製品(第二十四条・第二十五条)  第八章 指定再資源化製品(第二十六条―第三十三条)  第九章 指定副産物(第三十四条―第三十六条)  第十章 雑則(第三十七条―第四十一条)  第十一章 罰則(第四十二条―第四十四条)  附則    第一章 総則 (目的) 第一条  この法律は、主要な資源の大部分を輸入に依存している我が国において、近年の国民経済の発展に伴い、資源が大量に使用されていることにより、使用済物品等及び副産物が大量に発生し、その相当部分が廃棄されており、かつ、再生資源及び再生部品の相当部分が利用されずに廃棄されている状況にかんがみ、資源の有効な利用の確保を図るとともに、廃棄物の発生の抑制及び環境の保全に資するため、使用済物品等及び副産物の発生の抑制並びに再生資源及び再生部品の利用の促進に関する所要の措置を講ずることとし、もって国民経済の健全な発展に寄与することを目的とする。 (定義) 第二条  この法律において「使用済物品等」とは、一度使用され、又は使用されずに収集され、若しくは廃棄された物品(放射性物質及びこれによって汚染された物を除く。)をいう。 2  この法律において「副産物」とは、製品の製造、加工、修理若しくは販売、エネルギーの供給又は土木建築に関する工事(以下「建設工事」という。)に伴い副次的に得られた物品(放射性物質及びこれによって汚染された物を除く。)をいう。 3  この法律において「副産物の発生抑制等」とは、製品の製造又は加工に使用する原材料、部品その他の物品(エネルギーの使用の合理化に関する法律 (昭和五十四年法律第四十九号)第二条第二項 に規定する燃料を除く。以下「原材料等」という。)の使用の合理化により当該原材料等の使用に係る副産物の発生の抑制を行うこと及び当該原材料等の使用に係る副産物の全部又は一部を再生資源として利用することを促進することをいう。 4  この法律において「再生資源」とは、使用済物品等又は副産物のうち有用なものであって、原材料として利用することができるもの又はその可能性のあるものをいう。 5  この法律において「再生部品」とは、使用済物品等のうち有用なものであって、部品その他製品の一部として利用することができるもの又はその可能性のあるものをいう。 6  この法律において「再資源化」とは、使用済物品等のうち有用なものの全部又は一部を再生資源又は再生部品として利用することができる状態にすることをいう。 7  この法律において「特定省資源業種」とは、副産物の発生抑制等が技術的及び経済的に可能であり、かつ、副産物の発生抑制等を行うことが当該原材料等に係る資源及び当該副産物に係る再生資源の有効な利用を図る上で特に必要なものとして政令で定める原材料等の種類及びその使用に係る副産物の種類ごとに政令で定める業種をいう。 8  この法律において「特定再利用業種」とは、再生資源又は再生部品を利用することが技術的及び経済的に可能であり、かつ、これらを利用することが当該再生資源又は再生部品の有効な利用を図る上で特に必要なものとして政令で定める再生資源又は再生部品の種類ごとに政令で定める業種をいう。 9  この法律において「指定省資源化製品」とは、製品であって、それに係る原材料等の使用の合理化、その長期間の使用の促進その他の当該製品に係る使用済物品等の発生の抑制を促進することが当該製品に係る原材料等に係る資源の有効な利用を図る上で特に必要なものとして政令で定めるものをいう。 10  この法律において「指定再利用促進製品」とは、それが一度使用され、又は使用されずに収集され、若しくは廃棄された後その全部又は一部を再生資源又は再生部品として利用することを促進することが当該再生資源又は再生部品の有効な利用を図る上で特に必要なものとして政令で定める製品をいう。 11  
この法律において「指定表示製品」とは、それが一度使用され、又は使用されずに収集され、若しくは廃棄された後その全部又は一部を再生資源として利用することを目的として分別回収(類似の物品と分別して回収することをいう。以下同じ。)をするための表示をすることが当該再生資源の有効な利用を図る上で特に必要なものとして政令で定める製品をいう。 12  この法律において「指定再資源化製品」とは、製品(他の製品の部品として使用される製品を含む。)であって、それが一度使用され、又は使用されずに収集され、若しくは廃棄された後それを当該製品(他の製品の部品として使用される製品にあっては、当該製品又は当該他の製品)の製造、加工、修理若しくは販売の事業を行う者が自主回収(自ら回収し、又は他の者に委託して回収することをいう。以下同じ。)をすることが経済的に可能であって、その自主回収がされたものの全部又は一部の再資源化をすることが技術的及び経済的に可能であり、かつ、その再資源化をすることが当該再生資源又は再生部品の有効な利用を図る上で特に必要なものとして政令で定めるものをいう。 13  この法律において「指定副産物」とは、エネルギーの供給又は建設工事に係る副産物であって、その全部又は一部を再生資源として利用することを促進することが当該再生資源の有効な利用を図る上で特に必要なものとして政令で定める業種ごとに政令で定めるものをいう。    第二章 基本方針等 (基本方針) 第三条  主務大臣は、使用済物品等及び副産物の発生の抑制並びに再生資源及び再生部品の利用による資源の有効な利用(以下この章において「資源の有効な利用」という。)を総合的かつ計画的に推進するため、資源の有効な利用の促進に関する基本方針(以下「基本方針」という。)を定め、これを公表するものとする。 2  基本方針は、製品の種類及び副産物の種類ごとの原材料等の使用の合理化に関する目標、再生資源の種類及び再生部品の種類ごとのこれらの利用に関する目標、製品の種類ごとの長期間の使用の促進に関する事項、環境の保全に資するものとしての資源の有効な利用の促進の意義に関する知識の普及に係る事項その他資源の有効な利用の促進に関する事項について、資源の有効な利用に関する技術水準その他の事情を勘案して定めるものとする。 3  主務大臣は、前項の事情の変動のため必要があるときは、基本方針を改定するものとする。 4  第一項及び第二項の規定は、前項の規定による基本方針の改定に準用する。 (事業者等の責務) 第四条  工場若しくは事業場(建設工事に係るものを含む。以下同じ。)において事業を行う者及び物品の販売の事業を行う者(以下「事業者」という。)又は建設工事の発注者は、その事業又はその建設工事の発注を行うに際して原材料等の使用の合理化を行うとともに、再生資源及び再生部品を利用するよう努めなければならない。 2  事業者又は建設工事の発注者は、その事業に係る製品が長期間使用されることを促進するよう努めるとともに、その事業に係る製品が一度使用され、若しくは使用されずに収集され、若しくは廃棄された後その全部若しくは一部を再生資源若しくは再生部品として利用することを促進し、又はその事業若しくはその建設工事に係る副産物の全部若しくは一部を再生資源として利用することを促進するよう努めなければならない。 (消費者の責務) 第五条  消費者は、製品をなるべく長期間使用し、並びに再生資源及び再生部品の利用を促進するよう努めるとともに、国、地方公共団体及び事業者がこの法律の目的を達成するために行う措置に協力するものとする。 (資金の確保等) 第六条  国は、資源の有効な利用を促進するために必要な資金の確保その他の措置を講ずるよう努めなければならない。 2  国は、物品の調達に当たっては、再生資源及び再生部品の利用を促進するように必要な考慮を払うものとする。 (科学技術の振興) 第七条  国は、資源の有効な利用の促進に資する科学技術の振興を図るため、研究開発の推進及びその成果の普及等必要な措置を講ずるよう努めなければならない。 (国民の理解を深める等のための措置) 第八条  国は、教育活動、広報活動等を通じて、資源の有効な利用の促進に関する国民の理解を深めるとともに、その実施に関する国民の協力を求めるよう努めなければならない。 (地方公共団体の責務) 第九条  地方公共団体は、その区域の経済的社会的諸条件に応じて資源の有効な利用を促進するよう努めなければならない。    第三章 特定省資源業種 (特定省資源事業者の判断の基準となるべき事項) 第十条  
主務大臣は、特定省資源業種に係る原材料等の使用の合理化による副産物の発生の抑制及び当該副産物に係る再生資源の利用を促進するため、主務省令で、副産物の発生抑制等のために必要な計画的に取り組むべき措置その他の措置に関し、工場又は事業場において特定省資源業種に属する事業を行う者(以下「特定省資源事業者」という。)の判断の基準となるべき事項を定めるものとする。 2  前項に規定する判断の基準となるべき事項は、当該特定省資源業種に係る原材料等の使用の合理化による副産物の発生の抑制の状況、原材料等の使用の合理化による副産物の発生の抑制に関する技術水準その他の事情及び当該副産物に係る再生資源の利用の状況、再生資源の利用の促進に関する技術水準その他の事情を勘案して定めるものとし、これらの事情の変動に応じて必要な改定をするものとする。 3  主務大臣は、第一項に規定する判断の基準となるべき事項を定め、又は前項に規定する改定をしようとするときは、資源の再利用の促進に係る環境の保全の観点から、環境大臣に協議しなければならない。 (指導及び助言) 第十一条  主務大臣は、特定省資源事業者の副産物の発生抑制等の適確な実施を確保するため必要があると認めるときは、特定省資源事業者に対し、前条第一項に規定する判断の基準となるべき事項を勘案して、副産物の発生抑制等について必要な指導及び助言をすることができる。  (計画の作成) 第十二条  特定省資源事業者であって、その事業年度における当該特定省資源事業者の製造に係る政令で定める製品の生産量が政令で定める要件に該当するものは、主務省令で定めるところにより、第十条第一項に規定する判断の基準となるべき事項において定められた副産物の発生抑制等のために必要な計画的に取り組むべき措置の実施に関する計画を作成し、主務大臣に提出しなければならない。 (勧告及び命令) 第十三条  主務大臣は、特定省資源事業者であって、その製造に係る製品の生産量が政令で定める要件に該当するものの当該特定省資源業種に係る副産物の発生抑制等が第十条第一項に規定する判断の基準となるべき事項に照らして著しく不十分であると認めるときは、当該特定省資源事業者に対し、その判断の根拠を示して、当該特定省資源業種に係る副産物の発生抑制等に関し必要な措置をとるべき旨の勧告をすることができる。 2  主務大臣は、前項に規定する勧告を受けた特定省資源事業者がその勧告に従わなかったときは、その旨を公表することができる。 3  主務大臣は、第一項に規定する勧告を受けた特定省資源事業者が、前項の規定によりその勧告に従わなかった旨を公表された後において、なお、正当な理由がなくてその勧告に係る措置をとらなかった場合において、当該特定省資源業種に係る副産物の発生抑制等を著しく害すると認めるときは、審議会等(国家行政組織法 (昭和二十三年法律第百二十号)第八条 に規定する機関をいう。以下同じ。)で政令で定めるものの意見を聴いて、当該特定省資源事業者に対し、その勧告に係る措置をとるべきことを命ずることができる。 (環境大臣との関係) 第十四条  主務大臣は、特定省資源事業者の副産物の発生抑制等の適確な実施を確保するために必要な施策の実施に当たり、当該施策の実施が廃棄物の適正な処理に関する施策に関連する場合には、環境大臣と緊密に連絡して行うものとする。    第四章 特定再利用業種 (特定再利用事業者の判断の基準となるべき事項) 第十五条  主務大臣は、特定再利用業種に係る再生資源又は再生部品の利用を促進するため、主務省令で、工場又は事業場において特定再利用業種に属する事業を行う者(以下「特定再利用事業者」という。)の再生資源又は再生部品の利用に関する判断の基準となるべき事項を定めるものとする。 2  前項に規定する判断の基準となるべき事項は、当該特定再利用業種に係る再生資源又は再生部品の利用の状況、再生資源又は再生部品の利用に関する技術水準その他の事情を勘案して定めるものとし、これらの事情の変動に応じて必要な改定をするものとする。 3  第十条第三項の規定は、第一項に規定する判断の基準となるべき事項を定め、又は前項に規定する改定をしようとする場合に準用する。 (指導及び助言) 第十六条  主務大臣は、特定再利用事業者の再生資源又は再生部品の利用の適確な実施を確保するため必要があると認めるときは、特定再利用事業者に対し、前条第一項に規定する判断の基準となるべき事項を勘案して、再生資源又は再生部品の利用について必要な指導及び助言をすることができる。 (勧告及び命令) 第十七条  
主務大臣は、特定再利用事業者であって、その製造に係る製品の生産量又はその施工に係る建設工事の施工金額が政令で定める要件に該当するものの当該特定再利用業種に係る再生資源又は再生部品の利用が第十五条第一項に規定する判断の基準となるべき事項に照らして著しく不十分であると認めるときは、当該特定再利用事業者に対し、その判断の根拠を示して、当該特定再利用業種に係る再生資源又は再生部品の利用に関し必要な措置をとるべき旨の勧告をすることができる。 2  主務大臣は、前項に規定する勧告を受けた特定再利用事業者がその勧告に従わなかったときは、その旨を公表することができる。 3  主務大臣は、第一項に規定する勧告を受けた特定再利用事業者が、前項の規定によりその勧告に従わなかった旨を公表された後において、なお、正当な理由がなくてその勧告に係る措置をとらなかった場合において、当該特定再利用業種に係る再生資源又は再生部品の利用を著しく害すると認めるときは、審議会等で政令で定めるものの意見を聴いて、当該特定再利用事業者に対し、その勧告に係る措置をとるべきことを命ずることができる。    第五章 指定省資源化製品 (指定省資源化事業者の判断の基準となるべき事項) 第十八条  主務大臣は、指定省資源化製品に係る使用済物品等の発生の抑制を促進するため、主務省令で、指定省資源化製品の製造、加工、修理又は販売の事業を行う者(以下「指定省資源化事業者」という。)の使用済物品等の発生の抑制に関する判断の基準となるべき事項を定めるものとする。 2  前項に規定する判断の基準となるべき事項は、当該指定省資源化製品に係る使用済物品等の発生の抑制の状況、使用済物品等の発生の抑制に関する技術水準その他の事情を勘案して定めるものとし、これらの事情の変動に応じて必要な改定をするものとする。 3  第十条第三項の規定は、第一項に規定する判断の基準となるべき事項を定め、又は前項に規定する改定をしようとする場合に準用する。 (指導及び助言) 第十九条  主務大臣は、指定省資源化製品に係る使用済物品等の発生の抑制を促進するため必要があると認めるときは、指定省資源化事業者に対し、前条第一項に規定する判断の基準となるべき事項を勘案して、使用済物品等の発生の抑制について必要な指導及び助言をすることができる。 (勧告及び命令) 第二十条  主務大臣は、指定省資源化事業者であって、その製造又は販売に係る指定省資源化製品の生産量又は販売量が政令で定める要件に該当するものの当該指定省資源化製品に係る使用済物品等の発生の抑制が第十八条第一項に規定する判断の基準となるべき事項に照らして著しく不十分であると認めるときは、当該指定省資源化事業者に対し、その判断の根拠を示して、当該指定省資源化製品に係る使用済物品等の発生の抑制に関し必要な措置をとるべき旨の勧告をすることができる。 2  主務大臣は、前項に規定する勧告を受けた指定省資源化事業者がその勧告に従わなかったときは、その旨を公表することができる。 3  主務大臣は、第一項に規定する勧告を受けた指定省資源化事業者が、前項の規定によりその勧告に従わなかった旨を公表された後において、なお、正当な理由がなくてその勧告に係る措置をとらなかった場合において、当該指定省資源化製品に係る使用済物品等の発生の抑制を著しく害すると認めるときは、審議会等で政令で定めるものの意見を聴いて、当該指定省資源化事業者に対し、その勧告に係る措置をとるべきことを命ずることができる。    第六章 指定再利用促進製品 (指定再利用促進事業者の判断の基準となるべき事項) 第二十一条  主務大臣は、指定再利用促進製品に係る再生資源又は再生部品の利用を促進するため、主務省令で、指定再利用促進製品の製造、加工、修理又は販売の事業を行う者(以下「指定再利用促進事業者」という。)の再生資源又は再生部品の利用の促進に関する判断の基準となるべき事項を定めるものとする。 2  前項に規定する判断の基準となるべき事項は、当該指定再利用促進製品に係る再生資源又は再生部品の利用の状況、再生資源又は再生部品の利用の促進に関する技術水準その他の事情を勘案して定めるものとし、これらの事情の変動に応じて必要な改定をするものとする。 3  第十条第三項の規定は、第一項に規定する判断の基準となるべき事項を定め、又は前項に規定する改定をしようとする場合に準用する。 (指導及び助言) 第二十二条  
主務大臣は、指定再利用促進製品に係る再生資源又は再生部品の利用を促進するため必要があると認めるときは、指定再利用促進事業者に対し、前条第一項に規定する判断の基準となるべき事項を勘案して、再生資源又は再生部品の利用の促進について必要な指導及び助言をすることができる。 (勧告及び命令) 第二十三条  主務大臣は、指定再利用促進事業者であって、その製造又は販売に係る指定再利用促進製品の生産量又は販売量が政令で定める要件に該当するものの当該指定再利用促進製品に係る再生資源又は再生部品の利用の促進が第二十一条第一項に規定する判断の基準となるべき事項に照らして著しく不十分であると認めるときは、当該指定再利用促進事業者に対し、その判断の根拠を示して、当該指定再利用促進製品に係る再生資源又は再生部品の利用の促進に関し必要な措置をとるべき旨の勧告をすることができる。 2  主務大臣は、前項に規定する勧告を受けた指定再利用促進事業者がその勧告に従わなかったときは、その旨を公表することができる。 3  主務大臣は、第一項に規定する勧告を受けた指定再利用促進事業者が、前項の規定によりその勧告に従わなかった旨を公表された後において、なお、正当な理由がなくてその勧告に係る措置をとらなかった場合において、当該指定再利用促進製品に係る再生資源又は再生部品の利用の促進を著しく害すると認めるときは、審議会等で政令で定めるものの意見を聴いて、当該指定再利用促進事業者に対し、その勧告に係る措置をとるべきことを命ずることができる。    第七章 指定表示製品 (指定表示事業者の表示の標準となるべき事項) 第二十四条  主務大臣は、指定表示製品に係る再生資源の利用を促進するため、主務省令で、指定表示製品ごとに、次に掲げる事項につき表示の標準となるべき事項を定めるものとする。 一  材質又は成分その他の分別回収に関し表示すべき事項 二  表示の方法その他前号に掲げる事項の表示に際して指定表示製品の製造、加工又は販売の事業を行う者(その事業の用に供するために指定表示製品の製造を発注する事業者を含む。以下「指定表示事業者」という。)が遵守すべき事項 2  第十条第三項の規定は、前項に規定する表示の標準となるべき事項を定めようとする場合に準用する。 (勧告及び命令) 第二十五条  主務大臣は、前条第一項の主務省令で定める同項第一号に掲げる事項(以下「表示事項」という。)を表示せず、又は同項の主務省令で定める同項第二号に掲げる事項(以下「遵守事項」という。)を遵守しない指定表示事業者(中小企業基本法 (昭和三十八年法律第百五十四号)第二条第五項 に規定する小規模企業者その他の政令で定める者であって、その政令で定める収入金額が政令で定める要件に該当するものを除く。)があるときは、当該指定表示事業者に対し、表示事項を表示し、又は遵守事項を遵守すべき旨の勧告をすることができる。 2  主務大臣は、前項に規定する勧告を受けた指定表示事業者がその勧告に従わなかったときは、その旨を公表することができる。 3  主務大臣は、第一項に規定する勧告を受けた指定表示事業者が、前項の規定によりその勧告に従わなかった旨を公表された後において、なお、正当な理由がなくてその勧告に係る措置をとらなかった場合において、当該指定表示製品に係る再生資源の利用の促進を著しく害すると認めるときは、審議会等で政令で定めるものの意見を聴いて、当該指定表示事業者に対し、その勧告に係る措置をとるべきことを命ずることができる。    第八章 指定再資源化製品 (指定再資源化事業者の判断の基準となるべき事項) 第二十六条  主務大臣は、指定再資源化製品に係る再生資源又は再生部品の利用を促進するため、主務省令で、次に掲げる事項に関し、指定再資源化製品の製造、加工、修理又は販売の事業を行う者(指定再資源化製品を部品として使用する政令で定める製品の製造、加工、修理又は販売の事業を行う者を含む。以下「指定再資源化事業者」という。)の判断の基準となるべき事項を定めるものとする。 一  使用済指定再資源化製品(指定再資源化製品が一度使用され、又は使用されずに収集され、若しくは廃棄されたものをいう。以下同じ。)の自主回収の実効の確保その他実施方法に関する事項 二  使用済指定再資源化製品の再資源化の目標に関する事項及び実施方法に関する事項 三  使用済指定再資源化製品について市町村から引取りを求められた場合における引取りの実施、引取りの方法その他市町村との連携に関する事項 四  その他自主回収及び再資源化の実施に関し必要な事項 2  
前項に規定する判断の基準となるべき事項は、当該使用済指定再資源化製品に係る自主回収及び再資源化の状況、再資源化に関する技術水準、市町村が行う収集及び処分の状況その他の事情を勘案して定めるものとし、これらの事情の変動に応じて必要な改定をするものとする。 (使用済指定再資源化製品の自主回収及び再資源化の認定) 第二十七条  指定再資源化事業者は、単独に又は共同して、使用済指定再資源化製品の自主回収及び再資源化を実施しようとするときは、主務省令で定めるところにより、次の各号のいずれにも適合していることについて、主務大臣の認定を受けることができる。 一  当該自主回収及び再資源化が前条第一項に規定する判断の基準となるべき事項に適合するものであること。 二  当該自主回収及び再資源化に必要な行為を実施する者が主務省令で定める基準に適合するものであること。 三  前号に規定する者が主務省令で定める基準に適合する施設を有するものであること。 四  同一の業種に属する事業を営む二以上の指定再資源化事業者の申請に係る自主回収及び再資源化にあっては、次のイ及びロに適合するものであること。イ 当該二以上の指定再資源化事業者と当該業種に属する他の事業者との間の適正な競争が確保されるものであること。 ロ 一般消費者及び関連事業者の利益を不当に害するおそれがあるものでないこと。 2  前項の認定を受けようとする者は、主務省令で定めるところにより、次に掲げる事項を記載した申請書その他主務省令で定める書類を主務大臣に提出しなければならない。 一  氏名又は名称及び住所並びに法人にあっては、その代表者の氏名 二  自主回収及び再資源化の対象とする使用済指定再資源化製品の種類 三  自主回収及び再資源化の目標 四  自主回収及び再資源化に必要な行為を実施する者並びに当該自主回収及び再資源化に必要な行為の用に供する施設 五  自主回収及び再資源化の方法その他の内容に関する事項 3  主務大臣は、第一項の認定の申請に係る自主回収及び再資源化が同項各号のいずれにも適合していると認めるときは、同項の認定をするものとする。 (変更の認定) 第二十八条  前条第一項の認定を受けた指定再資源化事業者(以下「認定指定再資源化事業者」という。)は、同条第二項第二号から第五号までに掲げる事項の変更(主務省令で定める軽微な変更を除く。)をしようとするときは、主務大臣の認定を受けなければならない。 2  前条第二項及び第三項の規定は、前項の変更の認定に準用する。 (認定の取消し) 第二十九条  主務大臣は、第二十七条第一項の認定に係る自主回収及び再資源化が同項各号のいずれかに適合しなくなったと認めるときは、当該認定を取り消すことができる。 (公正取引委員会との関係) 第三十条  主務大臣は、同一の業種に属する事業を営む二以上の指定再資源化事業者の申請に係る自主回収及び再資源化について第二十七条第一項の規定による認定(第二十八条第一項の規定による変更の認定を含む。次項及び次条において同じ。)をしようとする場合において、必要があると認めるときは、当該申請に係る自主回収及び再資源化のための措置について、公正取引委員会に意見を求めることができる。 2  公正取引委員会は、必要があると認めるときは、主務大臣に対し、前項の規定により意見を求められた自主回収及び再資源化のための措置であって主務大臣が第二十七条第一項の規定により認定をしたものについて意見を述べることができる。 (廃棄物の処理及び清掃に関する法律 における配慮) 第三十一条  環境大臣は、廃棄物の処理及び清掃に関する法律 (昭和四十五年法律第百三十七号)の規定の適用に当たっては、第二十七条第一項の規定による認定に係る自主回収及び再資源化の円滑な実施が図られるよう適切な配慮をするものとする。 (指導及び助言) 第三十二条  主務大臣は、使用済指定再資源化製品の自主回収及び再資源化を促進するため必要があると認めるときは、指定再資源化事業者に対し、第二十六条第一項に規定する判断の基準となるべき事項を勘案して、使用済指定再資源化製品の自主回収及び再資源化について必要な指導及び助言をすることができる。 (勧告及び命令) 第三十三条  
主務大臣は、指定再資源化事業者であって、その製造若しくは販売に係る指定再資源化製品又は指定再資源化製品を部品として使用する第二十六条第一項の政令で定める製品の生産量若しくは販売量が政令で定める要件に該当するものの当該使用済指定再資源化製品の自主回収及び再資源化が同項に規定する判断の基準となるべき事項に照らして著しく不十分であると認めるときは、当該指定再資源化事業者に対し、その判断の根拠を示して、当該使用済指定再資源化製品の自主回収及び再資源化に関し必要な措置をとるべき旨の勧告をすることができる。 2  主務大臣は、前項に規定する勧告を受けた指定再資源化事業者がその勧告に従わなかったときは、その旨を公表することができる。 3  主務大臣は、第一項に規定する勧告を受けた指定再資源化事業者が、前項の規定によりその勧告に従わなかった旨を公表された後において、なお、正当な理由がなくてその勧告に係る措置をとらなかった場合において、当該使用済指定再資源化製品の自主回収及び再資源化を著しく害すると認めるときは、審議会等で政令で定めるものの意見を聴いて、当該指定再資源化事業者に対し、その勧告に係る措置をとるべきことを命ずることができる。    第九章 指定副産物 (指定副産物事業者の判断の基準となるべき事項) 第三十四条  主務大臣は、指定副産物に係る再生資源の利用を促進するため、主務省令で、事業場において指定副産物に係る業種に属する事業を行う者(以下「指定副産物事業者」という。)の再生資源の利用の促進に関する判断の基準となるべき事項を定めるものとする。 2  前項に規定する判断の基準となるべき事項は、当該指定副産物に係る再生資源の利用の状況、再生資源の利用の促進に関する技術水準その他の事情を勘案して定めるものとし、これらの事情の変動に応じて必要な改定をするものとする。 3  第十条第三項の規定は、第一項に規定する判断の基準となるべき事項を定め、又は前項に規定する改定をしようとする場合に準用する。 (指導及び助言) 第三十五条  主務大臣は、指定副産物に係る再生資源の利用を促進するため必要があると認めるときは、指定副産物事業者に対し、前条第一項に規定する判断の基準となるべき事項を勘案して、再生資源の利用の促進について必要な指導及び助言をすることができる。 (勧告及び命令) 第三十六条  主務大臣は、指定副産物事業者であって、その供給に係るエネルギーの供給量又はその施工に係る建設工事の施工金額が政令で定める要件に該当するものの当該指定副産物に係る再生資源の利用の促進が第三十四条第一項に規定する判断の基準となるべき事項に照らして著しく不十分であると認めるときは、当該指定副産物事業者に対し、その判断の根拠を示して、当該指定副産物に係る再生資源の利用の促進に関し必要な措置をとるべき旨の勧告をすることができる。 2  主務大臣は、前項に規定する勧告を受けた指定副産物事業者がその勧告に従わなかったときは、その旨を公表することができる。 3  主務大臣は、第一項に規定する勧告を受けた指定副産物事業者が、前項の規定によりその勧告に従わなかった旨を公表された後において、なお、正当な理由がなくてその勧告に係る措置をとらなかった場合において、当該指定副産物に係る再生資源の利用の促進を著しく害すると認めるときは、審議会等で政令で定めるものの意見を聴いて、当該指定副産物事業者に対し、その勧告に係る措置をとるべきことを命ずることができる。    第十章 雑則 (報告及び立入検査) 第三十七条  主務大臣は、第十三条及び第十七条の規定の施行に必要な限度において、政令で定めるところにより、特定省資源事業者又は特定再利用事業者に対し、その業務の状況に関し報告させ、又はその職員に、特定省資源事業者又は特定再利用事業者の事務所、工場、事業場又は倉庫に立ち入り、設備、帳簿、書類その他の物件を検査させることができる。 2  主務大臣は、第二十条、第二十三条及び第二十五条の規定の施行に必要な限度において、政令で定めるところにより、指定省資源化事業者、指定再利用促進事業者又は指定表示事業者に対し、指定省資源化製品、指定再利用促進製品又は指定表示製品に係る業務の状況に関し報告させ、又はその職員に、指定省資源化事業者、指定再利用促進事業者又は指定表示事業者の事務所、工場、事業場又は倉庫に立ち入り、指定省資源化製品、指定再利用促進製品又は指定表示製品、帳簿、書類その他の物件を検査させることができる。 3  
主務大臣は、第二十八条及び第二十九条の規定の施行に必要な限度において、認定指定再資源化事業者に対し、その認定に係る使用済指定再資源化製品の自主回収又は再資源化の実施の状況に関し報告させ、又はその職員に、認定指定再資源化事業者の事務所、工場、事業場又は倉庫に立ち入り、帳簿、書類その他の物件を検査させることができる。 4  主務大臣は、第三十三条の規定の施行に必要な限度において、政令で定めるところにより、指定再資源化事業者に対し、使用済指定再資源化製品の自主回収又は再資源化の実施の状況に関し報告させ、又はその職員に、指定再資源化事業者の事務所、工場、事業場又は倉庫に立ち入り、帳簿、書類その他の物件を検査させることができる。 5  主務大臣は、前条の規定の施行に必要な限度において、政令で定めるところにより、指定副産物事業者に対し、指定副産物に係る業務の状況に関し報告させ、又はその職員に、指定副産物事業者の事務所、事業場又は倉庫に立ち入り、指定副産物、帳簿、書類その他の物件を検査させることができる。 6  前各項の規定により立入検査をする職員は、その身分を示す証明書を携帯し、関係人に提示しなければならない。 7  第一項から第五項までの規定による立入検査の権限は、犯罪捜査のために認められたものと解釈してはならない。 (不服申立ての手続における意見の聴取) 第三十八条  第十三条第三項、第十七条第三項、第二十条第三項、第二十三条第三項、第二十五条第三項、第三十三条第三項又は第三十六条第三項の規定による命令についての審査請求又は異議申立てに対する裁決又は決定(却下の裁決又は決定を除く。)は、審査請求人又は異議申立人に対し、相当な期間をおいて予告をした上、公開による意見の聴取を行った後にしなければならない。 2  前項の予告においては、期日、場所及び事案の内容を示さなければならない。 3  第一項の意見の聴取に際しては、審査請求人又は異議申立人及び利害関係人に対し、当該事案について証拠を提示し、意見を述べる機会を与えなければならない。 (主務大臣等) 第三十九条  この法律における主務大臣は、次のとおりとする。 一  第三条第一項の規定による基本方針の策定及び公表並びに同条第三項の規定による基本方針の改定に関する事項については、経済産業大臣、国土交通大臣、農林水産大臣、財務大臣、厚生労働大臣及び環境大臣 二  第十条第一項の規定による判断の基準となるべき事項の策定、同条第二項に規定する当該事項の改定、第十一条に規定する指導及び助言、第十二条に規定する計画、第十三条第一項に規定する勧告、同条第二項の規定による公表、同条第三項の規定による命令並びに第三十七条第一項の規定による報告の徴収及び立入検査に関する事項については、当該特定省資源業種に属する事業を所管する大臣 三  第十五条第一項の規定による判断の基準となるべき事項の策定、同条第二項に規定する当該事項の改定、第十六条に規定する指導及び助言、第十七条第一項に規定する勧告、同条第二項の規定による公表、同条第三項の規定による命令並びに第三十七条第一項の規定による報告の徴収及び立入検査に関する事項については、当該特定再利用業種に属する事業を所管する大臣 四  第十八条第一項の規定による判断の基準となるべき事項の策定、同条第二項に規定する当該事項の改定、第十九条に規定する指導及び助言、第二十条第一項に規定する勧告、同条第二項の規定による公表、同条第三項の規定による命令、第二十一条第一項の規定による判断の基準となるべき事項の策定、同条第二項に規定する当該事項の改定、第二十二条に規定する指導及び助言、第二十三条第一項に規定する勧告、同条第二項の規定による公表、同条第三項の規定による命令、第二十四条第一項の規定による表示の標準となるべき事項の策定、第二十五条第一項に規定する勧告、同条第二項の規定による公表、同条第三項の規定による命令並びに第三十七条第二項の規定による報告の徴収及び立入検査に関する事項については、政令で定めるところにより、当該指定省資源化製品の製造、加工、修理若しくは販売の事業、当該指定再利用促進製品の製造、加工、修理若しくは販売の事業又は当該指定表示製品の製造、加工若しくは販売の事業(その事業の用に供するために指定表示製品の製造を発注する事業者にあっては、当該事業者の事業)を所管する大臣 五  
第二十六条第一項の規定による判断の基準となるべき事項の策定、同条第二項に規定する当該事項の改定、第二十七条第一項の規定による認定、第二十八条第一項の規定による変更の認定、第二十九条の規定による認定の取消し、第三十条の規定による意見、第三十二条に規定する指導及び助言、第三十三条第一項に規定する勧告、同条第二項の規定による公表、同条第三項の規定による命令並びに第三十七条第三項及び第四項の規定による報告の徴収及び立入検査に関する事項については、政令で定めるところにより、当該指定再資源化製品の製造、加工、修理若しくは販売の事業又は当該指定再資源化製品を部品として使用する第二十六条第一項の政令で定める製品の製造、加工、修理若しくは販売の事業を所管する大臣及び環境大臣 六  第三十四条第一項の規定による判断の基準となるべき事項の策定、同条第二項に規定する当該事項の改定、第三十五条に規定する指導及び助言、第三十六条第一項に規定する勧告、同条第二項の規定による公表、同条第三項の規定による命令並びに第三十七条第五項の規定による報告の徴収及び立入検査に関する事項については、政令で定めるところにより、当該指定副産物に係る業種に属する事業を所管する大臣 2  この法律における主務省令は、前項第二号又は第三号に定める事項に関しては、それぞれ同項第二号又は第三号に定める主務大臣の発する命令とし、同項第四号から第六号までに定める事項に関しては、政令で定めるところにより、それぞれ同項第四号から第六号までに定める主務大臣の発する命令とする。 3  この法律による権限は、政令で定めるところにより、地方支分部局の長に委任することができる。 第四十条  主務大臣は、この法律の目的を達成するため必要があると認めるときは、環境大臣に対し、廃棄物の処理に関し、再生資源又は再生部品の利用の促進について必要な協力を求めることができる。 (経過措置) 第四十一条  この法律の規定に基づき命令を制定し、又は改廃する場合においては、その命令で、その制定又は改廃に伴い合理的に必要と判断される範囲内において、所要の経過措置(罰則に関する経過措置を含む。)を定めることができる。    第十一章 罰則 第四十二条  第十三条第三項、第十七条第三項、第二十条第三項、第二十三条第三項、第二十五条第三項、第三十三条第三項又は第三十六条第三項の規定による命令に違反した者は、五十万円以下の罰金に処する。 第四十三条  次の各号のいずれかに該当する者は、二十万円以下の罰金に処する。 一 第十二条の規定による提出をしなかった者 二 第三十七条第一項から第五項までの規定による報告をせず、若しくは虚偽の報告をし、又はこれらの規定による検査を拒み、妨げ、若しくは忌避した者 第四十四条  法人の代表者又は法人若しくは人の代理人、使用人その他の従業者が、その法人又は人の業務に関し、前二条の違反行為をしたときは、行為者を罰するほか、その法人又は人に対して各本条の刑を科する。    附 則 抄 (施行期日) 第一条  この法律は、公布の日から起算して六月を超えない範囲内において政令で定める日から施行する。 (国の無利子貸付け等) 第二条  国は、当分の間、地方公共団体に対し、再生資源又は再生部品を利用することにより資源の有効な利用を促進するための施設を整備する事業で日本電信電話株式会社の株式の売払収入の活用による社会資本の整備の促進に関する特別措置法(昭和六十二年法律第八十六号)第二条第一項第二号に該当するものにつき、当該地方公共団体が自ら行う場合にあってはその要する費用に充てる資金の一部を、民間事業者が行う場合にあっては当該民間事業者に対し当該地方公共団体が補助する費用に充てる資金の一部を、予算の範囲内において、無利子で貸し付けることができる。 2  前項の国の貸付金の償還期間は、五年(二年以内の据置期間を含む。)以内で政令で定める期間とする。 3  前項に定めるもののほか、第一項の規定による貸付金の償還方法、償還期限の繰上げその他償還に関し必要な事項は、政令で定める。 4  国は、第一項の規定により地方公共団体に対し貸付けを行った場合には、当該貸付けの対象である事業について、当該貸付金に相当する金額の補助を行うものとし、当該補助については、当該貸付金の償還時において、当該貸付金の償還金に相当する金額を交付することにより行うものとする。 5  地方公共団体が、第一項の規定による貸付けを受けた無利子貸付金について、第二項及び第三項の規定に基づき定められる償還期限を繰り上げて償還を行った場合(政令で定める場合を除く。)における前項の規定の適用については、当該償還は、当該償還期限の到来時に行われたものとみなす。    
附 則 (平成五年一一月一二日法律第八九号) 抄 (施行期日) 第一条  この法律は、行政手続法(平成五年法律第八十八号)の施行の日から施行する。 (諮問等がされた不利益処分に関する経過措置) 第二条  この法律の施行前に法令に基づき審議会その他の合議制の機関に対し行政手続法第十三条に規定する聴聞又は弁明の機会の付与の手続その他の意見陳述のための手続に相当する手続を執るべきことの諮問その他の求めがされた場合においては、当該諮問その他の求めに係る不利益処分の手続に関しては、この法律による改正後の関係法律の規定にかかわらず、なお従前の例による。 (罰則に関する経過措置) 第十三条  この法律の施行前にした行為に対する罰則の適用については、なお従前の例による。 (聴聞に関する規定の整理に伴う経過措置) 第十四条  この法律の施行前に法律の規定により行われた聴聞、聴問若しくは聴聞会(不利益処分に係るものを除く。)又はこれらのための手続は、この法律による改正後の関係法律の相当規定により行われたものとみなす。 (政令への委任) 第十五条  附則第二条から前条までに定めるもののほか、この法律の施行に関して必要な経過措置は、政令で定める。    附 則 (平成一一年一二月二二日法律第一六〇号) 抄 (施行期日) 第一条  この法律(第二条及び第三条を除く。)は、平成十三年一月六日から施行する。    附 則 (平成一二年六月七日法律第一一三号) 抄 (施行期日) 第一条  この法律は、平成十三年四月一日から施行する。 (処分等の効力) 第二条  この法律による改正前の再生資源の利用の促進に関する法律の規定によってした処分、手続その他の行為は、この法律による改正後の資源の有効な利用の促進に関する法律の相当規定によってしたものとみなす。 (罰則に関する経過措置) 第三条  この法律の施行前にした行為に対する罰則の適用については、なお従前の例による。 (検討) 第四条  政府は、この法律の施行の日から七年以内に、この法律による改正後の資源の有効な利用の促進に関する法律の施行の状況について検討を加え、その結果に基づいて必要な措置を講ずるものとする。    附 則 (平成一四年二月八日法律第一号) 抄 (施行期日) 第一条  この法律は、公布の日から施行する。    附 則 (平成二五年五月三一日法律第二五号) 抄 (施行期日) 第一条  この法律は、公布の日から起算して一年三月を超えない範囲内において政令で定める日から施行する。
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <packages> <package id="Xamarin.Forms" version="2.0.0.6482" targetFramework="portable-net45+win+wpa81+wp80+MonoAndroid10+xamarinios10+MonoTouch10" /> </packages>
{ "pile_set_name": "Github" }
(: Bind $a to the JSON array [1, 2], then attempt a JSONiq Update
   "delete json" on $a("bar").  NOTE(review): applying the string
   selector "bar" to an array (arrays take integer positions) —
   presumably this is a negative/error test case; confirm against the
   test suite's expected outcome. :)
variable $a := [ 1, 2 ]; delete json $a("bar")
{ "pile_set_name": "Github" }
import { APP_URL } from '../../../../helpers'

// Cypress end-to-end spec for the registration page UI widgets.
// Doc-only pass: Cypress command chains are order-dependent, so the
// statements themselves are left untouched.
context('Registration Flow UI', () => {
  beforeEach(() => {
    // Start every test from a fresh registration page.
    cy.visit(APP_URL + '/auth/registration')
  })

  describe('use ui elements', () => {
    it('clicks the visibility toggle to show the password', () => {
      // Type a value, then verify the input's `type` attribute flips
      // between masked ('password') and plain ('text') on each click
      // of the visibility toggle, and back again.
      cy.get('input[name="password"]').type('some password')
      cy.get('input[name="password"]').should('have.prop', 'type', 'password')
      cy.get('.password-visibility-toggle').click()
      cy.get('input[name="password"]').should('have.prop', 'type', 'text')
      cy.get('.password-visibility-toggle').click()
      cy.get('input[name="password"]').should('have.prop', 'type', 'password')
    })

    it('clicks the log in link', () => {
      // Following the login link should land on /auth/login with a
      // non-empty query string (presumably a flow/request id —
      // TODO(review): confirm what parameter the app appends).
      cy.get('a[href*="auth/login"]').click()
      cy.location('pathname').should('include', 'auth/login')
      cy.location('search').should('not.be.empty', 'request')
    })
  })
})
{ "pile_set_name": "Github" }
;; Character-name table for the Unicode Katakana block (U+30A0..U+30FF),
;; registered with nxml-mode so named-character insertion/completion can
;; offer these names.  Generated-style data: each entry is (NAME CODEPOINT).
(nxml-define-char-name-set 'katakana
  '(("KATAKANA-HIRAGANA DOUBLE HYPHEN" #x30A0)
    ("KATAKANA LETTER SMALL A" #x30A1)
    ("KATAKANA LETTER A" #x30A2)
    ("KATAKANA LETTER SMALL I" #x30A3)
    ("KATAKANA LETTER I" #x30A4)
    ("KATAKANA LETTER SMALL U" #x30A5)
    ("KATAKANA LETTER U" #x30A6)
    ("KATAKANA LETTER SMALL E" #x30A7)
    ("KATAKANA LETTER E" #x30A8)
    ("KATAKANA LETTER SMALL O" #x30A9)
    ("KATAKANA LETTER O" #x30AA)
    ("KATAKANA LETTER KA" #x30AB)
    ("KATAKANA LETTER GA" #x30AC)
    ("KATAKANA LETTER KI" #x30AD)
    ("KATAKANA LETTER GI" #x30AE)
    ("KATAKANA LETTER KU" #x30AF)
    ("KATAKANA LETTER GU" #x30B0)
    ("KATAKANA LETTER KE" #x30B1)
    ("KATAKANA LETTER GE" #x30B2)
    ("KATAKANA LETTER KO" #x30B3)
    ("KATAKANA LETTER GO" #x30B4)
    ("KATAKANA LETTER SA" #x30B5)
    ("KATAKANA LETTER ZA" #x30B6)
    ("KATAKANA LETTER SI" #x30B7)
    ("KATAKANA LETTER ZI" #x30B8)
    ("KATAKANA LETTER SU" #x30B9)
    ("KATAKANA LETTER ZU" #x30BA)
    ("KATAKANA LETTER SE" #x30BB)
    ("KATAKANA LETTER ZE" #x30BC)
    ("KATAKANA LETTER SO" #x30BD)
    ("KATAKANA LETTER ZO" #x30BE)
    ("KATAKANA LETTER TA" #x30BF)
    ("KATAKANA LETTER DA" #x30C0)
    ("KATAKANA LETTER TI" #x30C1)
    ("KATAKANA LETTER DI" #x30C2)
    ("KATAKANA LETTER SMALL TU" #x30C3)
    ("KATAKANA LETTER TU" #x30C4)
    ("KATAKANA LETTER DU" #x30C5)
    ("KATAKANA LETTER TE" #x30C6)
    ("KATAKANA LETTER DE" #x30C7)
    ("KATAKANA LETTER TO" #x30C8)
    ("KATAKANA LETTER DO" #x30C9)
    ("KATAKANA LETTER NA" #x30CA)
    ("KATAKANA LETTER NI" #x30CB)
    ("KATAKANA LETTER NU" #x30CC)
    ("KATAKANA LETTER NE" #x30CD)
    ("KATAKANA LETTER NO" #x30CE)
    ("KATAKANA LETTER HA" #x30CF)
    ("KATAKANA LETTER BA" #x30D0)
    ("KATAKANA LETTER PA" #x30D1)
    ("KATAKANA LETTER HI" #x30D2)
    ("KATAKANA LETTER BI" #x30D3)
    ("KATAKANA LETTER PI" #x30D4)
    ("KATAKANA LETTER HU" #x30D5)
    ("KATAKANA LETTER BU" #x30D6)
    ("KATAKANA LETTER PU" #x30D7)
    ("KATAKANA LETTER HE" #x30D8)
    ("KATAKANA LETTER BE" #x30D9)
    ("KATAKANA LETTER PE" #x30DA)
    ("KATAKANA LETTER HO" #x30DB)
    ("KATAKANA LETTER BO" #x30DC)
    ("KATAKANA LETTER PO" #x30DD)
    ("KATAKANA LETTER MA" #x30DE)
    ("KATAKANA LETTER MI" #x30DF)
    ("KATAKANA LETTER MU" #x30E0)
    ("KATAKANA LETTER ME" #x30E1)
    ("KATAKANA LETTER MO" #x30E2)
    ("KATAKANA LETTER SMALL YA" #x30E3)
    ("KATAKANA LETTER YA" #x30E4)
    ("KATAKANA LETTER SMALL YU" #x30E5)
    ("KATAKANA LETTER YU" #x30E6)
    ("KATAKANA LETTER SMALL YO" #x30E7)
    ("KATAKANA LETTER YO" #x30E8)
    ("KATAKANA LETTER RA" #x30E9)
    ("KATAKANA LETTER RI" #x30EA)
    ("KATAKANA LETTER RU" #x30EB)
    ("KATAKANA LETTER RE" #x30EC)
    ("KATAKANA LETTER RO" #x30ED)
    ("KATAKANA LETTER SMALL WA" #x30EE)
    ("KATAKANA LETTER WA" #x30EF)
    ("KATAKANA LETTER WI" #x30F0)
    ("KATAKANA LETTER WE" #x30F1)
    ("KATAKANA LETTER WO" #x30F2)
    ("KATAKANA LETTER N" #x30F3)
    ("KATAKANA LETTER VU" #x30F4)
    ("KATAKANA LETTER SMALL KA" #x30F5)
    ("KATAKANA LETTER SMALL KE" #x30F6)
    ("KATAKANA LETTER VA" #x30F7)
    ("KATAKANA LETTER VI" #x30F8)
    ("KATAKANA LETTER VE" #x30F9)
    ("KATAKANA LETTER VO" #x30FA)
    ("KATAKANA MIDDLE DOT" #x30FB)
    ("KATAKANA-HIRAGANA PROLONGED SOUND MARK" #x30FC)
    ("KATAKANA ITERATION MARK" #x30FD)
    ("KATAKANA VOICED ITERATION MARK" #x30FE)
    ("KATAKANA DIGRAPH KOTO" #x30FF)
    ))
{ "pile_set_name": "Github" }
/* comedi/drivers/das1800.c Driver for Keitley das1700/das1800 series boards Copyright (C) 2000 Frank Mori Hess <fmhess@users.sourceforge.net> COMEDI - Linux Control and Measurement Device Interface Copyright (C) 2000 David A. Schleef <ds@schleef.org> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. ************************************************************************ */ /* Driver: das1800 Description: Keithley Metrabyte DAS1800 (& compatibles) Author: Frank Mori Hess <fmhess@users.sourceforge.net> Devices: [Keithley Metrabyte] DAS-1701ST (das-1701st), DAS-1701ST-DA (das-1701st-da), DAS-1701/AO (das-1701ao), DAS-1702ST (das-1702st), DAS-1702ST-DA (das-1702st-da), DAS-1702HR (das-1702hr), DAS-1702HR-DA (das-1702hr-da), DAS-1702/AO (das-1702ao), DAS-1801ST (das-1801st), DAS-1801ST-DA (das-1801st-da), DAS-1801HC (das-1801hc), DAS-1801AO (das-1801ao), DAS-1802ST (das-1802st), DAS-1802ST-DA (das-1802st-da), DAS-1802HR (das-1802hr), DAS-1802HR-DA (das-1802hr-da), DAS-1802HC (das-1802hc), DAS-1802AO (das-1802ao) Status: works The waveform analog output on the 'ao' cards is not supported. If you need it, send me (Frank Hess) an email. 
Configuration options: [0] - I/O port base address [1] - IRQ (optional, required for timed or externally triggered conversions) [2] - DMA0 (optional, requires irq) [3] - DMA1 (optional, requires irq and dma0) */ /* This driver supports the following Keithley boards: das-1701st das-1701st-da das-1701ao das-1702st das-1702st-da das-1702hr das-1702hr-da das-1702ao das-1801st das-1801st-da das-1801hc das-1801ao das-1802st das-1802st-da das-1802hr das-1802hr-da das-1802hc das-1802ao Options: [0] - base io address [1] - irq (optional, required for timed or externally triggered conversions) [2] - dma0 (optional, requires irq) [3] - dma1 (optional, requires irq and dma0) irq can be omitted, although the cmd interface will not work without it. analog input cmd triggers supported: start_src: TRIG_NOW | TRIG_EXT scan_begin_src: TRIG_FOLLOW | TRIG_TIMER | TRIG_EXT scan_end_src: TRIG_COUNT convert_src: TRIG_TIMER | TRIG_EXT (TRIG_EXT requires scan_begin_src == TRIG_FOLLOW) stop_src: TRIG_COUNT | TRIG_EXT | TRIG_NONE scan_begin_src triggers TRIG_TIMER and TRIG_EXT use the card's 'burst mode' which limits the valid conversion time to 64 microseconds (convert_arg <= 64000). This limitation does not apply if scan_begin_src is TRIG_FOLLOW. NOTES: Only the DAS-1801ST has been tested by me. Unipolar and bipolar ranges cannot be mixed in the channel/gain list. TODO: Make it automatically allocate irq and dma channels if they are not specified Add support for analog out on 'ao' cards read insn for analog out */ #include <linux/interrupt.h> #include "../comedidev.h" #include <linux/ioport.h> #include <asm/dma.h> #include "8253.h" #include "comedi_fc.h" /* misc. 
defines */ #define DAS1800_SIZE 16 /* uses 16 io addresses */ #define FIFO_SIZE 1024 /* 1024 sample fifo */ #define TIMER_BASE 200 /* 5 Mhz master clock */ #define UNIPOLAR 0x4 /* bit that determines whether input range is uni/bipolar */ #define DMA_BUF_SIZE 0x1ff00 /* size in bytes of dma buffers */ /* Registers for the das1800 */ #define DAS1800_FIFO 0x0 #define DAS1800_QRAM 0x0 #define DAS1800_DAC 0x0 #define DAS1800_SELECT 0x2 #define ADC 0x0 #define QRAM 0x1 #define DAC(a) (0x2 + a) #define DAS1800_DIGITAL 0x3 #define DAS1800_CONTROL_A 0x4 #define FFEN 0x1 #define CGEN 0x4 #define CGSL 0x8 #define TGEN 0x10 #define TGSL 0x20 #define ATEN 0x80 #define DAS1800_CONTROL_B 0x5 #define DMA_CH5 0x1 #define DMA_CH6 0x2 #define DMA_CH7 0x3 #define DMA_CH5_CH6 0x5 #define DMA_CH6_CH7 0x6 #define DMA_CH7_CH5 0x7 #define DMA_ENABLED 0x3 /* mask used to determine if dma is enabled */ #define DMA_DUAL 0x4 #define IRQ3 0x8 #define IRQ5 0x10 #define IRQ7 0x18 #define IRQ10 0x28 #define IRQ11 0x30 #define IRQ15 0x38 #define FIMD 0x40 #define DAS1800_CONTROL_C 0X6 #define IPCLK 0x1 #define XPCLK 0x3 #define BMDE 0x4 #define CMEN 0x8 #define UQEN 0x10 #define SD 0x40 #define UB 0x80 #define DAS1800_STATUS 0x7 /* bits that prevent interrupt status bits (and CVEN) from being cleared on write */ #define CLEAR_INTR_MASK (CVEN_MASK | 0x1f) #define INT 0x1 #define DMATC 0x2 #define CT0TC 0x8 #define OVF 0x10 #define FHF 0x20 #define FNE 0x40 #define CVEN_MASK 0x40 /* masks CVEN on write */ #define CVEN 0x80 #define DAS1800_BURST_LENGTH 0x8 #define DAS1800_BURST_RATE 0x9 #define DAS1800_QRAM_ADDRESS 0xa #define DAS1800_COUNTER 0xc #define IOBASE2 0x400 /* offset of additional ioports used on 'ao' cards */ enum { das1701st, das1701st_da, das1702st, das1702st_da, das1702hr, das1702hr_da, das1701ao, das1702ao, das1801st, das1801st_da, das1802st, das1802st_da, das1802hr, das1802hr_da, das1801hc, das1802hc, das1801ao, das1802ao }; static int das1800_attach(struct comedi_device *dev, struct 
comedi_devconfig *it); static int das1800_detach(struct comedi_device *dev); static int das1800_probe(struct comedi_device *dev); static int das1800_cancel(struct comedi_device *dev, struct comedi_subdevice *s); static irqreturn_t das1800_interrupt(int irq, void *d); static int das1800_ai_poll(struct comedi_device *dev, struct comedi_subdevice *s); static void das1800_ai_handler(struct comedi_device *dev); static void das1800_handle_dma(struct comedi_device *dev, struct comedi_subdevice *s, unsigned int status); static void das1800_flush_dma(struct comedi_device *dev, struct comedi_subdevice *s); static void das1800_flush_dma_channel(struct comedi_device *dev, struct comedi_subdevice *s, unsigned int channel, uint16_t * buffer); static void das1800_handle_fifo_half_full(struct comedi_device *dev, struct comedi_subdevice *s); static void das1800_handle_fifo_not_empty(struct comedi_device *dev, struct comedi_subdevice *s); static int das1800_ai_do_cmdtest(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_cmd *cmd); static int das1800_ai_do_cmd(struct comedi_device *dev, struct comedi_subdevice *s); static int das1800_ai_rinsn(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_insn *insn, unsigned int *data); static int das1800_ao_winsn(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_insn *insn, unsigned int *data); static int das1800_di_rbits(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_insn *insn, unsigned int *data); static int das1800_do_wbits(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_insn *insn, unsigned int *data); static int das1800_set_frequency(struct comedi_device *dev); static unsigned int burst_convert_arg(unsigned int convert_arg, int round_mode); static unsigned int suggest_transfer_size(struct comedi_cmd *cmd); /* analog input ranges */ static const struct comedi_lrange range_ai_das1801 = { 8, { RANGE(-5, 5), RANGE(-1, 1), RANGE(-0.1, 0.1), 
RANGE(-0.02, 0.02), RANGE(0, 5), RANGE(0, 1), RANGE(0, 0.1), RANGE(0, 0.02), } }; static const struct comedi_lrange range_ai_das1802 = { 8, { RANGE(-10, 10), RANGE(-5, 5), RANGE(-2.5, 2.5), RANGE(-1.25, 1.25), RANGE(0, 10), RANGE(0, 5), RANGE(0, 2.5), RANGE(0, 1.25), } }; struct das1800_board { const char *name; int ai_speed; /* max conversion period in nanoseconds */ int resolution; /* bits of ai resolution */ int qram_len; /* length of card's channel / gain queue */ int common; /* supports AREF_COMMON flag */ int do_n_chan; /* number of digital output channels */ int ao_ability; /* 0 == no analog out, 1 == basic analog out, 2 == waveform analog out */ int ao_n_chan; /* number of analog out channels */ const struct comedi_lrange *range_ai; /* available input ranges */ }; /* Warning: the maximum conversion speeds listed below are * not always achievable depending on board setup (see * user manual.) */ static const struct das1800_board das1800_boards[] = { { .name = "das-1701st", .ai_speed = 6250, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 0, .ao_n_chan = 0, .range_ai = &range_ai_das1801, }, { .name = "das-1701st-da", .ai_speed = 6250, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 1, .ao_n_chan = 4, .range_ai = &range_ai_das1801, }, { .name = "das-1702st", .ai_speed = 6250, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 0, .ao_n_chan = 0, .range_ai = &range_ai_das1802, }, { .name = "das-1702st-da", .ai_speed = 6250, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 1, .ao_n_chan = 4, .range_ai = &range_ai_das1802, }, { .name = "das-1702hr", .ai_speed = 20000, .resolution = 16, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 0, .ao_n_chan = 0, .range_ai = &range_ai_das1802, }, { .name = "das-1702hr-da", .ai_speed = 20000, .resolution = 16, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 1, .ao_n_chan = 2, .range_ai = 
&range_ai_das1802, }, { .name = "das-1701ao", .ai_speed = 6250, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 2, .ao_n_chan = 2, .range_ai = &range_ai_das1801, }, { .name = "das-1702ao", .ai_speed = 6250, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 2, .ao_n_chan = 2, .range_ai = &range_ai_das1802, }, { .name = "das-1801st", .ai_speed = 3000, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 0, .ao_n_chan = 0, .range_ai = &range_ai_das1801, }, { .name = "das-1801st-da", .ai_speed = 3000, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 0, .ao_n_chan = 4, .range_ai = &range_ai_das1801, }, { .name = "das-1802st", .ai_speed = 3000, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 0, .ao_n_chan = 0, .range_ai = &range_ai_das1802, }, { .name = "das-1802st-da", .ai_speed = 3000, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 1, .ao_n_chan = 4, .range_ai = &range_ai_das1802, }, { .name = "das-1802hr", .ai_speed = 10000, .resolution = 16, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 0, .ao_n_chan = 0, .range_ai = &range_ai_das1802, }, { .name = "das-1802hr-da", .ai_speed = 10000, .resolution = 16, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 1, .ao_n_chan = 2, .range_ai = &range_ai_das1802, }, { .name = "das-1801hc", .ai_speed = 3000, .resolution = 12, .qram_len = 64, .common = 0, .do_n_chan = 8, .ao_ability = 1, .ao_n_chan = 2, .range_ai = &range_ai_das1801, }, { .name = "das-1802hc", .ai_speed = 3000, .resolution = 12, .qram_len = 64, .common = 0, .do_n_chan = 8, .ao_ability = 1, .ao_n_chan = 2, .range_ai = &range_ai_das1802, }, { .name = "das-1801ao", .ai_speed = 3000, .resolution = 12, .qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 2, .ao_n_chan = 2, .range_ai = &range_ai_das1801, }, { .name = "das-1802ao", .ai_speed = 3000, .resolution = 12, 
.qram_len = 256, .common = 1, .do_n_chan = 4, .ao_ability = 2, .ao_n_chan = 2, .range_ai = &range_ai_das1802, }, }; /* * Useful for shorthand access to the particular board structure */ #define thisboard ((const struct das1800_board *)dev->board_ptr) struct das1800_private { volatile unsigned int count; /* number of data points left to be taken */ unsigned int divisor1; /* value to load into board's counter 1 for timed conversions */ unsigned int divisor2; /* value to load into board's counter 2 for timed conversions */ int do_bits; /* digital output bits */ int irq_dma_bits; /* bits for control register b */ /* dma bits for control register b, stored so that dma can be * turned on and off */ int dma_bits; unsigned int dma0; /* dma channels used */ unsigned int dma1; volatile unsigned int dma_current; /* dma channel currently in use */ uint16_t *ai_buf0; /* pointers to dma buffers */ uint16_t *ai_buf1; uint16_t *dma_current_buf; /* pointer to dma buffer currently being used */ unsigned int dma_transfer_size; /* size of transfer currently used, in bytes */ unsigned long iobase2; /* secondary io address used for analog out on 'ao' boards */ short ao_update_bits; /* remembers the last write to the 'update' dac */ }; #define devpriv ((struct das1800_private *)dev->private) /* analog out range for boards with basic analog out */ static const struct comedi_lrange range_ao_1 = { 1, { RANGE(-10, 10), } }; /* analog out range for 'ao' boards */ /* static const struct comedi_lrange range_ao_2 = { 2, { RANGE(-10, 10), RANGE(-5, 5), } }; */ static struct comedi_driver driver_das1800 = { .driver_name = "das1800", .module = THIS_MODULE, .attach = das1800_attach, .detach = das1800_detach, .num_names = ARRAY_SIZE(das1800_boards), .board_name = &das1800_boards[0].name, .offset = sizeof(struct das1800_board), }; /* * A convenient macro that defines init_module() and cleanup_module(), * as necessary. 
*/ COMEDI_INITCLEANUP(driver_das1800); static int das1800_init_dma(struct comedi_device *dev, unsigned int dma0, unsigned int dma1) { unsigned long flags; /* need an irq to do dma */ if (dev->irq && dma0) { /* encode dma0 and dma1 into 2 digit hexadecimal for switch */ switch ((dma0 & 0x7) | (dma1 << 4)) { case 0x5: /* dma0 == 5 */ devpriv->dma_bits |= DMA_CH5; break; case 0x6: /* dma0 == 6 */ devpriv->dma_bits |= DMA_CH6; break; case 0x7: /* dma0 == 7 */ devpriv->dma_bits |= DMA_CH7; break; case 0x65: /* dma0 == 5, dma1 == 6 */ devpriv->dma_bits |= DMA_CH5_CH6; break; case 0x76: /* dma0 == 6, dma1 == 7 */ devpriv->dma_bits |= DMA_CH6_CH7; break; case 0x57: /* dma0 == 7, dma1 == 5 */ devpriv->dma_bits |= DMA_CH7_CH5; break; default: printk(" only supports dma channels 5 through 7\n" " Dual dma only allows the following combinations:\n" " dma 5,6 / 6,7 / or 7,5\n"); return -EINVAL; break; } if (request_dma(dma0, driver_das1800.driver_name)) { printk(" failed to allocate dma channel %i\n", dma0); return -EINVAL; } devpriv->dma0 = dma0; devpriv->dma_current = dma0; if (dma1) { if (request_dma(dma1, driver_das1800.driver_name)) { printk(" failed to allocate dma channel %i\n", dma1); return -EINVAL; } devpriv->dma1 = dma1; } devpriv->ai_buf0 = kmalloc(DMA_BUF_SIZE, GFP_KERNEL | GFP_DMA); if (devpriv->ai_buf0 == NULL) return -ENOMEM; devpriv->dma_current_buf = devpriv->ai_buf0; if (dma1) { devpriv->ai_buf1 = kmalloc(DMA_BUF_SIZE, GFP_KERNEL | GFP_DMA); if (devpriv->ai_buf1 == NULL) return -ENOMEM; } flags = claim_dma_lock(); disable_dma(devpriv->dma0); set_dma_mode(devpriv->dma0, DMA_MODE_READ); if (dma1) { disable_dma(devpriv->dma1); set_dma_mode(devpriv->dma1, DMA_MODE_READ); } release_dma_lock(flags); } return 0; } static int das1800_attach(struct comedi_device *dev, struct comedi_devconfig *it) { struct comedi_subdevice *s; unsigned long iobase = it->options[0]; unsigned int irq = it->options[1]; unsigned int dma0 = it->options[2]; unsigned int dma1 = it->options[3]; 
unsigned long iobase2; int board; int retval; /* allocate and initialize dev->private */ if (alloc_private(dev, sizeof(struct das1800_private)) < 0) return -ENOMEM; printk("comedi%d: %s: io 0x%lx", dev->minor, driver_das1800.driver_name, iobase); if (irq) { printk(", irq %u", irq); if (dma0) { printk(", dma %u", dma0); if (dma1) printk(" and %u", dma1); } } printk("\n"); if (iobase == 0) { printk(" io base address required\n"); return -EINVAL; } /* check if io addresses are available */ if (!request_region(iobase, DAS1800_SIZE, driver_das1800.driver_name)) { printk (" I/O port conflict: failed to allocate ports 0x%lx to 0x%lx\n", iobase, iobase + DAS1800_SIZE - 1); return -EIO; } dev->iobase = iobase; board = das1800_probe(dev); if (board < 0) { printk(" unable to determine board type\n"); return -ENODEV; } dev->board_ptr = das1800_boards + board; dev->board_name = thisboard->name; /* if it is an 'ao' board with fancy analog out then we need extra io ports */ if (thisboard->ao_ability == 2) { iobase2 = iobase + IOBASE2; if (!request_region(iobase2, DAS1800_SIZE, driver_das1800.driver_name)) { printk (" I/O port conflict: failed to allocate ports 0x%lx to 0x%lx\n", iobase2, iobase2 + DAS1800_SIZE - 1); return -EIO; } devpriv->iobase2 = iobase2; } /* grab our IRQ */ if (irq) { if (request_irq(irq, das1800_interrupt, 0, driver_das1800.driver_name, dev)) { printk(" unable to allocate irq %u\n", irq); return -EINVAL; } } dev->irq = irq; /* set bits that tell card which irq to use */ switch (irq) { case 0: break; case 3: devpriv->irq_dma_bits |= 0x8; break; case 5: devpriv->irq_dma_bits |= 0x10; break; case 7: devpriv->irq_dma_bits |= 0x18; break; case 10: devpriv->irq_dma_bits |= 0x28; break; case 11: devpriv->irq_dma_bits |= 0x30; break; case 15: devpriv->irq_dma_bits |= 0x38; break; default: printk(" irq out of range\n"); return -EINVAL; break; } retval = das1800_init_dma(dev, dma0, dma1); if (retval < 0) return retval; if (devpriv->ai_buf0 == NULL) { devpriv->ai_buf0 
= kmalloc(FIFO_SIZE * sizeof(uint16_t), GFP_KERNEL); if (devpriv->ai_buf0 == NULL) return -ENOMEM; } if (alloc_subdevices(dev, 4) < 0) return -ENOMEM; /* analog input subdevice */ s = dev->subdevices + 0; dev->read_subdev = s; s->type = COMEDI_SUBD_AI; s->subdev_flags = SDF_READABLE | SDF_DIFF | SDF_GROUND | SDF_CMD_READ; if (thisboard->common) s->subdev_flags |= SDF_COMMON; s->n_chan = thisboard->qram_len; s->len_chanlist = thisboard->qram_len; s->maxdata = (1 << thisboard->resolution) - 1; s->range_table = thisboard->range_ai; s->do_cmd = das1800_ai_do_cmd; s->do_cmdtest = das1800_ai_do_cmdtest; s->insn_read = das1800_ai_rinsn; s->poll = das1800_ai_poll; s->cancel = das1800_cancel; /* analog out */ s = dev->subdevices + 1; if (thisboard->ao_ability == 1) { s->type = COMEDI_SUBD_AO; s->subdev_flags = SDF_WRITABLE; s->n_chan = thisboard->ao_n_chan; s->maxdata = (1 << thisboard->resolution) - 1; s->range_table = &range_ao_1; s->insn_write = das1800_ao_winsn; } else { s->type = COMEDI_SUBD_UNUSED; } /* di */ s = dev->subdevices + 2; s->type = COMEDI_SUBD_DI; s->subdev_flags = SDF_READABLE; s->n_chan = 4; s->maxdata = 1; s->range_table = &range_digital; s->insn_bits = das1800_di_rbits; /* do */ s = dev->subdevices + 3; s->type = COMEDI_SUBD_DO; s->subdev_flags = SDF_WRITABLE | SDF_READABLE; s->n_chan = thisboard->do_n_chan; s->maxdata = 1; s->range_table = &range_digital; s->insn_bits = das1800_do_wbits; das1800_cancel(dev, dev->read_subdev); /* initialize digital out channels */ outb(devpriv->do_bits, dev->iobase + DAS1800_DIGITAL); /* initialize analog out channels */ if (thisboard->ao_ability == 1) { /* select 'update' dac channel for baseAddress + 0x0 */ outb(DAC(thisboard->ao_n_chan - 1), dev->iobase + DAS1800_SELECT); outw(devpriv->ao_update_bits, dev->iobase + DAS1800_DAC); } return 0; }; static int das1800_detach(struct comedi_device *dev) { /* only free stuff if it has been allocated by _attach */ if (dev->iobase) release_region(dev->iobase, DAS1800_SIZE); if 
(dev->irq) free_irq(dev->irq, dev); if (dev->private) { if (devpriv->iobase2) release_region(devpriv->iobase2, DAS1800_SIZE); if (devpriv->dma0) free_dma(devpriv->dma0); if (devpriv->dma1) free_dma(devpriv->dma1); if (devpriv->ai_buf0) kfree(devpriv->ai_buf0); if (devpriv->ai_buf1) kfree(devpriv->ai_buf1); } printk("comedi%d: %s: remove\n", dev->minor, driver_das1800.driver_name); return 0; }; /* probes and checks das-1800 series board type */ static int das1800_probe(struct comedi_device *dev) { int id; int board; id = (inb(dev->iobase + DAS1800_DIGITAL) >> 4) & 0xf; /* get id bits */ board = ((struct das1800_board *)dev->board_ptr) - das1800_boards; switch (id) { case 0x3: if (board == das1801st_da || board == das1802st_da || board == das1701st_da || board == das1702st_da) { printk(" Board model: %s\n", das1800_boards[board].name); return board; } printk (" Board model (probed, not recommended): das-1800st-da series\n"); return das1801st; break; case 0x4: if (board == das1802hr_da || board == das1702hr_da) { printk(" Board model: %s\n", das1800_boards[board].name); return board; } printk (" Board model (probed, not recommended): das-1802hr-da\n"); return das1802hr; break; case 0x5: if (board == das1801ao || board == das1802ao || board == das1701ao || board == das1702ao) { printk(" Board model: %s\n", das1800_boards[board].name); return board; } printk (" Board model (probed, not recommended): das-1800ao series\n"); return das1801ao; break; case 0x6: if (board == das1802hr || board == das1702hr) { printk(" Board model: %s\n", das1800_boards[board].name); return board; } printk(" Board model (probed, not recommended): das-1802hr\n"); return das1802hr; break; case 0x7: if (board == das1801st || board == das1802st || board == das1701st || board == das1702st) { printk(" Board model: %s\n", das1800_boards[board].name); return board; } printk (" Board model (probed, not recommended): das-1800st series\n"); return das1801st; break; case 0x8: if (board == das1801hc || 
board == das1802hc) { printk(" Board model: %s\n", das1800_boards[board].name); return board; } printk (" Board model (probed, not recommended): das-1800hc series\n"); return das1801hc; break; default: printk (" Board model: probe returned 0x%x (unknown, please report)\n", id); return board; break; } return -1; } static int das1800_ai_poll(struct comedi_device *dev, struct comedi_subdevice *s) { unsigned long flags; /* prevent race with interrupt handler */ spin_lock_irqsave(&dev->spinlock, flags); das1800_ai_handler(dev); spin_unlock_irqrestore(&dev->spinlock, flags); return s->async->buf_write_count - s->async->buf_read_count; } static irqreturn_t das1800_interrupt(int irq, void *d) { struct comedi_device *dev = d; unsigned int status; if (dev->attached == 0) { comedi_error(dev, "premature interrupt"); return IRQ_HANDLED; } /* Prevent race with das1800_ai_poll() on multi processor systems. * Also protects indirect addressing in das1800_ai_handler */ spin_lock(&dev->spinlock); status = inb(dev->iobase + DAS1800_STATUS); /* if interrupt was not caused by das-1800 */ if (!(status & INT)) { spin_unlock(&dev->spinlock); return IRQ_NONE; } /* clear the interrupt status bit INT */ outb(CLEAR_INTR_MASK & ~INT, dev->iobase + DAS1800_STATUS); /* handle interrupt */ das1800_ai_handler(dev); spin_unlock(&dev->spinlock); return IRQ_HANDLED; } /* the guts of the interrupt handler, that is shared with das1800_ai_poll */ static void das1800_ai_handler(struct comedi_device *dev) { struct comedi_subdevice *s = dev->subdevices + 0; /* analog input subdevice */ struct comedi_async *async = s->async; struct comedi_cmd *cmd = &async->cmd; unsigned int status = inb(dev->iobase + DAS1800_STATUS); async->events = 0; /* select adc for base address + 0 */ outb(ADC, dev->iobase + DAS1800_SELECT); /* dma buffer full */ if (devpriv->irq_dma_bits & DMA_ENABLED) { /* look for data from dma transfer even if dma terminal count hasn't happened yet */ das1800_handle_dma(dev, s, status); } else if 
(status & FHF) { /* if fifo half full */ das1800_handle_fifo_half_full(dev, s); } else if (status & FNE) { /* if fifo not empty */ das1800_handle_fifo_not_empty(dev, s); } async->events |= COMEDI_CB_BLOCK; /* if the card's fifo has overflowed */ if (status & OVF) { /* clear OVF interrupt bit */ outb(CLEAR_INTR_MASK & ~OVF, dev->iobase + DAS1800_STATUS); comedi_error(dev, "DAS1800 FIFO overflow"); das1800_cancel(dev, s); async->events |= COMEDI_CB_ERROR | COMEDI_CB_EOA; comedi_event(dev, s); return; } /* stop taking data if appropriate */ /* stop_src TRIG_EXT */ if (status & CT0TC) { /* clear CT0TC interrupt bit */ outb(CLEAR_INTR_MASK & ~CT0TC, dev->iobase + DAS1800_STATUS); /* make sure we get all remaining data from board before quitting */ if (devpriv->irq_dma_bits & DMA_ENABLED) das1800_flush_dma(dev, s); else das1800_handle_fifo_not_empty(dev, s); das1800_cancel(dev, s); /* disable hardware conversions */ async->events |= COMEDI_CB_EOA; } else if (cmd->stop_src == TRIG_COUNT && devpriv->count == 0) { /* stop_src TRIG_COUNT */ das1800_cancel(dev, s); /* disable hardware conversions */ async->events |= COMEDI_CB_EOA; } comedi_event(dev, s); return; } static void das1800_handle_dma(struct comedi_device *dev, struct comedi_subdevice *s, unsigned int status) { unsigned long flags; const int dual_dma = devpriv->irq_dma_bits & DMA_DUAL; flags = claim_dma_lock(); das1800_flush_dma_channel(dev, s, devpriv->dma_current, devpriv->dma_current_buf); /* re-enable dma channel */ set_dma_addr(devpriv->dma_current, virt_to_bus(devpriv->dma_current_buf)); set_dma_count(devpriv->dma_current, devpriv->dma_transfer_size); enable_dma(devpriv->dma_current); release_dma_lock(flags); if (status & DMATC) { /* clear DMATC interrupt bit */ outb(CLEAR_INTR_MASK & ~DMATC, dev->iobase + DAS1800_STATUS); /* switch dma channels for next time, if appropriate */ if (dual_dma) { /* read data from the other channel next time */ if (devpriv->dma_current == devpriv->dma0) { devpriv->dma_current = 
devpriv->dma1; devpriv->dma_current_buf = devpriv->ai_buf1; } else { devpriv->dma_current = devpriv->dma0; devpriv->dma_current_buf = devpriv->ai_buf0; } } } return; } static inline uint16_t munge_bipolar_sample(const struct comedi_device *dev, uint16_t sample) { sample += 1 << (thisboard->resolution - 1); return sample; } static void munge_data(struct comedi_device *dev, uint16_t * array, unsigned int num_elements) { unsigned int i; int unipolar; /* see if card is using a unipolar or bipolar range so we can munge data correctly */ unipolar = inb(dev->iobase + DAS1800_CONTROL_C) & UB; /* convert to unsigned type if we are in a bipolar mode */ if (!unipolar) { for (i = 0; i < num_elements; i++) { array[i] = munge_bipolar_sample(dev, array[i]); } } } /* Utility function used by das1800_flush_dma() and das1800_handle_dma(). * Assumes dma lock is held */ static void das1800_flush_dma_channel(struct comedi_device *dev, struct comedi_subdevice *s, unsigned int channel, uint16_t * buffer) { unsigned int num_bytes, num_samples; struct comedi_cmd *cmd = &s->async->cmd; disable_dma(channel); /* clear flip-flop to make sure 2-byte registers * get set correctly */ clear_dma_ff(channel); /* figure out how many points to read */ num_bytes = devpriv->dma_transfer_size - get_dma_residue(channel); num_samples = num_bytes / sizeof(short); /* if we only need some of the points */ if (cmd->stop_src == TRIG_COUNT && devpriv->count < num_samples) num_samples = devpriv->count; munge_data(dev, buffer, num_samples); cfc_write_array_to_buffer(s, buffer, num_bytes); if (s->async->cmd.stop_src == TRIG_COUNT) devpriv->count -= num_samples; return; } /* flushes remaining data from board when external trigger has stopped aquisition * and we are using dma transfers */ static void das1800_flush_dma(struct comedi_device *dev, struct comedi_subdevice *s) { unsigned long flags; const int dual_dma = devpriv->irq_dma_bits & DMA_DUAL; flags = claim_dma_lock(); das1800_flush_dma_channel(dev, s, 
devpriv->dma_current, devpriv->dma_current_buf); if (dual_dma) { /* switch to other channel and flush it */ if (devpriv->dma_current == devpriv->dma0) { devpriv->dma_current = devpriv->dma1; devpriv->dma_current_buf = devpriv->ai_buf1; } else { devpriv->dma_current = devpriv->dma0; devpriv->dma_current_buf = devpriv->ai_buf0; } das1800_flush_dma_channel(dev, s, devpriv->dma_current, devpriv->dma_current_buf); } release_dma_lock(flags); /* get any remaining samples in fifo */ das1800_handle_fifo_not_empty(dev, s); return; } static void das1800_handle_fifo_half_full(struct comedi_device *dev, struct comedi_subdevice *s) { int numPoints = 0; /* number of points to read */ struct comedi_cmd *cmd = &s->async->cmd; numPoints = FIFO_SIZE / 2; /* if we only need some of the points */ if (cmd->stop_src == TRIG_COUNT && devpriv->count < numPoints) numPoints = devpriv->count; insw(dev->iobase + DAS1800_FIFO, devpriv->ai_buf0, numPoints); munge_data(dev, devpriv->ai_buf0, numPoints); cfc_write_array_to_buffer(s, devpriv->ai_buf0, numPoints * sizeof(devpriv->ai_buf0[0])); if (cmd->stop_src == TRIG_COUNT) devpriv->count -= numPoints; return; } static void das1800_handle_fifo_not_empty(struct comedi_device *dev, struct comedi_subdevice *s) { short dpnt; int unipolar; struct comedi_cmd *cmd = &s->async->cmd; unipolar = inb(dev->iobase + DAS1800_CONTROL_C) & UB; while (inb(dev->iobase + DAS1800_STATUS) & FNE) { if (cmd->stop_src == TRIG_COUNT && devpriv->count == 0) break; dpnt = inw(dev->iobase + DAS1800_FIFO); /* convert to unsigned type if we are in a bipolar mode */ if (!unipolar) ; dpnt = munge_bipolar_sample(dev, dpnt); cfc_write_to_buffer(s, dpnt); if (cmd->stop_src == TRIG_COUNT) devpriv->count--; } return; } static int das1800_cancel(struct comedi_device *dev, struct comedi_subdevice *s) { outb(0x0, dev->iobase + DAS1800_STATUS); /* disable conversions */ outb(0x0, dev->iobase + DAS1800_CONTROL_B); /* disable interrupts and dma */ outb(0x0, dev->iobase + 
DAS1800_CONTROL_A); /* disable and clear fifo and stop triggering */ if (devpriv->dma0) disable_dma(devpriv->dma0); if (devpriv->dma1) disable_dma(devpriv->dma1); return 0; } /* test analog input cmd */ static int das1800_ai_do_cmdtest(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_cmd *cmd) { int err = 0; int tmp; unsigned int tmp_arg; int i; int unipolar; /* step 1: make sure trigger sources are trivially valid */ tmp = cmd->start_src; cmd->start_src &= TRIG_NOW | TRIG_EXT; if (!cmd->start_src || tmp != cmd->start_src) err++; tmp = cmd->scan_begin_src; cmd->scan_begin_src &= TRIG_FOLLOW | TRIG_TIMER | TRIG_EXT; if (!cmd->scan_begin_src || tmp != cmd->scan_begin_src) err++; tmp = cmd->convert_src; cmd->convert_src &= TRIG_TIMER | TRIG_EXT; if (!cmd->convert_src || tmp != cmd->convert_src) err++; tmp = cmd->scan_end_src; cmd->scan_end_src &= TRIG_COUNT; if (!cmd->scan_end_src || tmp != cmd->scan_end_src) err++; tmp = cmd->stop_src; cmd->stop_src &= TRIG_COUNT | TRIG_EXT | TRIG_NONE; if (!cmd->stop_src || tmp != cmd->stop_src) err++; if (err) return 1; /* step 2: make sure trigger sources are unique and mutually compatible */ /* uniqueness check */ if (cmd->start_src != TRIG_NOW && cmd->start_src != TRIG_EXT) err++; if (cmd->scan_begin_src != TRIG_FOLLOW && cmd->scan_begin_src != TRIG_TIMER && cmd->scan_begin_src != TRIG_EXT) err++; if (cmd->convert_src != TRIG_TIMER && cmd->convert_src != TRIG_EXT) err++; if (cmd->stop_src != TRIG_COUNT && cmd->stop_src != TRIG_NONE && cmd->stop_src != TRIG_EXT) err++; /* compatibility check */ if (cmd->scan_begin_src != TRIG_FOLLOW && cmd->convert_src != TRIG_TIMER) err++; if (err) return 2; /* step 3: make sure arguments are trivially compatible */ if (cmd->start_arg != 0) { cmd->start_arg = 0; err++; } if (cmd->convert_src == TRIG_TIMER) { if (cmd->convert_arg < thisboard->ai_speed) { cmd->convert_arg = thisboard->ai_speed; err++; } } if (!cmd->chanlist_len) { cmd->chanlist_len = 1; err++; } if 
(cmd->scan_end_arg != cmd->chanlist_len) { cmd->scan_end_arg = cmd->chanlist_len; err++; } switch (cmd->stop_src) { case TRIG_COUNT: if (!cmd->stop_arg) { cmd->stop_arg = 1; err++; } break; case TRIG_NONE: if (cmd->stop_arg != 0) { cmd->stop_arg = 0; err++; } break; default: break; } if (err) return 3; /* step 4: fix up any arguments */ if (cmd->convert_src == TRIG_TIMER) { /* if we are not in burst mode */ if (cmd->scan_begin_src == TRIG_FOLLOW) { tmp_arg = cmd->convert_arg; /* calculate counter values that give desired timing */ i8253_cascade_ns_to_timer_2div(TIMER_BASE, &(devpriv->divisor1), &(devpriv->divisor2), &(cmd->convert_arg), cmd-> flags & TRIG_ROUND_MASK); if (tmp_arg != cmd->convert_arg) err++; } /* if we are in burst mode */ else { /* check that convert_arg is compatible */ tmp_arg = cmd->convert_arg; cmd->convert_arg = burst_convert_arg(cmd->convert_arg, cmd->flags & TRIG_ROUND_MASK); if (tmp_arg != cmd->convert_arg) err++; if (cmd->scan_begin_src == TRIG_TIMER) { /* if scans are timed faster than conversion rate allows */ if (cmd->convert_arg * cmd->chanlist_len > cmd->scan_begin_arg) { cmd->scan_begin_arg = cmd->convert_arg * cmd->chanlist_len; err++; } tmp_arg = cmd->scan_begin_arg; /* calculate counter values that give desired timing */ i8253_cascade_ns_to_timer_2div(TIMER_BASE, &(devpriv-> divisor1), &(devpriv-> divisor2), &(cmd-> scan_begin_arg), cmd-> flags & TRIG_ROUND_MASK); if (tmp_arg != cmd->scan_begin_arg) err++; } } } if (err) return 4; /* make sure user is not trying to mix unipolar and bipolar ranges */ if (cmd->chanlist) { unipolar = CR_RANGE(cmd->chanlist[0]) & UNIPOLAR; for (i = 1; i < cmd->chanlist_len; i++) { if (unipolar != (CR_RANGE(cmd->chanlist[i]) & UNIPOLAR)) { comedi_error(dev, "unipolar and bipolar ranges cannot be mixed in the chanlist"); err++; break; } } } if (err) return 5; return 0; } /* analog input cmd interface */ /* first, some utility functions used in the main ai_do_cmd() */ /* returns appropriate bits for 
control register a, depending on command */ static int control_a_bits(struct comedi_cmd cmd) { int control_a; control_a = FFEN; /* enable fifo */ if (cmd.stop_src == TRIG_EXT) { control_a |= ATEN; } switch (cmd.start_src) { case TRIG_EXT: control_a |= TGEN | CGSL; break; case TRIG_NOW: control_a |= CGEN; break; default: break; } return control_a; } /* returns appropriate bits for control register c, depending on command */ static int control_c_bits(struct comedi_cmd cmd) { int control_c; int aref; /* set clock source to internal or external, select analog reference, * select unipolar / bipolar */ aref = CR_AREF(cmd.chanlist[0]); control_c = UQEN; /* enable upper qram addresses */ if (aref != AREF_DIFF) control_c |= SD; if (aref == AREF_COMMON) control_c |= CMEN; /* if a unipolar range was selected */ if (CR_RANGE(cmd.chanlist[0]) & UNIPOLAR) control_c |= UB; switch (cmd.scan_begin_src) { case TRIG_FOLLOW: /* not in burst mode */ switch (cmd.convert_src) { case TRIG_TIMER: /* trig on cascaded counters */ control_c |= IPCLK; break; case TRIG_EXT: /* trig on falling edge of external trigger */ control_c |= XPCLK; break; default: break; } break; case TRIG_TIMER: /* burst mode with internal pacer clock */ control_c |= BMDE | IPCLK; break; case TRIG_EXT: /* burst mode with external trigger */ control_c |= BMDE | XPCLK; break; default: break; } return control_c; } /* sets up counters */ static int setup_counters(struct comedi_device *dev, struct comedi_cmd cmd) { /* setup cascaded counters for conversion/scan frequency */ switch (cmd.scan_begin_src) { case TRIG_FOLLOW: /* not in burst mode */ if (cmd.convert_src == TRIG_TIMER) { /* set conversion frequency */ i8253_cascade_ns_to_timer_2div(TIMER_BASE, &(devpriv->divisor1), &(devpriv->divisor2), &(cmd.convert_arg), cmd. 
flags & TRIG_ROUND_MASK); if (das1800_set_frequency(dev) < 0) { return -1; } } break; case TRIG_TIMER: /* in burst mode */ /* set scan frequency */ i8253_cascade_ns_to_timer_2div(TIMER_BASE, &(devpriv->divisor1), &(devpriv->divisor2), &(cmd.scan_begin_arg), cmd.flags & TRIG_ROUND_MASK); if (das1800_set_frequency(dev) < 0) { return -1; } break; default: break; } /* setup counter 0 for 'about triggering' */ if (cmd.stop_src == TRIG_EXT) { /* load counter 0 in mode 0 */ i8254_load(dev->iobase + DAS1800_COUNTER, 0, 0, 1, 0); } return 0; } /* sets up dma */ static void setup_dma(struct comedi_device *dev, struct comedi_cmd cmd) { unsigned long lock_flags; const int dual_dma = devpriv->irq_dma_bits & DMA_DUAL; if ((devpriv->irq_dma_bits & DMA_ENABLED) == 0) return; /* determine a reasonable dma transfer size */ devpriv->dma_transfer_size = suggest_transfer_size(&cmd); lock_flags = claim_dma_lock(); disable_dma(devpriv->dma0); /* clear flip-flop to make sure 2-byte registers for * count and address get set correctly */ clear_dma_ff(devpriv->dma0); set_dma_addr(devpriv->dma0, virt_to_bus(devpriv->ai_buf0)); /* set appropriate size of transfer */ set_dma_count(devpriv->dma0, devpriv->dma_transfer_size); devpriv->dma_current = devpriv->dma0; devpriv->dma_current_buf = devpriv->ai_buf0; enable_dma(devpriv->dma0); /* set up dual dma if appropriate */ if (dual_dma) { disable_dma(devpriv->dma1); /* clear flip-flop to make sure 2-byte registers for * count and address get set correctly */ clear_dma_ff(devpriv->dma1); set_dma_addr(devpriv->dma1, virt_to_bus(devpriv->ai_buf1)); /* set appropriate size of transfer */ set_dma_count(devpriv->dma1, devpriv->dma_transfer_size); enable_dma(devpriv->dma1); } release_dma_lock(lock_flags); return; } /* programs channel/gain list into card */ static void program_chanlist(struct comedi_device *dev, struct comedi_cmd cmd) { int i, n, chan_range; unsigned long irq_flags; const int range_mask = 0x3; /* masks unipolar/bipolar bit off range */ 
const int range_bitshift = 8; n = cmd.chanlist_len; /* spinlock protects indirect addressing */ spin_lock_irqsave(&dev->spinlock, irq_flags); outb(QRAM, dev->iobase + DAS1800_SELECT); /* select QRAM for baseAddress + 0x0 */ outb(n - 1, dev->iobase + DAS1800_QRAM_ADDRESS); /*set QRAM address start */ /* make channel / gain list */ for (i = 0; i < n; i++) { chan_range = CR_CHAN(cmd. chanlist[i]) | ((CR_RANGE(cmd.chanlist[i]) & range_mask) << range_bitshift); outw(chan_range, dev->iobase + DAS1800_QRAM); } outb(n - 1, dev->iobase + DAS1800_QRAM_ADDRESS); /*finish write to QRAM */ spin_unlock_irqrestore(&dev->spinlock, irq_flags); return; } /* analog input do_cmd */ static int das1800_ai_do_cmd(struct comedi_device *dev, struct comedi_subdevice *s) { int ret; int control_a, control_c; struct comedi_async *async = s->async; struct comedi_cmd cmd = async->cmd; if (!dev->irq) { comedi_error(dev, "no irq assigned for das-1800, cannot do hardware conversions"); return -1; } /* disable dma on TRIG_WAKE_EOS, or TRIG_RT * (because dma in handler is unsafe at hard real-time priority) */ if (cmd.flags & (TRIG_WAKE_EOS | TRIG_RT)) { devpriv->irq_dma_bits &= ~DMA_ENABLED; } else { devpriv->irq_dma_bits |= devpriv->dma_bits; } /* interrupt on end of conversion for TRIG_WAKE_EOS */ if (cmd.flags & TRIG_WAKE_EOS) { /* interrupt fifo not empty */ devpriv->irq_dma_bits &= ~FIMD; } else { /* interrupt fifo half full */ devpriv->irq_dma_bits |= FIMD; } /* determine how many conversions we need */ if (cmd.stop_src == TRIG_COUNT) { devpriv->count = cmd.stop_arg * cmd.chanlist_len; } das1800_cancel(dev, s); /* determine proper bits for control registers */ control_a = control_a_bits(cmd); control_c = control_c_bits(cmd); /* setup card and start */ program_chanlist(dev, cmd); ret = setup_counters(dev, cmd); if (ret < 0) { comedi_error(dev, "Error setting up counters"); return ret; } setup_dma(dev, cmd); outb(control_c, dev->iobase + DAS1800_CONTROL_C); /* set conversion rate and length for 
burst mode */ if (control_c & BMDE) { /* program conversion period with number of microseconds minus 1 */ outb(cmd.convert_arg / 1000 - 1, dev->iobase + DAS1800_BURST_RATE); outb(cmd.chanlist_len - 1, dev->iobase + DAS1800_BURST_LENGTH); } outb(devpriv->irq_dma_bits, dev->iobase + DAS1800_CONTROL_B); /* enable irq/dma */ outb(control_a, dev->iobase + DAS1800_CONTROL_A); /* enable fifo and triggering */ outb(CVEN, dev->iobase + DAS1800_STATUS); /* enable conversions */ return 0; } /* read analog input */ static int das1800_ai_rinsn(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_insn *insn, unsigned int *data) { int i, n; int chan, range, aref, chan_range; int timeout = 1000; short dpnt; int conv_flags = 0; unsigned long irq_flags; /* set up analog reference and unipolar / bipolar mode */ aref = CR_AREF(insn->chanspec); conv_flags |= UQEN; if (aref != AREF_DIFF) conv_flags |= SD; if (aref == AREF_COMMON) conv_flags |= CMEN; /* if a unipolar range was selected */ if (CR_RANGE(insn->chanspec) & UNIPOLAR) conv_flags |= UB; outb(conv_flags, dev->iobase + DAS1800_CONTROL_C); /* software conversion enabled */ outb(CVEN, dev->iobase + DAS1800_STATUS); /* enable conversions */ outb(0x0, dev->iobase + DAS1800_CONTROL_A); /* reset fifo */ outb(FFEN, dev->iobase + DAS1800_CONTROL_A); chan = CR_CHAN(insn->chanspec); /* mask of unipolar/bipolar bit from range */ range = CR_RANGE(insn->chanspec) & 0x3; chan_range = chan | (range << 8); spin_lock_irqsave(&dev->spinlock, irq_flags); outb(QRAM, dev->iobase + DAS1800_SELECT); /* select QRAM for baseAddress + 0x0 */ outb(0x0, dev->iobase + DAS1800_QRAM_ADDRESS); /* set QRAM address start */ outw(chan_range, dev->iobase + DAS1800_QRAM); outb(0x0, dev->iobase + DAS1800_QRAM_ADDRESS); /*finish write to QRAM */ outb(ADC, dev->iobase + DAS1800_SELECT); /* select ADC for baseAddress + 0x0 */ for (n = 0; n < insn->n; n++) { /* trigger conversion */ outb(0, dev->iobase + DAS1800_FIFO); for (i = 0; i < timeout; i++) { if 
(inb(dev->iobase + DAS1800_STATUS) & FNE) break; } if (i == timeout) { comedi_error(dev, "timeout"); return -ETIME; } dpnt = inw(dev->iobase + DAS1800_FIFO); /* shift data to offset binary for bipolar ranges */ if ((conv_flags & UB) == 0) dpnt += 1 << (thisboard->resolution - 1); data[n] = dpnt; } spin_unlock_irqrestore(&dev->spinlock, irq_flags); return n; } /* writes to an analog output channel */ static int das1800_ao_winsn(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_insn *insn, unsigned int *data) { int chan = CR_CHAN(insn->chanspec); /* int range = CR_RANGE(insn->chanspec); */ int update_chan = thisboard->ao_n_chan - 1; short output; unsigned long irq_flags; /* card expects two's complement data */ output = data[0] - (1 << (thisboard->resolution - 1)); /* if the write is to the 'update' channel, we need to remember its value */ if (chan == update_chan) devpriv->ao_update_bits = output; /* write to channel */ spin_lock_irqsave(&dev->spinlock, irq_flags); outb(DAC(chan), dev->iobase + DAS1800_SELECT); /* select dac channel for baseAddress + 0x0 */ outw(output, dev->iobase + DAS1800_DAC); /* now we need to write to 'update' channel to update all dac channels */ if (chan != update_chan) { outb(DAC(update_chan), dev->iobase + DAS1800_SELECT); /* select 'update' channel for baseAddress + 0x0 */ outw(devpriv->ao_update_bits, dev->iobase + DAS1800_DAC); } spin_unlock_irqrestore(&dev->spinlock, irq_flags); return 1; } /* reads from digital input channels */ static int das1800_di_rbits(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_insn *insn, unsigned int *data) { data[1] = inb(dev->iobase + DAS1800_DIGITAL) & 0xf; data[0] = 0; return 2; } /* writes to digital output channels */ static int das1800_do_wbits(struct comedi_device *dev, struct comedi_subdevice *s, struct comedi_insn *insn, unsigned int *data) { unsigned int wbits; /* only set bits that have been masked */ data[0] &= (1 << s->n_chan) - 1; wbits = 
devpriv->do_bits; wbits &= ~data[0]; wbits |= data[0] & data[1]; devpriv->do_bits = wbits; outb(devpriv->do_bits, dev->iobase + DAS1800_DIGITAL); data[1] = devpriv->do_bits; return 2; } /* loads counters with divisor1, divisor2 from private structure */ static int das1800_set_frequency(struct comedi_device *dev) { int err = 0; /* counter 1, mode 2 */ if (i8254_load(dev->iobase + DAS1800_COUNTER, 0, 1, devpriv->divisor1, 2)) err++; /* counter 2, mode 2 */ if (i8254_load(dev->iobase + DAS1800_COUNTER, 0, 2, devpriv->divisor2, 2)) err++; if (err) return -1; return 0; } /* converts requested conversion timing to timing compatible with * hardware, used only when card is in 'burst mode' */ static unsigned int burst_convert_arg(unsigned int convert_arg, int round_mode) { unsigned int micro_sec; /* in burst mode, the maximum conversion time is 64 microseconds */ if (convert_arg > 64000) convert_arg = 64000; /* the conversion time must be an integral number of microseconds */ switch (round_mode) { case TRIG_ROUND_NEAREST: default: micro_sec = (convert_arg + 500) / 1000; break; case TRIG_ROUND_DOWN: micro_sec = convert_arg / 1000; break; case TRIG_ROUND_UP: micro_sec = (convert_arg - 1) / 1000 + 1; break; } /* return number of nanoseconds */ return micro_sec * 1000; } /* utility function that suggests a dma transfer size based on the conversion period 'ns' */ static unsigned int suggest_transfer_size(struct comedi_cmd *cmd) { unsigned int size = DMA_BUF_SIZE; static const int sample_size = 2; /* size in bytes of one sample from board */ unsigned int fill_time = 300000000; /* target time in nanoseconds for filling dma buffer */ unsigned int max_size; /* maximum size we will allow for a transfer */ /* make dma buffer fill in 0.3 seconds for timed modes */ switch (cmd->scan_begin_src) { case TRIG_FOLLOW: /* not in burst mode */ if (cmd->convert_src == TRIG_TIMER) size = (fill_time / cmd->convert_arg) * sample_size; break; case TRIG_TIMER: size = (fill_time / 
(cmd->scan_begin_arg * cmd->chanlist_len)) * sample_size; break; default: size = DMA_BUF_SIZE; break; } /* set a minimum and maximum size allowed */ max_size = DMA_BUF_SIZE; /* if we are taking limited number of conversions, limit transfer size to that */ if (cmd->stop_src == TRIG_COUNT && cmd->stop_arg * cmd->chanlist_len * sample_size < max_size) max_size = cmd->stop_arg * cmd->chanlist_len * sample_size; if (size > max_size) size = max_size; if (size < sample_size) size = sample_size; return size; }
{ "pile_set_name": "Github" }
--- title: Container Structure Test Task description: Test container structure by container task and integrate test reporting into your build and release pipelines ms.assetid: 6A752841-345D-4BC6-8765-C45F63D91D75 ms.topic: reference ms.custom: seodec18 ms.author: admahesh author: Additi ms.date: 04/20/2020 monikerRange: 'azure-devops' --- # Container Structure Tests The Container Structure Tests provide a powerful framework to validate the structure of a container image. These tests can be used to check the output of commands in an image, as well as verify metadata and contents of the filesystem. Tests can be run either through a standalone binary, or through a Docker image. Tests within this framework are specified through a YAML or JSON config file. Multiple config files may be specified in a single test run. The config file will be loaded in by the test runner, which will execute the tests in order. Within this config file, four types of tests can be written: * Command Tests (testing output/error of a specific command issued) * File Existence Tests (making sure a file is, or isn't, present in the file system of the image) * File Content Tests (making sure files in the file system of the image contain, or do not contain, specific contents) * Metadata Test, singular (making sure certain container metadata is correct) ## Container Structure Test Task This task helps you run container structure tests and publish test results to Azure Pipelines and provides a comprehensive test reporting and analytics experience. > [!NOTE] > This is an early preview feature. More upcoming features will be rolled out in upcoming sprints. ## Arguments |Argument|Description| |--- |--- | |`dockerRegistryServiceConnection`<br/>Docker registry service connection| (Required) Select a Docker registry service connection. 
Required for commands that need to authenticate with a registry.| |`repository` <br/>Container repository| (Required) Name of the repository| |`tag` <br/>Tag| The tag is used in pulling the image from docker registry service connection <br/>Default value: `$(Build.BuildId)`| |`configFile` <br/>Config file path| (Required) Config files path, that contains container structure tests. Either .yaml or .json files| |`testRunTitle` <br/>Test run title| (Optional) Provide a name for the Test Run| |`failTaskOnFailedTests` <br/>Fail task if there are test failures| (Optional) Fail the task if there are any test failures. Check this option to fail the task if test failures are detected.| ## Build, Test and Publish Test The container structure test task can be added in the classic pipeline as well as in unified pipeline (multi-stage) & YAML based pipelines. # [YAML](#tab/yaml) In the new YAML based unified pipeline, you can search for task in the window. > [!div class="mx-imgBorder"] > ![Container Test in Unified Pipeline](media/unified-pipeline-creation.png) Once the task is added, you need to set the config file path, docker registory service connection, container repository and tag, if required. Task input in the yaml based pipeline is created. > [!div class="mx-imgBorder"] > ![Container Test in YAML based Pipeline](media/yaml-based-pipeline.png) ### YAML file > [!div class="mx-imgBorder"] > ![YAML file](media/yaml-file.png) Sample YAML [!INCLUDE [temp](../includes/yaml/ContainerStructureTestV1.md)] # [Classic](#tab/classic) In the classic pipeline, you can add this task from the designer view. > [!div class="mx-imgBorder"] > ![Container Test in Classic Pipeline](media/classic-pipeline-creation.png) * * * ## View test report Once the task is executed, you can directly go to test tab to view the full report. 
The published test results are displayed in the [Tests tab](../../test/review-continuous-test-results-after-build.md) in the pipeline summary and help you to measure pipeline quality, review traceability, troubleshoot failures, and drive failure ownership. > [!div class="mx-imgBorder"] > ![Test Reporting Page](media/results-page.png)
{ "pile_set_name": "Github" }
/** \addtogroup platform */ /** @{*/ /** * \defgroup platform_error Error functions * @{ */ /* mbed Microcontroller Library * Copyright (c) 2006-2013 ARM Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef MBED_ERROR_H #define MBED_ERROR_H /** To generate a fatal compile-time error, you can use the pre-processor #error directive. * * @param format C string that contains data stream to be printed. * Code snippets below show valid format. * * @code * #error "That shouldn't have happened!" * @endcode * * If the compiler evaluates this line, it will report the error and stop the compile. * * For example, you could use this to check some user-defined compile-time variables: * * @code * #define NUM_PORTS 7 * #if (NUM_PORTS > 4) * #error "NUM_PORTS must be less than 4" * #endif * @endcode * * Reporting Run-Time Errors: * To generate a fatal run-time error, you can use the mbed error() function. * * @code * error("That shouldn't have happened!"); * @endcode * * If the mbed running the program executes this function, it will print the * message via the USB serial port, and then die with the blue lights of death! * * The message can use printf-style formatting, so you can report variables in the * message too. 
For example, you could use this to check a run-time condition: * * @code * if(x >= 5) { * error("expected x to be less than 5, but got %d", x); * } * @endcode * * */ #ifdef __cplusplus extern "C" { #endif void error(const char* format, ...); #ifdef __cplusplus } #endif #endif /** @}*/ /** @}*/
{ "pile_set_name": "Github" }
<p id="notice"><%= notice %></p> <p> <strong>Name:</strong> <%= @galaxy.name %> </p> <%= link_to 'Edit', edit_galaxy_path(@galaxy) %> | <%= link_to 'Back', galaxies_path %>
{ "pile_set_name": "Github" }
/* * Copyright (c) 2015 The WebM project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #include <stdlib.h> #include "./vpx_config.h" #include "./vpx_dsp_rtcd.h" #include "vpx/vpx_integer.h" #include "vpx_ports/mem.h" void vpx_subtract_block_c(int rows, int cols, int16_t *diff_ptr, ptrdiff_t diff_stride, const uint8_t *src_ptr, ptrdiff_t src_stride, const uint8_t *pred_ptr, ptrdiff_t pred_stride) { int r, c; for (r = 0; r < rows; r++) { for (c = 0; c < cols; c++) diff_ptr[c] = src_ptr[c] - pred_ptr[c]; diff_ptr += diff_stride; pred_ptr += pred_stride; src_ptr += src_stride; } } #if CONFIG_VP9_HIGHBITDEPTH void vpx_highbd_subtract_block_c(int rows, int cols, int16_t *diff_ptr, ptrdiff_t diff_stride, const uint8_t *src8_ptr, ptrdiff_t src_stride, const uint8_t *pred8_ptr, ptrdiff_t pred_stride, int bd) { int r, c; uint16_t *src = CONVERT_TO_SHORTPTR(src8_ptr); uint16_t *pred = CONVERT_TO_SHORTPTR(pred8_ptr); (void)bd; for (r = 0; r < rows; r++) { for (c = 0; c < cols; c++) { diff_ptr[c] = src[c] - pred[c]; } diff_ptr += diff_stride; pred += pred_stride; src += src_stride; } } #endif // CONFIG_VP9_HIGHBITDEPTH
{ "pile_set_name": "Github" }
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #pragma once #include <aws/workdocs/WorkDocs_EXPORTS.h> #include <aws/core/utils/memory/stl/AWSString.h> namespace Aws { namespace WorkDocs { namespace Model { enum class SubscriptionType { NOT_SET, ALL }; namespace SubscriptionTypeMapper { AWS_WORKDOCS_API SubscriptionType GetSubscriptionTypeForName(const Aws::String& name); AWS_WORKDOCS_API Aws::String GetNameForSubscriptionType(SubscriptionType value); } // namespace SubscriptionTypeMapper } // namespace Model } // namespace WorkDocs } // namespace Aws
{ "pile_set_name": "Github" }
<!DOCTYPE HTML> <html lang="en-us"> <head> <meta http-equiv="Content-type" content="text/html; charset=utf-8"> <title>Colors Example</title> <script src="colors.js"></script> </head> <body> <script> var test = colors.red("hopefully colorless output"); document.write('Rainbows are fun!'.rainbow + '<br/>'); document.write('So '.italic + 'are'.underline + ' styles! '.bold + 'inverse'.inverse); // styles not widely supported document.write('Chains are also cool.'.bold.italic.underline.red); // styles not widely supported //document.write('zalgo time!'.zalgo); document.write(test.stripColors); document.write("a".grey + " b".black); document.write("Zebras are so fun!".zebra); document.write(colors.rainbow('Rainbows are fun!')); document.write(colors.italic('So ') + colors.underline('are') + colors.bold(' styles! ') + colors.inverse('inverse')); // styles not widely supported document.write(colors.bold(colors.italic(colors.underline(colors.red('Chains are also cool.'))))); // styles not widely supported //document.write(colors.zalgo('zalgo time!')); document.write(colors.stripColors(test)); document.write(colors.grey("a") + colors.black(" b")); colors.addSequencer("america", function(letter, i, exploded) { if(letter === " ") return letter; switch(i%3) { case 0: return letter.red; case 1: return letter.white; case 2: return letter.blue; } }); colors.addSequencer("random", (function() { var available = ['bold', 'underline', 'italic', 'inverse', 'grey', 'yellow', 'red', 'green', 'blue', 'white', 'cyan', 'magenta']; return function(letter, i, exploded) { return letter === " " ? letter : letter[available[Math.round(Math.random() * (available.length - 1))]]; }; })()); document.write("AMERICA! F--K YEAH!".america); document.write("So apparently I've been to Mars, with all the little green men. 
But you know, I don't recall.".random); // // Custom themes // colors.setTheme({ silly: 'rainbow', input: 'grey', verbose: 'cyan', prompt: 'grey', info: 'green', data: 'grey', help: 'cyan', warn: 'yellow', debug: 'blue', error: 'red' }); // outputs red text document.write("this is an error".error); // outputs yellow text document.write("this is a warning".warn); </script> </body> </html>
{ "pile_set_name": "Github" }
/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ "use strict"; // Tests that adding a certificate already present in the certificate database // with different trust bits than those stored in the database does not result // in the new trust bits being ignored. do_get_profile(); var certDB = Cc["@mozilla.org/security/x509certdb;1"] .getService(Ci.nsIX509CertDB); function load_cert(cert, trust) { let file = "test_intermediate_basic_usage_constraints/" + cert + ".pem"; addCertFromFile(certDB, file, trust); } function getDERString(cert) { let derString = ""; for (let rawByte of cert.getRawDER({})) { derString += String.fromCharCode(rawByte); } return derString; } function run_test() { load_cert("ca", "CTu,CTu,CTu"); load_cert("int-limited-depth", "CTu,CTu,CTu"); let file = "test_intermediate_basic_usage_constraints/ee-int-limited-depth.pem"; let cert_pem = readFile(do_get_file(file)); let ee = certDB.constructX509FromBase64(pemToBase64(cert_pem)); checkCertErrorGeneric(certDB, ee, PRErrorCodeSuccess, certificateUsageSSLServer); // Change the already existing intermediate certificate's trust using // addCertFromBase64(). We use findCertByNickname first to ensure that the // certificate already exists. let int_cert = certDB.findCertByNickname("int-limited-depth"); notEqual(int_cert, null, "Intermediate cert should be in the cert DB"); let base64_cert = btoa(getDERString(int_cert)); certDB.addCertFromBase64(base64_cert, "p,p,p", "ignored_argument"); checkCertErrorGeneric(certDB, ee, SEC_ERROR_UNTRUSTED_ISSUER, certificateUsageSSLServer); }
{ "pile_set_name": "Github" }
/* * Hibernate Search, full-text search for your domain model * * License: GNU Lesser General Public License (LGPL), version 2.1 or later * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>. */ package org.hibernate.search.engine.backend.types; import org.hibernate.search.engine.search.predicate.dsl.SearchPredicateFactory; /** * Defines how the structure of an object field is preserved upon indexing. */ public enum ObjectStructure { /** * Use the backend-specific default. */ DEFAULT, /** * Flatten multi-valued object fields. * <p> * This structure is generally more efficient, * but has the disadvantage of dropping the original structure * by making the leaf fields multi-valued instead of the object fields. * <p> * For instance this structure: * <ul> * <li>title = Levianthan Wakes</li> * <li>authors = * <ul> * <li>(first element) * <ul> * <li>firstName = Daniel</li> * <li>lastName = Abraham</li> * </ul> * </li> * <li>(second element) * <ul> * <li>firstName = Ty</li> * <li>lastName = Frank</li> * </ul> * </li> * </ul> * </li> * </ul> * Will become: * <ul> * <li>title = Levianthan Wakes</li> * <li>authors.firstName = * <ul> * <li>(first element) Daniel</li> * <li>(second element) Ty</li> * </ul> * </li> * <li>authors.lastName = * <ul> * <li>(first element) Abraham</li> * <li>(second element) Frank</li> * </ul> * </li> * </ul> * * As a result, a search for <code>authors.firstname:Ty AND authors.lastname=Abraham</code> * would return the above document even though Ty Abraham does not exist. */ FLATTENED, /** * Store object fields as nested documents. * <p> * This structure is generally less efficient, * but has the advantage of preserving the original structure. * Note however that access to that information when querying * requires special care. * See in particular the {@link SearchPredicateFactory#nested() "nested" predicate}. */ NESTED }
{ "pile_set_name": "Github" }
// Whether the app is being served from a static index.html file rather than
// from a web-server root ("offline" mode). String.prototype.includes already
// returns a boolean, so no `!!` coercion is needed. Evaluated once at module
// load time, matching the original behavior.
const isOffline = window.location.href.includes('index.html')

/**
 * Returns the base path the app is served from: everything before
 * 'index.html' when running offline, otherwise the server root '/'.
 */
const getAbsolutePath = () => isOffline
  ? window.location.href.split('index.html')[0]
  : '/'

export default getAbsolutePath
{ "pile_set_name": "Github" }
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = build

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source

# NOTE: texinfo and info were missing from .PHONY even though both targets
# exist below; added so a stray file named "texinfo"/"info" cannot mask them.
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man texinfo info changes linkcheck doctest gettext

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"

clean:
	-rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/gandalf.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/gandalf.qhc"

devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/gandalf"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/gandalf"
	@echo "# devhelp"

epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	$(MAKE) -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."
{ "pile_set_name": "Github" }
// |reftest| skip-if(!this.hasOwnProperty("TypedObject")) var BUGNUMBER = 939715; var summary = 'method type.build'; /* * Any copyright is dedicated to the Public Domain. * http://creativecommons.org/licenses/publicdomain/ */ var ArrayType = TypedObject.ArrayType; var StructType = TypedObject.StructType; var uint8 = TypedObject.uint8; var uint16 = TypedObject.uint16; var uint32 = TypedObject.uint32; var uint8Clamped = TypedObject.uint8Clamped; var int8 = TypedObject.int8; var int16 = TypedObject.int16; var int32 = TypedObject.int32; var float32 = TypedObject.float32; var float64 = TypedObject.float64; function oneDimensionalArrayOfUints() { var grain = uint32; var type = grain.array(4); var r1 = type.build(x => x * 2); assertTypedEqual(type, r1, new type([0, 2, 4, 6])); } function oneDimensionalArrayOfStructs() { var grain = new StructType({f: uint32}); var type = grain.array(4); var r1 = type.build(x => new grain({f: x * 2})); var r2 = type.build((x, out) => { out.f = x * 2; }); assertTypedEqual(type, r1, new type([{f:0}, {f:2}, {f:4}, {f:6}])); assertTypedEqual(type, r1, r2); } // At an attempt at readability, the tests below all try to build up // numbers where there is a one-to-one mapping between input dimension // and base-10 digit in the output. // // (Note that leading zeros must be elided in the expected-values to // avoid inadvertantly interpreting the numbers as octal constants.) 
function twoDimensionalArrayOfStructsWithDepth2() { var grain = new StructType({f: uint32}); var type = grain.array(2, 2); var r1 = type.build(2, (x, y) => { return new grain({f: x * 10 + y}); }); var r2 = type.build(2, (x, y, out) => { out.f = x * 10 + y; }); assertTypedEqual(type, r1, new type([[{f: 0}, {f: 1}], [{f:10}, {f:11}]])); assertTypedEqual(type, r1, r2); } function twoDimensionalArrayOfStructsWithDepth1() { var grain = new StructType({f: uint32}).array(2); var type = grain.array(2); var r1 = type.build((x) => { return new grain([{f: x * 10}, {f: x * 10 + 1}]); }); var r2 = type.build(1, (x, out) => { out[0].f = x * 10 + 0; out[1].f = x * 10 + 1; }); assertTypedEqual(type, r1, new type([[{f: 0}, {f: 1}], [{f:10}, {f:11}]])); assertTypedEqual(type, r1, r2); } function threeDimensionalArrayOfUintsWithDepth3() { var grain = uint32; var type = grain.array(2).array(2).array(2); var r1 = type.build(3, (x,y,z) => x * 100 + y * 10 + z); assertTypedEqual(type, r1, new type([[[ 0, 1], [ 10, 11]], [[100, 101], [110, 111]]])); } function threeDimensionalArrayOfUintsWithDepth2() { var grain = uint32.array(2); var type = grain.array(2).array(2); var r1 = type.build(2, (x,y) => [x * 100 + y * 10 + 0, x * 100 + y * 10 + 1]); var r1b = type.build(2, (x,y) => grain.build(z => x * 100 + y * 10 + z)); var r1c = type.build(2, (x,y) => grain.build(1, z => x * 100 + y * 10 + z)); var r2 = type.build(2, (x,y, out) => { out[0] = x * 100 + y * 10 + 0; out[1] = x * 100 + y * 10 + 1; }); assertTypedEqual(type, r1, new type([[[ 0, 1], [ 10, 11]], [[100, 101], [110, 111]]])); assertTypedEqual(type, r1, r1b); assertTypedEqual(type, r1, r1c); assertTypedEqual(type, r1, r2); } function threeDimensionalArrayOfUintsWithDepth1() { var grain = uint32.array(2).array(2); var type = grain.array(2); var r1 = type.build(1, (x) => grain.build(y => [x * 100 + y * 10 + 0, x * 100 + y * 10 + 1])); var r1b = type.build(1, (x) => grain.build(1, y => [x * 100 + y * 10 + 0, x * 100 + y * 10 + 1])); var 
r1c = type.build(1, (x) => grain.build(2, (y,z) => x * 100 + y * 10 + z)); var r2 = type.build(1, (x, out) => { out[0][0] = x * 100 + 0 * 10 + 0; out[0][1] = x * 100 + 0 * 10 + 1; out[1][0] = x * 100 + 1 * 10 + 0; out[1][1] = x * 100 + 1 * 10 + 1; }); assertTypedEqual(type, r1, new type([[[ 0, 1], [ 10, 11]], [[100, 101], [110, 111]]])); assertTypedEqual(type, r1, r1b); assertTypedEqual(type, r1, r1c); assertTypedEqual(type, r1, r2); } function runTests() { print(BUGNUMBER + ": " + summary); oneDimensionalArrayOfUints(); oneDimensionalArrayOfStructs(); twoDimensionalArrayOfStructsWithDepth2(); twoDimensionalArrayOfStructsWithDepth1(); threeDimensionalArrayOfUintsWithDepth3(); threeDimensionalArrayOfUintsWithDepth2(); threeDimensionalArrayOfUintsWithDepth1(); if (typeof reportCompare === "function") reportCompare(true, true); print("Tests complete"); } runTests();
{ "pile_set_name": "Github" }