Dataset Preview
Duplicate
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code:   DatasetGenerationError
Exception:    TypeError
Message:      Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
Traceback:    Traceback (most recent call last):
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2011, in _prepare_split_single
                  writer.write_table(table)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 585, in write_table
                  pa_table = table_cast(pa_table, self._schema)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2302, in table_cast
                  return cast_table_to_schema(table, schema)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in cast_table_to_schema
                  arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in <listcomp>
                  arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in wrapper
                  return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in <listcomp>
                  return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2122, in cast_array_to_feature
                  raise TypeError(f"Couldn't cast array of type\n{_short_str(array.type)}\nto\n{_short_str(feature)}")
              TypeError: Couldn't cast array of type
              struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
              to
              {'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
              
              The above exception was the direct cause of the following exception:
              
              Traceback (most recent call last):
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
                  parquet_operations = convert_to_parquet(builder)
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
                  builder.download_and_prepare(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
                  self._download_and_prepare(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
                  self._prepare_split(split_generator, **prepare_split_kwargs)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
                  for job_id, done, content in self._prepare_split_single(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
                  raise DatasetGenerationError("An error occurred while generating the dataset") from e
              datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.

text
string
meta
dict
package org.onosproject.net.flow.criteria;

import org.onlab.packet.TpPort;

import java.util.Objects;

/**
 * Implementation of SCTP port criterion (16 bits unsigned integer).
 */
public final class SctpPortCriterion implements Criterion {
    private final TpPort sctpPort;  // SCTP port value to match
    private final TpPort mask;      // optional bit mask; null when matching the exact port
    private final Type type;        // criterion type (source/destination, masked or exact)

    /**
     * Constructor.
     *
     * @param sctpPort the SCTP port to match
     * @param mask the mask for the SCTP port
     * @param type the match type. Should be either Type.SCTP_SRC_MASKED or
     *             Type.SCTP_DST_MASKED
     */
    SctpPortCriterion(TpPort sctpPort, TpPort mask, Type type) {
        this.sctpPort = sctpPort;
        this.mask = mask;
        this.type = type;
    }

    /**
     * Constructor.
     *
     * @param sctpPort the SCTP port to match
     * @param type the match type. Should be either Type.SCTP_SRC or
     *             Type.SCTP_DST
     */
    SctpPortCriterion(TpPort sctpPort, Type type) {
        this(sctpPort, null, type);
    }

    @Override
    public Type type() {
        return this.type;
    }

    /**
     * Gets the SCTP port to match.
     *
     * @return the SCTP port to match
     */
    public TpPort sctpPort() {
        return this.sctpPort;
    }

    /**
     * Gets the mask for the SCTP port to match.
     *
     * @return the SCTP port mask, null if not specified
     */
    public TpPort mask() {
        return this.mask;
    }

    @Override
    public String toString() {
        // Append "/mask" only when a mask was supplied.
        return (mask != null) ?
                type().toString() + SEPARATOR + sctpPort + "/" + mask :
                type().toString() + SEPARATOR + sctpPort;
    }

    @Override
    public int hashCode() {
        // Hash covers the same fields compared in equals() plus the type
        // (via its ordinal), keeping the equals/hashCode contract.
        return Objects.hash(type.ordinal(), sctpPort, mask);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj instanceof SctpPortCriterion) {
            SctpPortCriterion that = (SctpPortCriterion) obj;
            return Objects.equals(sctpPort, that.sctpPort) &&
                    Objects.equals(mask, that.mask) &&
                    Objects.equals(type, that.type);
        }
        return false;
    }
}
{ "content_hash": "ace524c8a0379ece0e8e9a3981021917", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 75, "avg_line_length": 24.707865168539325, "alnum_prop": 0.5793542519326966, "repo_name": "LorenzReinhart/ONOSnew", "id": "ca5d1d5c1b570f1d3d78b6648759184b7e3f8e42", "size": "2816", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/api/src/main/java/org/onosproject/net/flow/criteria/SctpPortCriterion.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "224030" }, { "name": "HTML", "bytes": "108368" }, { "name": "Java", "bytes": "34148438" }, { "name": "JavaScript", "bytes": "3833411" }, { "name": "Protocol Buffer", "bytes": "13730" }, { "name": "Python", "bytes": "185205" }, { "name": "Shell", "bytes": "2594" } ] }
<a href="http://github.com/angular/angular.js/tree/v1.2.0-rc.3/src/ng/directive/booleanAttrs.js#L176" class="view-source btn btn-action"><i class="icon-zoom-in"> </i> View source</a><a href="http://github.com/angular/angular.js/edit/master/src/ng/directive/booleanAttrs.js" class="improve-docs btn btn-primary"><i class="icon-edit"> </i> Improve this doc</a><h1><code ng:non-bindable="">ngChecked</code> <div><span class="hint">directive in module <code ng:non-bindable="">ng</code> </span> </div> </h1> <div><h2 id="description">Description</h2> <div class="description"><div class="ng-directive-page ng-directive-ngchecked-page"><p>The HTML specification does not require browsers to preserve the values of boolean attributes such as checked. (Their presence means true and their absence means false.) This prevents the Angular compiler from retrieving the binding expression. The <code>ngChecked</code> directive solves this problem for the <code>checked</code> attribute.</p> </div></div> <h2 id="usage">Usage</h2> <div class="usage">as attribute<pre class="prettyprint linenums">&lt;INPUT ng-checked="{expression}"&gt; ... 
&lt;/INPUT&gt;</pre> <h4 id="usage_parameters">Parameters</h4><table class="variables-matrix table table-bordered table-striped"><thead><tr><th>Param</th><th>Type</th><th>Details</th></tr></thead><tbody><tr><td>ngChecked</td><td><a href="" class="label type-hint type-hint-expression">expression</a></td><td><div class="ng-directive-page ng-directive-ngchecked-page"><p>If the <a href="guide/expression">expression</a> is truthy, then special attribute &quot;checked&quot; will be set on the element</p> </div></td></tr></tbody></table></div> <h2 id="example">Example</h2> <div class="example"><div class="ng-directive-page ng-directive-ngchecked-page"><h4 id="example_source">Source</h4> <div source-edit="" source-edit-deps="angular.js" source-edit-html="index.html-40" source-edit-css="" source-edit-js="" source-edit-json="" source-edit-unit="" source-edit-scenario="scenario.js-41"></div> <div class="tabbable"><div class="tab-pane" title="index.html"> <pre class="prettyprint linenums" ng-set-text="index.html-40" ng-html-wrap=" angular.js"></pre> <script type="text/ng-template" id="index.html-40"> Check me to check both: <input type="checkbox" ng-model="master"><br/> <input id="checkSlave" type="checkbox" ng-checked="master"> </script> </div> <div class="tab-pane" title="End to end test"> <pre class="prettyprint linenums" ng-set-text="scenario.js-41"></pre> <script type="text/ng-template" id="scenario.js-41"> it('should check both checkBoxes', function() { expect(element('.doc-example-live #checkSlave').prop('checked')).toBeFalsy(); input('master').check(); expect(element('.doc-example-live #checkSlave').prop('checked')).toBeTruthy(); }); </script> </div> </div><h4 id="example_demo">Demo</h4> <div class="well doc-example-live animate-container" ng-embed-app="" ng-set-html="index.html-40" ng-eval-javascript=""></div> </div></div> </div>
{ "content_hash": "59b45bb855d2572643a7069a56a54a94", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 409, "avg_line_length": 71.69047619047619, "alnum_prop": 0.7157090667552308, "repo_name": "jbdeboer/angular-velocity-talk", "id": "ba4e45793751e726f848f475150b74ad5e3ba428", "size": "3011", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/lib/angular/docs/partials/api/ng.directive:ngChecked.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "337252" }, { "name": "JavaScript", "bytes": "6281638" }, { "name": "Ruby", "bytes": "503" }, { "name": "Shell", "bytes": "3876" } ] }
<!-- START doctoc generated TOC please keep comment here to allow auto update --> <!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE --> **Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - [修炼设计技法](#%E4%BF%AE%E7%82%BC%E8%AE%BE%E8%AE%A1%E6%8A%80%E6%B3%95) <!-- END doctoc generated TOC please keep comment here to allow auto update --> # 修炼设计技法 技法是实现设计的基础
{ "content_hash": "bdd7fa2e3e85db524423de81775e7bd7", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 84, "avg_line_length": 36.72727272727273, "alnum_prop": 0.7054455445544554, "repo_name": "ChanShuYi/uidesign_notebook", "id": "b710ff1b5ae8c7fbd3d41315746ddd71ed43bed6", "size": "448", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "chapter02/02_00_learn_design_skill.md", "mode": "33188", "license": "mit", "language": [] }
package org.apache.nutch.protocol; // Hadoop imports import org.apache.hadoop.conf.Configurable; import org.apache.nutch.plugin.FieldPluggable; import org.apache.nutch.storage.WebPage; import crawlercommons.robots.BaseRobotRules; /** A retriever of url content. Implemented by protocol extensions. */ public interface Protocol extends FieldPluggable, Configurable { /** The name of the extension point. */ public final static String X_POINT_ID = Protocol.class.getName(); /** * Property name. If in the current configuration this property is set to * true, protocol implementations should handle "politeness" limits * internally. If this is set to false, it is assumed that these limits are * enforced elsewhere, and protocol implementations should not enforce them * internally. */ public final static String CHECK_BLOCKING = "protocol.plugin.check.blocking"; /** * Property name. If in the current configuration this property is set to * true, protocol implementations should handle robot exclusion rules * internally. If this is set to false, it is assumed that these limits are * enforced elsewhere, and protocol implementations should not enforce them * internally. */ public final static String CHECK_ROBOTS = "protocol.plugin.check.robots"; /* * Returns the {@link Content} for a fetchlist entry. */ ProtocolOutput getProtocolOutput(String url, WebPage page); /** * Retrieve robot rules applicable for this url. * * @param url * url to check * @param page * @return robot rules (specific for this url or default), never null */ BaseRobotRules getRobotRules(String url, WebPage page); }
{ "content_hash": "bca1f14999ec1d20edf405bcd741ca9b", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 79, "avg_line_length": 33.92, "alnum_prop": 0.7334905660377359, "repo_name": "supermy/nutch2", "id": "25dd59bf989643fe143acfdbdbdab6a81ae944ea", "size": "2498", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/java/org/apache/nutch/protocol/Protocol.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "3332" }, { "name": "HTML", "bytes": "124309" }, { "name": "Java", "bytes": "2311373" }, { "name": "JavaScript", "bytes": "4862" }, { "name": "Shell", "bytes": "14602" }, { "name": "XSLT", "bytes": "1822" } ] }
package co.cask.cdap.data2.metadata.dataset;

import co.cask.cdap.api.dataset.DatasetAdmin;
import co.cask.cdap.api.dataset.DatasetContext;
import co.cask.cdap.api.dataset.DatasetDefinition;
import co.cask.cdap.api.dataset.DatasetProperties;
import co.cask.cdap.api.dataset.DatasetSpecification;
import co.cask.cdap.api.dataset.IncompatibleUpdateException;
import co.cask.cdap.api.dataset.Reconfigurable;
import co.cask.cdap.api.dataset.lib.AbstractDatasetDefinition;
import co.cask.cdap.api.dataset.lib.IndexedTable;

import java.io.IOException;
import java.util.Map;

/**
 * Define the Dataset for metadata.
 *
 * The metadata dataset is backed by a single embedded {@link IndexedTable}
 * named {@code metadata_index}; this definition delegates spec creation,
 * admin and dataset instantiation to the indexed table's definition.
 */
public class MetadataDatasetDefinition extends AbstractDatasetDefinition<MetadataDataset, DatasetAdmin>
  implements Reconfigurable {

  // Name of the embedded IndexedTable that stores and indexes the metadata.
  private static final String METADATA_INDEX_TABLE_NAME = "metadata_index";

  private final DatasetDefinition<? extends IndexedTable, ?> indexedTableDef;

  public MetadataDatasetDefinition(String name, DatasetDefinition<? extends IndexedTable, ?> indexedTableDef) {
    super(name);
    this.indexedTableDef = indexedTableDef;
  }

  // Implementation of DatasetDefinition interface methods.

  /**
   * Builds the spec for a new instance, embedding the indexed table's spec
   * configured to index {@link MetadataDataset#INDEX_COLUMN}.
   */
  @Override
  public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
    return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .datasets(indexedTableDef.configure(METADATA_INDEX_TABLE_NAME,
                                          addIndexColumn(properties, MetadataDataset.INDEX_COLUMN)))
      .build();
  }

  /**
   * Rebuilds the spec with new properties while preserving the index column
   * already recorded in the current embedded table spec, so an update cannot
   * silently change what is being indexed.
   */
  @Override
  public DatasetSpecification reconfigure(String instanceName,
                                          DatasetProperties newProperties,
                                          DatasetSpecification currentSpec) throws IncompatibleUpdateException {
    // extract the column to index from the indexed table spec
    DatasetSpecification indexSpec = currentSpec.getSpecification(METADATA_INDEX_TABLE_NAME);
    String indexColumn = indexSpec.getProperty(IndexedTable.INDEX_COLUMNS_CONF_KEY);
    return DatasetSpecification.builder(instanceName, getName())
      .properties(newProperties.getProperties())
      .datasets(AbstractDatasetDefinition.reconfigure(indexedTableDef, METADATA_INDEX_TABLE_NAME,
                                                      addIndexColumn(newProperties, indexColumn), indexSpec))
      .build();
  }

  // Returns a copy of the given properties with the index-column key set,
  // as required by the embedded IndexedTable.
  private DatasetProperties addIndexColumn(DatasetProperties properties, String indexColumn) {
    return DatasetProperties
      .builder()
      .addAll(properties.getProperties())
      .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, indexColumn)
      .build();
  }

  /**
   * Delegates admin operations to the embedded indexed table's admin.
   */
  @Override
  public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                               ClassLoader classLoader) throws IOException {
    return indexedTableDef.getAdmin(datasetContext,
                                    spec.getSpecification(METADATA_INDEX_TABLE_NAME),
                                    classLoader);
  }

  /**
   * Instantiates a {@link MetadataDataset} wrapping the embedded indexed table.
   */
  @Override
  public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                    Map<String, String> arguments, ClassLoader classLoader) throws IOException {
    return new MetadataDataset(indexedTableDef.getDataset(datasetContext,
                                                          spec.getSpecification(METADATA_INDEX_TABLE_NAME),
                                                          arguments, classLoader));
  }
}
{ "content_hash": "9172c72ae800277642ccb7562c02d65d", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 112, "avg_line_length": 43.69512195121951, "alnum_prop": 0.6893664526932738, "repo_name": "caskdata/cdap", "id": "a9cf4d3394b75d9c2c1079e15a90be963ade7edf", "size": "4182", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "cdap-data-fabric/src/main/java/co/cask/cdap/data2/metadata/dataset/MetadataDatasetDefinition.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "26055" }, { "name": "CSS", "bytes": "478678" }, { "name": "HTML", "bytes": "647505" }, { "name": "Java", "bytes": "19722699" }, { "name": "JavaScript", "bytes": "2362906" }, { "name": "Python", "bytes": "166065" }, { "name": "Ruby", "bytes": "3178" }, { "name": "Scala", "bytes": "173411" }, { "name": "Shell", "bytes": "225202" }, { "name": "Visual Basic", "bytes": "870" } ] }
#ifndef WTF_OwnPtr_h
#define WTF_OwnPtr_h

#include "wtf/HashTableDeletedValueType.h"
#include "wtf/Noncopyable.h"
#include "wtf/OwnPtrCommon.h"
#include <algorithm>
#include <utility>

namespace WTF {

template <typename T> class PassOwnPtr;

// Single-owner smart pointer: deletes the pointee (via OwnedPtrDeleter<T>) in
// the destructor. Ownership can only be transferred (through PassOwnPtr or a
// move), never shared; copying is disabled by WTF_MAKE_NONCOPYABLE.
template <typename T> class OwnPtr {
    WTF_MAKE_NONCOPYABLE(OwnPtr);

public:
    // For array types (T = U[]) ValueType is the element type U.
    typedef typename std::remove_extent<T>::type ValueType;
    typedef ValueType* PtrType;

    OwnPtr() : m_ptr(nullptr) {}
    OwnPtr(std::nullptr_t) : m_ptr(nullptr) {}

    // See comment in PassOwnPtr.h for why this takes a const reference.
    OwnPtr(const PassOwnPtr<T>&);
    template <typename U> OwnPtr(const PassOwnPtr<U>&, EnsurePtrConvertibleArgDecl(U, T));

    // Hash table deleted values, which are only constructed and never copied or
    // destroyed.
    OwnPtr(HashTableDeletedValueType) : m_ptr(hashTableDeletedValue()) {}
    bool isHashTableDeletedValue() const { return m_ptr == hashTableDeletedValue(); }

    ~OwnPtr()
    {
        OwnedPtrDeleter<T>::deletePtr(m_ptr);
        m_ptr = nullptr;
    }

    PtrType get() const { return m_ptr; }

    void clear();
    PassOwnPtr<T> release();
    // Relinquishes ownership without deleting; caller becomes responsible.
    PtrType leakPtr() WARN_UNUSED_RETURN;

    ValueType& operator*() const { ASSERT(m_ptr); return *m_ptr; }
    PtrType operator->() const { ASSERT(m_ptr); return m_ptr; }

    // Element access; only valid when T is an array type (see definition).
    ValueType& operator[](std::ptrdiff_t i) const;

    bool operator!() const { return !m_ptr; }

    // This conversion operator allows implicit conversion to bool but not to
    // other integer types.
    typedef PtrType OwnPtr::*UnspecifiedBoolType;
    operator UnspecifiedBoolType() const { return m_ptr ? &OwnPtr::m_ptr : 0; }

    OwnPtr& operator=(const PassOwnPtr<T>&);
    OwnPtr& operator=(std::nullptr_t) { clear(); return *this; }
    template <typename U> OwnPtr& operator=(const PassOwnPtr<U>&);

    OwnPtr(OwnPtr&&);
    template <typename U> OwnPtr(OwnPtr<U>&&);

    OwnPtr& operator=(OwnPtr&&);
    template <typename U> OwnPtr& operator=(OwnPtr<U>&&);

    void swap(OwnPtr& o) { std::swap(m_ptr, o.m_ptr); }

    // Sentinel used by WTF hash tables to mark deleted slots.
    static T* hashTableDeletedValue() { return reinterpret_cast<T*>(-1); }

private:
    // We should never have two OwnPtrs for the same underlying object
    // (otherwise we'll get double-destruction), so these equality operators
    // should never be needed.
    template <typename U> bool operator==(const OwnPtr<U>&) const
    {
        static_assert(!sizeof(U*), "OwnPtrs should never be equal");
        return false;
    }
    template <typename U> bool operator!=(const OwnPtr<U>&) const
    {
        static_assert(!sizeof(U*), "OwnPtrs should never be equal");
        return false;
    }
    template <typename U> bool operator==(const PassOwnPtr<U>&) const
    {
        static_assert(!sizeof(U*), "OwnPtrs should never be equal");
        return false;
    }
    template <typename U> bool operator!=(const PassOwnPtr<U>&) const
    {
        static_assert(!sizeof(U*), "OwnPtrs should never be equal");
        return false;
    }

    PtrType m_ptr;
};

template <typename T> inline OwnPtr<T>::OwnPtr(const PassOwnPtr<T>& o)
    : m_ptr(o.leakPtr())
{
}

template <typename T> template <typename U> inline OwnPtr<T>::OwnPtr(const PassOwnPtr<U>& o, EnsurePtrConvertibleArgDefn(U, T))
    : m_ptr(o.leakPtr())
{
    static_assert(!std::is_array<T>::value, "pointers to array must never be converted");
}

template <typename T> inline void OwnPtr<T>::clear()
{
    // Null out m_ptr before deleting so re-entrant access during the
    // destructor observes an empty pointer rather than a dangling one.
    PtrType ptr = m_ptr;
    m_ptr = nullptr;
    OwnedPtrDeleter<T>::deletePtr(ptr);
}

template <typename T> inline PassOwnPtr<T> OwnPtr<T>::release()
{
    PtrType ptr = m_ptr;
    m_ptr = nullptr;
    return PassOwnPtr<T>(ptr);
}

template <typename T> inline typename OwnPtr<T>::PtrType OwnPtr<T>::leakPtr()
{
    PtrType ptr = m_ptr;
    m_ptr = nullptr;
    return ptr;
}

template <typename T> inline typename OwnPtr<T>::ValueType& OwnPtr<T>::operator[](std::ptrdiff_t i) const
{
    static_assert(std::is_array<T>::value, "elements access is possible for arrays only");
    ASSERT(m_ptr);
    ASSERT(i >= 0);
    return m_ptr[i];
}

template <typename T> inline OwnPtr<T>& OwnPtr<T>::operator=(const PassOwnPtr<T>& o)
{
    // Take the new pointer first, then sanity-check against self-assignment
    // of the same raw pointer, then delete the old one.
    PtrType ptr = m_ptr;
    m_ptr = o.leakPtr();
    ASSERT(!ptr || m_ptr != ptr);
    OwnedPtrDeleter<T>::deletePtr(ptr);
    return *this;
}

template <typename T> template <typename U> inline OwnPtr<T>& OwnPtr<T>::operator=(const PassOwnPtr<U>& o)
{
    static_assert(!std::is_array<T>::value, "pointers to array must never be converted");
    PtrType ptr = m_ptr;
    m_ptr = o.leakPtr();
    ASSERT(!ptr || m_ptr != ptr);
    OwnedPtrDeleter<T>::deletePtr(ptr);
    return *this;
}

template <typename T> inline OwnPtr<T>::OwnPtr(OwnPtr<T>&& o)
    : m_ptr(o.leakPtr())
{
}

template <typename T> template <typename U> inline OwnPtr<T>::OwnPtr(OwnPtr<U>&& o)
    : m_ptr(o.leakPtr())
{
    static_assert(!std::is_array<T>::value, "pointers to array must never be converted");
}

template <typename T> inline OwnPtr<T>& OwnPtr<T>::operator=(OwnPtr<T>&& o)
{
    PtrType ptr = m_ptr;
    m_ptr = o.leakPtr();
    ASSERT(!ptr || m_ptr != ptr);
    OwnedPtrDeleter<T>::deletePtr(ptr);
    return *this;
}

template <typename T> template <typename U> inline OwnPtr<T>& OwnPtr<T>::operator=(OwnPtr<U>&& o)
{
    static_assert(!std::is_array<T>::value, "pointers to array must never be converted");
    PtrType ptr = m_ptr;
    m_ptr = o.leakPtr();
    ASSERT(!ptr || m_ptr != ptr);
    OwnedPtrDeleter<T>::deletePtr(ptr);
    return *this;
}

template <typename T> inline void swap(OwnPtr<T>& a, OwnPtr<T>& b)
{
    a.swap(b);
}

// Mixed comparisons between an OwnPtr and a raw pointer compare the
// underlying raw pointers.
template <typename T, typename U> inline bool operator==(const OwnPtr<T>& a, U* b)
{
    return a.get() == b;
}

template <typename T, typename U> inline bool operator==(T* a, const OwnPtr<U>& b)
{
    return a == b.get();
}

template <typename T, typename U> inline bool operator!=(const OwnPtr<T>& a, U* b)
{
    return a.get() != b;
}

template <typename T, typename U> inline bool operator!=(T* a, const OwnPtr<U>& b)
{
    return a != b.get();
}

// Generic accessor used by WTF helpers to obtain the raw pointer.
template <typename T> inline typename OwnPtr<T>::PtrType getPtr(const OwnPtr<T>& p)
{
    return p.get();
}

} // namespace WTF

using WTF::OwnPtr;

#endif // WTF_OwnPtr_h
{ "content_hash": "3f0e4b67afb33ef576690570e84284e1", "timestamp": "", "source": "github", "line_count": 229, "max_line_length": 105, "avg_line_length": 27.139737991266376, "alnum_prop": 0.6479485116653259, "repo_name": "joone/chromium-crosswalk", "id": "ec09c10e07efe1f86070f141466085efaa3ba007", "size": "7153", "binary": false, "copies": "7", "ref": "refs/heads/2016.04.css-round-display-edtior-draft-1", "path": "third_party/WebKit/Source/wtf/OwnPtr.h", "mode": "33188", "license": "bsd-3-clause", "language": [] }
<?php
/**
 * Expanded-menu component variant whose settings pin the menu level to 'root'.
 */
class Kwf_Component_Cache_MenuExpanded_Menu_Component extends Kwc_Menu_Expanded_Component
{
    /**
     * Returns the parent component settings with 'level' forced to 'root'.
     *
     * @param mixed $param optional settings parameter forwarded to the parent
     * @return array component settings
     */
    public static function getSettings($param = null)
    {
        $settings = parent::getSettings($param);
        $settings['level'] = 'root';
        return $settings;
    }
}
{ "content_hash": "96768fe45b977ada20ef2ab998701d06", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 89, "avg_line_length": 26.2, "alnum_prop": 0.6374045801526718, "repo_name": "koala-framework/koala-framework", "id": "f1dac5a87657c5e250f5c77d98d7307452afd7bc", "size": "262", "binary": false, "copies": "1", "ref": "refs/heads/5.4", "path": "tests/Kwf/Component/Cache/MenuExpanded/Menu/Component.php", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "CSS", "bytes": "49755" }, { "name": "JavaScript", "bytes": "1120596" }, { "name": "PHP", "bytes": "7076782" }, { "name": "SCSS", "bytes": "92667" }, { "name": "Smarty", "bytes": "229096" }, { "name": "Twig", "bytes": "33150" } ] }
<div id="about"> <h3>About</h3> <p>The Etherton One Name Study contains records of over 5000 Ethertons worldwide, from the 15th century to present day. Search our archives to discover the history of your Etherton family tree</p> <h3>Contact</h3> <p>If you need further information regarding your Etherton family history, please feel free to mail me at martinetherton63@gmail.com</p> </div> <div id="carousel"> <h3>Etherton Gallery</h3> <carousel interval="myInterval"> <slide ng-repeat="slide in slides" active="slide.active"> <img ng-src="{{slide.image}}" style="margin:auto;"> <div class="carousel-caption"> <h4>Slide {{$index}}</h4> <p>{{slide.text}}</p> </div> </slide> </carousel> </div> <div id="recent"> <h3>Recent Activity</h3> <ul> <li>5-1-2015 - Census information added</li> </ul> </div>
{ "content_hash": "322292e334dee2d8483b3d9be8e4d069", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 200, "avg_line_length": 33.392857142857146, "alnum_prop": 0.611764705882353, "repo_name": "metherton/onsAngular", "id": "039ae760ac0b790d7b7d1022e34d5fd734c0149e", "size": "936", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/partials/home.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6813" }, { "name": "JavaScript", "bytes": "267250" } ] }
package codec import ( "encoding/binary" "fmt" "math" "github.com/juju/errors" ) // EncodeInt encodes the int64 value with variable length format. // The encoded bytes format is: length flag(1 byte) + encoded data. // The length flag is calculated with following way // flag Value Range // 8 -> [MinInt64, MinInt32) // 12 -> [MinInt32, MinInt16) // 14 -> [MinInt16, MinInt8) // 15 -> [MinInt8, 0) // 16 -> 0 // 17 -> (0, MaxInt8] // 18 -> (MaxInt8, MaxInt16] // 20 -> (MaxInt16, MaxInt32] // 24 -> (MaxInt32, MaxInt64] // // EncodeInt appends the encoded value to slice b and returns the appended slice. // EncodeInt guarantees that the encoded value is in ascending order for comparison. func EncodeInt(b []byte, v int64) []byte { if v < 0 { switch { case v >= math.MinInt8: return append(b, 15, byte(v)) case v >= math.MinInt16: return append(b, 14, byte(v>>8), byte(v)) case v >= math.MinInt32: return append(b, 12, byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) default: return append(b, 8, byte(v>>56), byte(v>>48), byte(v>>40), byte(v>>32), byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) } } return EncodeUint(b, uint64(v)) } // EncodeIntDesc encodes the int64 value with variable length format. // The encoded bytes format is: length flag(1 byte) + encoded data. // The length flag is calculated with following way // flag Value Range // 24 -> [MinInt64, MinInt32) // 20 -> [MinInt32, MinInt16) // 18 -> [MinInt16, MinInt8) // 17 -> [MinInt8, 0) // 16 -> 0 // 15 -> (0, MaxInt8] // 14 -> (MaxInt8, MaxInt16] // 12 -> (MaxInt16, MaxInt32] // 8 -> (MaxInt32, MaxInt64] // // EncodeIntDesc appends the encoded value to slice b and returns the appended slice. // EncodeIntDesc guarantees that the encoded value is in descending order for comparison. 
func EncodeIntDesc(b []byte, v int64) []byte { if v < 0 { switch { case v >= math.MinInt8: v = ^v return append(b, 17, byte(v)) case v >= math.MinInt16: v = ^v return append(b, 18, byte(v>>8), byte(v)) case v >= math.MinInt32: v = ^v return append(b, 20, byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) default: v = ^v return append(b, 24, byte(v>>56), byte(v>>48), byte(v>>40), byte(v>>32), byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) } } return EncodeUintDesc(b, uint64(v)) } // DecodeInt decodes value encoded by EncodeInt before. // It returns the leftover un-decoded slice, decoded value if no error. func DecodeInt(b []byte) ([]byte, int64, error) { if len(b) == 0 { return nil, 0, errors.Errorf("empty bytes to decode value") } flag := b[0] length := int(flag) - 16 if length < 0 { length = -length leftover := b[1:] if len(leftover) < length { return nil, 0, errors.Errorf("insufficient bytes to decode value, need %d, but only %d", length, len(leftover)) } var v int64 switch length { case 1: v = int64(int8(leftover[0])) case 2: v = int64(int16(binary.BigEndian.Uint16(leftover[:length]))) case 4: v = int64(int32(binary.BigEndian.Uint32(leftover[:length]))) case 8: v = int64(binary.BigEndian.Uint64(leftover[:length])) default: return nil, 0, errors.Errorf("invalid encoded length flag %d", flag) } return leftover[length:], v, nil } leftover, v, err := DecodeUint(b) if v > math.MaxInt64 { return nil, 0, fmt.Errorf("decoded value %d overflow int64", v) } return leftover, int64(v), err } // DecodeIntDesc decodes value encoded by EncodeInt before. // It returns the leftover un-decoded slice, decoded value if no error. 
func DecodeIntDesc(b []byte) ([]byte, int64, error) { if len(b) == 0 { return nil, 0, errors.Errorf("empty bytes to decode value") } flag := b[0] length := int(flag) - 16 if length > 0 { leftover := b[1:] if len(leftover) < length { return nil, 0, errors.Errorf("insufficient bytes to decode value, need %d, but only %d", length, len(leftover)) } var v int64 switch length { case 1: v = int64(int8(leftover[0])) case 2: v = int64(int16(binary.BigEndian.Uint16(leftover[:length]))) case 4: v = int64(int32(binary.BigEndian.Uint32(leftover[:length]))) case 8: v = int64(binary.BigEndian.Uint64(leftover[:length])) default: return nil, 0, errors.Errorf("invalid encoded length flag %d", flag) } return leftover[length:], ^v, nil } leftover, v, err := DecodeUintDesc(b) if v > math.MaxInt64 { return nil, 0, fmt.Errorf("decoded value %d overflow int64", v) } return leftover, int64(v), err } // EncodeUint encodes the uint64 value with variable length format. // The encoded bytes format is: length flag(1 byte) + encoded data. // The length flag is calculated with following way: // flag Value Range // 16 -> 0 // 17 -> (0, MaxUint8] // 18 -> (MaxUint8, MaxUint16] // 20 -> (MaxUint16, MaxUint32] // 24 -> (MaxUint32, MaxUint64] // // EncodeUint appends the encoded value to slice b and returns the appended slice. // EncodeUint guarantees that the encoded value is in ascending order for comparison. func EncodeUint(b []byte, v uint64) []byte { switch { case v == 0: return append(b, 16) case v <= math.MaxUint8: return append(b, 17, byte(v)) case v <= math.MaxUint16: return append(b, 18, byte(v>>8), byte(v)) case v <= math.MaxUint32: return append(b, 20, byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) default: return append(b, 24, byte(v>>56), byte(v>>48), byte(v>>40), byte(v>>32), byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) } } // EncodeUintDesc encodes the int64 value with variable length format. // The encoded bytes format is: length flag(1 byte) + encoded data. 
// The length flag is calculated with following way // flag Value Range // 16 -> 0 // 15 -> (0, MaxUint8] // 14 -> (MaxUint8, MaxUint16] // 12 -> (MaxUint16, MaxUint32] // 8 -> (MaxUint32, MaxUint64] // // EncodeUintDesc appends the encoded value to slice b and returns the appended slice. // EncodeUintDesc guarantees that the encoded value is in descending order for comparison. func EncodeUintDesc(b []byte, v uint64) []byte { switch { case v == 0: return append(b, 16) case v <= math.MaxInt8: v = ^v return append(b, 15, byte(v)) case v <= math.MaxUint16: v = ^v return append(b, 14, byte(v>>8), byte(v)) case v <= math.MaxUint32: v = ^v return append(b, 12, byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) default: v = ^v return append(b, 8, byte(v>>56), byte(v>>48), byte(v>>40), byte(v>>32), byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) } } // DecodeUint decodes value encoded by EncodeUint before. // It returns the leftover un-decoded slice, decoded value if no error. func DecodeUint(b []byte) ([]byte, uint64, error) { if len(b) == 0 { return nil, 0, errors.Errorf("empty bytes to decode value") } flag := b[0] length := int(flag) - 16 leftover := b[1:] if len(leftover) < length { return nil, 0, errors.Errorf("insufficient bytes to decode value, need %d, but only %d", length, len(leftover)) } var v uint64 switch length { case 0: v = 0 case 1: v = uint64(leftover[0]) case 2: v = uint64(binary.BigEndian.Uint16(leftover[:length])) case 4: v = uint64(binary.BigEndian.Uint32(leftover[:length])) case 8: v = uint64(binary.BigEndian.Uint64(leftover[:length])) default: return nil, 0, errors.Errorf("invalid encoded length flag %d", flag) } return leftover[length:], v, nil } // DecodeUintDesc decodes value encoded by EncodeInt before. // It returns the leftover un-decoded slice, decoded value if no error. 
func DecodeUintDesc(b []byte) ([]byte, uint64, error) { if len(b) == 0 { return nil, 0, errors.Errorf("empty bytes to decode value") } flag := b[0] length := 16 - int(flag) leftover := b[1:] if len(leftover) < length { return nil, 0, errors.Errorf("insufficient bytes to decode value, need %d, but only %d", length, len(leftover)) } var v uint64 switch length { case 0: v = 0 case 1: v = uint64(^leftover[0]) case 2: v = uint64(^binary.BigEndian.Uint16(leftover[:length])) case 4: v = uint64(^binary.BigEndian.Uint32(leftover[:length])) case 8: v = uint64(^binary.BigEndian.Uint64(leftover[:length])) default: return nil, 0, errors.Errorf("invalid encoded length flag %d", flag) } return leftover[length:], v, nil }
{ "content_hash": "e28a9bc6f9e4fbcb6e10e1af383376a1", "timestamp": "", "source": "github", "line_count": 287, "max_line_length": 114, "avg_line_length": 28.822299651567945, "alnum_prop": 0.6528046421663443, "repo_name": "zenlinTechnofreak/vessel", "id": "615b66be07736ae399aa6744f62469d1bf6e66cb", "size": "9500", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "Godeps/_workspace/src/github.com/pingcap/tidb/util/codec/number.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "97412" } ] }
#include "windowgroup.h"
#include "shadecontrol.h"
#include <vector>
#include <iostream>
#include <boost/optional.hpp>
#include "functions.h"
#include "jsonobjects.h"
#include "logging.h"

namespace stadic {

// A WindowGroup describes one group of windows in the STADIC control file:
// its base geometry, glazing/BSDF material layers, optional shade-setting
// geometry, and the shade control that switches between settings.
WindowGroup::WindowGroup()
{
}

// Setters
void WindowGroup::setName(const std::string &name){
    m_name = name;
}
void WindowGroup::setBSDF(bool isBSDF){
    m_BSDF=isBSDF;
}
void WindowGroup::setBaseGeometry(const std::string &file){
    m_BaseGeometry=file;
}
void WindowGroup::setBaseCalculate(bool runBase){
    m_RunBase=runBase;
}

// Getters
std::string WindowGroup::name() const {
    return m_name;
}
bool WindowGroup::isBSDF() const {
    return m_BSDF;
}
bool WindowGroup::runBase() const {
    return m_RunBase;
}
std::string WindowGroup::baseGeometry() const {
    return m_BaseGeometry;
}
std::vector<std::string> WindowGroup::bsdfBaseLayers() const {
    return m_BSDFBaseLayers;
}
std::vector<std::string> WindowGroup::glazingLayers() const {
    return m_GlazingLayers;
}
std::vector<std::string> WindowGroup::shadeSettingGeometry() const {
    return m_ShadeSettingGeometry;
}
std::vector<bool> WindowGroup::runSetting() const {
    return m_RunSetting;
}
std::vector<std::vector<std::string> > WindowGroup::bsdfSettingLayers() const {
    return m_BSDFSettingLayers;
}
ShadeControl* WindowGroup::shadeControl() {
    return &m_ShadeControl;
}

// Populates this window group from its JSON description. Required keys
// ("name", "glazing_materials", and "shade_control" when settings exist)
// cause a false return when missing/invalid; optional keys fall back to
// documented defaults with an Info/Warning log entry.
bool WindowGroup::parseJson(const JsonObject &json){
    if (json.empty()){
        STADIC_LOG(Severity::Error, "The window group does not contain data.");
        return false;
    }
    boost::optional<std::string> sVal;
    boost::optional<bool> bVal;
    boost::optional<JsonObject> treeVal;

    // "name" is required.
    sVal=getString(json, "name", "The key \"name\" within window_groups does not appear in the STADIC Control File.", "The key \"name\" within window_groups is not a string.", Severity::Error);
    if (!sVal){
        return false;
    }else{
        setName(sVal.get());
    }

    // "BSDF" is optional; getBool supplies the default (false) when absent.
    bVal = getBool(json, "BSDF", false, "The key \"BSDF\" within window group " + name() + " is not a bool, assuming no BSDFs for window group " + name() + ".", Severity::Info);
    setBSDF(bVal.get());

    // "base_geometry" defaults to an empty geometry file.
    sVal=getString(json, "base_geometry", "The key \"base_geometry\" within window group "+name()+" is missing.", "The key \"base_geometry\" within window group " + name() + " is not a string.", Severity::Info);
    if(!sVal) {
        // FIX: log message previously read "there are is no base geometry".
        STADIC_LOG(Severity::Info, "It is assumed there is no base geometry for window group " +name()+".");
        setBaseGeometry("empty.rad");
    } else {
        setBaseGeometry(sVal.get());
    }

    bVal=getBool(json, "calculate_base", "The key \"calculate_base\" within window group " + name() + " is not a bool. This will not be calculated.", "The key \"calculate_base\" within window group " + name() + " is not a bool. This will not be calculated.", Severity::Info);
    if (bVal){
        setBaseCalculate(bVal.get());
        bVal.reset();
    }else{
        setBaseCalculate(false);
    }

    // Optional BSDF material layers for the base case.
    if (isBSDF()){
        treeVal=getArray(json, "bsdf_base_materials");
        if(!treeVal) {
            STADIC_LOG(Severity::Info, "It is assumed that window group "+name()+" does not contain BSDFs in the base case.");
        } else {
            for(auto &v : treeVal.get()){
                sVal = asString(v, "There was a problem reading the bsdf_base_materials key for window group "+name()+".", Severity::Fatal);
                if (sVal){
                    m_BSDFBaseLayers.push_back(sVal.get());
                }
            }
        }
        treeVal.reset();
    }

    // "glazing_materials" is required for the program to run.
    treeVal=getArray(json, "glazing_materials", "The key \"glazing_materials\" within window group " + name() + " is missing.\n\tThese materials must be defined for the program to run.", Severity::Error);
    if(!treeVal) {
        return false;
    } else {
        for(auto &v : treeVal.get()){
            sVal = asString(v, "There was a problem reading the glazing_materials key for window group "+name()+".", Severity::Fatal);
            if (sVal){
                m_GlazingLayers.push_back(sVal.get());
            }
        }
        treeVal.reset();
    }

    // Optional shade-setting geometry files.
    treeVal=getArray(json, "shade_settings", "The key \"shade_settings\" within window group " + name() + " is missing.", Severity::Warning);
    if(!treeVal){
        STADIC_LOG(Severity::Info, "It is assumed there are no shade settings for window group "+name()+".");
    }else{
        for(auto &v : treeVal.get()){
            sVal = asString(v, "There was a problem reading the shade_settings key for window group "+name()+".", Severity::Fatal);
            if (sVal){
                m_ShadeSettingGeometry.push_back(sVal.get());
                sVal.reset();
            }
        }
        treeVal.reset();
    }

    // Optional per-setting calculate flags.
    treeVal=getArray(json, "calculate_setting", "The key \"calculate_setting\" within window group "+ name() + " is missing.", Severity::Warning);
    if (!treeVal){
        STADIC_LOG(Severity::Info, "It is assumed there are no shade settings for window group "+name()+".");
    }else{
        for (auto &v : treeVal.get()){
            bVal=asBool(v, "There was a problem reading the calculate settings for window group "+name()+".", Severity::Warning);
            if (bVal){
                m_RunSetting.push_back(bVal.get());
                // BUG FIX: this was `bVal.get();`, a no-op; the intent (as in
                // the other loops) is to clear the optional between items.
                bVal.reset();
            }
        }
        treeVal.reset();
    }

    // Shade control is only meaningful when setting geometry exists.
    if (shadeSettingGeometry().size()>0){
        treeVal=getObject(json, "shade_control", "The key \"shade_control\" within window group " + name() + " is missing.", Severity::Warning);
        if (!treeVal){
            STADIC_LOG(Severity::Info, "It is assumed there is no shade control needed for windows within window group "+name()+".");
        }else{
            if (!m_ShadeControl.parseJson(treeVal.get())){
                return false;
            }
        }
    }

    // Optional per-setting BSDF material layers (array of arrays).
    if (isBSDF() && shadeSettingGeometry().size()>0){
        treeVal=getArray(json, "bsdf_setting_materials");
        if (!treeVal){
            STADIC_LOG(Severity::Info, "It is assumed that window group "+name()+" does not contain BSDFs in the setting layers.");
        }else{
            for(int index1=0;index1<treeVal.get().size();index1++){
                std::vector<std::string> tempVector;
                for (int index2=0;index2<treeVal.get()[index1].size();index2++){
                    tempVector.push_back(treeVal.get()[index1][index2].asString());
                }
                m_BSDFSettingLayers.push_back(tempVector);
            }
        }
    }
    return true;
}

}
{ "content_hash": "e9608aa4a0f53710265df97d058cf7e9", "timestamp": "", "source": "github", "line_count": 192, "max_line_length": 277, "avg_line_length": 34.239583333333336, "alnum_prop": 0.6029814420444174, "repo_name": "Architectural-Lighting-Simulation/STADIC", "id": "4d85869d4488ab666dd0b6af926f95f5432fe80c", "size": "8440", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "lib/windowgroup.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "2235" }, { "name": "C++", "bytes": "761965" }, { "name": "CMake", "bytes": "9075" }, { "name": "Python", "bytes": "6335" } ] }
// Copyright 2007-2015 Chris Patterson, Dru Sellers, Travis Smith, et. al.
//  
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit.RabbitMqTransport.Tests
{
    using System;
    using NUnit.Framework;


    /// <summary>
    /// Verifies that invalid RabbitMQ bus configurations are rejected with a
    /// <c>ConfigurationException</c> whose results describe the failure, and
    /// that configurations producing only warnings still succeed.
    /// </summary>
    [TestFixture]
    public class Configure_Specs
    {
        [Test]
        public void Should_fail_on_no_hosts()
        {
            // A bus with no host configured must be rejected.
            var ex = Assert.Throws<ConfigurationException>(() =>
                Bus.Factory.CreateUsingRabbitMq(x =>
                {
                }));

            Console.WriteLine(string.Join(Environment.NewLine, ex.Result.Results));
        }

        [Test]
        public void Should_fail_with_invalid_middleware()
        {
            // A null retry policy on the bus is invalid middleware.
            var ex = Assert.Throws<ConfigurationException>(() =>
                Bus.Factory.CreateUsingRabbitMq(x =>
                {
                    var host = x.Host(new Uri("rabbitmq://[::1]/test/"), h =>
                    {
                    });

                    x.UseRetry(null);
                }));

            Console.WriteLine(string.Join(Environment.NewLine, ex.Result.Results));
        }

        [Test]
        public void Should_fail_when_late_configuration_happens()
        {
            // Reading InputAddress forces the endpoint to build early; mutating
            // it afterwards is late configuration and must be rejected.
            var ex = Assert.Throws<ConfigurationException>(() =>
                Bus.Factory.CreateUsingRabbitMq(x =>
                {
                    var host = x.Host(new Uri("rabbitmq://[::1]/test/"), h =>
                    {
                    });

                    x.ReceiveEndpoint(host, "input_queue", e =>
                    {
                        var inputAddress = e.InputAddress;

                        e.Durable = false;
                        e.AutoDelete = true;
                    });
                }));

            Console.WriteLine(string.Join(Environment.NewLine, ex.Result.Results));
        }

        [Test]
        public void Should_fail_with_invalid_middleware_on_endpoint()
        {
            // A null retry policy on a receive endpoint is invalid middleware.
            var ex = Assert.Throws<ConfigurationException>(() =>
                Bus.Factory.CreateUsingRabbitMq(x =>
                {
                    var host = x.Host(new Uri("rabbitmq://[::1]/test/"), h =>
                    {
                    });

                    x.ReceiveEndpoint(host, "input_queue", e =>
                    {
                        e.UseRetry(null);
                    });
                }));

            Console.WriteLine(string.Join(Environment.NewLine, ex.Result.Results));
        }

        [Test]
        public void Should_fail_with_empty_queue_name()
        {
            // Overriding the default bus endpoint queue name with "" is invalid.
            var ex = Assert.Throws<ConfigurationException>(() =>
                Bus.Factory.CreateUsingRabbitMq(x =>
                {
                    var host = x.Host(new Uri("rabbitmq://[::1]/test/"), h =>
                    {
                    });

                    x.OverrideDefaultBusEndpointQueueName("");
                }));

            Console.WriteLine(string.Join(Environment.NewLine, ex.Result.Results));
        }

        [Test]
        public void Should_fail_with_invalid_queue_name()
        {
            // Queue names with characters RabbitMQ disallows must be rejected.
            var ex = Assert.Throws<ConfigurationException>(() =>
                Bus.Factory.CreateUsingRabbitMq(x =>
                {
                    var host = x.Host(new Uri("rabbitmq://[::1]/test/"), h =>
                    {
                    });

                    x.ReceiveEndpoint(host, "0(*!)@((*#&!(*&@#/", e =>
                    {
                    });
                }));

            Console.WriteLine(string.Join(Environment.NewLine, ex.Result.Results));
        }

        [Test]
        public void Should_not_fail_with_warnings()
        {
            // PurgeOnStartup only produces a warning, so creation succeeds.
            Bus.Factory.CreateUsingRabbitMq(x =>
            {
                var host = x.Host(new Uri("rabbitmq://[::1]/test/"), h =>
                {
                });

                x.ReceiveEndpoint(host, "input_queue", e =>
                {
                    e.PurgeOnStartup = true;
                });
            });
        }
    }
}
{ "content_hash": "26ab447b6f4d15147965163f26d73edd", "timestamp": "", "source": "github", "line_count": 158, "max_line_length": 90, "avg_line_length": 31.120253164556964, "alnum_prop": 0.45068130974171244, "repo_name": "jsmale/MassTransit", "id": "3ba365cead113eaf1037e4daae5db0701e7c17ac", "size": "4919", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/MassTransit.RabbitMqTransport.Tests/Configure_Specs.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1937" }, { "name": "C#", "bytes": "4957357" }, { "name": "F#", "bytes": "15254" }, { "name": "PowerShell", "bytes": "5322" } ] }
// UMD wrapper: exposes the library to CommonJS, AMD, or browser globals.
(function (root, factory) {
  if (typeof exports === 'object') {
    // CommonJS
    factory(exports);
  } else if (typeof define === 'function' && define.amd) {
    // AMD. Register as an anonymous module.
    define(['exports'], factory);
  } else {
    // Browser globals
    factory(root);
  }
} (this, function (exports) {
  //Default config/variables
  var VERSION = '0.9.0';

  /**
   * IntroJs main class
   *
   * @class IntroJs
   * @param {Object} obj the root element the intro is scoped to
   */
  function IntroJs(obj) {
    this._targetElement = obj;

    this._options = {
      /* Next button label in tooltip box */
      nextLabel: 'Next &rarr;',
      /* Previous button label in tooltip box */
      prevLabel: '&larr; Back',
      /* Skip button label in tooltip box */
      skipLabel: 'Skip',
      /* Done button label in tooltip box */
      doneLabel: 'Done',
      /* Default tooltip box position */
      tooltipPosition: 'bottom',
      /* Next CSS class for tooltip boxes */
      tooltipClass: '',
      /* Close introduction when pressing Escape button? */
      exitOnEsc: true,
      /* Close introduction when clicking on overlay layer? */
      exitOnOverlayClick: true,
      /* Show step numbers in introduction? */
      showStepNumbers: true,
      /* Let user use keyboard to navigate the tour? */
      keyboardNavigation: true,
      /* Show tour control buttons? */
      showButtons: true,
      /* Show tour bullets? */
      showBullets: true,
      /* Scroll to highlighted element? */
      scrollToElement: true,
      /* Set the overlay opacity */
      overlayOpacity: 0.8
    };
  }

  /**
   * Initiate a new introduction/guide from an element in the page
   *
   * @api private
   * @method _introForElement
   * @param {Object} targetElm
   * @returns {Boolean} Success or not?
   */
  function _introForElement(targetElm) {
    // Builds the ordered step list either from options.steps (programmatic)
    // or from data-* attributes found under targetElm, then starts the tour.
    var introItems = [],
        self = this;

    if (this._options.steps) {
      //use steps passed programmatically
      var allIntroSteps = [];

      for (var i = 0, stepsLength = this._options.steps.length; i < stepsLength; i++) {
        var currentItem = _cloneObject(this._options.steps[i]);
        //set the step
        currentItem.step = introItems.length + 1;
        //use querySelector function only when developer used CSS selector
        if (typeof(currentItem.element) === 'string') {
          //grab the element with given selector from the page
          currentItem.element = document.querySelector(currentItem.element);
        }
        //intro without element
        if (typeof(currentItem.element) === 'undefined' || currentItem.element == null) {
          var floatingElementQuery = document.querySelector(".introjsFloatingElement");
          if (floatingElementQuery == null) {
            floatingElementQuery = document.createElement('div');
            floatingElementQuery.className = 'introjsFloatingElement';
            document.body.appendChild(floatingElementQuery);
          }
          currentItem.element = floatingElementQuery;
          currentItem.position = 'floating';
        }
        if (currentItem.element != null) {
          introItems.push(currentItem);
        }
      }
    } else {
      //use steps from data-* annotations
      var allIntroSteps = targetElm.querySelectorAll('*[data-intro]');
      //if there's no element to intro
      if (allIntroSteps.length < 1) {
        return false;
      }
      //first add intro items with data-step
      for (var i = 0, elmsLength = allIntroSteps.length; i < elmsLength; i++) {
        var currentElement = allIntroSteps[i];
        var step = parseInt(currentElement.getAttribute('data-step'), 10);
        if (step > 0) {
          introItems[step - 1] = {
            element: currentElement,
            intro: currentElement.getAttribute('data-intro'),
            step: parseInt(currentElement.getAttribute('data-step'), 10),
            tooltipClass: currentElement.getAttribute('data-tooltipClass'),
            position: currentElement.getAttribute('data-position') || this._options.tooltipPosition
          };
        }
      }
      //next add intro items without data-step
      //todo: we need a cleanup here, two loops are redundant
      var nextStep = 0;
      for (var
           i = 0, elmsLength = allIntroSteps.length; i < elmsLength; i++) {
        var currentElement = allIntroSteps[i];
        if (currentElement.getAttribute('data-step') == null) {
          // find the next index not already claimed by an explicit data-step
          while (true) {
            if (typeof introItems[nextStep] == 'undefined') {
              break;
            } else {
              nextStep++;
            }
          }
          introItems[nextStep] = {
            element: currentElement,
            intro: currentElement.getAttribute('data-intro'),
            step: nextStep + 1,
            tooltipClass: currentElement.getAttribute('data-tooltipClass'),
            position: currentElement.getAttribute('data-position') || this._options.tooltipPosition
          };
        }
      }
    }

    //removing undefined/null elements
    var tempIntroItems = [];
    for (var z = 0; z < introItems.length; z++) {
      introItems[z] && tempIntroItems.push(introItems[z]); // copy non-empty values to the end of the array
    }
    introItems = tempIntroItems;

    //Ok, sort all items with given steps
    introItems.sort(function (a, b) {
      return a.step - b.step;
    });

    //set it to the introJs object
    self._introItems = introItems;

    //add overlay layer to the page
    if(_addOverlayLayer.call(self, targetElm)) {
      //then, start the show
      _nextStep.call(self);

      var skipButton = targetElm.querySelector('.introjs-skipbutton'),
          nextStepButton = targetElm.querySelector('.introjs-nextbutton');

      self._onKeyDown = function(e) {
        if (e.keyCode === 27 && self._options.exitOnEsc == true) {
          //escape key pressed, exit the intro
          _exitIntro.call(self, targetElm);
          //check if any callback is defined
          if (self._introExitCallback != undefined) {
            self._introExitCallback.call(self);
          }
        } else if(e.keyCode === 37) {
          //left arrow
          _previousStep.call(self);
        } else if (e.keyCode === 39 || e.keyCode === 13) {
          //right arrow or enter
          _nextStep.call(self);
          //prevent default behaviour on hitting Enter, to prevent steps being skipped in some browsers
          if(e.preventDefault) {
            e.preventDefault();
          } else {
            e.returnValue = false;
          }
        }
      };

      self._onResize = function(e) {
        _setHelperLayerPosition.call(self, document.querySelector('.introjs-helperLayer'));
      };

      if (window.addEventListener) {
        if (this._options.keyboardNavigation) {
          window.addEventListener('keydown', self._onKeyDown, true);
        }
        //for window resize
        window.addEventListener("resize", self._onResize, true);
      } else if (document.attachEvent) { //IE
        if (this._options.keyboardNavigation) {
          document.attachEvent('onkeydown', self._onKeyDown);
        }
        //for window resize
        document.attachEvent("onresize", self._onResize);
      }
    }
    return false;
  }

  /*
   * makes a copy of the object
   * @api private
   * @method _cloneObject
   */
  function _cloneObject(object) {
    // primitives and DOM nodes (anything with a nodeType) are returned as-is
    if (object == null || typeof (object) != 'object' || typeof (object.nodeType) != 'undefined') {
      return object;
    }
    var temp = {};
    for (var key in object) {
      temp[key] = _cloneObject(object[key]);
    }
    return temp;
  }

  /**
   * Go to specific step of introduction
   *
   * @api private
   * @method _goToStep
   */
  function _goToStep(step) {
    //because steps starts with zero
    this._currentStep = step - 2;
    if (typeof (this._introItems) !== 'undefined') {
      _nextStep.call(this);
    }
  }

  /**
   * Go to next step on intro
   *
   * @api private
   * @method _nextStep
   */
  function _nextStep() {
    this._direction = 'forward';

    if (typeof (this._currentStep) === 'undefined') {
      this._currentStep = 0;
    } else {
      ++this._currentStep;
    }

    if ((this._introItems.length) <= this._currentStep) {
      //end of the intro
      //check if any callback is defined
      if (typeof (this._introCompleteCallback) === 'function') {
        this._introCompleteCallback.call(this);
      }
      _exitIntro.call(this, this._targetElement);
      return;
    }

    var nextStep = this._introItems[this._currentStep];
    if (typeof (this._introBeforeChangeCallback) !== 'undefined') {
      this._introBeforeChangeCallback.call(this, nextStep.element);
    }

    _showElement.call(this, nextStep);
  }

  /**
   * Go to previous step on intro
   *
   * @api private
   * @method _previousStep
   */
  function _previousStep() {
    this._direction = 'backward';

    if (this._currentStep === 0) {
      return false;
    }

    var nextStep = this._introItems[--this._currentStep];
    if (typeof (this._introBeforeChangeCallback) !== 'undefined') {
      this._introBeforeChangeCallback.call(this, nextStep.element);
    }
    _showElement.call(this, nextStep);
  }

  /**
   * Exit from intro
   *
   * @api private
   * @method _exitIntro
   * @param {Object} targetElement
   */
  function _exitIntro(targetElement) {
    //remove overlay layer from the page
    var overlayLayer = targetElement.querySelector('.introjs-overlay');

    //return if intro already completed or skipped
    if (overlayLayer == null) {
      return;
    }

    //for fade-out animation
    overlayLayer.style.opacity = 0;
    setTimeout(function () {
      if (overlayLayer.parentNode) {
        overlayLayer.parentNode.removeChild(overlayLayer);
      }
    }, 500);

    //remove all helper layers
    var helperLayer = targetElement.querySelector('.introjs-helperLayer');
    if (helperLayer) {
      helperLayer.parentNode.removeChild(helperLayer);
    }

    //remove intro floating element
    var floatingElement = document.querySelector('.introjsFloatingElement');
    if (floatingElement) {
      floatingElement.parentNode.removeChild(floatingElement);
    }

    //remove `introjs-showElement` class from the element
    var showElement = document.querySelector('.introjs-showElement');
    if (showElement) {
      showElement.className = showElement.className.replace(/introjs-[a-zA-Z]+/g, '').replace(/^\s+|\s+$/g, ''); // This is a manual trim.
} //remove `introjs-fixParent` class from the elements var fixParents = document.querySelectorAll('.introjs-fixParent'); if (fixParents && fixParents.length > 0) { for (var i = fixParents.length - 1; i >= 0; i--) { fixParents[i].className = fixParents[i].className.replace(/introjs-fixParent/g, '').replace(/^\s+|\s+$/g, ''); }; } //clean listeners if (window.removeEventListener) { window.removeEventListener('keydown', this._onKeyDown, true); } else if (document.detachEvent) { //IE document.detachEvent('onkeydown', this._onKeyDown); } //set the step to zero this._currentStep = undefined; } /** * Render tooltip box in the page * * @api private * @method _placeTooltip * @param {Object} targetElement * @param {Object} tooltipLayer * @param {Object} arrowLayer */ function _placeTooltip(targetElement, tooltipLayer, arrowLayer, helperNumberLayer) { var tooltipCssClass = '', currentStepObj, tooltipOffset, targetElementOffset; //reset the old style tooltipLayer.style.top = null; tooltipLayer.style.right = null; tooltipLayer.style.bottom = null; tooltipLayer.style.left = null; tooltipLayer.style.marginLeft = null; tooltipLayer.style.marginTop = null; arrowLayer.style.display = 'inherit'; if (typeof(helperNumberLayer) != 'undefined' && helperNumberLayer != null) { helperNumberLayer.style.top = null; helperNumberLayer.style.left = null; } //prevent error when `this._currentStep` is undefined if (!this._introItems[this._currentStep]) return; //if we have a custom css class for each step currentStepObj = this._introItems[this._currentStep]; if (typeof (currentStepObj.tooltipClass) === 'string') { tooltipCssClass = currentStepObj.tooltipClass; } else { tooltipCssClass = this._options.tooltipClass; } tooltipLayer.className = ('introjs-tooltip ' + tooltipCssClass).replace(/^\s+|\s+$/g, ''); //custom css class for tooltip boxes var tooltipCssClass = this._options.tooltipClass; currentTooltipPosition = this._introItems[this._currentStep].position; switch (currentTooltipPosition) { 
      // Position the tooltip relative to the highlighted element and point
      // the arrow back at it.
      case 'top':
        tooltipLayer.style.left = '15px';
        tooltipLayer.style.top = '-' + (_getOffset(tooltipLayer).height + 10) + 'px';
        arrowLayer.className = 'introjs-arrow bottom';
        break;
      case 'right':
        tooltipLayer.style.left = (_getOffset(targetElement).width + 20) + 'px';
        arrowLayer.className = 'introjs-arrow left';
        break;
      case 'left':
        if (this._options.showStepNumbers == true) {
          tooltipLayer.style.top = '15px';
        }
        tooltipLayer.style.right = (_getOffset(targetElement).width + 20) + 'px';
        arrowLayer.className = 'introjs-arrow right';
        break;
      case 'floating':
        arrowLayer.style.display = 'none';

        //we have to adjust the top and left of layer manually for intro items without element
        tooltipOffset = _getOffset(tooltipLayer);

        tooltipLayer.style.left = '50%';
        tooltipLayer.style.top = '50%';
        tooltipLayer.style.marginLeft = '-' + (tooltipOffset.width / 2) + 'px';
        tooltipLayer.style.marginTop = '-' + (tooltipOffset.height / 2) + 'px';

        if (typeof(helperNumberLayer) != 'undefined' && helperNumberLayer != null) {
          helperNumberLayer.style.left = '-' + ((tooltipOffset.width / 2) + 18) + 'px';
          helperNumberLayer.style.top = '-' + ((tooltipOffset.height / 2) + 18) + 'px';
        }
        break;
      case 'bottom-right-aligned':
        arrowLayer.className = 'introjs-arrow top-right';
        tooltipLayer.style.right = '0px';
        tooltipLayer.style.bottom = '-' + (_getOffset(tooltipLayer).height + 10) + 'px';
        break;
      case 'bottom-middle-aligned':
        targetElementOffset = _getOffset(targetElement);
        tooltipOffset = _getOffset(tooltipLayer);

        arrowLayer.className = 'introjs-arrow top-middle';
        tooltipLayer.style.left = (targetElementOffset.width / 2 - tooltipOffset.width / 2) + 'px';
        tooltipLayer.style.bottom = '-' + (tooltipOffset.height + 10) + 'px';
        break;
      case 'bottom-left-aligned':
      // Bottom-left-aligned is the same as the default bottom
      case 'bottom':
      // Bottom going to follow the default behavior
      default:
        tooltipLayer.style.bottom = '-' + (_getOffset(tooltipLayer).height + 10) + 'px';
        arrowLayer.className = 'introjs-arrow top';
        break;
    }
  }

  /**
   * Update the position of the helper layer on the screen
   *
   * @api private
   * @method _setHelperLayerPosition
   * @param {Object} helperLayer
   */
  function _setHelperLayerPosition(helperLayer) {
    if (helperLayer) {
      //prevent error when `this._currentStep` in undefined
      if (!this._introItems[this._currentStep]) return;

      var currentElement = this._introItems[this._currentStep],
          elementPosition = _getOffset(currentElement.element),
          widthHeightPadding = 10;

      // floating steps have no real target, so no highlight padding
      if (currentElement.position == 'floating') {
        widthHeightPadding = 0;
      }

      //set new position to helper layer
      helperLayer.setAttribute('style', 'width: ' + (elementPosition.width + widthHeightPadding) + 'px; ' +
                                        'height:' + (elementPosition.height + widthHeightPadding) + 'px; ' +
                                        'top:' + (elementPosition.top - 5) + 'px;' +
                                        'left: ' + (elementPosition.left - 5) + 'px;');
    }
  }

  /**
   * Show an element on the page
   *
   * @api private
   * @method _showElement
   * @param {Object} targetElement
   */
  function _showElement(targetElement) {
    if (typeof (this._introChangeCallback) !== 'undefined') {
      this._introChangeCallback.call(this, targetElement.element);
    }

    var self = this,
        oldHelperLayer = document.querySelector('.introjs-helperLayer'),
        elementPosition = _getOffset(targetElement.element);

    // reuse the existing helper layer when one is already on the page
    if (oldHelperLayer != null) {
      var oldHelperNumberLayer = oldHelperLayer.querySelector('.introjs-helperNumberLayer'),
          oldtooltipLayer = oldHelperLayer.querySelector('.introjs-tooltiptext'),
          oldArrowLayer = oldHelperLayer.querySelector('.introjs-arrow'),
          oldtooltipContainer = oldHelperLayer.querySelector('.introjs-tooltip'),
          skipTooltipButton = oldHelperLayer.querySelector('.introjs-skipbutton'),
          prevTooltipButton = oldHelperLayer.querySelector('.introjs-prevbutton'),
          nextTooltipButton = oldHelperLayer.querySelector('.introjs-nextbutton');

      //hide the tooltip
      oldtooltipContainer.style.opacity = 0;

      if (oldHelperNumberLayer != null) {
        var lastIntroItem = this._introItems[(targetElement.step - 2 >= 0 ?
                                              targetElement.step - 2 : 0)];
        if (lastIntroItem != null && (this._direction == 'forward' && lastIntroItem.position == 'floating') || (this._direction == 'backward' && targetElement.position == 'floating')) {
          oldHelperNumberLayer.style.opacity = 0;
        }
      }

      //set new position to helper layer
      _setHelperLayerPosition.call(self, oldHelperLayer);

      //remove `introjs-fixParent` class from the elements
      var fixParents = document.querySelectorAll('.introjs-fixParent');
      if (fixParents && fixParents.length > 0) {
        for (var i = fixParents.length - 1; i >= 0; i--) {
          fixParents[i].className = fixParents[i].className.replace(/introjs-fixParent/g, '').replace(/^\s+|\s+$/g, '');
        };
      }

      //remove old classes
      var oldShowElement = document.querySelector('.introjs-showElement');
      oldShowElement.className = oldShowElement.className.replace(/introjs-[a-zA-Z]+/g, '').replace(/^\s+|\s+$/g, '');

      //we should wait until the CSS3 transition is completed (it's 0.3 sec) to prevent incorrect `height` and `width` calculation
      if (self._lastShowElementTimer) {
        clearTimeout(self._lastShowElementTimer);
      }
      self._lastShowElementTimer = setTimeout(function() {
        //set current step to the label
        if (oldHelperNumberLayer != null) {
          oldHelperNumberLayer.innerHTML = targetElement.step;
        }
        //set current tooltip text
        oldtooltipLayer.innerHTML = targetElement.intro;
        //set the tooltip position
        _placeTooltip.call(self, targetElement.element, oldtooltipContainer, oldArrowLayer, oldHelperNumberLayer);

        //change active bullet
        oldHelperLayer.querySelector('.introjs-bullets li > a.active').className = '';
        oldHelperLayer.querySelector('.introjs-bullets li > a[data-stepnumber="' + targetElement.step + '"]').className = 'active';

        //show the tooltip
        oldtooltipContainer.style.opacity = 1;
        if (oldHelperNumberLayer) oldHelperNumberLayer.style.opacity = 1;
      }, 350);
    } else {
      // first step: build the helper layer, tooltip, bullets and buttons
      var helperLayer = document.createElement('div'),
          arrowLayer = document.createElement('div'),
          tooltipLayer = document.createElement('div'),
          tooltipTextLayer =
document.createElement('div'), bulletsLayer = document.createElement('div'), buttonsLayer = document.createElement('div'); helperLayer.className = 'introjs-helperLayer'; //set new position to helper layer _setHelperLayerPosition.call(self, helperLayer); //add helper layer to target element this._targetElement.appendChild(helperLayer); arrowLayer.className = 'introjs-arrow'; tooltipTextLayer.className = 'introjs-tooltiptext'; tooltipTextLayer.innerHTML = targetElement.intro; bulletsLayer.className = 'introjs-bullets'; if (this._options.showBullets === false) { bulletsLayer.style.display = 'none'; } var ulContainer = document.createElement('ul'); for (var i = 0, stepsLength = this._introItems.length; i < stepsLength; i++) { var innerLi = document.createElement('li'); var anchorLink = document.createElement('a'); anchorLink.onclick = function() { self.goToStep(this.getAttribute('data-stepnumber')); }; if (i === 0) anchorLink.className = "active"; anchorLink.href = 'javascript:void(0);'; anchorLink.innerHTML = "&nbsp;"; anchorLink.setAttribute('data-stepnumber', this._introItems[i].step); innerLi.appendChild(anchorLink); ulContainer.appendChild(innerLi); } bulletsLayer.appendChild(ulContainer); buttonsLayer.className = 'introjs-tooltipbuttons'; if (this._options.showButtons === false) { buttonsLayer.style.display = 'none'; } tooltipLayer.className = 'introjs-tooltip'; tooltipLayer.appendChild(tooltipTextLayer); tooltipLayer.appendChild(bulletsLayer); //add helper layer number if (this._options.showStepNumbers == true) { var helperNumberLayer = document.createElement('span'); helperNumberLayer.className = 'introjs-helperNumberLayer'; helperNumberLayer.innerHTML = targetElement.step; helperLayer.appendChild(helperNumberLayer); } tooltipLayer.appendChild(arrowLayer); helperLayer.appendChild(tooltipLayer); //next button var nextTooltipButton = document.createElement('a'); nextTooltipButton.onclick = function() { if (self._introItems.length - 1 != self._currentStep) { 
          _nextStep.call(self);
        }
      };

      nextTooltipButton.href = 'javascript:void(0);';
      nextTooltipButton.innerHTML = this._options.nextLabel;

      //previous button
      var prevTooltipButton = document.createElement('a');

      prevTooltipButton.onclick = function() {
        if (self._currentStep != 0) {
          _previousStep.call(self);
        }
      };

      prevTooltipButton.href = 'javascript:void(0);';
      prevTooltipButton.innerHTML = this._options.prevLabel;

      //skip button
      var skipTooltipButton = document.createElement('a');
      skipTooltipButton.className = 'introjs-button introjs-skipbutton';
      skipTooltipButton.href = 'javascript:void(0);';
      skipTooltipButton.innerHTML = this._options.skipLabel;

      skipTooltipButton.onclick = function() {
        // fire the "complete" callback only when skipping from the last step
        if (self._introItems.length - 1 == self._currentStep && typeof (self._introCompleteCallback) === 'function') {
          self._introCompleteCallback.call(self);
        }

        if (self._introItems.length - 1 != self._currentStep && typeof (self._introExitCallback) === 'function') {
          self._introExitCallback.call(self);
        }

        _exitIntro.call(self, self._targetElement);
      };

      buttonsLayer.appendChild(skipTooltipButton);

      //in order to prevent displaying next/previous button always
      if (this._introItems.length > 1) {
        buttonsLayer.appendChild(prevTooltipButton);
        buttonsLayer.appendChild(nextTooltipButton);
      }

      tooltipLayer.appendChild(buttonsLayer);

      //set proper position
      _placeTooltip.call(self, targetElement.element, tooltipLayer, arrowLayer, helperNumberLayer);
    }

    // enable/disable the navigation buttons for first/middle/last step
    if (this._currentStep == 0 && this._introItems.length > 1) {
      prevTooltipButton.className = 'introjs-button introjs-prevbutton introjs-disabled';
      nextTooltipButton.className = 'introjs-button introjs-nextbutton';
      skipTooltipButton.innerHTML = this._options.skipLabel;
    } else if (this._introItems.length - 1 == this._currentStep || this._introItems.length == 1) {
      skipTooltipButton.innerHTML = this._options.doneLabel;
      prevTooltipButton.className = 'introjs-button introjs-prevbutton';
      nextTooltipButton.className = 'introjs-button introjs-nextbutton introjs-disabled';
    } else {
prevTooltipButton.className = 'introjs-button introjs-prevbutton'; nextTooltipButton.className = 'introjs-button introjs-nextbutton'; skipTooltipButton.innerHTML = this._options.skipLabel; } //Set focus on "next" button, so that hitting Enter always moves you onto the next step nextTooltipButton.focus(); //add target element position style targetElement.element.className += ' introjs-showElement'; var currentElementPosition = _getPropValue(targetElement.element, 'position'); if (currentElementPosition !== 'absolute' && currentElementPosition !== 'relative') { //change to new intro item targetElement.element.className += ' introjs-relativePosition'; } var parentElm = targetElement.element.parentNode; while (parentElm != null) { if (parentElm.tagName.toLowerCase() === 'body') break; //fix The Stacking Contenxt problem. //More detail: https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Understanding_z_index/The_stacking_context var zIndex = _getPropValue(parentElm, 'z-index'); var opacity = parseFloat(_getPropValue(parentElm, 'opacity')); if (/[0-9]+/.test(zIndex) || opacity < 1) { parentElm.className += ' introjs-fixParent'; } parentElm = parentElm.parentNode; } if (!_elementInViewport(targetElement.element) && this._options.scrollToElement === true) { var rect = targetElement.element.getBoundingClientRect(), winHeight=_getWinSize().height, top = rect.bottom - (rect.bottom - rect.top), bottom = rect.bottom - winHeight; //Scroll up if (top < 0 || targetElement.element.clientHeight > winHeight) { window.scrollBy(0, top - 30); // 30px padding from edge to look nice //Scroll down } else { window.scrollBy(0, bottom + 100); // 70px + 30px padding from edge to look nice } } if (typeof (this._introAfterChangeCallback) !== 'undefined') { this._introAfterChangeCallback.call(this, targetElement.element); } } /** * Get an element CSS property on the page * Thanks to JavaScript Kit: http://www.javascriptkit.com/dhtmltutors/dhtmlcascade4.shtml * * @api private * @method 
_getPropValue
 * @param {Object} element
 * @param {String} propName
 * @returns Element's property value (lower-cased when the value supports it)
 */
function _getPropValue (element, propName) {
  var propValue = '';
  if (element.currentStyle) { //IE
    propValue = element.currentStyle[propName];
  } else if (document.defaultView && document.defaultView.getComputedStyle) { //Others
    propValue = document.defaultView.getComputedStyle(element, null).getPropertyValue(propName);
  }

  //Prevent exception in IE
  if (propValue && propValue.toLowerCase) {
    return propValue.toLowerCase();
  } else {
    return propValue;
  }
}

/**
 * Provides a cross-browser way to get the screen dimensions
 * via: http://stackoverflow.com/questions/5864467/internet-explorer-innerheight
 *
 * @api private
 * @method _getWinSize
 * @returns {Object} width and height attributes
 */
function _getWinSize() {
  if (window.innerWidth != undefined) {
    return { width: window.innerWidth, height: window.innerHeight };
  } else {
    //older IE: fall back to the document element's client size
    var D = document.documentElement;
    return { width: D.clientWidth, height: D.clientHeight };
  }
}

/**
 * Check whether an element is visible inside the current viewport.
 * (NOTE(review): the original header said "Add overlay layer to the page" — a copy/paste error.)
 * http://stackoverflow.com/questions/123999/how-to-tell-if-a-dom-element-is-visible-in-the-current-viewport
 *
 * @api private
 * @method _elementInViewport
 * @param {Object} el
 */
function _elementInViewport(el) {
  var rect = el.getBoundingClientRect();

  return (
    rect.top >= 0 &&
    rect.left >= 0 &&
    (rect.bottom+80) <= window.innerHeight &&   // add 80 so the tooltip text below the element also fits
    rect.right <= window.innerWidth
  );
}

/**
 * Add overlay layer to the page
 *
 * @api private
 * @method _addOverlayLayer
 * @param {Object} targetElm
 */
function _addOverlayLayer(targetElm) {
  var overlayLayer = document.createElement('div'),
      styleText = '',
      self = this;

  //set css class name
  overlayLayer.className = 'introjs-overlay';

  //check if the target element is body, we should calculate the size of overlay layer in a better way
  if (targetElm.tagName.toLowerCase() === 'body') {
    styleText += 'top: 0;bottom: 0; left: 0;right: 0;position: fixed;';
    overlayLayer.setAttribute('style', styleText);
  } else {
    //set overlay layer position to cover exactly the target element
    var elementPosition = _getOffset(targetElm);
    if (elementPosition) {
      styleText += 'width: ' + elementPosition.width + 'px; height:' + elementPosition.height + 'px; top:' + elementPosition.top + 'px;left: ' + elementPosition.left + 'px;';
      overlayLayer.setAttribute('style', styleText);
    }
  }

  targetElm.appendChild(overlayLayer);

  overlayLayer.onclick = function() {
    if (self._options.exitOnOverlayClick == true) {
      _exitIntro.call(self, targetElm);

      //check if any callback is defined
      if (self._introExitCallback != undefined) {
        self._introExitCallback.call(self);
      }
    }
  };

  //apply the opacity slightly later so the overlay fades in; styleText keeps accumulating
  setTimeout(function() {
    styleText += 'opacity: ' + self._options.overlayOpacity.toString() + ';';
    overlayLayer.setAttribute('style', styleText);
  }, 10);

  return true;
}

/**
 * Get an element position on the page
 * Thanks to `meouw`: http://stackoverflow.com/a/442474/375966
 *
 * @api private
 * @method _getOffset
 * @param {Object} element
 * @returns Element's position info
 */
function _getOffset(element) {
  var elementPosition = {};

  //set width
  elementPosition.width = element.offsetWidth;

  //set height
  elementPosition.height = element.offsetHeight;

  //calculate element top and left by walking up the offsetParent chain
  var _x = 0;
  var _y = 0;
  while (element && !isNaN(element.offsetLeft) && !isNaN(element.offsetTop)) {
    _x += element.offsetLeft;
    _y += element.offsetTop;
    element = element.offsetParent;
  }
  //set top
  elementPosition.top = _y;
  //set left
  elementPosition.left = _x;

  return elementPosition;
}

/**
 * Overwrites obj1's values with obj2's and adds obj2's if non existent in obj1
 * via: http://stackoverflow.com/questions/171251/how-can-i-merge-properties-of-two-javascript-objects-dynamically
 *
 * @param obj1
 * @param obj2
 * @returns obj3 a new object based on obj1 and obj2
 */
function _mergeOptions(obj1,obj2) {
  var obj3 = {};
  for (var attrname in obj1) { obj3[attrname] = obj1[attrname]; }
  for (var attrname in obj2) { obj3[attrname] = obj2[attrname]; }
  return obj3;
}

var
introJs = function (targetElm) { if (typeof (targetElm) === 'object') { //Ok, create a new instance return new IntroJs(targetElm); } else if (typeof (targetElm) === 'string') { //select the target element with query selector var targetElement = document.querySelector(targetElm); if (targetElement) { return new IntroJs(targetElement); } else { throw new Error('There is no element with given selector.'); } } else { return new IntroJs(document.body); } }; /** * Current IntroJs version * * @property version * @type String */ introJs.version = VERSION; //Prototype introJs.fn = IntroJs.prototype = { clone: function () { return new IntroJs(this); }, setOption: function(option, value) { this._options[option] = value; return this; }, setOptions: function(options) { this._options = _mergeOptions(this._options, options); return this; }, start: function () { _introForElement.call(this, this._targetElement); return this; }, goToStep: function(step) { _goToStep.call(this, step); return this; }, nextStep: function() { _nextStep.call(this); return this; }, previousStep: function() { _previousStep.call(this); return this; }, exit: function() { _exitIntro.call(this, this._targetElement); }, refresh: function() { _setHelperLayerPosition.call(this, document.querySelector('.introjs-helperLayer')); return this; }, onbeforechange: function(providedCallback) { if (typeof (providedCallback) === 'function') { this._introBeforeChangeCallback = providedCallback; } else { throw new Error('Provided callback for onbeforechange was not a function'); } return this; }, onchange: function(providedCallback) { if (typeof (providedCallback) === 'function') { this._introChangeCallback = providedCallback; } else { throw new Error('Provided callback for onchange was not a function.'); } return this; }, onafterchange: function(providedCallback) { if (typeof (providedCallback) === 'function') { this._introAfterChangeCallback = providedCallback; } else { throw new Error('Provided callback for onafterchange 
was not a function'); } return this; }, oncomplete: function(providedCallback) { if (typeof (providedCallback) === 'function') { this._introCompleteCallback = providedCallback; } else { throw new Error('Provided callback for oncomplete was not a function.'); } return this; }, onexit: function(providedCallback) { if (typeof (providedCallback) === 'function') { this._introExitCallback = providedCallback; } else { throw new Error('Provided callback for onexit was not a function.'); } return this; } }; exports.introJs = introJs; return introJs; }));
{ "content_hash": "f5f69667209d52401bbef91e331cca91", "timestamp": "", "source": "github", "line_count": 1028, "max_line_length": 185, "avg_line_length": 34.494163424124515, "alnum_prop": 0.6084320360970107, "repo_name": "RTKKENYA/RTK", "id": "b78296a1a5d482ef3cfb12bb9ee4b1e9a4d872e7", "size": "35636", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "assets/bower_components/intro.js/intro.js", "mode": "33261", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "45431" }, { "name": "ApacheConf", "bytes": "1471" }, { "name": "Batchfile", "bytes": "6463" }, { "name": "CSS", "bytes": "923141" }, { "name": "HTML", "bytes": "5755118" }, { "name": "JavaScript", "bytes": "5454096" }, { "name": "PHP", "bytes": "14230036" } ] }
layout: page title: Williams International Party date: 2016-05-24 author: Anthony Duke tags: weekly links, java status: published summary: Etiam non massa nisi. Etiam auctor consequat. banner: images/banner/office-01.jpg booking: startDate: 05/02/2019 endDate: 05/03/2019 ctyhocn: FMYAPHX groupCode: WIP published: true --- Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras bibendum, arcu eget laoreet faucibus, nisl augue rhoncus massa, a hendrerit neque neque sit amet lorem. Vestibulum nec blandit dolor. Cras vestibulum lobortis diam eu pretium. Vestibulum eget elementum elit. Vivamus ex urna, porta ut elit et, porta pharetra nunc. Integer rhoncus vitae nunc at tincidunt. Donec in ante hendrerit, ornare lorem vitae, scelerisque risus. Quisque dapibus mattis massa vel sollicitudin. Phasellus tempus justo sed metus porta blandit. Duis quis ex non ex vestibulum euismod. * Proin efficitur nulla id lacinia sodales * Aenean vitae neque non mauris congue congue eget in massa. Vivamus gravida varius lacus ac aliquet. Ut consequat lorem sapien, sit amet dictum elit tristique a. Praesent accumsan eu urna ut vehicula. Pellentesque tristique fermentum tellus, non auctor massa. In quam libero, dapibus in bibendum fringilla, sagittis id ante. Praesent nisi nunc, tristique id ligula vitae, ultrices dictum leo. Vestibulum consectetur a lorem sit amet congue. Donec eu placerat sem. Mauris placerat maximus vehicula. Aenean eget ligula imperdiet, aliquam enim ac, varius metus. Phasellus condimentum, neque nec accumsan tincidunt, libero felis lobortis velit, id malesuada quam nunc in augue. Interdum et malesuada fames ac ante ipsum primis in faucibus.
{ "content_hash": "0ca836bcf8b27b94b930fc8a27ae0270", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 675, "avg_line_length": 80, "alnum_prop": 0.8065476190476191, "repo_name": "KlishGroup/prose-pogs", "id": "be60add57fbd34021249e5824f30a1a14180e108", "size": "1684", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "pogs/F/FMYAPHX/WIP/index.md", "mode": "33188", "license": "mit", "language": [] }
package com.cloud.host.dao; import com.cloud.cluster.agentlb.HostTransferMapVO; import com.cloud.cluster.agentlb.dao.HostTransferMapDao; import com.cloud.dc.ClusterVO; import com.cloud.dc.dao.ClusterDao; import com.cloud.gpu.dao.HostGpuGroupsDao; import com.cloud.gpu.dao.VGPUTypesDao; import com.cloud.host.HostTagVO; import com.cloud.host.HostVO; import com.cloud.info.RunningHostCountInfo; import com.cloud.legacymodel.dc.Host; import com.cloud.legacymodel.dc.HostStatus; import com.cloud.legacymodel.exceptions.CloudRuntimeException; import com.cloud.legacymodel.resource.ResourceState; import com.cloud.legacymodel.vm.VgpuTypesInfo; import com.cloud.model.enumeration.Event; import com.cloud.model.enumeration.HostType; import com.cloud.model.enumeration.ManagedState; import com.cloud.utils.DateUtil; import com.cloud.utils.db.Attribute; import com.cloud.utils.db.DB; import com.cloud.utils.db.Filter; import com.cloud.utils.db.GenericDaoBase; import com.cloud.utils.db.GenericSearchBuilder; import com.cloud.utils.db.JoinBuilder; import com.cloud.utils.db.JoinBuilder.JoinType; import com.cloud.utils.db.SearchBuilder; import com.cloud.utils.db.SearchCriteria; import com.cloud.utils.db.SearchCriteria.Func; import com.cloud.utils.db.SearchCriteria.Op; import com.cloud.utils.db.TransactionLegacy; import com.cloud.utils.db.UpdateBuilder; import javax.annotation.PostConstruct; import javax.ejb.Local; import javax.inject.Inject; import javax.persistence.TableGenerator; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TimeZone; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; @Component @Local(value = {HostDao.class}) @DB @TableGenerator(name = "host_req_sq", table = "op_host", pkColumnName = "id", valueColumnName = "sequence", allocationSize = 1) 
/**
 * {@link HostDao} implementation. Every recurring query shape is prepared once in
 * {@link #init()} as a named {@code SearchBuilder} template and instantiated per call
 * via {@code create()}; the template field names below describe the conditions they carry.
 */
public class HostDaoImpl extends GenericDaoBase<HostVO, Long> implements HostDao { //FIXME: , ExternalIdDao {
    private static final Logger s_logger = LoggerFactory.getLogger(HostDaoImpl.class);
    // NOTE(review): these two loggers are keyed to the status/state classes; they are not used
    // in this part of the file — presumably for status/state transition logging elsewhere.
    private static final Logger status_logger = LoggerFactory.getLogger(HostStatus.class);
    private static final Logger state_logger = LoggerFactory.getLogger(ResourceState.class);

    // Pre-built query templates over HostVO, wired up in init().
    protected SearchBuilder<HostVO> TypePodDcStatusSearch;
    protected SearchBuilder<HostVO> IdStatusSearch;
    protected SearchBuilder<HostVO> TypeDcSearch;
    protected SearchBuilder<HostVO> TypeDcStatusSearch;
    protected SearchBuilder<HostVO> TypeClusterStatusSearch;
    protected SearchBuilder<HostVO> MsStatusSearch;
    protected SearchBuilder<HostVO> DcPrivateIpAddressSearch;
    protected SearchBuilder<HostVO> DcStorageIpAddressSearch;
    protected SearchBuilder<HostVO> PublicIpAddressSearch;
    protected SearchBuilder<HostVO> GuidSearch;
    protected SearchBuilder<HostVO> DcSearch;
    protected SearchBuilder<HostVO> PodSearch;
    protected SearchBuilder<HostVO> ClusterSearch;
    protected SearchBuilder<HostVO> TypeSearch;
    protected SearchBuilder<HostVO> StatusSearch;
    protected SearchBuilder<HostVO> ResourceStateSearch;
    protected SearchBuilder<HostVO> NameLikeSearch;
    protected SearchBuilder<HostVO> NameSearch;
    protected SearchBuilder<HostVO> SequenceSearch;
    protected SearchBuilder<HostVO> DirectlyConnectedSearch;
    protected SearchBuilder<HostVO> UnmanagedDirectConnectSearch;
    protected SearchBuilder<HostVO> UnmanagedApplianceSearch;
    protected SearchBuilder<HostVO> MaintenanceCountSearch;
    protected SearchBuilder<HostVO> ClusterStatusSearch;
    protected SearchBuilder<HostVO> TypeNameZoneSearch;
    protected SearchBuilder<HostVO> AvailHypevisorInZone;
    protected SearchBuilder<HostVO> DirectConnectSearch;
    protected SearchBuilder<HostVO> ManagedDirectConnectSearch;
    protected SearchBuilder<HostVO> ManagedRoutingServersSearch;
    protected SearchBuilder<HostVO> SecondaryStorageVMSearch;

    // Templates that project single Long values (ids / counts) instead of full rows.
    protected GenericSearchBuilder<HostVO, Long> HostIdSearch;
    protected GenericSearchBuilder<HostVO, Long> HostsInStatusSearch;
    protected GenericSearchBuilder<HostVO, Long> CountRoutingByDc;
    protected SearchBuilder<HostTransferMapVO> HostTransferSearch;
    protected SearchBuilder<ClusterVO> ClusterManagedSearch;
    protected SearchBuilder<HostVO> RoutingSearch;
    protected SearchBuilder<HostVO> HostsForReconnectSearch;
    protected GenericSearchBuilder<HostVO, Long> ClustersOwnedByMSSearch;
    protected GenericSearchBuilder<HostVO, Long> ClustersForHostsNotOwnedByAnyMSSearch;
    protected GenericSearchBuilder<ClusterVO, Long> AllClustersSearch;
    protected SearchBuilder<HostVO> HostsInClusterSearch;

    // Column attributes cached from _allAttributes at the end of init().
    protected Attribute _statusAttr;
    protected Attribute _resourceStateAttr;
    protected Attribute _msIdAttr;
    protected Attribute _pingTimeAttr;

    @Inject
    protected HostDetailsDao _detailsDao;
    @Inject
    protected HostGpuGroupsDao _hostGpuGroupsDao;
    @Inject
    protected VGPUTypesDao _vgpuTypesDao;
    @Inject
    protected HostTagsDao _hostTagsDao;
    @Inject
    protected HostTransferMapDao _hostTransferDao;
    @Inject
    protected ClusterDao _clusterDao;

    public HostDaoImpl() {
        super();
    }

    /** Builds all SearchBuilder templates exactly once, after dependency injection. */
    @PostConstruct
    public void init() {
        // Hosts of one cluster in any of the given resource states (used by countBy()).
        this.MaintenanceCountSearch = createSearchBuilder();
        this.MaintenanceCountSearch.and("cluster", this.MaintenanceCountSearch.entity().getClusterId(), SearchCriteria.Op.EQ);
        this.MaintenanceCountSearch.and("resourceState", this.MaintenanceCountSearch.entity().getResourceState(), SearchCriteria.Op.IN);
        this.MaintenanceCountSearch.done();

        this.TypePodDcStatusSearch = createSearchBuilder();
        final HostVO entity = this.TypePodDcStatusSearch.entity();
        this.TypePodDcStatusSearch.and("type", entity.getType(), SearchCriteria.Op.EQ);
        this.TypePodDcStatusSearch.and("pod", entity.getPodId(), SearchCriteria.Op.EQ);
        this.TypePodDcStatusSearch.and("dc", entity.getDataCenterId(), SearchCriteria.Op.EQ);
        this.TypePodDcStatusSearch.and("cluster", entity.getClusterId(), SearchCriteria.Op.EQ);
        this.TypePodDcStatusSearch.and("status", entity.getStatus(), SearchCriteria.Op.EQ);
this.TypePodDcStatusSearch.and("resourceState", entity.getResourceState(), SearchCriteria.Op.EQ);
        this.TypePodDcStatusSearch.done();

        // Hosts filtered by owning management server, excluding given resource states (NIN).
        this.MsStatusSearch = createSearchBuilder();
        this.MsStatusSearch.and("ms", this.MsStatusSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ);
        this.MsStatusSearch.and("type", this.MsStatusSearch.entity().getType(), SearchCriteria.Op.EQ);
        this.MsStatusSearch.and("resourceState", this.MsStatusSearch.entity().getResourceState(), SearchCriteria.Op.NIN);
        this.MsStatusSearch.done();

        this.TypeDcSearch = createSearchBuilder();
        this.TypeDcSearch.and("type", this.TypeDcSearch.entity().getType(), SearchCriteria.Op.EQ);
        this.TypeDcSearch.and("dc", this.TypeDcSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        this.TypeDcSearch.done();

        this.SecondaryStorageVMSearch = createSearchBuilder();
        this.SecondaryStorageVMSearch.and("type", this.SecondaryStorageVMSearch.entity().getType(), SearchCriteria.Op.EQ);
        this.SecondaryStorageVMSearch.and("dc", this.SecondaryStorageVMSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        this.SecondaryStorageVMSearch.and("status", this.SecondaryStorageVMSearch.entity().getStatus(), SearchCriteria.Op.EQ);
        this.SecondaryStorageVMSearch.done();

        this.TypeDcStatusSearch = createSearchBuilder();
        this.TypeDcStatusSearch.and("type", this.TypeDcStatusSearch.entity().getType(), SearchCriteria.Op.EQ);
        this.TypeDcStatusSearch.and("dc", this.TypeDcStatusSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        this.TypeDcStatusSearch.and("status", this.TypeDcStatusSearch.entity().getStatus(), SearchCriteria.Op.EQ);
        this.TypeDcStatusSearch.and("resourceState", this.TypeDcStatusSearch.entity().getResourceState(), SearchCriteria.Op.EQ);
        this.TypeDcStatusSearch.done();

        this.TypeClusterStatusSearch = createSearchBuilder();
        this.TypeClusterStatusSearch.and("type", this.TypeClusterStatusSearch.entity().getType(), SearchCriteria.Op.EQ);
        this.TypeClusterStatusSearch.and("cluster", this.TypeClusterStatusSearch.entity().getClusterId(), SearchCriteria.Op.EQ);
        this.TypeClusterStatusSearch.and("status", this.TypeClusterStatusSearch.entity().getStatus(), SearchCriteria.Op.EQ);
        this.TypeClusterStatusSearch.and("resourceState", this.TypeClusterStatusSearch.entity().getResourceState(), SearchCriteria.Op.EQ);
        this.TypeClusterStatusSearch.done();

        this.IdStatusSearch = createSearchBuilder();
        this.IdStatusSearch.and("id", this.IdStatusSearch.entity().getId(), SearchCriteria.Op.EQ);
        this.IdStatusSearch.and("states", this.IdStatusSearch.entity().getStatus(), SearchCriteria.Op.IN);
        this.IdStatusSearch.done();

        this.DcPrivateIpAddressSearch = createSearchBuilder();
        this.DcPrivateIpAddressSearch.and("privateIpAddress", this.DcPrivateIpAddressSearch.entity().getPrivateIpAddress(), SearchCriteria.Op.EQ);
        this.DcPrivateIpAddressSearch.and("dc", this.DcPrivateIpAddressSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        this.DcPrivateIpAddressSearch.done();

        this.DcStorageIpAddressSearch = createSearchBuilder();
        this.DcStorageIpAddressSearch.and("storageIpAddress", this.DcStorageIpAddressSearch.entity().getStorageIpAddress(), SearchCriteria.Op.EQ);
        this.DcStorageIpAddressSearch.and("dc", this.DcStorageIpAddressSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        this.DcStorageIpAddressSearch.done();

        this.PublicIpAddressSearch = createSearchBuilder();
        this.PublicIpAddressSearch.and("publicIpAddress", this.PublicIpAddressSearch.entity().getPublicIpAddress(), SearchCriteria.Op.EQ);
        this.PublicIpAddressSearch.done();

        this.GuidSearch = createSearchBuilder();
        this.GuidSearch.and("guid", this.GuidSearch.entity().getGuid(), SearchCriteria.Op.EQ);
        this.GuidSearch.done();

        this.DcSearch = createSearchBuilder();
        this.DcSearch.and("dc", this.DcSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        this.DcSearch.and("type", this.DcSearch.entity().getType(), Op.EQ);
        this.DcSearch.and("status", this.DcSearch.entity().getStatus(), Op.EQ);
        this.DcSearch.and("resourceState", this.DcSearch.entity().getResourceState(), Op.EQ);
        this.DcSearch.done();

        this.ClusterStatusSearch = createSearchBuilder();
        this.ClusterStatusSearch.and("cluster", this.ClusterStatusSearch.entity().getClusterId(), SearchCriteria.Op.EQ);
        this.ClusterStatusSearch.and("status", this.ClusterStatusSearch.entity().getStatus(), SearchCriteria.Op.EQ);
        this.ClusterStatusSearch.done();

        this.TypeNameZoneSearch = createSearchBuilder();
        this.TypeNameZoneSearch.and("name", this.TypeNameZoneSearch.entity().getName(), SearchCriteria.Op.EQ);
        this.TypeNameZoneSearch.and("type", this.TypeNameZoneSearch.entity().getType(), SearchCriteria.Op.EQ);
        this.TypeNameZoneSearch.and("zoneId", this.TypeNameZoneSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        this.TypeNameZoneSearch.done();

        this.PodSearch = createSearchBuilder();
        this.PodSearch.and("podId", this.PodSearch.entity().getPodId(), SearchCriteria.Op.EQ);
        this.PodSearch.done();

        this.ClusterSearch = createSearchBuilder();
        this.ClusterSearch.and("clusterId", this.ClusterSearch.entity().getClusterId(), SearchCriteria.Op.EQ);
        this.ClusterSearch.done();

        this.TypeSearch = createSearchBuilder();
        this.TypeSearch.and("type", this.TypeSearch.entity().getType(), SearchCriteria.Op.EQ);
        this.TypeSearch.done();

        this.StatusSearch = createSearchBuilder();
        this.StatusSearch.and("status", this.StatusSearch.entity().getStatus(), SearchCriteria.Op.IN);
        this.StatusSearch.done();

        this.ResourceStateSearch = createSearchBuilder();
        this.ResourceStateSearch.and("resourceState", this.ResourceStateSearch.entity().getResourceState(), SearchCriteria.Op.IN);
        this.ResourceStateSearch.done();

        this.NameLikeSearch = createSearchBuilder();
        this.NameLikeSearch.and("name", this.NameLikeSearch.entity().getName(), SearchCriteria.Op.LIKE);
        this.NameLikeSearch.done();

        this.NameSearch = createSearchBuilder();
        this.NameSearch.and("name", this.NameSearch.entity().getName(), SearchCriteria.Op.EQ);
        this.NameSearch.done();

        this.SequenceSearch = createSearchBuilder();
        this.SequenceSearch.and("id", this.SequenceSearch.entity().getId(), SearchCriteria.Op.EQ);
        // SequenceSearch.addRetrieve("sequence", SequenceSearch.entity().getSequence());
        this.SequenceSearch.done();

        this.DirectlyConnectedSearch = createSearchBuilder();
        this.DirectlyConnectedSearch.and("resource", this.DirectlyConnectedSearch.entity().getResource(), SearchCriteria.Op.NNULL);
        this.DirectlyConnectedSearch.and("ms", this.DirectlyConnectedSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ);
        this.DirectlyConnectedSearch.and("statuses", this.DirectlyConnectedSearch.entity().getStatus(), SearchCriteria.Op.EQ);
        this.DirectlyConnectedSearch.and("resourceState", this.DirectlyConnectedSearch.entity().getResourceState(), SearchCriteria.Op.NOTIN);
        this.DirectlyConnectedSearch.done();

        // Direct-connect hosts not owned by any management server, restricted (via joins below)
        // to hosts that are not mid-transfer and whose cluster is in the requested managed state.
        this.UnmanagedDirectConnectSearch = createSearchBuilder();
        this.UnmanagedDirectConnectSearch.and("resource", this.UnmanagedDirectConnectSearch.entity().getResource(), SearchCriteria.Op.NNULL);
        this.UnmanagedDirectConnectSearch.and("server", this.UnmanagedDirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL);
        this.UnmanagedDirectConnectSearch.and("lastPinged", this.UnmanagedDirectConnectSearch.entity().getLastPinged(), SearchCriteria.Op.LTEQ);
        this.UnmanagedDirectConnectSearch.and("resourceStates", this.UnmanagedDirectConnectSearch.entity().getResourceState(), SearchCriteria.Op.NIN);
        this.UnmanagedDirectConnectSearch.and("clusterIn", this.UnmanagedDirectConnectSearch.entity().getClusterId(), SearchCriteria.Op.IN);
        /*
         * UnmanagedDirectConnectSearch.op(SearchCriteria.Op.OR, "managementServerId",
         * UnmanagedDirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ);
         * UnmanagedDirectConnectSearch.and("lastPinged", UnmanagedDirectConnectSearch.entity().getLastPinged(),
         * SearchCriteria.Op.LTEQ); UnmanagedDirectConnectSearch.cp(); UnmanagedDirectConnectSearch.cp();
         */
        // LEFT OUTER join against host_transfer with "id IS NULL": keep only hosts with no transfer row.
        this.HostTransferSearch = this._hostTransferDao.createSearchBuilder();
        this.HostTransferSearch.and("id", this.HostTransferSearch.entity().getId(), SearchCriteria.Op.NULL);
        this.UnmanagedDirectConnectSearch.join("hostTransferSearch", this.HostTransferSearch, this.HostTransferSearch.entity().getId(),
                this.UnmanagedDirectConnectSearch.entity().getId(), JoinType.LEFTOUTER);
        this.ClusterManagedSearch = this._clusterDao.createSearchBuilder();
        this.ClusterManagedSearch.and("managed", this.ClusterManagedSearch.entity().getManagedState(), SearchCriteria.Op.EQ);
        this.UnmanagedDirectConnectSearch.join("ClusterManagedSearch", this.ClusterManagedSearch, this.ClusterManagedSearch.entity().getId(),
                this.UnmanagedDirectConnectSearch.entity().getClusterId(), JoinType.INNER);
        this.UnmanagedDirectConnectSearch.done();

        this.DirectConnectSearch = createSearchBuilder();
        this.DirectConnectSearch.and("resource", this.DirectConnectSearch.entity().getResource(), SearchCriteria.Op.NNULL);
        this.DirectConnectSearch.and("id", this.DirectConnectSearch.entity().getId(), SearchCriteria.Op.EQ);
        // Grouped condition: (management server IS NULL OR management server = given id).
        this.DirectConnectSearch.and().op("nullserver", this.DirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL);
        this.DirectConnectSearch.or("server", this.DirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ);
        this.DirectConnectSearch.cp();
        this.DirectConnectSearch.done();

        this.UnmanagedApplianceSearch = createSearchBuilder();
        this.UnmanagedApplianceSearch.and("resource", this.UnmanagedApplianceSearch.entity().getResource(), SearchCriteria.Op.NNULL);
        this.UnmanagedApplianceSearch.and("server", this.UnmanagedApplianceSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL);
        this.UnmanagedApplianceSearch.and("types", this.UnmanagedApplianceSearch.entity().getType(), SearchCriteria.Op.IN);
        this.UnmanagedApplianceSearch.and("lastPinged", this.UnmanagedApplianceSearch.entity().getLastPinged(), SearchCriteria.Op.LTEQ);
        this.UnmanagedApplianceSearch.done();

        this.AvailHypevisorInZone = createSearchBuilder();
        this.AvailHypevisorInZone.and("zoneId", this.AvailHypevisorInZone.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        this.AvailHypevisorInZone.and("hostId", this.AvailHypevisorInZone.entity().getId(), SearchCriteria.Op.NEQ);
        this.AvailHypevisorInZone.and("type", this.AvailHypevisorInZone.entity().getType(), SearchCriteria.Op.EQ);
        this.AvailHypevisorInZone.groupBy(this.AvailHypevisorInZone.entity().getHypervisorType());
        this.AvailHypevisorInZone.done();

        // Projects host ids only (Long), filtered by zone/pod/cluster/type and a status IN list.
        this.HostsInStatusSearch = createSearchBuilder(Long.class);
        this.HostsInStatusSearch.selectFields(this.HostsInStatusSearch.entity().getId());
        this.HostsInStatusSearch.and("dc", this.HostsInStatusSearch.entity().getDataCenterId(), Op.EQ);
        this.HostsInStatusSearch.and("pod", this.HostsInStatusSearch.entity().getPodId(), Op.EQ);
        this.HostsInStatusSearch.and("cluster", this.HostsInStatusSearch.entity().getClusterId(), Op.EQ);
        this.HostsInStatusSearch.and("type", this.HostsInStatusSearch.entity().getType(), Op.EQ);
        this.HostsInStatusSearch.and("statuses", this.HostsInStatusSearch.entity().getStatus(), Op.IN);
        this.HostsInStatusSearch.done();

        this.CountRoutingByDc = createSearchBuilder(Long.class);
        this.CountRoutingByDc.select(null, Func.COUNT, null);
        this.CountRoutingByDc.and("dc", this.CountRoutingByDc.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        this.CountRoutingByDc.and("type", this.CountRoutingByDc.entity().getType(), SearchCriteria.Op.EQ);
        this.CountRoutingByDc.and("status", this.CountRoutingByDc.entity().getStatus(), SearchCriteria.Op.EQ);
        this.CountRoutingByDc.done();

        this.ManagedDirectConnectSearch = createSearchBuilder();
        this.ManagedDirectConnectSearch.and("resource", this.ManagedDirectConnectSearch.entity().getResource(), SearchCriteria.Op.NNULL);
        this.ManagedDirectConnectSearch.and("server", this.ManagedDirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL);
        this.ManagedDirectConnectSearch.done();

        this.ManagedRoutingServersSearch = createSearchBuilder();
        this.ManagedRoutingServersSearch.and("server", this.ManagedRoutingServersSearch.entity().getManagementServerId(), SearchCriteria.Op.NNULL);
        this.ManagedRoutingServersSearch.and("type", this.ManagedRoutingServersSearch.entity().getType(), SearchCriteria.Op.EQ);
        this.ManagedRoutingServersSearch.done();

        this.RoutingSearch = createSearchBuilder();
        this.RoutingSearch.and("type", this.RoutingSearch.entity().getType(), SearchCriteria.Op.EQ);
        this.RoutingSearch.done();

        // Hosts owned by a given management server that have gone quiet and should be reconnected.
        this.HostsForReconnectSearch = createSearchBuilder();
        this.HostsForReconnectSearch.and("resource", this.HostsForReconnectSearch.entity().getResource(), SearchCriteria.Op.NNULL);
        this.HostsForReconnectSearch.and("server", this.HostsForReconnectSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ);
        this.HostsForReconnectSearch.and("lastPinged", this.HostsForReconnectSearch.entity().getLastPinged(), SearchCriteria.Op.LTEQ);
        this.HostsForReconnectSearch.and("resourceStates", this.HostsForReconnectSearch.entity().getResourceState(), SearchCriteria.Op.NIN);
        this.HostsForReconnectSearch.and("cluster", this.HostsForReconnectSearch.entity().getClusterId(), SearchCriteria.Op.NNULL);
        this.HostsForReconnectSearch.and("status", this.HostsForReconnectSearch.entity().getStatus(), SearchCriteria.Op.IN);
        this.HostsForReconnectSearch.done();

        // Distinct cluster ids of hosts owned by a given management server.
        this.ClustersOwnedByMSSearch = createSearchBuilder(Long.class);
        this.ClustersOwnedByMSSearch.select(null, Func.DISTINCT, this.ClustersOwnedByMSSearch.entity().getClusterId());
        this.ClustersOwnedByMSSearch.and("resource", this.ClustersOwnedByMSSearch.entity().getResource(), SearchCriteria.Op.NNULL);
        this.ClustersOwnedByMSSearch.and("cluster", this.ClustersOwnedByMSSearch.entity().getClusterId(), SearchCriteria.Op.NNULL);
        this.ClustersOwnedByMSSearch.and("server", this.ClustersOwnedByMSSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ);
        this.ClustersOwnedByMSSearch.done();

        this.ClustersForHostsNotOwnedByAnyMSSearch = createSearchBuilder(Long.class);
this.ClustersForHostsNotOwnedByAnyMSSearch.select(null, Func.DISTINCT, this.ClustersForHostsNotOwnedByAnyMSSearch.entity().getClusterId()); this.ClustersForHostsNotOwnedByAnyMSSearch.and("resource", this.ClustersForHostsNotOwnedByAnyMSSearch.entity().getResource(), SearchCriteria.Op.NNULL); this.ClustersForHostsNotOwnedByAnyMSSearch.and("cluster", this.ClustersForHostsNotOwnedByAnyMSSearch.entity().getClusterId(), SearchCriteria.Op.NNULL); this.ClustersForHostsNotOwnedByAnyMSSearch.and("server", this.ClustersForHostsNotOwnedByAnyMSSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL); this.ClustersForHostsNotOwnedByAnyMSSearch.done(); this.AllClustersSearch = this._clusterDao.createSearchBuilder(Long.class); this.AllClustersSearch.select(null, Func.NATIVE, this.AllClustersSearch.entity().getId()); this.AllClustersSearch.and("managed", this.AllClustersSearch.entity().getManagedState(), SearchCriteria.Op.EQ); this.AllClustersSearch.done(); this.HostsInClusterSearch = createSearchBuilder(); this.HostsInClusterSearch.and("resource", this.HostsInClusterSearch.entity().getResource(), SearchCriteria.Op.NNULL); this.HostsInClusterSearch.and("cluster", this.HostsInClusterSearch.entity().getClusterId(), SearchCriteria.Op.EQ); this.HostsInClusterSearch.and("server", this.HostsInClusterSearch.entity().getManagementServerId(), SearchCriteria.Op.NNULL); this.HostsInClusterSearch.done(); this.HostIdSearch = createSearchBuilder(Long.class); this.HostIdSearch.selectFields(this.HostIdSearch.entity().getId()); this.HostIdSearch.and("dataCenterId", this.HostIdSearch.entity().getDataCenterId(), Op.EQ); this.HostIdSearch.done(); this._statusAttr = this._allAttributes.get("status"); this._msIdAttr = this._allAttributes.get("managementServerId"); this._pingTimeAttr = this._allAttributes.get("lastPinged"); this._resourceStateAttr = this._allAttributes.get("resourceState"); assert (this._statusAttr != null && this._msIdAttr != null && this._pingTimeAttr != null) : "Couldn't find 
one of these attributes"; }

    /**
     * Counts hosts in the given cluster that are in any of the given resource states.
     * NOTE(review): materialises the full entity list just to take its size; a
     * COUNT-style query would be cheaper — confirm whether a count helper exists.
     */
    @Override
    public long countBy(final long clusterId, final ResourceState... states) {
        final SearchCriteria<HostVO> sc = this.MaintenanceCountSearch.create();
        // the varargs array is passed as one Object[] for the IN-style condition
        sc.setParameters("resourceState", (Object[]) states);
        sc.setParameters("cluster", clusterId);
        final List<HostVO> hosts = listBy(sc);
        return hosts.size();
    }

    /**
     * Marks every host currently owned by management server {@code msId} as Disconnected.
     * Done in two passes over the same criteria: the first stamps status/lastPinged/
     * disconnectedOn, the second clears the managementServerId (re-stamping the times).
     */
    @Override
    public void markHostsAsDisconnected(final long msId, final long lastPing) {
        SearchCriteria<HostVO> sc = this.MsStatusSearch.create();
        sc.setParameters("ms", msId);
        HostVO host = createForUpdate();
        host.setLastPinged(lastPing);
        host.setDisconnectedOn(new Date());
        UpdateBuilder ub = getUpdateBuilder(host);
        ub.set(host, "status", HostStatus.Disconnected);
        update(ub, sc, null);
        // second pass: detach the hosts from this management server
        sc = this.MsStatusSearch.create();
        sc.setParameters("ms", msId);
        host = createForUpdate();
        host.setManagementServerId(null);
        host.setLastPinged(lastPing);
        host.setDisconnectedOn(new Date());
        ub = getUpdateBuilder(host);
        update(ub, sc, null);
    }

    /**
     * Finds hosts still attached to a management server whose last ping is older than
     * {@code timeout} (bound to the single SQL placeholder), excluding appliance-type
     * hosts and hosts whose cluster is not in managed state.
     */
    @DB
    @Override
    public List<HostVO> findLostHosts(final long timeout) {
        final List<HostVO> result = new ArrayList<>();
        final String sql =
                "select h.id from host h left join cluster c on h.cluster_id=c.id where h.mgmt_server_id is not null and h.last_ping < ? and h.status in ('Up', 'Updating', " +
                        "'Disconnected', 'Connecting') and h.type not in ('ExternalLoadBalancer', 'TrafficMonitor', 'SecondaryStorage', " +
                        "'LocalSecondaryStorage', 'L2Networking') and (h.cluster_id is null or c.managed_state = 'Managed') ;";
        try (
                final TransactionLegacy txn = TransactionLegacy.currentTxn();
                final PreparedStatement pstmt = txn.prepareStatement(sql)) {
            pstmt.setLong(1, timeout);
            try (final ResultSet rs = pstmt.executeQuery()) {
                while (rs.next()) {
                    final long id = rs.getLong(1); //ID column
                    result.add(findById(id));
                }
            }
        } catch (final SQLException e) {
            s_logger.warn("Exception: ", e);
        }
        return result;
    }

    /**
     * Assigns unmanaged direct-agent hosts to this management server: first hosts in
     * clusters this server already owns, then (bounded by {@code limit}) whole clusters
     * owned by no server at all.
     * NOTE(review): {@code limit} is a Long and is auto-unboxed in comparisons further
     * down — a null limit would NPE; confirm callers always pass a value.
     */
    @Override
    @DB
    public List<HostVO> findAndUpdateDirectAgentToLoad(final long lastPingSecondsAfter, final Long limit, final long managementServerId) {
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        txn.start();
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Resetting hosts suitable for reconnect");
        }
        // reset hosts that are suitable candidates for reconnect
        resetHosts(managementServerId, lastPingSecondsAfter);
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Completed resetting hosts suitable for reconnect");
        }
        final List<HostVO> assignedHosts = new ArrayList<>();
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Acquiring hosts for clusters already owned by this management server");
        }
        List<Long> clusters = findClustersOwnedByManagementServer(managementServerId);
        if (clusters.size() > 0) {
            // handle clusters already owned by @managementServerId
            final SearchCriteria<HostVO> sc = this.UnmanagedDirectConnectSearch.create();
            sc.setParameters("lastPinged", lastPingSecondsAfter);
            sc.setJoinParameters("ClusterManagedSearch", "managed", ManagedState.Managed);
            sc.setParameters("clusterIn", clusters.toArray());
            final List<HostVO> unmanagedHosts = lockRows(sc, new Filter(HostVO.class, "clusterId", true, 0L, limit), true); // host belongs to clusters owned by @managementServerId
            final StringBuilder sb = new StringBuilder();
            for
(final HostVO host : unmanagedHosts) {
                host.setManagementServerId(managementServerId);
                update(host.getId(), host);
                assignedHosts.add(host);
                sb.append(host.getId());
                sb.append(" ");
            }
            if (s_logger.isTraceEnabled()) {
                s_logger.trace("Following hosts got acquired for clusters already owned: " + sb.toString());
            }
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Completed acquiring hosts for clusters already owned by this management server");
        }
        // NOTE(review): the Long 'limit' is auto-unboxed here and below; a null limit would NPE
        if (assignedHosts.size() < limit) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Acquiring hosts for clusters not owned by any management server");
            }
            // for remaining hosts not owned by any MS check if they can be owned (by owning full cluster)
            clusters = findClustersForHostsNotOwnedByAnyManagementServer();
            List<Long> updatedClusters = clusters;
            if (clusters.size() > limit) {
                updatedClusters = clusters.subList(0, limit.intValue());
            }
            if (updatedClusters.size() > 0) {
                final SearchCriteria<HostVO> sc = this.UnmanagedDirectConnectSearch.create();
                sc.setParameters("lastPinged", lastPingSecondsAfter);
                sc.setJoinParameters("ClusterManagedSearch", "managed", ManagedState.Managed);
                sc.setParameters("clusterIn", updatedClusters.toArray());
                final List<HostVO> unmanagedHosts = lockRows(sc, null, true);
                // group hosts based on cluster
                final Map<Long, List<HostVO>> hostMap = new HashMap<>();
                for (final HostVO host : unmanagedHosts) {
                    if (hostMap.get(host.getClusterId()) == null) {
                        hostMap.put(host.getClusterId(), new ArrayList<>());
                    }
                    hostMap.get(host.getClusterId()).add(host);
                }
                final StringBuilder sb = new StringBuilder();
                for (final Long clusterId : hostMap.keySet()) {
                    if (canOwnCluster(clusterId)) {
                        // cluster is not owned by any other MS, so @managementServerId can own it
                        final List<HostVO> hostList = hostMap.get(clusterId);
                        for (final HostVO host : hostList) {
                            host.setManagementServerId(managementServerId);
                            update(host.getId(), host);
                            assignedHosts.add(host);
                            sb.append(host.getId());
                            sb.append(" ");
                        }
                    }
                    // stop once the limit is exceeded; a whole cluster is taken first, so
                    // the result can overshoot 'limit' by up to one cluster's hosts
                    if (assignedHosts.size() > limit) {
                        break;
                    }
                }
                if (s_logger.isTraceEnabled()) {
                    s_logger.trace("Following hosts got acquired from newly owned clusters: " + sb.toString());
                }
            }
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Completed acquiring hosts for clusters not owned by any management server");
            }
        }
        txn.commit();
        return assignedHosts;
    }

    /**
     * Returns per-zone counts of 'Up' SecondaryStorage and Routing hosts whose owning
     * management server has checked in after {@code cutTime} (formatted/compared in GMT).
     */
    @Override
    @DB
    public List<RunningHostCountInfo> getRunningHostCounts(final Date cutTime) {
        final String sql = "select * from (" +
                "select h.data_center_id, h.type, count(*) as count from host as h INNER JOIN mshost as m ON h.mgmt_server_id=m.msid " +
                "where h.status='Up' and h.type='SecondaryStorage' and m.last_update > ? " +
                "group by h.data_center_id, h.type " +
                "UNION ALL " +
                "select h.data_center_id, h.type, count(*) as count from host as h INNER JOIN mshost as m ON h.mgmt_server_id=m.msid " +
                "where h.status='Up' and h.type='Routing' and m.last_update > ? " +
                "group by h.data_center_id, h.type) as t " +
                "ORDER by t.data_center_id, t.type";
        final ArrayList<RunningHostCountInfo> l = new ArrayList<>();
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        PreparedStatement pstmt = null;
        try {
            pstmt = txn.prepareAutoCloseStatement(sql);
            // both placeholders take the same GMT-formatted cutoff
            final String gmtCutTime = DateUtil.getDateDisplayString(TimeZone.getTimeZone("GMT"), cutTime);
            pstmt.setString(1, gmtCutTime);
            pstmt.setString(2, gmtCutTime);
            final ResultSet rs = pstmt.executeQuery();
            while (rs.next()) {
                final RunningHostCountInfo info = new RunningHostCountInfo();
                info.setDcId(rs.getLong(1));
                info.setHostType(rs.getString(2));
                info.setCount(rs.getInt(3));
                l.add(info);
            }
        } catch (final SQLException e) {
            s_logger.debug("SQLException caught", e);
        }
        return l;
    }

    /**
     * Returns the next value of the per-host request sequence backed by the
     * "host_req_sq" table generator.
     */
    @Override
    public long getNextSequence(final long hostId) {
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("getNextSequence(), hostId: " + hostId);
        }
        final TableGenerator tg = this._tgs.get("host_req_sq");
        assert tg != null : "how can this be wrong!";
        return s_seqFetcher.getNextSequence(Long.class, tg, hostId);
    }

    /**
     * Loads the host's detail key/value pairs from the details DAO into the given HostVO.
     */
    @Override
    public void
loadDetails(final HostVO host) {
        final Map<String, String> details = this._detailsDao.findDetails(host.getId());
        host.setDetails(details);
    }

    /**
     * Persists the host's in-memory detail map via the details DAO; no-op when null.
     */
    @Override
    public void saveDetails(final HostVO host) {
        final Map<String, String> details = host.getDetails();
        if (details == null) {
            return;
        }
        this._detailsDao.persist(host.getId(), details);
    }

    /**
     * Loads the host's tag list from the host-tags DAO into the given HostVO.
     */
    @Override
    public void loadHostTags(final HostVO host) {
        final List<String> hostTags = this._hostTagsDao.gethostTags(host.getId());
        host.setHostTags(hostTags);
    }

    /**
     * Lists Up/Enabled hosts of the given type in the given zone (optionally narrowed to
     * a pod/cluster) carrying the given host tag (INNER JOIN on the host-tag table).
     */
    @Override
    public List<HostVO> listByHostTag(final HostType type, final Long clusterId, final Long podId, final long dcId, final String hostTag) {
        final SearchBuilder<HostTagVO> hostTagSearch = this._hostTagsDao.createSearchBuilder();
        final HostTagVO tagEntity = hostTagSearch.entity();
        hostTagSearch.and("tag", tagEntity.getTag(), SearchCriteria.Op.EQ);
        final SearchBuilder<HostVO> hostSearch = createSearchBuilder();
        final HostVO entity = hostSearch.entity();
        hostSearch.and("type", entity.getType(), SearchCriteria.Op.EQ);
        hostSearch.and("pod", entity.getPodId(), SearchCriteria.Op.EQ);
        hostSearch.and("dc", entity.getDataCenterId(), SearchCriteria.Op.EQ);
        hostSearch.and("cluster", entity.getClusterId(), SearchCriteria.Op.EQ);
        hostSearch.and("status", entity.getStatus(), SearchCriteria.Op.EQ);
        hostSearch.and("resourceState", entity.getResourceState(), SearchCriteria.Op.EQ);
        hostSearch.join("hostTagSearch", hostTagSearch, entity.getId(), tagEntity.getHostId(), JoinBuilder.JoinType.INNER);
        final SearchCriteria<HostVO> sc = hostSearch.create();
        sc.setJoinParameters("hostTagSearch", "tag", hostTag);
        sc.setParameters("type", type.toString());
        // pod/cluster conditions are only bound when supplied; unbound EQ conditions are
        // presumably dropped by the search framework — TODO confirm
        if (podId != null) {
            sc.setParameters("pod", podId);
        }
        if (clusterId != null) {
            sc.setParameters("cluster", clusterId);
        }
        sc.setParameters("dc", dcId);
        sc.setParameters("status", HostStatus.Up.toString());
        sc.setParameters("resourceState", ResourceState.Enabled.toString());
        return listBy(sc);
    }

    /**
     * Counts Routing-type hosts in status Up for the given data center.
     */
    @Override
    public long countRoutingHostsByDataCenter(final long dcId) {
        final SearchCriteria<Long> sc = this.CountRoutingByDc.create();
        sc.setParameters("dc", dcId);
        sc.setParameters("type", HostType.Routing);
        sc.setParameters("status", HostStatus.Up.toString());
        return customSearch(sc, null).get(0);
    }

    /**
     * Assigns unmanaged appliance hosts (ExternalDhcp, ExternalLoadBalancer,
     * TrafficMonitor, L2Networking) past the ping cutoff to this management server.
     */
    @Override
    @DB
    public List<HostVO> findAndUpdateApplianceToLoad(final long lastPingSecondsAfter, final long managementServerId) {
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        txn.start();
        final SearchCriteria<HostVO> sc = this.UnmanagedApplianceSearch.create();
        sc.setParameters("lastPinged", lastPingSecondsAfter);
        sc.setParameters("types", HostType.ExternalDhcp, HostType.ExternalLoadBalancer, HostType.TrafficMonitor, HostType.L2Networking);
        final List<HostVO> hosts = lockRows(sc, null, true);
        for (final HostVO host : hosts) {
            host.setManagementServerId(managementServerId);
            update(host.getId(), host);
        }
        txn.commit();
        return hosts;
    }

    /**
     * Updates the host row and, only when that succeeds, its details, tags and GPU
     * records inside one transaction.
     * NOTE(review): the early return on failure leaves the started transaction neither
     * committed nor rolled back here — confirm TransactionLegacy cleans this up.
     */
    @Override
    @DB
    public boolean update(final Long hostId, final HostVO host) {
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        txn.start();
        final boolean persisted = super.update(hostId, host);
        if (!persisted) {
            return persisted;
        }
        saveDetails(host);
        saveHostTags(host);
        saveGpuRecords(host);
        txn.commit();
        return persisted;
    }

    /**
     * Persists the host's tag list; no-op when there are no tags.
     * NOTE(review): the 'hostTags != null' re-check is redundant after the null test.
     */
    protected void saveHostTags(final HostVO host) {
        final List<String> hostTags = host.getHostTags();
        if (hostTags == null || (hostTags != null && hostTags.isEmpty())) {
            return;
        }
        this._hostTagsDao.persist(host.getId(), hostTags);
    }

    /**
     * Creates/updates GPU group and vGPU type records for the host, when any are attached.
     */
    protected void saveGpuRecords(final HostVO host) {
        final HashMap<String, HashMap<String, VgpuTypesInfo>> groupDetails = host.getGpuGroupDetails();
        if (groupDetails != null) {
            // Create/Update GPU group entries
            this._hostGpuGroupsDao.persist(host.getId(), new ArrayList<>(groupDetails.keySet()));
            // Create/Update VGPU types entries
            this._vgpuTypesDao.persist(host.getId(), groupDetails);
        }
    }

    /**
     * Persists a new host row plus its op_host sequence row, details, tags and GPU
     * records in one transaction; reloads details/tags into the returned entity.
     */
    @Override
    @DB
    public HostVO persist(final HostVO host) {
        final String InsertSequenceSql = "INSERT INTO op_host(id) VALUES(?)";
        final
TransactionLegacy txn = TransactionLegacy.currentTxn();
        txn.start();
        final HostVO dbHost = super.persist(host);
        try {
            final PreparedStatement pstmt = txn.prepareAutoCloseStatement(InsertSequenceSql);
            pstmt.setLong(1, dbHost.getId());
            pstmt.executeUpdate();
        } catch (final SQLException e) {
            // NOTE(review): the SQLException is not chained as the cause of the rethrow
            throw new CloudRuntimeException("Unable to persist the sequence number for this host");
        }
        saveDetails(host);
        loadDetails(dbHost);
        saveHostTags(host);
        loadHostTags(dbHost);
        saveGpuRecords(host);
        txn.commit();
        return dbHost;
    }

    /**
     * Compare-and-swap style resource-state transition: updates the row only while it is
     * still in {@code oldState}; returns true when exactly one row changed.
     */
    @Override
    public boolean updateResourceState(final ResourceState oldState, final ResourceState.Event event, final ResourceState newState, final Host vo) {
        final HostVO host = (HostVO) vo;
        final SearchBuilder<HostVO> sb = createSearchBuilder();
        sb.and("resource_state", sb.entity().getResourceState(), SearchCriteria.Op.EQ);
        sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
        sb.done();
        final SearchCriteria<HostVO> sc = sb.create();
        sc.setParameters("resource_state", oldState);
        sc.setParameters("id", host.getId());
        final UpdateBuilder ub = getUpdateBuilder(host);
        ub.set(host, this._resourceStateAttr, newState);
        final int result = update(ub, sc, null);
        assert result <= 1 : "How can this update " + result + " rows? ";
        // NOTE(review): the else-branch below also runs when debug logging is disabled,
        // building a message for a debug-level call — verify this is intended
        if (state_logger.isDebugEnabled() && result == 0) {
            final HostVO ho = findById(host.getId());
            assert ho != null : "How how how? : " + host.getId();
            final StringBuilder str = new StringBuilder("Unable to update resource state: [");
            str.append("m = " + host.getId());
            str.append("; name = " + host.getName());
            str.append("; old state = " + oldState);
            str.append("; event = " + event);
            str.append("; new state = " + newState + "]");
            state_logger.debug(str.toString());
        } else {
            final StringBuilder msg = new StringBuilder("Resource state update: [");
            msg.append("id = " + host.getId());
            msg.append("; name = " + host.getName());
            msg.append("; old state = " + oldState);
            msg.append("; event = " + event);
            msg.append("; new state = " + newState + "]");
            state_logger.debug(msg.toString());
        }
        return result > 0;
    }

    /**
     * Finds a host by its GUID.
     */
    @Override
    public HostVO findByGuid(final String guid) {
        final SearchCriteria<HostVO> sc = this.GuidSearch.create("guid", guid);
        return findOneBy(sc);
    }

    /**
     * Finds a host by zone, name and type.
     */
    @Override
    public HostVO findByTypeNameAndZoneId(final long zoneId, final String name, final HostType type) {
        final SearchCriteria<HostVO> sc = this.TypeNameZoneSearch.create();
        sc.setParameters("type", type);
        sc.setParameters("name", name);
        sc.setParameters("zoneId", zoneId);
        return findOneBy(sc);
    }

    /**
     * Lists Up/Enabled Routing (hypervisor) hosts of the given cluster.
     */
    @Override
    public List<HostVO> findHypervisorHostInCluster(final long clusterId) {
        final SearchCriteria<HostVO> sc = this.TypeClusterStatusSearch.create();
        sc.setParameters("type", HostType.Routing);
        sc.setParameters("cluster", clusterId);
        sc.setParameters("status", HostStatus.Up);
        sc.setParameters("resourceState", ResourceState.Enabled);
        return listBy(sc);
    }

    /**
     * Lists Up/Enabled hosts filtered by type/cluster/pod/zone; when an HA tag is given,
     * hosts carrying that tag are excluded (LEFT OUTER join keeps untagged hosts via the
     * NEQ-or-NULL condition built below).
     */
    @Override
    public List<HostVO> listAllUpAndEnabledNonHAHosts(final HostType type, final Long clusterId, final Long podId, final long dcId, final String haTag) {
        SearchBuilder<HostTagVO> hostTagSearch = null;
        if (haTag != null && !haTag.isEmpty()) {
            hostTagSearch = this._hostTagsDao.createSearchBuilder();
            hostTagSearch.and().op("tag", hostTagSearch.entity().getTag(), SearchCriteria.Op.NEQ);
            hostTagSearch.or("tagNull", hostTagSearch.entity().getTag(), SearchCriteria.Op.NULL);
            hostTagSearch.cp();
        }
        final
SearchBuilder<HostVO> hostSearch = createSearchBuilder();
        hostSearch.and("type", hostSearch.entity().getType(), SearchCriteria.Op.EQ);
        hostSearch.and("clusterId", hostSearch.entity().getClusterId(), SearchCriteria.Op.EQ);
        hostSearch.and("podId", hostSearch.entity().getPodId(), SearchCriteria.Op.EQ);
        hostSearch.and("zoneId", hostSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        hostSearch.and("status", hostSearch.entity().getStatus(), SearchCriteria.Op.EQ);
        hostSearch.and("resourceState", hostSearch.entity().getResourceState(), SearchCriteria.Op.EQ);
        if (haTag != null && !haTag.isEmpty()) {
            hostSearch.join("hostTagSearch", hostTagSearch, hostSearch.entity().getId(), hostTagSearch.entity().getHostId(), JoinBuilder.JoinType.LEFTOUTER);
        }
        final SearchCriteria<HostVO> sc = hostSearch.create();
        if (haTag != null && !haTag.isEmpty()) {
            sc.setJoinParameters("hostTagSearch", "tag", haTag);
        }
        // optional filters are only bound when supplied
        if (type != null) {
            sc.setParameters("type", type);
        }
        if (clusterId != null) {
            sc.setParameters("clusterId", clusterId);
        }
        if (podId != null) {
            sc.setParameters("podId", podId);
        }
        sc.setParameters("zoneId", dcId);
        sc.setParameters("status", HostStatus.Up);
        sc.setParameters("resourceState", ResourceState.Enabled);
        return listBy(sc);
    }

    /**
     * Lists all hosts of the given pod.
     */
    @Override
    public List<HostVO> findByPodId(final Long podId) {
        final SearchCriteria<HostVO> sc = this.PodSearch.create();
        sc.setParameters("podId", podId);
        return listBy(sc);
    }

    /**
     * Lists all hosts of the given cluster.
     */
    @Override
    public List<HostVO> findByClusterId(final Long clusterId) {
        final SearchCriteria<HostVO> sc = this.ClusterSearch.create();
        sc.setParameters("clusterId", clusterId);
        return listBy(sc);
    }

    /**
     * Lists Up/Enabled Routing hosts of the given data center.
     */
    @Override
    public List<HostVO> listByDataCenterId(final long id) {
        final SearchCriteria<HostVO> sc = this.DcSearch.create();
        sc.setParameters("dc", id);
        sc.setParameters("status", HostStatus.Up);
        sc.setParameters("type", HostType.Routing);
        sc.setParameters("resourceState", ResourceState.Enabled);
        return listBy(sc);
    }

    /**
     * Returns the ids of all hosts in the given zone.
     */
    @Override
    public List<Long> listAllHosts(final long zoneId) {
        final SearchCriteria<Long> sc = this.HostIdSearch.create();
        sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
        return customSearch(sc, null);
    }

    /**
     * Finds the host registered with the given public IP address.
     */
    @Override
    public HostVO findByPublicIp(final String publicIp) {
        final SearchCriteria<HostVO> sc = this.PublicIpAddressSearch.create();
        sc.setParameters("publicIpAddress", publicIp);
        return findOneBy(sc);
    }

    /**
     * Lists all hosts of the given type.
     */
    @Override
    public List<HostVO> listByType(final HostType type) {
        final SearchCriteria<HostVO> sc = this.TypeSearch.create();
        sc.setParameters("type", type);
        return listBy(sc);
    }

    /*
     * Find hosts which is in Disconnected, Down, Alert and ping timeout and server is not null, set server to null
     */
    private void resetHosts(final long managementServerId, final long lastPingSecondsAfter) {
        final SearchCriteria<HostVO> sc = this.HostsForReconnectSearch.create();
        sc.setParameters("server", managementServerId);
        sc.setParameters("lastPinged", lastPingSecondsAfter);
        sc.setParameters("status", HostStatus.Disconnected, HostStatus.Down, HostStatus.Alert);
        final StringBuilder sb = new StringBuilder();
        final List<HostVO> hosts = lockRows(sc, null, true); // exclusive lock
        for (final HostVO host : hosts) {
            host.setManagementServerId(null);
            update(host.getId(), host);
            sb.append(host.getId());
            sb.append(" ");
        }
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("Following hosts got reset: " + sb.toString());
        }
    }

    /*
     * Returns a list of cluster owned by @managementServerId
     */
    private List<Long> findClustersOwnedByManagementServer(final long managementServerId) {
        final SearchCriteria<Long> sc = this.ClustersOwnedByMSSearch.create();
        sc.setParameters("server", managementServerId);
        final List<Long> clusters = customSearch(sc, null);
        return clusters;
    }

    /*
     * Returns clusters based on the list of hosts not owned by any MS
     */
    private List<Long> findClustersForHostsNotOwnedByAnyManagementServer() {
        final SearchCriteria<Long> sc = this.ClustersForHostsNotOwnedByAnyMSSearch.create();
        final List<Long> clusters = customSearch(sc, null);
        return clusters;
    }

    /*
     *
Returns a list of all cluster Ids */
    private List<Long> listAllClusters() {
        final SearchCriteria<Long> sc = this.AllClustersSearch.create();
        sc.setParameters("managed", ManagedState.Managed);
        final List<Long> clusters = this._clusterDao.customSearch(sc, null);
        return clusters;
    }

    /*
     * This determines if hosts belonging to cluster(@clusterId) are up for grabs
     *
     * This is used for handling following cases:
     * 1. First host added in cluster
     * 2. During MS restart all hosts in a cluster are without any MS
     */
    private boolean canOwnCluster(final long clusterId) {
        final SearchCriteria<HostVO> sc = this.HostsInClusterSearch.create();
        sc.setParameters("cluster", clusterId);
        final List<HostVO> hosts = search(sc, null);
        // ownable when no host in the cluster is currently attached to a management server
        final boolean ownCluster = (hosts == null || hosts.size() == 0);
        return ownCluster;
    }

    /**
     * Optimistic (compare-and-swap style) status transition: the UPDATE only matches
     * while status, id and update counter — plus, for statuses that check the management
     * server, ping time and (null-or-equal) management-server id — still hold the
     * caller's view of the row. Returns true when the row changed.
     */
    @Override
    public boolean updateState(final HostStatus oldStatus, final Event event, final HostStatus newStatus, final Host vo, final Object data) {
        // lock target row from beginning to avoid lock-promotion caused deadlock
        HostVO host = lockRow(vo.getId(), true);
        if (host == null) {
            // a removed row is still acceptable for a Remove/Removed transition
            if (event == Event.Remove && newStatus == HostStatus.Removed) {
                host = findByIdIncludingRemoved(vo.getId());
            }
        }
        if (host == null) {
            return false;
        }
        final long oldPingTime = host.getLastPinged();
        final SearchBuilder<HostVO> sb = createSearchBuilder();
        sb.and("status", sb.entity().getStatus(), SearchCriteria.Op.EQ);
        sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
        sb.and("update", sb.entity().getUpdated(), SearchCriteria.Op.EQ);
        if (newStatus.checkManagementServer()) {
            sb.and("ping", sb.entity().getLastPinged(), SearchCriteria.Op.EQ);
            sb.and().op("nullmsid", sb.entity().getManagementServerId(), SearchCriteria.Op.NULL);
            sb.or("msid", sb.entity().getManagementServerId(), SearchCriteria.Op.EQ);
            sb.cp();
        }
        sb.done();
        final SearchCriteria<HostVO> sc = sb.create();
        sc.setParameters("status", oldStatus);
        sc.setParameters("id", host.getId());
        sc.setParameters("update", host.getUpdated());
        final long oldUpdateCount = host.getUpdated();
        if (newStatus.checkManagementServer()) {
            sc.setParameters("ping", oldPingTime);
            sc.setParameters("msid", host.getManagementServerId());
        }
        final long newUpdateCount = host.incrUpdated();
        final UpdateBuilder ub = getUpdateBuilder(host);
        ub.set(host, this._statusAttr, newStatus);
        if (newStatus.updateManagementServer()) {
            if (newStatus.lostConnection()) {
                ub.set(host, this._msIdAttr, null);
            } else {
                ub.set(host, this._msIdAttr, host.getManagementServerId());
            }
            if (event.equals(Event.Ping) || event.equals(Event.AgentConnected)) {
                // millis >> 10 is a cheap millis-to-~seconds conversion (divide by 1024)
                ub.set(host, this._pingTimeAttr, System.currentTimeMillis() >> 10);
            }
        }
        if (event.equals(Event.ManagementServerDown)) {
            // push the ping time 10 minutes into the past so the host shows as overdue
            ub.set(host, this._pingTimeAttr, ((System.currentTimeMillis() >> 10) - (10 * 60)));
        }
        final int result = update(ub, sc, null);
        assert result <= 1 : "How can this update " + result + " rows? ";
        if (result == 0) {
            final HostVO ho = findById(host.getId());
            assert ho != null : "How how how? : " + host.getId();
            if (status_logger.isDebugEnabled()) {
                final StringBuilder str = new StringBuilder("Unable to update host for event:").append(event.toString());
                str.append(". Name=").append(host.getName());
                str.append("; New=[status=")
                        .append(newStatus.toString())
                        .append(":msid=")
                        .append(newStatus.lostConnection() ? "null" : host.getManagementServerId())
                        .append(":lastpinged=")
                        .append(host.getLastPinged())
                        .append("]");
                str.append("; Old=[status=").append(oldStatus.toString()).append(":msid=").append(host.getManagementServerId()).append(":lastpinged=").append(oldPingTime)
                        .append("]");
                str.append("; DB=[status=")
                        .append(vo.getStatus().toString())
                        .append(":msid=")
                        .append(vo.getManagementServerId())
                        .append(":lastpinged=")
                        .append(vo.getLastPinged())
                        .append(":old update count=")
                        .append(oldUpdateCount)
                        .append("]");
                status_logger.debug(str.toString());
            } else {
                final StringBuilder msg = new StringBuilder("Agent status update: [");
                msg.append("id = " + host.getId());
                msg.append("; name = " + host.getName());
                msg.append("; old status = " + oldStatus);
                msg.append("; event = " + event);
                msg.append("; new status = " + newStatus);
                msg.append("; old update count = " + oldUpdateCount);
                msg.append("; new update count = " + newUpdateCount + "]");
                status_logger.debug(msg.toString());
            }
            // another thread may have already applied the same transition — treat as success
            if (ho.getState() == newStatus) {
                status_logger.debug("Host " + ho.getName() + " state has already been updated to " + newStatus);
                return true;
            }
        }
        return result > 0;
    }
}
{ "content_hash": "8efe76c927d6b299b5c1ee6628b5f510", "timestamp": "", "source": "github", "line_count": 1077, "max_line_length": 191, "avg_line_length": 49.045496750232125, "alnum_prop": 0.6681496346219379, "repo_name": "MissionCriticalCloud/cosmic", "id": "c86519f0efacf878831a57907ac1709fb2df597e", "size": "52822", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cosmic-core/engine/schema/src/main/java/com/cloud/host/dao/HostDaoImpl.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "338798" }, { "name": "FreeMarker", "bytes": "1832" }, { "name": "Groovy", "bytes": "136420" }, { "name": "HTML", "bytes": "127137" }, { "name": "Java", "bytes": "16848786" }, { "name": "JavaScript", "bytes": "4252831" }, { "name": "Python", "bytes": "1721825" }, { "name": "Shell", "bytes": "120959" }, { "name": "XSLT", "bytes": "160281" } ] }
package org.elasticsearch.search.aggregations.bucket.terms;

import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.IntArray;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds;
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalMapping;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * An aggregator of string values that relies on global ordinals in order to build buckets.
 *
 * <p>Buckets are keyed by global ordinal. When {@code bucketOrds} is {@code null} the global
 * ordinal itself is used as the bucket ordinal (dense mode); otherwise global ordinals are
 * remapped to dense bucket ordinals through a {@link LongHash} (see the constructor for when
 * remapping is forced).
 */
public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggregator {

    protected final ValuesSource.Bytes.WithOrdinals valuesSource;
    protected final IncludeExclude.OrdinalsFilter includeExclude;

    // TODO: cache the acceptedglobalValues per aggregation definition.
    // We can't cache this yet in ValuesSource, since ValuesSource is reused per field for aggs during the execution.
    // If aggs with same field, but different include/exclude are defined, then the last defined one will override the
    // first defined one.
    // So currently for each instance of this aggregator the acceptedglobalValues will be computed, this is unnecessary
    // especially if this agg is on a second layer or deeper.
    // Bitset of accepted global ordinals; null when there is no include/exclude filter.
    protected final LongBitSet acceptedGlobalOrdinals;
    // Total number of distinct global ordinals (0 when the index has no values for this field).
    protected final long valueCount;
    // Resolves a global ordinal back to its term bytes; bound to the first leaf's global view.
    protected final GlobalOrdLookupFunction lookupGlobalOrd;

    // Global-ord -> dense bucket-ord remapping; null means global ords are used directly.
    private final LongHash bucketOrds;

    /** Maps a global ordinal to the {@link BytesRef} term it represents. */
    public interface GlobalOrdLookupFunction {
        BytesRef apply(long ord) throws IOException;
    }

    /**
     * @param includeExclude       optional ordinal filter; may be null
     * @param forceRemapGlobalOrds when true, always remap global ords to dense bucket ords
     *                             even if all sub-aggregators could be deferred
     */
    public GlobalOrdinalsStringTermsAggregator(String name, AggregatorFactories factories,
                                               ValuesSource.Bytes.WithOrdinals valuesSource,
                                               Terms.Order order, DocValueFormat format,
                                               BucketCountThresholds bucketCountThresholds,
                                               IncludeExclude.OrdinalsFilter includeExclude, SearchContext context,
                                               Aggregator parent, boolean forceRemapGlobalOrds,
                                               SubAggCollectionMode collectionMode, boolean showTermDocCountError,
                                               List<PipelineAggregator> pipelineAggregators,
                                               Map<String, Object> metaData) throws IOException {
        super(name, factories, context, parent, order, format, bucketCountThresholds, collectionMode,
            showTermDocCountError, pipelineAggregators, metaData);
        this.valuesSource = valuesSource;
        this.includeExclude = includeExclude;
        final IndexReader reader = context.searcher().getIndexReader();
        // Any leaf exposes the global ordinal view; an empty reader falls back to an empty set.
        final RandomAccessOrds values = reader.leaves().size() > 0 ?
            valuesSource.globalOrdinalsValues(context.searcher().getIndexReader().leaves().get(0)) :
            DocValues.emptySortedSet();
        this.valueCount = values.getValueCount();
        this.lookupGlobalOrd = values::lookupOrd;
        this.acceptedGlobalOrdinals = includeExclude != null ? includeExclude.acceptedGlobalOrdinals(values) : null;
        /*
         * Remap global ords to dense bucket ordinals if any sub-aggregator cannot be deferred.
         * Sub-aggregators expect dense buckets and allocate memories based on this assumption.
         * Deferred aggregators are safe because the selected ordinals are remapped when the buckets
         * are replayed.
         */
        boolean remapGlobalOrds = forceRemapGlobalOrds || Arrays.stream(subAggregators).anyMatch((a) -> shouldDefer(a) == false);
        this.bucketOrds = remapGlobalOrds ? new LongHash(1, context.bigArrays()) : null;
    }

    /** Whether this aggregator remaps global ordinals to dense bucket ordinals. */
    boolean remapGlobalOrds() {
        return bucketOrds != null;
    }

    /**
     * Returns the bucket ordinal for a global ordinal, or a negative value when remapping is
     * active and the global ordinal was never collected.
     */
    protected final long getBucketOrd(long globalOrd) {
        return bucketOrds == null ? globalOrd : bucketOrds.find(globalOrd);
    }

    /** Collects one (doc, global ordinal) pair, creating the bucket on first sight in remap mode. */
    private void collectGlobalOrd(int doc, long globalOrd, LeafBucketCollector sub) throws IOException {
        if (bucketOrds == null) {
            // Dense mode: the global ordinal is the bucket ordinal and buckets were pre-grown.
            collectExistingBucket(sub, doc, globalOrd);
        } else {
            long bucketOrd = bucketOrds.add(globalOrd);
            if (bucketOrd < 0) {
                // LongHash.add returns -1-id when the key already exists; recover the id.
                bucketOrd = -1 - bucketOrd;
                collectExistingBucket(sub, doc, bucketOrd);
            } else {
                collectBucket(sub, doc, bucketOrd);
            }
        }
    }

    /** Per-leaf global ordinals, wrapped with the include/exclude filter when one is set. */
    private RandomAccessOrds getGlobalOrds(LeafReaderContext ctx) throws IOException {
        return acceptedGlobalOrdinals == null ?
            valuesSource.globalOrdinalsValues(ctx) :
            new FilteredOrdinals(valuesSource.globalOrdinalsValues(ctx), acceptedGlobalOrdinals);
    }

    @Override
    public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
        final RandomAccessOrds globalOrds = getGlobalOrds(ctx);
        if (bucketOrds == null) {
            // Dense mode: pre-allocate one bucket per global ordinal up front.
            grow(globalOrds.getValueCount());
        }
        // Fast path when every document has at most one value.
        final SortedDocValues singleValues = DocValues.unwrapSingleton(globalOrds);
        if (singleValues != null) {
            return new LeafBucketCollectorBase(sub, globalOrds) {
                @Override
                public void collect(int doc, long bucket) throws IOException {
                    assert bucket == 0;
                    final int ord = singleValues.getOrd(doc);
                    // ord < 0 means the doc has no value for this field.
                    if (ord >= 0) {
                        collectGlobalOrd(doc, ord, sub);
                    }
                }
            };
        } else {
            return new LeafBucketCollectorBase(sub, globalOrds) {
                @Override
                public void collect(int doc, long bucket) throws IOException {
                    assert bucket == 0;
                    globalOrds.setDocument(doc);
                    final int numOrds = globalOrds.cardinality();
                    for (int i = 0; i < numOrds; i++) {
                        final long globalOrd = globalOrds.ordAt(i);
                        collectGlobalOrd(doc, globalOrd, sub);
                    }
                }
            };
        }
    }

    /** Copies {@code from} into {@code to}, growing {@code to}'s backing array as needed. */
    protected static void copy(BytesRef from, BytesRef to) {
        if (to.bytes.length < from.length) {
            to.bytes = new byte[ArrayUtil.oversize(from.length, 1)];
        }
        to.offset = 0;
        to.length = from.length;
        System.arraycopy(from.bytes, from.offset, to.bytes, 0, from.length);
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
        if (valueCount == 0) { // no context in this reader
            return buildEmptyAggregation();
        }

        final int size;
        if (bucketCountThresholds.getMinDocCount() == 0) {
            // if minDocCount == 0 then we can end up with more buckets then maxBucketOrd() returns
            size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize());
        } else {
            size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize());
        }
        long otherDocCount = 0;
        // Select the top `size` buckets; `spare` is reused until the queue takes ownership of it.
        BucketPriorityQueue<OrdBucket> ordered = new BucketPriorityQueue<>(size, order.comparator(this));
        OrdBucket spare = new OrdBucket(-1, 0, null, showTermDocCountError, 0);
        for (long globalTermOrd = 0; globalTermOrd < valueCount; ++globalTermOrd) {
            if (includeExclude != null && !acceptedGlobalOrdinals.get(globalTermOrd)) {
                continue;
            }
            final long bucketOrd = getBucketOrd(globalTermOrd);
            // Negative bucketOrd means the term was never collected (remap mode).
            final int bucketDocCount = bucketOrd < 0 ? 0 : bucketDocCount(bucketOrd);
            if (bucketCountThresholds.getMinDocCount() > 0 && bucketDocCount == 0) {
                continue;
            }
            otherDocCount += bucketDocCount;
            spare.globalOrd = globalTermOrd;
            spare.bucketOrd = bucketOrd;
            spare.docCount = bucketDocCount;
            if (bucketCountThresholds.getShardMinDocCount() <= spare.docCount) {
                spare = ordered.insertWithOverflow(spare);
                if (spare == null) {
                    // The queue kept our object; allocate a fresh spare for the next iteration.
                    spare = new OrdBucket(-1, 0, null, showTermDocCountError, 0);
                }
            }
        }

        // Get the top buckets
        final StringTerms.Bucket[] list = new StringTerms.Bucket[ordered.size()];
        long survivingBucketOrds[] = new long[ordered.size()];
        for (int i = ordered.size() - 1; i >= 0; --i) {
            final OrdBucket bucket = ordered.pop();
            survivingBucketOrds[i] = bucket.bucketOrd;
            BytesRef scratch = new BytesRef();
            // Materialize the term bytes only for the surviving buckets.
            copy(lookupGlobalOrd.apply(bucket.globalOrd), scratch);
            list[i] = new StringTerms.Bucket(scratch, bucket.docCount, null, showTermDocCountError, 0, format);
            list[i].bucketOrd = bucket.bucketOrd;
            // otherDocCount ends up as the total count minus what the returned buckets cover.
            otherDocCount -= list[i].docCount;
        }
        //replay any deferred collections
        runDeferredCollections(survivingBucketOrds);

        //Now build the aggs
        for (int i = 0; i < list.length; i++) {
            StringTerms.Bucket bucket = list[i];
            bucket.aggregations = bucket.docCount == 0 ? bucketEmptyAggregations() : bucketAggregations(bucket.bucketOrd);
            bucket.docCountError = 0;
        }

        return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(),
            bucketCountThresholds.getMinDocCount(), pipelineAggregators(), metaData(), format,
            bucketCountThresholds.getShardSize(), showTermDocCountError, otherDocCount,
            Arrays.asList(list), 0);
    }

    /**
     * This is used internally only, just for compare using global ordinal instead of term bytes in the PQ.
     * All key-related accessors throw {@link UnsupportedOperationException} because the term bytes are
     * only resolved for the buckets that survive the priority queue.
     */
    static class OrdBucket extends InternalTerms.Bucket<OrdBucket> {
        long globalOrd;

        OrdBucket(long globalOrd, long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError) {
            super(docCount, aggregations, showDocCountError, docCountError, null);
            this.globalOrd = globalOrd;
        }

        @Override
        public int compareTerm(Terms.Bucket other) {
            // Global ordinals are assigned in term order, so comparing ordinals compares terms.
            return Long.compare(globalOrd, ((OrdBucket) other).globalOrd);
        }

        @Override
        public String getKeyAsString() {
            throw new UnsupportedOperationException();
        }

        @Override
        public Object getKey() {
            throw new UnsupportedOperationException();
        }

        @Override
        OrdBucket newBucket(long docCount, InternalAggregations aggs, long docCountError) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Number getKeyAsNumber() {
            throw new UnsupportedOperationException();
        }

        @Override
        protected void writeTermTo(StreamOutput out) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        protected final XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            throw new UnsupportedOperationException();
        }
    }

    @Override
    protected void doClose() {
        // Safe when bucketOrds is null (dense mode): Releasables.close tolerates it.
        Releasables.close(bucketOrds);
    }

    /**
     * Variant of {@link GlobalOrdinalsStringTermsAggregator} that resolves global ordinals post segment collection
     * instead of on the fly for each match. This is beneficial for low cardinality fields, because it can reduce
     * the amount of look-ups significantly.
     *
     * <p>During collection only per-segment counts are accumulated in {@code segmentDocCounts}
     * (shifted by +1 so slot 0 absorbs "missing"); the segment-to-global mapping is applied once
     * per segment in {@link #mapSegmentCountsToGlobalCounts()}.
     */
    static class LowCardinality extends GlobalOrdinalsStringTermsAggregator {

        // Doc counts per segment ordinal + 1 (slot 0 is the "no value" sink), reused across segments.
        private IntArray segmentDocCounts;

        private RandomAccessOrds globalOrds;
        private RandomAccessOrds segmentOrds;

        LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource,
                       Terms.Order order, DocValueFormat format,
                       BucketCountThresholds bucketCountThresholds, SearchContext context, Aggregator parent,
                       boolean forceDenseMode, SubAggCollectionMode collectionMode, boolean showTermDocCountError,
                       List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
            // No include/exclude filter in this variant (passed as null to the super constructor).
            super(name, factories, valuesSource, order, format, bucketCountThresholds, null, context, parent,
                forceDenseMode, collectionMode, showTermDocCountError, pipelineAggregators, metaData);
            this.segmentDocCounts = context.bigArrays().newIntArray(1, true);
        }

        @Override
        public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
            if (segmentOrds != null) {
                // Flush the previous segment's counts before switching to the new segment.
                mapSegmentCountsToGlobalCounts();
            }

            globalOrds = valuesSource.globalOrdinalsValues(ctx);
            segmentOrds = valuesSource.ordinalsValues(ctx);
            segmentDocCounts = context.bigArrays().grow(segmentDocCounts, 1 + segmentOrds.getValueCount());
            // This variant must not have sub-aggregations collecting inline.
            assert sub == LeafBucketCollector.NO_OP_COLLECTOR;
            final SortedDocValues singleValues = DocValues.unwrapSingleton(segmentOrds);
            if (singleValues != null) {
                return new LeafBucketCollectorBase(sub, segmentOrds) {
                    @Override
                    public void collect(int doc, long bucket) throws IOException {
                        assert bucket == 0;
                        final int ord = singleValues.getOrd(doc);
                        // +1 shift: a "missing" ord of -1 lands in slot 0 and is skipped later.
                        segmentDocCounts.increment(ord + 1, 1);
                    }
                };
            } else {
                return new LeafBucketCollectorBase(sub, segmentOrds) {
                    @Override
                    public void collect(int doc, long bucket) throws IOException {
                        assert bucket == 0;
                        segmentOrds.setDocument(doc);
                        final int numOrds = segmentOrds.cardinality();
                        for (int i = 0; i < numOrds; i++) {
                            final long segmentOrd = segmentOrds.ordAt(i);
                            segmentDocCounts.increment(segmentOrd + 1, 1);
                        }
                    }
                };
            }
        }

        @Override
        protected void doPostCollection() {
            if (segmentOrds != null) {
                // Flush the counts of the last processed segment.
                mapSegmentCountsToGlobalCounts();
            }
        }

        @Override
        protected void doClose() {
            Releasables.close(segmentDocCounts);
        }

        private void mapSegmentCountsToGlobalCounts() {
            // There is no public method in Ordinals.Docs that allows for this mapping...
            // This is the cleanest way I can think of so far
            GlobalOrdinalMapping mapping;
            if (globalOrds.getValueCount() == segmentOrds.getValueCount()) {
                // Identical cardinality: segment ordinals are already global, no mapping needed.
                mapping = null;
            } else {
                mapping = (GlobalOrdinalMapping) globalOrds;
            }
            for (long i = 1; i < segmentDocCounts.size(); i++) {
                // We use set(...) here, because we need to reset the slot to 0.
                // segmentDocCounts get reused over the segments and otherwise counts would be too high.
                final int inc = segmentDocCounts.set(i, 0);
                if (inc == 0) {
                    continue;
                }
                final long ord = i - 1; // remember we do +1 when counting
                final long globalOrd = mapping == null ? ord : mapping.getGlobalOrd(ord);
                long bucketOrd = getBucketOrd(globalOrd);
                incrementBucketDocCount(bucketOrd, inc);
            }
        }
    }

    /**
     * A view over per-leaf global ordinals that only exposes ordinals present in an
     * accepted-ordinals bitset (the include/exclude filter).
     */
    private static final class FilteredOrdinals extends AbstractRandomAccessOrds {

        private final RandomAccessOrds inner;
        private final LongBitSet accepted;

        // Accepted ordinals of the current document, valid in ords[0..cardinality).
        private int cardinality;
        private long[] ords = new long[0];

        private FilteredOrdinals(RandomAccessOrds inner, LongBitSet accepted) {
            this.inner = inner;
            this.accepted = accepted;
        }

        @Override
        public long getValueCount() {
            return inner.getValueCount();
        }

        @Override
        public long ordAt(int index) {
            return ords[index];
        }

        @Override
        public void doSetDocument(int docId) {
            inner.setDocument(docId);
            final int innerCardinality = inner.cardinality();
            ords = ArrayUtil.grow(ords, innerCardinality);

            // Keep only the ordinals present in the accepted bitset.
            cardinality = 0;
            for (int slot = 0; slot < innerCardinality; slot++) {
                long ord = inner.ordAt(slot);
                if (accepted.get(ord)) {
                    ords[cardinality++] = ord;
                }
            }
        }

        @Override
        public int cardinality() {
            return cardinality;
        }

        @Override
        public BytesRef lookupOrd(long ord) {
            return inner.lookupOrd(ord);
        }
    }
}
{ "content_hash": "1c5219ab71f905375849c50f17666749", "timestamp": "", "source": "github", "line_count": 433, "max_line_length": 138, "avg_line_length": 43.648960739030024, "alnum_prop": 0.6131746031746032, "repo_name": "strapdata/elassandra5-rc", "id": "62bad9313a774c7d24cc857a71f8a9108dd890b3", "size": "19688", "binary": false, "copies": "1", "ref": "refs/heads/v5.5.0-strapdata", "path": "core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "11082" }, { "name": "Batchfile", "bytes": "42785" }, { "name": "Emacs Lisp", "bytes": "3341" }, { "name": "FreeMarker", "bytes": "45" }, { "name": "Groovy", "bytes": "323716" }, { "name": "HTML", "bytes": "5519" }, { "name": "Java", "bytes": "41904615" }, { "name": "Perl", "bytes": "7271" }, { "name": "PowerShell", "bytes": "40357" }, { "name": "Python", "bytes": "565445" }, { "name": "Shell", "bytes": "188743" } ] }
End of preview.

No dataset card yet

Downloads last month
3