answer
stringlengths
15
1.25M
package com.hannesdorfmann.mosby3.sample.mail.base.view;

import android.support.v4.app.<API key>;
import android.support.v4.util.Pair;
import android.view.View;
import android.widget.Toast;
import com.hannesdorfmann.mosby3.sample.mail.IntentStarter;
import com.hannesdorfmann.mosby3.sample.mail.R;
import com.hannesdorfmann.mosby3.sample.mail.base.presenter.BaseRxMailPresenter;
import com.hannesdorfmann.mosby3.sample.mail.mails.MailsAdapter;
import com.hannesdorfmann.mosby3.sample.mail.mails.MailsAdapterHolders;
import com.hannesdorfmann.mosby3.sample.mail.model.contact.Person;
import com.hannesdorfmann.mosby3.sample.mail.model.mail.Mail;
import java.util.List;
import javax.inject.Inject;

/**
 * Base class for fragments that want to display a list of Mails.
 *
 * <p>Implements the adapter's click/star/person listeners itself and forwards
 * user actions either to the {@link IntentStarter} (navigation) or the
 * presenter (starring).
 *
 * @author Hannes Dorfmann
 */
public abstract class BaseMailsFragment<V extends BaseMailView<List<Mail>>, P extends BaseRxMailPresenter<V, List<Mail>>>
    extends <API key><List<Mail>, V, P>
    implements BaseMailView<List<Mail>>, MailsAdapter.MailClickedListener,
    MailsAdapter.PersonClickListener, MailsAdapter.MailStarListner {

  @Inject IntentStarter intentStarter;

  @Override protected int getLayoutRes() {
    return R.layout.fragment_mails_base;
  }

  // This fragment acts as mail-click, person-click and star listener for the adapter.
  @Override protected ListAdapter<List<Mail>> createAdapter() {
    return new MailsAdapter(getActivity(), this, this, this);
  }

  @Override public void onMailClicked(MailsAdapterHolders.MailViewHolder vh, Mail mail) {
    // Build shared-element transition options mapping each list-row view to its
    // transition name before launching the details screen.
    <API key> options =
        <API key>.<API key>(getActivity(),
            Pair.create((View) vh.senderPic, getString(R.string.<API key>)),
            Pair.create((View) vh.subject, getString(R.string.shared_mail_subject)),
            Pair.create((View) vh.date, getString(R.string.shared_mail_date)),
            Pair.create((View) vh.star, getString(R.string.shared_mail_star)),
            Pair.create(getActivity().findViewById(R.id.toolbar),
                getString(R.string.shared_mail_toolbar)));
    intentStarter.showMailDetails(getActivity(), mail, options.toBundle());
  }

  @Override public void onPersonClicked(Person person) {
    intentStarter.showProfile(getActivity(), person);
  }

  @Override public void onMailStarClicked(Mail mail) {
    // Toggle: starring an already-starred mail unstars it.
    presenter.starMail(mail, !mail.isStarred());
  }

  @Override public void markMailAsStared(int mailId) {
    // Search for the mail in the adapter's current data set.
    Mail mail = ((MailsAdapter) adapter).findMail(mailId);
    if (mail != null) {
      mail.setStarred(true);
      adapter.<API key>();
    }
  }

  @Override public void markMailAsUnstared(int mailId) {
    // Search for the mail in the adapter's current data set.
    Mail mail = ((MailsAdapter) adapter).findMail(mailId);
    if (mail != null) {
      mail.setStarred(false);
      adapter.<API key>();
    }
  }

  // Shows a short toast, substituting the mail sender's name into the message template.
  private void showStarErrorToast(int messageRes, Mail mail) {
    Toast.makeText(getActivity(),
        String.format(getString(messageRes), mail.getSender().getName()),
        Toast.LENGTH_SHORT).show();
  }

  @Override public void showStaringFailed(Mail mail) {
    showStarErrorToast(R.string.error_staring_mail, mail);
  }

  @Override public void showUnstaringFailed(Mail mail) {
    showStarErrorToast(R.string.<API key>, mail);
  }

  @Override public void markMailAsRead(Mail mail, boolean read) {
    MailsAdapter.MailInAdapterResult result = ((MailsAdapter) adapter).findMail(mail);
    if (result.isFound()) {
      result.getAdapterMail().read(read);
      adapter.<API key>();
    }
  }
}
#ifndef <API key> #define <API key> #include <ostream> #include <sstream> #include <typeinfo> #include "escape.hpp" #include "../forward.hpp" #include "../message_extension.hpp" #include "../type.hpp" #include "../external/pegtl/internal/demangle.hpp" namespace tao::json::internal { inline void to_stream( std::ostream& os, const bool v ) { os << ( v ? "true" : "false" ); } inline void to_stream( std::ostream& os, const type t ) { os << to_string( t ); } template< typename T > void to_stream( std::ostream& os, const T& t ) { os << t; } template< std::size_t N > void to_stream( std::ostream& os, const char ( &t )[ N ] ) { os.write( t, N - 1 ); } template< typename... Ts > void format_to( std::ostream& oss, const Ts&... ts ) { ( internal::to_stream( oss, ts ), ... ); } template< typename... Ts > [[nodiscard]] std::string format( const Ts&... ts ) { std::ostringstream oss; format_to( oss, ts... ); return oss.str(); } } // namespace tao::json::internal #endif
<?php

/**
 * Repository-management CLI workflow: lists commits in the given repositories
 * whose background import has not finished yet.
 */
final class <API key> extends <API key> {

  protected function didConstruct() {
    $this
      ->setName('importing')
      ->setExamples('**importing** __repository__ ...')
      ->setSynopsis(
        pht(
          'Show commits in __repository__ which are still importing.'))
      ->setArguments(
        array(
          array(
            'name' => 'simple',
            'help' => pht('Show simpler output.'),
          ),
          array(
            'name' => 'repos',
            'wildcard' => true,
          ),
        ));
  }

  public function execute(<API key> $args) {
    $repos = $this->loadRepositories($args, 'repos');
    if (!$repos) {
      throw new <API key>(
        pht(
          'Specify one or more repositories to find importing commits for.'));
    }

    // Key repositories by ID so result rows can be joined back to them below.
    $repos = mpull($repos, null, 'getID');

    $table = new <API key>();
    $conn_r = $table->establishConnection('r');
    // A commit is still importing while any of the IMPORTED_* bits is unset,
    // i.e. (importStatus & IMPORTED_ALL) != IMPORTED_ALL.
    $rows = queryfx_all(
      $conn_r,
      'SELECT repositoryID, commitIdentifier, importStatus FROM %T WHERE repositoryID IN (%Ld) AND (importStatus & %d) != %d',
      $table->getTableName(),
      array_keys($repos),
      <API key>::IMPORTED_ALL,
      <API key>::IMPORTED_ALL);

    $console = PhutilConsole::getConsole();
    if ($rows) {
      foreach ($rows as $row) {
        $repo = $repos[$row['repositoryID']];
        $identifier = $row['commitIdentifier'];

        $console->writeOut('%s', $repo->formatCommitName($identifier));

        if (!$args->getArg('simple')) {
          // Verbose mode: list which import phases are still outstanding.
          $status = $row['importStatus'];
          $need = array();
          if (!($status & <API key>::IMPORTED_MESSAGE)) {
            $need[] = pht('Message');
          }
          if (!($status & <API key>::IMPORTED_CHANGE)) {
            $need[] = pht('Change');
          }
          if (!($status & <API key>::IMPORTED_OWNERS)) {
            $need[] = pht('Owners');
          }
          if (!($status & <API key>::IMPORTED_HERALD)) {
            $need[] = pht('Herald');
          }

          $console->writeOut(' %s', implode(', ', $need));
        }

        $console->writeOut("\n");
      }
    } else {
      $console->writeErr(
        "%s\n",
        pht('No importing commits found.'));
    }

    // Exit code 0: success (an empty result is not an error).
    return 0;
  }

}
package org.batfish.representation.cisco_nxos;

import static com.google.common.base.Preconditions.checkArgument;

import com.google.common.base.MoreObjects;
import com.google.common.collect.Range;
import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.batfish.datamodel.IntegerSpace;
import org.batfish.datamodel.Ip6;
import org.batfish.datamodel.Prefix6;

/** An IPv6 static route */
public final class StaticRouteV6 implements Serializable {

  /** Builder for {@link StaticRouteV6}; invariants are validated in {@link #build}. */
  public static final class Builder {
    private boolean _discard;
    private @Nullable String _name;
    private @Nullable String _nextHopInterface;
    private @Nullable Ip6 _nextHopIp;
    private @Nullable String _nextHopVrf;
    private int _preference;
    private @Nonnull Prefix6 _prefix;
    private long _tag;
    private @Nullable Integer _track;

    private Builder(Prefix6 prefix) {
      _prefix = prefix;
      // Default administrative preference for static routes.
      _preference = 1;
    }

    /**
     * Builds the immutable route.
     *
     * @throws IllegalArgumentException if neither discard nor a next hop is configured, if
     *     discard is combined with next-hop options, or if preference/tag/track are out of range
     */
    public @Nonnull StaticRouteV6 build() {
      checkArgument(
          _discard || _nextHopInterface != null || _nextHopIp != null,
          "Must specify either discard or next-hop options");
      checkArgument(
          !_discard || (_nextHopInterface == null && _nextHopIp == null && _nextHopVrf == null),
          "Discard static route mutually exclusive with next-hop options");
      checkArgument(
          <API key>.contains(_preference),
          "Invalid preference %s outside of %s",
          _preference,
          <API key>);
      // Tag is held in a long but must fit an unsigned 32-bit value.
      checkArgument(
          0 <= _tag && _tag <= 0xFFFFFFFFL,
          "Invalid tag %s is not an unsigned 32-bit integer",
          _tag);
      checkArgument(
          _track == null || <API key>.contains((int) _track),
          "Invalid track object number %s outside of %s",
          _track,
          <API key>);
      return new StaticRouteV6(
          _discard,
          _name,
          _nextHopInterface,
          _nextHopIp,
          _nextHopVrf,
          _preference,
          _prefix,
          _track,
          _tag);
    }

    public @Nonnull Builder setDiscard(boolean discard) {
      _discard = discard;
      return this;
    }

    public @Nonnull Builder setName(@Nullable String name) {
      _name = name;
      return this;
    }

    public @Nonnull Builder setNextHopInterface(@Nullable String nextHopInterface) {
      _nextHopInterface = nextHopInterface;
      return this;
    }

    public @Nonnull Builder setNextHopIp(@Nullable Ip6 nextHopIp) {
      _nextHopIp = nextHopIp;
      return this;
    }

    public @Nonnull Builder setNextHopVrf(@Nullable String nextHopVrf) {
      _nextHopVrf = nextHopVrf;
      return this;
    }

    public @Nonnull Builder setPreference(int preference) {
      _preference = preference;
      return this;
    }

    public @Nonnull Builder setPrefix(Prefix6 prefix) {
      _prefix = prefix;
      return this;
    }

    public @Nonnull Builder setTag(long tag) {
      _tag = tag;
      return this;
    }

    public @Nonnull Builder setTrack(@Nullable Integer track) {
      _track = track;
      return this;
    }
  }

  // NOTE(review): the two IntegerSpace constant names below were redacted in this copy;
  // from their uses in build(), the first (1-255) bounds preference and the second
  // (1-512) bounds track object numbers — confirm against the original source.
  public static final IntegerSpace <API key> = IntegerSpace.of(Range.closed(1, 255));
  public static final IntegerSpace <API key> = IntegerSpace.of(Range.closed(1, 512));
  public static final int MAX_NAME_LENGTH = 50;

  public static @Nonnull Builder builder(Prefix6 prefix) {
    return new Builder(prefix);
  }

  private final boolean _discard;
  private final @Nullable String _name;
  private final @Nullable String _nextHopInterface;
  private final @Nullable Ip6 _nextHopIp;
  private final @Nullable String _nextHopVrf;
  private final int _preference;
  private final @Nonnull Prefix6 _prefix;
  private final long _tag;
  private final @Nullable Integer _track;

  private StaticRouteV6(
      boolean discard,
      @Nullable String name,
      @Nullable String nextHopInterface,
      @Nullable Ip6 nextHopIp,
      @Nullable String nextHopVrf,
      int preference,
      Prefix6 prefix,
      @Nullable Integer track,
      long tag) {
    _prefix = prefix;
    _discard = discard;
    _nextHopInterface = nextHopInterface;
    _nextHopIp = nextHopIp;
    _nextHopVrf = nextHopVrf;
    _track = track;
    _name = name;
    _preference = preference;
    _tag = tag;
  }

  public boolean getDiscard() {
    return _discard;
  }

  public @Nullable String getName() {
    return _name;
  }

  /**
   * The interface used for ARP lookup and forwarding. If not {@code null}, must be a member of
   * {@link #getNextHopVrf}.
   */
  public @Nullable String getNextHopInterface() {
    return _nextHopInterface;
  }

  public @Nullable Ip6 getNextHopIp() {
    return _nextHopIp;
  }

  /**
   * The {@link Vrf} used for lookup of the {@link #getNextHopIp}. To be effective, this {@link
   * Vrf} should be distinct from the VRF in which this route is installed.
   */
  public @Nullable String getNextHopVrf() {
    return _nextHopVrf;
  }

  public int getPreference() {
    return _preference;
  }

  public @Nonnull Prefix6 getPrefix() {
    return _prefix;
  }

  public long getTag() {
    return _tag;
  }

  public @Nullable Integer getTrack() {
    return _track;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    } else if (!(o instanceof StaticRouteV6)) {
      return false;
    }
    StaticRouteV6 that = (StaticRouteV6) o;
    return _discard == that._discard
        && _preference == that._preference
        && _tag == that._tag
        && _prefix.equals(that._prefix)
        && Objects.equals(_name, that._name)
        && Objects.equals(_nextHopInterface, that._nextHopInterface)
        && Objects.equals(_nextHopIp, that._nextHopIp)
        && Objects.equals(_nextHopVrf, that._nextHopVrf)
        && Objects.equals(_track, that._track);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        _discard,
        _name,
        _nextHopInterface,
        _nextHopIp,
        _nextHopVrf,
        _preference,
        _prefix,
        _tag,
        _track);
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this)
        .omitNullValues()
        .add("prefix", _prefix)
        .add("nextHopInterface", _nextHopInterface)
        .add("nextHopIp", _nextHopIp)
        .add("nextHopVrf", _nextHopVrf)
        .add("discard", _discard)
        .add("name", _name)
        .add("preference", _preference)
        .add("tag", _tag)
        .add("track", _track)
        .toString();
  }
}
// English (Template) jQuery.timeago.settings.strings = { prefixAgo: null, prefixFromNow: null, suffixAgo: "ago", suffixFromNow: "from now", seconds: "less than a minute", minute: "about a minute", minutes: "%d minutes", hour: "about an hour", hours: "about %d hours", day: "a day", days: "%d days", month: "about a month", months: "%d months", year: "about a year", years: "%d years", wordSeparator: " ", numbers: [] };
1. [System](README_system.md) (white) 1. [Request](README_request.md) (blue) 1. [Accept](README_accept.md) (green) 1. [Retrieve](README_retrieve.md) (white) 1. [Precondition](README_precondition.md) (yellow) 1. Create/Process * [Create](README_create.md) (violet) * [Process](README_process.md) (red) 1. [Response](README_response.md) (cyan) 1. [Alternative](README_alternative.md) (gray) ![HTTP headers status](https://rawgithub.com/for-GET/<API key>/master/httpdd.png) ## System This block is in charge of "system"-level (request agnostic) checks. | callback | output | default : B26 | [`start : in`](#start--in) | | B24 | [`<API key> :bin`](#<API key>) | T / F | TRUE B23 | [`is_uri_too_long :bin`](#is_uri_too_long-bin) | T / F | FALSE B22 | [`<API key> :bin`](#<API key>) | T / F | FALSE B21 | [`method :var`](#method-var) | *Method* | `Transaction.request.method` | [`create_methods :var`](#create_methods-var) | [ *Method* ] | [ POST<br>] | [`process_methods :var`](#process_methods-var) | [ *Method* ] | [ POST<br>, PATCH<br>] | [`implemented_methods :var`](#<API key>) | [ *Method* ] | [ OPTIONS<br>, HEAD<br>, GET<br>, DELETE<br>, TRACE<br>, `create_methods :var`<br>, `process_methods :var`<br>] | [`<API key> : in`](#<API key>) | T / F | B20 | [`<API key> :var`](#<API key>) | [ *HeaderName* ] | [ content-encoding<br>, content-language<br>, content-length<br>, content-md5<br>, content-type<br>] | [`<API key> : in`](#<API key>) | T / F | B19 | [`<API key> :bin`](#<API key>) | T / F | TRUE B18 | [`<API key> :var`](#<API key>) | [ *ExtensionName* ] | [] | [`<API key> : in`](#<API key>) | T / F | B17 | [`is_system_block_ok :bin`](#<API key>) | T / F | TRUE P25 | [`beautify_headers :bin`](#beatify_headers-bin) | | | [`last : in`](#last--in) | | | [`<API key> : in`](#<API key>) | | | [`<API key> :bin`](#<API key>) | | | [`override :bin`](#override-bin) | | P26 | [`finish : in`](#finish--in) | | `start : in` Prepare *Transaction* for the request. 
`<API key> :bin` Return TRUE if the resource is accepting requests; return FALSE otherwise. Reference: [HTTPbis](http://tools.ietf.org/html/<API key> > The 503 (Service Unavailable) status code indicates that the server is currently unable to handle the request due to a temporary overload or scheduled maintenance, which will likely be alleviated after some delay. The server MAY send a Retry-After header field (Section 7.1.3) to suggest an appropriate amount of time for the client to wait before retrying the request. > > Note: The existence of the 503 status code does not imply that a server has to use it when becoming overloaded. Some servers might simply refuse the connection. `is_uri_too_long :bin` Return TRUE if the URI is too long; return FALSE otherwise. Reference: [HTTPbis](http://tools.ietf.org/html/<API key> > The 414 (URI Too Long) status code indicates that the server is refusing to service the request because the request-target (Section 5.3 of [Part1]) is longer than the server is willing to interpret. This rare condition is only likely to occur when a client has improperly converted a POST request to a GET request with long query information, when the client has descended into a "black hole" of redirection (e.g., a redirected URI prefix that points to a suffix of itself), or when the server is under attack by a client attempting to exploit potential security holes. > A 414 response is cacheable unless otherwise indicated by the method definition or explicit cache controls (see Section 4.1.2 of [Part6]). `method :var` If you allow the HTTP method to be overridden (e.g. via the <API key> header) then return the intended method; return `Transaction.request.method` otherwise. Reference: [Google Data APIs](https://developers.google.com/gdata/docs/2.0/basics#DeletingEntry) > If your firewall does not allow DELETE, then do an HTTP POST and set the method override header as follows: `<API key>: DELETE`. 
`safe_methods :var`

FIXME

`create_methods :var`

FIXME

`process_methods :var`

FIXME

`implemented_methods :var`

Return a list of HTTP methods that are implemented by the system.

`<API key> : in`

Return TRUE if `Transaction.request.method` is in `implemented_methods :var`; return FALSE otherwise.

`<API key> :var`

Return a list of Content-* headers that are implemented by the system.

`<API key> : in`

Return TRUE if Content-* headers match `<API key> :var`; return FALSE otherwise.

`<API key> :bin`

Return TRUE if the requested functionality (other than methods and content headers) is implemented; return FALSE otherwise.

Reference: [HTTPbis](http://tools.ietf.org/html/<API key>

> The 501 (Not Implemented) status code indicates that the server does not support the functionality required to fulfill the request. This is the appropriate response when the server does not recognize the request method and is not capable of supporting it for any resource.

> A 501 response is cacheable unless otherwise indicated by the method definition or explicit cache controls (see Section 4.1.2 of [Part6]).

`<API key> :var`

Return a list of Expect extensions that are implemented by the system.

`<API key> : in`

Return TRUE if expect extensions match `<API key> :var`; return FALSE otherwise.

Reference: [HTTPbis](http://tools.ietf.org/html/<API key>

> The 417 (Expectation Failed) status code indicates that the expectation given in the request's Expect header field (Section 5.1.1) could not be met by at least one of the inbound servers.

`is_system_block_ok :bin`

FIXME

`last : in`

FIXME

`<API key> :var`

FIXME

`<API key> :bin`

FIXME

`override :bin`

Last chance for forcefully amending the response of the *Transaction*. Return TRUE if succeeded; return FALSE otherwise.

`finish : in`

Finalize *Transaction*.
package com.perforce.p4java.server.delegator;

import com.perforce.p4java.exception.AccessException;
import com.perforce.p4java.exception.ConnectionException;
import com.perforce.p4java.exception.P4JavaException;
import com.perforce.p4java.exception.RequestException;
import com.perforce.p4java.option.server.CounterOptions;

/**
 * Delegator interface for 'p4 counter' operations: reading, setting, and
 * deleting named Perforce counters on a Perforce server.
 */
public interface ICounterDelegator {

    /**
     * Get the value of a named Perforce counter from the Perforce server.
     *
     * @param counterName
     *            non-null counter name.
     * @return counter value associated with counterName.
     * @throws ConnectionException
     *             if the Perforce server is unreachable or is not connected.
     * @throws RequestException
     *             if the Perforce server encounters an error during its
     *             processing of the request.
     * @throws AccessException
     *             if the Perforce server denies access to the caller.
     */
    String getCounter(final String counterName) throws ConnectionException, RequestException, AccessException;

    /**
     * Get the value of a named Perforce counter from the Perforce server. Note
     * that this method will return a zero string (i.e. "0") if the named
     * counter doesn't exist (rather than throw an exception); use getCounters
     * to see if a counter actually exists before you use it.
     * <p>
     *
     * Note that despite their name, counters can be any value, not just a
     * number; hence the string return value here.
     *
     * @param counterName
     *            non-null counter name.
     * @param opts
     *            CounterOptions object describing optional parameters; if null,
     *            no options are set.
     * @return non-null (but possibly empty or useless) counter value associated
     *         with counterName.
     * @throws P4JavaException
     *             if an error occurs processing this method and its parameters.
     * @since 2012.2
     */
    String getCounter(final String counterName, final CounterOptions opts) throws P4JavaException;

    /**
     * Set the value of a named Perforce counter on the Perforce server.
     *
     * @param counterName
     *            non-null counter name.
     * @param value
     *            value the counter should be set to.
     * @param perforceCounter
     *            if true, treat the counter as a Perforce internal counter.
     * @throws ConnectionException
     *             if the Perforce server is unreachable or is not connected.
     * @throws RequestException
     *             if the Perforce server encounters an error during its
     *             processing of the request.
     * @throws AccessException
     *             if the Perforce server denies access to the caller.
     */
    void setCounter(final String counterName, final String value, final boolean perforceCounter)
            throws ConnectionException, RequestException, AccessException;

    /**
     * Create, set or delete a counter on a Perforce server. This method can be
     * used to create, set, increment, or delete a counter according to the
     * specific options set in the associated options object. Note that the
     * increment operation does not work on servers earlier than 10.1, and that
     * the return value is <i>never</i> guaranteed to be non-null -- use with
     * caution.
     *
     * @param counterName
     *            non-null counter name.
     * @param value
     *            value the counter should be set to; can be null if the set
     *            operation is an increment.
     * @param opts
     *            CounterOptions object describing optional parameters; if null,
     *            no options are set.
     * @return possibly-null current (post-set, post-increment) value; may be
     *         zero if the operation was a delete; may not be reliable for pre
     *         10.1 servers.
     * @throws P4JavaException
     *             if an error occurs processing this method and its parameters.
     */
    String setCounter(final String counterName, final String value, final CounterOptions opts)
            throws P4JavaException;

    /**
     * Delete a named counter from the Perforce server.
     *
     * @param counterName
     *            non-null counter name.
     * @param perforceCounter
     *            if true, treat the counter as a Perforce internal counter.
     * @throws ConnectionException
     *             if the Perforce server is unreachable or is not connected.
     * @throws RequestException
     *             if the Perforce server encounters an error during its
     *             processing of the request.
     * @throws AccessException
     *             if the Perforce server denies access to the caller.
     */
    void deleteCounter(final String counterName, final boolean perforceCounter)
            throws ConnectionException, RequestException, AccessException;
}
// package-info: applies a package-wide javax.annotation annotation to every
// type in this package.
// NOTE(review): the annotation name was redacted in this copy — presumably
// ParametersAreNonnullByDefault; confirm against the original source.
@<API key>
package org.batfish.datamodel.hsrp;

import javax.annotation.<API key>;
/* mixin definition ; sets LTR and RTL within the same style call */

/* Base layout for radio buttons and switch thumbs. */
md-radio-button,
.md-switch-thumb {
  box-sizing: border-box;
  display: block;
  margin: 15px;
  white-space: nowrap;
  cursor: pointer;
}

/* Normalize box sizing for all descendants and their pseudo-elements. */
md-radio-button *, md-radio-button *:before, md-radio-button *:after,
.md-switch-thumb *, .md-switch-thumb *:before, .md-switch-thumb *:after {
  box-sizing: border-box;
}

/* The native input is hidden; the control is drawn with .md-container. */
md-radio-button input,
.md-switch-thumb input {
  display: none;
}

/* 16x16 visual container holding the on/off circles. */
md-radio-button .md-container,
.md-switch-thumb .md-container {
  position: relative;
  top: 4px;
  display: inline-block;
  width: 16px;
  height: 16px;
  cursor: pointer;
}

/* 48x48 touch/ripple target centered over the 16x16 control. */
md-radio-button .md-container .md-ripple-container,
.md-switch-thumb .md-container .md-ripple-container {
  position: absolute;
  display: block;
  width: 48px;
  height: 48px;
  left: -16px;
  top: -16px;
}

/* Focus/hover halo behind the control (expanded via .md-focused below). */
md-radio-button .md-container:before,
.md-switch-thumb .md-container:before {
  background-color: transparent;
  border-radius: 50%;
  content: '';
  position: absolute;
  display: block;
  height: auto;
  left: 0;
  top: 0;
  right: 0;
  bottom: 0;
  transition: all 0.5s;
  width: auto;
}

/* Outer ring shown when unchecked. */
md-radio-button .md-off,
.md-switch-thumb .md-off {
  position: absolute;
  top: 0;
  left: 0;
  width: 16px;
  height: 16px;
  border: solid 2px;
  border-radius: 50%;
  transition: border-color ease 0.28s;
}

/* Inner dot; scaled from 0 (hidden) to 0.5 when checked. */
md-radio-button .md-on,
.md-switch-thumb .md-on {
  position: absolute;
  top: 0;
  left: 0;
  width: 16px;
  height: 16px;
  border-radius: 50%;
  transition: -webkit-transform ease 0.28s;
  transition: transform ease 0.28s;
  -webkit-transform: scale(0);
  transform: scale(0);
}

md-radio-button.md-checked .md-on,
.md-switch-thumb.md-checked .md-on {
  -webkit-transform: scale(0.5);
  transform: scale(0.5);
}

/* Text label next to the control. */
md-radio-button .md-label,
.md-switch-thumb .md-label {
  position: relative;
  display: inline-block;
  margin-left: 10px;
  margin-right: 10px;
  vertical-align: middle;
  white-space: normal;
  pointer-events: none;
  width: auto;
}

md-radio-button .circle,
.md-switch-thumb .circle {
  border-radius: 50%;
}

md-radio-group:focus {
  outline: none;
}

/* Expand the halo around the checked control when the group has focus. */
md-radio-group.md-focused .md-checked .md-container:before {
  left: -8px;
  top: -8px;
  right: -8px;
  bottom: -8px;
}

/* High-contrast mode: make the inner dot visible. */
@media screen and (-ms-high-contrast: active) {
  md-radio-button.md-default-theme .md-on {
    background-color: #fff;
  }
}
use crate::context::LintContext; use crate::rustc_middle::ty::TypeFoldable; use crate::LateContext; use crate::LateLintPass; use rustc_hir::def::DefKind; use rustc_hir::{Expr, ExprKind}; use rustc_middle::ty; use rustc_span::symbol::sym; declare_lint! { The `noop_method_call` lint detects specific calls to noop methods such as a calling `<&T as Clone>::clone` where `T: !Clone`. ### Example ```rust # #![allow(unused)] #![warn(noop_method_call)] struct Foo; let foo = &Foo; let clone: &Foo = foo.clone(); ``` {{produces}} ### Explanation Some method calls are noops meaning that they do nothing. Usually such methods are the result of blanket implementations that happen to create some method invocations that end up not doing anything. For instance, `Clone` is implemented on all `&T`, but calling `clone` on a `&T` where `T` does not implement clone, actually doesn't do anything as references are copy. This lint detects these calls and warns the user about them. pub NOOP_METHOD_CALL, Allow, "detects the use of well-known noop methods" } declare_lint_pass!(NoopMethodCall => [NOOP_METHOD_CALL]); impl<'tcx> LateLintPass<'tcx> for NoopMethodCall { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { // We only care about method calls. let (call, elements) = match expr.kind { ExprKind::MethodCall(call, _, elements, _) => (call, elements), _ => return, }; // We only care about method calls corresponding to the `Clone`, `Deref` and `Borrow` // traits and ignore any other method call. let (trait_id, did) = match cx.typeck_results().type_dependent_def(expr.hir_id) { // Verify we are dealing with a method/associated function. Some((DefKind::AssocFn, did)) => match cx.tcx.trait_of_item(did) { // Check that we're dealing with a trait method for one of the traits we care about. 
Some(trait_id) if matches!( cx.tcx.get_diagnostic_name(trait_id), Some(sym::Borrow | sym::Clone | sym::Deref) ) => { (trait_id, did) } _ => return, }, _ => return, }; let substs = cx.typeck_results().node_substs(expr.hir_id); if substs.<API key>(cx.tcx) { // We can't resolve on types that require monomorphization, so we don't handle them if // we need to perfom substitution. return; } let param_env = cx.tcx.param_env(trait_id); // Resolve the trait method instance. let i = match ty::Instance::resolve(cx.tcx, param_env, did, substs) { Ok(Some(i)) => i, _ => return, }; // (Re)check that it implements the noop diagnostic. for s in [sym::noop_method_clone, sym::noop_method_deref, sym::noop_method_borrow].iter() { if cx.tcx.is_diagnostic_item(*s, i.def_id()) { let method = &call.ident.name; let receiver = &elements[0]; let receiver_ty = cx.typeck_results().expr_ty(receiver); let expr_ty = cx.typeck_results().expr_ty_adjusted(expr); if receiver_ty != expr_ty { // This lint will only trigger if the receiver type and resulting expression \ // type are the same, implying that the method call is unnecessary. return; } let expr_span = expr.span; let note = format!( "the type `{:?}` which `{}` is being called on is the same as \ the type returned from `{}`, so the method call does not do \ anything and can be removed", receiver_ty, method, method, ); let span = expr_span.with_lo(receiver.span.hi()); cx.struct_span_lint(NOOP_METHOD_CALL, span, |lint| { let method = &call.ident.name; let message = format!( "call to `.{}()` on a reference in this situation does nothing", &method, ); lint.build(&message) .span_label(span, "unnecessary method call") .note(&note) .emit() }); } } } }
package store

import (
	"errors"
	"fmt"
	"os"
	"path/filepath"
	"sync"
)

// store/SimpleFSLockFactory.java

// SimpleFSLock implements a lock backed by a plain file on disk: the lock is
// held while the lock file exists.
type SimpleFSLock struct {
	*LockImpl
	file, dir string
}

// newSimpleFSLock builds a lock whose file lives at lockDir/lockFileName.
func newSimpleFSLock(lockDir, lockFileName string) *SimpleFSLock {
	ans := &SimpleFSLock{
		dir:  lockDir,
		file: filepath.Join(lockDir, lockFileName),
	}
	ans.LockImpl = NewLockImpl(ans)
	return ans
}

// Obtain tries to acquire the lock by creating the lock file, creating the
// lock directory first if it does not exist yet.
func (lock *SimpleFSLock) Obtain() (ok bool, err error) {
	// Ensure that lockDir exists and is a directory:
	var fi os.FileInfo
	fi, err = os.Stat(lock.dir)
	if err == nil { // exists
		if !fi.IsDir() {
			err = errors.New(fmt.Sprintf("Found regular file where directory expected: %v", lock.dir))
			return
		}
	} else if os.IsNotExist(err) {
		err = os.Mkdir(lock.dir, 0755)
		if err != nil { // IO error
			return
		}
	} else { // IO error
		return
	}
	var f *os.File
	if f, err = os.Create(lock.file); err == nil {
		fmt.Printf("File '%v' is created.\n", f.Name())
		ok = true
		defer f.Close()
	}
	return
}

// Close releases the lock by removing the lock file.
func (lock *SimpleFSLock) Close() error {
	return os.Remove(lock.file)
}

// IsLocked reports whether the lock file currently exists.
func (lock *SimpleFSLock) IsLocked() bool {
	f, err := os.Open(lock.file)
	if err == nil {
		defer f.Close()
	}
	return err == nil || os.IsExist(err)
}

func (lock *SimpleFSLock) String() string {
	return fmt.Sprintf("SimpleFSLock@%v", lock.file)
}

/*
Implements LockFactory using os.Create().

NOTE: This API may have the same issue as the one in Lucene Java that the
write lock may not be released when the Go program exits abnormally. When this
happens, an error is returned when trying to create a writer, in which case
you need to explicitly clear the lock file first. You can either manually
remove the file, or use the UnlockDirectory() API. But, first be certain that
no writer is in fact writing to the index; otherwise you can easily corrupt
your index.

If you suspect that this or any other LockFactory is not working properly in
your environment, you can easily test it by using <API key>, LockVerifyServer
and LockStressTest.
*/
type SimpleFSLockFactory struct {
	*FSLockFactory
}

func <API key>(path string) *SimpleFSLockFactory {
	ans := &SimpleFSLockFactory{}
	ans.FSLockFactory = newFSLockFactory()
	ans.setLockDir(path)
	return ans
}

// Make returns a lock named name, prefixed with the factory's lock prefix.
func (f *SimpleFSLockFactory) Make(name string) Lock {
	if f.lockPrefix != "" {
		name = fmt.Sprintf("%v-%v", f.lockPrefix, name)
	}
	return newSimpleFSLock(f.lockDir, name)
}

// Clear removes the lock file for name regardless of who holds it.
func (f *SimpleFSLockFactory) Clear(name string) error {
	if f.lockPrefix != "" {
		name = fmt.Sprintf("%v-%v", f.lockPrefix, name)
	}
	return os.Remove(filepath.Join(f.lockDir, name))
}

// SimpleFSDirectory is an FSDirectory whose inputs are plain seek-based file reads.
type SimpleFSDirectory struct {
	*FSDirectory
}

func <API key>(path string) (d *SimpleFSDirectory, err error) {
	d = &SimpleFSDirectory{}
	d.FSDirectory, err = newFSDirectory(d, path)
	if err != nil {
		return nil, err
	}
	return
}

// OpenInput opens the named file for reading within this directory.
func (d *SimpleFSDirectory) OpenInput(name string, context IOContext) (IndexInput, error) {
	d.EnsureOpen()
	fpath := filepath.Join(d.path, name)
	// fmt.Printf("Opening %v...\n", fpath)
	return <API key>(fmt.Sprintf("SimpleFSIndexInput(path='%v')", fpath), fpath, context)
}

// func (d *SimpleFSDirectory) CreateSlicer(name string, ctx IOContext) (slicer IndexInputSlicer, err error) {
// d.EnsureOpen()
// f, err := os.Open(filepath.Join(d.path, name))
// if err != nil {
// return nil, err
// return &<API key>{f, ctx, d.chunkSize}, nil
// type <API key> struct {
// file *os.File
// ctx IOContext
// chunkSize int
// func (s *<API key>) Close() error {
// err := s.file.Close()
// if err != nil {
// fmt.Printf("Closing %v failed: %v\n", s.file.Name(), err)
// return err
// func (s *<API key>) OpenSlice(desc string, offset, length int64) IndexInput {
// return <API key>(fmt.Sprintf("SimpleFSIndexInput(%v in path='%v' slice=%v:%v)",
// desc, s.file.Name(), offset, offset+length),
// s.file, offset, length, bufferSize(s.ctx), s.chunkSize)
// func (s *<API key>) OpenFullSlice() IndexInput {
// fi, err := s.file.Stat()
// if err != nil {
// panic(err)
// return s.OpenSlice("full-slice", 0, fi.Size())

/*
The maximum chunk size is 8192 bytes, because Java RandomAccessFile mallocs a
native buffer outside of the stack if the read buffer size is larger. GoLucene
takes the same default value.

TODO: test larger value here
*/
const CHUNK_SIZE = 8192

type SimpleFSIndexInput struct {
	*BufferedIndexInput
	fileLock sync.Locker
	// the file channel we will read from
	file *os.File
	// is this instance a clone and hence does not own the file to close it
	isClone bool
	// start offset: non-zero in the slice case
	off int64
	// end offset (start+length)
	end int64
}

func <API key>(desc, path string, ctx IOContext) (*SimpleFSIndexInput, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	fstat, err := f.Stat()
	if err != nil {
		return nil, err
	}
	ans := new(SimpleFSIndexInput)
	ans.BufferedIndexInput = <API key>(ans, desc, ctx)
	ans.file = f
	ans.off = 0
	ans.end = fstat.Size()
	ans.fileLock = &sync.Mutex{}
	return ans, nil
}

func <API key>(desc string, file *os.File, off, length int64, bufferSize int) *SimpleFSIndexInput {
	ans := new(SimpleFSIndexInput)
	ans.BufferedIndexInput = <API key>(ans, desc, bufferSize)
	ans.file = file
	ans.off = off
	ans.end = off + length
	ans.isClone = true
	return ans
}

// Close closes the underlying file only when this instance owns it (not a clone).
func (in *SimpleFSIndexInput) Close() error {
	if !in.isClone {
		return in.file.Close()
	}
	return nil
}

// Clone shares the file handle and file lock; the clone never closes the file.
func (in *SimpleFSIndexInput) Clone() IndexInput {
	ans := &SimpleFSIndexInput{
		in.BufferedIndexInput.Clone(),
		in.fileLock,
		in.file,
		true,
		in.off,
		in.end,
	}
	ans.spi = ans
	return ans
}

// Slice returns a view over [offset, offset+length) of this input.
func (in *SimpleFSIndexInput) Slice(desc string, offset, length int64) (IndexInput, error) {
	assert2(offset >= 0 && length >= 0 && offset+length <= in.Length(),
		"slice() %v out of bounds: %v", desc, in)
	ans := <API key>(desc, in.file, in.off+offset, length, in.bufferSize)
	ans.fileLock = in.fileLock // share same file lock
	return ans, nil
}

func (in *SimpleFSIndexInput) Length() int64 {
	return in.end - in.off
}

// readInternal fills buf starting at the current file pointer, reading in
// CHUNK_SIZE pieces while holding the shared file lock (seek+read must be atomic).
func (in *SimpleFSIndexInput) readInternal(buf []byte) error {
	length := len(buf)
	in.fileLock.Lock()
	defer in.fileLock.Unlock()

	// TODO make use of Go's relative Seek or ReadAt function
	position := in.off + in.FilePointer()
	_, err := in.file.Seek(position, 0)
	if err != nil {
		return err
	}

	if position+int64(length) > in.end {
		return errors.New(fmt.Sprintf("read past EOF: %v", in))
	}

	total := 0
	for {
		readLength := length - total
		if CHUNK_SIZE < readLength {
			readLength = CHUNK_SIZE
		}
		// FIXME verify slice is working
		i, err := in.file.Read(buf[total : total+readLength])
		if err != nil {
			return errors.New(fmt.Sprintf("%v: %v", err, in))
		}
		total += i
		if total >= length {
			break
		}
	}
	return nil
}

// seekInternal is a no-op: readInternal seeks absolutely on every call.
func (in *SimpleFSIndexInput) seekInternal(pos int64) error {
	return nil
}
package com.jiangKlijna.web.control;

import javax.annotation.Resource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import com.jiangKlijna.web.bean.Result;
import com.jiangKlijna.web.service.UserService;
import org.springframework.web.bind.annotation.ResponseBody;

/**
 * User management endpoints under {@code /user}: registration (JSON and XML
 * flavors), removal, and lookup. All handlers return a {@link Result} rendered
 * directly to the response body.
 */
@RequestMapping("/user")
@Controller
public class UserControl extends BaseControl {

    // Field-injected user service bean.
    // NOTE(review): public visibility is unusual for an injected field; confirm
    // nothing outside this class reads it before narrowing to private.
    @Resource(name = "userService")
    public UserService us;

    /**
     * Registers a new user, responding as JSON.
     * Delegates to the service only when testParameter (inherited from
     * BaseControl) accepts both values; otherwise returns the shared failure Result.
     */
    @ResponseBody
    @RequestMapping("/regist.json")
    public Result regist_json(String username, String password) {
        return testParameter(username, password) ? us.regist(username, password) : <API key>;
    }

    /**
     * Registers a new user, responding as XML. Same validation path as the
     * JSON variant; only the mapped extension differs.
     */
    @ResponseBody
    @RequestMapping("/regist.xml")
    public Result regist_xml(String username, String password) {
        return testParameter(username, password) ? us.regist(username, password) : <API key>;
    }

    /** Removes the user with the given id. No parameter validation is applied here. */
    @ResponseBody
    @RequestMapping("/remove.json")
    public Result remove(int id) {
        return us.remove(id);
    }

    /** Looks up the user with the given id, responding as XML. */
    @ResponseBody
    @RequestMapping("/find.xml")
    public Result find(int id) {
        return us.find(id);
    }
}
# Azul Zulu JRE

Azul Zulu JRE provides Java runtimes developed by the Azul team. Versions of Java from the `1.6`, `1.7`, and `1.8` levels are available. Unless otherwise configured, the version of Java that will be used is specified in [`config/zulu_jre.yml`][].

<table>
<tr>
<td><strong>Detection Criterion</strong></td>
<td>Unconditional. Existence of a single bound Volume Service will result in terminal heap dumps being written.
<ul>
<li>Existence of a Volume Service service is defined as the <a href="http://docs.cloudfoundry.org/devguide/deploy-apps/<API key>.html#VCAP-SERVICES"><code>VCAP_SERVICES</code></a> payload containing a service whose name, label or tag has <code>heap-dump</code> as a substring.</li>
</ul>
</td>
</tr>
<tr>
<td><strong>Tags</strong></td>
<td><tt>open-jdk-like-jre=&lang;version&rang;, <API key>=&lang;version&rang;, jvmkill=&lang;version&rang;</tt></td>
</tr>
</table>

Tags are printed to standard output by the buildpack detect script.

## Configuration

For general information on configuring the buildpack, including how to specify configuration values through environment variables, refer to [Configuration and Extension][].

The JRE can be configured by modifying the [`config/zulu_jre.yml`][] file in the buildpack fork. The JRE uses the [`Repository` utility support][repositories] and so it supports the [version syntax][] defined there.

To use Zulu JRE instead of OpenJDK without forking java-buildpack, set environment variable:

`cf set-env <app_name> <API key> '{jres: ["JavaBuildpack::Jre::ZuluJRE"]}'`

`cf restage <app_name>`

| Name | Description |
| `jre.repository_root` | The URL of the Zulu repository index ([details][repositories]). |
| `jre.version` | The version of Java runtime to use. Note: version 1.8.0 and higher require the `memory_sizes` and `memory_heuristics` mappings to specify `metaspace` rather than `permgen`. |
| `jvmkill.repository_root` | The URL of the `jvmkill` repository index ([details][repositories]). |
| `jvmkill.version` | The version of `jvmkill` to use. Candidate versions can be found in the listings for [mountainlion][<API key>] and [trusty][jvmkill-trusty]. |
| `memory_calculator` | Memory calculator defaults, described below under "Memory". |

Additional Resources

The JRE can also be configured by overlaying a set of resources on the default distribution. To do this, add files to the `resources/zulu_jre` directory in the buildpack fork.

# JCE Unlimited Strength

To add the JCE Unlimited Strength `local_policy.jar`, add your file to `resources/zulu_jre/lib/security/local_policy.jar`. This file will be overlaid onto the Zulu distribution.

# Custom CA Certificates

To add custom SSL certificates, add your `cacerts` file to `resources/zulu_jre/lib/security/cacerts`. This file will be overlaid onto the Zulu distribution.

`jvmkill`

The `jvmkill` agent runs when an application has experienced a resource exhaustion event. When this event occurs, the agent will print out a histogram of the first 100 largest types by total number of bytes.

plain Resource exhaustion event: the JVM was unable to allocate memory from the heap. ResourceExhausted! (1/0) | Instance Count | Total Bytes | Class Name | | 18273 | 313157136 | [B | | 47806 | 7648568 | [C | | 14635 | 1287880 | Ljava/lang/reflect/Method; | | 46590 | 1118160 | Ljava/lang/String; | | 8413 | 938504 | Ljava/lang/Class; | | 28573 | 914336 | Ljava/util/concurrent/ConcurrentHashMap$Node; |

It will also print out a summary of all of the memory spaces in the JVM.
plain Memory usage: Heap memory: init 65011712, used 332392888, committed 351797248, max 351797248 Non-heap memory: init 2555904, used 63098592, committed 64815104, max 377790464 Memory pool usage: Code Cache: init 2555904, used 14702208, committed 15007744, max 251658240 PS Eden Space: init 16252928, used 84934656, committed 84934656, max 84934656 PS Survivor Space: init 2621440, used 0, committed 19398656, max 19398656 Compressed Class Space: init 0, used 5249512, committed 5505024, max 19214336 Metaspace: init 0, used 43150616, committed 44302336, max 106917888 PS Old Gen: init 43515904, used 247459792, committed 247463936, max 247463936 If a heap dump [Volume Service][] is bound, terminal heap dumps will be written with the pattern `<CONTAINER_DIR>/<SPACE_NAME>-<SPACE_ID[0,8]>/<APPLICATION_NAME>-<APPLICATION_ID[0,8]>/<INSTANCE_INDEX>-<TIMESTAMP>-<INSTANCE_ID[0,8]>.hprof` plain Heapdump written to /var/vcap/data/<API key>/<API key>/<API key>/0-2017-06-13T18:31:29+0000-7b23124e.hprof Memory The total available memory for the application's container is specified when an application is pushed. The Java buildpack uses this value to control the JRE's use of various regions of memory and logs the JRE memory settings when the application starts or restarts. These settings can be influenced by configuring the `stack_threads` and/or `class_count` mappings (both part of the `memory_calculator` mapping), and/or Java options relating to memory. Note: If the total available memory is scaled up or down, the Java buildpack will re-calculate the JRE memory settings the next time the application is started. # Total Memory The user can change the container's total memory available to influence the JRE memory settings. Unless the user specifies the heap size Java option (`-Xmx`), increasing or decreasing the total memory available results in the heap size setting increasing or decreasing by a corresponding amount. 
# Stack Threads The amount of memory that should be allocated to stacks is given as an amount of memory per thread with the Java option `-Xss`. If an explicit number of threads should be used for the calculation of stack memory, then it should be specified as in the following example: yaml stack_threads: 500 # Loaded Classes The amount of memory that is allocated to metaspace and compressed class space (or, on Java 7, the permanent generation) is calculated from an estimate of the number of classes that will be loaded. The default behaviour is to estimate the number of loaded classes as a fraction of the number of class files in the application. If a specific number of loaded classes should be used for calculations, then it should be specified as in the following example: yaml class_count: 500 # Java Options If the JRE memory settings need to be fine-tuned, the user can set one or more Java memory options to specific values. The heap size can be set explicitly, but changing the value of options other than the heap size can also affect the heap size. For example, if the user increases the maximum direct memory size from its default value of 10 Mb to 20 Mb, then this will reduce the calculated heap size by 10 Mb. # Memory Calculation Memory calculation happens before every `start` of an application and is performed by an external program, the [Java Buildpack Memory Calculator]. There is no need to `restage` an application after scaling the memory as restarting will cause the memory settings to be recalculated. The container's total available memory is allocated into heap, metaspace and compressed class space (or permanent generation for Java 7), direct memory, and stack memory settings. The memory calculation is described in more detail in the [Memory Calculator's README]. 
The inputs to the memory calculation, except the container's total memory (which is unknown at staging time), are logged during staging, for example: Loaded Classes: 13974, Threads: 300, JAVA_OPTS: '' The container's total memory is logged during `cf push` and `cf scale`, for example: state since cpu memory disk details #0 running 2017-04-10 02:20:03 PM 0.0% 896K of 1G 1.3M of 1G The JRE memory settings are logged when the application is started or re-started, for example: JVM Memory Configuration: -XX:MaxDirectMemorySize=10M -XX:MaxMetaspaceSize=99199K \ -XX:<API key>=240M -XX:<API key>=18134K -Xss1M -Xmx368042K [`config/components.yml`]: ../config/components.yml [`config/zulu_jre.yml`]: ../config/zulu_jre.yml [Azul Zulu]: https: [Configuration and Extension]: ../README.md#<API key> [Java Buildpack Memory Calculator]: https://github.com/cloudfoundry/<API key> [<API key>]: http://download.pivotal.io.s3.amazonaws.com/jvmkill/mountainlion/x86_64/index.yml [jvmkill-trusty]: http://download.pivotal.io.s3.amazonaws.com/jvmkill/trusty/x86_64/index.yml [Memory Calculator's README]: https://github.com/cloudfoundry/<API key> [repositories]: <API key>.md [version syntax]: <API key>.md#<API key> [Volume Service]: https://docs.cloudfoundry.org/devguide/services/using-vol-services.html [Zulu JRE]: jre-zulu_jre.md
## Table of Contents 1. [Portable Server Prerequisites](#<API key>) 1. [Build Prerequisites](#build-prerequisites) 1. [Run-time Prerequisites](#<API key>) 1. [Portable Server on Linux](#<API key>) 1. [Building on Linux](#building-on-linux) 1. [Installing on Linux](#installing-on-linux) 1. [Running on Linux](#running-on-linux) 1. [Portable Server on Windows](#<API key>) 1. [Building on Windows](#building-on-windows) 1. [Installing on Windows](#<API key>) 1. [Running on Windows](#running-on-windows) 1. [Portable Server on Mac OS](#<API key>) # Portable Server Prerequisites ## Build Prerequisites: * Python 2.7 * Python pexpect installed * Swig with support for Python (4.0.1 or later) * g++ (4.8 or later) ## Run-time prerequisites: * Python 2.7 * Python tornado installed * Python Imaging Library (PIL) installed # On CentOS/RHEL 6: sudo yum -y install python27-pip sudo pip2.7 install tornado pillow # Portable Server on Linux ## Building on Linux Make sure you have Python, the `pexpect` Pip package, as well as `tornado`, and g++ installed. Swig is bundled for Linux. Getting a Build Environment # On Ubuntu: sudo apt-get install g++ python python-pexpect python-tornado libpython-dev python-psycopg2 # On CentOS/RHEL 7: sudo yum -y install gcc-c++ python python-pip python-tornado python-psycopg2 sudo pip install pexpect # On CentOS/RHEL 6: If you're running an old version of a Red Hat distribution, such as Cent OS or RHEL 6, the packaged `g++` compiler will be too old. 
Enable the EPEL package repository, and install `devtoolset2` to get a more recent compiler: sudo yum install <API key> sudo yum -y install ius-release sudo yum -y install gcc-c++ python27 python27-pip python27-devel sudo pip2.7 install tornado pexpect psycopg2-binary gitpython Building Run cd earthenterprise/earth_enterprise/src/portableserver ./build.py The build script will produce a compressed archive with a name that looks like `earthenterprise/earth_enterprise/src/portableserver/build/<API key>.1.3-20170412.tar.gz`. The build date part of the file name will change depending on the day you build. You can install the built Portable Server from this `.tar.gz` archive. To clean build files, run cd earthenterprise/earth_enterprise/src/portableserver ./build.py --clean ## Installing on Linux Portable Server is not currently packaged for Linux distributions by the GEE Open Source team. Instead, just extract the tarball generated in the [Building on Linux](#building-on-linux) step in a directory you want to run it from. You could also create links, or start-up shell scripts for your convenience. You need to have the Python interpreter and package dependencies listed in [Run-time Prerequisites](#<API key>) set up and installed in order to run Portable Server. If you carried out the [Building on Linux](#building-on-linux) step, you already have all of the required dependencies. ## Running on Linux Change into the directory you extracted the built Portable Server tarball into (in the [Installing on Linux](#installing-on-linux) step). Then, just start `server/portable_server.py`: 1. cd <API key>.1.3-20170412/server/ #(substituting your extracted directory) 1. python portable_server.py You can edit `<API key>.1.3-20170412/server/portable.cfg` and `<API key>.1.3-20170412/server/remote.cfg` for your configuration needs before starting the server. 
# Portable Server on Windows ## Building on Windows **Note:** It is important to ensure `g++` and `python` versions used are both 32 bit or both 64 bit. Mixing them will lead to compilation/link errors which are not immediately obvious. If you want to build a 64 bit version of portable server, you must install the 64 bit versions of g++ and python. Install a g++ Compiler You can install [MinGW](https://sourceforge.net/projects/mingw-w64/) with a g++ compiler. Ensure the architecture you select to install matches your desired build architecture. If intending to distribute the build to other windows systems, be sure to select the `win` threads installation. Compiling with g++ for posix threads will lead to additional run-time dependencies. Make sure `g++` is set in your `PATH`. Install Swig with Python Support 1. Download a [Swig](http: 2. Extract the Zip in a desired installation directory. 3. Add the installation directory you extracted to your `PATH`. Install Python Download and install [Python](https: Once you have Python installed, make sure you have `pexpect` and `tornado` installed. E.g.: cd \Python27\Scripts pip install pexpect tornado Add the directory you installed Python in to your `PATH`. Build Portable Server Open a command prompt with `g++`, `swig` and `python` in your `PATH`. E.g., on Windows your `PATH` may look like: C:\swigwin-3.0.12;C:\Python27;C:\MinGW\bin;C:\Program Files\ . . . Run cd earthenterprise\earth_enterprise\src\portableserver python build.py The build script will produce a compressed archive with a name that looks like `earthenterprise\earth_enterprise\src\portableserver\build\<API key>.1.3-20170412.zip`. The build date part of the file name will change depending on the day you build. You can install the built Portable Server from this Zip archive. 
To clean build files, run cd earthenterprise\earth_enterprise\src\portableserver python build.py --clean ## Installing on Windows We do not currently provide a Windows installer for Portable Server. Instead, just extract the built Portable Server from the Zip archive generated in the [Building on Windows](#building-on-windows) step in a directory you want to run it from. You need to have the Python interpreter and packages listed in [Run-time Prerequisites](#<API key>) set up and installed in order to run Portable Server. If you carried out the [Building on Windows](#building-on-windows) step, you already have the required dependencies. ## Running on Windows Change into the directory you extracted the built Portable Server Zip archive into (in the [Installing on Windows](#<API key>) step). Then, just start `server\portable_server.py`: 1. cd <API key>.1.3-20170412\server\ #(substituting your extracted directory) 1. python portable_server.py You can edit `<API key>.1.3-20170412\server\portable.cfg` and `<API key>.1.3-20170412\server\remote.cfg` for your configuration needs before starting the server. # Portable Server on Mac OS Currently, building and running Portable Server on Mac OS has not been tested. Previous versions have run on Mac OS, and the `build.py` script has untested logic for running the Mac OS build commands. However, at present, you'll have to fix any problems you run into on Mac OS.
package org.simpleframework.xml.core;

import org.simpleframework.xml.Attribute;
import org.simpleframework.xml.Element;
import org.simpleframework.xml.ElementArray;
import org.simpleframework.xml.Root;
import org.simpleframework.xml.Serializer;
import org.simpleframework.xml.ValidationTestCase;

/**
 * Round-trip tests for {@link ElementArray} with a custom entry element name
 * ("substitute"): one array of composite entries, one array of primitive
 * (Character) entries. Both fixtures carry an explicit {@code length}
 * attribute on the list element.
 */
public class ArrayEntryTest extends ValidationTestCase {

   // Composite-entry fixture: three <substitute> elements, each with an id
   // attribute and a nested <text> element. The stray \n\r sequences are
   // deliberate whitespace noise for the parser.
   private static final String LIST =
   "<?xml version=\"1.0\"?>\n"+
   "<exampleArray>\n"+
   " <list length='3'>\n"+
   " <substitute id='1'>\n"+
   " <text>one</text> \n\r"+
   " </substitute>\n\r"+
   " <substitute id='2'>\n"+
   " <text>two</text> \n\r"+
   " </substitute>\n"+
   " <substitute id='3'>\n"+
   " <text>three</text> \n\r"+
   " </substitute>\n"+
   " </list>\n"+
   "</exampleArray>";

   // Primitive-entry fixture: four single-character <substitute> values.
   private static final String PRIMITIVE_LIST =
   "<?xml version=\"1.0\"?>\n"+
   "<<API key>>\n"+
   " <list length='4'>\r\n" +
   " <substitute>a</substitute>\n"+
   " <substitute>b</substitute>\n"+
   " <substitute>c</substitute>\n"+
   " <substitute>d</substitute>\n"+
   " </list>\r\n" +
   "</<API key>>";

   /** One composite array entry: an id attribute plus a text element. */
   @Root
   private static class Entry {

      @Attribute
      private int id;

      @Element
      private String text;

      public String getText() {
         return text;
      }

      public int getId() {
         return id;
      }
   }

   /** Holder for an array of composite entries mapped to <substitute> elements. */
   @Root
   private static class ExampleArray {

      @ElementArray(name="list", entry="substitute")
      // XXX bad error if the length= attribute is missing
      private Entry[] list;

      public Entry[] getArray() {
         return list;
      }
   }

   /** Holder for an array of boxed Characters mapped to <substitute> elements. */
   @Root
   private static class <API key> {

      @ElementArray(name="list", entry="substitute")
      // XXX bad error if this was an array
      private Character[] list;

      public Character[] getArray() {
         return list;
      }
   }

   /** Deserializes the composite fixture and validates a serialize round-trip. */
   public void testExampleArray() throws Exception {
      Serializer serializer = new Persister();
      ExampleArray list = serializer.read(ExampleArray.class, LIST);
      validate(list, serializer);
   }

   /** Deserializes the primitive fixture and validates a serialize round-trip. */
   public void <API key>() throws Exception {
      Serializer serializer = new Persister();
      <API key> list = serializer.read(<API key>.class, PRIMITIVE_LIST);
      validate(list, serializer);
   }
}
package org.apereo.cas.support.wsfederation.web;

import org.apereo.cas.<API key>;
import org.apereo.cas.services.<API key>;
import org.apereo.cas.support.wsfederation.<API key>;
import org.apereo.cas.util.HttpRequestUtils;
import lombok.val;
import org.apereo.inspektr.common.web.ClientInfo;
import org.apereo.inspektr.common.web.ClientInfoHolder;
import org.junit.jupiter.api.Test;
import org.springframework.mock.web.<API key>;
import org.springframework.mock.web.<API key>;
import org.springframework.mock.web.MockServletContext;
import org.springframework.webflow.context.servlet.<API key>;
import org.springframework.webflow.test.MockRequestContext;

import static org.junit.jupiter.api.Assertions.*;

/**
 * This is {@link <API key>}.
 *
 * @author Misagh Moayyed
 * @since 5.3.0
 */
public class <API key> extends <API key> {

    /**
     * Stores a service for a WS-Federation context id, then simulates the
     * callback request carrying the wctx parameter and the issued cookies,
     * and verifies that retrieval yields the same service id.
     */
    @Test
    public void verifyOperation() {
        val context = new MockRequestContext();
        val request = new <API key>();
        val response = new <API key>();
        context.setExternalContext(new <API key>(new MockServletContext(), request, response));
        // Populate client metadata consumed by inspektr's ClientInfoHolder.
        request.setRemoteAddr("185.86.151.11");
        request.setLocalAddr("185.88.151.11");
        request.addHeader(HttpRequestUtils.USER_AGENT_HEADER, "MSIE");
        ClientInfoHolder.setClientInfo(new ClientInfo(request));

        request.addParameter(<API key>.PARAMETER_METHOD, "POST");
        request.setAttribute("locale", "en");
        request.setAttribute("theme", "custom");

        val config = <API key>.iterator().next();
        val wctx = config.getId();
        val original = <API key>.getService();
        // Store the service, then replay the response cookies on the next request
        // so retrieve() can resolve the stored state.
        <API key>.store(request, response, wctx, original, config);

        request.addParameter(<API key>.WCTX, wctx);
        request.setCookies(response.getCookies());
        val service = <API key>.retrieve(context);
        assertNotNull(service);
        assertEquals(original.getId(), service.getId());
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using System.Web.Optimization;
using System.Web.Routing;

namespace <API key>
{
    /// <summary>
    /// ASP.NET MVC application entry point (Global.asax code-behind).
    /// </summary>
    public class MvcApplication : System.Web.HttpApplication
    {
        /// <summary>
        /// Runs once when the application starts: registers MVC areas, global
        /// filters, routes, and script/style bundles. Order follows the
        /// standard MVC template (areas before routes so area routes win).
        /// </summary>
        protected void Application_Start()
        {
            AreaRegistration.RegisterAllAreas();
            FilterConfig.<API key>(GlobalFilters.Filters);
            RouteConfig.RegisterRoutes(RouteTable.Routes);
            BundleConfig.RegisterBundles(BundleTable.Bundles);
        }
    }
}
package matchers

import (
	"fmt"

	"github.com/cloudfoundry/bosh-utils/internal/github.com/onsi/gomega/format"
)

// BeTrueMatcher succeeds when the actual value is the boolean true.
type BeTrueMatcher struct {
}

// Match reports whether actual is true. Non-booleans produce an error.
func (matcher *BeTrueMatcher) Match(actual interface{}) (success bool, err error) {
	// FIX: use a comma-ok type assertion instead of isBool + an unchecked
	// actual.(bool). If isBool were kind-based, a named bool type (type T bool)
	// would pass the check and then panic on the bare assertion; the comma-ok
	// form fails gracefully with the matcher error instead.
	b, ok := actual.(bool)
	if !ok {
		return false, fmt.Errorf("Expected a boolean. Got:\n%s", format.Object(actual, 1))
	}
	return b, nil
}

// FailureMessage describes a failed positive expectation.
func (matcher *BeTrueMatcher) FailureMessage(actual interface{}) (message string) {
	return format.Message(actual, "to be true")
}

// Negated failure message for the "not to be true" case.
func (matcher *BeTrueMatcher) <API key>(actual interface{}) (message string) {
	return format.Message(actual, "not to be true")
}
#include "argon2-encoding.h"
#include "argon2-core.h"
#include "utils.h"
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/*
 * Parse a decimal integer from 'str' into '*v'.
 * Returns a pointer to the first non-digit character, or NULL when:
 *   - there is no digit at all,
 *   - the encoding is not minimal (a redundant leading zero), or
 *   - the value does not fit in an 'unsigned long'.
 */
static const char *
decode_decimal(const char *str, unsigned long *v)
{
    const char   *p   = str;
    unsigned long acc = 0UL;

    while (*p >= '0' && *p <= '9') {
        unsigned long digit = (unsigned long) (*p - '0');

        /* acc * 10 would overflow */
        if (acc > (ULONG_MAX / 10)) {
            return NULL;
        }
        acc *= 10;
        /* acc + digit would overflow */
        if (digit > (ULONG_MAX - acc)) {
            return NULL;
        }
        acc += digit;
        p++;
    }
    /* no digits, or a non-minimal encoding such as "007" (bare "0" is fine) */
    if (p == str || (str[0] == '0' && p != str + 1)) {
        return NULL;
    }
    *v = acc;
    return p;
}

/*
 * Code specific to Argon2.
 *
 * The code below applies the following format:
 *
 *  $argon2<T>[$v=<num>]$m=<num>,t=<num>,p=<num>$<bin>$<bin>
 *
 * where <T> is either 'i' or 'id', <num> is a decimal integer (positive, fits
 * in an 'unsigned long') and <bin> is Base64-encoded data (no '=' padding
 * characters, no newline or whitespace).
 *
 * The last two binary chunks (encoded in Base64) are, in that order,
 * the salt and the output. Both are required. The binary salt length and the
 * output length must be in the allowed ranges defined in argon2.h.
 *
 * The ctx struct must contain buffers large enough to hold the salt and pwd
 * when it is fed into <API key>.
 */

/*
 * Decode an Argon2i hash string into the provided structure 'ctx'.
 * Returned value is ARGON2_OK on success.
 */
int <API key>(argon2_context *ctx, const char *str, argon2_type type) {
/* Prefix checking: consume 'prefix' from str or fail the whole decode. */
#define CC(prefix)                                                             \
    do {                                                                       \
        size_t cc_len = strlen(prefix);                                        \
        if (strncmp(str, prefix, cc_len) != 0) {                               \
            return <API key>;                                                  \
        }                                                                      \
        str += cc_len;                                                         \
    } while ((void) 0, 0)

/* Optional prefix checking with supplied code */
#define CC_opt(prefix, code)                                                   \
    do {                                                                       \
        size_t cc_len = strlen(prefix);                                        \
        if (strncmp(str, prefix, cc_len) == 0) {                               \
            str += cc_len;                                                     \
            {                                                                  \
                code;                                                          \
            }                                                                  \
        }                                                                      \
    } while ((void) 0, 0)

/* Decoding prefix into decimal */
#define DECIMAL(x)                                                             \
    do {                                                                       \
        unsigned long dec_x;                                                   \
        str = decode_decimal(str, &dec_x);                                     \
        if (str == NULL) {                                                     \
            return <API key>;                                                  \
        }                                                                      \
        (x) = dec_x;                                                           \
    } while ((void) 0, 0)

/* Decoding prefix into uint32_t decimal (rejects values above UINT32_MAX). */
#define DECIMAL_U32(x)                                                         \
    do {                                                                       \
        unsigned long dec_x;                                                   \
        str = decode_decimal(str, &dec_x);                                     \
        if (str == NULL || dec_x > UINT32_MAX) {                               \
            return <API key>;                                                  \
        }                                                                      \
        (x) = (uint32_t)dec_x;                                                 \
    } while ((void)0, 0)

/* Decoding base64 into a binary buffer; advances str past the encoded run. */
#define BIN(buf, max_len, len)                                                 \
    do {                                                                       \
        size_t bin_len = (max_len);                                            \
        const char *str_end;                                                   \
        if (sodium_base642bin((buf), (max_len), str, strlen(str), NULL,        \
                              &bin_len, &str_end,                              \
                              <API key>) != 0 ||                               \
            bin_len > UINT32_MAX) {                                            \
            return <API key>;                                                  \
        }                                                                      \
        (len) = (uint32_t) bin_len;                                            \
        str = str_end;                                                         \
    } while ((void) 0, 0)

    size_t maxsaltlen = ctx->saltlen; /* capacity of ctx->salt on entry */
    size_t maxoutlen = ctx->outlen;   /* capacity of ctx->out on entry */
    int validation_result;
    uint32_t version = 0;

    /* lengths are rewritten with the actual decoded sizes below */
    ctx->saltlen = 0;
    ctx->outlen = 0;

    /* the type tag selects the expected "$argon2..." prefix */
    if (type == Argon2_id) {
        CC("$argon2id");
    } else if (type == Argon2_i) {
        CC("$argon2i");
    } else {
        return <API key>;
    }
    CC("$v=");
    DECIMAL_U32(version);
    if (version != <API key>) {
        return <API key>;
    }
    CC("$m=");
    DECIMAL_U32(ctx->m_cost);
    /* NOTE(review): always false — DECIMAL_U32 already rejected > UINT32_MAX
       and m_cost is a uint32_t; dead code kept for fidelity. */
    if (ctx->m_cost > UINT32_MAX) {
        return <API key>;
    }
    CC(",t=");
    DECIMAL_U32(ctx->t_cost);
    if (ctx->t_cost > UINT32_MAX) { /* same: unreachable */
        return <API key>;
    }
    CC(",p=");
    DECIMAL_U32(ctx->lanes);
    if (ctx->lanes > UINT32_MAX) { /* same: unreachable */
        return <API key>;
    }
    /* one thread per lane */
    ctx->threads = ctx->lanes;
    CC("$");
    BIN(ctx->salt, maxsaltlen, ctx->saltlen);
    CC("$");
    BIN(ctx->out, maxoutlen, ctx->outlen);

    validation_result = <API key>(ctx);
    if (validation_result != ARGON2_OK) {
        return validation_result;
    }
    /* the entire string must have been consumed */
    if (*str == 0) {
        return ARGON2_OK;
    }
    return <API key>;

/* NOTE(review): DECIMAL_U32 is not #undef'd here — harmless but inconsistent. */
#undef CC
#undef CC_opt
#undef DECIMAL
#undef BIN
}

/* Enough for any uint32 in decimal (10 digits) plus the terminating NUL. */
#define U32_STR_MAXSIZE 11U

/* Write the decimal representation of x into str (NUL-terminated). */
static void u32_to_string(char *str, uint32_t x) {
    char tmp[U32_STR_MAXSIZE - 1U];
    size_t i;

    /* build digits right-to-left in tmp, then copy the used suffix */
    i = sizeof tmp;
    do {
        tmp[--i] = (x % (uint32_t) 10U) + '0';
        x /= (uint32_t) 10U;
    } while (x != 0U && i != 0U);
    memcpy(str, &tmp[i], (sizeof tmp) - i);
    str[(sizeof tmp) - i] = 0;
}

/*
 * Encode an argon2i hash string into the provided buffer. 'dst_len'
 * contains the size, in characters, of the 'dst' buffer; if 'dst_len'
 * is less than the number of required characters (including the
 * terminating 0), then this function returns an error.
 *
 * If pp->output_len is 0, then the hash string will be a salt string
 * (no output). if pp->salt_len is also 0, then the string will be a
 * parameter-only string (no salt and no output).
 *
 * On success, ARGON2_OK is returned.
 */
int <API key>(char *dst, size_t dst_len, argon2_context *ctx, argon2_type type) {
/* Append a literal string (and its terminating NUL), advancing dst. */
#define SS(str)                                                                \
    do {                                                                       \
        size_t pp_len = strlen(str);                                           \
        if (pp_len >= dst_len) {                                               \
            return <API key>;                                                  \
        }                                                                      \
        memcpy(dst, str, pp_len + 1);                                          \
        dst += pp_len;                                                         \
        dst_len -= pp_len;                                                     \
    } while ((void) 0, 0)

/* Append the decimal representation of a uint32. */
#define SX(x)                                                                  \
    do {                                                                       \
        char tmp[U32_STR_MAXSIZE];                                             \
        u32_to_string(tmp, x);                                                 \
        SS(tmp);                                                               \
    } while ((void) 0, 0)

/* Append a binary buffer as unpadded Base64. */
#define SB(buf, len)                                                           \
    do {                                                                       \
        size_t sb_len;                                                         \
        if (sodium_bin2base64(dst, dst_len, (buf), (len),                      \
                              <API key>) == NULL) {                            \
            return <API key>;                                                  \
        }                                                                      \
        sb_len = strlen(dst);                                                  \
        dst += sb_len;                                                         \
        dst_len -= sb_len;                                                     \
    } while ((void) 0, 0)

    int validation_result;

    /* the type tag selects the "$argon2..." prefix */
    switch (type) {
    case Argon2_id:
        SS("$argon2id$v=");
        break;
    case Argon2_i:
        SS("$argon2i$v=");
        break;
    default:
        return <API key>;
    }
    /* refuse to encode parameters that would not decode/validate */
    validation_result = <API key>(ctx);
    if (validation_result != ARGON2_OK) {
        return validation_result;
    }
    SX(<API key>);
    SS("$m=");
    SX(ctx->m_cost);
    SS(",t=");
    SX(ctx->t_cost);
    SS(",p=");
    SX(ctx->lanes);
    SS("$");
    SB(ctx->salt, ctx->saltlen);
    SS("$");
    SB(ctx->out, ctx->outlen);
    return ARGON2_OK;

#undef SS
#undef SX
#undef SB
}
package org.batfish.main.annotate;

import static com.google.common.base.Preconditions.checkArgument;
import static org.batfish.datamodel.answers.ParseStatus.FAILED;
import static org.batfish.main.CliUtils.readAllFiles;
import static org.batfish.main.CliUtils.relativize;
import static org.batfish.main.CliUtils.resolve;
import static org.batfish.main.CliUtils.writeAllFiles;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.LinkedHashMultimap;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.batfish.common.BfConsts;
import org.batfish.common.NetworkSnapshot;
import org.batfish.common.Warnings;
import org.batfish.common.Warnings.ParseWarning;
import org.batfish.config.Settings;
import org.batfish.datamodel.ConfigurationFormat;
import org.batfish.grammar.silent_syntax.<API key>;
import org.batfish.grammar.silent_syntax.<API key>.SilentSyntaxElem;
import org.batfish.identifiers.NetworkId;
import org.batfish.identifiers.SnapshotId;
import org.batfish.job.<API key>;
import org.batfish.job.<API key>;
import org.batfish.main.preprocess.Preprocessor;

/** Tool to annotate configurations with silent syntax and warnings */
public final class Annotate {

  /**
   * CLI entry point. Expects exactly two arguments: the snapshot input
   * directory and the output directory. Relative paths are resolved against
   * the working-directory environment variable when present (Bazel run case).
   *
   * @throws IOException if reading or writing configuration files fails
   */
  public static void main(String[] args) throws IOException {
    checkArgument(args.length == 2, "Expected arguments: <input_dir> <output_dir>");
    Path inputPath = Paths.get(args[0]);
    Path outputPath = Paths.get(args[1]);
    // Bazel: resolve relative to current working directory. No-op if paths are already absolute.
    String wd = System.getenv("<API key>");
    if (wd != null) {
      inputPath = Paths.get(wd).resolve(inputPath);
      outputPath = Paths.get(wd).resolve(outputPath);
    }
    Settings settings = new Settings(new String[] {"-storagebase", "/"});
    settings.setPrintParseTree(true);
    annotate(inputPath, outputPath, settings);
  }

  /**
   * Annotates every file under the snapshot's configs directory inside
   * {@code inputPath} and writes the results to the same relative locations
   * under {@code outputPath}.
   */
  private static void annotate(Path inputPath, Path outputPath, Settings settings)
      throws IOException {
    // Get annotated text for all files in inputPath, then write them under outputPath to the same
    // relative location.
    writeAllFiles(
        resolve(
            outputPath,
            relativize(
                inputPath,
                annotate(readAllFiles(inputPath.resolve(BfConsts.<API key>)), settings))));
  }

  /**
   * Maps each input file to its annotated text, in parallel. Files whose
   * annotation fails (annotateText returns null) are silently dropped from
   * the result.
   */
  @Nonnull
  private static Map<Path, String> annotate(Map<Path, String> inputData, Settings settings) {
    // For each (path, text) in inputData, return an entry (path, annotated text).
    // ConcurrentHashMap because entries are inserted from parallelStream workers.
    Map<Path, String> outputData = new ConcurrentHashMap<>(inputData.size());
    inputData.entrySet().parallelStream()
        .forEach(
            inputDataEntry -> {
              Path inputFile = inputDataEntry.getKey();
              String annotatedText = annotateText(inputFile, inputDataEntry.getValue(), settings);
              if (annotatedText != null) {
                outputData.put(inputFile, annotatedText);
              }
            });
    return ImmutableMap.copyOf(outputData);
  }

  /** Return annotated input text, or {@code null} if there is an error.
*/ @Nullable private static String annotateText(Path inputFile, String inputText, Settings settings) { LOGGER.debug("Preprocessing: {}", inputFile); // preprocess the input text String preprocessedText; try { preprocessedText = Preprocessor.preprocess( settings, inputText, inputFile, new Warnings(false, false, false)); } catch (IOException e) { LOGGER.warn("Skipping {} because of preprocessing error: {}", inputText, e); return null; } LOGGER.debug("Parsing: {}", inputFile); // parse the preprocessed text <API key> parseResult = new <API key>( settings, new NetworkSnapshot(new NetworkId("dummyNetwork"), new SnapshotId("dummySnapshot")), ImmutableMap.of(inputFile.toString(), preprocessedText), new Warnings.Settings(true, true, true), ConfigurationFormat.UNKNOWN, ImmutableMultimap.of(), null) .call(); if (parseResult.getFileResults().values().stream() .anyMatch(result -> result.getParseStatus() == FAILED)) { LOGGER.error("Failed to parse: {}", inputFile); return null; } // annotate the preprocessed text based on warnings and silent syntax in parse result LOGGER.debug("Annotating: {}", inputFile); return <API key>( preprocessedText, parseResult.getFileResults().get(inputFile.toString()).getSilentSyntax(), parseResult.getFileResults().get(inputFile.toString()).getWarnings(), getCommentHeader(parseResult.<API key>())); } @Nonnull private static String <API key>( String inputText, <API key> silentSyntax, Warnings warnings, String commentHeader) { LinkedHashMultimap<Integer, SilentSyntaxElem> silentSyntaxByLine = LinkedHashMultimap.create(); silentSyntax.getElements().forEach(elem -> silentSyntaxByLine.put(elem.getLine(), elem)); LinkedHashMultimap<Integer, ParseWarning> parseWarningsByLine = LinkedHashMultimap.create(); warnings .getParseWarnings() .forEach(parseWarning -> parseWarningsByLine.put(parseWarning.getLine(), parseWarning)); StringBuilder sb = new StringBuilder(); String[] lines = inputText.split("\n", -1); for (int i = 0; i < lines.length; i++) { // Silent 
syntax and warning line indices start at 1. // Annotate silent syntax for this line. silentSyntaxByLine.get(i + 1).stream() .map(elem -> printElem(commentHeader, elem)) .forEach(sb::append); // Annotate all warnings for this line. parseWarningsByLine.get(i + 1).stream() .map(pw -> printParseWarning(commentHeader, pw)) .forEach(sb::append); sb.append(lines[i]).append('\n'); } return sb.toString(); } @VisibleForTesting @Nonnull static String printElem(String commentHeader, SilentSyntaxElem silentSyntaxElem) { // TODO: optional extra debug information return String.format( "%s SILENTLY IGNORED: %s\n", commentHeader, silentSyntaxElem.getText().trim()); } @VisibleForTesting static @Nonnull String printParseWarning(String commentHeader, ParseWarning parseWarning) { String comment = parseWarning.getComment(); switch (comment) { case "This syntax is unrecognized": return String.format("%s UNRECOGNIZED SYNTAX: %s\n", commentHeader, parseWarning.getText()); case "This feature is not currently supported": return String.format( "%s PARTIALLY UNSUPPORTED: %s\n", commentHeader, parseWarning.getText()); default: return String.format( "%s WARNING: %s: %s\n", commentHeader, comment, parseWarning.getText()); } } private static final Logger LOGGER = LogManager.getLogger(Annotate.class); @Nonnull private static String getCommentHeader(ConfigurationFormat format) { switch (format) { case F5: case F5_BIGIP_STRUCTURED: case FLAT_JUNIPER: case FLAT_VYOS: case FORTIOS: case IPTABLES: case JUNIPER: case JUNIPER_SWITCH: case PALO_ALTO: case PALO_ALTO_NESTED: case VYOS: return " default: return "!"; } } }
"use strict"; // An iterable WeakSet implementation inspired by the iterable WeakMap example code in the WeakRefs specification: // https://github.com/tc39/proposal-weakrefs#iterable-weakmaps module.exports = class IterableWeakSet { constructor() { this._refSet = new Set(); this._refMap = new WeakMap(); this.<API key> = new <API key>(({ ref, set }) => set.delete(ref)); } add(value) { if (!this._refMap.has(value)) { const ref = new WeakRef(value); this._refMap.set(value, ref); this._refSet.add(ref); this.<API key>.register(value, { ref, set: this._refSet }, ref); } return this; } delete(value) { const ref = this._refMap.get(value); if (!ref) { return false; } this._refMap.delete(value); this._refSet.delete(ref); this.<API key>.unregister(ref); return true; } has(value) { return this._refMap.has(value); } * [Symbol.iterator]() { for (const ref of this._refSet) { const value = ref.deref(); if (value === undefined) { continue; } yield value; } } };
let Manager = require('../../structures/manager'); class UtilityManager extends Manager { } module.exports = { class: UtilityManager, deps: [], async: false, shortcode: 'utm' };
package superpkg;

/**
 * Empty public class in {@code superpkg}; declares no members of its own.
 *
 * <p>NOTE(review): presumably a fixture/marker type (e.g. for resolve or
 * refactoring tests) — confirm against its usages.
 */
public class TargetB {
}
package com.jetbrains.env.python.testing; import com.intellij.execution.testframework.AbstractTestProxy; import com.intellij.execution.testframework.sm.runner.ui.MockPrinter; import com.intellij.openapi.vfs.VirtualFile; import com.jetbrains.env.EnvTestTagsRequired; import com.jetbrains.env.PyEnvTestCase; import com.jetbrains.env.<API key>; import com.jetbrains.env.ut.<API key>; import com.jetbrains.python.sdkTools.SdkCreationType; import com.jetbrains.python.testing.<API key>; import com.jetbrains.python.testing.pytest.<API key>; import com.jetbrains.python.testing.pytest.<API key>; import org.hamcrest.Matchers; import org.jetbrains.annotations.NotNull; import org.junit.Assert; import org.junit.Test; import java.io.IOException; import java.util.List; import static org.junit.Assert.assertEquals; /** * User : catherine */ @EnvTestTagsRequired(tags = "pytest") public class PythonPyTestingTest extends PyEnvTestCase { @Test public void <API key>() throws Exception { runPythonTest( new <API key>(<API key>.class, <API key>.PY_TEST_NAME)); } // Import error should lead to test failure @Test public void <API key>() { runPythonTest(new <API key><<API key>>("/testRunner/env/pytest/failTest", SdkCreationType.EMPTY_SDK) { @NotNull @Override protected <API key> createProcessRunner() throws Exception { return new <API key>(".", 0); } @Override protected void checkTestResults(@NotNull <API key> runner, @NotNull String stdout, @NotNull String stderr, @NotNull String all) { Assert.assertThat("Import error is not marked as error", runner.getFailedTestsCount(), Matchers.<API key>(1)); } }); } /** * Ensure project dir is used as curdir even if not set explicitly */ @Test public void testCurrentDir() throws Exception { runPythonTest(new <API key><<API key>>("/testRunner/env/pytest/", SdkCreationType.EMPTY_SDK) { @NotNull @Override protected <API key> createProcessRunner() throws Exception { return new <API key>("", 0) { @Override protected void <API key>(@NotNull final <API key> 
configuration) throws IOException { super.<API key>(configuration); configuration.setWorkingDirectory(null); final VirtualFile fullFilePath = myFixture.getTempDirFixture().getFile("dir_test.py"); assert fullFilePath != null : String.format("No dir_test.py in %s", myFixture.getTempDirFixture().getTempDirPath()); configuration.setTestToRun(fullFilePath.getPath()); } }; } @Override protected void checkTestResults(@NotNull final <API key> runner, @NotNull final String stdout, @NotNull final String stderr, @NotNull final String all) { Assert.assertThat("No directory found in output", stdout, Matchers.containsString(String.format("Directory %s", myFixture.getTempDirPath()))); } }); } @Test public void testPytestRunner() { runPythonTest(new <API key><<API key>>("/testRunner/env/pytest", SdkCreationType.EMPTY_SDK) { @NotNull @Override protected <API key> createProcessRunner() throws Exception { return new <API key>("test1.py", 0); } @Override protected void checkTestResults(@NotNull final <API key> runner, @NotNull final String stdout, @NotNull final String stderr, @NotNull final String all) { assertEquals(3, runner.getAllTestsCount()); assertEquals(3, runner.getPassedTestsCount()); runner.<API key>(); // This test has "sleep(1)", so duration should be >=1000 final AbstractTestProxy testForOneSecond = runner.findTestByName("testOne"); Assert.assertThat("Wrong duration", testForOneSecond.getDuration(), Matchers.<API key>(1000L)); } }); } @Test public void testPytestRunner2() { runPythonTest(new <API key><<API key>>("/testRunner/env/pytest", SdkCreationType.EMPTY_SDK) { @NotNull @Override protected <API key> createProcessRunner() throws Exception { return new <API key>("test2.py", 1); } @Override protected void checkTestResults(@NotNull final <API key> runner, @NotNull final String stdout, @NotNull final String stderr, @NotNull final String all) { if (runner.getCurrentRerunStep() > 0) { /** * We can't rerun one subtest (yield), so we rerun whole "test_even" */ 
assertEquals(stderr, 7, runner.getAllTestsCount()); assertEquals(stderr, 3, runner.getPassedTestsCount()); assertEquals(stderr, 4, runner.getFailedTestsCount()); return; } assertEquals(stderr, 9, runner.getAllTestsCount()); assertEquals(stderr, 5, runner.getPassedTestsCount()); assertEquals(stderr, 4, runner.getFailedTestsCount()); Assert .assertThat("No test stdout", MockPrinter.fillPrinter(runner.findTestByName("testOne")).getStdOut(), Matchers.startsWith("I am test1")); // Ensure test has stdout even it fails final AbstractTestProxy testFail = runner.findTestByName("testFail"); Assert.assertThat("No stdout for fail", MockPrinter.fillPrinter(testFail).getStdOut(), Matchers.startsWith("I will fail")); // This test has "sleep(1)", so duration should be >=1000 Assert.assertThat("Wrong duration", testFail.getDuration(), Matchers.<API key>(1000L)); } }); } /** * Ensures file references are highlighted for pytest traceback */ @Test public void <API key>() { final String fileName = "reference_tests.py"; runPythonTest(new <API key><<API key>>("/testRunner/env/unit", SdkCreationType.EMPTY_SDK) { @NotNull @Override protected <API key> createProcessRunner() throws Exception { return new <API key>(fileName, 0); } @Override protected void checkTestResults(@NotNull final <API key> runner, @NotNull final String stdout, @NotNull final String stderr, @NotNull final String all) { final List<String> fileNames = runner.<API key>().second; Assert.assertThat("No lines highlighted", fileNames, Matchers.not(Matchers.empty())); // PyTest highlights file:line_number Assert.assertTrue("Assert fail not marked", fileNames.contains("reference_tests.py:7")); Assert.assertTrue("Failed test not marked", fileNames.contains("reference_tests.py:12")); Assert.assertTrue("Failed test not marked", fileNames.contains("reference_tests.py")); } }); } }
<html> <head></head> <body> <p>[One</p> <p>Two</p> <p>Three</p> <p>Four</p> <ol> <li><p>Five]</p></li> </ol> </body> </html>
#!/bin/sh
# Bump the incode-parent (isis) version referenced by the parent pom.xml and
# commit the change.
#
# Usage: <script> <version>

VERSION="$1"
if [ ! "$VERSION" ]; then
    # Quote "$0" so a script path containing spaces still prints correctly.
    echo "usage: $(basename "$0") [version]"
    exit 1
fi

# edit parent pom.xml
echo "editing parent pom.xml"
# The bracketed form -DparentVersion=[X] pins the parent to exactly X.
echo mvn versions:update-parent "-DparentVersion=[${VERSION}]"
# Bug fix: previously the commit ran even when mvn failed, committing an
# unmodified (or half-modified) pom. Abort instead of committing on failure.
mvn versions:update-parent "-DparentVersion=[${VERSION}]" || {
    echo "mvn versions:update-parent failed; not committing" >&2
    exit 1
}

echo "Committing changes"
git commit -am "bumping incode-parent (isis) version to $VERSION"
package net.redhogs.cronparser;

/**
 * The supported text casing styles: {@code Title}, {@code Sentence} and
 * {@code LowerCase}. The constant names describe the casing each one denotes;
 * how a style is applied is decided by the code that consumes this enum.
 *
 * @author grhodes
 * @since 10 Dec 2012 10:59:32
 */
public enum CasingTypeEnum {
    Title, Sentence, LowerCase;
}
//hu_hu605@163.com #include <stdlib.h> #include <stdio.h> #include <math.h> #include <iostream.h> #include <iomanip.h> #include <time.h> #include <windows.h> #define IM1 2147483563 #define IM2 2147483399 #define AM (1.0/IM1) #define IMM1 (IM1-1) #define IA1 40014 #define IA2 40692 #define IQ1 53668 #define IQ2 52774 #define IR1 12211 #define IR2 3791 #define NTAB 32 #define NDIV (1+IMM1/NTAB) #define EPS 1.2e-7 #define RNMX (1.0-EPS) #define zhizhenjuli 0.005 #define PI 3.14159265358 #define T0 100000 #define zhongqunshu1 200 #define zuobianjie -2000 #define youbianjie 2000 unsigned int seed=0; void mysrand(long int i) { seed = -i; } long a[1]; //double hundun; //double c=4; struct individual { unsigned *chrom; double geti; double shiyingdu; double fitness; }; individual *zuiyougeti; int zhongqunshu; individual *nowpop; individual *newpop; double sumfitness; double sumshiyingdu; double maxfitness; double avefitness; double maxshiyingdu; double avgshiyingdu; float pc; float pm; int lchrom; int maxgen; int gen; int flipc(double ,double ); int flipm(double ); int rnd(int low,int high); void initialize(); void preselectfitness(); void generation(); double suijibianli(); int fuzhi(float ); void crossover(individual ,individual ,individual &,individual &); void bianyi(individual &); void mubiaohanshu(individual &); void chidubianhuan(individual &); double ran1(long *); void bianma(double bianliang,unsigned *p); double yima(unsigned *p); void <API key>(); void jingyingbaoliu(); void glp(int n,int s,int *,int (*)[1],float (*)[1]); BOOL Exist(int Val, int Num, int *Array); int cmpfitness(const void *p1,const void *p2) { float i=((individual *)p1)->shiyingdu; float j=((individual *)p2)->shiyingdu; return i<j ? -1:(i==j ? 
0:1); } void main() { initialize(); cout<<zuiyougeti->geti<<" "<<zuiyougeti->shiyingdu<<endl; for(gen=1;gen<maxgen;gen++) { generation(); } jingyingbaoliu(); cout<<setiosflags(ios::fixed)<<setprecision(6)<<zuiyougeti->geti<<" "<<setiosflags(ios::fixed)<<setprecision(6)<<(zuiyougeti->shiyingdu)<<endl; delete [] newpop; delete [] nowpop; delete [] zuiyougeti; system("pause"); } void initialize() { int q[zhongqunshu1][1],s=1; float xx[zhongqunshu1][1]; int h[1]={1}; zuiyougeti=new individual; zhongqunshu=200; nowpop=new individual[zhongqunshu1]; newpop=new individual[zhongqunshu1]; maxgen=150; gen=0; lchrom=22; mysrand(time(0)); a[0]=seed; zuiyougeti->geti=0; zuiyougeti->fitness=0; zuiyougeti->shiyingdu=0; glp(zhongqunshu,s,h,q,xx); // for(int j=0;j<s;j++) // nowpop[i].geti=zuobianjie+(<API key>)*xx[i][j]; for(int i=0;i<zhongqunshu1;i++) { nowpop[i].geti=zuobianjie+(youbianjie-(zuobianjie))*ran1(a); } //nowpop[0].geti=999;////////////////////////// <API key>(); jingyingbaoliu(); <API key>(); } void jingyingbaoliu() { individual *zuiyougetiguodu; zuiyougetiguodu=new individual[zhongqunshu1]; for(int i=0;i<zhongqunshu;i++) zuiyougetiguodu[i]=nowpop[i]; qsort(zuiyougetiguodu,zhongqunshu1,sizeof(individual),&cmpfitness); //system("pause"); if(zuiyougetiguodu[zhongqunshu-1].shiyingdu>zuiyougeti->shiyingdu) { *zuiyougeti=zuiyougetiguodu[zhongqunshu1-1]; } else nowpop[rnd(0,(zhongqunshu1-1))]=*zuiyougeti; delete [] zuiyougetiguodu; } void <API key>() { for(int i=0;i<zhongqunshu;i++) mubiaohanshu(nowpop[i]); for(i=0;i<zhongqunshu;i++) chidubianhuan(nowpop[i]); preselectfitness(); } void mubiaohanshu(individual &bianliang) { bianliang.shiyingdu=(bianliang.geti*cos(bianliang.geti)+2.0); } void chidubianhuan(individual &bianliang) { double T; T=T0*(pow(0.99,(gen+1-1))); double sum=0; for(int j=0;j<zhongqunshu;j++) sum+=exp(nowpop[j].shiyingdu/T); bianliang.fitness=exp(bianliang.shiyingdu/T)/sum; } void preselectfitness() { int j; sumfitness=0; for(j=0;j<zhongqunshu;j++) 
sumfitness+=nowpop[j].fitness; individual *guodu; guodu=new individual[zhongqunshu1]; for(j=0;j<zhongqunshu;j++) guodu[j]=nowpop[j]; qsort(guodu,zhongqunshu1,sizeof(individual),&cmpfitness); maxfitness=guodu[zhongqunshu1-1].fitness; avefitness=sumfitness/zhongqunshu1; delete [] guodu; } void generation() { individual fuqin1,fuqin2,*pipeiguodu,*pipeichi; int *peiduishuzu; pipeiguodu=new individual[zhongqunshu1]; pipeichi=new individual[zhongqunshu1]; peiduishuzu=new int[zhongqunshu1]; int member1,member2,j=0,fuzhijishu=0,i=0,temp=0,tt=0; float zhizhen; for(zhizhen=suijibianli();zhizhen<1;(zhizhen=zhizhen+zhizhenjuli)) { pipeichi[fuzhijishu]=nowpop[fuzhi(zhizhen)]; fuzhijishu++; } for(i=0;i<zhongqunshu1;i++) { peiduishuzu[i]=-1; } for (i=0; i<zhongqunshu1; i++) { temp =rnd(0,zhongqunshu1-1); while(Exist(temp, i, peiduishuzu)) { temp =rnd(0,zhongqunshu1-1); } *(peiduishuzu+i) = temp; } for(i=0;i<zhongqunshu1-1;i=i+2) { fuqin1=pipeichi[peiduishuzu[i]]; fuqin2=pipeichi[peiduishuzu[i+1]]; crossover(fuqin1,fuqin2,newpop[i],newpop[i+1]); } for(j=0;j<zhongqunshu1;j++) { //if(newpop[j].geti<-1000) nowpop[j].geti=newpop[j].geti; } <API key>(); for(j=0;j<zhongqunshu;j++) { bianyi(nowpop[j]); } <API key>(); jingyingbaoliu(); <API key>(); delete [] peiduishuzu; delete [] pipeichi; delete [] pipeiguodu; } void crossover(individual parent1,individual parent2,individual &child1,individual &child2) { int j; unsigned *panduan; panduan=new unsigned[lchrom]; parent1.chrom=new unsigned[lchrom]; parent2.chrom=new unsigned[lchrom]; child1.chrom=new unsigned[lchrom]; child2.chrom=new unsigned[lchrom]; //cout<<"jiaocha"<<endl;/////////////////////// bianma(parent1.geti,parent1.chrom); bianma(parent2.geti,parent2.chrom); if(flipc(parent1.fitness,parent2.fitness)) { for(j=0;j<lchrom;j++) panduan[j]=rnd(0,1); //for(j=0;j<lchrom;j++)//////////////// // cout<<panduan[j];///////////// // cout<<endl;//////////////// // system("pause");//////////////// for(j=0;j<lchrom;j++) { if(panduan[j]==1) 
child1.chrom[j]=parent1.chrom[j]; else child1.chrom[j]=parent2.chrom[j]; } for(j=0;j<lchrom;j++) { if(panduan[j]==0) child2.chrom[j]=parent1.chrom[j]; else child2.chrom[j]=parent2.chrom[j]; } //for(j=0;j<lchrom;j++)//////////////// // cout<<child1.chrom[j];///////////// //cout<<endl;//////////////// // system("pause");//////////////// child1.geti=yima(child1.chrom); child2.geti=yima(child2.chrom); delete [] child2.chrom; delete [] child1.chrom; delete [] parent2.chrom; delete [] parent1.chrom; delete [] panduan; } else { for(j=0;j<lchrom;j++) { child1.chrom[j]=parent1.chrom[j]; child2.chrom[j]=parent2.chrom[j]; } child1.geti=yima(child1.chrom); child2.geti=yima(child2.chrom); delete [] child2.chrom; delete [] child1.chrom; delete [] parent2.chrom; delete [] parent1.chrom; delete [] panduan; } } void bianyi(individual &child) { child.chrom=new unsigned[lchrom]; bianma(child.geti,child.chrom); for(int i=0;i<lchrom;i++) if(flipm(child.fitness)) { if(child.chrom[i]=0) child.chrom[i]=1; else child.chrom[i]=0; } child.geti=yima(child.chrom); delete [] child.chrom; } void bianma(double bianliang,unsigned *p) { unsigned *q; unsigned *gray; q=new unsigned[lchrom]; gray=new unsigned[lchrom]; int x=0; int i=0,j=0; if(bianliang<zuobianjie) { cout<<"bianliang:"<<bianliang<<endl; system("pause"); } //cout<<youbianjie-(zuobianjie)<<endl; //system("pause"); x=(bianliang-(zuobianjie))*((pow(2,lchrom)-1)/(youbianjie-(zuobianjie))); //cout<<x<<endl;/////////// if(x<0) system("pause"); for(i=0;i<lchrom;i++) { q[i]=0; p[i]=0; } i=0; while (x!=0&&(i!=lchrom)) { q[i]=(unsigned)(x%2); x=x/2; i++; } // for(i=0;i<lchrom;i++)////////////////// // cout<<q[i];/////////////// // cout<<endl;/////////// int w=lchrom-1; if(q[w]!=0&&q[w]!=1) system("pause"); for(j=0;j<lchrom&&w>0;j++) { p[j]=q[w]; w } //cout<<"yuanma"<<endl; //for(j=0;j<lchrom;j++)/////////// // cout<<p[j];//////// //cout<<endl;//////////////////// gray[0]=p[0]; for(j=1;j<lchrom;j++) { if(p[j-1]==p[j]) gray[j]=0; else 
if(p[j-1]!=p[j]) gray[j]=1; } for(j=0;j<lchrom;j++) p[j]=gray[j]; //cout<<"geleima"<<endl; //for(j=0;j<lchrom;j++)/////////// // cout<<p[j];//////// //cout<<endl;//////////////////// //system("pause");/////////// delete [] gray; delete [] q; } double yima(unsigned *p) { int i=0; // for(i=0;i<lchrom;i++)///////// // cout<<p[i];////// // cout<<endl;///////// // system("pause");////////// int x=0; unsigned *q; q=new unsigned[lchrom]; q[0]=p[0]; // cout<<q[0]<<endl;////////////////// // system("pause");////////// for(int j=1;j<lchrom;j++) { if(q[j-1]==p[j]) q[j]=0; else if(q[j-1]!=p[j]) q[j]=1; } // for(i=0;i<lchrom;i++)////// // cout<<q[i];////////// // if(q[i]!=0&&q[i]!=1) // cout<<q[i]; // system("pause"); // cout<<endl;//////// // system("pause");/////////////////// for(i=0;i<lchrom;i++) x=x+q[i]*pow(2,(lchrom-i-1)); if(x<0) { cout<<"ÒëÂë³ö´í1"<<endl; system("pause"); } //cout<<"x:"<<x<<endl; double bianliang; //cout<<pow(2,22)<<endl; //cout<<2000*x<<endl; //cout<<(x*(2000/(pow(2,22)-1)))<<endl; bianliang=(x*((youbianjie-(zuobianjie))/(pow(2,lchrom)-1)))+zuobianjie; if(bianliang<zuobianjie) { cout<<"ÒëÂë³ö´í2"<<endl; system("pause"); } delete [] q; return bianliang; } double ran1(long *idum) { int j; long k; static long idum2=123456789; static long iy=0; static long iv[NTAB]; float temp; if (*idum <= 0) { if (-(*idum) < 1) *idum=1; else *idum = -(*idum); idum2=(*idum); for (j=NTAB+7;j>=0;j { k=(*idum)/IQ1; *idum=IA1*(*idum-k*IQ1)-k*IR1; if (*idum < 0) *idum += IM1; if (j < NTAB) iv[j] = *idum; } iy=iv[0]; } k=(*idum)/IQ1; *idum=IA1*(*idum-k*IQ1)-k*IR1; if (*idum < 0) *idum += IM1; k=idum2/IQ2; idum2=IA2*(idum2-k*IQ2)-k*IR2; if (idum2 < 0) idum2 += IM2; j=iy/NDIV; iy=iv[j]-idum2; iv[j] = *idum; if (iy < 1) iy += IMM1; if ((temp=AM*iy) > RNMX) return RNMX; else return temp; } double suijibianli() { double i=ran1(a); while(i>zhizhenjuli) { i=ran1(a); } //cout<<i<<endl;////////////// return i; } int fuzhi(float p) { int i; double sum=0; if(sumfitness!=0) { 
for(i=0;(sum<p)&&(i<zhongqunshu);i++) sum+=nowpop[i].fitness/sumfitness; } else i=rnd(1,zhongqunshu1); return(i-1); } int rnd(int low, int high) { int i; if(low >= high) i = low; else { i =(int)((ran1(a) * (high - low + 1)) + low); if(i > high) i = high; } return(i); } int flipc(double p,double q) { double pc1=0.9,pc2=0.6; if((p-q)>0) { if(p>=avefitness) { pc=pc1-(pc1-pc2)*(p-avefitness)/(<API key>); } else pc=pc1; } else { if(q>=avefitness) { pc=pc1-(pc1-pc2)*(q-avefitness)/(<API key>); } else pc=pc1; } if(ran1(a)<=pc) return(1); else return(0); } int flipm(double p) { double pm1=0.001,pm2=0.0001; if(p>=avefitness) { pm=(pm1-(pm1-pm2)*(maxfitness-p)/(<API key>)); } else pm=pm1; if(ran1(a)<=pm) return(1); else return(0); } void glp(int n,int s,int *h,int (*q)[1],float (*xx)[1])//glp { int i=0,j=0; for(i=0;i<n;i++) { for(j=0;j<s;j++) { *(*(q+i)+j)=((i+1)*(*(h+j)))%n; } } i=n-1; for(j=0;j<s;j++) { *(*(q+i)+j)=n; } for(i=0;i<n;i++) { for(j=0;j<s;j++) { *(*(xx+i)+j)=(float)(2*(*(*(q+i)+j))-1)/(2*n); } } } BOOL Exist(int Val, int Num, int *Array) { BOOL FLAG = FALSE; int i; for (i=0; i<Num; i++) if (Val == *(Array + i)) { FLAG = TRUE; break; } return FLAG; }
# Example script: fit an ordinal logistic regression with H2O's GLM on a
# hosted demo dataset.
# NOTE(review): the estimator class name and parts of the dataset URL were
# redacted to "<API key>" in this copy; the family="ordinal" argument suggests
# this is H2O's generalized linear estimator — confirm against the original.
import h2o
from h2o.estimators.glm import <API key>

# Start (or connect to) an H2O cluster; required before any frame operations.
h2o.init()

# Load the ordinal-logit demo dataset from H2O's public S3 bucket.
h2o_df = h2o.import_file("http://<API key>.s3.amazonaws.com/bigdata/laptop/glm_ordinal_logit/<API key>.csv")
# The response column C11 must be categorical for an ordinal family fit.
h2o_df['C11'] = h2o_df['C11'].asfactor()

# alpha=1.0 gives a pure L1 (lasso) penalty; lambda_ is near zero so the
# penalty is effectively off. Tight epsilons plus a high iteration cap let
# the solver run to convergence.
ordinal_fit = <API key>(family = "ordinal", alpha = 1.0, lambda_=0.000000001, obj_reg = 0.00001, max_iterations=1000, beta_epsilon=1e-8, objective_epsilon=1e-10)
# Train using the first ten columns (indices 0..9) as predictors.
ordinal_fit.train(x=list(range(0,10)), y="C11", training_frame=h2o_df)
"""Test the Kuler Sky config flow."""
from unittest.mock import MagicMock, patch

import pykulersky

from homeassistant import config_entries, setup
from homeassistant.components.kulersky.config_flow import DOMAIN

# NOTE(review): several identifiers in this copy were redacted to "<API key>"
# (the setup helper, the component string, two test names, and the coroutine
# awaited to drain pending work) — confirm against the original file.


async def test_flow_success(hass):
    """Test we get the form."""
    await setup.<API key>(hass, "<API key>", {})
    # Start a user-initiated config flow; the first step renders an empty form.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] is None

    # Fake exactly one discovered light so the flow can complete.
    light = MagicMock(spec=pykulersky.Light)
    light.address = "AA:BB:CC:11:22:33"
    light.name = "Bedroom"
    with patch(
        "homeassistant.components.kulersky.config_flow.pykulersky.discover",
        return_value=[light],
    ), patch(
        "homeassistant.components.kulersky.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {},
        )
        await hass.<API key>()

    # Confirming the form creates one entry with a fixed title and no data.
    assert result2["type"] == "create_entry"
    assert result2["title"] == "Kuler Sky"
    assert result2["data"] == {}

    assert len(mock_setup_entry.mock_calls) == 1


async def <API key>(hass):
    """Test we get the form."""
    await setup.<API key>(hass, "<API key>", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] is None

    # Discovery returning no lights: the flow must abort rather than create
    # an entry, and the integration must never be set up.
    with patch(
        "homeassistant.components.kulersky.config_flow.pykulersky.discover",
        return_value=[],
    ), patch(
        "homeassistant.components.kulersky.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {},
        )

    assert result2["type"] == "abort"
    assert result2["reason"] == "no_devices_found"

    await hass.<API key>()
    assert len(mock_setup_entry.mock_calls) == 0


async def <API key>(hass):
    """Test we get the form."""
    await setup.<API key>(hass, "<API key>", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] is None

    # Discovery raising the library's own exception is handled the same as
    # finding nothing: abort with "no_devices_found".
    with patch(
        "homeassistant.components.kulersky.config_flow.pykulersky.discover",
        side_effect=pykulersky.PykulerskyException("TEST"),
    ), patch(
        "homeassistant.components.kulersky.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {},
        )

    assert result2["type"] == "abort"
    assert result2["reason"] == "no_devices_found"

    await hass.<API key>()
    assert len(mock_setup_entry.mock_calls) == 0
using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using Akka.Actor; using Akka.Configuration; using Akka.Dispatch; using Akka.Event; namespace Akka.IO { public class Tcp : ExtensionIdProvider<TcpExt> { public static readonly Tcp Instance = new Tcp(); public static IActorRef Manager(ActorSystem system) { return Instance.Apply(system).Manager; } public override TcpExt CreateExtension(ExtendedActorSystem system) { return new TcpExt(system); } public class Message : <API key> { } // COMMANDS public class Command : Message, SelectionHandler.IHasFailureMessage { private readonly CommandFailed _failureMessage; public Command() { _failureMessage = new CommandFailed(this); } public CommandFailed FailureMessage { get { return _failureMessage; } } object SelectionHandler.IHasFailureMessage.FailureMessage { get { return _failureMessage; } } } <summary> The Connect message is sent to the TCP manager actor, which is obtained via <see cref="TcpExt.Manager" />. Either the manager replies with a <see cref="CommandFailed" /> or the actor handling the new connection replies with a <see cref="Connected" /> message. </summary> public class Connect : Command { public Connect(EndPoint remoteAddress, EndPoint localAddress = null, IEnumerable<Inet.SocketOption> options = null, TimeSpan? timeout = null, bool pullMode = false) { RemoteAddress = remoteAddress; LocalAddress = localAddress; Options = options ?? Enumerable.Empty<Inet.SocketOption>(); Timeout = timeout; PullMode = pullMode; } public EndPoint RemoteAddress { get; private set; } public EndPoint LocalAddress { get; private set; } public IEnumerable<Inet.SocketOption> Options { get; private set; } public TimeSpan? Timeout { get; private set; } public bool PullMode { get; private set; } } <summary> The Bind message is send to the TCP manager actor, which is obtained via <see cref="TcpExt.Manager" /> in order to bind to a listening socket. 
The manager replies either with a <see cref="CommandFailed" /> or the actor handling the listen socket replies with a <see cref="Bound" /> message. If the local port is set to 0 in the Bind message, then the <see cref="Bound" /> message should be inspected to find the actual port which was bound to. </summary> public class Bind : Command { public Bind(IActorRef handler, EndPoint localAddress, int backlog = 100, IEnumerable<Inet.SocketOption> options = null, bool pullMode = false) { Handler = handler; LocalAddress = localAddress; Backlog = backlog; Options = options ?? Enumerable.Empty<Inet.SocketOption>(); PullMode = pullMode; } public IActorRef Handler { get; set; } public EndPoint LocalAddress { get; set; } public int Backlog { get; set; } public IEnumerable<Inet.SocketOption> Options { get; set; } public bool PullMode { get; set; } } <summary> This message must be sent to a TCP connection actor after receiving the <see cref="Connected" /> message. The connection will not read any data from the socket until this message is received, because this message defines the actor which will receive all inbound data. </summary> public class Register : Command { public Register(IActorRef handler, bool <API key> = false, bool useResumeWriting = true) { Handler = handler; <API key> = <API key>; UseResumeWriting = useResumeWriting; } public IActorRef Handler { get; private set; } public bool <API key> { get; private set; } public bool UseResumeWriting { get; private set; } } <summary> In order to close down a listening socket, send this message to that socket’s actor (that is the actor which previously had sent the <see cref="Bound" /> message). The listener socket actor will reply with a <see cref="Unbound" /> message. </summary> public class Unbind : Command { public static readonly Unbind Instance = new Unbind(); private Unbind() { } } <summary> Common interface for all commands which aim to close down an open connection. 
</summary> public abstract class CloseCommand : Command, <API key> { public abstract ConnectionClosed Event { get; } } <summary> A normal close operation will first flush pending writes and then close the socket. The sender of this command and the registered handler for incoming data will both be notified once the socket is closed using a <see cref="Closed" /> message. </summary> public class Close : CloseCommand { public static readonly Close Instance = new Close(); private Close() { } public override ConnectionClosed Event { get { return Closed.Instance; } } } <summary> A confirmed close operation will flush pending writes and half-close the connection, waiting for the peer to close the other half. The sender of this command and the registered handler for incoming data will both be notified once the socket is closed using a <see cref="ConfirmedClosed" /> message. </summary> public class ConfirmedClose : CloseCommand { public static readonly ConfirmedClose Instance = new ConfirmedClose(); private ConfirmedClose() { } public override ConnectionClosed Event { get { return ConfirmedClosed.Instance; } } } <summary> An abort operation will not flush pending writes and will issue a TCP ABORT command to the O/S kernel which should result in a TCP_RST packet being sent to the peer. The sender of this command and the registered handler for incoming data will both be notified once the socket is closed using a <see cref="Aborted" /> message. </summary> public class Abort : CloseCommand { public static readonly Abort Instance = new Abort(); private Abort() { } public override ConnectionClosed Event { get { return Aborted.Instance; } } } <summary> Each <see cref="WriteCommand" /> can optionally request a positive acknowledgment to be sent to the commanding actor. If such notification is not desired the <see cref="WriteCommand#ack" /> must be set to an instance of this class. 
The token contained within can be used to recognize which write failed when receiving a <see cref="CommandFailed" /> message. </summary> public class NoAck : Event { public static readonly NoAck Instance = new NoAck(null); public NoAck(object token) { Token = token; } public object Token { get; private set; } } public abstract class WriteCommand : Command { public CompoundWrite Prepend(SimpleWriteCommand other) { return new CompoundWrite(other, this); } public WriteCommand Prepend(IEnumerable<WriteCommand> writes) { return writes.Reverse().Aggregate(this, (b, a) => { var simple = a as SimpleWriteCommand; if (simple != null) return b.Prepend(simple); var compound = a as CompoundWrite; if (compound != null) return b.Prepend(compound); throw new ArgumentException("The supplied WriteCommand is invalid. Only SimpleWriteCommand and CompoundWrite WriteCommands are supported."); }); } public static WriteCommand Create(IEnumerable<WriteCommand> writes) { return Write.Empty.Prepend(writes); } public static WriteCommand Create(params WriteCommand[] writes) { return Create((IEnumerable<WriteCommand>) writes); } } public abstract class SimpleWriteCommand : WriteCommand { public abstract Event Ack { get; } public bool WantsAck { get { return !(Ack is NoAck); } } public CompoundWrite Append(WriteCommand that) { return that.Prepend(this); } } <summary> Write data to the TCP connection. If no ack is needed use the special `NoAck` object. The connection actor will reply with a <see cref="CommandFailed" /> message if the write could not be enqueued. If <see cref="WriteCommand#wantsAck" /> returns true, the connection actor will reply with the supplied <see cref="WriteCommand#ack" /> token once the write has been successfully enqueued to the O/S kernel. <b>Note that this does not in any way guarantee that the data will be or have been sent!</b> Unfortunately there is no way to determine whether a particular write has been sent by the O/S. 
</summary> public class Write : SimpleWriteCommand { private readonly Event _ack; public ByteString Data { get; private set; } public override Event Ack { get { return _ack; } } private Write(ByteString data, Event ack) { _ack = ack; Data = data; } public static Write Create(ByteString data) { return data.IsEmpty ? Empty : new Write(data, NoAck.Instance); } public static Write Create(ByteString data, Event ack) { return new Write(data, ack); } public static readonly Write Empty = new Write(ByteString.Empty, NoAck.Instance); } <summary> Write `count` bytes starting at `position` from file at `filePath` to the connection. The count must be &gt; 0. The connection actor will reply with a <see cref="CommandFailed"/> message if the write could not be enqueued. If <see cref="SimpleWriteCommand.WantsAck"/> returns true, the connection actor will reply with the supplied <see cref="SimpleWriteCommand.Ack"/> token once the write has been successfully enqueued to the O/S kernel. <b>Note that this does not in any way guarantee that the data will be or have been sent!</b> Unfortunately there is no way to determine whether a particular write has been sent by the O/S. </summary> public class WriteFile : SimpleWriteCommand { private readonly Event _ack; public WriteFile(string filePath, long position, long count, Event ack) { if (position < 0) throw new ArgumentException("WriteFile.position must be >= 0", nameof(position)); if (count <= 0) throw new ArgumentException("WriteFile.count must be > 0", nameof(count)); _ack = ack; FilePath = filePath; Position = position; Count = count; } public string FilePath { get; private set; } public long Position { get; private set; } public long Count { get; private set; } public override Event Ack { get { return _ack; } } } <summary> A write command which aggregates two other write commands. 
Using this construct you can chain a number of <see cref="Akka.IO.Tcp.Write" /> and/or <see cref="Akka.IO.Tcp.WriteFile" /> commands together in a way that allows them to be handled as a single write which gets written out to the network as quickly as possible. If the sub commands contain `ack` requests they will be honored as soon as the respective write has been written completely. </summary> public class CompoundWrite : WriteCommand, IEnumerable<SimpleWriteCommand> { private readonly SimpleWriteCommand _head; private readonly WriteCommand _tailCommand; public CompoundWrite(SimpleWriteCommand head, WriteCommand tailCommand) { _head = head; _tailCommand = tailCommand; } public IEnumerator<SimpleWriteCommand> GetEnumerator() { return Enumerable().GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } private IEnumerable<SimpleWriteCommand> Enumerable() { WriteCommand current = this; while (current != null) { var compound = current as CompoundWrite; if (compound != null) { current = compound.TailCommand; yield return compound.Head; } var simple = current as SimpleWriteCommand; if (simple != null) { current = null; yield return simple; } } } public SimpleWriteCommand Head { get { return _head; } } public WriteCommand TailCommand { get { return _tailCommand; } } } <summary> When `useResumeWriting` is in effect as was indicated in the <see cref="Register" /> message then this command needs to be sent to the connection actor in order to re-enable writing after a <see cref="CommandFailed" /> event. All <see cref="WriteCommand" /> processed by the connection actor between the first <see cref="CommandFailed" /> and subsequent reception of this message will also be rejected with <see cref="CommandFailed" />. </summary> public class ResumeWriting : Command { public static readonly ResumeWriting Instance = new ResumeWriting(); } <summary> Sending this command to the connection actor will disable reading from the TCP socket. 
/// TCP flow-control will then propagate backpressure to the sender side as buffers fill up on
/// either end. To re-enable reading send <see cref="ResumeReading" />.
/// </summary>
public class SuspendReading : Command
{
    // Stateless singleton. Field made readonly (consistent with Closed.Instance) so the
    // shared instance cannot be replaced by external code.
    public static readonly SuspendReading Instance = new SuspendReading();

    private SuspendReading()
    {
    }
}

/// <summary>
/// This command needs to be sent to the connection actor after a <see cref="SuspendReading" />
/// command in order to resume reading from the socket.
/// </summary>
public class ResumeReading : Command
{
    // Stateless singleton; readonly for the same reason as SuspendReading.Instance.
    public static readonly ResumeReading Instance = new ResumeReading();

    private ResumeReading()
    {
    }
}

/// <summary>
/// This message enables the accepting of the next connection if read throttling is enabled
/// for connection actors.
/// </summary>
public class ResumeAccepting : Command
{
    /// <summary>Size of the accept batch.</summary>
    public int BatchSize { get; private set; }

    public ResumeAccepting(int batchSize)
    {
        BatchSize = batchSize;
    }
}

// EVENTS

/// <summary>
/// Common interface for all events generated by the TCP layer actors.
/// </summary>
public class Event : Message
{
}

/// <summary>
/// Whenever data are read from a socket they will be transferred within this class to the
/// handler actor which was designated in the <see cref="Register" /> message.
/// </summary>
public sealed class Received : Event
{
    public Received(ByteString data)
    {
        Data = data;
    }

    /// <summary>The bytes read from the socket.</summary>
    public ByteString Data { get; private set; }
}

/// <summary>
/// The connection actor sends this message either to the sender of a <see cref="Connect" />
/// command (for outbound) or to the handler for incoming connections designated in the
/// <see cref="Bind" /> message. The connection is characterized by the `remoteAddress` and
/// `localAddress` TCP endpoints.
/// </summary>
public sealed class Connected : Event
{
    public Connected(EndPoint remoteAddress, EndPoint localAddress)
    {
        RemoteAddress = remoteAddress;
        LocalAddress = localAddress;
    }

    /// <summary>Endpoint of the remote peer.</summary>
    public EndPoint RemoteAddress { get; private set; }

    /// <summary>Endpoint of the local socket.</summary>
    public EndPoint LocalAddress { get; private set; }
}

/// <summary>
/// Whenever a command cannot be completed, the queried actor will reply with this message,
/// wrapping the original command which failed.
/// </summary>
public sealed class CommandFailed : Event
{
    public CommandFailed(Command cmd)
    {
        Cmd = cmd;
    }

    /// <summary>The command that could not be completed.</summary>
    public Command Cmd { get; private set; }
}

/// <summary>
/// When `useResumeWriting` is in effect as indicated in the <see cref="Register" /> message,
/// the <see cref="ResumeWriting" /> command will be acknowledged by this message type, upon
/// which it is safe to send at least one write. This means that all writes preceding the first
/// <see cref="CommandFailed" /> message have been enqueued to the O/S kernel at this point.
/// </summary>
public class WritingResumed : Event
{
    // Singleton; made readonly so the shared instance cannot be replaced by external code.
    public static readonly WritingResumed Instance = new WritingResumed();
}

/// <summary>
/// The sender of a <see cref="Bind" /> command will—in case of success—receive confirmation
/// in this form. If the bind address indicated a 0 port number, then the contained
/// `localAddress` can be used to find out which port was automatically assigned.
/// </summary>
public class Bound : Event
{
    /// <summary>The address the listening socket is actually bound to.</summary>
    public EndPoint LocalAddress { get; private set; }

    public Bound(EndPoint localAddress)
    {
        LocalAddress = localAddress;
    }
}

/// <summary>
/// The sender of an <see cref="Unbind" /> command will receive confirmation through this
/// message once the listening socket has been closed.
/// </summary>
public class Unbound : Event
{
    // Singleton; made readonly so the shared instance cannot be replaced by external code.
    public static readonly Unbound Instance = new Unbound();
}

/// <summary>
/// This is the common interface for all events which indicate that a connection has been
/// closed or half-closed.
/// </summary>
// NOTE(review): the interface name after "Event," below was redacted ("<API key>") in this
// copy of the file — restore the original interface name before compiling.
public class ConnectionClosed : Event, <API key>
{
    /// <summary>True when the connection was closed in response to an <see cref="Abort" /> command.</summary>
    public virtual bool IsAborted { get { return false; } }

    /// <summary>True when a <see cref="ConfirmedClose" /> completed on both sides.</summary>
    public virtual bool IsConfirmed { get { return false; } }

    /// <summary>True when the peer closed its writing half of the connection.</summary>
    public virtual bool IsPeerClosed { get { return false; } }

    /// <summary>True when the connection was closed due to an IO error.</summary>
    public virtual bool IsErrorClosed { get { return false; } }

    /// <summary>Error description, or null when the close was not error-induced.</summary>
    public virtual string GetErrorCause() { return null; }
}

/// <summary>
/// The connection has been closed normally in response to a <see cref="Close" /> command.
/// </summary>
public class Closed : ConnectionClosed
{
    public static readonly Closed Instance = new Closed();

    private Closed()
    {
    }
}

/// <summary>
/// The connection has been aborted in response to an <see cref="Abort" /> command.
/// </summary>
public class Aborted : ConnectionClosed
{
    // Made readonly for consistency with Closed.Instance; the singleton must not be replaceable.
    public static readonly Aborted Instance = new Aborted();

    private Aborted()
    {
    }

    public override bool IsAborted { get { return true; } }
}

/// <summary>
/// The connection has been half-closed by us and then half-close by the peer in response to
/// a <see cref="ConfirmedClose" /> command.
/// </summary>
public class ConfirmedClosed : ConnectionClosed
{
    // Made readonly for consistency with Closed.Instance.
    public static readonly ConfirmedClosed Instance = new ConfirmedClosed();

    private ConfirmedClosed()
    {
    }

    public override bool IsConfirmed { get { return true; } }
}

/// <summary>
/// The peer has closed its writing half of the connection.
/// </summary>
public class PeerClosed : ConnectionClosed
{
    // Made readonly for consistency with Closed.Instance.
    public static readonly PeerClosed Instance = new PeerClosed();

    private PeerClosed()
    {
    }

    public override bool IsPeerClosed { get { return true; } }
}

/// <summary>
/// The connection has been closed due to an IO error.
// ErrorClosed: terminal close event carrying an error cause string, then the end of the Tcp
// message container class, followed by TcpExt — the TCP IO extension that parses TcpSettings
// from the "akka.io.tcp" HOCON config section, owns the buffer pool, and spawns the "IO-TCP"
// manager actor — and the TcpMessage static-factory class.
// NOTE(review): several identifiers and config-key strings below were redacted ("<API key>")
// in this copy of the file, and the chunk was collapsed onto long lines — restore both from
// the upstream source before compiling.
</summary> public class ErrorClosed : ConnectionClosed { private readonly string _cause; public ErrorClosed(string cause) { _cause = cause; } public override bool IsErrorClosed { get { return true; } } public override string GetErrorCause() { return _cause; } } } public class TcpExt : IOExtension { private readonly TcpSettings _settings; private readonly IActorRef _manager; private readonly IBufferPool _bufferPool; private readonly MessageDispatcher _fileIoDispatcher; public class TcpSettings : <API key> { public TcpSettings(Config config) : base(config) { //TODO: requiring, check defaults NrOfSelectors = config.GetInt("nr-of-selectors", 1); BatchAcceptLimit = config.GetInt("batch-accept-limit"); DirectBufferSize = config.GetInt("direct-buffer-size"); <API key> = config.GetInt("<API key>"); RegisterTimeout = config.GetTimeSpan("register-timeout"); <API key> = config.GetString("<API key>") == "unlimited" ? int.MaxValue : config.GetInt("<API key>"); <API key> = config.GetString("<API key>"); FileIODispatcher = config.GetString("file-io-dispatcher"); TransferToLimit = config.GetString("<API key>") == "unlimited" ? int.MaxValue : config.GetInt("<API key>"); <API key> = MaxChannels == -1 ? -1 : Math.Max(MaxChannels/NrOfSelectors, 1); <API key> = config.GetInt("<API key>", 3); } public int NrOfSelectors { get; private set; } public int BatchAcceptLimit { get; private set; } public int DirectBufferSize { get; private set; } public int <API key> { get; private set; } public TimeSpan?
// Remaining TcpSettings properties; then the TcpExt constructor wires settings -> buffer pool
// -> manager actor (named "IO-TCP"). The file-IO dispatcher lookup is commented out here, so
// _fileIoDispatcher stays null in this revision. TcpMessage (below) exposes static factory
// methods mirroring the Tcp command types.
RegisterTimeout { get; private set; } public int <API key> { get; private set; } public string <API key> { get; private set; } public string FileIODispatcher { get; private set; } public int TransferToLimit { get; set; } public int <API key> { get; private set; } } public TcpExt(ExtendedActorSystem system) { _settings = new TcpSettings(system.Settings.Config.GetConfig("akka.io.tcp")); _bufferPool = new <API key>(_settings.DirectBufferSize, _settings.<API key>); //_fileIoDispatcher = system.Dispatchers.Lookup(_settings.FileIODispatcher); _manager = system.SystemActorOf( props: Props.Create(() => new TcpManager(this)) .WithDispatcher(_settings.<API key>) .WithDeploy(Deploy.Local), name: "IO-TCP"); } public override IActorRef Manager { get { return _manager; } } public IActorRef GetManager() { return _manager; } public TcpSettings Settings { get { return _settings; } } internal IBufferPool BufferPool { get { return _bufferPool; } } internal MessageDispatcher FileIoDispatcher { get { return _fileIoDispatcher; } } } public class TcpMessage { public static Tcp.Command Connect(EndPoint remoteAddress, EndPoint localAddress, IEnumerable<Inet.SocketOption> options, TimeSpan?
// Continuation of TcpMessage.Connect's parameter list; each factory below simply forwards to
// the corresponding Tcp command constructor or singleton Instance. TcpExtensions adds the
// system.Tcp() convenience accessor for the manager actor.
timeout, bool pullMode) { return new Tcp.Connect(remoteAddress, localAddress, options, timeout, pullMode); } public static Tcp.Command Connect(EndPoint remoteAddress) { return Connect(remoteAddress, null, null, null, false); } public static Tcp.Command Bind(IActorRef handler, EndPoint endpoint, int backlog, IEnumerable<Inet.SocketOption> options, bool pullMode) { return new Tcp.Bind(handler, endpoint, backlog, options, pullMode); } public static Tcp.Command Bind(IActorRef handler, EndPoint endpoint, int backlog) { return new Tcp.Bind(handler, endpoint, backlog); } public static Tcp.Command Register(IActorRef handler, bool <API key> = false, bool useResumeWriting = true) { return new Tcp.Register(handler, <API key>, useResumeWriting); } public static Tcp.Command Unbind() { return Tcp.Unbind.Instance; } public static Tcp.Command Close() { return Tcp.Close.Instance; } public static Tcp.Command ConfirmedClose() { return Tcp.ConfirmedClose.Instance; } public static Tcp.Command Abort() { return Tcp.Abort.Instance; } public static Tcp.NoAck NoAck(object token = null) { return new Tcp.NoAck(token); } public static Tcp.Command Write(ByteString data, Tcp.Event ack = null) { return Tcp.Write.Create(data, ack); } public static Tcp.Command ResumeWriting() { return Tcp.ResumeWriting.Instance; } public static Tcp.Command SuspendReading() { return Tcp.SuspendReading.Instance; } public static Tcp.Command ResumeReading() { return Tcp.ResumeReading.Instance; } public static Tcp.Command ResumeAccepting(int batchSize) { return new Tcp.ResumeAccepting(batchSize); } } public static class TcpExtensions { public static IActorRef Tcp(this ActorSystem system) { return IO.Tcp.Manager(system); } } }
#pragma once #include "drape_frontend/color_constants.hpp" #include "drape_frontend/route_shape.hpp" #include "drape/pointers.hpp" #include "drape/texture_manager.hpp" #include "traffic/speed_groups.hpp" #include "geometry/polyline2d.hpp" #include <functional> #include <unordered_map> #include <vector> namespace df { class RouteBuilder { public: using FlushFn = std::function<void(drape_ptr<SubrouteData> &&)>; using FlushArrowsFn = std::function<void(drape_ptr<SubrouteArrowsData> &&)>; using FlushMarkersFn = std::function<void(drape_ptr<SubrouteMarkersData> &&)>; RouteBuilder(FlushFn && flushFn, FlushArrowsFn && flushArrowsFn, FlushMarkersFn && flushMarkersFn); void Build(dp::DrapeID subrouteId, SubrouteConstPtr subroute, ref_ptr<dp::TextureManager> textures, int recacheId); void BuildArrows(dp::DrapeID subrouteId, std::vector<ArrowBorders> const & borders, ref_ptr<dp::TextureManager> textures, int recacheId); void ClearRouteCache(); private: FlushFn m_flushFn; FlushArrowsFn m_flushArrowsFn; FlushMarkersFn m_flushMarkersFn; struct RouteCacheData { m2::PolylineD m_polyline; double m_baseDepthIndex = 0.0; }; std::unordered_map<dp::DrapeID, RouteCacheData> m_routeCache; }; } // namespace df
// NOTE(review): this chunk was collapsed onto a few very long physical lines and several
// identifiers were redacted ("<API key>") — restore both from the upstream source before
// compiling. ClangCommentPrinter wraps another ASTPrinter, forwarding every callback to it
// while interleaving plain (non-doc) comments lexed out of the original clang header files.
// This source file is part of the Swift.org open source project #include "swift/IDE/<API key>.h" #include "swift/IDE/Utils.h" #include "swift/Sema/IDETypeChecking.h" #include "swift/AST/ASTContext.h" #include "swift/AST/ASTPrinter.h" #include "swift/AST/Decl.h" #include "swift/AST/Module.h" #include "swift/AST/NameLookup.h" #include "swift/AST/PrintOptions.h" #include "swift/Basic/PrimitiveParsing.h" #include "swift/ClangImporter/ClangImporter.h" #include "swift/ClangImporter/ClangModule.h" #include "swift/Parse/Token.h" #include "swift/Serialization/ModuleFile.h" #include "swift/Subsystems.h" #include "swift/Serialization/<API key>.h" #include "clang/AST/ASTContext.h" #include "clang/AST/Decl.h" #include "clang/AST/DeclObjC.h" #include "clang/Basic/Module.h" #include "clang/Lex/Lexer.h" #include "clang/Lex/MacroInfo.h" #include "clang/Lex/Preprocessor.h" #include <algorithm> #include <memory> #include <queue> #include <string> #include <utility> #include <vector> using namespace swift; namespace { Prints regular comments from clang module headers. class ClangCommentPrinter : public ASTPrinter { public: ClangCommentPrinter(ASTPrinter &OtherPrinter, ClangModuleLoader &ClangLoader) : OtherPrinter(OtherPrinter), ClangLoader(ClangLoader) {} private: void printDeclPre(const Decl *D, Optional<BracketOptions> Bracket) override; void printDeclPost(const Decl *D, Optional<BracketOptions> Bracket) override; void avoidPrintDeclPost(const Decl *D) override; // Forwarding implementations.
// Every ASTPrinter callback below simply delegates to OtherPrinter unchanged.
void printText(StringRef Text) override { return OtherPrinter.printText(Text); } void printDeclLoc(const Decl *D) override { return OtherPrinter.printDeclLoc(D); } void printDeclNameEndLoc(const Decl *D) override { return OtherPrinter.printDeclNameEndLoc(D); } void <API key>(const Decl *D) override { return OtherPrinter.<API key>(D); } void printTypePre(const TypeLoc &TL) override { return OtherPrinter.printTypePre(TL); } void printTypePost(const TypeLoc &TL) override { return OtherPrinter.printTypePost(TL); } void printTypeRef(Type T, const TypeDecl *TD, Identifier Name) override { return OtherPrinter.printTypeRef(T, TD, Name); } void printModuleRef(ModuleEntity Mod, Identifier Name) override { return OtherPrinter.printModuleRef(Mod, Name); } void <API key>(const ExtensionDecl *ED, TypeOrExtensionDecl Target, Optional<BracketOptions> Bracket) override { return OtherPrinter.<API key>(ED, Target, Bracket); } void <API key>(const ExtensionDecl *ED, TypeOrExtensionDecl Target, Optional<BracketOptions> Bracket) override { return OtherPrinter.<API key>(ED, Target, Bracket); } void printStructurePre(PrintStructureKind Kind, const Decl *D) override { return OtherPrinter.printStructurePre(Kind, D); } void printStructurePost(PrintStructureKind Kind, const Decl *D) override { return OtherPrinter.printStructurePost(Kind, D); } void printNamePre(PrintNameContext Context) override { return OtherPrinter.printNamePre(Context); } void printNamePost(PrintNameContext Context) override { return OtherPrinter.printNamePost(Context); } // Prints regular comments of the header the clang node comes from, until // the location of the node. Keeps track of the comments that were printed // from the file and resumes printing for the next node from the same file. // This expects to get passed clang nodes in source-order (at least within the // same header).
// Printer state: ResumeOffsets records, per clang FileID, the buffer offset where comment
// printing stopped; PendingComments buffers trailing same-line comments until printDeclPost;
// LastEntityLines records the last printed line per file for blank-line decisions.
void printCommentsUntil(ClangNode Node); void printComment(StringRef Text, unsigned StartLocCol); bool <API key>(clang::SourceLocation CommentLoc, ClangNode Node) const; unsigned getResumeOffset(clang::FileID FID) const { auto OffsI = ResumeOffsets.find(FID); if (OffsI != ResumeOffsets.end()) return OffsI->second; return 0; } void setResumeOffset(clang::FileID FID, unsigned Offset) { ResumeOffsets[FID] = Offset; } bool <API key>(ClangNode Node) const; void <API key>(clang::SourceLocation Loc); void <API key>(clang::FileID FID, unsigned LineNo); ASTPrinter &OtherPrinter; ClangModuleLoader &ClangLoader; llvm::DenseMap<clang::FileID, unsigned> ResumeOffsets; SmallVector<StringRef, 2> PendingComments; llvm::DenseMap<clang::FileID, unsigned> LastEntityLines; }; } // unnamed namespace static const clang::Module * <API key>(ImportDecl *Import) { if (auto *ClangMod = Import->getClangModule()) return ClangMod; if (auto Mod = Import->getModule()) if (auto *ClangMod = Mod-><API key>()) return ClangMod; return nullptr; } static void <API key>(Type Ty, std::string &Text) { SmallString<128> Buffer; llvm::raw_svector_ostream OS(Buffer); Ty->print(OS); Text = OS.str(); } bool swift::ide:: printTypeInterface(ModuleDecl *M, Type Ty, ASTPrinter &Printer, std::string &TypeName, std::string &Error) { if (!Ty) { if (Error.empty()) Error = "type cannot be null."; return true; } Ty = Ty->getRValueType(); if (auto ND = Ty-><API key>()) { PrintOptions Options = PrintOptions::printTypeInterface(Ty.getPointer()); ND->print(Printer, Options); <API key>(Ty, TypeName); return false; } Error = "cannot find declaration of type."; return true; } bool swift::ide:: printTypeInterface(ModuleDecl *M, StringRef TypeUSR, ASTPrinter &Printer, std::string &TypeName, std::string &Error) { return printTypeInterface(M, <API key>(M->getASTContext(), TypeUSR, Error), Printer, TypeName, Error); } void swift::ide::<API key>(ModuleDecl *M, Optional<StringRef> Group, <API key> TraversalOptions, ASTPrinter &Printer,
// Continuation of the module-interface entry point (name redacted) and the main printer: it
// buckets the module's display decls into import decls, clang decls keyed by owning submodule,
// and Swift decls; sorts each group deterministically; then prints them through PrintDecl,
// merging synthesized extensions into their nominal types where possible.
const PrintOptions &Options, const bool <API key>) { <API key>(M, M->getName().str(), Group.hasValue() ? Group.getValue() : ArrayRef<StringRef>(), TraversalOptions, Printer, Options, <API key>); } static void adjustPrintOptions(PrintOptions &AdjustedOptions) { // Don't print empty curly braces while printing the module interface. AdjustedOptions.FunctionDefinitions = false; AdjustedOptions.<API key> = false; // Print var declarations separately, one variable per decl. AdjustedOptions.<API key> = true; AdjustedOptions.VarInitializers = false; } ArrayRef<StringRef> swift::ide::collectModuleGroups(ModuleDecl *M, std::vector<StringRef> &Scratch) { for (auto File : M->getFiles()) { File->collectAllGroups(Scratch); } std::sort(Scratch.begin(), Scratch.end(), [](StringRef L, StringRef R) { return L.compare_lower(R) < 0; }); return llvm::makeArrayRef(Scratch); } Determine whether the given extension has a Clang node that created it (vs. being a Swift extension). static bool <API key>(ExtensionDecl *ext) { return static_cast<bool>(swift::ide::<API key>(ext)); } Optional<StringRef> swift::ide::findGroupNameForUSR(ModuleDecl *M, StringRef USR) { for (auto File : M->getFiles()) { if (auto Name = File->getGroupNameByUSR(USR)) { return Name; } } return None; } void swift::ide::<API key>( ModuleDecl *M, ArrayRef<StringRef> FullModuleName, ArrayRef<StringRef> GroupNames, <API key> TraversalOptions, ASTPrinter &Printer, const PrintOptions &Options, const bool <API key>) { auto AdjustedOptions = Options; adjustPrintOptions(AdjustedOptions); SmallVector<Decl *, 1> Decls; M->getDisplayDecls(Decls); auto &SwiftContext = M->getASTContext(); auto &Importer = static_cast<ClangImporter &>(*SwiftContext.<API key>()); const clang::Module *<API key> = nullptr; SmallVector<ImportDecl *, 1> ImportDecls; llvm::DenseSet<const clang::Module *> <API key>; SmallVector<Decl *, 1> SwiftDecls; llvm::DenseMap<const clang::Module *, SmallVector<std::pair<Decl *, clang::SourceLocation>, 1>> ClangDecls; //
// The top-level module name is dropped, the requested clang submodule is resolved, and — when
// recursive traversal is requested — the submodule tree is walked breadth-first, honoring the
// VisitHidden and VisitSubmodules traversal flags.
Drop top-level module name. FullModuleName = FullModuleName.slice(1); <API key> = M-><API key>(); if (<API key>) { for (StringRef Name : FullModuleName) { <API key> = <API key>->findSubmodule(Name); if (!<API key>) return; } } else { assert(FullModuleName.empty()); } // If we're printing recursively, find all of the submodules to print. if (<API key>) { if (TraversalOptions) { SmallVector<const clang::Module *, 8> Worklist; SmallPtrSet<const clang::Module *, 8> Visited; Worklist.push_back(<API key>); Visited.insert(<API key>); while (!Worklist.empty()) { const clang::Module *CM = Worklist.pop_back_val(); if (!(TraversalOptions & ModuleTraversal::VisitHidden) && CM->IsExplicit) continue; ClangDecls.insert({ CM, {} }); // If we're supposed to visit submodules, add them now. if (TraversalOptions & ModuleTraversal::VisitSubmodules) { for (auto Sub = CM->submodule_begin(), SubEnd = CM->submodule_end(); Sub != SubEnd; ++Sub) { if (Visited.insert(*Sub).second) Worklist.push_back(*Sub); } } } } else { ClangDecls.insert({ <API key>, {} }); } } // Collect those submodules that are actually imported but have no import decls // in the module. llvm::SmallPtrSet<const clang::Module *, 16> NoImportSubModules; if (<API key>) { // Assume all submodules are missing. for (auto It =<API key>->submodule_begin(); It != <API key>->submodule_end(); It++) { NoImportSubModules.insert(*It); } } llvm::StringMap<std::vector<Decl*>> FileRangedDecls; // Separate the declarations that we are going to print into different // buckets. for (Decl *D : Decls) { // Skip declarations that are not accessible.
if (auto *VD = dyn_cast<ValueDecl>(D)) { if (Options.AccessFilter > AccessLevel::Private && VD->getFormalAccess() < Options.AccessFilter) continue; } auto ShouldPrintImport = [&](ImportDecl *ImportD) -> bool { if (!<API key>) return true; auto ClangMod = ImportD->getClangModule(); if (!ClangMod) return true; if (!ClangMod->isSubModule()) return true; if (ClangMod == <API key>) return false; // FIXME: const-ness on the clang API. return ClangMod->isSubModuleOf( const_cast<clang::Module*>(<API key>)); }; if (auto ID = dyn_cast<ImportDecl>(D)) { if (ShouldPrintImport(ID)) { if (ID->getClangModule()) // Erase those submodules that are not missing. NoImportSubModules.erase(ID->getClangModule()); if (ID->getImportKind() == ImportKind::Module) { // Make sure we don't print duplicate imports, due to getting imports // for both a clang module and its overlay. if (auto *ClangMod = <API key>(ID)) { auto P = <API key>.insert(ClangMod); bool IsNew = P.second; if (!IsNew) continue; } } ImportDecls.push_back(ID); } continue; } auto addToClangDecls = [&](Decl *D, ClangNode CN) { assert(CN && "No Clang node here"); clang::SourceLocation Loc = CN.getLocation(); auto *OwningModule = Importer.<API key>(CN); auto I = ClangDecls.find(OwningModule); if (I != ClangDecls.end()) { I->second.push_back({ D, Loc }); } }; if (auto clangNode = <API key>(D)) { addToClangDecls(D, clangNode); continue; } // If we have an extension containing globals imported as members, // use the first member as the Clang node. if (auto Ext = dyn_cast<ExtensionDecl>(D)) { if (<API key>(Ext)) { addToClangDecls(Ext, <API key>(Ext)); continue; } } if (FullModuleName.empty()) { // If group name is given and the decl does not belong to the group, skip it.
if (!GroupNames.empty()){ if (auto Target = D->getGroupName()) { if (std::find(GroupNames.begin(), GroupNames.end(), Target.getValue()) != GroupNames.end()) { FileRangedDecls.insert(std::make_pair(D->getSourceFileName().getValue(), std::vector<Decl*>())).first->getValue().push_back(D); } } continue; } // Add Swift decls if we are printing the top-level module. SwiftDecls.push_back(D); } } if (!GroupNames.empty()) { assert(SwiftDecls.empty()); for (auto &Entry : FileRangedDecls) { auto &DeclsInFile = Entry.getValue(); std::sort(DeclsInFile.begin(), DeclsInFile.end(), [](Decl* LHS, Decl *RHS) { assert(LHS->getSourceOrder().hasValue()); assert(RHS->getSourceOrder().hasValue()); return LHS->getSourceOrder().getValue() < RHS->getSourceOrder().getValue(); }); for (auto D : DeclsInFile) { SwiftDecls.push_back(D); } } } // Create the missing import decls and add to the collector. for (auto *SM : NoImportSubModules) { ImportDecls.push_back(createImportDecl(M->getASTContext(), M, SM, {})); } auto &ClangSourceManager = Importer.getClangASTContext().getSourceManager(); // Sort imported declarations in source order *within a submodule*. for (auto &P : ClangDecls) { std::stable_sort(P.second.begin(), P.second.end(), [&](std::pair<Decl *, clang::SourceLocation> LHS, std::pair<Decl *, clang::SourceLocation> RHS) -> bool { return ClangSourceManager.<API key>(LHS.second, RHS.second); }); } // Sort Swift declarations so that we print them in a consistent order. std::sort(ImportDecls.begin(), ImportDecls.end(), [](ImportDecl *LHS, ImportDecl *RHS) -> bool { auto LHSPath = LHS->getFullAccessPath(); auto RHSPath = RHS->getFullAccessPath(); for (unsigned i = 0, e = std::min(LHSPath.size(), RHSPath.size()); i != e; i++) { if (int Ret = LHSPath[i].first.str().compare(RHSPath[i].first.str())) return Ret < 0; } return false; }); // If the group name is specified, we sort them according to their source order, // which is the order preserved by getTopLevelDecls.
// Swift decls are name/kind-sorted only when no group filter is given; grouped decls keep the
// source order established above.
if (GroupNames.empty()) { std::stable_sort(SwiftDecls.begin(), SwiftDecls.end(), [&](Decl *LHS, Decl *RHS) -> bool { auto *LHSValue = dyn_cast<ValueDecl>(LHS); auto *RHSValue = dyn_cast<ValueDecl>(RHS); if (LHSValue && RHSValue) { auto LHSName = LHSValue->getBaseName(); auto RHSName = RHSValue->getBaseName(); if (int Ret = LHSName.compare(RHSName)) return Ret < 0; // FIXME: this is not sufficient to establish a total order for overloaded // decls. return LHS->getKind() < RHS->getKind(); } return LHS->getKind() < RHS->getKind(); }); } ASTPrinter *PrinterToUse = &Printer; ClangCommentPrinter <API key>(Printer, Importer); if (Options.<API key>) PrinterToUse = &<API key>; auto PrintDecl = [&](Decl *D) -> bool { ASTPrinter &Printer = *PrinterToUse; if (!AdjustedOptions.shouldPrint(D)) { Printer.<API key>(D); return false; } if (auto Ext = dyn_cast<ExtensionDecl>(D)) { // Clang extensions (categories) are always printed in source order. // Swift extensions are printed with their associated type unless it's // a cross-module extension. if (!<API key>(Ext)) { auto ExtendedNominal = Ext->getExtendedNominal(); if (Ext->getModuleContext() == ExtendedNominal->getModuleContext()) return false; } } std::unique_ptr<<API key>> pAnalyzer; if (auto NTD = dyn_cast<NominalTypeDecl>(D)) { if (<API key>) { pAnalyzer.reset(new <API key>(NTD, AdjustedOptions)); AdjustedOptions.BracketOptions = {NTD, true, true, !pAnalyzer->hasMergeGroup(<API key>:: MergeGroupKind::<API key>)}; } } if (D->print(Printer, AdjustedOptions)) { if (AdjustedOptions.BracketOptions.shouldCloseNominal(D)) Printer << "\n"; AdjustedOptions.BracketOptions = BracketOptions(); if (auto NTD = dyn_cast<NominalTypeDecl>(D)) { std::queue<NominalTypeDecl *> SubDecls{{NTD}}; while (!SubDecls.empty()) { auto NTD = SubDecls.front(); SubDecls.pop(); // Add sub-types of NTD. for (auto Sub : NTD->getMembers()) if (auto N = dyn_cast<NominalTypeDecl>(Sub)) SubDecls.push(N); // Print Ext and add sub-types of Ext.
for (auto Ext : NTD->getExtensions()) { if (!<API key>) { if (!AdjustedOptions.shouldPrint(Ext)) { Printer.<API key>(Ext); continue; } if (<API key>(Ext)) continue; // will be printed in its source location, see above. Printer << "\n"; Ext->print(Printer, AdjustedOptions); Printer << "\n"; } for (auto Sub : Ext->getMembers()) if (auto N = dyn_cast<NominalTypeDecl>(Sub)) SubDecls.push(N); } if (!<API key>) continue; bool IsTopLevelDecl = D == NTD; // If printed Decl is the top-level, merge the constraint-free extensions // into the main body. if (IsTopLevelDecl) { // Print the part that should be merged with the type decl. pAnalyzer-><API key>( <API key>::MergeGroupKind:: <API key>, [&](ArrayRef<ExtensionInfo> Decls) { for (auto ET : Decls) { AdjustedOptions.BracketOptions = { ET.Ext, false, Decls.back().Ext == ET.Ext, true}; if (ET.IsSynthesized) AdjustedOptions.<API key>(NTD); ET.Ext->print(Printer, AdjustedOptions); if (ET.IsSynthesized) AdjustedOptions.<API key>(); if (AdjustedOptions.BracketOptions.<API key>( ET.Ext)) Printer << "\n"; } }); } // If the printed Decl is not the top-level one, reset analyzer. if (!IsTopLevelDecl) pAnalyzer.reset(new <API key>(NTD, AdjustedOptions)); // Print the rest as synthesized extensions. pAnalyzer-><API key>( // For top-level decls, only constraint extensions need to be // printed, since the rest are merged into the main body. IsTopLevelDecl ? <API key>::MergeGroupKind:: <API key> : // For sub-decls, all extensions should be printed. <API key>::MergeGroupKind::All, [&](ArrayRef<ExtensionInfo> Decls) { // Whether we've started the extension merge group in printing.
// Extension merge groups are printed with BracketOptions arranged so the first printed
// extension opens the group and the last one closes it; "Opened" tracks whether any
// extension in the group actually printed.
bool Opened = false; for (auto ET : Decls) { AdjustedOptions.BracketOptions = { ET.Ext, !Opened, Decls.back().Ext == ET.Ext, true}; if (AdjustedOptions.BracketOptions.shouldOpenExtension( ET.Ext)) Printer << "\n"; if (ET.IsSynthesized) { if (ET.EnablingExt) AdjustedOptions.<API key>( ET.EnablingExt); else AdjustedOptions.<API key>(NTD); } // Set opened if we actually printed this extension. Opened |= ET.Ext->print(Printer, AdjustedOptions); if (ET.IsSynthesized) AdjustedOptions.<API key>(); if (AdjustedOptions.BracketOptions.<API key>( ET.Ext)) Printer << "\n"; } }); AdjustedOptions.BracketOptions = BracketOptions(); } } return true; } return false; }; // Imports from the stdlib are internal details that don't need to be exposed. if (!M->isStdlibModule()) { for (auto *D : ImportDecls) PrintDecl(D); Printer << "\n"; } { using ModuleAndName = std::pair<const clang::Module *, std::string>; SmallVector<ModuleAndName, 8> ClangModules; for (auto P : ClangDecls) { ClangModules.push_back({ P.first, P.first->getFullModuleName() }); } // Sort modules by name.
// Clang modules are printed sorted by full module name; Swift decls are printed last unless
// the overlay is skipped by the traversal options.
std::sort(ClangModules.begin(), ClangModules.end(), [](const ModuleAndName &LHS, const ModuleAndName &RHS) -> bool { return LHS.second < RHS.second; }); for (auto CM : ClangModules) { for (auto DeclAndLoc : ClangDecls[CM.first]) PrintDecl(DeclAndLoc.first); } } if (!(TraversalOptions & ModuleTraversal::SkipOverlay) || !<API key>) { for (auto *D : SwiftDecls) { if (PrintDecl(D)) Printer << "\n"; } } } static SourceLoc <API key>(SourceFile &File) { SourceManager &SM = File.getASTContext().SourceMgr; SourceLoc Winner; auto tryUpdateStart = [&](SourceLoc Loc) -> bool { if (Loc.isInvalid()) return false; if (Winner.isInvalid()) { Winner = Loc; return true; } if (SM.isBeforeInBuffer(Loc, Winner)) { Winner = Loc; return true; } return false; }; for (auto D : File.Decls) { if (tryUpdateStart(D->getStartLoc())) { tryUpdateStart(D->getAttrs().getStartLoc()); auto RawComment = D->getRawComment(); if (!RawComment.isEmpty()) tryUpdateStart(RawComment.Comments.front().Range.getStart()); } } return Winner; } static void <API key>(SourceFile &File, ASTPrinter &Printer) { if (!File.getBufferID().hasValue()) return; auto BufferID = *File.getBufferID(); auto &SM = File.getASTContext().SourceMgr; CharSourceRange TextRange = SM.getRangeForBuffer(BufferID); auto DeclStartLoc = <API key>(File); if (DeclStartLoc.isValid()) { TextRange = CharSourceRange(SM, TextRange.getStart(), DeclStartLoc); } Printer << SM.extractText(TextRange, BufferID); } void swift::ide::<API key>(SourceFile &File, ASTPrinter &Printer, const PrintOptions &Options) { // We print all comments before the first line of Swift code.
// ClangCommentPrinter member definitions plus the header-interface printer (name redacted):
// decls imported from a bridging header are collected, de-duplicated, sorted by clang source
// location, and printed; printDeclPre/printDeclPost bracket each decl with the plain comments
// that surround it in the original header.
<API key>(File, Printer); File.print(Printer, Options); } void swift::ide::<API key>( StringRef Filename, ASTContext &Ctx, ASTPrinter &Printer, const PrintOptions &Options) { auto AdjustedOptions = Options; adjustPrintOptions(AdjustedOptions); auto &Importer = static_cast<ClangImporter &>(*Ctx.<API key>()); auto &ClangSM = Importer.getClangASTContext().getSourceManager(); auto headerFilter = [&](ClangNode ClangN) -> bool { return true; // no need for filtering. }; SmallVector<Decl *, 32> ClangDecls; llvm::SmallPtrSet<Decl *, 32> SeenDecls; auto headerReceiver = [&](Decl *D) { if (SeenDecls.count(D) == 0) { SeenDecls.insert(D); ClangDecls.push_back(D); } }; Importer.<API key>(Filename, headerFilter, headerReceiver); // Sort imported declarations in source order. std::sort(ClangDecls.begin(), ClangDecls.end(), [&](Decl *LHS, Decl *RHS) -> bool { return ClangSM.<API key>( <API key>(LHS).getLocation(), <API key>(RHS).getLocation()); }); ASTPrinter *PrinterToUse = &Printer; ClangCommentPrinter <API key>(Printer, Importer); if (Options.<API key>) PrinterToUse = &<API key>; for (auto *D : ClangDecls) { ASTPrinter &Printer = *PrinterToUse; if (!AdjustedOptions.shouldPrint(D)) { Printer.<API key>(D); continue; } if (D->print(Printer, AdjustedOptions)) Printer << "\n"; } } void ClangCommentPrinter::avoidPrintDeclPost(const Decl *D) { auto CD = D->getClangDecl(); if (!CD) return; const auto &Ctx = ClangLoader.getClangASTContext(); const auto &SM = Ctx.getSourceManager(); auto EndLoc = CD->getSourceRange().getEnd(); if (EndLoc.isInvalid()) return; clang::FileID FID = SM.getFileID(EndLoc); if (FID.isInvalid()) return; auto Loc = EndLoc; for (unsigned Line = SM.<API key>(EndLoc); Loc.isValid() && SM.<API key>(Loc) == Line; Loc = Loc.getLocWithOffset(1)); if (Loc.isInvalid()) return; if (SM.getFileOffset(Loc) > getResumeOffset(FID)) setResumeOffset(FID, SM.getFileOffset(Loc)); } void ClangCommentPrinter::printDeclPre(const Decl *D, Optional<BracketOptions> Bracket) { // Skip
parameters, since we do not gracefully handle nested declarations on a // single line. // FIXME: we should fix that, since it also affects struct members, etc. if (!isa<ParamDecl>(D)) { if (auto ClangN = swift::ide::<API key>(D)) { printCommentsUntil(ClangN); if (<API key>(ClangN)) { *this << "\n"; printIndent(); } <API key>(ClangN.getSourceRange().getBegin()); } } return OtherPrinter.printDeclPre(D, Bracket); } void ClangCommentPrinter::printDeclPost(const Decl *D, Optional<BracketOptions> Bracket) { OtherPrinter.printDeclPost(D, Bracket); // Skip parameters; see printDeclPre(). for (auto CommentText : PendingComments) { *this << " " << ASTPrinter::sanitizeUtf8(CommentText); } PendingComments.clear(); if (auto ClangN = swift::ide::<API key>(D)) <API key>(ClangN.getSourceRange().getEnd()); } void ClangCommentPrinter::printCommentsUntil(ClangNode Node) { const auto &Ctx = ClangLoader.getClangASTContext(); const auto &SM = Ctx.getSourceManager(); clang::SourceLocation NodeLoc = SM.getFileLoc(Node.getSourceRange().getBegin()); if (NodeLoc.isInvalid()) return; unsigned NodeLineNo = SM.<API key>(NodeLoc); clang::FileID FID = SM.getFileID(NodeLoc); if (FID.isInvalid()) return; clang::SourceLocation FileLoc = SM.<API key>(FID); StringRef Text = SM.getBufferData(FID); if (Text.empty()) return; const char *BufStart = Text.data(); const char *BufPtr = BufStart + getResumeOffset(FID); const char *BufEnd = BufStart + Text.size(); assert(BufPtr <= BufEnd); if (BufPtr == BufEnd) return; // nothing left. clang::Lexer Lex(FileLoc, Ctx.getLangOpts(), BufStart, BufPtr, BufEnd); Lex.<API key>(true); unsigned &LastPrintedLineNo = LastEntityLines[FID]; clang::Token Tok; do { BufPtr = Lex.getBufferLocation(); Lex.LexFromRawLexer(Tok); if (Tok.is(clang::tok::eof)) break; if (Tok.isNot(clang::tok::comment)) continue; // Reached a comment.
// Comments past the node's line stop the scan; comment tokens on the node's own line after the
// decl are deferred into PendingComments (printed by printDeclPost); doc comments are detected
// via the clang RawComment attached to the decl and skipped here.
clang::SourceLocation CommentLoc = Tok.getLocation(); std::pair<clang::FileID, unsigned> LocInfo = SM.getDecomposedLoc(CommentLoc); assert(LocInfo.first == FID); unsigned LineNo = SM.getLineNumber(LocInfo.first, LocInfo.second); if (LineNo > NodeLineNo) break; // Comment is past the clang node. bool IsDocComment = <API key>(CommentLoc, Node); // Print out the comment. StringRef CommentText(BufStart + LocInfo.second, Tok.getLength()); // Check if comment is on same line but after the declaration. if (SM.<API key>(NodeLoc, Tok.getLocation())) { if (!IsDocComment) PendingComments.push_back(CommentText); continue; } if (LastPrintedLineNo && LineNo - LastPrintedLineNo > 1) { *this << "\n"; printIndent(); } if (!IsDocComment) { unsigned StartLocCol = SM.<API key>(Tok.getLocation()); printComment(CommentText, StartLocCol); } LastPrintedLineNo = SM.getLineNumber(LocInfo.first, LocInfo.second + Tok.getLength()); } while (true); // Resume printing comments from this point. setResumeOffset(FID, BufPtr - BufStart); } void ClangCommentPrinter::printComment(StringRef RawText, unsigned StartCol) { unsigned WhitespaceToTrim = StartCol ?
// printComment strips the comment's original starting column of leading whitespace from each
// line before emitting; the remaining helpers track, per clang file, the last line on which an
// entity was printed so blank lines can be inserted between non-adjacent entities.
StartCol - 1 : 0; SmallVector<StringRef, 8> Lines; <API key>(RawText, WhitespaceToTrim, Lines); for (auto Line : Lines) { *this << ASTPrinter::sanitizeUtf8(Line) << "\n"; printIndent(); } } bool ClangCommentPrinter::<API key>( clang::SourceLocation CommentLoc, ClangNode Node) const { const clang::Decl *D = Node.getAsDecl(); if (!D) return false; const auto &Ctx = ClangLoader.getClangASTContext(); const auto &SM = Ctx.getSourceManager(); const clang::RawComment *RC = Ctx.<API key>(D); if (!RC) return false; clang::SourceRange DocRange = RC->getSourceRange(); if (SM.<API key>(CommentLoc, DocRange.getBegin()) || SM.<API key>(DocRange.getEnd(), CommentLoc)) return false; return true; } bool ClangCommentPrinter::<API key>(ClangNode Node) const { assert(Node); const auto &Ctx = ClangLoader.getClangASTContext(); const auto &SM = Ctx.getSourceManager(); clang::SourceLocation NodeLoc = SM.getFileLoc(Node.getSourceRange().getBegin()); if (NodeLoc.isInvalid()) return false; unsigned NodeLineNo = SM.<API key>(NodeLoc); clang::FileID FID = SM.getFileID(NodeLoc); if (FID.isInvalid()) return false; unsigned LastEntiyLine = 0; auto It = LastEntityLines.find(FID); if (It != LastEntityLines.end()) LastEntiyLine = It->second; return (NodeLineNo > LastEntiyLine) && NodeLineNo - LastEntiyLine > 1; } void ClangCommentPrinter::<API key>(clang::SourceLocation Loc) { if (Loc.isInvalid()) return; const auto &Ctx = ClangLoader.getClangASTContext(); const auto &SM = Ctx.getSourceManager(); unsigned LineNo = SM.<API key>(Loc); clang::FileID FID = SM.getFileID(Loc); if (FID.isInvalid()) return; <API key>(FID, LineNo); } void ClangCommentPrinter::<API key>(clang::FileID FID, unsigned LineNo) { assert(!FID.isInvalid()); unsigned &LastEntiyLine = LastEntityLines[FID]; if (LineNo > LastEntiyLine) LastEntiyLine = LineNo; }
/*
 * shm_mq: single-reader, single-writer shared-memory message queue.
 *
 * NOTE(review): many identifiers in this copy have been redacted to the
 * literal token "<API key>" (static helper names, two bool fields of
 * shm_mq_handle, a wait-event constant, etc.).  They are preserved
 * byte-for-byte below; restore the real names from upstream before building.
 */
#include "postgres.h"

#include "miscadmin.h"
#include "pgstat.h"
#include "postmaster/bgworker.h"
#include "storage/procsignal.h"
#include "storage/shm_mq.h"
#include "storage/spin.h"

/*
 * Shared-memory layout of a queue: a spinlock-protected header (receiver/
 * sender identities, detach flag) plus two atomic byte counters and a ring
 * buffer.  mq_bytes_read/mq_bytes_written only ever increase; their
 * difference is the number of unconsumed bytes in the ring.
 */
struct shm_mq
{
    slock_t     mq_mutex;
    PGPROC     *mq_receiver;
    PGPROC     *mq_sender;
    pg_atomic_uint64 mq_bytes_read;
    pg_atomic_uint64 mq_bytes_written;
    Size        mq_ring_size;
    bool        mq_detached;
    uint8      mq_ring_offset;      /* pad to MAXALIGN the ring start */
    char        mq_ring[<API key>];
};

/*
 * This structure is a backend-private handle for access to a queue.
 *
 * mqh_queue is a pointer to the queue we've attached, and mqh_segment is
 * an optional pointer to the dynamic shared memory segment that contains it.
 * (If mqh_segment is provided, we register an on_dsm_detach callback to
 * make sure we detach from the queue before detaching from DSM.)
 *
 * If this queue is intended to connect the current process with a background
 * worker that started it, the user can pass a pointer to the worker handle
 * to shm_mq_attach(), and we'll store it in mqh_handle.  The point of this
 * is to allow us to begin sending to or receiving from that queue before the
 * process we'll be communicating with has even been started.  If it fails
 * to start, the handle will allow us to notice that and fail cleanly, rather
 * than waiting forever; see <API key>.  This is mostly useful in
 * simple cases - e.g. where there are just 2 processes communicating; in
 * more complex scenarios, every process may not have a <API key>
 * available, or may need to watch for the failure of more than one other
 * process at a time.
 *
 * When a message exists as a contiguous chunk of bytes in the queue - that is,
 * it is smaller than the size of the ring buffer and does not wrap around
 * the end - we return the message to the caller as a pointer into the buffer.
 * For messages that are larger or happen to wrap, we reassemble the message
 * locally by copying the chunks into a backend-local buffer.  mqh_buffer is
 * the buffer, and mqh_buflen is the number of bytes allocated for it.
 *
 * mqh_partial_bytes, mqh_expected_bytes, and <API key>
 * are used to track the state of non-blocking operations.  When the caller
 * attempts a non-blocking operation that returns SHM_MQ_WOULD_BLOCK, they
 * are expected to retry the call at a later time with the same argument;
 * we need to retain enough state to pick up where we left off.
 * <API key> tracks whether we are done sending or receiving
 * (whichever we're doing) the entire length word.  mqh_partial_bytes tracks
 * the number of bytes read or written for either the length word or the
 * message itself, and mqh_expected_bytes - which is used only for reads -
 * tracks the expected total size of the payload.
 *
 * <API key> tracks whether we know the counterparty to have
 * attached to the queue at some previous point.  This lets us avoid some
 * mutex acquisitions.
 *
 * mqh_context is the memory context in effect at the time we attached to
 * the shm_mq.  The shm_mq_handle itself is allocated in this context, and
 * we make sure any other allocations we do happen in this context as well,
 * to avoid nasty surprises.
 */
struct shm_mq_handle
{
    shm_mq     *mqh_queue;
    dsm_segment *mqh_segment;
    <API key> *mqh_handle;
    char       *mqh_buffer;
    Size        mqh_buflen;
    Size        mqh_consume_pending;
    Size        mqh_partial_bytes;
    Size        mqh_expected_bytes;
    /* first bool: length-word-complete flag; second: counterparty-attached
     * flag — presumed from the handle comment above, confirm upstream. */
    bool        <API key>;
    bool        <API key>;
    MemoryContext mqh_context;
};

/* Forward declarations of file-local helpers (names redacted in this copy). */
static void <API key>(shm_mq *mq);
static shm_mq_result shm_mq_send_bytes(shm_mq_handle *mqh, Size nbytes,
                                       const void *data, bool nowait,
                                       Size *bytes_written);
static shm_mq_result <API key>(shm_mq_handle *mqh,
                               Size bytes_needed, bool nowait, Size *nbytesp,
                               void **datap);
static bool <API key>(shm_mq *mq,
                      <API key> *handle);
static bool <API key>(shm_mq *mq, PGPROC **ptr,
                      <API key> *handle);
static void <API key>(shm_mq *mq, Size n);
static void <API key>(shm_mq *mq, Size n);
static void <API key>(dsm_segment *seg, Datum arg);

/* Minimum queue size is enough for header and at least one chunk of data. */
const Size shm_mq_minimum_size =
MAXALIGN(offsetof(shm_mq, mq_ring)) + MAXIMUM_ALIGNOF;

#define MQH_INITIAL_BUFSIZE             8192

/*
 * Initialize a new shared message queue.
 */
shm_mq *
shm_mq_create(void *address, Size size)
{
    shm_mq     *mq = address;
    Size        data_offset = MAXALIGN(offsetof(shm_mq, mq_ring));

    /* If the size isn't MAXALIGN'd, just discard the odd bytes. */
    size = MAXALIGN_DOWN(size);

    /* Queue size must be large enough to hold some data. */
    Assert(size > data_offset);

    /* Initialize queue header. */
    SpinLockInit(&mq->mq_mutex);
    mq->mq_receiver = NULL;
    mq->mq_sender = NULL;
    pg_atomic_init_u64(&mq->mq_bytes_read, 0);
    pg_atomic_init_u64(&mq->mq_bytes_written, 0);
    mq->mq_ring_size = size - data_offset;
    mq->mq_detached = false;
    mq->mq_ring_offset = data_offset - offsetof(shm_mq, mq_ring);

    return mq;
}

/*
 * Set the identity of the process that will receive from a shared message
 * queue.
 */
void
shm_mq_set_receiver(shm_mq *mq, PGPROC *proc)
{
    PGPROC     *sender;

    SpinLockAcquire(&mq->mq_mutex);
    Assert(mq->mq_receiver == NULL);
    mq->mq_receiver = proc;
    sender = mq->mq_sender;
    SpinLockRelease(&mq->mq_mutex);

    /* Wake the counterparty, if present, so it notices we've attached. */
    if (sender != NULL)
        SetLatch(&sender->procLatch);
}

/*
 * Set the identity of the process that will send to a shared message queue.
 */
void
shm_mq_set_sender(shm_mq *mq, PGPROC *proc)
{
    PGPROC     *receiver;

    SpinLockAcquire(&mq->mq_mutex);
    Assert(mq->mq_sender == NULL);
    mq->mq_sender = proc;
    receiver = mq->mq_receiver;
    SpinLockRelease(&mq->mq_mutex);

    if (receiver != NULL)
        SetLatch(&receiver->procLatch);
}

/*
 * Get the configured receiver.
 */
PGPROC *
shm_mq_get_receiver(shm_mq *mq)
{
    PGPROC     *receiver;

    SpinLockAcquire(&mq->mq_mutex);
    receiver = mq->mq_receiver;
    SpinLockRelease(&mq->mq_mutex);

    return receiver;
}

/*
 * Get the configured sender.
 */
PGPROC *
shm_mq_get_sender(shm_mq *mq)
{
    PGPROC     *sender;

    SpinLockAcquire(&mq->mq_mutex);
    sender = mq->mq_sender;
    SpinLockRelease(&mq->mq_mutex);

    return sender;
}

/*
 * Attach to a shared message queue so we can send or receive messages.
 *
 * The memory context in effect at the time this function is called should
 * be one which will last for at least as long as the message queue itself.
 * We'll allocate the handle in that context, and future allocations that
 * are needed to buffer incoming data will happen in that context as well.
 *
 * If seg != NULL, the queue will be automatically detached when that dynamic
 * shared memory segment is detached.
 *
 * If handle != NULL, the queue can be read or written even before the
 * other process has attached.  We'll wait for it to do so if needed.  The
 * handle must be for a background worker initialized with bgw_notify_pid
 * equal to our PID.
 *
 * shm_mq_detach() should be called when done.  This will free the
 * shm_mq_handle and mark the queue itself as detached, so that our
 * counterpart won't get stuck waiting for us to fill or drain the queue
 * after we've already lost interest.
 */
shm_mq_handle *
shm_mq_attach(shm_mq *mq, dsm_segment *seg, <API key> *handle)
{
    shm_mq_handle *mqh = palloc(sizeof(shm_mq_handle));

    Assert(mq->mq_receiver == MyProc || mq->mq_sender == MyProc);
    mqh->mqh_queue = mq;
    mqh->mqh_segment = seg;
    mqh->mqh_handle = handle;
    mqh->mqh_buffer = NULL;
    mqh->mqh_buflen = 0;
    mqh->mqh_consume_pending = 0;
    mqh->mqh_partial_bytes = 0;
    mqh->mqh_expected_bytes = 0;
    mqh-><API key> = false;
    mqh-><API key> = false;
    mqh->mqh_context = <API key>;

    if (seg != NULL)
        on_dsm_detach(seg, <API key>, PointerGetDatum(mq));

    return mqh;
}

/*
 * Associate a <API key> with a shm_mq_handle just as if it had
 * been passed to shm_mq_attach.
 */
void
shm_mq_set_handle(shm_mq_handle *mqh, <API key> *handle)
{
    Assert(mqh->mqh_handle == NULL);
    mqh->mqh_handle = handle;
}

/*
 * Write a message into a shared message queue.
 */
shm_mq_result
shm_mq_send(shm_mq_handle *mqh, Size nbytes, const void *data, bool nowait)
{
    shm_mq_iovec iov;

    iov.data = data;
    iov.len = nbytes;

    return shm_mq_sendv(mqh, &iov, 1, nowait);
}

/*
 * Write a message into a shared message queue, gathered from multiple
 * addresses.
 *
 * When nowait = false, we'll wait on our process latch when the ring buffer
 * fills up, and then continue writing once the receiver has drained some data.
 * The process latch is reset after each wait.
 *
 * When nowait = true, we do not manipulate the state of the process latch;
 * instead, if the buffer becomes full, we return SHM_MQ_WOULD_BLOCK.  In
 * this case, the caller should call this function again, with the same
 * arguments, each time the process latch is set.  (Once begun, the sending
 * of a message cannot be aborted except by detaching from the queue; changing
 * the length or payload will corrupt the queue.)
 */
shm_mq_result
shm_mq_sendv(shm_mq_handle *mqh, shm_mq_iovec *iov, int iovcnt, bool nowait)
{
    shm_mq_result res;
    shm_mq     *mq = mqh->mqh_queue;
    PGPROC     *receiver;
    Size        nbytes = 0;
    Size        bytes_written;
    int         i;
    int         which_iov = 0;
    Size        offset;

    Assert(mq->mq_sender == MyProc);

    /* Compute total size of write. */
    for (i = 0; i < iovcnt; ++i)
        nbytes += iov[i].len;

    /* Try to write, or finish writing, the length word into the buffer. */
    while (!mqh-><API key>)
    {
        Assert(mqh->mqh_partial_bytes < sizeof(Size));
        res = shm_mq_send_bytes(mqh, sizeof(Size) - mqh->mqh_partial_bytes,
                                ((char *) &nbytes) + mqh->mqh_partial_bytes,
                                nowait, &bytes_written);

        if (res == SHM_MQ_DETACHED)
        {
            /* Reset state in case caller tries to send another message. */
            mqh->mqh_partial_bytes = 0;
            mqh-><API key> = false;
            return res;
        }
        mqh->mqh_partial_bytes += bytes_written;

        if (mqh->mqh_partial_bytes >= sizeof(Size))
        {
            Assert(mqh->mqh_partial_bytes == sizeof(Size));

            mqh->mqh_partial_bytes = 0;
            mqh-><API key> = true;
        }

        if (res != SHM_MQ_SUCCESS)
            return res;

        /* Length word can't be split unless bigger than required alignment. */
        Assert(mqh-><API key> || sizeof(Size) > MAXIMUM_ALIGNOF);
    }

    /* Write the actual data bytes into the buffer. */
    Assert(mqh->mqh_partial_bytes <= nbytes);
    offset = mqh->mqh_partial_bytes;
    do
    {
        Size        chunksize;

        /* Figure out which bytes need to be sent next. */
        if (offset >= iov[which_iov].len)
        {
            offset -= iov[which_iov].len;
            ++which_iov;
            if (which_iov >= iovcnt)
                break;
            continue;
        }

        /*
         * We want to avoid copying the data if at all possible, but every
         * chunk of bytes we write into the queue has to be MAXALIGN'd, except
         * the last.  Thus, if a chunk other than the last one ends on a
         * non-MAXALIGN'd boundary, we have to combine the tail end of its
         * data with data from one or more following chunks until we either
         * reach the last chunk or accumulate a number of bytes which is
         * MAXALIGN'd.
         */
        if (which_iov + 1 < iovcnt &&
            offset + MAXIMUM_ALIGNOF > iov[which_iov].len)
        {
            char        tmpbuf[MAXIMUM_ALIGNOF];
            int         j = 0;

            for (;;)
            {
                if (offset < iov[which_iov].len)
                {
                    tmpbuf[j] = iov[which_iov].data[offset];
                    j++;
                    offset++;
                    if (j == MAXIMUM_ALIGNOF)
                        break;
                }
                else
                {
                    offset -= iov[which_iov].len;
                    which_iov++;
                    if (which_iov >= iovcnt)
                        break;
                }
            }

            res = shm_mq_send_bytes(mqh, j, tmpbuf, nowait, &bytes_written);

            if (res == SHM_MQ_DETACHED)
            {
                /* Reset state in case caller tries to send another message. */
                mqh->mqh_partial_bytes = 0;
                mqh-><API key> = false;
                return res;
            }

            mqh->mqh_partial_bytes += bytes_written;
            if (res != SHM_MQ_SUCCESS)
                return res;
            continue;
        }

        /*
         * If this is the last chunk, we can write all the data, even if it
         * isn't a multiple of MAXIMUM_ALIGNOF.  Otherwise, we need to
         * MAXALIGN_DOWN the write size.
         */
        chunksize = iov[which_iov].len - offset;
        if (which_iov + 1 < iovcnt)
            chunksize = MAXALIGN_DOWN(chunksize);
        res = shm_mq_send_bytes(mqh, chunksize, &iov[which_iov].data[offset],
                                nowait, &bytes_written);

        if (res == SHM_MQ_DETACHED)
        {
            /* Reset state in case caller tries to send another message. */
            mqh-><API key> = false;
            mqh->mqh_partial_bytes = 0;
            return res;
        }

        mqh->mqh_partial_bytes += bytes_written;
        offset += bytes_written;
        if (res != SHM_MQ_SUCCESS)
            return res;
    } while (mqh->mqh_partial_bytes < nbytes);

    /* Reset for next message. */
    mqh->mqh_partial_bytes = 0;
    mqh-><API key> = false;

    /* If queue has been detached, let caller know. */
    if (mq->mq_detached)
        return SHM_MQ_DETACHED;

    /*
     * If the counterparty is known to have attached, we can read mq_receiver
     * without acquiring the spinlock and assume it isn't NULL.  Otherwise,
     * more caution is needed.
     */
    if (mqh-><API key>)
        receiver = mq->mq_receiver;
    else
    {
        SpinLockAcquire(&mq->mq_mutex);
        receiver = mq->mq_receiver;
        SpinLockRelease(&mq->mq_mutex);
        if (receiver == NULL)
            return SHM_MQ_SUCCESS;
        mqh-><API key> = true;
    }

    /* Notify receiver of the newly-written data, and return. */
    SetLatch(&receiver->procLatch);
    return SHM_MQ_SUCCESS;
}

/*
 * Receive a message from a shared message queue.
 *
 * We set *nbytes to the message length and *data to point to the message
 * payload.  If the entire message exists in the queue as a single,
 * contiguous chunk, *data will point directly into shared memory; otherwise,
 * it will point to a temporary buffer.  This mostly avoids data copying in
 * the hoped-for case where messages are short compared to the buffer size,
 * while still allowing longer messages.  In either case, the return value
 * remains valid until the next receive operation is performed on the queue.
 *
 * When nowait = false, we'll wait on our process latch when the ring buffer
 * is empty and we have not yet received a full message.  The sender will
 * set our process latch after more data has been written, and we'll resume
 * processing.  Each call will therefore return a complete message
 * (unless the sender detaches the queue).
 *
 * When nowait = true, we do not manipulate the state of the process latch;
 * instead, whenever the buffer is empty and we need to read from it, we
 * return SHM_MQ_WOULD_BLOCK.  In this case, the caller should call this
 * function again after the process latch has been set.
 */
shm_mq_result
shm_mq_receive(shm_mq_handle *mqh, Size *nbytesp, void **datap, bool nowait)
{
    shm_mq     *mq = mqh->mqh_queue;
    shm_mq_result res;
    Size        rb = 0;
    Size        nbytes;
    void       *rawdata;

    Assert(mq->mq_receiver == MyProc);

    /* We can't receive data until the sender has attached. */
    if (!mqh-><API key>)
    {
        if (nowait)
        {
            int         counterparty_gone;

            /*
             * We shouldn't return at this point at all unless the sender
             * hasn't attached yet.  However, the correct return value depends
             * on whether the sender is still attached.  If we first test
             * whether the sender has ever attached and then test whether the
             * sender has detached, there's a race condition: a sender that
             * attaches and detaches very quickly might fool us into thinking
             * the sender never attached at all.  So, test whether our
             * counterparty is definitively gone first, and only afterwards
             * check whether the sender ever attached in the first place.
             */
            counterparty_gone = <API key>(mq, mqh->mqh_handle);
            if (shm_mq_get_sender(mq) == NULL)
            {
                if (counterparty_gone)
                    return SHM_MQ_DETACHED;
                else
                    return SHM_MQ_WOULD_BLOCK;
            }
        }
        else if (!<API key>(mq, &mq->mq_sender, mqh->mqh_handle)
                 && shm_mq_get_sender(mq) == NULL)
        {
            mq->mq_detached = true;
            return SHM_MQ_DETACHED;
        }
        mqh-><API key> = true;
    }

    /*
     * If we've consumed an amount of data greater than 1/4th of the ring
     * size, mark it consumed in shared memory.  We try to avoid doing this
     * unnecessarily when only a small amount of data has been consumed,
     * because SetLatch() is fairly expensive and we don't want to do it too
     * often.
     */
    if (mqh->mqh_consume_pending > mq->mq_ring_size / 4)
    {
        <API key>(mq, mqh->mqh_consume_pending);
        mqh->mqh_consume_pending = 0;
    }

    /* Try to read, or finish reading, the length word from the buffer. */
    while (!mqh-><API key>)
    {
        /* Try to receive the message length word. */
        Assert(mqh->mqh_partial_bytes < sizeof(Size));
        res = <API key>(mqh, sizeof(Size) - mqh->mqh_partial_bytes,
                        nowait, &rb, &rawdata);
        if (res != SHM_MQ_SUCCESS)
            return res;

        /*
         * Hopefully, we'll receive the entire message length word at once.
         * But if sizeof(Size) > MAXIMUM_ALIGNOF, then it might be split over
         * multiple reads.
         */
        if (mqh->mqh_partial_bytes == 0 && rb >= sizeof(Size))
        {
            Size        needed;

            nbytes = *(Size *) rawdata;

            /* If we've already got the whole message, we're done. */
            needed = MAXALIGN(sizeof(Size)) + MAXALIGN(nbytes);
            if (rb >= needed)
            {
                mqh->mqh_consume_pending += needed;
                *nbytesp = nbytes;
                *datap = ((char *) rawdata) + MAXALIGN(sizeof(Size));
                return SHM_MQ_SUCCESS;
            }

            /*
             * We don't have the whole message, but we at least have the whole
             * length word.
             */
            mqh->mqh_expected_bytes = nbytes;
            mqh-><API key> = true;
            mqh->mqh_consume_pending += MAXALIGN(sizeof(Size));
            rb -= MAXALIGN(sizeof(Size));
        }
        else
        {
            Size        lengthbytes;

            /* Can't be split unless bigger than required alignment. */
            Assert(sizeof(Size) > MAXIMUM_ALIGNOF);

            /* Message word is split; need buffer to reassemble. */
            if (mqh->mqh_buffer == NULL)
            {
                mqh->mqh_buffer = MemoryContextAlloc(mqh->mqh_context,
                                                     MQH_INITIAL_BUFSIZE);
                mqh->mqh_buflen = MQH_INITIAL_BUFSIZE;
            }
            Assert(mqh->mqh_buflen >= sizeof(Size));

            /* Copy partial length word; remember to consume it. */
            if (mqh->mqh_partial_bytes + rb > sizeof(Size))
                lengthbytes = sizeof(Size) - mqh->mqh_partial_bytes;
            else
                lengthbytes = rb;
            memcpy(&mqh->mqh_buffer[mqh->mqh_partial_bytes], rawdata,
                   lengthbytes);
            mqh->mqh_partial_bytes += lengthbytes;
            mqh->mqh_consume_pending += MAXALIGN(lengthbytes);
            rb -= lengthbytes;

            /* If we now have the whole word, we're ready to read payload. */
            if (mqh->mqh_partial_bytes >= sizeof(Size))
            {
                Assert(mqh->mqh_partial_bytes == sizeof(Size));
                mqh->mqh_expected_bytes = *(Size *) mqh->mqh_buffer;
                mqh-><API key> = true;
                mqh->mqh_partial_bytes = 0;
            }
        }
    }
    nbytes = mqh->mqh_expected_bytes;

    if (mqh->mqh_partial_bytes == 0)
    {
        /*
         * Try to obtain the whole message in a single chunk.  If this works,
         * we need not copy the data and can return a pointer directly into
         * shared memory.
         */
        res = <API key>(mqh, nbytes, nowait, &rb, &rawdata);
        if (res != SHM_MQ_SUCCESS)
            return res;
        if (rb >= nbytes)
        {
            mqh-><API key> = false;
            mqh->mqh_consume_pending += MAXALIGN(nbytes);
            *nbytesp = nbytes;
            *datap = rawdata;
            return SHM_MQ_SUCCESS;
        }

        /*
         * The message has wrapped the buffer.  We'll need to copy it in order
         * to return it to the client in one chunk.  First, make sure we have
         * a large enough buffer available.
         */
        if (mqh->mqh_buflen < nbytes)
        {
            Size        newbuflen = Max(mqh->mqh_buflen, MQH_INITIAL_BUFSIZE);

            /* Double until large enough for the whole payload. */
            while (newbuflen < nbytes)
                newbuflen *= 2;

            if (mqh->mqh_buffer != NULL)
            {
                pfree(mqh->mqh_buffer);
                mqh->mqh_buffer = NULL;
                mqh->mqh_buflen = 0;
            }
            mqh->mqh_buffer = MemoryContextAlloc(mqh->mqh_context, newbuflen);
            mqh->mqh_buflen = newbuflen;
        }
    }

    /* Loop until we've copied the entire message. */
    for (;;)
    {
        Size        still_needed;

        /* Copy as much as we can. */
        Assert(mqh->mqh_partial_bytes + rb <= nbytes);
        memcpy(&mqh->mqh_buffer[mqh->mqh_partial_bytes], rawdata, rb);
        mqh->mqh_partial_bytes += rb;

        /*
         * Update count of bytes that can be consumed, accounting for
         * alignment padding.  Note that this will never actually insert any
         * padding except at the end of a message, because the buffer size is
         * a multiple of MAXIMUM_ALIGNOF, and each read and write is as well.
         */
        Assert(mqh->mqh_partial_bytes == nbytes || rb == MAXALIGN(rb));
        mqh->mqh_consume_pending += MAXALIGN(rb);

        /* If we got all the data, exit the loop. */
        if (mqh->mqh_partial_bytes >= nbytes)
            break;

        /* Wait for some more data. */
        still_needed = nbytes - mqh->mqh_partial_bytes;
        res = <API key>(mqh, still_needed, nowait, &rb, &rawdata);
        if (res != SHM_MQ_SUCCESS)
            return res;
        if (rb > still_needed)
            rb = still_needed;
    }

    /* Return the complete message, and reset for next message. */
    *nbytesp = nbytes;
    *datap = mqh->mqh_buffer;
    mqh-><API key> = false;
    mqh->mqh_partial_bytes = 0;
    return SHM_MQ_SUCCESS;
}

/*
 * Wait for the other process that's supposed to use this queue to attach
 * to it.
 *
 * The return value is SHM_MQ_DETACHED if the worker has already detached or
 * if it dies; it is SHM_MQ_SUCCESS if we detect that the worker has attached.
 * Note that we will only be able to detect that the worker has died before
 * attaching if a background worker handle was passed to shm_mq_attach().
 */
shm_mq_result
<API key>(shm_mq_handle *mqh)
{
    shm_mq     *mq = mqh->mqh_queue;
    PGPROC    **victim;

    if (shm_mq_get_receiver(mq) == MyProc)
        victim = &mq->mq_sender;
    else
    {
        Assert(shm_mq_get_sender(mq) == MyProc);
        victim = &mq->mq_receiver;
    }

    if (<API key>(mq, victim, mqh->mqh_handle))
        return SHM_MQ_SUCCESS;
    else
        return SHM_MQ_DETACHED;
}

/*
 * Detach from a shared message queue, and destroy the shm_mq_handle.
 */
void
shm_mq_detach(shm_mq_handle *mqh)
{
    /* Notify counterparty that we're outta here. */
    <API key>(mqh->mqh_queue);

    /* Cancel on_dsm_detach callback, if any. */
    if (mqh->mqh_segment)
        <API key>(mqh->mqh_segment,
                  <API key>,
                  PointerGetDatum(mqh->mqh_queue));

    /* Release local memory associated with handle. */
    if (mqh->mqh_buffer != NULL)
        pfree(mqh->mqh_buffer);
    pfree(mqh);
}

/*
 * Notify counterparty that we're detaching from shared message queue.
 *
 * The purpose of this function is to make sure that the process
 * with which we're communicating doesn't block forever waiting for us to
 * fill or drain the queue once we've lost interest.  When the sender
 * detaches, the receiver can read any messages remaining in the queue;
 * further reads will return SHM_MQ_DETACHED.  If the receiver detaches,
 * further attempts to send messages will likewise return SHM_MQ_DETACHED.
 *
 * This is separated out from shm_mq_detach() because if the on_dsm_detach
 * callback fires, we only want to do this much.  We do not try to touch
 * the local shm_mq_handle, as it may have been pfree'd already.
 */
static void
<API key>(shm_mq *mq)
{
    PGPROC     *victim;

    SpinLockAcquire(&mq->mq_mutex);
    if (mq->mq_sender == MyProc)
        victim = mq->mq_receiver;
    else
    {
        Assert(mq->mq_receiver == MyProc);
        victim = mq->mq_sender;
    }
    mq->mq_detached = true;
    SpinLockRelease(&mq->mq_mutex);

    if (victim != NULL)
        SetLatch(&victim->procLatch);
}

/*
 * Get the shm_mq from handle.
 */
shm_mq *
shm_mq_get_queue(shm_mq_handle *mqh)
{
    return mqh->mqh_queue;
}

/*
 * Write bytes into a shared message queue.
 */
static shm_mq_result
shm_mq_send_bytes(shm_mq_handle *mqh, Size nbytes, const void *data,
                  bool nowait, Size *bytes_written)
{
    shm_mq     *mq = mqh->mqh_queue;
    Size        sent = 0;
    uint64      used;
    Size        ringsize = mq->mq_ring_size;
    Size        available;

    while (sent < nbytes)
    {
        uint64      rb;
        uint64      wb;

        /* Compute number of ring buffer bytes used and available. */
        rb = pg_atomic_read_u64(&mq->mq_bytes_read);
        wb = pg_atomic_read_u64(&mq->mq_bytes_written);
        Assert(wb >= rb);
        used = wb - rb;
        Assert(used <= ringsize);
        available = Min(ringsize - used, nbytes - sent);

        /*
         * Bail out if the queue has been detached.  Note that we would be in
         * trouble if the compiler decided to cache the value of
         * mq->mq_detached in a register or on the stack across loop
         * iterations.  It probably shouldn't do that anyway since we'll
         * always return, call an external function that performs a system
         * call, or reach a memory barrier at some point later in the loop,
         * but just to be sure, insert a compiler barrier here.
         */
        pg_compiler_barrier();
        if (mq->mq_detached)
        {
            *bytes_written = sent;
            return SHM_MQ_DETACHED;
        }

        if (available == 0 && !mqh-><API key>)
        {
            /*
             * The queue is full, so if the receiver isn't yet known to be
             * attached, we must wait for that to happen.
             */
            if (nowait)
            {
                if (<API key>(mq, mqh->mqh_handle))
                {
                    *bytes_written = sent;
                    return SHM_MQ_DETACHED;
                }
                if (shm_mq_get_receiver(mq) == NULL)
                {
                    *bytes_written = sent;
                    return SHM_MQ_WOULD_BLOCK;
                }
            }
            else if (!<API key>(mq, &mq->mq_receiver,
                                mqh->mqh_handle))
            {
                mq->mq_detached = true;
                *bytes_written = sent;
                return SHM_MQ_DETACHED;
            }
            mqh-><API key> = true;

            /*
             * The receiver may have read some data after attaching, so we
             * must not wait without rechecking the queue state.
             */
        }
        else if (available == 0)
        {
            /*
             * Since mq-><API key> is known to be true at this
             * point, mq_receiver has been set, and it can't change once set.
             * Therefore, we can read it without acquiring the spinlock.
             */
            Assert(mqh-><API key>);
            SetLatch(&mq->mq_receiver->procLatch);

            /* Skip manipulation of our latch if nowait = true. */
            if (nowait)
            {
                *bytes_written = sent;
                return SHM_MQ_WOULD_BLOCK;
            }

            /*
             * Wait for our latch to be set.  It might already be set for some
             * unrelated reason, but that'll just result in one extra trip
             * through the loop.  It's worth it to avoid resetting the latch
             * at top of loop, because setting an already-set latch is much
             * cheaper than setting one that has been reset.
             */
            (void) WaitLatch(MyLatch, WL_LATCH_SET | WL_EXIT_ON_PM_DEATH, 0,
                             WAIT_EVENT_MQ_SEND);

            /* Reset the latch so we don't spin. */
            ResetLatch(MyLatch);

            /* An interrupt may have occurred while we were waiting. */
            <API key>();
        }
        else
        {
            Size        offset;
            Size        sendnow;

            offset = wb % (uint64) ringsize;
            sendnow = Min(available, ringsize - offset);

            /*
             * Write as much data as we can via a single memcpy().  Make sure
             * these writes happen after the read of mq_bytes_read, above.
             * This barrier pairs with the one in <API key>.
             * (Since we're separating the read of mq_bytes_read from a
             * subsequent write to mq_ring, we need a full barrier here.)
             */
            pg_memory_barrier();
            memcpy(&mq->mq_ring[mq->mq_ring_offset + offset],
                   (char *) data + sent, sendnow);
            sent += sendnow;

            /*
             * Update count of bytes written, with alignment padding.  Note
             * that this will never actually insert any padding except at the
             * end of a run of bytes, because the buffer size is a multiple of
             * MAXIMUM_ALIGNOF, and each read is as well.
             */
            Assert(sent == nbytes || sendnow == MAXALIGN(sendnow));
            <API key>(mq, MAXALIGN(sendnow));

            /*
             * For efficiency, we don't set the reader's latch here.  We'll do
             * that only when the buffer fills up or after writing an entire
             * message.
             */
        }
    }

    *bytes_written = sent;
    return SHM_MQ_SUCCESS;
}

/*
 * Wait until at least *nbytesp bytes are available to be read from the
 * shared message queue, or until the buffer wraps around.  If the queue is
 * detached, returns SHM_MQ_DETACHED.  If nowait is specified and a wait
 * would be required, returns SHM_MQ_WOULD_BLOCK.  Otherwise, *datap is set
 * to the location at which data bytes can be read, *nbytesp is set to the
 * number of bytes which can be read at that address, and the return value
 * is SHM_MQ_SUCCESS.
 */
static shm_mq_result
<API key>(shm_mq_handle *mqh, Size bytes_needed, bool nowait,
          Size *nbytesp, void **datap)
{
    shm_mq     *mq = mqh->mqh_queue;
    Size        ringsize = mq->mq_ring_size;
    uint64      used;
    uint64      written;

    for (;;)
    {
        Size        offset;
        uint64      read;

        /* Get bytes written, so we can compute what's available to read. */
        written = pg_atomic_read_u64(&mq->mq_bytes_written);

        /*
         * Get bytes read.  Include bytes we could consume but have not yet
         * consumed.
         */
        read = pg_atomic_read_u64(&mq->mq_bytes_read) +
            mqh->mqh_consume_pending;
        used = written - read;
        Assert(used <= ringsize);
        offset = read % (uint64) ringsize;

        /* If we have enough data or buffer has wrapped, we're done. */
        if (used >= bytes_needed || offset + used >= ringsize)
        {
            *nbytesp = Min(used, ringsize - offset);
            *datap = &mq->mq_ring[mq->mq_ring_offset + offset];

            /*
             * Separate the read of mq_bytes_written, above, from caller's
             * attempt to read the data itself.  Pairs with the barrier in
             * <API key>.
             */
            pg_read_barrier();
            return SHM_MQ_SUCCESS;
        }

        /*
         * Fall out before waiting if the queue has been detached.
         *
         * Note that we don't check for this until *after* considering whether
         * the data already available is enough, since the receiver can finish
         * receiving a message stored in the buffer even after the sender has
         * detached.
         */
        if (mq->mq_detached)
        {
            /*
             * If the writer advanced mq_bytes_written and then set
             * mq_detached, we might not have read the final value of
             * mq_bytes_written above.  Insert a read barrier and then check
             * again if mq_bytes_written has advanced.
             */
            pg_read_barrier();
            if (written != pg_atomic_read_u64(&mq->mq_bytes_written))
                continue;

            return SHM_MQ_DETACHED;
        }

        /*
         * We didn't get enough data to satisfy the request, so mark any data
         * previously-consumed as read to make more buffer space.
         */
        if (mqh->mqh_consume_pending > 0)
        {
            <API key>(mq, mqh->mqh_consume_pending);
            mqh->mqh_consume_pending = 0;
        }

        /* Skip manipulation of our latch if nowait = true. */
        if (nowait)
            return SHM_MQ_WOULD_BLOCK;

        /*
         * Wait for our latch to be set.  It might already be set for some
         * unrelated reason, but that'll just result in one extra trip through
         * the loop.  It's worth it to avoid resetting the latch at top of
         * loop, because setting an already-set latch is much cheaper than
         * setting one that has been reset.
         */
        (void) WaitLatch(MyLatch, WL_LATCH_SET | WL_EXIT_ON_PM_DEATH, 0,
                         <API key>);

        /* Reset the latch so we don't spin. */
        ResetLatch(MyLatch);

        /* An interrupt may have occurred while we were waiting. */
        <API key>();
    }
}

/*
 * Test whether a counterparty who may not even be alive yet is definitely
 * gone.
 */
static bool
<API key>(shm_mq *mq, <API key> *handle)
{
    pid_t       pid;

    /* If the queue has been detached, counterparty is definitely gone. */
    if (mq->mq_detached)
        return true;

    /* If there's a handle, check worker status. */
    if (handle != NULL)
    {
        BgwHandleStatus status;

        /* Check for unexpected worker death. */
        status = <API key>(handle, &pid);
        if (status != BGWH_STARTED && status != <API key>)
        {
            /* Mark it detached, just to make it official. */
            mq->mq_detached = true;
            return true;
        }
    }

    /* Counterparty is not definitively gone. */
    return false;
}

/*
 * This is used when a process is waiting for its counterpart to attach to the
 * queue.  We exit when the other process attaches as expected, or, if
 * handle != NULL, when the referenced background process or the postmaster
 * dies.  Note that if handle == NULL, and the process fails to attach, we'll
 * potentially get stuck here forever waiting for a process that may never
 * start.  We do check for interrupts, though.
 *
 * ptr is a pointer to the memory address that we're expecting to become
 * non-NULL when our counterpart attaches to the queue.
 */
static bool
<API key>(shm_mq *mq, PGPROC **ptr, <API key> *handle)
{
    bool        result = false;

    for (;;)
    {
        BgwHandleStatus status;
        pid_t       pid;

        /* Acquire the lock just long enough to check the pointer. */
        SpinLockAcquire(&mq->mq_mutex);
        result = (*ptr != NULL);
        SpinLockRelease(&mq->mq_mutex);

        /* Fail if detached; else succeed if initialized. */
        if (mq->mq_detached)
        {
            result = false;
            break;
        }
        if (result)
            break;

        if (handle != NULL)
        {
            /* Check for unexpected worker death. */
            status = <API key>(handle, &pid);
            if (status != BGWH_STARTED && status != <API key>)
            {
                result = false;
                break;
            }
        }

        /* Wait to be signalled. */
        (void) WaitLatch(MyLatch, WL_LATCH_SET | WL_EXIT_ON_PM_DEATH, 0,
                         <API key>);

        /* Reset the latch so we don't spin. */
        ResetLatch(MyLatch);

        /* An interrupt may have occurred while we were waiting. */
        <API key>();
    }

    return result;
}

/*
 * Increment the number of bytes read.
 */
static void
<API key>(shm_mq *mq, Size n)
{
    PGPROC     *sender;

    /*
     * Separate prior reads of mq_ring from the increment of mq_bytes_read
     * which follows.  This pairs with the full barrier in
     * shm_mq_send_bytes().  We only need a read barrier here because the
     * increment of mq_bytes_read is actually a read followed by a dependent
     * write.
     */
    pg_read_barrier();

    /*
     * There's no need to use <API key> here, because nobody
     * else can be changing this value.  This method should be cheaper.
     */
    pg_atomic_write_u64(&mq->mq_bytes_read,
                        pg_atomic_read_u64(&mq->mq_bytes_read) + n);

    /*
     * We shouldn't have any bytes to read without a sender, so we can read
     * mq_sender here without a lock.  Once it's initialized, it can't change.
     */
    sender = mq->mq_sender;
    Assert(sender != NULL);
    SetLatch(&sender->procLatch);
}

/*
 * Increment the number of bytes written.
 */
static void
<API key>(shm_mq *mq, Size n)
{
    /*
     * Separate prior reads of mq_ring from the write of mq_bytes_written
     * which we're about to do.  Pairs with the read barrier found in
     * <API key>.
     */
    pg_write_barrier();

    /*
     * There's no need to use <API key> here, because nobody
     * else can be changing this value.  This method avoids taking the bus
     * lock unnecessarily.
     */
    pg_atomic_write_u64(&mq->mq_bytes_written,
                        pg_atomic_read_u64(&mq->mq_bytes_written) + n);
}

/* Shim for on_dsm_callback. */
static void
<API key>(dsm_segment *seg, Datum arg)
{
    shm_mq     *mq = (shm_mq *) DatumGetPointer(arg);

    <API key>(mq);
}
#!/bin/bash
# Copy the repository checkout into ./build/ and install its Python
# dependencies.
#
# Required environment variables:
#   REPO - path to the repository checkout
#   PIP  - pip executable to use
#
# Fail fast on any error, on use of an unset variable, and on failures
# anywhere in a pipeline.
set -euo pipefail

# Quote all expansions so paths containing spaces or glob characters
# don't word-split or expand.
cp -a "$REPO" ./build/
"${PIP}" install -r "$REPO/requirements.txt"
# Homebrew formula for dash, a minimal POSIX shell.
class Dash < Formula
  desc "POSIX-compliant descendant of NetBSD's ash (the Almquist SHell)"
  homepage "http://gondor.apana.org.au/~herbert/dash/"
  url "http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.10.2.tar.gz"
  mirror "https://dl.bintray.com/homebrew/mirror/dash-0.5.10.2.tar.gz"
  sha256 "<SHA256-like>"

  bottle do
    cellar :any_skip_relocation
    sha256 "<SHA256-like>" => :mojave
    sha256 "<SHA256-like>" => :high_sierra
    sha256 "<SHA256-like>" => :sierra
    sha256 "<SHA256-like>" => :el_capitan
  end

  # HEAD builds come from the upstream git repo and need autotools to
  # generate the configure script (see autogen.sh below).
  head do
    url "https://git.kernel.org/pub/scm/utils/dash/dash.git"

    depends_on "autoconf" => :build
    depends_on "automake" => :build
  end

  def install
    # Only HEAD checkouts lack a pre-generated ./configure.
    system "./autogen.sh" if build.head?

    system "./configure", "--prefix=#{prefix}",
                          "--with-libedit",
                          "--<API key>",
                          "--enable-fnmatch",
                          "--enable-glob"
    system "make"
    system "make", "install"
  end

  test do
    system "#{bin}/dash", "-c", "echo Hello!"
  end
end
# Homebrew-Cask definition for the Second Life desktop client.
cask 'second-life-viewer' do
  version '3.8.1.303130'
  sha256 '<SHA256-like>'

  # The download filename embeds the version with underscores instead of dots.
  url "http://download.cloud.secondlife.com/Viewer_3/Second_Life_#{version.gsub('.','_')}_i386.dmg"
  name 'Second Life Viewer'
  homepage 'https://secondlife.com/'
  license :gpl
  tags :vendor => 'Linden Lab'

  app 'Second Life Viewer.app'

  depends_on :macos => '>= :lion'

  # Removed on `brew cask zap`: per-user data and caches.
  zap :delete => [
                   '~/Library/Application Support/SecondLife',
                   '~/Library/Caches/SecondLife',
                 ]
end
package com.glob3mobile.server.proxy; import java.io.BufferedInputStream; import java.io.<API key>; import java.io.IOException; import java.net.HttpURLConnection; import java.net.<API key>; import java.net.URL; import java.net.URLDecoder; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; public class Proxy extends HttpServlet { private static final long serialVersionUID = -<API key>; // private static final int <API key> = 31536000; @Override public void doPost(final HttpServletRequest request, final HttpServletResponse response) throws IOException { doGet(request, response); } @Override public void doGet(final HttpServletRequest request, final HttpServletResponse response) throws IOException { String reqUrl = request.getQueryString(); if (reqUrl == null) { response.setStatus(400); response.getOutputStream().println("ERROR 400: No target specified for proxy."); return; } reqUrl = URLDecoder.decode(reqUrl, "UTF-8"); // reqUrl = reqUrl.replaceFirst("url=", ""); if (reqUrl.toLowerCase().startsWith("url=")) { reqUrl = reqUrl.substring("url=".length()); } try { final URL url = new URL(reqUrl); final Enumeration headerNames = request.getHeaderNames(); final Map<String, String> headers = new HashMap<String, String>(); while (headerNames.hasMoreElements()) { final String headerName = (String) headerNames.nextElement(); String headerValue = request.getHeader(headerName); if (headerName.equalsIgnoreCase("Host")) { // headerValue = reqUrl; final int port = url.getPort(); final String host = url.getHost(); headerValue = ((port == 80) || (port == -1)) ? 
host : host + ":" + port; } else if (headerName.equalsIgnoreCase("User-Agent")) { continue; } else if (headerName.equalsIgnoreCase("Accept-Encoding")) { continue; } headers.put(headerName, headerValue); } HttpURLConnection connection = null; try { //Create connection connection = (HttpURLConnection) url.openConnection(); connection.setUseCaches(true); connection.setDoInput(true); connection.setDoOutput(false); for (final String headerName : headers.keySet()) { connection.setRequestProperty(headerName, headers.get(headerName)); } connection.setRequestMethod("GET"); // Set maxAge // connection.setRequestProperty("Cache-Control", "max-age=" + <API key>); // Send request // final DataOutputStream wr = new DataOutputStream(connection.getOutputStream()); // wr.flush(); // wr.close(); // Get content type final String contentType = connection.getContentType(); if (contentType != null) { response.setContentType(contentType); final int responseCode = connection.getResponseCode(); response.setStatus(responseCode); BufferedInputStream in; if ((responseCode == 200) || (responseCode == 201)) { in = new BufferedInputStream(connection.getInputStream()); } else { in = new BufferedInputStream(connection.getErrorStream()); } //send output to client final <API key> out = new <API key>(response.getOutputStream()); final byte[] buffer = new byte[4096]; int length = 0; int totalLength = 0; while ((length = in.read(buffer)) > 0) { out.write(buffer, 0, length); totalLength += length; } response.setContentLength(totalLength); out.flush(); out.close(); in.close(); } else { System.out.println("Content type is null"); } } // catch (final Exception e) { // e.printStackTrace(); finally { if (connection != null) { connection.disconnect(); } } } catch (final <API key> e1) { response.setStatus(400); response.getOutputStream().println("ERROR 400: Invalid URL: " + reqUrl); return; } } }
# Homebrew formula for the DocBook XSL 1.0 stylesheets (namespaced and
# non-namespaced variants), plus XML catalog registration.
class DocbookXsl < Formula
  desc "XML vocabulary to create <API key> documents"
  homepage "https://github.com/docbook/xslt10-stylesheets"
  url "https://github.com/docbook/xslt10-stylesheets/releases/download/release%2F1.79.2/docbook-xsl-nons-1.79.2.tar.bz2"
  sha256 "<SHA256-like>"
  # Except as otherwise noted, for example, under some of the /contrib/
  license "MIT"
  revision 1

  livecheck do
    url :homepage
    regex(%r{^(?:release/)?(\d+(?:\.\d+)+)$}i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "<SHA256-like>"
    sha256 cellar: :any_skip_relocation, big_sur:       "<SHA256-like>"
    sha256 cellar: :any_skip_relocation, catalina:      "<SHA256-like>"
    sha256 cellar: :any_skip_relocation, mojave:        "<SHA256-like>"
    sha256 cellar: :any_skip_relocation, high_sierra:   "<SHA256-like>"
    sha256 cellar: :any_skip_relocation, x86_64_linux:  "<SHA256-like>"
  end

  depends_on "docbook"

  # The namespaced stylesheet variant, installed alongside the main one.
  resource "ns" do
    url "https://github.com/docbook/xslt10-stylesheets/releases/download/release%2F1.79.2/docbook-xsl-1.79.2.tar.bz2"
    sha256 "<SHA256-like>"
  end

  resource "doc" do
    url "https://github.com/docbook/xslt10-stylesheets/releases/download/release%2F1.79.2/docbook-xsl-doc-1.79.2.tar.bz2"
    sha256 "<SHA256-like>"
  end

  # NOTE(review): the patch `url` value below appears truncated in this
  # extract ('url "http:'); upstream formulas pair a
  # raw.githubusercontent.com URL with the mirror shown here.  Restore the
  # full URL from the Homebrew tap before relying on this block.
  patch do
    url "http:
    mirror "https://raw.githubusercontent.com/Homebrew/formula-patches/5f2d6c1/docbook-xsl/docbook-xsl-nons-1.79.2-stack_fix-1.patch"
    sha256 "<SHA256-like>"
  end

  def install
    ENV["XML_CATALOG_FILES"] = "#{etc}/xml/catalog"
    doc_files = %w[AUTHORS BUGS COPYING NEWS README RELEASE-NOTES.txt TODO VERSION VERSION.xsl]
    xsl_files = %w[assembly catalog.xml common docsrc eclipse epub epub3 extensions fo
                   highlighting html htmlhelp images javahelp lib log manpages params
                   profiling roundtrip slides template tests tools webhelp website
                   xhtml xhtml-1_1 xhtml5]
    # "log" is listed in xsl_files but not shipped; create an empty one so the
    # install call below does not fail.
    touch "log"
    (prefix/"docbook-xsl").install xsl_files + doc_files
    resource("ns").stage do
      touch "log"
      (prefix/"docbook-xsl-ns").install xsl_files + doc_files
    end
    resource("doc").stage do
      doc.install "doc" => "reference"
    end
    bin.write_exec_script "#{prefix}/docbook-xsl/epub/bin/dbtoepub"
  end

  # Registers both stylesheet variants in the shared XML catalog, and adds
  # rewrite rules mapping the canonical cdn.docbook.org / sourceforge URLs to
  # the local installation.
  def post_install
    etc_catalog = etc/"xml/catalog"
    ENV["XML_CATALOG_FILES"] = etc_catalog

    {
      "xsl"    => "xsl-nons",
      "xsl-ns" => "xsl",
    }.each do |old_name, new_name|
      loc = "file://#{opt_prefix}/docbook-#{old_name}"

      # add/replace catalog entries
      cat_loc = "#{loc}/catalog.xml"
      system "xmlcatalog", "--noout", "--del", cat_loc, etc_catalog
      system "xmlcatalog", "--noout", "--add", "nextCatalog", "", cat_loc, etc_catalog

      # add rewrites for the new and old catalog URLs
      rewrites = ["rewriteSystem", "rewriteURI"]
      [
        "https://cdn.docbook.org/release/#{new_name}",
        "http://docbook.sourceforge.net/release/#{old_name}",
      ].each do |url_prefix|
        [version.to_s, "current"].each do |ver|
          system "xmlcatalog", "--noout", "--del", "#{url_prefix}/#{ver}", etc_catalog
          rewrites.each do |rewrite|
            system "xmlcatalog", "--noout", "--add", rewrite, "#{url_prefix}/#{ver}", loc, etc_catalog
          end
        end
      end
    end
  end

  # NOTE(review): every `system "xmlcatalog", "` invocation below is truncated
  # in this extract — the catalog-lookup arguments are missing.  Restore them
  # from the upstream formula; they are preserved verbatim here.
  test do
    system "xmlcatalog", "
    system "xmlcatalog", "
    system "xmlcatalog", "
    system "xmlcatalog", "
    system "xmlcatalog", "
    system "xmlcatalog", "
    system "xmlcatalog", "
    system "xmlcatalog", "
  end
end
#ifndef <API key>
#define <API key>

// System-information helpers for the benchmark library.
// NOTE(review): the include-guard macro was redacted ("<API key>") in this
// extract; only declarations are visible here, so the semantics below are
// inferred from the names — confirm against the implementation.
namespace benchmark {

// CPU time consumed by the current process (presumably in seconds — confirm).
double MyCPUUsage();

// CPU time consumed by terminated child processes (presumably seconds).
double ChildrenCPUUsage();

// Number of logical CPUs available on this machine.
int NumCPUs();

// Estimated CPU clock rate, in cycles per second.
double CyclesPerSecond();

}  // end namespace benchmark

#endif  // <API key>
<!-- Project/version page header with environment tabs (AngularJS template). -->
<div class="page-header clearfix">
    <h1 class="pull-left m0">
        <a ui-sref="admin.project.detail({id: project.id})"> {{project.name}}</a> {{version.vs}}
        <sub>
            <a class="small" href="" ui-sref="admin.project.version({id: project.id})"></a>
        </sub>
    </h1>
</div>

<!-- One tab per environment; "safe" environments are gated by the
     has-project-safe directive and marked with a star.
     NOTE(review): the second anchor carries two ng-if attributes
     (e.level == 'safe' and hasProjectSafe_) — only one ng-if is honored per
     element in AngularJS; confirm the intended condition. -->
<ul class="nav nav-tabs" role="tablist" >
    <li ng-class="{active: e.id == env.id}" ng-model="env" ng-repeat="e in envs">
        <a ng-if="e.level == 'unsafe'" ui-sref="admin.project.version.conf.list({eid: e.id})">{{e.name}}</a>
        <a ng-if="e.level == 'safe'" has-project-safe="{{project.id}}" ng-if="hasProjectSafe_"
           ui-sref="admin.project.version.conf.list({eid: e.id})">{{e.name}}
            <sup><i class="star"></i></sup>
        </a>
    </li>
</ul>

<a ui-sref="admin.project.version.conf.list({id: $stateParams.id, vid: $stateParams.vid})" class="btn"></a>

<!-- Nested view for the selected configuration. -->
<div ui-view="conf-info"></div>
<!-- include -->
<!DOCTYPE HTML>
<!-- Character-encoding test harness: renders an ISO-8859-8 (Hebrew) page in
     an iframe, relying on browser autodetection, next to a reference image. -->
<html>
<head>
<title> Encodings test suite: ISO-8859-8 IW [hebrew] (autodetect) </title>
</head>
<body>
<h2>Encoding: ISO-8859-8 IW [hebrew] (autodetect)</h2>
<p>The text in the iframe below should look similar to the reference rendering at the bottom.</p>
<h2>Test</h2>
<iframe style="border: 1px solid black" src="ISO-8859-8_IW/<API key>.html"></iframe><br />
<h2>Reference rendering</h2>
<img style="border: 1px solid black" src="ISO-8859-8_IW/ISO-8859-8_IW.jpg"/>
</body>
</html>
from __future__ import unicode_literals import datetime import re import sys import warnings from unittest import skipIf from xml.dom.minidom import parseString from django.contrib.auth.models import User from django.core import serializers from django.core.urlresolvers import reverse from django.db.models import Max, Min from django.http import HttpRequest from django.template import ( Context, RequestContext, Template, TemplateSyntaxError, context_processors, ) from django.test import ( TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import requires_tz_support from django.utils import six, timezone from .forms import ( EventForm, EventLocalizedForm, <API key>, EventModelForm, EventSplitForm, ) from .models import ( AllDayEvent, Event, MaybeEvent, Session, SessionEvent, Timestamp, ) try: import pytz except ImportError: pytz = None # These tests use the EAT (Eastern Africa Time) and ICT (Indochina Time) # who don't have Daylight Saving Time, so we can represent them easily # with FixedOffset, and use them directly as tzinfo in the constructors. # settings.TIME_ZONE is forced to EAT. Most tests use a variant of # datetime.datetime(2011, 9, 1, 13, 20, 30), which translates to # 10:20:30 in UTC and 17:20:30 in ICT. 
# Fixed-offset tzinfo instances used throughout these tests: EAT is UTC+3,
# ICT is UTC+7 (neither observes DST, per the module comment above).
UTC = timezone.utc
EAT = timezone.get_fixed_timezone(180)      # Africa/Nairobi
ICT = timezone.get_fixed_timezone(420)      # Asia/Bangkok


@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=False)
class LegacyDatabaseTests(TestCase):
    # Legacy (USE_TZ=False) behavior: datetimes round-trip naive and are
    # interpreted in the connection's local time zone.
    # NOTE(review): several test-method and feature-flag names were redacted
    # ("<API key>") in this extract and are preserved verbatim.

    def test_naive_datetime(self):
        # A naive datetime round-trips unchanged when USE_TZ is off.
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        self.assertEqual(event.dt, dt)

    @skipUnlessDBFeature('<API key>')
    def <API key>(self):
        # Backends with sub-second precision preserve microseconds.
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        self.assertEqual(event.dt, dt)

    @skipIfDBFeature('<API key>')
    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        # microseconds are lost during a round-trip in the database
        self.assertEqual(event.dt, dt.replace(microsecond=0))

    @skipUnlessDBFeature('supports_timezones')
    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        self.assertIsNone(event.dt.tzinfo)
        # interpret the naive datetime in local time to get the correct value
        self.assertEqual(event.dt.replace(tzinfo=EAT), dt)

    @skipUnlessDBFeature('supports_timezones')
    @skipUnlessDBFeature('<API key>')
    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        self.assertIsNone(event.dt.tzinfo)
        # interpret the naive datetime in local time to get the correct value
        self.assertEqual(event.dt.replace(tzinfo=EAT), dt)

    # This combination actually never happens.
    @skipUnlessDBFeature('supports_timezones')
    @skipIfDBFeature('<API key>')
    def <API key>(self):
        # Aware value, EAT offset, microseconds dropped by the backend.
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        self.assertIsNone(event.dt.tzinfo)
        # interpret the naive datetime in local time to get the correct value
        # microseconds are lost during a round-trip in the database
        self.assertEqual(event.dt.replace(tzinfo=EAT), dt.replace(microsecond=0))

    @skipUnlessDBFeature('supports_timezones')
    @skipIfDBFeature('<API key>')
    def <API key>(self):
        # Aware value expressed in UTC; stored naive, read back in local time.
        dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        self.assertIsNone(event.dt.tzinfo)
        # interpret the naive datetime in local time to get the correct value
        self.assertEqual(event.dt.replace(tzinfo=EAT), dt)

    # This combination is no longer possible since timezone support
    # was removed from the SQLite backend -- it didn't work.
    @skipUnlessDBFeature('supports_timezones')
    @skipUnlessDBFeature('<API key>')
    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        self.assertIsNone(event.dt.tzinfo)
        # django.db.backends.utils.typecast_dt will just drop the
        # timezone, so a round-trip in the database alters the data (!)
# interpret the naive datetime in local time and you get a wrong value self.assertNotEqual(event.dt.replace(tzinfo=EAT), dt) # interpret the naive datetime in original time to get the correct value self.assertEqual(event.dt.replace(tzinfo=UTC), dt) @skipUnlessDBFeature('supports_timezones') @skipIfDBFeature('<API key>') def <API key>(self): dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) Event.objects.create(dt=dt) event = Event.objects.get() self.assertIsNone(event.dt.tzinfo) # interpret the naive datetime in local time to get the correct value self.assertEqual(event.dt.replace(tzinfo=EAT), dt) # This combination is no longer possible since timezone support # was removed from the SQLite backend -- it didn't work. @skipUnlessDBFeature('supports_timezones') @skipUnlessDBFeature('<API key>') def <API key>(self): dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) Event.objects.create(dt=dt) event = Event.objects.get() self.assertIsNone(event.dt.tzinfo) # django.db.backends.utils.typecast_dt will just drop the # timezone, so a round-trip in the database alters the data (!) 
# interpret the naive datetime in local time and you get a wrong value self.assertNotEqual(event.dt.replace(tzinfo=EAT), dt) # interpret the naive datetime in original time to get the correct value self.assertEqual(event.dt.replace(tzinfo=ICT), dt) @skipIfDBFeature('supports_timezones') def <API key>(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) with self.assertRaises(ValueError): Event.objects.create(dt=dt) def <API key>(self): now = datetime.datetime.now() past = now - datetime.timedelta(seconds=2) future = now + datetime.timedelta(seconds=2) Timestamp.objects.create() ts = Timestamp.objects.get() self.assertLess(past, ts.created) self.assertLess(past, ts.updated) self.assertGreater(future, ts.updated) self.assertGreater(future, ts.updated) def test_query_filter(self): dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30) dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30) Event.objects.create(dt=dt1) Event.objects.create(dt=dt2) self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2) self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1) self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1) self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0) def <API key>(self): Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0)) Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0)) self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2) self.assertEqual(Event.objects.filter(dt__month=1).count(), 2) self.assertEqual(Event.objects.filter(dt__day=1).count(), 2) self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2) self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1) self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2) self.assertEqual(Event.objects.filter(dt__second=0).count(), 2) def <API key>(self): # Only min and max make sense for datetimes. 
        Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20))
        Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30))
        Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40))
        result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
        self.assertEqual(result, {
            'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40),
            'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20),
        })

    def <API key>(self):
        # Only min and max make sense for datetimes.
        morning = Session.objects.create(name='morning')
        afternoon = Session.objects.create(name='afternoon')
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20), session=afternoon)
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30), session=afternoon)
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40), session=morning)
        morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40)
        afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
            [morning_min_dt, afternoon_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
            [morning_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
            [afternoon_min_dt],
            transform=lambda d: d.dt)

    def <API key>(self):
        # QuerySet.datetimes() truncation at each supported granularity.
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0))
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'year'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'month'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'day'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'hour'),
            [datetime.datetime(2011, 1, 1, 1, 0, 0),
             datetime.datetime(2011, 1, 1, 4, 0, 0)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'minute'),
            [datetime.datetime(2011, 1, 1, 1, 30, 0),
             datetime.datetime(2011, 1, 1, 4, 30, 0)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'second'),
            [datetime.datetime(2011, 1, 1, 1, 30, 0),
             datetime.datetime(2011, 1, 1, 4, 30, 0)],
            transform=lambda d: d)

    def test_raw_sql(self):
        # Regression test for #17755
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        event = Event.objects.create(dt=dt)
        self.assertQuerysetEqual(
            Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
            [event],
            transform=lambda d: d)

    def <API key>(self):
        # Regression test for #17742
        day = datetime.date(2011, 9, 1)
        AllDayEvent.objects.create(day=day)
        # This is 2011-09-02T01:30:00+03:00 in EAT
        dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC)
        self.assertTrue(AllDayEvent.objects.filter(day__gte=dt).exists())


@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=True)
class NewDatabaseTests(TestCase):
    # USE_TZ=True behavior: values are stored in UTC and returned aware;
    # naive inputs trigger a RuntimeWarning and are assumed local.

    @requires_tz_support
    def test_naive_datetime(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            Event.objects.create(dt=dt)
            self.assertEqual(len(recorded), 1)
            msg = str(recorded[0].message)
            self.assertTrue(msg.startswith("DateTimeField Event.dt received "
                                           "a naive datetime"))
        event = Event.objects.get()
        # naive datetimes are interpreted in local time
        self.assertEqual(event.dt, dt.replace(tzinfo=EAT))

    @requires_tz_support
    def <API key>(self):
        # A bare date assigned to a DateTimeField also warns as naive.
        dt = datetime.date(2011, 9, 1)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            Event.objects.create(dt=dt)
            self.assertEqual(len(recorded), 1)
            msg = str(recorded[0].message)
            self.assertTrue(msg.startswith("DateTimeField Event.dt received "
                                           "a naive datetime"))
        event = Event.objects.get()
        self.assertEqual(event.dt, datetime.datetime(2011, 9, 1, tzinfo=EAT))

    @requires_tz_support
    @skipUnlessDBFeature('<API key>')
    def <API key>(self):
        # Naive + microseconds, backend keeps sub-second precision.
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            Event.objects.create(dt=dt)
            self.assertEqual(len(recorded), 1)
            msg = str(recorded[0].message)
            self.assertTrue(msg.startswith("DateTimeField Event.dt received "
                                           "a naive datetime"))
        event = Event.objects.get()
        # naive datetimes are interpreted in local time
        self.assertEqual(event.dt, dt.replace(tzinfo=EAT))

    @requires_tz_support
    @skipIfDBFeature('<API key>')
    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            Event.objects.create(dt=dt)
            self.assertEqual(len(recorded), 1)
            msg = str(recorded[0].message)
            self.assertTrue(msg.startswith("DateTimeField Event.dt received "
                                           "a naive datetime"))
        event = Event.objects.get()
        # microseconds are lost during a round-trip in the database
        # naive datetimes are interpreted in local time
        self.assertEqual(event.dt, dt.replace(microsecond=0, tzinfo=EAT))

    def <API key>(self):
        # Aware datetimes round-trip exactly under USE_TZ=True.
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        self.assertEqual(event.dt, dt)

    @skipUnlessDBFeature('<API key>')
    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        self.assertEqual(event.dt, dt)

    @skipIfDBFeature('<API key>')
    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        # microseconds are lost during a round-trip in the database
        self.assertEqual(event.dt, dt.replace(microsecond=0))

    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
self.assertEqual(event.dt, dt) def <API key>(self): dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) Event.objects.create(dt=dt) event = Event.objects.get() self.assertEqual(event.dt, dt) def <API key>(self): now = timezone.now() past = now - datetime.timedelta(seconds=2) future = now + datetime.timedelta(seconds=2) Timestamp.objects.create() ts = Timestamp.objects.get() self.assertLess(past, ts.created) self.assertLess(past, ts.updated) self.assertGreater(future, ts.updated) self.assertGreater(future, ts.updated) def test_query_filter(self): dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT) dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30, tzinfo=EAT) Event.objects.create(dt=dt1) Event.objects.create(dt=dt2) self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2) self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1) self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1) self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0) @skipIf(pytz is None, "this test requires pytz") def <API key>(self): tz = pytz.timezone('Europe/Paris') dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=tz) Event.objects.create(dt=dt) next = dt + datetime.timedelta(seconds=3) prev = dt - datetime.timedelta(seconds=3) self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1) self.assertEqual(Event.objects.filter(dt__exact=next).count(), 0) self.assertEqual(Event.objects.filter(dt__in=(prev, next)).count(), 0) self.assertEqual(Event.objects.filter(dt__in=(prev, dt, next)).count(), 1) self.assertEqual(Event.objects.filter(dt__range=(prev, next)).count(), 1) @requires_tz_support def <API key>(self): dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT) Event.objects.create(dt=dt) dt = dt.replace(tzinfo=None) with warnings.catch_warnings(record=True) as recorded: warnings.simplefilter('always') # naive datetimes are interpreted in local time self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1) 
            self.assertEqual(Event.objects.filter(dt__lte=dt).count(), 1)
            self.assertEqual(Event.objects.filter(dt__gt=dt).count(), 0)
            # One warning per naive-datetime filter above.
            self.assertEqual(len(recorded), 3)
            for warning in recorded:
                msg = str(warning.message)
                self.assertTrue(msg.startswith("DateTimeField Event.dt "
                                               "received a naive datetime"))

    @skipUnlessDBFeature('<API key>')
    def <API key>(self):
        # Date-part lookups are evaluated in the current (local) time zone.
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
        self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2)
        self.assertEqual(Event.objects.filter(dt__month=1).count(), 2)
        self.assertEqual(Event.objects.filter(dt__day=1).count(), 2)
        self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2)
        self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1)
        self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
        self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)

    @skipUnlessDBFeature('<API key>')
    def <API key>(self):
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
        with timezone.override(UTC):
            # These two dates fall in the same day in EAT, but in different days,
            # years and months in UTC.
            self.assertEqual(Event.objects.filter(dt__year=2011).count(), 1)
            self.assertEqual(Event.objects.filter(dt__month=1).count(), 1)
            self.assertEqual(Event.objects.filter(dt__day=1).count(), 1)
            self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 1)
            self.assertEqual(Event.objects.filter(dt__hour=22).count(), 1)
            self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
            self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)

    def <API key>(self):
        # Only min and max make sense for datetimes.
        Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT))
        result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
        self.assertEqual(result, {
            'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT),
            'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT),
        })

    def <API key>(self):
        # Only min and max make sense for datetimes.
        morning = Session.objects.create(name='morning')
        afternoon = Session.objects.create(name='afternoon')
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), session=afternoon)
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), session=afternoon)
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), session=morning)
        morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)
        afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
            [morning_min_dt, afternoon_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
            [morning_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
            [afternoon_min_dt],
            transform=lambda d: d.dt)

    @skipUnlessDBFeature('<API key>')
    def <API key>(self):
        # datetimes() results are aware, expressed in the local zone (EAT).
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'year'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'month'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'day'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'hour'),
            [datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=EAT),
             datetime.datetime(2011, 1, 1, 4, 0, 0, tzinfo=EAT)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'minute'),
            [datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
             datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'second'),
            [datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
             datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
            transform=lambda d: d)

    @skipUnlessDBFeature('<API key>')
    def <API key>(self):
        # With an overridden zone (UTC), truncation happens in that zone.
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
        with timezone.override(UTC):
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'year'),
                [datetime.datetime(2010, 1, 1, 0, 0, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'month'),
                [datetime.datetime(2010, 12, 1, 0, 0, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'day'),
                [datetime.datetime(2010, 12, 31, 0, 0, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'hour'),
                [datetime.datetime(2010, 12, 31, 22, 0, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'minute'),
                [datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'second'),
                [datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
                transform=lambda d: d)

    def test_raw_sql(self):
        # Regression test for #17755
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        event = Event.objects.create(dt=dt)
        self.assertQuerysetEqual(
            Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
            [event],
            transform=lambda d: d)

    @requires_tz_support
    def <API key>(self):
        # Regression test for #17742
        day = datetime.date(2011, 9, 1)
        AllDayEvent.objects.create(day=day)
        # This is 2011-09-02T01:30:00+03:00 in EAT
        dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC)
        self.assertFalse(AllDayEvent.objects.filter(day__gte=dt).exists())

    def test_null_datetime(self):
        # Regression test for #17294
        e = MaybeEvent.objects.create()
        self.assertEqual(e.dt, None)


@override_settings(TIME_ZONE='Africa/Nairobi')
class SerializationTests(TestCase):
    # NOTE(review): this class runs past the end of the visible extract; the
    # final method below is intentionally left incomplete.
    #
    # Backend-specific notes:
    # - JSON supports only milliseconds, microseconds will be truncated.
    # - PyYAML dumps the UTC offset correctly for timezone-aware datetimes,
    #   but when it loads this representation, it subtracts the offset and
    #   (comment truncated in this extract — presumably "returns a naive
    #   datetime"; confirm against the original source).
    # Tests are adapted to take these quirks into account.

    def <API key>(self, objects, dt):
        # Assert on the 'dt' field of a python-serialized object list.
        self.assertEqual(objects[0]['fields']['dt'], dt)

    def <API key>(self, json, dt):
        # Assert the JSON serialization contains the expected dt string.
        self.assertIn('"fields": {"dt": "%s"}' % dt, json)

    def <API key>(self, xml, dt):
        # Assert the first <field> element of the XML output matches dt.
        field = parseString(xml).<API key>('field')[0]
        self.assertXMLEqual(field.childNodes[0].wholeText, dt)

    def <API key>(self, yaml, dt):
        # Depending on the yaml dumper, '!timestamp' might be absent
        six.assertRegex(self, yaml,
                        r"\n fields: {dt: !(!timestamp)? '%s'}" % re.escape(dt))

    def test_naive_datetime(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30)

        data = serializers.serialize('python', [Event(dt=dt)])
        self.<API key>(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)

        data = serializers.serialize('json', [Event(dt=dt)])
        self.<API key>(data, "2011-09-01T13:20:30")
        obj = next(serializers.deserialize('json', data)).object
        self.assertEqual(obj.dt, dt)

        data = serializers.serialize('xml', [Event(dt=dt)])
        self.<API key>(data, "2011-09-01T13:20:30")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)

        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.<API key>(data, "2011-09-01 13:20:30")
            obj = next(serializers.deserialize('yaml', data)).object
            self.assertEqual(obj.dt, dt)

    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)

        data = serializers.serialize('python', [Event(dt=dt)])
        self.<API key>(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)

        data = serializers.serialize('json', [Event(dt=dt)])
        # JSON keeps only millisecond precision.
        self.<API key>(data, "2011-09-01T13:20:30.405")
        obj = next(serializers.deserialize('json', data)).object
        self.assertEqual(obj.dt, dt.replace(microsecond=405000))

        data = serializers.serialize('xml', [Event(dt=dt)])
        self.<API key>(data, "2011-09-01T13:20:30.405060")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)

        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.<API key>(data, "2011-09-01 13:20:30.405060")
            obj = next(serializers.deserialize('yaml', data)).object
            self.assertEqual(obj.dt, dt)

    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 17, 20, 30, 405060, tzinfo=ICT)

        data = serializers.serialize('python', [Event(dt=dt)])
        self.<API key>(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)

        data = serializers.serialize('json', [Event(dt=dt)])
        self.<API key>(data, "2011-09-01T17:20:30.405+07:00")
        obj = next(serializers.deserialize('json', data)).object
        self.assertEqual(obj.dt, dt.replace(microsecond=405000))

        data = serializers.serialize('xml', [Event(dt=dt)])
        self.<API key>(data, "2011-09-01T17:20:30.405060+07:00")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)

        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.<API key>(data, "2011-09-01 17:20:30.405060+07:00")
            obj = next(serializers.deserialize('yaml', data)).object
            # PyYAML loads aware datetimes as naive UTC (see class comment).
            self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)

    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)

        data = serializers.serialize('python', [Event(dt=dt)])
        self.<API key>(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)

        data = serializers.serialize('json', [Event(dt=dt)])
        self.<API key>(data, "2011-09-01T10:20:30Z")
        obj = next(serializers.deserialize('json', data)).object
        self.assertEqual(obj.dt, dt)

        data = serializers.serialize('xml', [Event(dt=dt)])
        self.<API key>(data, "2011-09-01T10:20:30+00:00")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)

        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.<API key>(data, "2011-09-01 10:20:30+00:00")
            obj = next(serializers.deserialize('yaml', data)).object
            self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)

    def <API key>(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)

        data = serializers.serialize('python', [Event(dt=dt)])
        self.<API key>(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)

        data = serializers.serialize('json',
[Event(dt=dt)]) self.<API key>(data, "2011-09-01T13:20:30+03:00") obj = next(serializers.deserialize('json', data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize('xml', [Event(dt=dt)]) self.<API key>(data, "2011-09-01T13:20:30+03:00") obj = next(serializers.deserialize('xml', data)).object self.assertEqual(obj.dt, dt) if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer): data = serializers.serialize('yaml', [Event(dt=dt)]) self.<API key>(data, "2011-09-01 13:20:30+03:00") obj = next(serializers.deserialize('yaml', data)).object self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) def <API key>(self): dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) data = serializers.serialize('python', [Event(dt=dt)]) self.<API key>(data, dt) obj = next(serializers.deserialize('python', data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize('json', [Event(dt=dt)]) self.<API key>(data, "2011-09-01T17:20:30+07:00") obj = next(serializers.deserialize('json', data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize('xml', [Event(dt=dt)]) self.<API key>(data, "2011-09-01T17:20:30+07:00") obj = next(serializers.deserialize('xml', data)).object self.assertEqual(obj.dt, dt) if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer): data = serializers.serialize('yaml', [Event(dt=dt)]) self.<API key>(data, "2011-09-01 17:20:30+07:00") obj = next(serializers.deserialize('yaml', data)).object self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) @override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True) class TemplateTests(TestCase): @requires_tz_support def <API key>(self): """ Test the {% localtime %} templatetag and related filters. 
""" datetimes = { 'utc': datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), 'eat': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), 'ict': datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT), 'naive': datetime.datetime(2011, 9, 1, 13, 20, 30), } templates = { 'notag': Template("{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}"), 'noarg': Template("{% load tz %}{% localtime %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"), 'on': Template("{% load tz %}{% localtime on %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"), 'off': Template("{% load tz %}{% localtime off %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"), } # Transform a list of keys in 'datetimes' to the expected template # output. This makes the definition of 'results' more readable. def t(*result): return '|'.join(datetimes[key].isoformat() for key in result) # Results for USE_TZ = True results = { 'utc': { 'notag': t('eat', 'eat', 'utc', 'ict'), 'noarg': t('eat', 'eat', 'utc', 'ict'), 'on': t('eat', 'eat', 'utc', 'ict'), 'off': t('utc', 'eat', 'utc', 'ict'), }, 'eat': { 'notag': t('eat', 'eat', 'utc', 'ict'), 'noarg': t('eat', 'eat', 'utc', 'ict'), 'on': t('eat', 'eat', 'utc', 'ict'), 'off': t('eat', 'eat', 'utc', 'ict'), }, 'ict': { 'notag': t('eat', 'eat', 'utc', 'ict'), 'noarg': t('eat', 'eat', 'utc', 'ict'), 'on': t('eat', 'eat', 'utc', 'ict'), 'off': t('ict', 'eat', 'utc', 'ict'), }, 'naive': { 'notag': t('naive', 'eat', 'utc', 'ict'), 'noarg': t('naive', 'eat', 'utc', 'ict'), 'on': t('naive', 'eat', 'utc', 'ict'), 'off': t('naive', 'eat', 'utc', 'ict'), } } for k1, dt in six.iteritems(datetimes): for k2, tpl in six.iteritems(templates): ctx = Context({'dt': dt, 'ICT': ICT}) actual = tpl.render(ctx) expected = results[k1][k2] self.assertEqual(actual, expected, '%s / %s: %r != %r' % (k1, k2, actual, expected)) # Changes for USE_TZ = False 
results['utc']['notag'] = t('utc', 'eat', 'utc', 'ict') results['ict']['notag'] = t('ict', 'eat', 'utc', 'ict') with self.settings(USE_TZ=False): for k1, dt in six.iteritems(datetimes): for k2, tpl in six.iteritems(templates): ctx = Context({'dt': dt, 'ICT': ICT}) actual = tpl.render(ctx) expected = results[k1][k2] self.assertEqual(actual, expected, '%s / %s: %r != %r' % (k1, k2, actual, expected)) @skipIf(pytz is None, "this test requires pytz") def <API key>(self): """ Test the |localtime, |utc, and |timezone filters with pytz. """ # Use a pytz timezone as local time tpl = Template("{% load tz %}{{ dt|localtime }}|{{ dt|utc }}") ctx = Context({'dt': datetime.datetime(2011, 9, 1, 12, 20, 30)}) with self.settings(TIME_ZONE='Europe/Paris'): self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00|2011-09-01T10:20:30+00:00") # Use a pytz timezone as argument tpl = Template("{% load tz %}{{ dt|timezone:tz }}") ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': pytz.timezone('Europe/Paris')}) self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") # Use a pytz timezone name as argument tpl = Template("{% load tz %}{{ dt|timezone:'Europe/Paris' }}") ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': pytz.timezone('Europe/Paris')}) self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") def <API key>(self): with self.assertRaises(TemplateSyntaxError): Template("{% load tz %}{% localtime foo %}{% endlocaltime %}").render() def <API key>(self): """ Test the |localtime, |utc, and |timezone filters on bad inputs. 
""" tpl = Template("{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:tz }}") with self.settings(USE_TZ=True): # bad datetime value ctx = Context({'dt': None, 'tz': ICT}) self.assertEqual(tpl.render(ctx), "None|||") ctx = Context({'dt': 'not a date', 'tz': ICT}) self.assertEqual(tpl.render(ctx), "not a date|||") # bad timezone value tpl = Template("{% load tz %}{{ dt|timezone:tz }}") ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': None}) self.assertEqual(tpl.render(ctx), "") ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': 'not a tz'}) self.assertEqual(tpl.render(ctx), "") @requires_tz_support def <API key>(self): """ Test the {% timezone %} templatetag. """ tpl = Template( "{% load tz %}" "{{ dt }}|" "{% timezone tz1 %}" "{{ dt }}|" "{% timezone tz2 %}" "{{ dt }}" "{% endtimezone %}" "{% endtimezone %}" ) ctx = Context({'dt': datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), 'tz1': ICT, 'tz2': None}) self.assertEqual(tpl.render(ctx), "2011-09-01T13:20:30+03:00|2011-09-01T17:20:30+07:00|2011-09-01T13:20:30+03:00") @skipIf(pytz is None, "this test requires pytz") def <API key>(self): """ Test the {% timezone %} templatetag with pytz. 
""" tpl = Template("{% load tz %}{% timezone tz %}{{ dt }}{% endtimezone %}") # Use a pytz timezone as argument ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), 'tz': pytz.timezone('Europe/Paris')}) self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") # Use a pytz timezone name as argument ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), 'tz': 'Europe/Paris'}) self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") def <API key>(self): with self.assertRaises(TemplateSyntaxError): Template("{% load tz %}{% timezone %}{% endtimezone %}").render() with self.assertRaises(ValueError if pytz is None else pytz.<API key>): Template("{% load tz %}{% timezone tz %}{% endtimezone %}").render(Context({'tz': 'foobar'})) @skipIf(sys.platform.startswith('win'), "Windows uses non-standard time zone names") def <API key>(self): """ Test the {% <API key> %} templatetag. """ tpl = Template("{% load tz %}{% <API key> as time_zone %}{{ time_zone }}") self.assertEqual(tpl.render(Context()), "Africa/Nairobi" if pytz else "EAT") with timezone.override(UTC): self.assertEqual(tpl.render(Context()), "UTC") tpl = Template("{% load tz %}{% timezone tz %}{% <API key> as time_zone %}{% endtimezone %}{{ time_zone }}") self.assertEqual(tpl.render(Context({'tz': ICT})), "+0700") with timezone.override(UTC): self.assertEqual(tpl.render(Context({'tz': ICT})), "+0700") @skipIf(pytz is None, "this test requires pytz") def <API key>(self): """ Test the {% <API key> %} templatetag with pytz. 
""" tpl = Template("{% load tz %}{% <API key> as time_zone %}{{ time_zone }}") with timezone.override(pytz.timezone('Europe/Paris')): self.assertEqual(tpl.render(Context()), "Europe/Paris") tpl = Template("{% load tz %}{% timezone 'Europe/Paris' %}{% <API key> as time_zone %}{% endtimezone %}{{ time_zone }}") self.assertEqual(tpl.render(Context()), "Europe/Paris") def <API key>(self): with self.assertRaises(TemplateSyntaxError): Template("{% load tz %}{% <API key> %}").render() @skipIf(sys.platform.startswith('win'), "Windows uses non-standard time zone names") def <API key>(self): """ Test the django.template.context_processors.tz template context processor. """ tpl = Template("{{ TIME_ZONE }}") context = Context() self.assertEqual(tpl.render(context), "") request_context = RequestContext(HttpRequest(), processors=[context_processors.tz]) self.assertEqual(tpl.render(request_context), "Africa/Nairobi" if pytz else "EAT") @requires_tz_support def <API key>(self): tpl = Template("{{ dt|date:'Y-m-d' }} at {{ dt|time:'H:i:s' }}") ctx = Context({'dt': datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)}) self.assertEqual(tpl.render(ctx), "2011-09-01 at 23:20:20") with timezone.override(ICT): self.assertEqual(tpl.render(ctx), "2011-09-02 at 03:20:20") def <API key>(self): tpl = Template("{% load tz %}{% localtime off %}{{ dt|date:'Y-m-d' }} at {{ dt|time:'H:i:s' }}{% endlocaltime %}") ctx = Context({'dt': datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)}) self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20") with timezone.override(ICT): self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20") def <API key>(self): # Regression for #17274 tpl = Template("{% load tz %}{{ dt }}") ctx = Context({'dt': datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT)}) with self.settings(TIME_ZONE=None): # the actual value depends on the system time zone of the host self.assertTrue(tpl.render(ctx).startswith("2011")) @requires_tz_support def <API key>(self): # Regression 
for #17343 tpl = Template("{% now \"O\" %}") self.assertEqual(tpl.render(Context({})), "+0300") with timezone.override(ICT): self.assertEqual(tpl.render(Context({})), "+0700") @override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=False) class LegacyFormsTests(TestCase): def test_form(self): form = EventForm({'dt': '2011-09-01 13:20:30'}) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 13, 20, 30)) @skipIf(pytz is None, "this test requires pytz") def <API key>(self): form = EventForm({'dt': '2011-03-27 02:30:00'}) with timezone.override(pytz.timezone('Europe/Paris')): # this is obviously a bug self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 3, 27, 2, 30, 0)) @skipIf(pytz is None, "this test requires pytz") def <API key>(self): form = EventForm({'dt': '2011-10-30 02:30:00'}) with timezone.override(pytz.timezone('Europe/Paris')): # this is obviously a bug self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 10, 30, 2, 30, 0)) def test_split_form(self): form = EventSplitForm({'dt_0': '2011-09-01', 'dt_1': '13:20:30'}) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 13, 20, 30)) def test_model_form(self): EventModelForm({'dt': '2011-09-01 13:20:30'}).save() e = Event.objects.get() self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 13, 20, 30)) @override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True) class NewFormsTests(TestCase): @requires_tz_support def test_form(self): form = EventForm({'dt': '2011-09-01 13:20:30'}) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) def <API key>(self): form = EventForm({'dt': '2011-09-01 17:20:30'}) with timezone.override(ICT): self.assertTrue(form.is_valid()) 
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) def <API key>(self): form = EventForm({'dt': '2011-09-01 17:20:30+07:00'}) # Datetime inputs formats don't allow providing a time zone. self.assertFalse(form.is_valid()) @skipIf(pytz is None, "this test requires pytz") def <API key>(self): with timezone.override(pytz.timezone('Europe/Paris')): form = EventForm({'dt': '2011-03-27 02:30:00'}) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['dt'], ["2011-03-27 02:30:00 couldn't be interpreted in time zone " "Europe/Paris; it may be ambiguous or it may not exist."]) @skipIf(pytz is None, "this test requires pytz") def <API key>(self): with timezone.override(pytz.timezone('Europe/Paris')): form = EventForm({'dt': '2011-10-30 02:30:00'}) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['dt'], ["2011-10-30 02:30:00 couldn't be interpreted in time zone " "Europe/Paris; it may be ambiguous or it may not exist."]) @requires_tz_support def test_split_form(self): form = EventSplitForm({'dt_0': '2011-09-01', 'dt_1': '13:20:30'}) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) @requires_tz_support def test_localized_form(self): form = EventLocalizedForm(initial={'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)}) with timezone.override(ICT): self.assertIn("2011-09-01 17:20:30", str(form)) @requires_tz_support def test_model_form(self): EventModelForm({'dt': '2011-09-01 13:20:30'}).save() e = Event.objects.get() self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) @requires_tz_support def <API key>(self): form = <API key>(instance=Event(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))) with timezone.override(ICT): self.assertIn("2011-09-01 17:20:30", str(form)) @override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True, 
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='timezones.urls') class AdminTests(TestCase): @classmethod def setUpTestData(cls): # password = "secret" cls.u1 = User.objects.create( id=100, password='sha1$995a3$<SHA1-like>', last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super', first_name='Super', last_name='User', email='super@example.com', is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10) ) def setUp(self): self.client.login(username='super', password='secret') @requires_tz_support def test_changelist(self): e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) response = self.client.get(reverse('admin:<API key>')) self.assertContains(response, e.dt.astimezone(EAT).isoformat()) def <API key>(self): e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) with timezone.override(ICT): response = self.client.get(reverse('admin:<API key>')) self.assertContains(response, e.dt.astimezone(ICT).isoformat()) @requires_tz_support def <API key>(self): e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) response = self.client.get(reverse('admin:<API key>', args=(e.pk,))) self.assertContains(response, e.dt.astimezone(EAT).date().isoformat()) self.assertContains(response, e.dt.astimezone(EAT).time().isoformat()) def <API key>(self): e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) with timezone.override(ICT): response = self.client.get(reverse('admin:<API key>', args=(e.pk,))) self.assertContains(response, e.dt.astimezone(ICT).date().isoformat()) self.assertContains(response, e.dt.astimezone(ICT).time().isoformat()) @requires_tz_support def <API key>(self): Timestamp.objects.create() # re-fetch the object for backends that lose microseconds (MySQL) t = Timestamp.objects.get() response = self.client.get(reverse('admin:<API key>', args=(t.pk,))) 
self.assertContains(response, t.created.astimezone(EAT).isoformat()) def <API key>(self): Timestamp.objects.create() # re-fetch the object for backends that lose microseconds (MySQL) t = Timestamp.objects.get() with timezone.override(ICT): response = self.client.get(reverse('admin:<API key>', args=(t.pk,))) self.assertContains(response, t.created.astimezone(ICT).isoformat())
#include "ipc/<API key>.h" #include <windows.h> namespace IPC { namespace internal { HandleAttachmentWin::HandleAttachmentWin(const HANDLE& handle) : handle_(handle) { } HandleAttachmentWin::~HandleAttachmentWin() { } HandleAttachmentWin::BrokerableType HandleAttachmentWin::GetBrokerableType() const { return WIN_HANDLE; } HandleAttachmentWin::WireFormat HandleAttachmentWin::GetWireFormat( const base::ProcessId& destination) const { WireFormat format; format.handle = HandleToLong(handle_); format.attachment_id = GetIdentifier(); format.destination_process = destination; return format; } } // namespace internal } // namespace IPC
package org.eclipse.collections.impl.utility;

import org.eclipse.collections.impl.test.Verify;
import org.junit.Test;

/**
 * JUnit test for {@link OrderedIterate}.
 */
public class OrderedIterateTest {
    // NOTE(review): the test-method name and the Verify assertion were
    // redacted in this copy ("<API key>"). From the Verify.*(Class) call
    // shape, this presumably asserts that OrderedIterate is a well-formed
    // utility class (e.g. non-instantiable, private constructor) — confirm
    // against the original source before relying on this description.
    @Test
    public void <API key>() {
        Verify.<API key>(OrderedIterate.class);
    }
}
#include "chrome/browser/extensions/extension_apitest.h" #include <vector> #include "base/command_line.h" #include "base/memory/ref_counted.h" #include "base/strings/stringprintf.h" #include "chrome/browser/chromeos/extensions/<API key>.h" #include "chrome/browser/chromeos/input_method/input_method_util.h" #include "chrome/common/chrome_features.h" #include "chrome/common/pref_names.h" #include "chromeos/chromeos_switches.h" #include "components/prefs/pref_service.h" #include "content/public/browser/<API key>.h" #include "content/public/browser/<API key>.h" #include "content/public/browser/<API key>.h" #include "content/public/common/content_switches.h" #include "extensions/common/switches.h" #include "extensions/browser/api/test/test_api.h" #include "extensions/browser/notification_types.h" #include "extensions/test/<API key>.h" #include "testing/gtest/include/gtest/gtest.h" #include "ui/base/ime/chromeos/extension_ime_util.h" #include "ui/base/ime/chromeos/<API key>.h" #include "ui/base/ime/chromeos/<API key>.h" #include "ui/base/ime/ime_bridge.h" using namespace chromeos::input_method; namespace { const char <API key>[] = "fr"; const char <API key>[] = "xkb:us::eng"; const char kBackgroundReady[] = "ready"; const char kTestIMEID[] = "<API key>"; const char kTestIMEID2[] = "<API key>"; // Class that listens for the JS message. class TestListener : public content::<API key> { public: TestListener() { registrar_.Add(this, extensions::<API key>, content::NotificationService::AllSources()); } ~TestListener() override {} // Implements the content::<API key> interface. void Observe(int type, const content::NotificationSource& source, const content::NotificationDetails& details) override { const std::string& content = *content::Details<std::string>(details).ptr(); if (content == kBackgroundReady) { // Initializes IMF for testing when receives ready message from // background. 
InputMethodManager* manager = InputMethodManager::Get(); manager->GetInputMethodUtil()-><API key>( *<API key>().<API key>()); std::vector<std::string> keyboard_layouts; keyboard_layouts.push_back( chromeos::extension_ime_util::<API key>( <API key>)); manager->GetActiveIMEState()->EnableLoginLayouts(<API key>, keyboard_layouts); } } private: content::<API key> registrar_; }; class <API key> : public ExtensionApiTest { void SetUpCommandLine(base::CommandLine* command_line) override { ExtensionApiTest::SetUpCommandLine(command_line); command_line->AppendSwitchASCII( extensions::switches::<API key>, "<API key>"); command_line->AppendSwitchASCII(switches::kEnableFeatures, features::kOptInImeMenu.name); } }; } // namespace <API key>(<API key>, Basic) { // Listener for extension's background ready. TestListener listener; ASSERT_TRUE(RunExtensionTest("input_method/basic")) << message_; } <API key>(<API key>, ImeMenuActivation) { // Listener for IME menu initial state ready. <API key> config_listener("config_ready", false); // Listener for IME menu event ready. <API key> event_listener("event_ready", false); browser()->profile()->GetPrefs()->SetBoolean(prefs::<API key>, true); // Test the initial state and add listener for IME menu activation change. ASSERT_TRUE( LoadExtension(test_data_dir_.AppendASCII("input_method/ime_menu"))); ASSERT_TRUE(config_listener.WaitUntilSatisfied()) << message_; // Trigger chrome.inputMethodPrivate.<API key>() event. browser()->profile()->GetPrefs()->SetBoolean(prefs::<API key>, false); // Test that the extension gets the IME activation change event properly. 
ASSERT_TRUE(event_listener.WaitUntilSatisfied()) << message_; } <API key>(<API key>, ImeMenuAPITest) { <API key> activated_listener("activated", false); <API key> menu_listener("get_menu_update", false); <API key> <API key>("get_menu_activated", false); <API key> list_listenter("list_change", false); browser()->profile()->GetPrefs()->SetBoolean(prefs::<API key>, true); ASSERT_TRUE( LoadExtension(test_data_dir_.AppendASCII("input_method/ime_menu2"))); std::vector<std::string> extension_ime_ids; extension_ime_ids.push_back(kTestIMEID); extension_ime_ids.push_back(kTestIMEID2); InputMethodManager::Get()->GetActiveIMEState()-><API key>( &extension_ime_ids); <API key> extension_imes; InputMethodManager::Get()->GetActiveIMEState()-><API key>( &extension_imes); InputMethodManager::Get()->GetActiveIMEState()->ChangeInputMethod( kTestIMEID, false /* show_message */); ui::<API key>* engine_handler = ui::IMEBridge::Get()-><API key>(); ASSERT_TRUE(engine_handler); engine_handler->Enable("test"); ASSERT_TRUE(activated_listener.WaitUntilSatisfied()) << message_; ASSERT_TRUE(menu_listener.WaitUntilSatisfied()) << message_; ASSERT_TRUE(<API key>.WaitUntilSatisfied()) << message_; InputMethodManager::Get()->GetActiveIMEState()->ChangeInputMethod( kTestIMEID2, false /* show_message */); engine_handler->Enable("test2"); ASSERT_TRUE(list_listenter.WaitUntilSatisfied()) << message_; }
package org.locationtech.geogig.geotools.cli.geopkg; import org.geotools.data.DataStore; import org.locationtech.geogig.cli.CLICommand; import org.locationtech.geogig.cli.annotation.ReadOnly; import org.locationtech.geogig.geotools.cli.DataStoreDescribe; import org.locationtech.geogig.geotools.plumbing.DescribeOp; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; /** * Describes a table from a Geopackage database. * * Geopackage CLI proxy for {@link DescribeOp} * * @see DescribeOp */ @ReadOnly @Parameters(commandNames = "describe", commandDescription = "Describe a Geopackage table") public class GeopkgDescribe extends DataStoreDescribe implements CLICommand { /** * Common arguments for Geopackage commands. */ @ParametersDelegate final GeopkgCommonArgs commonArgs = new GeopkgCommonArgs(); final GeopkgSupport support = new GeopkgSupport(); @Override protected DataStore getDataStore() { return support.getDataStore(commonArgs); } }
<!doctype html>
<html>
<head>
<title>Draggable select</title>
<style type="text/css">
select { border: 1px solid orange; border-top-width: 20px; }
</style>
<script type="text/javascript">
window.onload = function () {
  // NOTE(review): the DOM lookup name was redacted in this copy
  // ("document.<API key>('select')"); from the 'select' argument it is
  // presumably document.getElementsByTagName — confirm against the original.
  // Starting a drag on the select advertises a copy effect with dummy
  // payload so the drag is "real" to the browser.
  document.<API key>('select')[0].ondragstart = function (e) {
    e.dataTransfer.effectAllowed = 'copy';
    e.dataTransfer.setData('Text', 'dummy text');
  };
};
</script>
</head>
<body>
<p>Press your mouse button down on the orange block and drag downwards. It should open and select items in the dropdown, and should <strong>not</strong> drag the block or text.</p>
<div>
<select draggable="true">
<option>Option 1</option>
<option>Option 2</option>
<option>Option 3</option>
</select>
</div>
</body>
</html>
require 'test_helper'

# Placeholder unit test for the Notification model, as generated by the
# Rails scaffold. The single assertion below only verifies that the test
# harness is wired up; replace it with real tests of Notification behavior.
class NotificationTest < ActiveSupport::TestCase
  # Trivial sanity check — always passes.
  test "the truth" do
    assert true
  end
end
'use strict'; const Platform = require('Platform'); const <API key> = require('<API key>'); const <API key> = require('<API key>'); const UIManager = require('UIManager'); const <API key> = require('<API key>'); const insetsDiffer = require('insetsDiffer'); const matricesDiffer = require('matricesDiffer'); const pointsDiffer = require('pointsDiffer'); const processColor = require('processColor'); const resolveAssetSource = require('resolveAssetSource'); const sizesDiffer = require('sizesDiffer'); const verifyPropTypes = require('verifyPropTypes'); /* $FlowFixMe(>=0.54.0 site=react_native_oss) This comment suppresses an error * found when Flow v0.54 was deployed. To see the error delete this comment and * run Flow. */ const invariant = require('fbjs/lib/invariant'); const warning = require('fbjs/lib/warning'); /** * Used to create React components that directly wrap native component * implementations. Config information is extracted from data exported from the * UIManager module. You should also wrap the native component in a * hand-written component with full propTypes definitions and other * documentation - pass the hand-written component in as `componentInterface` to * verify all the native props are documented via `propTypes`. * * If some native props shouldn't be exposed in the wrapper interface, you can * pass null for `componentInterface` and call `verifyPropTypes` directly * with `nativePropsToIgnore`; * * Common types are lined up with the appropriate prop differs with * `TypeToDifferMap`. Non-scalar types not in the map default to `deepDiffer`. 
*/ import type {ComponentInterface} from 'verifyPropTypes'; let <API key>: boolean = false; function <API key>( viewName: string, componentInterface?: ?ComponentInterface, extraConfig?: ?{nativeOnly?: Object}, ): React$ComponentType<any> | string { function <API key>(viewConfig: any) { if (Platform.OS === 'android') { // This is supported on Android platform only, // as lazy view managers discovery is Android-specific. if (UIManager.ViewManagerNames) { // Lazy view managers enabled. viewConfig = merge(viewConfig, UIManager.<API key>()); } else { viewConfig.bubblingEventTypes = merge( viewConfig.bubblingEventTypes, UIManager.<API key>, ); viewConfig.directEventTypes = merge( viewConfig.directEventTypes, UIManager.<API key>, ); } } } function merge(destination: ?Object, source: ?Object): ?Object { if (!source) { return destination; } if (!destination) { return source; } for (const key in source) { if (!source.hasOwnProperty(key)) { continue; } var sourceValue = source[key]; if (destination.hasOwnProperty(key)) { const destinationValue = destination[key]; if ( typeof sourceValue === 'object' && typeof destinationValue === 'object' ) { sourceValue = merge(destinationValue, sourceValue); } } destination[key] = sourceValue; } return destination; } // Don't load the ViewConfig from UIManager until it's needed for rendering. // Lazy-loading this can help avoid Prepack deopts. function getViewConfig() { const viewConfig = UIManager[viewName]; invariant( viewConfig != null && !viewConfig.NativeProps != null, 'Native component for "%s" does not exist', viewName, ); viewConfig.uiViewClassName = viewName; viewConfig.validAttributes = {}; // ReactNative `View.propTypes` have been deprecated in favor of // `ViewPropTypes`. In their place a temporary getter has been added with a // deprecated warning message. Avoid triggering that warning here by using // temporary workaround, __propTypes<API key>. 
// TODO (bvaughn) Revert this particular change any time after April 1 if (componentInterface) { viewConfig.propTypes = typeof componentInterface.__propTypes<API key> === 'object' ? componentInterface.__propTypes<API key> : componentInterface.propTypes; } else { viewConfig.propTypes = null; } let baseModuleName = viewConfig.baseModuleName; let bubblingEventTypes = viewConfig.bubblingEventTypes; let directEventTypes = viewConfig.directEventTypes; let nativeProps = viewConfig.NativeProps; while (baseModuleName) { const baseModule = UIManager[baseModuleName]; if (!baseModule) { warning(false, 'Base module "%s" does not exist', baseModuleName); baseModuleName = null; } else { bubblingEventTypes = { baseModule.bubblingEventTypes, bubblingEventTypes, }; directEventTypes = { baseModule.directEventTypes, directEventTypes, }; nativeProps = { baseModule.NativeProps, nativeProps, }; baseModuleName = baseModule.baseModuleName; } } viewConfig.bubblingEventTypes = bubblingEventTypes; viewConfig.directEventTypes = directEventTypes; for (const key in nativeProps) { let useAttribute = false; const attribute = {}; const differ = TypeToDifferMap[nativeProps[key]]; if (differ) { attribute.diff = differ; useAttribute = true; } const processor = TypeToProcessorMap[nativeProps[key]]; if (processor) { attribute.process = processor; useAttribute = true; } viewConfig.validAttributes[key] = useAttribute ? attribute : true; } // Unfortunately, the current set up puts the style properties on the top // level props object. We also need to add the nested form for API // compatibility. This allows these props on both the top level and the // nested style level. TODO: Move these to nested declarations on the // native side. 
viewConfig.validAttributes.style = <API key>; if (__DEV__) { componentInterface && verifyPropTypes( componentInterface, viewConfig, extraConfig && extraConfig.nativeOnly, ); } if (!<API key>) { <API key>(viewConfig); <API key> = true; } // Register this view's event types with the ReactNative renderer. // This enables view managers to be initialized lazily, improving perf, // While also enabling 3rd party components to define custom event types. <API key>.processEventTypes(viewConfig); return viewConfig; } return <API key>(viewName, getViewConfig); } const TypeToDifferMap = { // iOS Types CATransform3D: matricesDiffer, CGPoint: pointsDiffer, CGSize: sizesDiffer, UIEdgeInsets: insetsDiffer, // Android Types // (not yet implemented) }; function processColorArray(colors: ?Array<any>): ?Array<?number> { return colors && colors.map(processColor); } const TypeToProcessorMap = { // iOS Types CGColor: processColor, CGColorArray: processColorArray, UIColor: processColor, UIColorArray: processColorArray, CGImage: resolveAssetSource, UIImage: resolveAssetSource, RCTImageSource: resolveAssetSource, // Android Types Color: processColor, ColorArray: processColorArray, }; module.exports = <API key>;
// modification, are permitted provided that the following conditions // are met: // and/or other materials provided with the distribution. // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. #if !SILVERLIGHT namespace NLog.UnitTests { using System; using System.IO; using System.Threading; using System.Reflection; using Xunit; using NLog.Config; public class LogFactoryTests : NLogTestBase { [Fact] public void <API key>() { LogManager.Configuration = <API key>(@" <nlog throwExceptions='false'> <targets><target type='MethodCall' name='test' methodName='Throws' className='NLog.UnitTests.LogFactoryTests, NLog.UnitTests.netfx40' /></targets> <rules> <logger name='*' minlevel='Debug' writeto='test'></logger> </rules> </nlog>"); ILogger logger = LogManager.<API key>(); logger.Factory.Flush(_ => { }, TimeSpan.FromMilliseconds(1)); } [Fact] public void <API key>() { Boolean ExceptionThrown = false; try { LogManager.ThrowExceptions = false; LogManager.Configuration = <API key>(@" <nlog <API key>='IamNotBooleanValue'> <targets><target type='MethodCall' name='test' methodName='Throws' className='NLog.UnitTests.LogFactoryTests, NLog.UnitTests.netfx40' /></targets> <rules> <logger name='*' minlevel='Debug' writeto='test'></logger> </rules> </nlog>"); } catch (Exception) 
{ ExceptionThrown = true; } Assert.False(ExceptionThrown); } [Fact] public void <API key>() { Boolean ExceptionThrown = false; try { LogManager.ThrowExceptions = true; LogManager.Configuration = <API key>(@" <nlog <API key>='IamNotBooleanValue'> <targets><target type='MethodCall' name='test' methodName='Throws' className='NLog.UnitTests.LogFactoryTests, NLog.UnitTests.netfx40' /></targets> <rules> <logger name='*' minlevel='Debug' writeto='test'></logger> </rules> </nlog>"); } catch (Exception) { ExceptionThrown = true; } Assert.True(ExceptionThrown); } [Fact] public void <API key>() { File.WriteAllText("NLog.config", "<nlog />"); try { bool threadTerminated; var primaryLogFactory = typeof(LogManager).GetField("factory", BindingFlags.Static | BindingFlags.NonPublic).GetValue(null); var <API key> = typeof(LogFactory).GetField("syncRoot", BindingFlags.Instance | BindingFlags.NonPublic).GetValue(primaryLogFactory); // Simulate a potential deadlock. // If the creation of the new LogFactory takes the lock of the global LogFactory, the thread will deadlock. 
lock (<API key>) { var thread = new Thread(() => { (new LogFactory()).<API key>(); }); thread.Start(); threadTerminated = thread.Join(TimeSpan.FromSeconds(1)); } Assert.True(threadTerminated); } finally { try { File.Delete("NLog.config"); } catch { } } } [Fact] public void <API key>() { var <API key> = new <API key>(); LogManager.Configuration = <API key>; var logFactory = new LogFactory(<API key>); var <API key> = new <API key>(); Assert.DoesNotThrow(() => logFactory.ReloadConfigOnTimer(<API key>)); } private class <API key> : <API key> { public override <API key> Reload() { return null; } } [Fact] public void <API key>() { var <API key> = new <API key>(); LogManager.Configuration = <API key>; var logFactory = new LogFactory(<API key>); Assert.DoesNotThrow(() => logFactory.ReloadConfigOnTimer(<API key>)); } [Fact] public void <API key>() { var called = false; var <API key> = new <API key>(); LogManager.Configuration = <API key>; var logFactory = new LogFactory(<API key>); logFactory.<API key> += (sender, args) => { called = true; }; logFactory.ReloadConfigOnTimer(<API key>); Assert.True(called); } [Fact] public void ReloadConfigOnTimer_When_No_Exception_Raises_ConfigurationReloadedEvent_With_Correct_Sender() { object calledBy = null; var <API key> = new <API key>(); LogManager.Configuration = <API key>; var logFactory = new LogFactory(<API key>); logFactory.<API key> += (sender, args) => { calledBy = sender; }; logFactory.ReloadConfigOnTimer(<API key>); Assert.Same(calledBy, logFactory); } [Fact] public void ReloadConfigOnTimer_When_No_Exception_Raises_ConfigurationReloadedEvent_With_Argument_Indicating_Success() { <API key> arguments = null; var <API key> = new <API key>(); LogManager.Configuration = <API key>; var logFactory = new LogFactory(<API key>); logFactory.<API key> += (sender, args) => { arguments = args; }; logFactory.ReloadConfigOnTimer(<API key>); Assert.True(arguments.Succeeded); } public static void Throws() { throw new Exception(); } <summary> We 
should be forward compatible so that we can add easily attributes in the future. </summary> [Fact] public void <API key>() { LogManager.Configuration = <API key>(@" <nlog throwExceptions='true' imAnewAttribute='noError'> <targets><target type='file' name='f1' filename='test.log' /></targets> <rules> <logger name='*' minlevel='Debug' writeto='f1'></logger> </rules> </nlog>"); } [Fact] public void <API key>() { LogManager.Configuration = null; File.WriteAllText("NLog.config", @" <nlog> <variable name='dir' value='c:\mylogs' /> <targets> <target name='f' type='file' fileName='${var:dir}\test.log' /> </targets> <rules> <logger name='*' writeTo='f' /> </rules> </nlog>"); try { LogManager.Configuration.ToString(); } finally { File.Delete("NLog.config"); } } [Fact] public void <API key>() { LogFactory factory = new LogFactory(); #pragma warning disable 618 // In order Suspend => Resume Assert.True(factory.IsLoggingEnabled()); factory.DisableLogging(); Assert.False(factory.IsLoggingEnabled()); factory.EnableLogging(); Assert.True(factory.IsLoggingEnabled()); #pragma warning restore 618 } [Fact] public void <API key>() { LogFactory factory = new LogFactory(); // In order Suspend => Resume [Case 1] Assert.True(factory.IsLoggingEnabled()); factory.SuspendLogging(); Assert.False(factory.IsLoggingEnabled()); factory.ResumeLogging(); Assert.True(factory.IsLoggingEnabled()); // In order Suspend => Resume [Case 2] using (var factory2 = new LogFactory()) { Assert.True(factory.IsLoggingEnabled()); factory.SuspendLogging(); Assert.False(factory.IsLoggingEnabled()); factory.ResumeLogging(); Assert.True(factory.IsLoggingEnabled()); } } [Fact] public void <API key>() { LogFactory factory = new LogFactory(); // Out of order Resume => Suspend => (Suspend => Resume) factory.ResumeLogging(); Assert.True(factory.IsLoggingEnabled()); factory.SuspendLogging(); Assert.True(factory.IsLoggingEnabled()); factory.SuspendLogging(); Assert.False(factory.IsLoggingEnabled()); factory.ResumeLogging(); 
Assert.True(factory.IsLoggingEnabled()); } } } #endif
package org.motechproject.openmrs.tasks.impl; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.StringUtils; import org.joda.time.DateTime; import org.motechproject.openmrs.domain.Concept; import org.motechproject.openmrs.domain.ConceptName; import org.motechproject.openmrs.domain.Encounter; import org.motechproject.openmrs.domain.EncounterType; import org.motechproject.openmrs.domain.Identifier; import org.motechproject.openmrs.domain.IdentifierType; import org.motechproject.openmrs.domain.Location; import org.motechproject.openmrs.domain.Observation; import org.motechproject.openmrs.domain.Patient; import org.motechproject.openmrs.domain.Person; import org.motechproject.openmrs.domain.Program; import org.motechproject.openmrs.domain.ProgramEnrollment; import org.motechproject.openmrs.domain.Provider; import org.motechproject.openmrs.service.<API key>; import org.motechproject.openmrs.service.<API key>; import org.motechproject.openmrs.service.<API key>; import org.motechproject.openmrs.service.<API key>; import org.motechproject.openmrs.service.<API key>; import org.motechproject.openmrs.service.<API key>; import org.motechproject.openmrs.service.<API key>; import org.motechproject.openmrs.tasks.<API key>; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; /** * Implementation of the {@link org.motechproject.openmrs.tasks.<API key>} interface. 
*/ @Service("<API key>") public class <API key> implements <API key> { private static final Logger LOGGER = LoggerFactory.getLogger(<API key>.class); private <API key> conceptService; private <API key> encounterService; private <API key> locationService; private <API key> patientService; private <API key> providerService; private <API key> <API key>; private <API key> personService; @Override public void createEncounter(String configName, DateTime encounterDatetime, String encounterType, String locationName, String patientUuid, String providerUuid, Map<String, String> observations) { Location location = getLocationByName(configName, locationName); Patient patient = patientService.getPatientByUuid(configName, patientUuid); Provider provider = providerService.getProviderByUuid(configName, providerUuid); //While creating observations, the encounterDateTime is used as a obsDateTime. List<Observation> observationList = MapUtils.isNotEmpty(observations) ? <API key>(observations, encounterDatetime) : null; EncounterType type = new EncounterType(encounterType); Encounter encounter = new Encounter(location, type, encounterDatetime.toDate(), patient, Collections.singletonList(provider.getPerson()), observationList); encounterService.createEncounter(configName, encounter); } @Override public void createPatient(String configName, String givenName, String middleName, String familyName, String address1, String address2, String address3, String address4, String address5, String address6, String cityVillage, String stateProvince, String country, String postalCode, String countyDistrict, String latitude, String longitude, DateTime startDate, DateTime endDate, DateTime birthDate, Boolean birthDateEstimated, String gender, Boolean dead, String causeOfDeathUUID, String motechId, String locationForMotechId, Map<String, String> identifiers) { Concept causeOfDeath = StringUtils.isNotEmpty(causeOfDeathUUID) ? 
conceptService.getConceptByUuid(configName, causeOfDeathUUID) : null; Person person = preparePerson(givenName, middleName, familyName, address1, address2, address3, address4, address5, address6, cityVillage, stateProvince, country, postalCode, countyDistrict, latitude, longitude, startDate, endDate, birthDate, birthDateEstimated, gender, dead, causeOfDeath); Location location = StringUtils.isNotEmpty(locationForMotechId) ? getLocationByName(configName, locationForMotechId) : getDefaultLocation(configName); List<Identifier> identifierList = <API key>(identifiers); Patient patient = new Patient(identifierList, person, motechId, location); patientService.createPatient(configName, patient); } @Override public void <API key>(String configName, String patientUuid, Map<String, String> identifiers) { Patient patient = new Patient(); List<Identifier> identifierList = <API key>(identifiers); patient.setIdentifiers(identifierList); patient.setUuid(patientUuid); patientService.<API key>(configName, patient); } @Override public void updatePerson(String configName, String personUuid, String givenName, String middleName, String familyName, String address1, String address2, String address3, String address4, String address5, String address6, String cityVillage, String stateProvince, String country, String postalCode, String countyDistrict, String latitude, String longitude, DateTime startDate, DateTime endDate, DateTime birthDate, Boolean birthDateEstimated, String gender, Boolean dead, String causeOfDeathUUID) { Concept causeOfDeath = StringUtils.isNotEmpty(causeOfDeathUUID) ? 
conceptService.getConceptByUuid(configName, causeOfDeathUUID) : null; Person person = preparePerson(givenName, middleName, familyName, address1, address2, address3, address4, address5, address6, cityVillage, stateProvince, country, postalCode, countyDistrict, latitude, longitude, startDate, endDate, birthDate, birthDateEstimated, gender, dead, causeOfDeath); person.setUuid(personUuid); personService.updatePerson(configName, person); } @Override public void <API key>(String configName, String patientUuid, String programUuid, DateTime dateEnrolled, DateTime dateCompleted, String locationName) { Patient patient = new Patient(); patient.setUuid(patientUuid); Program program = new Program(); program.setUuid(programUuid); Location location = null; if (StringUtils.isNotBlank(locationName)) { location = getLocationByName(configName, locationName); } ProgramEnrollment programEnrollment = new ProgramEnrollment(); programEnrollment.setPatient(patient); programEnrollment.setProgram(program); programEnrollment.setDateEnrolled(dateEnrolled.toDate()); programEnrollment.setDateCompleted(Objects.nonNull(dateCompleted) ? dateCompleted.toDate() : null); programEnrollment.setLocation(location); <API key>.<API key>(configName, programEnrollment); } @Override public void <API key>(String configName, String <API key>, DateTime <API key>, String stateUuid, DateTime startDate) { ProgramEnrollment programEnrollment = new ProgramEnrollment(); programEnrollment.setUuid(<API key>); programEnrollment.setDateCompleted(Objects.nonNull(<API key>) ? 
<API key>.toDate() : null); if (StringUtils.isNotBlank(stateUuid)) { Program.State state = new Program.State(); state.setUuid(stateUuid); ProgramEnrollment.StateStatus stateStatus = new ProgramEnrollment.StateStatus(); stateStatus.setState(state); stateStatus.setStartDate(startDate.toDate()); programEnrollment.setStates(Collections.singletonList(stateStatus)); } <API key>.<API key>(configName, programEnrollment); } private Location getDefaultLocation(String configName) { return getLocationByName(configName, <API key>); } private Location getLocationByName(String configName, String locationName) { Location location = null; if (StringUtils.isNotEmpty(locationName)) { List<Location> locations = locationService.getLocations(configName, locationName); if (locations.isEmpty()) { LOGGER.warn("There is no location with name {}", locationName); } else { if (locations.size() > 1) { LOGGER.warn("There is more than one location with name {}.", locationName); } location = locations.get(0); } } return location; } private List<Identifier> <API key>(Map<String, String> identifiers) { List<Identifier> identifierList = new ArrayList<>(); for (String identifierTypeName : identifiers.keySet()) { IdentifierType identifierType = new IdentifierType(); identifierType.setName(identifierTypeName); Identifier identifier = new Identifier(identifiers.get(identifierTypeName), identifierType); identifierList.add(identifier); } return identifierList; } private List<Observation> <API key>(Map<String, String> observations, DateTime obsDatetime) { List<Observation> observationList = new ArrayList<>(); for (String <API key> : observations.keySet()) { Observation observation = new Observation(); ConceptName conceptName = new ConceptName(<API key>); Concept concept = new Concept(conceptName); observation.setConcept(concept); String observationMapValue = observations.get(<API key>); Observation.ObservationValue observationValue = new Observation.ObservationValue(observationMapValue); 
observation.setValue(observationValue); observation.setObsDatetime(obsDatetime.toDate()); observationList.add(observation); } return observationList; } private Person preparePerson(String givenName, String middleName, String familyName, String address1, String address2, String address3, String address4, String address5, String address6, String cityVillage, String stateProvince, String country, String postalCode, String countyDistrict, String latitude, String longitude, DateTime startDate, DateTime endDate, DateTime birthDate, Boolean birthDateEstimated, String gender, Boolean dead, Concept causeOfDeath) { Person person = new Person(); Person.Name personName = new Person.Name(); personName.setGivenName(givenName); personName.setMiddleName(middleName); personName.setFamilyName(familyName); person.setPreferredName(personName); person.setNames(Collections.singletonList(personName)); Person.Address personAddress = new Person.Address(address1, address2, address3, address4, address5, address6, cityVillage, stateProvince, country, postalCode, countyDistrict, latitude, longitude, Objects.nonNull(startDate) ? startDate.toDate() : null, Objects.nonNull(endDate) ? endDate.toDate() : null); person.setPreferredAddress(personAddress); person.setAddresses(Collections.singletonList(personAddress)); person.setBirthdate(Objects.nonNull(birthDate) ? 
birthDate.toDate() : null); person.<API key>(birthDateEstimated); person.setDead(dead); person.setCauseOfDeath(causeOfDeath); person.setGender(gender); return person; } @Autowired public void setConceptService(<API key> conceptService) { this.conceptService = conceptService; } @Autowired public void setEncounterService(<API key> encounterService) { this.encounterService = encounterService; } @Autowired public void setLocationService(<API key> locationService) { this.locationService = locationService; } @Autowired public void setPatientService(<API key> patientService) { this.patientService = patientService; } @Autowired public void setPersonService(<API key> personService) { this.personService = personService; } @Autowired public void setProviderService(<API key> providerService) { this.providerService = providerService; } @Autowired public void <API key>(<API key> <API key>) { this.<API key> = <API key>; } }
#ifndef _AQUARIUS_SCF_HPP_ #define _AQUARIUS_SCF_HPP_ #include "util/global.hpp" #include "input/config.hpp" #include "task/task.hpp" namespace aquarius { namespace scf { template <typename T> class SCF : public task::Task { public: SCF(const string& name, input::Config& config); bool run(task::TaskDAG& dag, const Arena& arena); }; } } #endif
---
layout: docs
title: MSBuild
---

ReactJS.NET includes an MSBuild task for compiling JSX into JavaScript. This is
handy to improve the start time of your application, especially if you have a
large number of JSX files.

To use it, first reference the `TransformBabel` task, and then call it wherever
you like:

```xml
<UsingTask AssemblyFile="tools\React\React.MSBuild.dll" TaskName="TransformBabel" />
<Target Name="TransformBabel">
	<TransformBabel SourceDir="$(<API key>)" TargetDir="" />
</Target>
```

To get started easily, you can install the
[React.MSBuild](https://www.nuget.org/packages/React.MSBuild/) NuGet package,
which will automatically modify your web application's `.csproj` file to
reference the task and run it after every site compilation. To customise the
process (for example, to only compile the JSX files for release builds), modify
the `TransformBabel` build target that was added to the csproj file.

The NuGet package is good for getting started quickly, but it has some
limitations. The package needs to add a reference to `React.MSBuild.dll`, even
though this assembly is only used at build time and not actually used at
runtime. Instead of using the NuGet package, you can just manually copy all the
assembly files into a folder (such as `tools\React`) and reference the task
manually.
#ifndef <API key> #define <API key> #include <boost/shared_ptr.hpp> #include <QtCore/QDir> #include "ACL.h" #include "Settings.h" #include "Timer.h" #include "Version.h" // Global helper class to spread variables around across threads. class MainWindow; class ServerHandler; class AudioInput; class AudioOutput; class Database; class Log; class Plugins; class QSettings; class Overlay; class LCD; class BonjourClient; class OverlayClient; class CELTCodec; class <API key>; struct Global Q_DECL_FINAL { private: Q_DISABLE_COPY(Global) public: static Global *g_global_struct; MainWindow *mw; Settings s; boost::shared_ptr<ServerHandler> sh; boost::shared_ptr<AudioInput> ai; boost::shared_ptr<AudioOutput> ao; Database *db; Log *l; Plugins *p; QSettings *qs; Overlay *o; LCD *lcd; BonjourClient *bc; <API key> *nam; int iPushToTalk; Timer tDoublePush; quint64 uiDoublePush; int iTarget; int iPrevTarget; bool bPushToMute; bool bCenterPosition; bool bPosTest; bool bInAudioWizard; OverlayClient *ocIntercept; int iAudioPathTime; unsigned int uiSession; ChanACL::Permissions pPermissions; int iMaxBandwidth; int iAudioBandwidth; QDir qdBasePath; QMap<int, CELTCodec *> qmCodecs; int iCodecAlpha, iCodecBeta; bool bPreferAlpha; bool bOpus; bool bAttenuateOthers; If set the AudioOutput::mix will forcefully adjust the volume of all non-priority speakers. bool <API key>; bool bAllowHTML; unsigned int uiMessageLength; unsigned int uiImageLength; unsigned int uiMaxUsers; bool bQuit; bool bHappyEaster; static const char ccHappyEaster[]; Global(); ~Global(); }; // Class to handle ordered initialization of globals. // This allows the same link-time magic as used everywhere else // for globals that need an init before the GUI starts, but // after we reach main(). 
class DeferInit { private: Q_DISABLE_COPY(DeferInit) protected: static QMultiMap<int, DeferInit *> *qmDeferers; void add(int priority); public: DeferInit(int priority) { add(priority); }; DeferInit() { add(0); }; virtual ~DeferInit(); virtual void initialize() { }; virtual void destroy() { }; static void run_initializers(); static void run_destroyers(); }; Special exit code which causes mumble to restart itself. The outward facing return code with be 0 const int <API key> = 64738; // -Wshadow is bugged. If an inline function of a class uses a variable or // parameter named 'g', that will generate a warning even if the class header // is included long before this definition. #define g (*Global::g_global_struct) #endif
#!/usr/bin/env bash set -x -e # Build project MVN_ARGS="clean package -B -V" mvn ${MVN_ARGS} -P release,skip-unzip-jdk
<?php namespace yii\build\controllers; use Yii; use yii\base\Exception; use yii\console\Controller; use yii\helpers\ArrayHelper; use yii\helpers\Console; use yii\helpers\FileHelper; /** * ReleaseController is there to help preparing releases. * * Get a version overview: * * ./build release/info * * run it with `--update` to fetch tags for all repos: * * ./build release/info --update * * Make a framework release (apps are always in line with framework): * * ./build release framework * ./build release app-basic * ./build release app-advanced * * Make an extension release (e.g. for redis): * * ./build release redis * * Be sure to check the help info for individual sub-commands: * * @author Carsten Brandt <mail@cebe.cc> * @since 2.0 */ class ReleaseController extends Controller { public $defaultAction = 'release'; /** * @var string base path to use for releases. */ public $basePath; /** * @var bool whether to make actual changes. If true, it will run without changing or pushing anything. */ public $dryRun = false; /** * @var bool whether to fetch latest tags. */ public $update = false; public function options($actionID) { $options = ['basePath']; if ($actionID === 'release') { $options[] = 'dryRun'; } elseif ($actionID === 'info') { $options[] = 'update'; } return array_merge(parent::options($actionID), $options); } public function beforeAction($action) { if (!$this->interactive) { throw new Exception('Sorry, but releases should be run interactively to ensure you actually verify what you are doing ;)'); } if ($this->basePath === null) { $this->basePath = dirname(dirname(__DIR__)); } $this->basePath = rtrim($this->basePath, '\\/'); return parent::beforeAction($action); } /** * Shows information about current framework and extension versions. 
*/ public function actionInfo() { $items = [ 'framework', 'app-basic', 'app-advanced', ]; $extensionPath = "{$this->basePath}/extensions"; foreach (scandir($extensionPath) as $extension) { if (ctype_alpha($extension) && is_dir($extensionPath . '/' . $extension)) { $items[] = $extension; } } if ($this->update) { foreach($items as $item) { $this->stdout("fetching tags for $item..."); if ($item === 'framework') { $this->gitFetchTags("{$this->basePath}"); } elseif (strncmp('app-', $item, 4) === 0) { $this->gitFetchTags("{$this->basePath}/apps/" . substr($item, 4)); } else { $this->gitFetchTags("{$this->basePath}/extensions/$item"); } $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); } } else { $this->stdout("\nInformation may be outdated, re-run with `--update` to fetch latest tags.\n\n"); } $versions = $this->getCurrentVersions($items); $nextVersions = $this->getNextVersions($versions, self::PATCH); // print version table $w = $this->minWidth(array_keys($versions)); $this->stdout(str_repeat(' ', $w + 2) . "Current Version Next Version\n", Console::BOLD); foreach($versions as $ext => $version) { $this->stdout($ext . str_repeat(' ', $w + 3 - mb_strlen($ext)) . $version . ""); $this->stdout(str_repeat(' ', 17 - mb_strlen($version)) . $nextVersions[$ext] . "\n"); } } private function minWidth($a) { $w = 1; foreach($a as $s) { if (($l = mb_strlen($s)) > $w) { $w = $l; } } return $w; } /** * Automation tool for making Yii framework and official extension releases. 
* * Usage: * * To make a release, make sure your git is clean (no uncommitted changes) and run the following command in * the yii dev repo root: * * ``` * ./build/build release framework * ``` * * or * * ``` * ./build/build release redis,bootstrap,apidoc * ``` * * You may use the `--dryRun` switch to test the command without changing or pushing anything: * * ``` * ./build/build release redis --dryRun * ``` * * The command will guide you through the complete release process including changing of files, * committing and pushing them. Each git command must be confirmed and can be skipped individually. * You may adjust changes in a separate shell or your IDE while the command is waiting for confirmation. * * @param array $what what do you want to release? this can either be: * * - an extension name such as `redis` or `bootstrap`, * - an application indicated by prefix `app-`, e.g. `app-basic`, * - or `framework` if you want to release a new version of the framework itself. * * @return int */ public function actionRelease(array $what) { if (count($what) > 1) { $this->stdout("Currently only one simultaneous release is supported.\n"); return 1; } $this->stdout("This is the Yii release manager\n\n", Console::BOLD); if ($this->dryRun) { $this->stdout("Running in \"dry-run\" mode, nothing will actually be changed.\n\n", Console::BOLD, Console::FG_GREEN); } $this->validateWhat($what); $versions = $this->getCurrentVersions($what); $newVersions = $this->getNextVersions($versions, self::PATCH);// TODO add support for minor $this->stdout("You are about to prepare a new release for the following things:\n\n"); $this->printWhat($what, $newVersions, $versions); $this->stdout("\n"); $this->stdout("Before you make a release briefly go over the changes and check if you spot obvious mistakes:\n\n", Console::BOLD); if (strncmp('app-', reset($what), 4) !== 0) { $this->stdout("- no accidentally added CHANGELOG lines for other versions than this one?\n"); $this->stdout("- are all new 
`@since` tags for this relase version?\n"); } $travisUrl = reset($what) === 'framework' ? '' : '-'.reset($what); $this->stdout("- are unit tests passing on travis? https://travis-ci.org/yiisoft/yii2$travisUrl/builds\n"); $this->stdout("- other issues with code changes?\n"); $this->stdout("- also make sure the milestone on github is complete and no issues or PRs are left open.\n\n"); $this->printWhatUrls($what, $versions); $this->stdout("\n"); if (!$this->confirm('When you continue, this tool will run cleanup jobs and update the changelog as well as other files (locally). Continue?', false)) { $this->stdout("Canceled.\n"); return 1; } foreach($what as $ext) { if ($ext === 'framework') { $this->releaseFramework("{$this->basePath}/framework", $newVersions['framework']); } elseif (strncmp('app-', $ext, 4) === 0) { $this->releaseApplication(substr($ext, 4), "{$this->basePath}/apps/" . substr($ext, 4), $newVersions[$ext]); } else { $this->releaseExtension($ext, "{$this->basePath}/extensions/$ext", $newVersions[$ext]); } } return 0; } /** * This will generate application packages for download page. * * Usage: * * ``` * ./build/build release/package app-basic * ``` * * @param array $what what do you want to package? this can either be: * * - an application indicated by prefix `app-`, e.g. 
`app-basic`, * * @return int */ public function actionPackage(array $what) { $this->validateWhat($what, ['app']); $versions = $this->getCurrentVersions($what); $this->stdout("You are about to generate packages for the following things:\n\n"); foreach($what as $ext) { if (strncmp('app-', $ext, 4) === 0) { $this->stdout(" - "); $this->stdout(substr($ext, 4), Console::FG_RED); $this->stdout(" application version "); } elseif ($ext === 'framework') { $this->stdout(" - Yii Framework version "); } else { $this->stdout(" - "); $this->stdout($ext, Console::FG_RED); $this->stdout(" extension version "); } $this->stdout($versions[$ext], Console::BOLD); $this->stdout("\n"); } $this->stdout("\n"); $packagePath = "{$this->basePath}/packages"; $this->stdout("Packages will be stored in $packagePath\n\n"); if (!$this->confirm('Continue?', false)) { $this->stdout("Canceled.\n"); return 1; } foreach($what as $ext) { if ($ext === 'framework') { throw new Exception('Can not package framework.'); } elseif (strncmp('app-', $ext, 4) === 0) { $this->packageApplication(substr($ext, 4), $versions[$ext], $packagePath); } else { throw new Exception('Can not package extension.'); } } $this->stdout("\ndone. 
verify the versions composer installed above and push it to github!\n\n"); return 0; } protected function printWhat(array $what, $newVersions, $versions) { foreach($what as $ext) { if (strncmp('app-', $ext, 4) === 0) { $this->stdout(" - "); $this->stdout(substr($ext, 4), Console::FG_RED); $this->stdout(" application version "); } elseif ($ext === 'framework') { $this->stdout(" - Yii Framework version "); } else { $this->stdout(" - "); $this->stdout($ext, Console::FG_RED); $this->stdout(" extension version "); } $this->stdout($newVersions[$ext], Console::BOLD); $this->stdout(", last release was {$versions[$ext]}\n"); } } protected function printWhatUrls(array $what, $oldVersions) { foreach($what as $ext) { if ($ext === 'framework') { $this->stdout("framework: https://github.com/yiisoft/yii2-framework/compare/{$oldVersions[$ext]}...master\n"); $this->stdout("app-basic: https://github.com/yiisoft/yii2-app-basic/compare/{$oldVersions[$ext]}...master\n"); $this->stdout("app-advanced: https://github.com/yiisoft/yii2-app-advanced/compare/{$oldVersions[$ext]}...master\n"); } else { $this->stdout($ext, Console::FG_RED); $this->stdout(": https://github.com/yiisoft/yii2-$ext/compare/{$oldVersions[$ext]}...master\n"); } } } /** * @param array $what list of items * @param array $limit list of things to allow, or empty to allow any, can be `app`, `framework`, `extension` * @throws \yii\base\Exception */ protected function validateWhat(array $what, $limit = []) { foreach($what as $w) { if (strncmp('app-', $w, 4) === 0) { if (!empty($limit) && !in_array('app', $limit)) { throw new Exception("Only the following types are allowed: ".implode(', ', $limit)."\n"); } if (!is_dir($appPath = "{$this->basePath}/apps/" . 
substr($w, 4))) { throw new Exception("Application path does not exist: \"{$appPath}\"\n"); } $this->ensureGitClean($appPath); } elseif ($w === 'framework') { if (!empty($limit) && !in_array('framework', $limit)) { throw new Exception("Only the following types are allowed: ".implode(', ', $limit)."\n"); } if (!is_dir($fwPath = "{$this->basePath}/framework")) { throw new Exception("Framework path does not exist: \"{$this->basePath}/framework\"\n"); } $this->ensureGitClean($fwPath); } else { if (!empty($limit) && !in_array('ext', $limit)) { throw new Exception("Only the following types are allowed: ".implode(', ', $limit)."\n"); } if (!is_dir($extPath = "{$this->basePath}/extensions/$w")) { throw new Exception("Extension path for \"$w\" does not exist: \"{$this->basePath}/extensions/$w\"\n"); } $this->ensureGitClean($extPath); } } } protected function releaseFramework($frameworkPath, $version) { $this->stdout("\n"); $this->stdout($h = "Preparing framework release version $version", Console::BOLD); $this->stdout("\n" . str_repeat('-', strlen($h)) . 
"\n\n", Console::BOLD); $this->runGit('git checkout master', $frameworkPath); // TODO add compatibility for other release branches $this->runGit('git pull', $frameworkPath); // TODO add compatibility for other release branches // checks $this->stdout('check if framework composer.json matches yii2-dev composer.json...'); $this->checkComposer($frameworkPath); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); // adjustments $this->stdout('prepare classmap...', Console::BOLD); $this->dryRun || Yii::$app->runAction('classmap', [$frameworkPath]); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout('updating mimetype magic file...', Console::BOLD); $this->dryRun || Yii::$app->runAction('mime-type', ["$frameworkPath/helpers/mimeTypes.php"]); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("fixing various PHPdoc style issues...\n", Console::BOLD); $this->dryRun || Yii::$app->runAction('php-doc/fix', [$frameworkPath]); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("updating PHPdoc @property annotations...\n", Console::BOLD); $this->dryRun || Yii::$app->runAction('php-doc/property', [$frameworkPath]); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout('sorting changelogs...', Console::BOLD); $this->dryRun || $this->resortChangelogs(['framework'], $version); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout('closing changelogs...', Console::BOLD); $this->dryRun || $this->closeChangelogs(['framework'], $version); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout('updating Yii version...'); $this->dryRun || $this->updateYiiVersion($frameworkPath, $version); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("\nIn the following you can check the above changes using git diff.\n\n"); do { $this->runGit("git diff --color", $frameworkPath); $this->stdout("\n\n\nCheck whether the above diff is okay, if not 
you may change things as needed before continuing.\n"); $this->stdout("You may abort the program with Ctrl + C and reset the changes by running `git checkout -- .` in the repo.\n\n"); } while(!$this->confirm("Type `yes` to continue, `no` to view git diff again. Continue?")); $this->stdout("\n\n"); $this->stdout(" **** RELEASE TIME! ****\n", Console::FG_YELLOW, Console::BOLD); $this->stdout(" **** Commit, Tag and Push it! ****\n", Console::FG_YELLOW, Console::BOLD); $this->stdout("\n\nHint: if you decide 'no' for any of the following, the command will not be executed. You may manually run them later if needed. E.g. try the release locally without pushing it.\n\n"); $this->runGit("git commit -a -m \"release version $version\"", $frameworkPath); $this->runGit("git tag -a $version -m\"version $version\"", $frameworkPath); $this->runGit("git push origin master", $frameworkPath); $this->runGit("git push --tags", $frameworkPath); $this->stdout("\n\n"); $this->stdout("CONGRATULATIONS! You have just released extension ", Console::FG_YELLOW, Console::BOLD); $this->stdout('framework', Console::FG_RED, Console::BOLD); $this->stdout(" version ", Console::FG_YELLOW, Console::BOLD); $this->stdout($version, Console::BOLD); $this->stdout("!\n\n", Console::FG_YELLOW, Console::BOLD); // TODO release applications // $this-><API key>($what, $version); // $this->resortChangelogs($what, $version); // $this->closeChangelogs($what, $version); // $this-><API key>($what, $version); // if (in_array('framework', $what)) { // $this->updateYiiVersion($version); // if done: // * ./build/build release/done framework 2.0.0-dev 2.0.0-rc // * ./build/build release/done redis 2.0.0-dev 2.0.0-rc // $this->openChangelogs($what, $nextVersion); // $this-><API key>($what, 'dev'); // if (in_array('framework', $what)) { // $this->updateYiiVersion($devVersion); // prepare next release $this->stdout("Time to prepare the next release...\n\n", Console::FG_YELLOW, Console::BOLD); $this->stdout('opening 
changelogs...', Console::BOLD); $nextVersion = $this->getNextVersions(['framework' => $version], self::PATCH); // TODO support other versions $this->dryRun || $this->openChangelogs(['framework'], $nextVersion['framework']); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout('updating Yii version...'); $this->dryRun || $this->updateYiiVersion($frameworkPath, $nextVersion['framework'] . '-dev'); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("\n"); $this->runGit("git diff --color", $frameworkPath); $this->stdout("\n\n"); $this->runGit("git commit -a -m \"prepare for next release\"", $frameworkPath); $this->runGit("git push origin master", $frameworkPath); $this->stdout("\n\nDONE!", Console::FG_YELLOW, Console::BOLD); $this->stdout("\n\nThe following steps are left for you to do manually:\n\n"); $nextVersion2 = $this->getNextVersions($nextVersion, self::PATCH); // TODO support other versions $this->stdout("- wait for your changes to be propagated to the repo and create a tag $version on https://github.com/yiisoft/yii2-framework\n\n"); $this->stdout("- close the $version milestone on github and open new ones for {$nextVersion['framework']} and {$nextVersion2['framework']}: https://github.com/yiisoft/yii2/milestones\n"); $this->stdout("- create a release on github.\n"); $this->stdout("- release news and announcement.\n"); $this->stdout("- update the website (will be automated soon and is only relevant for the new website).\n"); $this->stdout("\n"); $this->stdout("- release applications: ./build/build release app-basic\n"); $this->stdout("- release applications: ./build/build release app-advanced\n"); $this->stdout("\n"); } protected function releaseApplication($name, $path, $version) { $this->stdout("\n"); $this->stdout($h = "Preparing release for application $name version $version", Console::BOLD); $this->stdout("\n" . str_repeat('-', strlen($h)) . 
"\n\n", Console::BOLD); $this->runGit('git checkout master', $path); // TODO add compatibility for other release branches $this->runGit('git pull', $path); // TODO add compatibility for other release branches // adjustments $this->stdout("fixing various PHPdoc style issues...\n", Console::BOLD); $this->setAppAliases($name, $path); $this->dryRun || Yii::$app->runAction('php-doc/fix', [$path, '<API key>' => true]); $this->resetAppAliases(); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("updating PHPdoc @property annotations...\n", Console::BOLD); $this->setAppAliases($name, $path); $this->dryRun || Yii::$app->runAction('php-doc/property', [$path, '<API key>' => true]); $this->resetAppAliases(); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("updating composer stability...\n", Console::BOLD); $this->dryRun || $this-><API key>(["app-$name"], $version); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("\nIn the following you can check the above changes using git diff.\n\n"); do { $this->runGit("git diff --color", $path); $this->stdout("\n\n\nCheck whether the above diff is okay, if not you may change things as needed before continuing.\n"); $this->stdout("You may abort the program with Ctrl + C and reset the changes by running `git checkout -- .` in the repo.\n\n"); } while(!$this->confirm("Type `yes` to continue, `no` to view git diff again. Continue?")); $this->stdout("\n\n"); $this->stdout(" **** RELEASE TIME! ****\n", Console::FG_YELLOW, Console::BOLD); $this->stdout(" **** Commit, Tag and Push it! ****\n", Console::FG_YELLOW, Console::BOLD); $this->stdout("\n\nHint: if you decide 'no' for any of the following, the command will not be executed. You may manually run them later if needed. E.g. 
try the release locally without pushing it.\n\n"); $this->runGit("git commit -a -m \"release version $version\"", $path); $this->runGit("git tag -a $version -m\"version $version\"", $path); $this->runGit("git push origin master", $path); $this->runGit("git push --tags", $path); $this->stdout("\n\n"); $this->stdout("CONGRATULATIONS! You have just released application ", Console::FG_YELLOW, Console::BOLD); $this->stdout($name, Console::FG_RED, Console::BOLD); $this->stdout(" version ", Console::FG_YELLOW, Console::BOLD); $this->stdout($version, Console::BOLD); $this->stdout("!\n\n", Console::FG_YELLOW, Console::BOLD); // prepare next release $this->stdout("Time to prepare the next release...\n\n", Console::FG_YELLOW, Console::BOLD); $this->stdout("updating composer stability...\n", Console::BOLD); $this->dryRun || $this-><API key>(["app-$name"], 'dev'); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $nextVersion = $this->getNextVersions(["app-$name" => $version], self::PATCH); // TODO support other versions $this->stdout("\n"); $this->runGit("git diff --color", $path); $this->stdout("\n\n"); $this->runGit("git commit -a -m \"prepare for next release\"", $path); $this->runGit("git push origin master", $path); $this->stdout("\n\nDONE!", Console::FG_YELLOW, Console::BOLD); $this->stdout("\n\nThe following steps are left for you to do manually:\n\n"); $nextVersion2 = $this->getNextVersions($nextVersion, self::PATCH); // TODO support other versions $this->stdout("- close the $version milestone on github and open new ones for {$nextVersion["app-$name"]} and {$nextVersion2["app-$name"]}: https://github.com/yiisoft/yii2-app-$name/milestones\n"); $this->stdout("- Create Application packages and upload them to github: ./build release/package app-$name\n"); $this->stdout("\n"); } private $_oldAlias; protected function setAppAliases($app, $path) { $this->_oldAlias = Yii::getAlias('@app'); switch($app) { case 'basic': Yii::setAlias('@app', $path); break; case 
'advanced': // setup @frontend, @backend etc... require("$path/common/config/bootstrap.php"); break; } } protected function resetAppAliases() { Yii::setAlias('@app', $this->_oldAlias); } protected function packageApplication($name, $version, $packagePath) { FileHelper::createDirectory($packagePath); $this->runCommand("composer create-project yiisoft/yii2-app-$name $name $version", $packagePath); // clear cookie validation key in basic app if (is_file($configFile = "$packagePath/$name/config/web.php")) { $this->sed( "/'cookieValidationKey' => '.*?',/", "'cookieValidationKey' => '',", $configFile ); } $this->runCommand("tar zcf yii-$name-app-$version.tgz $name", $packagePath); } protected function releaseExtension($name, $path, $version) { $this->stdout("\n"); $this->stdout($h = "Preparing release for extension $name version $version", Console::BOLD); $this->stdout("\n" . str_repeat('-', strlen($h)) . "\n\n", Console::BOLD); $this->runGit('git checkout master', $path); // TODO add compatibility for other release branches $this->runGit('git pull', $path); // TODO add compatibility for other release branches // adjustments $this->stdout("fixing various PHPdoc style issues...\n", Console::BOLD); $this->dryRun || Yii::$app->runAction('php-doc/fix', [$path]); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("updating PHPdoc @property annotations...\n", Console::BOLD); $this->dryRun || Yii::$app->runAction('php-doc/property', [$path]); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout('sorting changelogs...', Console::BOLD); $this->dryRun || $this->resortChangelogs([$name], $version); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout('closing changelogs...', Console::BOLD); $this->dryRun || $this->closeChangelogs([$name], $version); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("\nIn the following you can check the above changes using git diff.\n\n"); do { 
$this->runGit("git diff --color", $path); $this->stdout("\n\n\nCheck whether the above diff is okay, if not you may change things as needed before continuing.\n"); $this->stdout("You may abort the program with Ctrl + C and reset the changes by running `git checkout -- .` in the repo.\n\n"); } while(!$this->confirm("Type `yes` to continue, `no` to view git diff again. Continue?")); $this->stdout("\n\n"); $this->stdout(" **** RELEASE TIME! ****\n", Console::FG_YELLOW, Console::BOLD); $this->stdout(" **** Commit, Tag and Push it! ****\n", Console::FG_YELLOW, Console::BOLD); $this->stdout("\n\nHint: if you decide 'no' for any of the following, the command will not be executed. You may manually run them later if needed. E.g. try the release locally without pushing it.\n\n"); $this->runGit("git commit -a -m \"release version $version\"", $path); $this->runGit("git tag -a $version -m\"version $version\"", $path); $this->runGit("git push origin master", $path); $this->runGit("git push --tags", $path); $this->stdout("\n\n"); $this->stdout("CONGRATULATIONS! 
You have just released extension ", Console::FG_YELLOW, Console::BOLD); $this->stdout($name, Console::FG_RED, Console::BOLD); $this->stdout(" version ", Console::FG_YELLOW, Console::BOLD); $this->stdout($version, Console::BOLD); $this->stdout("!\n\n", Console::FG_YELLOW, Console::BOLD); // prepare next release $this->stdout("Time to prepare the next release...\n\n", Console::FG_YELLOW, Console::BOLD); $this->stdout('opening changelogs...', Console::BOLD); $nextVersion = $this->getNextVersions([$name => $version], self::PATCH); // TODO support other versions $this->dryRun || $this->openChangelogs([$name], $nextVersion[$name]); $this->stdout("done.\n", Console::FG_GREEN, Console::BOLD); $this->stdout("\n"); $this->runGit("git diff --color", $path); $this->stdout("\n\n"); $this->runGit("git commit -a -m \"prepare for next release\"", $path); $this->runGit("git push origin master", $path); $this->stdout("\n\nDONE!", Console::FG_YELLOW, Console::BOLD); $this->stdout("\n\nThe following steps are left for you to do manually:\n\n"); $nextVersion2 = $this->getNextVersions($nextVersion, self::PATCH); // TODO support other versions $this->stdout("- close the $version milestone on github and open new ones for {$nextVersion[$name]} and {$nextVersion2[$name]}: https://github.com/yiisoft/yii2-$name/milestones\n"); $this->stdout("- release news and announcement.\n"); $this->stdout("- update the website (will be automated soon and is only relevant for the new website).\n"); $this->stdout("\n"); } protected function runCommand($cmd, $path) { $this->stdout("running $cmd ...", Console::BOLD); if ($this->dryRun) { $this->stdout("dry run, command `$cmd` not executed.\n"); return; } chdir($path); exec($cmd, $output, $ret); if ($ret != 0) { echo implode("\n", $output); throw new Exception("Command \"$cmd\" failed with code " . 
$ret); } $this->stdout("\ndone.\n", Console::BOLD, Console::FG_GREEN); } protected function runGit($cmd, $path) { if ($this->confirm("Run `$cmd`?", true)) { if ($this->dryRun) { $this->stdout("dry run, command `$cmd` not executed.\n"); return; } chdir($path); exec($cmd, $output, $ret); echo implode("\n", $output); if ($ret != 0) { throw new Exception("Command \"$cmd\" failed with code " . $ret); } echo "\n"; } } protected function ensureGitClean($path) { chdir($path); exec('git status --porcelain -uno', $changes, $ret); if ($ret != 0) { throw new Exception('Command "git status --porcelain -uno" failed with code ' . $ret); } if (!empty($changes)) { throw new Exception("You have uncommitted changes in $path: " . print_r($changes, true)); } } protected function gitFetchTags($path) { chdir($path); exec('git fetch --tags', $output, $ret); if ($ret != 0) { throw new Exception('Command "git fetch --tags" failed with code ' . $ret); } } protected function checkComposer($fwPath) { if (!$this->confirm("\nNot yet automated: Please check if composer.json dependencies in framework dir match the one in repo root. Continue?", false)) { exit; } } protected function closeChangelogs($what, $version) { $v = str_replace('\\-', '[\\- ]', preg_quote($version, '/')); $headline = $version . ' ' . date('F d, Y'); $this->sed( '/'.$v.' under development\n(-+?)\n/', $headline . "\n" . str_repeat('-', strlen($headline)) . "\n", $this->getChangelogs($what) ); } protected function openChangelogs($what, $version) { $headline = "\n$version under development\n"; $headline .= str_repeat('-', strlen($headline) - 2) . 
"\n\n- no changes in this release.\n"; foreach($this->getChangelogs($what) as $file) { $lines = explode("\n", file_get_contents($file)); $hl = [ array_shift($lines), array_shift($lines), ]; array_unshift($lines, $headline); file_put_contents($file, implode("\n", array_merge($hl, $lines))); } } protected function resortChangelogs($what, $version) { foreach($this->getChangelogs($what) as $file) { // split the file into relevant parts list($start, $changelog, $end) = $this->splitChangelog($file, $version); $changelog = $this->resortChangelog($changelog); file_put_contents($file, implode("\n", array_merge($start, $changelog, $end))); } } /** * Extract changelog content for a specific version */ protected function splitChangelog($file, $version) { $lines = explode("\n", file_get_contents($file)); // split the file into relevant parts $start = []; $changelog = []; $end = []; $state = 'start'; foreach($lines as $l => $line) { // starting from the changelogs headline if (isset($lines[$l-2]) && strpos($lines[$l-2], $version) !== false && isset($lines[$l-1]) && strncmp($lines[$l-1], ' $state = 'changelog'; } if ($state === 'changelog' && isset($lines[$l+1]) && strncmp($lines[$l+1], ' $state = 'end'; } ${$state}[] = $line; } return [$start, $changelog, $end]; } /** * Ensure sorting of the changelog lines */ protected function resortChangelog($changelog) { // cleanup whitespace foreach($changelog as $i => $line) { $changelog[$i] = rtrim($line); } $changelog = array_filter($changelog); $i = 0; ArrayHelper::multisort($changelog, function($line) use (&$i) { if (preg_match('/^- (Chg|Enh|Bug|New)( #\d+(, #\d+)*)?: .+$/', $line, $m)) { $o = ['Bug' => 'C', 'Enh' => 'D', 'Chg' => 'E', 'New' => 'F']; return $o[$m[1]] . ' ' . (!empty($m[2]) ? $m[2] : 'AAAA' . $i++); } return 'B' . 
$i++; }, SORT_ASC, SORT_NATURAL); // re-add leading and trailing lines array_unshift($changelog, ''); $changelog[] = ''; $changelog[] = ''; return $changelog; } protected function getChangelogs($what) { $changelogs = []; if (in_array('framework', $what)) { $changelogs[] = $this-><API key>(); } return array_merge($changelogs, $this-><API key>($what)); } protected function <API key>() { return $this->basePath . '/framework/CHANGELOG.md'; } protected function <API key>($what) { return array_filter(glob($this->basePath . '/extensions/*/CHANGELOG.md'), function($elem) use ($what) { foreach($what as $ext) { if (strpos($elem, "extensions/$ext/CHANGELOG.md") !== false) { return true; } } return false; }); } protected function <API key>($what, $version) { $apps = []; if (in_array('app-advanced', $what)) { $apps[] = $this->basePath . '/apps/advanced/composer.json'; } if (in_array('app-basic', $what)) { $apps[] = $this->basePath . '/apps/basic/composer.json'; } if (in_array('app-benchmark', $what)) { $apps[] = $this->basePath . '/apps/benchmark/composer.json'; } if (empty($apps)) { return; } $stability = 'stable'; if (strpos($version, 'alpha') !== false) { $stability = 'alpha'; } elseif (strpos($version, 'beta') !== false) { $stability = 'beta'; } elseif (strpos($version, 'rc') !== false) { $stability = 'RC'; } elseif (strpos($version, 'dev') !== false) { $stability = 'dev'; } $this->sed( '/"minimum-stability": "(.+?)",/', '"minimum-stability": "' . $stability . '",', $apps ); } protected function updateYiiVersion($frameworkPath, $version) { $this->sed( '/function getVersion\(\)\n \{\n return \'(.+?)\';/', "function getVersion()\n {\n return '$version';", $frameworkPath . 
'/BaseYii.php'); } protected function sed($pattern, $replace, $files) { foreach((array) $files as $file) { file_put_contents($file, preg_replace($pattern, $replace, file_get_contents($file))); } } protected function getCurrentVersions(array $what) { $versions = []; foreach($what as $ext) { if ($ext === 'framework') { chdir("{$this->basePath}/framework"); } elseif (strncmp('app-', $ext, 4) === 0) { chdir("{$this->basePath}/apps/" . substr($ext, 4)); } else { chdir("{$this->basePath}/extensions/$ext"); } $tags = []; exec('git tag', $tags, $ret); if ($ret != 0) { throw new Exception('Command "git tag" failed with code ' . $ret); } rsort($tags, SORT_NATURAL); // TODO this can not deal with alpha/beta/rc... $versions[$ext] = reset($tags); } return $versions; } const MINOR = 'minor'; const PATCH = 'patch'; protected function getNextVersions(array $versions, $type) { foreach($versions as $k => $v) { if (empty($v)) { $versions[$k] = '2.0.0'; continue; } $parts = explode('.', $v); switch($type) { case self::MINOR: $parts[1]++; break; case self::PATCH: $parts[2]++; break; default: throw new Exception('Unknown version type.'); } $versions[$k] = implode('.', $parts); } return $versions; } }
// tls.h: Simple cross-platform interface for thread local storage. #ifndef COMMON_TLS_H_ #define COMMON_TLS_H_ #include "common/platform.h" #ifdef <API key> // TLS does not exist for Windows Store and needs to be emulated # ifdef <API key> # ifndef TLS_OUT_OF_INDEXES # define TLS_OUT_OF_INDEXES -1 # endif # ifndef CREATE_SUSPENDED # define CREATE_SUSPENDED 0x00000004 # endif # endif typedef DWORD TLSIndex; # define TLS_INVALID_INDEX (TLS_OUT_OF_INDEXES) #elif defined(<API key>) # include <pthread.h> # include <semaphore.h> # include <errno.h> typedef pthread_key_t TLSIndex; # define TLS_INVALID_INDEX (static_cast<TLSIndex>(-1)) #else # error Unsupported platform. #endif // TODO(kbr): for POSIX platforms this will have to be changed to take // in a destructor function pointer, to allow the thread-local storage // to be properly deallocated upon thread exit. TLSIndex CreateTLSIndex(); bool DestroyTLSIndex(TLSIndex index); bool SetTLSValue(TLSIndex index, void *value); void *GetTLSValue(TLSIndex index); #endif // COMMON_TLS_H_
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"> <html> <head> <title>Imported cssRules</title> <style type="text/css"> @import url(imported.css); </style> <script type="text/javascript"> window.onload = function () { document.<API key>('p')[0].firstChild.nodeValue = ( document.styleSheets.item(0).cssRules.item(0).styleSheet.cssRules.length == 1 ) ? 'PASS' : 'FAIL'; }; </script> </head> <body> <p>FAIL (script not run or produced an error)</p> </body> </html>
from __future__ import absolute_import import itertools from django.contrib import messages from django.core.context_processors import csrf from django.db import transaction from django.http import <API key> from django.views.decorators.cache import never_cache from django.views.decorators.csrf import csrf_protect from django.utils.decorators import method_decorator from sudo.decorators import sudo_required from sentry.models import ( Project, ProjectStatus ) from sentry.plugins import plugins from sentry.web.forms.accounts import ( <API key>, <API key>, <API key> ) from sentry.web.decorators import login_required from sentry.web.frontend.base import BaseView from sentry.web.helpers import render_to_response from sentry.utils.auth import get_auth_providers from sentry.utils.safe import safe_execute class <API key>(BaseView): <API key> = <API key> @method_decorator(csrf_protect) @method_decorator(never_cache) @method_decorator(login_required) @method_decorator(sudo_required) @method_decorator(transaction.atomic) def handle(self, request): settings_form = self.<API key>( request.user, request.POST or None) reports_form = <API key>( request.user, request.POST or None, prefix='reports') project_list = list(Project.objects.filter( <API key>=request.user, <API key>=True, status=ProjectStatus.VISIBLE, ).distinct()) project_forms = [ (project, <API key>( project, request.user, request.POST or None, prefix='project-%s' % (project.id,) )) for project in sorted(project_list, key=lambda x: ( x.organization.name, x.name)) ] ext_forms = [] for plugin in plugins.all(): for form in safe_execute(plugin.<API key>, _with_transaction=False) or (): form = safe_execute(form, plugin, request.user, request.POST or None, prefix=plugin.slug, _with_transaction=False) if not form: continue ext_forms.append(form) if request.POST: all_forms = list(itertools.chain( [settings_form, reports_form], ext_forms, (f for _, f in project_forms) )) if all(f.is_valid() for f in all_forms): for form in 
all_forms: form.save() messages.add_message(request, messages.SUCCESS, 'Your settings were saved.') return <API key>(request.path) context = csrf(request) context.update({ 'settings_form': settings_form, 'project_forms': project_forms, 'reports_form': reports_form, 'ext_forms': ext_forms, 'page': 'notifications', 'AUTH_PROVIDERS': get_auth_providers(), }) return render_to_response('sentry/account/notifications.html', context, request)
<!doctype html> 001-1 <script> addEventListener("pageshow", function(e) { parent.events.push(e); if (parent.events.length == 2) { parent.do_test(); } }, false); </script>
#include <shogun/neuralnets/<API key>.h>
#include <shogun/neuralnets/NeuralInputLayer.h>
#include <shogun/neuralnets/NeuralLinearLayer.h>
#include <shogun/lib/SGVector.h>
#include <shogun/lib/SGMatrix.h>
#include <shogun/mathematics/Math.h>
#include <shogun/mathematics/<API key>.h>
#include <shogun/mathematics/NormalDistribution.h>
#include <gtest/gtest.h>
#include <random>

using namespace shogun;

// Unit tests for a convolutional feature map: forward activations (identity,
// logistic and rectified-linear activation functions, with and without
// stride), parameter/input gradients checked against central-difference
// numerical approximations, and max-pooling.

// Forward pass, identity activation, stride 1: two input channels of a
// 6x5 image, output written into map slot `map_index` of `A`; compared to
// a scipy-generated reference.
TEST(<API key>, compute_activations)
{
	const int32_t w = 6;
	const int32_t h = 5;
	const int32_t rx = 1;
	const int32_t ry = 1;
	const int32_t b = 2;
	const int32_t map_index = 1;
	const int32_t num_maps = 3;

	SGMatrix<float64_t> x1(w*h,b);
	for (int32_t i=0; i<x1.num_rows*x1.num_cols; i++)
		x1[i] = i;

	auto input1 = std::make_shared<NeuralInputLayer> (x1.num_rows);
	input1->set_batch_size(x1.num_cols);

	// two channels
	SGMatrix<float64_t> x2(2*w*h,b);
	for (int32_t i=0; i<x2.num_rows*x2.num_cols; i++)
		x2[i] = float64_t(i)/8;

	auto input2 = std::make_shared<NeuralInputLayer> (x2.num_rows);
	input2->set_batch_size(x2.num_cols);

	std::vector<std::shared_ptr<NeuralLayer>> layers;
	layers.push_back(input1);
	layers.push_back(input2);

	SGVector<int32_t> input_indices(2);
	input_indices[0] = 0;
	input_indices[1] = 1;

	<API key> map(w,h,rx,ry,1,1,map_index);

	// one bias plus a (2*rx+1)x(2*ry+1) filter for each of the 3 channels
	SGVector<float64_t> params(1+(2*rx+1)*(2*ry+1)*3);
	for (int32_t i=0; i<params.vlen; i++)
		params[i] = float64_t(i)/4;

	input1->compute_activations(x1);
	input2->compute_activations(x2);

	SGMatrix<float64_t> A(num_maps*w*h,b);
	map.compute_activations(params, layers, input_indices, A);

	// reference numbers generated using scipy.signal.convolve2d
	float64_t ref[] = {  95.12500, 153.34375, 164.96875, 176.59375, 127.75000,
		181.59375, 294.00000, 315.65625, 337.31250, 243.65625,
		249.09375, 402.28125, 423.93750, 445.59375, 320.53125,
		316.59375, 510.56250, 532.21875, 553.87500, 397.40625,
		384.09375, 618.84375, 640.50000, 662.15625, 474.28125,
		335.12500, 534.90625, 552.15625, 569.40625, 404.00000,
		432.62500, 693.34375, 704.96875, 716.59375, 510.25000,
		789.09375,1255.87500,1277.53125,1299.18750, 918.65625,
		856.59375,1364.15625,1385.81250,1407.46875, 995.53125,
		924.09375,1472.43750,1494.09375,1515.75000,1072.40625,
		991.59375,1580.71875,1602.37500,1624.03125,1149.28125,
		807.62500,1277.40625,1294.65625,1311.90625, 921.50000};

	for (int32_t i=0; i<w*h; i++)
		for (int32_t j=0; j<A.num_cols; j++)
			EXPECT_NEAR(ref[i+j*w*h], A(i+map_index*w*h,j), 1.0e-15);
}

// Forward pass with stride (3,2) on a 12x10 image: the output map is
// downsampled to w_out x h_out.
TEST(<API key>, <API key>)
{
	const int32_t w = 12;
	const int32_t h = 10;
	const int32_t rx = 1;
	const int32_t ry = 1;
	const int32_t b = 2;
	const int32_t map_index = 1;
	const int32_t num_maps = 3;

	int32_t stride_x = 3;
	int32_t stride_y = 2;
	int32_t w_out = w/stride_x;
	int32_t h_out = h/stride_y;

	SGMatrix<float64_t> x1(w*h,b);
	for (int32_t i=0; i<x1.num_rows*x1.num_cols; i++)
		x1[i] = i;

	auto input1 = std::make_shared<NeuralInputLayer> (x1.num_rows);
	input1->set_batch_size(x1.num_cols);

	// two channels
	SGMatrix<float64_t> x2(2*w*h,b);
	for (int32_t i=0; i<x2.num_rows*x2.num_cols; i++)
		x2[i] = float64_t(i)/8;

	auto input2 = std::make_shared<NeuralInputLayer> (x2.num_rows);
	input2->set_batch_size(x2.num_cols);

	std::vector<std::shared_ptr<NeuralLayer>> layers;
	layers.push_back(input1);
	layers.push_back(input2);

	SGVector<int32_t> input_indices(2);
	input_indices[0] = 0;
	input_indices[1] = 1;

	<API key> map(w,h,rx,ry,stride_x,stride_y,map_index);

	SGVector<float64_t> params(1+(2*rx+1)*(2*ry+1)*3);
	for (int32_t i=0; i<params.vlen; i++)
		params[i] = float64_t(i)/4;

	input1->compute_activations(x1);
	input2->compute_activations(x2);

	SGMatrix<float64_t> A(num_maps*w_out*h_out,b);
	map.compute_activations(params, layers, input_indices, A);

	// reference numbers generated using scipy.signal.convolve2d
	float64_t ref[] = {  344.50000, 549.81250, 573.06250, 596.31250, 619.56250,
		 880.03125,1411.12500,1454.43750,1497.75000,1541.06250,
		1285.03125,2060.81250,2104.12500,2147.43750,2190.75000,
		1690.03125,2710.50000,2753.81250,2797.12500,2840.43750,
		1694.50000,2709.81250,2733.06250,2756.31250,2779.56250,
		3310.03125,5258.62500,5301.93750,5345.25000,5388.56250,
		3715.03125,5908.31250,5951.62500,5994.93750,6038.25000,
		4120.03125,6558.00000,6601.31250,6644.62500,6687.93750};

	for (int32_t i=0; i<w_out*h_out; i++)
		for (int32_t j=0; j<A.num_cols; j++)
			EXPECT_NEAR(ref[i+j*w_out*h_out], A(i+map_index*w_out*h_out,j), 1.0e-15);
}

// Forward pass with the logistic activation function (CMAF_LOGISTIC);
// small parameters keep outputs near 0.5, tolerance is relaxed to 1e-5.
TEST(<API key>, <API key>)
{
	const int32_t w = 6;
	const int32_t h = 5;
	const int32_t rx = 1;
	const int32_t ry = 1;
	const int32_t b = 2;
	const int32_t map_index = 1;
	const int32_t num_maps = 3;

	SGMatrix<float64_t> x1(w*h,b);
	for (int32_t i=0; i<x1.num_rows*x1.num_cols; i++)
		x1[i] = i;

	auto input1 = std::make_shared<NeuralInputLayer> (x1.num_rows);
	input1->set_batch_size(x1.num_cols);

	// two channels
	SGMatrix<float64_t> x2(2*w*h,b);
	for (int32_t i=0; i<x2.num_rows*x2.num_cols; i++)
		x2[i] = float64_t(i)/8;

	auto input2 = std::make_shared<NeuralInputLayer> (x2.num_rows);
	input2->set_batch_size(x2.num_cols);

	std::vector<std::shared_ptr<NeuralLayer>> layers;
	layers.push_back(input1);
	layers.push_back(input2);

	SGVector<int32_t> input_indices(2);
	input_indices[0] = 0;
	input_indices[1] = 1;

	<API key> map(w,h,rx,ry,1,1,map_index,CMAF_LOGISTIC);

	SGVector<float64_t> params(1+(2*rx+1)*(2*ry+1)*3);
	for (int32_t i=0; i<params.vlen; i++)
		params[i] = float64_t(i)*1e-4;

	input1->compute_activations(x1);
	input2->compute_activations(x2);

	SGMatrix<float64_t> A(num_maps*w*h,b);
	map.compute_activations(params, layers, input_indices, A);

	float64_t ref[] = { 0.50951, 0.51533, 0.51649, 0.51765, 0.51277,
		0.51815, 0.52937, 0.53152, 0.53368, 0.52435,
		0.52489, 0.54014, 0.54229, 0.54444, 0.53201,
		0.53162, 0.55088, 0.55302, 0.55516, 0.53966,
		0.53833, 0.56157, 0.56370, 0.56583, 0.54729,
		0.53346, 0.55329, 0.55499, 0.55670, 0.54031,
		0.54315, 0.56889, 0.57003, 0.57117, 0.55085,
		0.57826, 0.62301, 0.62504, 0.62707, 0.59085,
		0.58483, 0.63313, 0.63514, 0.63714, 0.59826,
		0.59137, 0.64313, 0.64512, 0.64710, 0.60563,
		0.59788, 0.65301, 0.65497, 0.65692, 0.61295,
		0.58007, 0.62503, 0.62665, 0.62826, 0.59112};

	for (int32_t i=0; i<w*h; i++)
		for (int32_t j=0; j<A.num_cols; j++)
			EXPECT_NEAR(ref[i+j*w*h], A(i+map_index*w*h,j), 1.0e-5);
}

// Forward pass with the rectified-linear activation function: negative
// pre-activations are clamped, hence the -0.00000 entries in the reference.
TEST(<API key>, <API key>)
{
	const int32_t w = 6;
	const int32_t h = 5;
	const int32_t rx = 1;
	const int32_t ry = 1;
	const int32_t b = 2;
	const int32_t map_index = 1;
	const int32_t num_maps = 3;

	SGMatrix<float64_t> x1(w*h,b);
	for (int32_t i=0; i<x1.num_rows*x1.num_cols; i++)
		x1[i] = i;

	auto input1 = std::make_shared<NeuralInputLayer> (x1.num_rows);
	input1->set_batch_size(x1.num_cols);

	// two channels
	SGMatrix<float64_t> x2(2*w*h,b);
	for (int32_t i=0; i<x2.num_rows*x2.num_cols; i++)
		x2[i] = float64_t(i)/8;

	auto input2 = std::make_shared<NeuralInputLayer> (x2.num_rows);
	input2->set_batch_size(x2.num_cols);

	std::vector<std::shared_ptr<NeuralLayer>> layers;
	layers.push_back(input1);
	layers.push_back(input2);

	SGVector<int32_t> input_indices(2);
	input_indices[0] = 0;
	input_indices[1] = 1;

	<API key> map(w,h,rx,ry,1,1,map_index,<API key>);

	// parameters shifted negative so that some pre-activations are < 0
	SGVector<float64_t> params(1+(2*rx+1)*(2*ry+1)*3);
	for (int32_t i=0; i<params.vlen; i++)
		params[i] = float64_t(i)/4 - 2.5;

	input1->compute_activations(x1);
	input2->compute_activations(x2);

	SGMatrix<float64_t> A(num_maps*w*h,b);
	map.compute_activations(params, layers, input_indices, A);

	float64_t ref[] = { 17.62500, 28.96875, 21.84375, 14.71875, 12.75000,
		19.71875, 38.37500, 31.90625, 25.43750, 25.53125,
		-0.00000, 6.03125, -0.00000, -0.00000, 8.65625,
		-0.00000, -0.00000, -0.00000, -0.00000, -0.00000,
		-0.00000, -0.00000, -0.00000, -0.00000, -0.00000,
		7.62500, 35.53125, 34.03125, 32.53125, 39.00000,
		-0.00000, -0.00000, -0.00000, -0.00000, -0.00000,
		-0.00000, -0.00000, -0.00000, -0.00000, 25.53125,
		-0.00000, -0.00000, -0.00000, -0.00000, 8.65625,
		-0.00000, -0.00000, -0.00000, -0.00000, -0.00000,
		-0.00000, -0.00000, -0.00000, -0.00000, -0.00000,
		30.12500, 103.03125, 101.53125, 100.03125, 106.50000};

	for (int32_t i=0; i<w*h; i++)
		for (int32_t j=0; j<A.num_cols; j++)
			EXPECT_NEAR(ref[i+j*w*h], A(i+map_index*w*h,j), 1.0e-15);
}

// Gradient check (identity activation, stride 1): parameter gradients from
// compute_gradients() are compared against a central-difference numerical
// approximation of d/dp [0.5*sum(A^2)].
TEST(<API key>, <API key>)
{
	const int32_t seed = 10;
	const int32_t w = 6;
	const int32_t h = 5;
	const int32_t rx = 1;
	const int32_t ry = 1;
	const int32_t b = 2;
	const int32_t map_index = 1;
	const int32_t num_maps = 3;

	std::mt19937_64 prng(seed);
	<API key><float64_t> uniform_real_dist;

	SGMatrix<float64_t> x1(w*h,b);
	for (int32_t i=0; i<x1.num_rows*x1.num_cols; i++)
		x1[i] = uniform_real_dist(prng, {-10.0,10.0});

	auto input1 = std::make_shared<NeuralInputLayer> (x1.num_rows);
	input1->set_batch_size(x1.num_cols);

	// two channels
	SGMatrix<float64_t> x2(2*w*h,b);
	for (int32_t i=0; i<x2.num_rows*x2.num_cols; i++)
		x2[i] = uniform_real_dist(prng, {-10.0,10.0});

	auto input2 = std::make_shared<NeuralInputLayer> (x2.num_rows);
	input2->set_batch_size(x2.num_cols);

	std::vector<std::shared_ptr<NeuralLayer>> layers;
	layers.push_back(input1);
	layers.push_back(input2);

	SGVector<int32_t> input_indices(2);
	input_indices[0] = 0;
	input_indices[1] = 1;

	NormalDistribution<float64_t> normal_dist;
	<API key> map(w,h,rx,ry,1,1,map_index);

	SGVector<float64_t> params(1+(2*rx+1)*(2*ry+1)*3);
	for (int32_t i=0; i<params.vlen; i++)
		params[i] = normal_dist(prng, {0.0,0.01});

	input1->compute_activations(x1);
	input2->compute_activations(x2);

	SGMatrix<float64_t> A(num_maps*w*h,b);
	A.zero();
	map.compute_activations(params, layers, input_indices, A);

	// compute activation gradients with respect to some function
	// assuming the function is 0.5*sum(A[i]^2)
	SGMatrix<float64_t> AG(num_maps*w*h,b);
	for (int32_t i=0; i<AG.num_rows*AG.num_cols; i++)
		AG[i] = A[i];

	// compute parameter gradients
	SGVector<float64_t> PG(params.vlen);
	map.compute_gradients(params, A, AG, layers, input_indices, PG);

	// approximate parameter gradients
	SGVector<float64_t> PG_numerical(params.vlen);
	float64_t epsilon = 1e-9;
	for (int32_t i=0; i<params.vlen; i++)
	{
		params[i] += epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_plus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_plus += 0.5*A[k]*A[k];

		params[i] -= 2*epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_minus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_minus += 0.5*A[k]*A[k];

		// restore the parameter before moving to the next one
		params[i] += epsilon;

		PG_numerical[i] = (<API key>)/(2*epsilon);
	}

	// compare
	for (int32_t i=0; i<PG.vlen; i++)
		EXPECT_NEAR(PG_numerical[i], PG[i], 1e-5);
}

// Gradient check with stride (3,2): same numerical-vs-analytical comparison
// as above but on a strided 12x10 map.
TEST(<API key>, <API key>)
{
	const int32_t seed = 10;
	const int32_t w = 12;
	const int32_t h = 10;
	const int32_t rx = 1;
	const int32_t ry = 1;
	const int32_t b = 2;
	const int32_t map_index = 1;
	const int32_t num_maps = 3;

	int32_t stride_x = 3;
	int32_t stride_y = 2;
	int32_t w_out = w/stride_x;
	int32_t h_out = h/stride_y;

	std::mt19937_64 prng(seed);
	<API key><float64_t> uniform_real_dist;

	SGMatrix<float64_t> x1(w*h,b);
	for (int32_t i=0; i<x1.num_rows*x1.num_cols; i++)
		x1[i] = uniform_real_dist(prng, {-10.0,10.0});

	auto input1 = std::make_shared<NeuralInputLayer> (x1.num_rows);
	input1->set_batch_size(x1.num_cols);

	// two channels
	SGMatrix<float64_t> x2(2*w*h,b);
	for (int32_t i=0; i<x2.num_rows*x2.num_cols; i++)
		x2[i] = uniform_real_dist(prng, {-10.0,10.0});

	auto input2 = std::make_shared<NeuralInputLayer> (x2.num_rows);
	input2->set_batch_size(x2.num_cols);

	std::vector<std::shared_ptr<NeuralLayer>> layers;
	layers.push_back(input1);
	layers.push_back(input2);

	SGVector<int32_t> input_indices(2);
	input_indices[0] = 0;
	input_indices[1] = 1;

	NormalDistribution<float64_t> normal_dist;
	<API key> map(w,h,rx,ry,stride_x,stride_y,map_index);

	SGVector<float64_t> params(1+(2*rx+1)*(2*ry+1)*3);
	for (int32_t i=0; i<params.vlen; i++)
		params[i] = normal_dist(prng, {0.0,0.01});

	input1->compute_activations(x1);
	input2->compute_activations(x2);

	SGMatrix<float64_t> A(num_maps*w_out*h_out,b);
	A.zero();
	map.compute_activations(params, layers, input_indices, A);

	// compute activation gradients with respect to some function
	// assuming the function is 0.5*sum(A[i]^2)
	SGMatrix<float64_t> AG(num_maps*w_out*h_out,b);
	for (int32_t i=0; i<AG.num_rows*AG.num_cols; i++)
		AG[i] = A[i];

	// compute parameter gradients
	SGVector<float64_t> PG(params.vlen);
	map.compute_gradients(params, A, AG, layers, input_indices, PG);

	// approximate parameter gradients
	SGVector<float64_t> PG_numerical(params.vlen);
	float64_t epsilon = 1e-9;
	for (int32_t i=0; i<params.vlen; i++)
	{
		params[i] += epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_plus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_plus += 0.5*A[k]*A[k];

		params[i] -= 2*epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_minus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_minus += 0.5*A[k]*A[k];

		params[i] += epsilon;

		PG_numerical[i] = (<API key>)/(2*epsilon);
	}

	// compare
	for (int32_t i=0; i<PG.vlen; i++)
		EXPECT_NEAR(PG_numerical[i], PG[i], 1e-5);
}

// Gradient check with logistic activation, single input channel.
TEST(<API key>, <API key>)
{
	const int32_t seed = 10;
	const int32_t w = 6;
	const int32_t h = 5;
	const int32_t rx = 1;
	const int32_t ry = 1;
	const int32_t b = 2;

	std::mt19937_64 prng(seed);
	<API key><float64_t> uniform_real_dist;

	SGMatrix<float64_t> x1(w*h,b);
	for (int32_t i=0; i<x1.num_rows*x1.num_cols; i++)
		x1[i] = uniform_real_dist(prng, {-10.0,10.0});

	auto input1 = std::make_shared<NeuralInputLayer> (x1.num_rows);
	input1->set_batch_size(x1.num_cols);

	std::vector<std::shared_ptr<NeuralLayer>> layers;
	layers.push_back(input1);

	SGVector<int32_t> input_indices(1);
	input_indices[0] = 0;

	NormalDistribution<float64_t> normal_dist;
	<API key> map(w,h,rx,ry,1,1,0, CMAF_LOGISTIC);

	// single channel: one bias plus one (2*rx+1)x(2*ry+1) filter
	SGVector<float64_t> params(1+(2*rx+1)*(2*ry+1));
	for (int32_t i=0; i<params.vlen; i++)
		params[i] = normal_dist(prng, {0.0,0.01});

	input1->compute_activations(x1);

	SGMatrix<float64_t> A(w*h,b);
	A.zero();
	map.compute_activations(params, layers, input_indices, A);

	// compute activation gradients with respect to some function
	// assuming the function is 0.5*sum(A[i]^2)
	SGMatrix<float64_t> AG(w*h,b);
	for (int32_t i=0; i<AG.num_rows*AG.num_cols; i++)
		AG[i] = A[i];

	// compute parameter gradients
	SGVector<float64_t> PG(params.vlen);
	map.compute_gradients(params, A, AG, layers, input_indices, PG);

	// approximate parameter gradients
	SGVector<float64_t> PG_numerical(params.vlen);
	float64_t epsilon = 1e-9;
	for (int32_t i=0; i<params.vlen; i++)
	{
		params[i] += epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_plus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_plus += 0.5*A[k]*A[k];

		params[i] -= 2*epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_minus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_minus += 0.5*A[k]*A[k];

		params[i] += epsilon;

		PG_numerical[i] = (<API key>)/(2*epsilon);
	}

	// compare
	for (int32_t i=0; i<PG.vlen; i++)
		EXPECT_NEAR(PG_numerical[i], PG[i], 1e-5);
}

// Gradient check with rectified-linear activation, single input channel.
TEST(<API key>, <API key>)
{
	const int32_t seed = 10;
	const int32_t w = 6;
	const int32_t h = 5;
	const int32_t rx = 1;
	const int32_t ry = 1;
	const int32_t b = 2;

	std::mt19937_64 prng(seed);
	<API key><float64_t> uniform_real_dist;

	SGMatrix<float64_t> x1(w*h,b);
	for (int32_t i=0; i<x1.num_rows*x1.num_cols; i++)
		x1[i] = uniform_real_dist(prng, {-10.0,10.0});

	auto input1 = std::make_shared<NeuralInputLayer> (x1.num_rows);
	input1->set_batch_size(x1.num_cols);

	std::vector<std::shared_ptr<NeuralLayer>> layers;
	layers.push_back(input1);

	SGVector<int32_t> input_indices(1);
	input_indices[0] = 0;

	NormalDistribution<float64_t> normal_dist;
	<API key> map(w,h,rx,ry,1,1,0, <API key>);

	SGVector<float64_t> params(1+(2*rx+1)*(2*ry+1));
	for (int32_t i=0; i<params.vlen; i++)
		params[i] = normal_dist(prng, {0.0,0.01});

	input1->compute_activations(x1);

	SGMatrix<float64_t> A(w*h,b);
	A.zero();
	map.compute_activations(params, layers, input_indices, A);

	// compute activation gradients with respect to some function
	// assuming the function is 0.5*sum(A[i]^2)
	SGMatrix<float64_t> AG(w*h,b);
	for (int32_t i=0; i<AG.num_rows*AG.num_cols; i++)
		AG[i] = A[i];

	// compute parameter gradients
	SGVector<float64_t> PG(params.vlen);
	map.compute_gradients(params, A, AG, layers, input_indices, PG);

	// approximate parameter gradients
	SGVector<float64_t> PG_numerical(params.vlen);
	float64_t epsilon = 1e-9;
	for (int32_t i=0; i<params.vlen; i++)
	{
		params[i] += epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_plus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_plus += 0.5*A[k]*A[k];

		params[i] -= 2*epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_minus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_minus += 0.5*A[k]*A[k];

		params[i] += epsilon;

		PG_numerical[i] = (<API key>)/(2*epsilon);
	}

	// compare
	for (int32_t i=0; i<PG.vlen; i++)
		EXPECT_NEAR(PG_numerical[i], PG[i], 1e-5);
}

// Input-gradient check: gradients propagated back into the two (linear)
// input layers are compared against a central-difference approximation
// taken over the input activations.
TEST(<API key>, <API key>)
{
	const int32_t seed = 100;
	const int32_t w = 6;
	const int32_t h = 5;
	const int32_t rx = 1;
	const int32_t ry = 1;
	const int32_t b = 2;
	const int32_t map_index = 0;
	const int32_t num_maps = 1;

	std::mt19937_64 prng(seed);
	<API key><float64_t> uniform_real_dist;

	// linear layers (rather than input layers) so that gradients are
	// accumulated into their activation-gradient buffers
	auto input1 = std::make_shared<NeuralLinearLayer> (w*h);
	input1->set_batch_size(b);

	// two channels
	auto input2 = std::make_shared<NeuralLinearLayer> (2*w*h);
	input2->set_batch_size(b);

	for (int32_t i=0; i<input1->get_num_neurons()*b; i++)
		input1->get_activations()[i] = uniform_real_dist(prng, {-10.0,10.0});

	for (int32_t i=0; i<input2->get_num_neurons()*b; i++)
		input2->get_activations()[i] = uniform_real_dist(prng, {-10.0,10.0});

	std::vector<std::shared_ptr<NeuralLayer>> layers;
	layers.push_back(input1);
	layers.push_back(input2);

	SGVector<int32_t> input_indices(2);
	input_indices[0] = 0;
	input_indices[1] = 1;

	NormalDistribution<float64_t> normal_dist;
	<API key> map(w,h,rx,ry,1,1,map_index);

	SGVector<float64_t> params(1+(2*rx+1)*(2*ry+1)*3);
	for (int32_t i=0; i<params.vlen; i++)
		params[i] = normal_dist(prng, {0.0,0.01});

	SGMatrix<float64_t> A(num_maps*w*h,b);
	A.zero();
	map.compute_activations(params, layers, input_indices, A);

	// compute activation gradients with respect to some function
	// assuming the function is 0.5*sum(A[i]^2)
	SGMatrix<float64_t> AG(num_maps*w*h,b);
	for (int32_t i=0; i<AG.num_rows*AG.num_cols; i++)
		AG[i] = A[i];

	// compute gradients
	input1-><API key>().zero();
	input2-><API key>().zero();
	SGVector<float64_t> PG(params.vlen);
	map.compute_gradients(params, A, AG, layers, input_indices, PG);

	// approximate input gradients
	float64_t epsilon = 1e-9;

	SGMatrix<float64_t> IG1(input1->get_num_neurons(), b);
	for (int32_t i=0; i<IG1.num_rows*IG1.num_cols; i++)
	{
		input1->get_activations()[i] += epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_plus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_plus += 0.5*A[k]*A[k];

		input1->get_activations()[i] -= 2*epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_minus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_minus += 0.5*A[k]*A[k];

		input1->get_activations()[i] += epsilon;

		IG1[i] = (<API key>)/(2*epsilon);
	}

	SGMatrix<float64_t> IG2(input2->get_num_neurons(), b);
	for (int32_t i=0; i<IG2.num_rows*IG2.num_cols; i++)
	{
		input2->get_activations()[i] += epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_plus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_plus += 0.5*A[k]*A[k];

		input2->get_activations()[i] -= 2*epsilon;
		map.compute_activations(params, layers, input_indices, A);
		float64_t error_minus = 0;
		for (int32_t k=0; k<A.num_rows*A.num_cols; k++)
			error_minus += 0.5*A[k]*A[k];

		input2->get_activations()[i] += epsilon;

		IG2[i] = (<API key>)/(2*epsilon);
	}

	// compare
	for (int32_t i=0; i<IG1.num_rows*IG1.num_cols; i++)
		EXPECT_NEAR(IG1[i], input1-><API key>()[i], 1e-5);

	for (int32_t i=0; i<IG2.num_rows*IG2.num_cols; i++)
		EXPECT_NEAR(IG2[i], input2-><API key>()[i], 1e-5);
}

// Max-pooling over pw x ph regions: only the rows belonging to map slot
// `map_index` are pooled (the other slots stay zero), and the flat index
// of each region's maximum is recorded in max_indices.
TEST(<API key>, pool_activations)
{
	const int32_t w = 6;
	const int32_t h = 4;
	const int32_t pw = 2;
	const int32_t ph = 2;
	const int32_t b = 2;
	const int32_t map_index = 1;
	const int32_t num_maps = 3;

	SGMatrix<float64_t> activations(num_maps*w*h,b);
	for (int32_t i=0; i<activations.num_rows*activations.num_cols; i++)
		activations[i] = i;

	SGMatrix<float64_t> pooled(num_maps*w*h/(pw*ph),b);
	SGMatrix<float64_t> max_indices(num_maps*w*h/(pw*ph),b);
	pooled.zero();
	max_indices.zero();

	<API key> map(w,h,1,1,1,1, map_index);

	map.pool_activations(activations, pw, ph, pooled, max_indices);

	float64_t ref_pooled[] = { 0,0,0,0,0,0,29,31,37,39,45,47,0,0,0,0,0,0,0,0,0,0,
		0,0,101,103,109,111,117,119,0,0,0,0,0,0};

	float64_t ref_max_indices[] = { 0,0,0,0,0,0,29,31,37,39,45,47,0,0,0,0,0,0,0,
		0,0,0,0,0,29,31,37,39,45,47,0,0,0,0,0,0};

	for (int32_t i=0; i<pooled.num_rows*pooled.num_cols; i++)
		EXPECT_EQ(ref_pooled[i], pooled[i]);

	for (int32_t i=0; i<max_indices.num_rows*max_indices.num_cols; i++)
		EXPECT_EQ(ref_max_indices[i], max_indices[i]);
}
<?php namespace ZF\Doctrine\QueryBuilder\Filter\ORM; class NotLike extends AbstractFilter { public function filter($queryBuilder, $metadata, $option) { if (isset($option['where'])) { if ($option['where'] === 'and') { $queryType = 'andWhere'; } elseif ($option['where'] === 'or') { $queryType = 'orWhere'; } } if (! isset($queryType)) { $queryType = 'andWhere'; } if (! isset($option['alias'])) { $option['alias'] = 'row'; } $queryBuilder->$queryType( $queryBuilder ->expr() ->notlike( $option['alias'] . '.' . $option['field'], $queryBuilder->expr()->literal($option['value']) ) ); } }
#!/bin/sh
#
# Init script for a RapidSMS project: starts/stops the RapidSMS router and
# the Django development webserver via start-stop-daemon, and can
# hard-kill or health-check/restart them.
#
# IMPORTANT: To use, do the following:
# 1. Change 'NAME' variable to the name of your project. E.g. "bednets_for_nigeria"
# 2. Place this file in the TOP-LEVEL of your project, right where 'rapidsms' is
# 3. Link it into /etc/init.d e.g. > ln -s /usr/local/my_project/rapidsms-init.sh /etc/init.d/
# 4. Add it to the runlevels, on Ubuntu/Debian there is a nice tool to do this for you:
#    > sudo update-rc.d rapidsms-init.sh defaults
#
# NOTE: If you want to run multiple instances of RapidSMS, just put this init file
#       in each project dir, set a different NAME for each project, link it into
#       /etc/init.d with _different_ names, and add _each_ script to the runlevels.

### BEGIN INIT INFO
# Provides:          rapidsms daemon instance
# Required-Start:    $all
# Required-Stop:     $all
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: starts instances of rapidsms router and web server
# Description:       starts instance of rapidsms router and web server using start-stop-daemon
### END INIT INFO

# set -e

# Resolve the directory this script actually lives in (following symlinks),
# so the /etc/init.d symlink finds the project files.
ME=`readlink -f $0`
WHERE_AM_I=`dirname $ME`

NAME="smsforum" # change to your project name
DAEMON=$WHERE_AM_I/rapidsms
DAEMON_OPTS=""
RUN_AS=root
APP_PATH=$WHERE_AM_I
ROUTER_PID_FILE=/var/run/${NAME}_router.pid
#WEBSERVER_PID_FILE=/var/run/${NAME}_webs.pid
WEBSERVER_PORT=8000
WEBSERVER_IP=127.0.0.1

# By default both router and webserver are started
# You may turn off one or the other by setting the appropraite
# value to 0 for off or 1 for on.
# Most common use case would be to turn off the webserver
# because you are running Django in another container like Apache
# Nginx, or CherryPy
START_ROUTER=1
START_WEBSERVER=1

# Bail out silently if the rapidsms launcher is missing/not executable.
test -x $DAEMON || exit 0

# Start the router (with a pidfile) and/or the webserver (no pidfile; it is
# later stopped by pattern-matching the process list).
do_start() {
	if [ "${START_ROUTER}" -eq 1 ] ; then
		echo -n "Starting router for $NAME... "
		start-stop-daemon -d $APP_PATH -c $RUN_AS --start --background --pidfile $ROUTER_PID_FILE --make-pidfile --exec $DAEMON -- route $DAEMON_OPTS
		echo "Router Started"
		sleep 2
	fi
	if [ "${START_WEBSERVER}" -eq 1 ] ; then
		echo -n "Starting webserver for $NAME... "
		start-stop-daemon -d $APP_PATH -c $RUN_AS --start --background --exec $DAEMON -- runserver $WEBSERVER_IP:$WEBSERVER_PORT
		echo "Webserver Started"
	fi
}

# kill -9 every process whose command line matches "rapidsms runserver".
hard_stop_runserver() {
	echo "Hard stopping runserver"
	for i in `ps aux | grep -i "rapidsms runserver" | grep -v grep | awk '{print $2}' ` ; do
		kill -9 $i
	done
}

# kill -9 every process whose command line matches "rapidsms route",
# then remove the stale pidfile.
hard_stop_router() {
	echo "Hard stopping router"
	for i in `ps aux | grep -i "rapidsms route" | grep -v grep | awk '{print $2}' ` ; do
		kill -9 $i
	done
	rm $ROUTER_PID_FILE 2>/dev/null
}

do_hard_restart() {
	do_hard_stop_all
	do_start
}

do_hard_stop_all() {
	hard_stop_runserver
	hard_stop_router
}

# Graceful stop: router via its pidfile, webserver via hard kill (it has
# no pidfile of its own).
do_stop() {
	if [ "${START_ROUTER}" -eq 1 ] ; then
		echo -n "Stopping router for $NAME... "
		start-stop-daemon --stop --pidfile $ROUTER_PID_FILE
		rm $ROUTER_PID_FILE 2>/dev/null
		echo "Router Stopped"
		sleep 2
	fi
	if [ "${START_WEBSERVER}" -eq 1 ] ; then
		echo -n "Stopping webserver for $NAME... "
		hard_stop_runserver
		echo "Webserver Stopped"
	fi
}

do_restart() {
	do_stop
	sleep 2
	do_start
}

# check on PID's, if not running, restart
do_check_restart() {
	if [ "${START_ROUTER}" -eq 1 ] ; then
		for pidf in $ROUTER_PID_FILE ; do
			if [ -f $pidf ] ; then
				pid=`cat $pidf`
				# /proc/<pid> existing means the process is alive
				if [ ! -e /proc/$pid ] ; then
					echo "Process for file $pidf not running. Performing hard stop, restart"
					do_hard_restart
					return
				fi
			fi
		done
	fi
	# now check for runserver
	if [ "${START_WEBSERVER}" -eq 1 ] ; then
		webs=`ps aux | grep -i "rapidsms runserver" | grep -v grep | wc -l`
		# runserver normally shows two processes (parent + autoreloader);
		# fewer than 2 is treated as "not running" -- TODO confirm
		if [ $webs -lt 2 ] ; then
			echo "Can't find webserver, doing hard stop, restart"
			do_hard_restart
		fi
	fi
}

case "$1" in
	start)
		do_start
		;;
	stop)
		do_stop
		;;
	check-restart)
		do_check_restart
		;;
	hard-stop)
		do_hard_stop_all
		;;
	hard-restart)
		do_hard_restart
		;;
	restart|force-reload)
		do_restart
		;;
	*)
		echo "Usage: $ME {start|stop|restart|force-reload|check-restart|hard-stop|hard-restart}" >&2
		exit 1
		;;
esac

exit 0
package org.jbehave.core.reporters; import java.io.File; import org.jbehave.core.configuration.Keywords; /** * @deprecated Use {@link XmlTemplateOutput} */ public class XmlTemplateOuput extends XmlTemplateOutput { public XmlTemplateOuput(File file, Keywords keywords) { super(file, keywords); } public XmlTemplateOuput(File file, Keywords keywords, TemplateProcessor processor, String templatePath) { super(file, keywords, processor, templatePath); } }
#ifndef <API key>
#define <API key>

#include <string>
#include <vector>

#include "base/compiler_specific.h"
#include "base/synchronization/cancellation_flag.h"
#include "chrome/browser/autocomplete/autocomplete_input.h"
#include "chrome/browser/autocomplete/history_provider.h"
#include "chrome/browser/autocomplete/<API key>.h"
#include "chrome/browser/autocomplete/url_prefix.h"
#include "chrome/browser/omnibox/omnibox_field_trial.h"
#include "chrome/browser/search_engines/search_terms_data.h"
#include "chrome/browser/search_engines/template_url.h"

class Profile;

namespace base {
class MessageLoop;
}

namespace history {
class HistoryBackend;
class URLDatabase;
}

// How history autocomplete works
//
// Read down this diagram for temporal ordering.
//
//   Main thread                          History thread
//   -----------                          --------------
//   <API key>::Start
//     -> HistoryURLProvider::Start
//       -> <API key>
//       -> SuggestExactInput
//       [params_ allocated]
//       -> DoAutocomplete (for inline autocomplete)
//         -> URLDatabase::<API key> (on in-memory DB)
//       -> HistoryService::<API key>
//
//                                        HistoryBackend::<API key>
//                                          -> HistoryURLProvider::ExecuteWithDB
//                                            -> DoAutocomplete
//                                              -> URLDatabase::<API key>
//
//                                        HistoryService::QueryComplete
//       [params_ destroyed]
//       -> <API key>::OnProviderUpdate
//
// The autocomplete controller calls us, and must be called back, on the main
// thread.  When called, we run two autocomplete passes.  The first pass runs
// synchronously on the main thread and queries the in-memory URL database.
// This pass promotes matches for inline autocomplete if applicable.  We do
// this synchronously so that users get consistent behavior when they type
// quickly and hit enter, no matter how loaded the main history database is.
// Doing this synchronously also prevents inline autocomplete from being
// "flickery" in the AutocompleteEdit.  Because the in-memory DB does not have
// redirect data, results other than the top match might change between the
// two passes, so we can't just decide to use this pass' matches as the final
// results.
//
// The second autocomplete pass uses the full history database, which must be
// queried on the history thread.  Start() asks the history service schedule to
// callback on the history thread with a pointer to the main database.  When we
// are done doing queries, we schedule a task on the main thread that notifies
// the <API key> that we're done.
//
// The communication between these threads is done using a
// <API key> object.  This is allocated in the main thread, and
// normally deleted in QueryComplete().  So that both autocomplete passes can
// use the same code, we also use this to hold results during the first
// autocomplete pass.
//
// While the second pass is running, the <API key> may cancel the
// request.  This can happen frequently when the user is typing quickly.  In
// this case, the main thread sets params_->cancel, which the background thread
// checks periodically.  If it finds the flag set, it stops what it's doing
// immediately and calls back to the main thread.  (We don't delete the params
// on the history thread, because we should only do that when we can safely
// NULL out params_, and that must be done on the main thread.)

// Used to communicate autocomplete parameters between threads via the history
// service.
struct <API key> {
  <API key>(const AutocompleteInput& input,
            bool trim_http,
            const std::string& languages,
            TemplateURL* <API key>,
            const SearchTermsData& search_terms_data);
  ~<API key>();

  // Message loop of the main thread, used to post QueryComplete back from
  // the history thread.
  base::MessageLoop* message_loop;

  // A copy of the autocomplete input. We need the copy since this object will
  // live beyond the original query while it runs on the history thread.
  AutocompleteInput input;

  // Should inline autocompletion be disabled? This is initialized from
  // |input.<API key>()|, but set to false if the input
  // contains trailing white space.
  bool <API key>;

  // Whether matches should have any HTTP scheme stripped off.
  bool trim_http;

  // Set by the main thread to cancel this request. If this flag is set when
  // the query runs, the query will be abandoned. This allows us to avoid
  // running queries that are no longer needed. Since we don't care if we run
  // the extra queries, the lack of signaling is not a problem.
  base::CancellationFlag cancel_flag;

  // Set by ExecuteWithDB() on the history thread when the query could not be
  // performed because the history system failed to properly init the database.
  // If this is set when the main thread is called back, it avoids changing
  // |matches_| at all, so it won't delete the default match
  // <API key>() creates.
  bool failed;

  // List of matches written by the history thread. We keep this separate list
  // to avoid having the main thread read the provider's matches while the
  // history thread is manipulating them. The provider copies this list back
  // to matches_ on the main thread in QueryComplete().
  ACMatches matches;

  // Languages we should pass to gfx::<API key>.
  std::string languages;

  // When true, we should avoid calling SuggestExactInput().
  bool <API key>;

  // The default search provider and search terms data necessary to cull
  // results that correspond to searches (on the default engine). These can
  // only be obtained on the UI thread, so we have to copy them into here to
  // pass them to the history thread. We use a scoped_ptr<TemplateURL> for the
  // DSP since TemplateURLs can't be copied by value. We use a
  // scoped_ptr<SearchTermsData> so that we can store a snapshot of the
  // SearchTermsData accessible from the history thread.
  scoped_ptr<TemplateURL> <API key>;
  scoped_ptr<SearchTermsData> search_terms_data;

 private:
  <API key>(<API key>);
};

// This class is an autocomplete provider and is also a pseudo-internal
// component of the history system. See comments above.
class HistoryURLProvider : public HistoryProvider {
 public:
  // Various values used in scoring, made public so other providers
  // can insert results in appropriate ranges relative to these.
  // (The constant identifiers are redacted in this copy of the file.)
  static const int <API key>;
  static const int <API key>;
  static const int <API key>;
  static const int <API key>;

  HistoryURLProvider(<API key>* listener, Profile* profile);

  // HistoryProvider:
  virtual void Start(const AutocompleteInput& input,
                     bool minimal_changes) OVERRIDE;
  virtual void Stop(bool <API key>) OVERRIDE;

  // Returns a match representing a navigation to |destination_url| given user
  // input of |text|. |trim_http| controls whether the match's |fill_into_edit|
  // and |contents| should have any HTTP scheme stripped off, and should not be
  // set to true if |text| contains an http prefix.
  // NOTE: This does not set the relevance of the returned match, as different
  // callers want different behavior. Callers must set this manually.
  AutocompleteMatch SuggestExactInput(const base::string16& text,
                                      const GURL& destination_url,
                                      bool trim_http);

  // Runs the history query on the history thread, called by the history
  // system. The history database MAY BE NULL in which case it is not
  // available and we should return no data. Also schedules returning the
  // results to the main thread.
  void ExecuteWithDB(history::HistoryBackend* backend,
                     history::URLDatabase* db,
                     <API key>* params);

  // Actually runs the autocomplete job on the given database, which is
  // guaranteed not to be NULL.
  void DoAutocomplete(history::HistoryBackend* backend,
                      history::URLDatabase* db,
                      <API key>* params);

  // Dispatches the results to the autocomplete controller. Called on the
  // main thread by ExecuteWithDB when the results are available.
  // Frees |params_gets_deleted| on exit.
  void QueryComplete(<API key>* params_gets_deleted);

 private:
  <API key>(<API key>, <API key>);

  enum MatchType {
    NORMAL,
    WHAT_YOU_TYPED,
    INLINE_AUTOCOMPLETE,
    UNVISITED_INTRANET,  // An intranet site that has never been visited.
  };
  class VisitClassifier;

  ~HistoryURLProvider();

  // Determines the relevance for a match, given its type. If
  // |match_type| is NORMAL, |match_number| is a number [0,
  // kMaxSuggestions) indicating the relevance of the match (higher ==
  // more relevant). For other values of |match_type|, |match_number|
  // is ignored. Only called some of the time; for some matches,
  // relevancy scores are assigned consecutively decreasing (1416,
  // 1415, 1414, ...).
  int CalculateRelevance(MatchType match_type, size_t match_number) const;

  // Helper function that actually launches the two autocomplete passes.
  void <API key>(const AutocompleteInput& input, bool <API key>);

  // For the match produced by SuggestExactInput(), looks up its info in the
  // DB. If found, fills in the title from the DB, promotes the match's
  // priority to that of an inline autocomplete match (maybe it should be
  // slightly better?), and places it on the front of |matches| (so we pick
  // the right matches to throw away when culling redirects to/from it).
  // Returns whether a match was promoted.
  bool <API key>(history::URLDatabase* db,
                 const AutocompleteInput& input,
                 const VisitClassifier& classifier,
                 AutocompleteMatch* match,
                 history::HistoryMatches* matches) const;

  // Helper function for <API key>, this returns true if the input
  // corresponds to some intranet URL where the user has previously visited the
  // host in question. In this case the input should be treated as a URL.
  bool CanFindIntranetURL(history::URLDatabase* db,
                          const AutocompleteInput& input) const;

  // Determines if |match| is suitable for inline autocomplete. If so, and if
  // |params| is non-NULL, promotes the match. Returns whether |match| is
  // suitable for inline autocomplete.
  bool <API key>(const history::HistoryMatch& match,
                 <API key>* params);

  // Sees if a shorter version of the best match should be created, and if so
  // places it at the front of |matches|. This can suggest history URLs that
  // are prefixes of the best match (if they've been visited enough, compared
  // to the best match), or create host-only suggestions even when they
  // haven't been visited before.
  void <API key>(
      history::URLDatabase* db,
      const <API key>& params,
      bool <API key>,
      const AutocompleteMatch& <API key>,
      history::HistoryMatches* matches);

  // Removes results that have been rarely typed or visited, and not any time
  // recently. The exact parameters for this heuristic can be found in the
  // function body. Also culls results corresponding to queries from the
  // default search engine. These are low-quality, <API key> matches for
  // users, and the SearchProvider should surface past queries in a better way
  // anyway.
  void CullPoorMatches(const <API key>& params,
                       history::HistoryMatches* matches) const;

  // Removes results that redirect to each other, leaving at most
  // |max_results| results.
  void CullRedirects(history::HistoryBackend* backend,
                     history::HistoryMatches* matches,
                     size_t max_results) const;

  // Helper function for CullRedirects, this removes all but the first
  // occurrence of [any of the set of strings in |remove|] from the |matches|
  // list.
  // The return value is the index of the item that is after the item in the
  // input identified by |source_index|. If |source_index| or an item before
  // is removed, the next item will be shifted, and this allows the caller to
  // pick up on the next one when this happens.
  size_t <API key>(history::HistoryMatches* matches,
                   size_t source_index,
                   const std::vector<GURL>& remove) const;

  // Converts a line from the database into an autocomplete match for display.
  // If experimental scoring is enabled, the final relevance score might be
  // different from the given |relevance|.
  AutocompleteMatch <API key>(
      const <API key>& params,
      const history::HistoryMatch& history_match,
      MatchType match_type,
      int relevance);

  // Returns a set of classifications that highlight all the occurrences
  // of |input_text| at word breaks in |description|.
  static <API key> ClassifyDescription(
      const base::string16& input_text,
      const base::string16& description);

  // Returns a new relevance score for the given |match| based on the
  // |old_relevance| score and |scoring_params_|. The new relevance score is
  // guaranteed to be less than or equal to |old_relevance|. In other words,
  // this function can only demote a score, never boost it.
  // Returns |old_relevance| score if experimental scoring is disabled
  // or if the |match.promoted| is true.
  int <API key>(
      const history::HistoryMatch& match,
      int old_relevance) const;

  // Params for the current query. The provider should not free this directly;
  // instead, it is passed as a parameter through the history backend, and the
  // parameter itself is freed once it's no longer needed. The only reason we
  // keep this member is so we can set the cancel bit on it.
  <API key>* params_;

  // Params controlling experimental behavior of this provider.
  HUPScoringParams scoring_params_;

  // If true, HistoryURL provider should lookup and cull redirects. If
  // false, it returns matches that may be redirects to each other and
  // simply hopes the default <API key> behavior to remove them.
  bool cull_redirects_;

  // Used in <API key>(). If true, we may create
  // shorter suggestions even when they haven't been visited before:
  bool <API key>;

  // Whether to query the history URL database to match. Even if
  // false, we still use the URL database to decide if the
  // URL-what-you-typed was visited before or not. If false, the only
  // possible result that HistoryURL provider can return is
  // URL-what-you-typed. This variable is not part of params_ because
  // it never changes after the HistoryURLProvider is initialized.
  // It's used to aid the transition to get all URLs from history to
  // be scored in the HistoryQuick provider only.
  bool <API key>;
};

#endif  // <API key>
<!doctype html>
<title>WebSockets: addEventListener open, OPEN</title>
<!-- Placeholder result; the harness overwrites this once the script runs. -->
<pre>FAIL (script didn't run)</pre>
<script src=/resources/jsframework2.js></script>
<script src=../../constants.js></script>
<script src=gc.js></script>
<script>
// Verifies that a WebSocket registered via addEventListener still receives
// its 'open' event even after garbage collection runs, i.e. the connection
// keeps the object alive while the handshake is in flight.
assertNotEquals(window.WebSocket, undefined, 'WebSocket not supported');
// see gc-policy.txt for the garbage collection policy
setTestTimeout(12000);
assertNotThrows(function() {
  // First GC pass: collect before the socket is created.
  gc(function() {
    var ws = new WebSocket(SCHEME_AND_DOMAIN+':8007/<API key>?'+encodeURIComponent('\\xFF\\x00'));
    ws.addEventListener('open', function(e) {
      debug(e);
      // Drop the only script reference, then GC again; the test ends only if
      // the event still fired, otherwise assertUnreached fails it.
      ws = null;
      gc(end, assertUnreached);
    }, false);
  }, assertUnreached);
});
</script>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http: <html xmlns="http: <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <title>libpgm.graphskeleton &mdash; libpgm 1.1 documentation</title> <link rel="stylesheet" href="../../_static/default.css" type="text/css" /> <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" /> <script type="text/javascript"> var <API key> = { URL_ROOT: '../../', VERSION: '1.1', COLLAPSE_INDEX: false, FILE_SUFFIX: '.html', HAS_SOURCE: true }; </script> <script type="text/javascript" src="../../_static/jquery.js"></script> <script type="text/javascript" src="../../_static/underscore.js"></script> <script type="text/javascript" src="../../_static/doctools.js"></script> <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=<API key>"></script> <link rel="top" title="libpgm 1.1 documentation" href="../../index.html" /> <link rel="up" title="Module code" href="../index.html" /> </head> <body> <div class="related"> <h3>Navigation</h3> <ul> <li class="right" style="margin-right: 10px"> <a href="../../genindex.html" title="General Index" accesskey="I">index</a></li> <li class="right" > <a href="../../py-modindex.html" title="Python Module Index" >modules</a> |</li> <li><a href="../../index.html">libpgm 1.1 documentation</a> &raquo;</li> <li><a href="../index.html" accesskey="U">Module code</a> &raquo;</li> </ul> </div> <div class="document"> <div class="documentwrapper"> <div class="bodywrapper"> <div class="body"> <h1>Source code for libpgm.graphskeleton</h1><div class="highlight"><pre> <span class="c"> <span class="c"> <span class="c"># </span> <span class="c"> <span class="c"># modification, are permitted provided that the following conditions are met:</span> <span class="c"> <span class="c"> <span class="c"> <span class="c"> <span class="c"># documentation and/or other materials provided with the distribution.</span> <span class="c"># * 
Neither the name of the CyberPoint International, LLC nor the</span> <span class="c"># names of its contributors may be used to endorse or promote products</span> <span class="c"> <span class="c"># </span> <span class="c"> <span class="c"># ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED</span> <span class="c"># WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE</span> <span class="c"># DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY</span> <span class="c"># DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES</span> <span class="c"># (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;</span> <span class="c"># LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND</span> <span class="c"># ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT</span> <span class="c"># (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS</span> <span class="c"># SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.</span> <span class="sd">&#39;&#39;&#39;</span> <span class="sd">This module provides tools for creating and using graph skeletons for Bayesian networks. A graph skeleton in this case is a vertex set and a directed edge set, with no further information about the specific nodes. 
</span> <span class="sd">&#39;&#39;&#39;</span> <span class="kn">from</span> <span class="nn">dictionary</span> <span class="kn">import</span> <span class="n">Dictionary</span> <span class="kn">import</span> <span class="nn">sys</span> <div class="viewcode-block" id="GraphSkeleton"><a class="viewcode-back" href="../../graphskeleton.html#libpgm.graphskeleton.GraphSkeleton">[docs]</a><span class="k">class</span> <span class="nc">GraphSkeleton</span><span class="p">(</span><span class="n">Dictionary</span><span class="p">):</span> <span class="sd">&#39;&#39;&#39;</span> <span class="sd"> This class represents a graph skeleton, meaning a vertex set and a directed edge set. It contains the attributes *V* and *E*, and the methods *load*, *getparents*, *getchildren*, and *toporder*.</span> <span class="sd"> </span> <span class="sd"> &#39;&#39;&#39;</span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span> <span class="bp">self</span><span class="o">.</span><span class="n">V</span> <span class="o">=</span> <span class="bp">None</span> <span class="sd">&#39;&#39;&#39;A list of names of vertices.&#39;&#39;&#39;</span> <span class="bp">self</span><span class="o">.</span><span class="n">E</span> <span class="o">=</span> <span class="bp">None</span> <span class="sd">&#39;&#39;&#39;A list of [origin, destination] pairs of vertices that constitute edges.&#39;&#39;&#39;</span> <span class="bp">self</span><span class="o">.</span><span class="n">alldata</span> <span class="o">=</span> <span class="bp">None</span> <span class="sd">&#39;&#39;&#39;(Inherited from dictionary) A variable that stores a key-indexable dictionary once it is loaded from a file.&#39;&#39;&#39;</span> <div class="viewcode-block" id="GraphSkeleton.load"><a class="viewcode-back" href="../../graphskeleton.html#libpgm.graphskeleton.GraphSkeleton.load">[docs]</a> <span class="k">def</span> <span class="nf">load</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">path</span><span class="p">):</span> <span class="sd">&#39;&#39;&#39;</span> <span class="sd"> Load the graph skeleton from a text file located at *path*. </span> <span class="sd"> </span> <span class="sd"> Text file must be a plaintext .txt file with a JSON-style representation of a dict. Dict must contain the top-level keys &quot;V&quot; and &quot;E&quot; with the following formats::</span> <span class="sd"> {</span> <span class="sd"> &#39;V&#39;: [&#39;&lt;vertex_name_1&gt;&#39;, ... , &#39;&lt;vertex_name_n&#39;],</span> <span class="sd"> &#39;E&#39;: [[&#39;vertex_of_origin&#39;, &#39;<API key>&#39;], ... ]</span> <span class="sd"> }</span> <span class="sd"> </span> <span class="sd"> Arguments:</span> <span class="sd"> 1. *path* -- The path to the file containing input data (e.g., &quot;mydictionary.txt&quot;).</span> <span class="sd"> </span> <span class="sd"> Attributes modified: </span> <span class="sd"> 1. *V* -- The set of vertices. </span> <span class="sd"> 2. 
*E* -- The set of edges.</span> <span class="sd"> &#39;&#39;&#39;</span> <span class="bp">self</span><span class="o">.</span><span class="n">dictload</span><span class="p">(</span><span class="n">path</span><span class="p">)</span> <span class="bp">self</span><span class="o">.</span><span class="n">V</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">alldata</span><span class="p">[</span><span class="s">&quot;V&quot;</span><span class="p">]</span> <span class="bp">self</span><span class="o">.</span><span class="n">E</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">alldata</span><span class="p">[</span><span class="s">&quot;E&quot;</span><span class="p">]</span> <span class="c"># free unused memory</span> <span class="k">del</span> <span class="bp">self</span><span class="o">.</span><span class="n">alldata</span> </div> <div class="viewcode-block" id="GraphSkeleton.getparents"><a class="viewcode-back" href="../../graphskeleton.html#libpgm.graphskeleton.GraphSkeleton.getparents">[docs]</a> <span class="k">def</span> <span class="nf">getparents</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">vertex</span><span class="p">):</span> <span class="sd">&#39;&#39;&#39;</span> <span class="sd"> Return the parents of *vertex* in the graph skeleton.</span> <span class="sd"> </span> <span class="sd"> Arguments:</span> <span class="sd"> 1. 
*vertex* -- The name of the vertex whose parents the function finds.</span> <span class="sd"> </span> <span class="sd"> Returns:</span> <span class="sd"> A list containing the names of the parents of the vertex.</span> <span class="sd"> &#39;&#39;&#39;</span> <span class="k">assert</span> <span class="p">(</span><span class="n">vertex</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">V</span><span class="p">),</span> <span class="s">&quot;The graph skeleton does not contain this vertex.&quot;</span> <span class="n">parents</span> <span class="o">=</span> <span class="p">[]</span> <span class="k">for</span> <span class="n">pair</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">E</span><span class="p">:</span> <span class="k">if</span> <span class="p">(</span><span class="n">pair</span><span class="p">[</span><span class="mi">1</span><span class="p">]</span> <span class="o">==</span> <span class="n">vertex</span><span class="p">):</span> <span class="n">parents</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">pair</span><span class="p">[</span><span class="mi">0</span><span class="p">])</span> <span class="k">return</span> <span class="n">parents</span> </div> <div class="viewcode-block" id="GraphSkeleton.getchildren"><a class="viewcode-back" href="../../graphskeleton.html#libpgm.graphskeleton.GraphSkeleton.getchildren">[docs]</a> <span class="k">def</span> <span class="nf">getchildren</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">vertex</span><span class="p">):</span> <span class="sd">&#39;&#39;&#39;</span> <span class="sd"> Return the children of *vertex* in the graph skeleton. </span> <span class="sd"> </span> <span class="sd"> Arguments:</span> <span class="sd"> 1. 
*vertex* -- The name of the vertex whose children the function finds.</span> <span class="sd"> </span> <span class="sd"> Returns:</span> <span class="sd"> A list containing the names of the children of the vertex.</span> <span class="sd"> &#39;&#39;&#39;</span> <span class="k">assert</span> <span class="p">(</span><span class="n">vertex</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">V</span><span class="p">),</span> <span class="s">&quot;The graph skeleton does not contain this vertex.&quot;</span> <span class="n">children</span> <span class="o">=</span> <span class="p">[]</span> <span class="k">for</span> <span class="n">pair</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">E</span><span class="p">:</span> <span class="k">if</span> <span class="p">(</span><span class="n">pair</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">==</span> <span class="n">vertex</span><span class="p">):</span> <span class="n">children</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">pair</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span> <span class="k">return</span> <span class="n">children</span> </div> <div class="viewcode-block" id="GraphSkeleton.toporder"><a class="viewcode-back" href="../../graphskeleton.html#libpgm.graphskeleton.GraphSkeleton.toporder">[docs]</a> <span class="k">def</span> <span class="nf">toporder</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span> <span class="sd">&#39;&#39;&#39;</span> <span class="sd"> Modify the vertices of the graph skeleton such that they are in topological order. </span> <span class="sd"> A topological order is an order of vertices such that if there is an edge from *u* to *v*, *u* appears before *v* in the ordering. 
It works only for directed ayclic graphs.</span> <span class="sd"> </span> <span class="sd"> Attributes modified:</span> <span class="sd"> 1. *V* -- The names of the vertices are put in topological order.</span> <span class="sd"> </span> <span class="sd"> The function also checks for cycles in the graph, and returns an error if one is found.</span> <span class="sd"> &#39;&#39;&#39;</span> <span class="n">Ecopy</span> <span class="o">=</span> <span class="p">[</span><span class="n">x</span><span class="p">[:]</span> <span class="k">for</span> <span class="n">x</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">E</span><span class="p">]</span> <span class="n">roots</span> <span class="o">=</span> <span class="p">[]</span> <span class="n">toporder</span> <span class="o">=</span> <span class="p">[]</span> <span class="k">for</span> <span class="n">vertex</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">V</span><span class="p">:</span> <span class="c"># find roots</span> <span class="k">if</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">getparents</span><span class="p">(</span><span class="n">vertex</span><span class="p">)</span> <span class="o">==</span> <span class="p">[]):</span> <span class="n">roots</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">vertex</span><span class="p">)</span> <span class="k">while</span> <span class="n">roots</span> <span class="o">!=</span> <span class="p">[]:</span> <span class="n">n</span> <span class="o">=</span> <span class="n">roots</span><span class="o">.</span><span class="n">pop</span><span class="p">()</span> <span class="n">toporder</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">n</span><span class="p">)</span> <span class="k">for</span> <span class="n">edge</span> <span 
class="ow">in</span> <span class="nb">reversed</span><span class="p">(</span><span class="n">Ecopy</span><span class="p">):</span> <span class="k">if</span> <span class="n">edge</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">==</span> <span class="n">n</span><span class="p">:</span> <span class="n">m</span> <span class="o">=</span> <span class="n">edge</span><span class="p">[</span><span class="mi">1</span><span class="p">]</span> <span class="n">Ecopy</span><span class="o">.</span><span class="n">remove</span><span class="p">(</span><span class="n">edge</span><span class="p">)</span> <span class="n">yesparent</span> <span class="o">=</span> <span class="bp">False</span> <span class="k">for</span> <span class="n">e</span> <span class="ow">in</span> <span class="n">Ecopy</span><span class="p">:</span> <span class="k">if</span> <span class="n">e</span><span class="p">[</span><span class="mi">1</span><span class="p">]</span> <span class="o">==</span> <span class="n">m</span><span class="p">:</span> <span class="n">yesparent</span> <span class="o">=</span> <span class="bp">True</span> <span class="k">break</span> <span class="k">if</span> <span class="n">yesparent</span> <span class="o">==</span> <span class="bp">False</span><span class="p">:</span> <span class="n">roots</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">m</span><span class="p">)</span> <span class="k">assert</span> <span class="p">(</span><span class="ow">not</span> <span class="n">Ecopy</span><span class="p">),</span> <span class="p">(</span><span class="s">&quot;Graph contains a cycle&quot;</span><span class="p">,</span> <span class="n">Ecopy</span><span class="p">)</span> <span class="bp">self</span><span class="o">.</span><span class="n">V</span> <span class="o">=</span> <span class="n">toporder</span> </pre></div></div></div> </div> </div> </div> <div class="sphinxsidebar"> <div class="<API key>"> 
<div id="searchbox" style="display: none"> <h3>Quick search</h3> <form class="search" action="../../search.html" method="get"> <input type="text" name="q" /> <input type="submit" value="Go" /> <input type="hidden" name="check_keywords" value="yes" /> <input type="hidden" name="area" value="default" /> </form> <p class="searchtip" style="font-size: 90%"> Enter search terms or a module, class or function name. </p> </div> <script type="text/javascript">$('#searchbox').show(0);</script> </div> </div> <div class="clearer"></div> </div> <div class="related"> <h3>Navigation</h3> <ul> <li class="right" style="margin-right: 10px"> <a href="../../genindex.html" title="General Index" >index</a></li> <li class="right" > <a href="../../py-modindex.html" title="Python Module Index" >modules</a> |</li> <li><a href="../../index.html">libpgm 1.1 documentation</a> &raquo;</li> <li><a href="../index.html" >Module code</a> &raquo;</li> </ul> </div> <div class="footer"> &copy; Copyright 2012, CyberPoint International, LLC. Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3. </div> </body> </html>
define(['../internal/baseCallback', '../internal/baseWhile'], function(baseCallback, baseWhile) {

  /**
   * Creates a slice of `array` excluding elements dropped from the end.
   * Elements are dropped until `predicate` returns falsey. The predicate is
   * bound to `thisArg` and invoked with three arguments: (value, index, array).
   *
   * If a property name is provided for `predicate` the created `_.property`
   * style callback returns the property value of the given element.
   *
   * If a value is also provided for `thisArg` the created `_.matchesProperty`
   * style callback returns `true` for elements that have a matching property
   * value, else `false`.
   *
   * If an object is provided for `predicate` the created `_.matches` style
   * callback returns `true` for elements that match the properties of the given
   * object, else `false`.
   *
   * @static
   * @memberOf _
   * @category Array
   * @param {Array} array The array to query.
   * @param {Function|Object|string} [predicate=_.identity] The function invoked
   *  per iteration.
   * @param {*} [thisArg] The `this` binding of `predicate`.
   * @returns {Array} Returns the slice of `array`.
   * @example
   *
   * _.dropRightWhile([1, 2, 3], function(n) {
   *   return n > 1;
   * });
   * // => [1]
   */
  function dropRightWhile(array, predicate, thisArg) {
    // Nothing to trim from an empty or missing array.
    if (!(array && array.length)) {
      return [];
    }
    // Normalize `predicate` into a callable taking (value, index, array),
    // then drop from the right while it stays truthy.
    var iteratee = baseCallback(predicate, thisArg, 3);
    return baseWhile(array, iteratee, true, true);
  }

  return dropRightWhile;
});
#include "SkBitmap.h"
#include "SkErrorInternals.h"
#include "SkOrderedReadBuffer.h"
#include "SkStream.h"
#include "SkTypeface.h"

// Default constructor: no backing memory. All optional decode/lookup tables
// (bitmap heap, typeface array, factory tables, bitmap decoder) start unset.
SkOrderedReadBuffer::SkOrderedReadBuffer() : INHERITED() {
    fMemoryPtr = NULL;
    fBitmapStorage = NULL;
    fTFArray = NULL;
    fTFCount = 0;
    fFactoryTDArray = NULL;
    fFactoryArray = NULL;
    fFactoryCount = 0;
    fBitmapDecoder = NULL;
}

// Reads directly from caller-owned memory; |data| must outlive this buffer.
SkOrderedReadBuffer::SkOrderedReadBuffer(const void* data, size_t size) : INHERITED() {
    fReader.setMemory(data, size);
    fMemoryPtr = NULL;
    fBitmapStorage = NULL;
    fTFArray = NULL;
    fTFCount = 0;
    fFactoryTDArray = NULL;
    fFactoryArray = NULL;
    fFactoryCount = 0;
    fBitmapDecoder = NULL;
}

// Copies the whole stream into an owned allocation (freed in the destructor).
SkOrderedReadBuffer::SkOrderedReadBuffer(SkStream* stream) {
    const size_t length = stream->getLength();
    fMemoryPtr = sk_malloc_throw(length);
    stream->read(fMemoryPtr, length);
    fReader.setMemory(fMemoryPtr, length);
    fBitmapStorage = NULL;
    fTFArray = NULL;
    fTFCount = 0;
    fFactoryTDArray = NULL;
    fFactoryArray = NULL;
    fFactoryCount = 0;
    fBitmapDecoder = NULL;
}

SkOrderedReadBuffer::~SkOrderedReadBuffer() {
    // fMemoryPtr is NULL unless the stream constructor was used; sk_free(NULL)
    // is a no-op.
    sk_free(fMemoryPtr);
    SkSafeUnref(fBitmapStorage);
}

// Primitive readers: thin forwards to the underlying SkReader32. Reads must
// occur in exactly the order the writer emitted them.
bool SkOrderedReadBuffer::readBool() {
    return fReader.readBool();
}

SkColor SkOrderedReadBuffer::readColor() {
    return fReader.readInt();
}

SkFixed SkOrderedReadBuffer::readFixed() {
    return fReader.readS32();
}

int32_t SkOrderedReadBuffer::readInt() {
    return fReader.readInt();
}

SkScalar SkOrderedReadBuffer::readScalar() {
    return fReader.readScalar();
}

uint32_t SkOrderedReadBuffer::readUInt() {
    return fReader.readU32();
}

int32_t SkOrderedReadBuffer::read32() {
    return fReader.readInt();
}

// Reads a length-prefixed string; |string| takes a copy of the bytes.
void SkOrderedReadBuffer::readString(SkString* string) {
    size_t len;
    const char* strContents = fReader.readString(&len);
    string->set(strContents, len);
}

// Reads an encoding tag (debug-verified against |encoding|) followed by a
// length-prefixed blob. Caller owns the returned allocation.
void* SkOrderedReadBuffer::readEncodedString(size_t* length, SkPaint::TextEncoding encoding) {
    SkDEBUGCODE(int32_t encodingType = ) fReader.readInt();
    SkASSERT(encodingType == encoding);
    *length = fReader.readInt();
    void* data = sk_malloc_throw(*length);
    memcpy(data, fReader.skip(SkAlign4(*length)), *length);
    return data;
}

void SkOrderedReadBuffer::readPoint(SkPoint* point) {
    point->fX = fReader.readScalar();
    point->fY = fReader.readScalar();
}

void SkOrderedReadBuffer::readMatrix(SkMatrix* matrix) {
    fReader.readMatrix(matrix);
}

void SkOrderedReadBuffer::readIRect(SkIRect* rect) {
    memcpy(rect, fReader.skip(sizeof(SkIRect)), sizeof(SkIRect));
}

void SkOrderedReadBuffer::readRect(SkRect* rect) {
    memcpy(rect, fReader.skip(sizeof(SkRect)), sizeof(SkRect));
}

void SkOrderedReadBuffer::readRegion(SkRegion* region) {
    fReader.readRegion(region);
}

void SkOrderedReadBuffer::readPath(SkPath* path) {
    fReader.readPath(path);
}

// Array readers: each record is a count (or byte length) followed by 4-byte
// aligned payload. The caller must supply a large enough destination; see
// getArrayCount() for peeking the size first.
uint32_t SkOrderedReadBuffer::readByteArray(void* value) {
    const uint32_t length = fReader.readU32();
    memcpy(value, fReader.skip(SkAlign4(length)), length);
    return length;
}

uint32_t SkOrderedReadBuffer::readColorArray(SkColor* colors) {
    const uint32_t count = fReader.readU32();
    const uint32_t byteLength = count * sizeof(SkColor);
    memcpy(colors, fReader.skip(SkAlign4(byteLength)), byteLength);
    return count;
}

uint32_t SkOrderedReadBuffer::readIntArray(int32_t* values) {
    const uint32_t count = fReader.readU32();
    const uint32_t byteLength = count * sizeof(int32_t);
    memcpy(values, fReader.skip(SkAlign4(byteLength)), byteLength);
    return count;
}

uint32_t SkOrderedReadBuffer::readPointArray(SkPoint* points) {
    const uint32_t count = fReader.readU32();
    const uint32_t byteLength = count * sizeof(SkPoint);
    memcpy(points, fReader.skip(SkAlign4(byteLength)), byteLength);
    return count;
}

uint32_t SkOrderedReadBuffer::readScalarArray(SkScalar* values) {
    const uint32_t count = fReader.readU32();
    const uint32_t byteLength = count * sizeof(SkScalar);
    memcpy(values, fReader.skip(SkAlign4(byteLength)), byteLength);
    return count;
}

// Peeks the next u32 (the count of the upcoming array) without advancing.
uint32_t SkOrderedReadBuffer::getArrayCount() {
    return *(uint32_t*)fReader.peek();
}

void SkOrderedReadBuffer::readBitmap(SkBitmap* bitmap) {
    const int width = this->readInt();
    const int height = this->readInt();
    // The writer stored a boolean value to determine whether an SkBitmapHeap was used during
    // writing.
    if (this->readBool()) {
        // An SkBitmapHeap was used for writing. Read the index from the stream and find the
        // corresponding SkBitmap in fBitmapStorage.
        const uint32_t index = fReader.readU32();
        fReader.readU32(); // bitmap generation ID (see <API key>::writeBitmap)
        if (fBitmapStorage) {
            *bitmap = *fBitmapStorage->getBitmap(index);
            fBitmapStorage->releaseRef(index);
            return;
        } else {
            // The bitmap was stored in a heap, but there is no way to access it. Set an error and
            // fall through to use a place holder bitmap.
            SkErrorInternals::SetError(kParseError_SkError, "<API key>::writeBitmap "
                                       "stored the SkBitmap in an SkBitmapHeap, but "
                                       "SkOrderedReadBuffer has no SkBitmapHeapReader to "
                                       "retrieve the SkBitmap.");
        }
    } else {
        // The writer stored false, meaning the SkBitmap was not stored in an SkBitmapHeap.
        const size_t length = this->readUInt();
        if (length > 0) {
            // A non-zero size means the SkBitmap was encoded.
            const void* data = this->skip(length);
            if (fBitmapDecoder != NULL && fBitmapDecoder(data, length, bitmap)) {
                if (bitmap->width() == width && bitmap->height() == height) {
                    return;
                }
                // This case can only be reached if extractSubset was called, so
                // the recorded width and height must be smaller than (or equal to
                // the encoded width and height.
                SkASSERT(width <= bitmap->width() && height <= bitmap->height());
                // FIXME: Once the writer is changed to record the (x,y) offset,
                // they will be used to store the correct portion of the picture.
                SkBitmap subsetBm;
#ifdef <API key>
                int32_t x = fReader.readS32();
                int32_t y = fReader.readS32();
                SkIRect subset = SkIRect::MakeXYWH(x, y, width, height);
#else
                SkIRect subset = SkIRect::MakeWH(width, height);
#endif
                if (bitmap->extractSubset(&subsetBm, subset)) {
                    bitmap->swap(subsetBm);
                    return;
                }
            }
            // This bitmap was encoded when written, but we are unable to decode, possibly due to
            // not having a decoder.
            SkErrorInternals::SetError(kParseError_SkError,
                                       "Could not decode bitmap. Resulting bitmap will be red.");
        } else {
            // A size of zero means the SkBitmap was simply flattened.
            bitmap->unflatten(*this);
            return;
        }
    }
    // Could not read the SkBitmap. Use a placeholder bitmap.
    bitmap->setConfig(SkBitmap::kARGB_8888_Config, width, height);
    bitmap->allocPixels();
    bitmap->eraseColor(SK_ColorRED);
}

// Resolves a 1-based typeface index against fTFArray; 0 or out-of-range
// indices yield NULL (out-of-range is also logged).
SkTypeface* SkOrderedReadBuffer::readTypeface() {
    uint32_t index = fReader.readU32();
    if (0 == index || index > (unsigned)fTFCount) {
        if (index) {
            SkDebugf("====== typeface index %d\n", index);
        }
        return NULL;
    } else {
        SkASSERT(fTFArray);
        // Stored indices are 1-based so that 0 can mean "no typeface".
        return fTFArray[index - 1];
    }
}

// Reconstructs a flattenable object: resolves its factory (from the fixed
// array, the growable TD array, or an inline function pointer, in that
// preference order), then invokes it and validates the recorded byte count.
SkFlattenable* SkOrderedReadBuffer::readFlattenable() {
    SkFlattenable::Factory factory = NULL;
    if (fFactoryCount > 0) {
        int32_t index = fReader.readU32();
        if (0 == index) {
            return NULL; // writer failed to give us the flattenable
        }
        index -= 1; // we stored the index-base-1
        SkASSERT(index < fFactoryCount);
        factory = fFactoryArray[index];
    } else if (fFactoryTDArray) {
        int32_t index = fReader.readU32();
        if (0 == index) {
            return NULL; // writer failed to give us the flattenable
        }
        index -= 1; // we stored the index-base-1
        factory = (*fFactoryTDArray)[index];
    } else {
        factory = (SkFlattenable::Factory)readFunctionPtr();
        if (NULL == factory) {
            return NULL; // writer failed to give us the flattenable
        }
    }
    // if we get here, factory may still be null, but if that is the case, the
    // failure was ours, not the writer.
    SkFlattenable* obj = NULL;
    uint32_t sizeRecorded = fReader.readU32();
    if (factory) {
        uint32_t offset = fReader.offset();
        obj = (*factory)(*this);
        // check that we read the amount we expected
        uint32_t sizeRead = fReader.offset() - offset;
        if (sizeRecorded != sizeRead) {
            // we could try to fix up the offset...
            sk_throw();
        }
    } else {
        // we must skip the remaining data
        fReader.skip(sizeRecorded);
    }
    return obj;
}
#include <sb6.h>

// Smallest possible sb6 demo: every frame it clears the default framebuffer
// to opaque red via glClearBufferfv and renders nothing else.
class simpleclear_app : public sb6::application
{
    // Run the base-class setup, then install our window title.
    void init()
    {
        static const char title[] = "OpenGL SuperBible - Simple Clear";

        sb6::application::init();
        memcpy(info.title, title, sizeof(title));
    }

    // Called once per frame; currentTime is unused because the clear color
    // never changes.
    virtual void render(double currentTime)
    {
        static const GLfloat clear_color[] = { 1.0f, 0.0f, 0.0f, 1.0f };

        glClearBufferfv(GL_COLOR, 0, clear_color);
    }
};

DECLARE_MAIN(simpleclear_app)
#if 0 #ifndef lint static const char copyright[] = "@( The Regents of the University of California. All rights reserved.\n"; #endif #ifndef lint static char sccsid[] = "@(#)nfsiod.c 8.4 (Berkeley) 5/3/95"; #endif #endif #include <sys/cdefs.h> __FBSDID("$FreeBSD$"); #include <sys/param.h> #include <sys/syslog.h> #include <sys/wait.h> #include <sys/linker.h> #include <sys/mount.h> #include <sys/sysctl.h> #include <err.h> #include <stdio.h> #include <stdlib.h> #include <unistd.h> #define MAXNFSDCNT 20 static void usage(void) { (void)fprintf(stderr, "usage: nfsiod [-n num_servers]\n"); exit(1); } int main(int argc, char *argv[]) { int ch; struct xvfsconf vfc; int error; unsigned int iodmin, iodmax, num_servers; size_t len; error = getvfsbyname("nfs", &vfc); if (error) { if (kldload("nfs") == -1) err(1, "kldload(nfs)"); error = getvfsbyname("nfs", &vfc); } if (error) errx(1, "NFS support is not available in the running kernel"); num_servers = 0; while ((ch = getopt(argc, argv, "n:")) != -1) switch (ch) { case 'n': num_servers = atoi(optarg); if (num_servers < 1) { warnx("nfsiod count %u; reset to %d", num_servers, 1); num_servers = 1; } if (num_servers > MAXNFSDCNT) { warnx("nfsiod count %u; reset to %d", num_servers, MAXNFSDCNT); num_servers = MAXNFSDCNT; } break; case '?': default: usage(); } argc -= optind; argv += optind; if (argc > 0) usage(); len = sizeof iodmin; error = sysctlbyname("vfs.nfs.iodmin", &iodmin, &len, NULL, 0); if (error < 0) err(1, "sysctlbyname(\"vfs.nfs.iodmin\")"); len = sizeof iodmax; error = sysctlbyname("vfs.nfs.iodmax", &iodmax, &len, NULL, 0); if (error < 0) err(1, "sysctlbyname(\"vfs.nfs.iodmax\")"); if (num_servers == 0) { /* no change */ printf("vfs.nfs.iodmin=%u\nvfs.nfs.iodmax=%u\n", iodmin, iodmax); exit(0); } /* Catch the case where we're lowering num_servers below iodmin */ if (iodmin > num_servers) { iodmin = num_servers; error = sysctlbyname("vfs.nfs.iodmin", NULL, 0, &iodmin, sizeof iodmin); if (error < 0) err(1, 
"sysctlbyname(\"vfs.nfs.iodmin\")"); } iodmax = num_servers; error = sysctlbyname("vfs.nfs.iodmax", NULL, 0, &iodmax, sizeof iodmax); if (error < 0) err(1, "sysctlbyname(\"vfs.nfs.iodmax\")"); exit (0); }
"""Upload the freshly built nightly wheel from ./dist to S3 and point
nightly/latest.json at it.

Expects exactly one ``*.whl`` in ./dist (the first glob match is used).
Exits with a non-zero status when no wheel is found so CI notices.
"""
import json
import pathlib
import sys

import boto3

dist_folder = pathlib.Path.cwd() / 'dist'

try:
    f = next(dist_folder.glob('*.whl'))
except StopIteration:
    # Bug fix: sys.exit() with no argument exits with status 0, so a missing
    # wheel previously looked like success to CI. Report on stderr and fail.
    print("No .whl files found in ./dist!", file=sys.stderr)
    sys.exit(1)

print("Uploading", f.name)
s3 = boto3.client('s3')
s3.upload_file(
    str(f), 'releases.wagtail.io', 'nightly/dist/' + f.name,
    ExtraArgs={'ACL': 'public-read'})

print("Updating latest.json")
boto3.resource('s3').Object('releases.wagtail.io', 'nightly/latest.json').put(
    ACL='public-read',
    Body=json.dumps({
        "url": 'https://releases.wagtail.io/nightly/dist/' + f.name,
    })
)
from chatterbot import ChatBot from chatterbot.adapters import Adapter from .base_case import ChatBotTestCase class <API key>(ChatBotTestCase): def <API key>(self): kwargs = self.get_kwargs() kwargs['storage_adapter'] = 'chatterbot.input.TerminalAdapter' with self.assertRaises(Adapter.<API key>): self.chatbot = ChatBot('Test Bot', **kwargs) def <API key>(self): kwargs = self.get_kwargs() kwargs['storage_adapter'] = 'chatterbot.storage.<API key>' try: self.chatbot = ChatBot('Test Bot', **kwargs) except Adapter.<API key>: self.fail('Test raised <API key> unexpectedly!') def <API key>(self): kwargs = self.get_kwargs() kwargs['input_adapter'] = 'chatterbot.storage.<API key>' with self.assertRaises(Adapter.<API key>): self.chatbot = ChatBot('Test Bot', **kwargs) def <API key>(self): kwargs = self.get_kwargs() kwargs['input_adapter'] = 'chatterbot.input.TerminalAdapter' try: self.chatbot = ChatBot('Test Bot', **kwargs) except Adapter.<API key>: self.fail('Test raised <API key> unexpectedly!') def <API key>(self): kwargs = self.get_kwargs() kwargs['output_adapter'] = 'chatterbot.input.TerminalAdapter' with self.assertRaises(Adapter.<API key>): self.chatbot = ChatBot('Test Bot', **kwargs) def <API key>(self): kwargs = self.get_kwargs() kwargs['output_adapter'] = 'chatterbot.output.TerminalAdapter' try: self.chatbot = ChatBot('Test Bot', **kwargs) except Adapter.<API key>: self.fail('Test raised <API key> unexpectedly!') def <API key>(self): kwargs = self.get_kwargs() kwargs['logic_adapters'] = ['chatterbot.input.TerminalAdapter'] with self.assertRaises(Adapter.<API key>): self.chatbot = ChatBot('Test Bot', **kwargs) def <API key>(self): kwargs = self.get_kwargs() kwargs['logic_adapters'] = ['chatterbot.logic.BestMatch'] try: self.chatbot = ChatBot('Test Bot', **kwargs) except Adapter.<API key>: self.fail('Test raised <API key> unexpectedly!') def <API key>(self): kwargs = self.get_kwargs() kwargs['storage_adapter'] = { 'import_path': 'chatterbot.storage.<API key>' } try: 
self.chatbot = ChatBot('Test Bot', **kwargs) except Adapter.<API key>: self.fail('Test raised <API key> unexpectedly!') def <API key>(self): kwargs = self.get_kwargs() kwargs['storage_adapter'] = { 'import_path': 'chatterbot.logic.BestMatch' } with self.assertRaises(Adapter.<API key>): self.chatbot = ChatBot('Test Bot', **kwargs) class MultiAdapterTests(ChatBotTestCase): def <API key>(self): count_before = len(self.chatbot.logic.adapters) self.chatbot.logic.add_adapter( 'chatterbot.logic.BestMatch' ) self.assertIsLength(self.chatbot.logic.adapters, count_before + 1) def <API key>(self): self.chatbot.logic.add_adapter('chatterbot.logic.TimeLogicAdapter') self.chatbot.logic.add_adapter('chatterbot.logic.BestMatch') self.chatbot.logic.<API key>('chatterbot.logic.<API key>', 1) self.assertEqual( type(self.chatbot.logic.adapters[1]).__name__, '<API key>' ) def <API key>(self): self.chatbot.logic.add_adapter('chatterbot.logic.TimeLogicAdapter') self.chatbot.logic.add_adapter('chatterbot.logic.<API key>') adapter_count = len(self.chatbot.logic.adapters) removed = self.chatbot.logic.<API key>('<API key>') self.assertTrue(removed) self.assertIsLength(self.chatbot.logic.adapters, adapter_count - 1) def <API key>(self): self.chatbot.logic.add_adapter('chatterbot.logic.TimeLogicAdapter') adapter_count = len(self.chatbot.logic.adapters) removed = self.chatbot.logic.<API key>('<API key>') self.assertFalse(removed) self.assertIsLength(self.chatbot.logic.adapters, adapter_count)
#ifndef <API key>
#define <API key>

#include <atlbase.h>
#include <atlcom.h>
#include <UIAutomationCore.h>

#include "base/compiler_specific.h"
#include "ui/base/ui_base_export.h"

namespace base {
namespace win {

// UIA Text provider implementation for edit controls.
// Exposes both the Value pattern (read-only flag) and the Text pattern to
// Microsoft UI Automation clients; most ITextProvider methods are stubbed
// out with E_NOTIMPL.
class UI_BASE_EXPORT UIATextProvider
    : public NON_EXPORTED_BASE(CComObjectRootEx<<API key>>),
      public IValueProvider,
      public ITextProvider {
 public:
  // ATL COM interface map: QueryInterface resolves IUnknown through the
  // ITextProvider branch.
  BEGIN_COM_MAP(UIATextProvider)
    <API key>(IUnknown, ITextProvider)
    COM_INTERFACE_ENTRY(IValueProvider)
    COM_INTERFACE_ENTRY(ITextProvider)
  END_COM_MAP()

  UIATextProvider();

  // Creates an instance of the UIATextProvider class.
  // Returns true on success
  static bool CreateTextProvider(bool editable, IUnknown** provider);

  // Toggles the editable flag reported through get_IsReadOnly.
  void set_editable(bool editable) {
    editable_ = editable;
  }

  // IValueProvider methods.
  STDMETHOD(get_IsReadOnly)(BOOL* read_only);

  // IValueProvider methods not implemented.
  STDMETHOD(SetValue)(const wchar_t* val) { return E_NOTIMPL; }
  STDMETHOD(get_Value)(BSTR* value) { return E_NOTIMPL; }

  // ITextProvider methods.
  STDMETHOD(GetSelection)(SAFEARRAY** ret) { return E_NOTIMPL; }
  STDMETHOD(GetVisibleRanges)(SAFEARRAY** ret) { return E_NOTIMPL; }
  STDMETHOD(RangeFromChild)(<API key>* child,
                            ITextRangeProvider** ret) {
    return E_NOTIMPL;
  }
  STDMETHOD(RangeFromPoint)(struct UiaPoint point,
                            ITextRangeProvider** ret) {
    return E_NOTIMPL;
  }
  STDMETHOD(get_DocumentRange)(ITextRangeProvider** ret) { return E_NOTIMPL; }
  STDMETHOD(<API key>)(enum <API key>* ret) { return E_NOTIMPL; }

 private:
  // NOTE(review): not initialized inline; presumably set by the constructor
  // (defined elsewhere) -- confirm.
  bool editable_;
};

}  // namespace win
}  // namespace base

#endif  // <API key>
using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Globalization; using NServiceKit.Text; namespace NServiceKit.Html { <summary>Dictionary of view data.</summary> public class ViewDataDictionary : IDictionary<string, object> { private readonly Dictionary<string, object> innerDictionary = new Dictionary<string, object>(StringComparer.OrdinalIgnoreCase); private object model; private ModelMetadata modelMetadata; private <API key> modelState; private TemplateInfo _templateMetadata; <summary>Initializes a new instance of the NServiceKit.Html.ViewDataDictionary class.</summary> public ViewDataDictionary() : this((object)null) { } <summary>Initializes a new instance of the NServiceKit.Html.ViewDataDictionary class.</summary> <param name="model">The model.</param> public ViewDataDictionary(object model) { Model = model; } <summary>Initializes a new instance of the NServiceKit.Html.ViewDataDictionary class.</summary> <exception cref="<API key>">Thrown when one or more required arguments are null.</exception> <param name="dictionary">The dictionary.</param> public ViewDataDictionary(ViewDataDictionary dictionary) { if (dictionary == null) { throw new <API key>("dictionary"); } foreach (var entry in dictionary) { innerDictionary.Add(entry.Key, entry.Value); } foreach (var entry in dictionary.ModelState) { ModelState.Add(entry.Key, entry.Value); } Model = dictionary.Model; // PERF: Don't unnecessarily instantiate the model metadata modelMetadata = dictionary.modelMetadata; } <summary>Gets the number of. 
</summary> <value>The count.</value> public int Count { get { return innerDictionary.Count; } } <summary>Gets a value indicating whether this object is read only.</summary> <value>true if this object is read only, false if not.</value> public bool IsReadOnly { get { return ((IDictionary<string, object>)innerDictionary).IsReadOnly; } } <summary>Gets the keys.</summary> <value>The keys.</value> public ICollection<string> Keys { get { return innerDictionary.Keys; } } <summary>Gets or sets the model.</summary> <value>The model.</value> public object Model { get { return model; } set { modelMetadata = null; SetModel(value); } } <summary>Gets or sets the model metadata.</summary> <value>The model metadata.</value> public virtual ModelMetadata ModelMetadata { get { if (modelMetadata == null && model != null) { modelMetadata = <API key>.Current.GetMetadataForType(() => model, model.GetType()); } return modelMetadata; } set { modelMetadata = value; } } <summary>Gets the state of the model.</summary> <value>The model state.</value> public <API key> ModelState { get { return modelState ?? (modelState = new <API key>()); } } private bool <API key>; <summary>Populate model state.</summary> <exception cref="Exception">Thrown when an exception error condition occurs.</exception> public virtual void PopulateModelState() { if (model == null) return; if (<API key>) return; lock (this) { if (<API key>) return; //Skip non-poco's, i.e. List modelState = new <API key>(); var modelType = model.GetType(); var listType = modelType.IsGenericType ? 
modelType.<API key>(typeof(IList<>)) : null; if (listType != null || model.GetType().IsArray) return; var strModel = TypeSerializer.SerializeToString(model); var map = TypeSerializer.<API key><Dictionary<string, string>>(strModel); foreach (var kvp in map) { var valueState = new ModelState { Value = new ValueProviderResult(kvp.Value, kvp.Value, CultureInfo.CurrentCulture) }; try { modelState.Add(kvp.Key, valueState); } catch (Exception ex) { ex.Message.PrintDump(); throw; } } <API key> = true; } } <summary>Indexer to get or set items within this collection using array index syntax.</summary> <param name="key">The key.</param> <returns>The indexed item.</returns> public object this[string key] { get { object value; innerDictionary.TryGetValue(key, out value); return value; } set { innerDictionary[key] = value; } } <summary>Gets or sets information describing the template.</summary> <value>Information describing the template.</value> public TemplateInfo TemplateInfo { get { if (_templateMetadata == null) { _templateMetadata = new TemplateInfo(); } return _templateMetadata; } set { _templateMetadata = value; } } <summary>Gets the values.</summary> <value>The values.</value> public ICollection<object> Values { get { return innerDictionary.Values; } } <summary>Adds key.</summary> <param name="item">The item to remove.</param> public void Add(KeyValuePair<string, object> item) { ((IDictionary<string, object>)innerDictionary).Add(item); } <summary>Adds key.</summary> <param name="key"> The key.</param> <param name="value">The value.</param> public void Add(string key, object value) { innerDictionary.Add(key, value); } <summary>Clears this object to its blank/initial state.</summary> public void Clear() { innerDictionary.Clear(); } <summary>Query if this object contains the given item.</summary> <param name="item">The item to remove.</param> <returns>true if the object is in this collection, false if not.</returns> public bool Contains(KeyValuePair<string, object> item) { 
return ((IDictionary<string, object>)innerDictionary).Contains(item); } <summary>Query if 'key' contains key.</summary> <param name="key">The key.</param> <returns>true if it succeeds, false if it fails.</returns> public bool ContainsKey(string key) { return innerDictionary.ContainsKey(key); } <summary>Copies to.</summary> <param name="array"> The array.</param> <param name="arrayIndex">Zero-based index of the array.</param> public void CopyTo(KeyValuePair<string, object>[] array, int arrayIndex) { ((IDictionary<string, object>)innerDictionary).CopyTo(array, arrayIndex); } <summary>Evals.</summary> <param name="expression">The expression.</param> <returns>A string.</returns> public object Eval(string expression) { ViewDataInfo info = GetViewDataInfo(expression); return (info != null) ? info.Value : null; } <summary>Evals.</summary> <param name="expression">The expression.</param> <param name="format"> Describes the format to use.</param> <returns>A string.</returns> public string Eval(string expression, string format) { object value = Eval(expression); if (value == null) { return String.Empty; } if (String.IsNullOrEmpty(format)) { return Convert.ToString(value, CultureInfo.CurrentCulture); } else { return String.Format(CultureInfo.CurrentCulture, format, value); } } internal static string FormatValueInternal(object value, string format) { if (value == null) { return String.Empty; } if (String.IsNullOrEmpty(format)) { return Convert.ToString(value, CultureInfo.CurrentCulture); } else { return String.Format(CultureInfo.CurrentCulture, format, value); } } <summary>Gets the enumerator.</summary> <returns>The enumerator.</returns> public IEnumerator<KeyValuePair<string, object>> GetEnumerator() { return innerDictionary.GetEnumerator(); } <summary>Gets view data information.</summary> <exception cref="ArgumentException">Thrown when one or more arguments have unsupported or illegal values.</exception> <param name="expression">The expression.</param> <returns>The view data 
information.</returns> public ViewDataInfo GetViewDataInfo(string expression) { if (String.IsNullOrEmpty(expression)) { throw new ArgumentException(MvcResources.Common_NullOrEmpty, "expression"); } return ViewDataEvaluator.Eval(this, expression); } <summary>Removes the given key.</summary> <param name="item">The item to remove.</param> <returns>true if it succeeds, false if it fails.</returns> public bool Remove(KeyValuePair<string, object> item) { return ((IDictionary<string, object>)innerDictionary).Remove(item); } <summary>Removes the given key.</summary> <param name="key">The key to remove.</param> <returns>true if it succeeds, false if it fails.</returns> public bool Remove(string key) { return innerDictionary.Remove(key); } <summary> This method will execute before the derived type's instance constructor executes. Derived types must be aware of this and should plan accordingly. For example, the logic in SetModel() should be simple enough so as not to depend on the "this" pointer referencing a fully constructed object. 
</summary> <param name="value">The value.</param> protected virtual void SetModel(object value) { model = value; } <summary>Attempts to get value from the given data.</summary> <param name="key"> The key.</param> <param name="value">The value.</param> <returns>true if it succeeds, false if it fails.</returns> public bool TryGetValue(string key, out object value) { return innerDictionary.TryGetValue(key, out value); } internal static class ViewDataEvaluator { <summary>Evals.</summary> <param name="vdd"> The vdd.</param> <param name="expression">The expression.</param> <returns>A ViewDataInfo.</returns> public static ViewDataInfo Eval(ViewDataDictionary vdd, string expression) { //Given an expression "foo.bar.baz" we look up the following (pseudocode): // this["foo.bar.baz.quux"] // this["foo.bar.baz"]["quux"] // this["foo.bar"]["baz.quux] // this["foo.bar"]["baz"]["quux"] // this["foo"]["bar.baz.quux"] // this["foo"]["bar.baz"]["quux"] // this["foo"]["bar"]["baz.quux"] // this["foo"]["bar"]["baz"]["quux"] ViewDataInfo evaluated = <API key>(vdd, expression); return evaluated; } private static ViewDataInfo <API key>(object indexableObject, string expression) { foreach (ExpressionPair expressionPair in <API key>(expression)) { string subExpression = expressionPair.Left; string postExpression = expressionPair.Right; ViewDataInfo subTargetInfo = GetPropertyValue(indexableObject, subExpression); if (subTargetInfo != null) { if (String.IsNullOrEmpty(postExpression)) { return subTargetInfo; } if (subTargetInfo.Value != null) { ViewDataInfo potential = <API key>(subTargetInfo.Value, postExpression); if (potential != null) { return potential; } } } } return null; } private static IEnumerable<ExpressionPair> <API key>(string expression) { // Produces an enumeration of all the combinations of complex property names // given a complex expression. See the list above for an example of the result // of the enumeration. 
yield return new ExpressionPair(expression, String.Empty); int lastDot = expression.LastIndexOf('.'); string subExpression = expression; string postExpression = string.Empty; while (lastDot > -1) { subExpression = expression.Substring(0, lastDot); postExpression = expression.Substring(lastDot + 1); yield return new ExpressionPair(subExpression, postExpression); lastDot = subExpression.LastIndexOf('.'); } } private static ViewDataInfo <API key>(object indexableObject, string key) { var dict = indexableObject as IDictionary<string, object>; object value = null; bool success = false; if (dict != null) { success = dict.TryGetValue(key, out value); } else { var tgvDel = TypeHelpers.<API key>(indexableObject.GetType()); if (tgvDel != null) { success = tgvDel(indexableObject, key, out value); } } if (success) { return new ViewDataInfo() { Container = indexableObject, Value = value }; } return null; } private static ViewDataInfo GetPropertyValue(object container, string propertyName) { // This method handles one "segment" of a complex property expression // First, we try to evaluate the property based on its indexer var value = <API key>(container, propertyName); if (value != null) { return value; } // If the indexer didn't return anything useful, continue... // If the container is a ViewDataDictionary then treat its Model property // as the container instead of the ViewDataDictionary itself. 
var vdd = container as ViewDataDictionary; if (vdd != null) { container = vdd.Model; } // If the container is null, we're out of options if (container == null) { return null; } // Second, we try to use PropertyDescriptors and treat the expression as a property name var descriptor = TypeDescriptor.GetProperties(container).Find(propertyName, true); if (descriptor == null) { return null; } return new ViewDataInfo(() => descriptor.GetValue(container)) { Container = container, PropertyDescriptor = descriptor }; } private struct ExpressionPair { <summary>The left.</summary> public readonly string Left; <summary>The right.</summary> public readonly string Right; <summary>Initializes a new instance of the NServiceKit.Html.ViewDataDictionary.ViewDataEvaluator class.</summary> <param name="left"> The left.</param> <param name="right">The right.</param> public ExpressionPair(string left, string right) { Left = left; Right = right; } } } IEnumerator IEnumerable.GetEnumerator() { return ((IEnumerable)innerDictionary).GetEnumerator(); } <summary>Converts this object to a raw JSON.</summary> <returns>A MvcHtmlString.</returns> public MvcHtmlString AsRawJson() { return MvcHtmlString.Create(Model.ToJson()); } <summary>Converts this object to a raw.</summary> <returns>A MvcHtmlString.</returns> public MvcHtmlString AsRaw() { return MvcHtmlString.Create((Model ?? "").ToString()); } } }
require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'

# Specs for Array#partition, which splits the receiver into two arrays:
# elements whose block result is truthy, then elements whose result is falsy.
describe "Array#partition" do
  it "returns two arrays" do
    [].partition {}.should == [[], []]
  end

  it "returns in the left array values for which the block evaluates to true" do
    ary = [0, 1, 2, 3, 4, 5]
    # Any truthy value (not only literal true) selects for the left array;
    # false and nil both select for the right one.
    ary.partition { |i| true }.should == [ary, []]
    ary.partition { |i| 5 }.should == [ary, []]
    ary.partition { |i| false }.should == [[], ary]
    ary.partition { |i| nil }.should == [[], ary]
    ary.partition { |i| i % 2 == 0 }.should == [[0, 2, 4], [1, 3, 5]]
    ary.partition { |i| i / 3 == 0 }.should == [[0, 1, 2], [3, 4, 5]]
  end

  it "does not return subclass instances on Array subclasses" do
    # Both the outer pair and each half must be plain Arrays, even when the
    # receiver is an Array subclass.
    result = ArraySpecs::MyArray[1, 2, 3].partition { |x| x % 2 == 0 }
    result.class.should == Array
    result[0].class.should == Array
    result[1].class.should == Array
  end
end
import json import os from urlparse import urlparse from django.conf import settings from django.core.files.storage import default_storage as storage from django.core.urlresolvers import resolve from django.http import Http404 from django.shortcuts import render from django.utils import translation from django.views.decorators.cache import cache_control from django.views.decorators.gzip import gzip_page import newrelic.agent import waffle from mkt.commonplace.models import DeployBuildId from mkt.regions.middleware import RegionMiddleware from mkt.account.helpers import fxa_auth_info from mkt.webapps.models import Webapp @gzip_page @cache_control(max_age=settings.<API key>) def commonplace(request, repo, **kwargs): """Serves the frontend single-page apps.""" if repo not in settings.FRONTEND_REPOS: raise Http404 BUILD_ID = get_build_id(repo) ua = request.META.get('HTTP_USER_AGENT', '').lower() include_splash = False <API key> = False if repo == 'fireplace': include_splash = True <API key> = ( 'mccs' in request.GET or ('mcc' in request.GET and 'mnc' in request.GET)) if not <API key>: # If we didn't receive mcc/mnc, then use geoip to detect region, # enabling fireplace to avoid the consumer_info API call that it # does normally to fetch the region. <API key> = True fxa_auth_state, fxa_auth_url = fxa_auth_info() site_settings = { 'dev_pay_providers': settings.DEV_PAY_PROVIDERS, 'fxa_auth_state': fxa_auth_state, 'fxa_auth_url': fxa_auth_url, } ctx = { 'BUILD_ID': BUILD_ID, 'LANG': request.LANG, 'DIR': lang_dir(request.LANG), 'include_splash': include_splash, 'repo': repo, 'robots': 'googlebot' in ua, 'site_settings': site_settings, 'newrelic_header': newrelic.agent.<API key>, 'newrelic_footer': newrelic.agent.<API key>, } if repo == 'fireplace': # For OpenGraph stuff. 
resolved_url = resolve(request.path) if resolved_url.url_name == 'detail': ctx = add_app_ctx(ctx, resolved_url.kwargs['app_slug']) ctx['waffle_switches'] = list( waffle.models.Switch.objects.filter(active=True) .values_list('name', flat=True)) media_url = urlparse(settings.MEDIA_URL) if media_url.netloc: ctx['media_origin'] = media_url.scheme + '://' + media_url.netloc if <API key>: region_middleware = RegionMiddleware() ctx['geoip_region'] = region_middleware.region_from_request(request) if repo in settings.REACT_REPOS: return render(request, 'commonplace/index_react.html', ctx) elif repo in settings.COMMONPLACE_REPOS: return render(request, 'commonplace/index.html', ctx) def get_allowed_origins(request, include_loop=True): current_domain = settings.DOMAIN current_origin = '%s://%s' % ('https' if request.is_secure() else 'http', current_domain) development_server = (settings.DEBUG or current_domain == 'marketplace-dev.allizom.org') allowed = [ # Start by allowing the 2 app:// variants for the current domain, # and then add the current http or https origin. 'app://packaged.%s' % current_domain, 'app://%s' % current_domain, current_origin, # Also include Tarako 'app://tarako.%s' % current_domain, ] # On dev, also allow localhost/mp.dev. if development_server: allowed.extend([ 'http://localhost:8675', 'https://localhost:8675', 'http://localhost', 'https://localhost', 'http://mp.dev', 'https://mp.dev', ]) if include_loop: # Include loop origins if necessary. allowed.extend([ 'https://hello.firefox.com', 'https://call.firefox.com', ]) # On dev, include loop dev & stage origin as well. if development_server: allowed.extend([ 'https://loop-webapp-dev.stage.mozaws.net', 'https://call.stage.mozaws.net', ]) return json.dumps(allowed) def get_build_id(repo): try: # Get the build ID from the database (bug 1083185). 
        # Happy path: a build ID previously stored in the database.
        return DeployBuildId.objects.get(repo=repo).build_id
    except DeployBuildId.DoesNotExist:
        # If we haven't initialized a build ID yet, read it directly from the
        # build_id.txt by our frontend builds.
        try:
            build_id_path = os.path.join(settings.MEDIA_ROOT, repo,
                                         'build_id.txt')
            with storage.open(build_id_path) as f:
                return f.read()
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt and
            # SystemExit; presumably only missing-file/IO errors are expected
            # here -- consider narrowing. Falls back to a constant so pages
            # still render in development.
            return 'dev'


def fxa_authorize(request):
    """
    A page to mimic commonplace's fxa-authorize page to handle login.
    """
    return render(request, 'commonplace/fxa_authorize.html')


def add_app_ctx(ctx, app_slug):
    """
    If we are hitting the Fireplace detail page, get the app for Open Graph
    tags. Returns ctx, with ctx['app'] set when the slug resolves.
    """
    try:
        app = Webapp.objects.get(app_slug=app_slug)
        ctx['app'] = app
    except Webapp.DoesNotExist:
        # Unknown slug: leave ctx unchanged rather than failing here.
        pass
    return ctx


@gzip_page
def iframe_install(request):
    # Install iframe; the allowed postMessage origins include the Loop
    # origins (see get_allowed_origins).
    return render(request, 'commonplace/iframe-install.html', {
        'allowed_origins': get_allowed_origins(request)
    })


@gzip_page
def potatolytics(request):
    # Analytics iframe; same origin list but with Loop origins excluded.
    return render(request, 'commonplace/potatolytics.html', {
        'allowed_origins': get_allowed_origins(request, include_loop=False)
    })


def lang_dir(lang):
    # Text direction for templates. NOTE(review): the `lang == 'rtl'` literal
    # comparison looks like a sentinel; the primary signal is Django's
    # active-language bidi flag -- confirm the sentinel is intentional.
    if lang == 'rtl' or translation.get_language_bidi():
        return 'rtl'
    else:
        return 'ltr'
// Use of this source code is governed by a BSD-style

/*
A trivial example of wrapping a C library in Go. For a more complex example
and explanation, see ../gmp/gmp.go.
*/
package stdio

/*
#include <stdio.h>
#include <stdlib.h>
#include <sys/stat.h>
#include <errno.h>

char* greeting = "hello, world";
*/
import "C"
import "unsafe"

// File wraps the C stdio FILE type.
type File C.FILE

// Stdout and Stderr expose the C standard streams as *File.
var Stdout = (*File)(C.stdout)
var Stderr = (*File)(C.stderr)

// Test reference to library symbol.
// Stdout and stderr are too special to be a reliable test.
var myerr = C.sys_errlist

// WriteString writes s to f via C fputs and flushes f. The temporary C copy
// of the string is freed before returning.
func (f *File) WriteString(s string) {
	p := C.CString(s)
	C.fputs(p, (*C.FILE)(f))
	C.free(unsafe.Pointer(p))
	f.Flush()
}

// Flush flushes any buffered output on f.
func (f *File) Flush() {
	C.fflush((*C.FILE)(f))
}

// Greeting and Gbytes demonstrate converting the C string declared in the
// preamble into a Go string and a Go byte slice, respectively.
var Greeting = C.GoString(C.greeting)
var Gbytes = C.GoBytes(unsafe.Pointer(C.greeting), C.int(len(Greeting)))
#ifndef _DSC_FILE_H_
#define _DSC_FILE_H_

/* One parsed line of a description (DSC) file, kept in a singly linked
   list. FileName and LineNum record where the line came from so parse
   errors can point at the offending file/line. */
typedef struct _SECTION_LINE {
  struct _SECTION_LINE  *Next;
  char                  *Line;
  char                  *FileName;
  UINT32                LineNum;
} SECTION_LINE;

// Use this structure to keep track of parsed file names. Then
// if we get a parse error we can figure out the file/line of
// the error and print a useful message.
typedef struct _DSC_FILE_NAME {
  struct _DSC_FILE_NAME *Next;
  char                  *FileName;
} DSC_FILE_NAME;

// We create a list of section names when we pre-parse a description file.
// Use this structure.
typedef struct _SECTION {
  struct _SECTION *Next;
  char            *Name;
  SECTION_LINE    *FirstLine;
} SECTION;

/* Depth of the saved-read-position stack used by save/restore below. */
#define MAX_SAVES 4

/* A fully pre-parsed description file: all lines in order, the section
   index, the current read cursor, and a small stack of saved positions. */
typedef struct {
  SECTION_LINE  *SavedPosition[MAX_SAVES];  /* saved-cursor stack */
  int           SavedPositionIndex;         /* top of the stack */
  SECTION       *Sections;                  /* list of parsed sections */
  SECTION_LINE  *Lines;                     /* all lines, in file order */
  SECTION       *LastSection;               /* list tail, for appends */
  SECTION_LINE  *LastLine;                  /* list tail, for appends */
  SECTION_LINE  *CurrentLine;               /* read cursor */
  DSC_FILE_NAME *FileName;                  /* names of parsed files */
  DSC_FILE_NAME *LastFileName;              /* list tail, for appends */
} DSC_FILE;

// Function prototypes

/* Parse FileName into DSC. */
int
DSCFileSetFile (
  DSC_FILE *DSC,
  char     *FileName
  )
;

/* Look up a pre-parsed section by name; NULL if absent. */
SECTION *
DSCFileFindSection (
  DSC_FILE *DSC,
  char     *Name
  )
;

/* Push the current read position onto the saved-position stack. */
int
DSCFileSavePosition (
  DSC_FILE *DSC
  )
;

/* NOTE(review): name redacted in this copy; by pairing with
   DSCFileSavePosition it presumably pops/restores a saved position. */
int
<API key> (
  DSC_FILE *DSC
  )
;

/* Copy the next line (up to LineLen) into Line and advance the cursor. */
char *
DSCFileGetLine (
  DSC_FILE *DSC,
  char     *Line,
  int      LineLen
  )
;

/* Initialize an empty DSC_FILE. */
int
DSCFileInit (
  DSC_FILE *DSC
  )
;

/* Release everything owned by a DSC_FILE. */
int
DSCFileDestroy (
  DSC_FILE *DSC
  )
;

#endif // ifndef _DSC_FILE_H_
#ifndef <API key>
#define <API key>

#include <string>

#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "content/public/browser/<API key>.h"

namespace base {
class DictionaryValue;
}

namespace content {
class BrowserContext;
class WebContents;
}

namespace gfx {
class Size;
}

namespace extensions {

// This class filters out incoming GuestView-specific IPC messages from the
// the IO thread or the UI thread.
class <API key> : public content::<API key> {
 public:
  <API key>(int render_process_id, content::BrowserContext* context);

  int render_process_id() const { return render_process_id_; }

 private:
  // Destruction is brokered through BrowserThread/DeleteHelper (see
  // OnDestruct), so the destructor is private.
  friend class content::BrowserThread;
  friend class base::DeleteHelper<<API key>>;

  ~<API key>() override;

  // content::<API key> implementation.
  void <API key>(const IPC::Message& message,
                 content::BrowserThread::ID* thread) override;
  void OnDestruct() const override;
  bool OnMessageReceived(const IPC::Message& message) override;

  // Message handlers on the UI thread.
  void OnAttachGuest(int element_instance_id,
                     int guest_instance_id,
                     const base::DictionaryValue& attach_params);
  void <API key>(int render_frame_id,
                 const std::string& view_id,
                 int element_instance_id,
                 const gfx::Size& element_size);
  void OnResizeGuest(int render_frame_id,
                     int element_instance_id,
                     const gfx::Size& new_size);

  // Runs on UI thread.
  void <API key>(int element_instance_id,
                 int <API key>,
                 int <API key>,
                 const gfx::Size& element_size,
                 content::WebContents* web_contents);

  const int render_process_id_;

  // Should only be accessed on the UI thread.
  content::BrowserContext* const browser_context_;

  // Weak pointers produced by this factory are bound to the IO thread.
  base::WeakPtrFactory<<API key>> weak_ptr_factory_;

  <API key>(<API key>);
};

}  // namespace extensions

#endif  // <API key>
package Moose::Object; BEGIN { $Moose::Object::AUTHORITY = 'cpan:STEVAN'; } { $Moose::Object::VERSION = '2.0602'; } use strict; use warnings; use Carp (); use Devel::GlobalDestruction (); use MRO::Compat (); use Scalar::Util (); use Try::Tiny (); use if ( not our $__mx_is_compiled ), 'Moose::Meta::Class'; use if ( not our $__mx_is_compiled ), metaclass => 'Moose::Meta::Class'; sub new { my $class = shift; my $real_class = Scalar::Util::blessed($class) || $class; my $params = $real_class->BUILDARGS(@_); return Class::MOP::Class->initialize($real_class)->new_object($params); } sub BUILDARGS { my $class = shift; if ( scalar @_ == 1 ) { unless ( defined $_[0] && ref $_[0] eq 'HASH' ) { Class::MOP::class_of($class)->throw_error( "Single parameters to new() must be a HASH ref", data => $_[0] ); } return { %{ $_[0] } }; } elsif ( @_ % 2 ) { Carp::carp( "The new() method for $class expects a hash reference or a key/value list." . " You passed an odd number of arguments" ); return { @_, undef }; } else { return {@_}; } } sub BUILDALL { # NOTE: we ask Perl if we even # need to do this first, to avoid # extra meta level calls return unless $_[0]->can('BUILD'); my ($self, $params) = @_; foreach my $method (reverse Class::MOP::class_of($self)-><API key>('BUILD')) { $method->{code}->execute($self, $params); } } sub DEMOLISHALL { my $self = shift; my ($<API key>) = @_; # NOTE: we ask Perl if we even # need to do this first, to avoid # extra meta level calls return unless $self->can('DEMOLISH'); my @isa; if ( my $meta = Class::MOP::class_of($self ) ) { @isa = $meta->linearized_isa; } else { # We cannot count on being able to retrieve a previously made # metaclass, _or_ being able to make a new one during global # destruction. 
However, we should still be able to use mro at # that time (at least tests suggest so ;) my $class_name = ref $self; @isa = @{ mro::get_linear_isa($class_name) } } foreach my $class (@isa) { no strict 'refs'; my $demolish = *{"${class}::DEMOLISH"}{CODE}; $self->$demolish($<API key>) if defined $demolish; } } sub DESTROY { my $self = shift; local $?; Try::Tiny::try { $self->DEMOLISHALL(Devel::GlobalDestruction::<API key>); } Try::Tiny::catch { die $_; }; return; } # support for UNIVERSAL::DOES ... BEGIN { my $does = UNIVERSAL->can("DOES") ? "SUPER::DOES" : "isa"; eval 'sub DOES { my ( $self, $class_or_role_name ) = @_; return $self->'.$does.'($class_or_role_name) || $self->does($class_or_role_name); }'; } # new does() methods will be created # as appropiate see Moose::Meta::Role sub does { my ($self, $role_name) = @_; my $class = Scalar::Util::blessed($self) || $self; my $meta = Class::MOP::Class->initialize($class); (defined $role_name) || $meta->throw_error("You must supply a role name to does()"); return 1 if $meta->can('does_role') && $meta->does_role($role_name); return 0; } sub dump { my $self = shift; require Data::Dumper; local $Data::Dumper::Maxdepth = shift if @_; Data::Dumper::Dumper $self; } 1; # ABSTRACT: The base object for Moose =pod =head1 NAME Moose::Object - The base object for Moose =head1 VERSION version 2.0602 =head1 DESCRIPTION This class is the default base class for all Moose-using classes. When you C<use Moose> in this class, your class will inherit from this class. It provides a default constructor and destructor, which run all of the C<BUILD> and C<DEMOLISH> methods in the inheritance hierarchy, respectively. You don't actually I<need> to inherit from this in order to use Moose, but it makes it easier to take advantage of all of Moose's features. =head1 METHODS =over 4 =item B<< Moose::Object->new(%params|$params) >> This method calls C<< $class->BUILDARGS(@_) >>, and then creates a new instance of the appropriate class. 
Once the instance is created, it calls C<< $instance->BUILD($params) >> for each C<BUILD> method in the inheritance hierarchy. =item B<< Moose::Object->BUILDARGS(%params|$params) >> The default implementation of this method accepts a hash or hash reference of named parameters. If it receives a single argument that I<isn't> a hash reference it throws an error. You can override this method in your class to handle other types of options passed to the constructor. This method should always return a hash reference of named options. =item B<< $object->does($role_name) >> This returns true if the object does the given role. =item B<< $object->DOES($class_or_role_name) >> This is a a Moose role-aware implementation of L<UNIVERSAL/DOES>. This is effectively the same as writing: $object->does($name) || $object->isa($name) This method will work with Perl 5.8, which did not implement C<UNIVERSAL::DOES>. =item B<< $object->dump($maxdepth) >> This is a handy utility for C<Data::Dumper>ing an object. By default, the maximum depth is 1, to avoid making a mess. =item B<< $object->DESTROY >> A default destructor is provided, which calls C<< $instance->DEMOLISH($<API key>) >> for each C<DEMOLISH> method in the inheritance hierarchy. =back =head1 BUGS See L<Moose/BUGS> for details on reporting bugs. =head1 AUTHOR Moose is maintained by the Moose Cabal, along with the help of many contributors. See L<Moose/CABAL> and L<Moose/CONTRIBUTORS> for details. =head1 COPYRIGHT AND LICENSE This software is copyright (c) 2012 by Infinity Interactive, Inc.. This is free software; you can redistribute it and/or modify it under the same terms as the Perl 5 programming language system itself. =cut __END__
<?php namespace Zend\Mime; /** * Support class for MultiPart Mime Messages */ class Mime { const TYPE_OCTETSTREAM = 'application/octet-stream'; const TYPE_TEXT = 'text/plain'; const TYPE_HTML = 'text/html'; const ENCODING_7BIT = '7bit'; const ENCODING_8BIT = '8bit'; const <API key> = 'quoted-printable'; const ENCODING_BASE64 = 'base64'; const <API key> = 'attachment'; const DISPOSITION_INLINE = 'inline'; const LINELENGTH = 72; const LINEEND = "\n"; const <API key> = 'multipart/alternative'; const MULTIPART_MIXED = 'multipart/mixed'; const MULTIPART_RELATED = 'multipart/related'; protected $boundary; protected static $makeUnique = 0; // lookup-Tables for QuotedPrintable public static $qpKeys = array( "\x00","\x01","\x02","\x03","\x04","\x05","\x06","\x07", "\x08","\x09","\x0A","\x0B","\x0C","\x0D","\x0E","\x0F", "\x10","\x11","\x12","\x13","\x14","\x15","\x16","\x17", "\x18","\x19","\x1A","\x1B","\x1C","\x1D","\x1E","\x1F", "\x7F","\x80","\x81","\x82","\x83","\x84","\x85","\x86", "\x87","\x88","\x89","\x8A","\x8B","\x8C","\x8D","\x8E", "\x8F","\x90","\x91","\x92","\x93","\x94","\x95","\x96", "\x97","\x98","\x99","\x9A","\x9B","\x9C","\x9D","\x9E", "\x9F","\xA0","\xA1","\xA2","\xA3","\xA4","\xA5","\xA6", "\xA7","\xA8","\xA9","\xAA","\xAB","\xAC","\xAD","\xAE", "\xAF","\xB0","\xB1","\xB2","\xB3","\xB4","\xB5","\xB6", "\xB7","\xB8","\xB9","\xBA","\xBB","\xBC","\xBD","\xBE", "\xBF","\xC0","\xC1","\xC2","\xC3","\xC4","\xC5","\xC6", "\xC7","\xC8","\xC9","\xCA","\xCB","\xCC","\xCD","\xCE", "\xCF","\xD0","\xD1","\xD2","\xD3","\xD4","\xD5","\xD6", "\xD7","\xD8","\xD9","\xDA","\xDB","\xDC","\xDD","\xDE", "\xDF","\xE0","\xE1","\xE2","\xE3","\xE4","\xE5","\xE6", "\xE7","\xE8","\xE9","\xEA","\xEB","\xEC","\xED","\xEE", "\xEF","\xF0","\xF1","\xF2","\xF3","\xF4","\xF5","\xF6", "\xF7","\xF8","\xF9","\xFA","\xFB","\xFC","\xFD","\xFE", "\xFF" ); public static $qpReplaceValues = array( "=00","=01","=02","=03","=04","=05","=06","=07", "=08","=09","=0A","=0B","=0C","=0D","=0E","=0F", 
"=10","=11","=12","=13","=14","=15","=16","=17", "=18","=19","=1A","=1B","=1C","=1D","=1E","=1F", "=7F","=80","=81","=82","=83","=84","=85","=86", "=87","=88","=89","=8A","=8B","=8C","=8D","=8E", "=8F","=90","=91","=92","=93","=94","=95","=96", "=97","=98","=99","=9A","=9B","=9C","=9D","=9E", "=9F","=A0","=A1","=A2","=A3","=A4","=A5","=A6", "=A7","=A8","=A9","=AA","=AB","=AC","=AD","=AE", "=AF","=B0","=B1","=B2","=B3","=B4","=B5","=B6", "=B7","=B8","=B9","=BA","=BB","=BC","=BD","=BE", "=BF","=C0","=C1","=C2","=C3","=C4","=C5","=C6", "=C7","=C8","=C9","=CA","=CB","=CC","=CD","=CE", "=CF","=D0","=D1","=D2","=D3","=D4","=D5","=D6", "=D7","=D8","=D9","=DA","=DB","=DC","=DD","=DE", "=DF","=E0","=E1","=E2","=E3","=E4","=E5","=E6", "=E7","=E8","=E9","=EA","=EB","=EC","=ED","=EE", "=EF","=F0","=F1","=F2","=F3","=F4","=F5","=F6", "=F7","=F8","=F9","=FA","=FB","=FC","=FD","=FE", "=FF" ); public static $qpKeysString = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D\x0E\x0F\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1A\x1B\x1C\x1D\x1E\x1F\x7F\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8A\x8B\x8C\x8D\x8E\x8F\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9A\x9B\x9C\x9D\x9E\x9F\xA0\xA1\xA2\xA3\xA4\xA5\xA6\xA7\xA8\xA9\xAA\xAB\xAC\xAD\xAE\xAF\xB0\xB1\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA\xBB\xBC\xBD\xBE\xBF\xC0\xC1\xC2\xC3\xC4\xC5\xC6\xC7\xC8\xC9\xCA\xCB\xCC\xCD\xCE\xCF\xD0\xD1\xD2\xD3\xD4\xD5\xD6\xD7\xD8\xD9\xDA\xDB\xDC\xDD\xDE\xDF\xE0\xE1\xE2\xE3\xE4\xE5\xE6\xE7\xE8\xE9\xEA\xEB\xEC\xED\xEE\xEF\xF0\xF1\xF2\xF3\xF4\xF5\xF6\xF7\xF8\xF9\xFA\xFB\xFC\xFD\xFE\xFF"; /** * Check if the given string is "printable" * * Checks that a string contains no unprintable characters. If this returns * false, encode the string for secure delivery. * * @param string $str * @return bool */ public static function isPrintable($str) { return (strcspn($str, static::$qpKeysString) == strlen($str)); } /** * Encode a given string with the QUOTED_PRINTABLE mechanism and wrap the lines. 
* * @param string $str * @param int $lineLength Defaults to {@link LINELENGTH} * @param string $lineEnd Defaults to {@link LINEEND} * @return string */ public static function <API key>($str, $lineLength = self::LINELENGTH, $lineEnd = self::LINEEND) { $out = ''; $str = self::<API key>($str); // Split encoded text into separate lines while ($str) { $ptr = strlen($str); if ($ptr > $lineLength) { $ptr = $lineLength; } // Ensure we are not splitting across an encoded character $pos = strrpos(substr($str, 0, $ptr), '='); if ($pos !== false && $pos >= $ptr - 2) { $ptr = $pos; } // Check if there is a space at the end of the line and rewind if ($ptr > 0 && $str[$ptr - 1] == ' ') { --$ptr; } // Add string and continue $out .= substr($str, 0, $ptr) . '=' . $lineEnd; $str = substr($str, $ptr); } $out = rtrim($out, $lineEnd); $out = rtrim($out, '='); return $out; } /** * Converts a string into quoted printable format. * * @param string $str * @return string */ private static function <API key>($str) { $str = str_replace('=', '=3D', $str); $str = str_replace(static::$qpKeys, static::$qpReplaceValues, $str); $str = rtrim($str); return $str; } /** * Encode a given string with the QUOTED_PRINTABLE mechanism for Mail Headers. * * Mail headers depend on an extended quoted printable algorithm otherwise * a range of bugs can occur. 
* * @param string $str * @param string $charset * @param int $lineLength Defaults to {@link LINELENGTH} * @param string $lineEnd Defaults to {@link LINEEND} * @return string */ public static function <API key>($str, $charset, $lineLength = self::LINELENGTH, $lineEnd = self::LINEEND) { // Reduce line-length by the length of the required delimiter, charsets and encoding $prefix = sprintf('=?%s?Q?', $charset); $lineLength = $lineLength-strlen($prefix)-3; $str = self::<API key>($str); // Mail-Header required chars have to be encoded also: $str = str_replace(array('?', ' ', '_'), array('=3F', '=20', '=5F'), $str); // initialize first line, we need it anyways $lines = array(0 => ""); // Split encoded text into separate lines $tmp = ""; while (strlen($str) > 0) { $currentLine = max(count($lines)-1, 0); $token = static::<API key>($str); $str = substr($str, strlen($token)); $tmp .= $token; if ($token == '=20') { // only if we have a single char token or space, we can append the // tempstring it to the current line or start a new line if necessary. if (strlen($lines[$currentLine] . $tmp) > $lineLength) { $lines[$currentLine+1] = $tmp; } else { $lines[$currentLine] .= $tmp; } $tmp = ""; } // don't forget to append the rest to the last line if (strlen($str) == 0) { $lines[$currentLine] .= $tmp; } } // assemble the lines together by pre- and appending delimiters, charset, encoding. for ($i = 0; $i < count($lines); $i++) { $lines[$i] = " " . $prefix . $lines[$i] . "?="; } $str = trim(implode($lineEnd, $lines)); return $str; } /** * Retrieves the first token from a quoted printable string. * * @param string $str * @return string */ private static function <API key>($str) { if (substr($str, 0, 1) == "=") { $token = substr($str, 0, 3); } else { $token = substr($str, 0, 1); } return $token; } /** * Encode a given string in mail header compatible base64 encoding. 
* * @param string $str * @param string $charset * @param int $lineLength Defaults to {@link LINELENGTH} * @param string $lineEnd Defaults to {@link LINEEND} * @return string */ public static function encodeBase64Header($str, $charset, $lineLength = self::LINELENGTH, $lineEnd = self::LINEEND) { $prefix = '=?' . $charset . '?B?'; $suffix = '?='; $remainingLength = $lineLength - strlen($prefix) - strlen($suffix); $encodedValue = static::encodeBase64($str, $remainingLength, $lineEnd); $encodedValue = str_replace($lineEnd, $suffix . $lineEnd . ' ' . $prefix, $encodedValue); $encodedValue = $prefix . $encodedValue . $suffix; return $encodedValue; } /** * Encode a given string in base64 encoding and break lines * according to the maximum linelength. * * @param string $str * @param int $lineLength Defaults to {@link LINELENGTH} * @param string $lineEnd Defaults to {@link LINEEND} * @return string */ public static function encodeBase64($str, $lineLength = self::LINELENGTH, $lineEnd = self::LINEEND) { return rtrim(chunk_split(base64_encode($str), $lineLength, $lineEnd)); } /** * Constructor * * @param null|string $boundary * @access public */ public function __construct($boundary = null) { // This string needs to be somewhat unique if ($boundary === null) { $this->boundary = '=_' . md5(microtime(1) . static::$makeUnique++); } else { $this->boundary = $boundary; } } /** * Encode the given string with the given encoding. * * @param string $str * @param string $encoding * @param string $EOL EOL string; defaults to {@link LINEEND} * @return string */ public static function encode($str, $encoding, $EOL = self::LINEEND) { switch ($encoding) { case self::ENCODING_BASE64: return static::encodeBase64($str, self::LINELENGTH, $EOL); case self::<API key>: return static::<API key>($str, self::LINELENGTH, $EOL); default: /** * @todo 7Bit and 8Bit is currently handled the same way. 
*/ return $str; } } /** * Return a MIME boundary * * @access public * @return string */ public function boundary() { return $this->boundary; } /** * Return a MIME boundary line * * @param string $EOL Defaults to {@link LINEEND} * @access public * @return string */ public function boundaryLine($EOL = self::LINEEND) { return $EOL . '--' . $this->boundary . $EOL; } /** * Return MIME ending * * @param string $EOL Defaults to {@link LINEEND} * @access public * @return string */ public function mimeEnd($EOL = self::LINEEND) { return $EOL . '--' . $this->boundary . '--' . $EOL; } }
<?php

namespace QATools\QATools\HtmlElements\Element;

/**
 * Represents a block of text on a web page.
 */
class TextBlock extends <API key>
{

	/**
	 * Returns the text contained in the text block.
	 *
	 * @return string
	 */
	public function getText()
	{
		$wrapped = $this->getWrappedElement();

		return $wrapped->getText();
	}

}
#ifndef <API key>
#define <API key>

#include <string>
#include <libtorrent/kademlia/find_data.hpp>
#include <libtorrent/kademlia/item.hpp>

namespace libtorrent { namespace dht
{

// DHT traversal that fetches an immutable or mutable item from the
// network, delivering each candidate value through a data callback.
class get_item : public find_data
{
public:
	// invoked for every item received; returning true from the callback
	// is significant to the traversal (see done()/put() below)
	typedef boost::function<bool(item&)> data_callback;

	// called when a value arrives from a peer. pk/seq/sig are only
	// meaningful for mutable items.
	void got_data(bdecode_node const& v,
		char const* pk,
		boost::uint64_t seq,
		char const* sig);

	// for immutable items
	get_item(node_impl& node
		, node_id target
		, data_callback const& dcallback);

	// for mutable items
	get_item(node_impl& node
		, char const* pk
		, std::string const& salt
		, data_callback const& dcallback);

	virtual char const* name() const;

protected:
	virtual observer_ptr new_observer(void* ptr, udp::endpoint const& ep,
		node_id const& id);
	virtual bool invoke(observer_ptr o);
	virtual void done();
	// re-publishes the retrieved item to the closest nodes
	void put(std::vector<std::pair<node_entry, std::string> > const& v);

	data_callback m_data_callback;
	item m_data;           // best item seen so far
	std::string m_salt;    // salt for mutable lookups (empty otherwise)
};

// observer that parses get responses and feeds them back to get_item
class get_item_observer : public find_data_observer
{
public:
	get_item_observer(
		boost::intrusive_ptr<traversal_algorithm> const& algorithm
		, udp::endpoint const& ep, node_id const& id)
		: find_data_observer(algorithm, ep, id)
	{}

	virtual void reply(msg const&);
};

} } // namespace libtorrent::dht

#endif // <API key>
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ....testing import assert_equal from ..preprocess import MaskTool def <API key>(): input_map = dict(args=dict(argstr='%s', ), count=dict(argstr='-count', position=2, ), datum=dict(argstr='-datum %s', ), dilate_inputs=dict(argstr='-dilate_inputs %s', ), dilate_results=dict(argstr='-dilate_results %s', ), environ=dict(nohash=True, usedefault=True, ), fill_dirs=dict(argstr='-fill_dirs %s', requires=['fill_holes'], ), fill_holes=dict(argstr='-fill_holes', ), frac=dict(argstr='-frac %s', ), ignore_exception=dict(nohash=True, usedefault=True, ), in_file=dict(argstr='-input %s', copyfile=False, mandatory=True, position=-1, ), inter=dict(argstr='-inter', ), out_file=dict(argstr='-prefix %s', name_source='in_file', name_template='%s_mask', ), outputtype=dict(), terminal_output=dict(nohash=True, ), union=dict(argstr='-union', ), ) inputs = MaskTool.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): yield assert_equal, getattr(inputs.traits()[key], metakey), value def <API key>(): output_map = dict(out_file=dict(), ) outputs = MaskTool.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): yield assert_equal, getattr(outputs.traits()[key], metakey), value
package org.hisp.dhis.period; import static org.junit.Assert.assertEquals; import java.util.List; import org.joda.time.DateTime; import org.junit.Before; import org.junit.Test; /** * @author Lars Helge Overland */ public class <API key> { private DateTime startDate; private DateTime endDate; private DateTime testDate; private CalendarPeriodType periodType; @Before public void before() { periodType = new <API key>(); } @Test public void testCreatePeriod() { testDate = new DateTime( 2009, 2, 15, 0, 0 ); startDate = new DateTime( 2008, 4, 1, 0, 0 ); endDate = new DateTime( 2009, 3, 31, 0, 0 ); Period period = periodType.createPeriod( testDate.toDate() ); assertEquals( startDate.toDate(), period.getStartDate() ); assertEquals( endDate.toDate(), period.getEndDate() ); testDate = new DateTime( 2009, 9, 12, 0, 0 ); period = periodType.createPeriod( testDate.toDate() ); startDate = new DateTime( 2009, 4, 1, 0, 0 ); endDate = new DateTime( 2010, 3, 31, 0, 0 ); assertEquals( startDate.toDate(), period.getStartDate() ); assertEquals( endDate.toDate(), period.getEndDate() ); } @Test public void testGetNextPeriod() { testDate = new DateTime( 2009, 2, 15, 0, 0 ); Period period = periodType.createPeriod( testDate.toDate() ); period = periodType.getNextPeriod( period ); startDate = new DateTime( 2009, 4, 1, 0, 0 ); endDate = new DateTime( 2010, 3, 31, 0, 0 ); assertEquals( startDate.toDate(), period.getStartDate() ); assertEquals( endDate.toDate(), period.getEndDate() ); } @Test public void <API key>() { testDate = new DateTime( 2009, 2, 15, 0, 0 ); Period period = periodType.createPeriod( testDate.toDate() ); period = periodType.getPreviousPeriod( period ); startDate = new DateTime( 2007, 4, 1, 0, 0 ); endDate = new DateTime( 2008, 3, 31, 0, 0 ); assertEquals( startDate.toDate(), period.getStartDate() ); assertEquals( endDate.toDate(), period.getEndDate() ); } @Test public void testGeneratePeriods() { testDate = new DateTime( 2009, 2, 15, 0, 0 ); List<Period> periods = 
periodType.generatePeriods( testDate.toDate() ); assertEquals( 11, periods.size() ); assertEquals( periodType.createPeriod( new DateTime( 2003, 4, 1, 0, 0 ).toDate() ), periods.get( 0 ) ); assertEquals( periodType.createPeriod( new DateTime( 2004, 4, 1, 0, 0 ).toDate() ), periods.get( 1 ) ); assertEquals( periodType.createPeriod( new DateTime( 2005, 4, 1, 0, 0 ).toDate() ), periods.get( 2 ) ); assertEquals( periodType.createPeriod( new DateTime( 2006, 4, 1, 0, 0 ).toDate() ), periods.get( 3 ) ); assertEquals( periodType.createPeriod( new DateTime( 2007, 4, 1, 0, 0 ).toDate() ), periods.get( 4 ) ); assertEquals( periodType.createPeriod( new DateTime( 2008, 4, 1, 0, 0 ).toDate() ), periods.get( 5 ) ); assertEquals( periodType.createPeriod( new DateTime( 2009, 4, 1, 0, 0 ).toDate() ), periods.get( 6 ) ); assertEquals( periodType.createPeriod( new DateTime( 2010, 4, 1, 0, 0 ).toDate() ), periods.get( 7 ) ); assertEquals( periodType.createPeriod( new DateTime( 2011, 4, 1, 0, 0 ).toDate() ), periods.get( 8 ) ); assertEquals( periodType.createPeriod( new DateTime( 2012, 4, 1, 0, 0 ).toDate() ), periods.get( 9 ) ); assertEquals( periodType.createPeriod( new DateTime( 2013, 4, 1, 0, 0 ).toDate() ), periods.get( 10 ) ); testDate = new DateTime( 2009, 9, 12, 0, 0 ); periods = periodType.generatePeriods( testDate.toDate() ); assertEquals( 11, periods.size() ); assertEquals( periodType.createPeriod( new DateTime( 2004, 4, 1, 0, 0 ).toDate() ), periods.get( 0 ) ); } }
id: <API key> title: Factorialize a Number isRequired: true challengeType: 5 videoUrl: '' localeTitle: Factorialize عدد ## Description <section id="description"> عودة المضروب من عدد صحيح المقدمة. إذا تم تمثيل العدد الصحيح بالحرف n ، فإن العامل الحاسوبي هو نتاج جميع الأعداد الصحيحة الموجبة أقل من أو يساوي n. غالبًا ما يتم تمثيل العوامل بعبارة الاختزال <code>n!</code> على سبيل المثال: <code>5! = 1 * 2 * 3 * 4 * 5 = 120</code> فقط تزويد الأعداد الصحيحة التي تزيد عن أو تساوي الصفر بالوظيفة. تذكر استخدام <a href="http: ## Instructions <section id="instructions"> </section> ## Tests <section id='tests'> yml tests: - text: يجب أن يعيد <code>factorialize(5)</code> رقمًا. testString: 'assert(typeof factorialize(5) === "number", "<code>factorialize(5)</code> should return a number.");' - text: يجب أن يعيد <code>factorialize(5)</code> 120. testString: 'assert(factorialize(5) === 120, "<code>factorialize(5)</code> should return 120.");' - text: يجب أن يعيد التجهيز <code>factorialize(10)</code> 3628800. testString: 'assert(factorialize(10) === 3628800, "<code>factorialize(10)</code> should return 3628800.");' - text: يجب أن تعيد إنتاجية <code>factorialize(20)</code> 2432902008176640000. testString: 'assert(factorialize(20) === 2432902008176640000, "<code>factorialize(20)</code> should return 2432902008176640000.");' - text: يجب أن يعيد <code>factorialize(0)</code> 1. testString: 'assert(factorialize(0) === 1, "<code>factorialize(0)</code> should return 1.");' </section> ## Challenge Seed <section id='challengeSeed'> <div id='js-seed'> js function factorialize(num) { return num; } factorialize(5); </div> </section> ## Solution <section id='solution'> js // solution required </section>
# <API key>: true require 'rails_helper' module Spree RSpec.describe ReimbursementType::StoreCredit do let(:reimbursement) { create(:reimbursement, return_items_count: 2) } let(:return_item) { reimbursement.return_items.first } let(:return_item2) { reimbursement.return_items.last } let(:payment) { reimbursement.order.payments.first } let(:simulate) { false } let!(:<API key>) { Spree::RefundReason.find_or_create_by!(name: Spree::RefundReason::<API key>, mutable: false) } let!(:primary_credit_type) { create(:primary_credit_type) } let(:created_by_user) { create(:user, email: 'user@email.com') } let!(:<API key>) { create(:<API key>) } subject { Spree::ReimbursementType::StoreCredit.reimburse(reimbursement, [return_item, return_item2], simulate, created_by: created_by_user) } before do reimbursement.update!(total: reimbursement.calculated_total) end describe '.reimburse' do context 'simulate is true' do let(:simulate) { true } context 'for store credits that the customer used' do before do expect(Spree::ReimbursementType::StoreCredit).to receive(:<API key>).and_return([payment]) end it 'creates readonly refunds for all store credit payments' do expect(subject.map(&:class)).to eq [Spree::Refund] expect(subject.map(&:readonly?)).to eq [true] end it 'does not save to the database' do expect { subject }.to_not change { payment.refunds.count } end end context 'for return items that were not paid for with store credit' do before do expect(Spree::ReimbursementType::StoreCredit).to receive(:<API key>).and_return([]) end context 'creates one readonly lump credit for all outstanding balance payable to the customer' do it 'creates a credit that is read only' do expect(subject.map(&:class)).to eq [Spree::Reimbursement::Credit] expect(subject.map(&:readonly?)).to eq [true] end it 'creates a credit which amounts to the sum of the return items rounded down' do expect(return_item).to receive(:total).and_return(10.0076) expect(return_item2).to receive(:total).and_return(10.0023) 
expect(subject.sum(&:amount)).to eq 20.0 end end it 'does not save to the database' do expect { subject }.to_not change { Spree::Reimbursement::Credit.count } end end end context 'simulate is false' do let(:simulate) { false } context 'for store credits that the customer used' do before do expect(Spree::ReimbursementType::StoreCredit).to receive(:<API key>).and_return([payment]) end it 'performs refunds for all store credit payments' do expect { subject }.to change { payment.refunds.count }.by(1) expect(payment.refunds.sum(:amount)).to eq reimbursement.return_items.to_a.sum(&:total) end end context 'for return items that were not paid for with store credit' do before do expect(Spree::ReimbursementType::StoreCredit).to receive(:<API key>).and_return([]) end it 'creates one lump credit for all outstanding balance payable to the customer' do expect { subject }.to change { Spree::Reimbursement::Credit.count }.by(1) expect(subject.sum(&:amount)).to eq reimbursement.return_items.to_a.sum(&:total) end it "creates a store credit with the same currency as the reimbursement's order" do expect { subject }.to change { Spree::StoreCredit.count }.by(1) expect(Spree::StoreCredit.last.currency).to eq reimbursement.order.currency end context 'without a user with email address "spree@example.com" in the database' do before do default_user = Spree::LegacyUser.find_by(email: "spree@example.com") default_user.destroy if default_user end it "creates a store credit with the same currency as the reimbursement's order" do expect { subject }.to change { Spree::StoreCredit.count }.by(1) expect(Spree::StoreCredit.last.currency).to eq reimbursement.order.currency end end end end end end end
# Generated by Django 2.1.7 on 2019-03-25 09:40

import versatileimagefield.fields
from django.db import migrations


class Migration(migrations.Migration):
    """Adds an optional ``avatar`` image field to the user model."""

    dependencies = [
        ("account", "<API key>"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="avatar",
            field=versatileimagefield.fields.VersatileImageField(
                blank=True,
                null=True,
                upload_to="user-avatars",
            ),
        ),
    ]
import csv from bs4 import BeautifulSoup from collections import Counter import re import os OUTPUT_NAME = os.getenv('OUTPUT_NAME', '<API key>.csv') csv_file = open('<API key>.csv') csv_reader = csv.DictReader(csv_file) # Tranform individual label to candidate pair label label_map = { 'pos_ahok': 'pos_ahok_djarot', 'pos_djarot': 'pos_ahok_djarot', 'pos_anies': 'pos_anies_sandi', 'pos_sandi': 'pos_anies_sandi', 'pos_agus': 'pos_agus_sylvi', 'pos_sylvi': 'pos_agus_sylvi', 'neg_ahok': 'neg_ahok_djarot', 'neg_djarot': 'neg_ahok_djarot', 'neg_anies': 'neg_anies_sandi', 'neg_sandi': 'neg_anies_sandi', 'neg_agus': 'neg_agus_sylvi', 'neg_sylvi': 'neg_agus_sylvi', 'oot': 'oot' } fields = ['title', 'raw_content', 'labels'] train_file = open(OUTPUT_NAME, 'w') csv_writer = csv.DictWriter(train_file, fields) csv_writer.writeheader() for row in csv_reader: title = row['title'] raw_content = row['raw_content'] labels = [] label_1 = row['sentiment_1'] if label_1 != '': <API key> = label_map[label_1] if not <API key> in labels: labels.append(<API key>) label_2 = row['sentiment_2'] if label_2 != '': <API key> = label_map[label_2] if not <API key> in labels: labels.append(<API key>) label_3 = row['sentiment_3'] if label_3 != '': <API key> = label_map[label_3] if not <API key> in labels: labels.append(<API key>) # Skip content if label not exists if not labels: continue label_str = ','.join(labels) data_row = {'title': title, 'raw_content': raw_content, 'labels': label_str} csv_writer.writerow(data_row) print OUTPUT_NAME, 'created' csv_file.close() train_file.close()
<?php require_once 'phing/Task.php'; require_once 'phing/tasks/ext/phk/PhkPackageWebAccess.php'; class PhkPackageTask extends Task { /** * @var string */ private $outputFile; /** * @var string */ private $inputDirectory; /** * @var string */ private $phkCreatorPath; /** * @var PhkPackageWebAccess */ private $webAccess; /** * @var array */ private $modifiers = array(); /** * @var array */ private $options = array(); /** * @return PhkPackageWebAccess */ public function createWebAccess() { return ($this->webAccess = new PhkPackageWebAccess()); } /** * @param string $crcCheck */ public function setCrcCheck($crcCheck) { $this->options['crc_check'] = ('true' == $crcCheck ? true : false); } /** * @param string $webRunScript */ public function setWebRunScript($webRunScript) { $this->options['web_run_script'] = $webRunScript; } /** * @param string $cliRunScript */ public function setCliRunScript($cliRunScript) { $this->options['cli_run_script'] = $cliRunScript; } /** * @param string $libRunScript */ public function setLibRunScript($libRunScript) { $this->options['lib_run_script'] = $libRunScript; } /** * @param string $name */ public function setName($name) { $this->options['name'] = $name; } /** * @param string $webMainRedirect */ public function setWebMainRedirect($webMainRedirect) { $this->options['web_main_redirect'] = ('true' == $webMainRedirect ? 
true : false); } /** * @param string $pluginClass */ public function setPluginClass($pluginClass) { $this->options['plugin_class'] = $pluginClass; } /** * @param string $version */ public function setVersion($version) { $this->options['version'] = $version; } /** * @param string $summary */ public function setSummary($summary) { $this->options['summary'] = $summary; } /** * @param string $inputDirectory */ public function setInputDirectory($inputDirectory) { $this->inputDirectory = $inputDirectory; } /** * @param string $outputFile */ public function setOutputFile($outputFile) { $this->outputFile = $outputFile; } /** * May be none, gzip or bzip2. * * @param string $compress */ public function setCompress($compress) { $this->modifiers['compress'] = $compress; } /** * True or false. * * @param srting $strip */ public function setStrip($strip) { $this->modifiers['strip'] = $strip; } /** * Path to PHK_Creator.phk file. * * @param srting $path */ public function setPhkCreatorPath($path) { $this->phkCreatorPath = $path; } public function init() { } /** * Main method... */ public function main() { /* * Check for empty first - speed ;) */ if (!is_file($this->phkCreatorPath)) { throw new BuildException('You must specify the "phkcreatorpath" attribute for PHK task.'); } if (empty($this->inputDirectory)) { throw new BuildException('You must specify the "inputdirectory" attribute for PHK task.'); } if (empty($this->outputFile)) { throw new BuildException('You must specify the "outputfile" attribute for PHK task.'); } require_once $this->phkCreatorPath; $mountPoint = PHK_Mgr::mount($this->outputFile, PHK::F_CREATOR); $phkManager = PHK_Mgr::instance($mountPoint); /* * Add files. */ $phkManager->ftree()->merge_file_tree('/', $this->inputDirectory, $this->modifiers); /* * Add web_access to options, if present. 
*/ if (!is_null($this->webAccess)) { $webAccessPaths = $this->webAccess->getPaths(); if (!empty($webAccessPaths)) { $this->options['web_access'] = $webAccessPaths; } } $phkManager->set_options($this->options); /* * Intercept output (in PHP we can't intercept stream). */ ob_start(); /* * Create file... */ $phkManager->dump(); /* * Print with Phing log... */ $output = trim(ob_get_clean()); $output = explode("\n", $output); foreach ($output as $line) { /* * TODO Change preg_math to more faster alternative. */ if (preg_match('/^---/', $line)) { continue; } $this->log($line); } /* * Set rights for generated file... Don't use umask() - see * notes in official documentation for this function. */ chmod($this->outputFile, 0644); } }
package scodec
package codecs

import scodec.bits.BitVector

/** Property-based checks for the `optional` combinator. */
class OptionalCodecTest extends CodecSuite {

  "the optional combinator" should {

    "produce the target value on true" in {
      forAll { (n: Int) =>
        val c = optional(provide(true), int32)
        shouldDecodeFullyTo(c, BitVector.fromInt(n), Some(n))
      }
    }

    "produce none on false" in {
      forAll { (n: Int) =>
        val c = optional(provide(false), int32)
        // When the guard is false, decoding must consume nothing and
        // yield None, leaving the full input as the remainder.
        val result = c.decode(BitVector.fromInt(n))
        val Attempt.Successful(DecodeResult(decoded, remainder)) = result
        decoded shouldBe None
        remainder shouldBe BitVector.fromInt(n)
      }
    }
  }
}
// PORTAL DETAILS MAIN /////////////////////////////////////////////// // main code block that renders the portal details in the sidebar and // methods that highlight the portal in the map view. window.renderPortalDetails = function(guid) { selectPortal(window.portals[guid] ? guid : null); if (guid && !portalDetail.isFresh(guid)) { portalDetail.request(guid); } // TODO? handle the case where we request data for a particular portal GUID, but it *isn't* in // window.portals.... if(!window.portals[guid]) { urlPortal = guid; $('#portaldetails').html(''); if(isSmartphone()) { $('.fullimg').remove(); $('#mobileinfo').html('<div style="text-align: center"><b>tap here for info screen</b></div>'); } return; } var portal = window.portals[guid]; var data = portal.options.data; var details = portalDetail.get(guid); // details and data can get out of sync. if we have details, construct a matching 'data' if (details) { data = <API key>(details); } var modDetails = details ? '<div class="mods">'+getModDetails(details)+'</div>' : ''; var miscDetails = details ? <API key>(guid,details) : ''; var resoDetails = details ? getResonatorDetails(details) : ''; //TODO? other status details... var statusDetails = details ? '' : '<div id="portalStatus">Loading details...</div>'; var img = fixPortalImageUrl(details ? details.image : data.image); var title = data.title; var lat = data.latE6/1E6; var lng = data.lngE6/1E6; var imgTitle = details ? <API key>(details) : data.title; imgTitle += '\n\nClick to show full image.'; var portalDetailObj = details ? window.<API key>(details) : undefined; var <API key> = ''; if(portalDetailObj) { <API key> = '<table description="Portal Photo Details" class="portal_details">'; // TODO (once the data supports it) - portals can have multiple photos. display all, with navigation between them // (at this time the data isn't returned from the server - although a count of images IS returned!) 
if(portalDetailObj.submitter.name.length > 0) { if(portalDetailObj.submitter.team) { submitterSpan = '<span class="' + (portalDetailObj.submitter.team === 'RESISTANCE' ? 'res' : 'enl') + ' nickname">'; } else { submitterSpan = '<span class="none">'; } <API key> += '<tr><th>Photo by:</th><td>' + submitterSpan + <API key>(portalDetailObj.submitter.name) + '</span>'+(portalDetailObj.submitter.voteCount !== undefined ? ' (' + portalDetailObj.submitter.voteCount + ' votes)' : '')+'</td></tr>'; } if(portalDetailObj.submitter.link.length > 0) { <API key> += '<tr><th>Photo from:</th><td><a href="' + <API key>(portalDetailObj.submitter.link) + '">' + <API key>(portalDetailObj.submitter.link) + '</a></td></tr>'; } if(portalDetailObj.description) { <API key> += '<tr class="padding-top"><th>Description:</th><td>' + <API key>(portalDetailObj.description) + '</td></tr>'; } // if(d.descriptiveText.map.ADDRESS) { // <API key> += '<tr><th>Address:</th><td>' + <API key>(d.descriptiveText.map.ADDRESS) + '</td></tr>'; <API key> += '</table>'; } // portal level. start with basic data - then extend with fractional info in tooltip if available var levelInt = (teamStringToId(data.team) == TEAM_NONE) ? 0 : data.level; var levelDetails = levelInt; if (details) { levelDetails = getPortalLevel(details); if(levelDetails != 8) { if(levelDetails==Math.ceil(levelDetails)) levelDetails += "\n8"; else levelDetails += "\n" + (Math.ceil(levelDetails) - levelDetails)*8; levelDetails += " resonator level(s) needed for next portal level"; } else { levelDetails += "\nfully upgraded"; } } levelDetails = "Level " + levelDetails; var linkDetails = []; var posOnClick = 'window.showPortalPosLinks('+lat+','+lng+',\''+<API key>(title)+'\')'; var permalinkUrl = '/intel?ll='+lat+','+lng+'&z=17&pll='+lat+','+lng; if (typeof android !== 'undefined' && android && android.intentPosLink) { // android devices. 
one share link option - and the android app provides an interface to share the URL, // share as a geo: intent (navigation via google maps), etc var shareLink = $('<div>').html( $('<a>').attr({onclick:posOnClick}).text('Share portal') ).html(); linkDetails.push('<aside>'+shareLink+'</aside>'); } else { // non-android - a permalink for the portal var permaHtml = $('<div>').html( $('<a>').attr({href:permalinkUrl, title:'Create a URL link to this portal'}).text('Portal link') ).html(); linkDetails.push ( '<aside>'+permaHtml+'</aside>' ); // and a map link popup dialog var mapHtml = $('<div>').html( $('<a>').attr({onclick:posOnClick, title:'Link to alternative maps (Google, etc)'}).text('Map links') ).html(); linkDetails.push('<aside>'+mapHtml+'</aside>'); } $('#portaldetails') .html('') //to ensure it's clear .attr('class', TEAM_TO_CSS[teamStringToId(data.team)]) .append( $('<h3>').attr({class:'title'}).text(data.title), $('<span>').attr({ class: 'close', title: 'Close [w]', onclick:'renderPortalDetails(null); if(isSmartphone()) show("map");', accesskey: 'w' }).text('X'), // help cursor via ".imgpreview img" $('<div>') .attr({class:'imgpreview', title:imgTitle, style:"background-image: url('"+img+"')"}) .append( $('<span>').attr({id:'level', title: levelDetails}).text(levelInt), $('<div>').attr({class:'portalDetails'}).html(<API key>), $('<img>').attr({class:'hide', src:img}) ), modDetails, miscDetails, resoDetails, statusDetails, '<div class="linkdetails">' + linkDetails.join('') + '</div>' ); // only run the hooks when we have a portalDetails object - most plugins rely on the extended data // TODO? another hook to call always, for any plugins that can work with less data? 
if (details) { runHooks('<API key>', {guid: guid, portal: portal, portalDetails: details, portalData: data}); } } window.<API key> = function(guid,d) { var randDetails; if (d) { var linkInfo = getPortalLinks(guid); var linkCount = linkInfo.in.length + linkInfo.out.length; var links = {incoming: linkInfo.in.length, outgoing: linkInfo.out.length}; function linkExpl(t) { return '<tt title="'+links.outgoing+' links out (8 max)\n'+links.incoming+' links in\n('+(links.outgoing+links.incoming)+' total)">'+t+'</tt>'; } var linksText = [linkExpl('links'), linkExpl(links.outgoing+' out / '+links.incoming+' in')]; var player = d.owner ? '<span class="nickname">' + d.owner + '</span>' : '-'; var playerText = ['owner', player]; var fieldCount = <API key>(guid); var fieldsText = ['fields', fieldCount]; var apGainText = getAttackApGainText(d,fieldCount,linkCount); var attackValues = <API key>(d); // collect and html-ify random data var randDetailsData = [ // these pieces of data are only relevant when the portal is captured // maybe check if portal is captured and remove? // But this makes the info panel look rather empty for unclaimed portals playerText, getRangeText(d), linksText, fieldsText, getMitigationText(d,linkCount), getEnergyText(d), // and these have some use, even for uncaptured portals apGainText, getHackDetailsText(d), ]; if(attackValues.attack_frequency != 0) randDetailsData.push([ '<span title="attack frequency" class="<API key>">attack frequency</span>', '×'+attackValues.attack_frequency]); if(attackValues.hit_bonus != 0) randDetailsData.push(['hit bonus', attackValues.hit_bonus+'%']); if(attackValues.force_amplifier != 0) randDetailsData.push([ '<span title="force amplifier" class="<API key>">force amplifier</span>', '×'+attackValues.force_amplifier]); // artifact details // 2014-02-06: stock site changed from supporting 'jarvis shards' to 'amar artifacts'(?) - so let's see what we can do to be generic... 
$.each(artifact.getArtifactTypes(),function(index,type) { var artdata = artifact.getPortalData (guid, type); if (artdata) { var details = artifact.<API key>(type); if (details) { // the genFourColumnTable function below doesn't handle cases where one column is null and the other isn't - so default to *something* in both columns var target = ['',''], shards = [details.fragmentName,'(none)']; if (artdata.target) { target = ['target', '<span class="'+TEAM_TO_CSS[artdata.target]+'">'+(artdata.target==TEAM_RES?'Resistance':'Enlightened')+'</span>']; } if (artdata.fragments) { shards = [details.fragmentName, '#'+artdata.fragments.join(', #')]; } randDetailsData.push (target, shards); } else { console.warn('Unknown artifact type '+type+': no names, so cannot display'); } } }); randDetails = '<table id="randdetails">' + genFourColumnTable(randDetailsData) + '</table>'; } return randDetails; } // draws link-range and hack-range circles around the portal with the // given details. Clear them if parameter 'd' is null. window.setPortalIndicators = function(p) { if(<API key>) map.removeLayer(<API key>); <API key> = null; if(<API key>) map.removeLayer(<API key>); <API key> = null; // if we have a portal... if(p) { var coord = p.getLatLng(); // range is only known for sure if we have portal details // TODO? render a min range guess until details are loaded..? var d = portalDetail.get(p.options.guid); if (d) { var range = getPortalRange(d); <API key> = (range.range > 0 ? L.geodesicCircle(coord, range.range, { fill: false, color: <API key>, weight: 3, dashArray: range.isLinkable ? undefined : "10,10", clickable: false }) : L.circle(coord, range.range, { fill: false, stroke: false, clickable: false }) ).addTo(map); } <API key> = L.circle(coord, HACK_RANGE, { fill: false, color: <API key>, weight: 2, clickable: false } ).addTo(map); } } // highlights portal with given GUID. Automatically clears highlights // on old selection. Returns false if the selected portal changed. 
// Returns true if it's still the same portal that just needs an // update. window.selectPortal = function(guid) { var update = selectedPortal === guid; var oldPortalGuid = selectedPortal; selectedPortal = guid; var oldPortal = portals[oldPortalGuid]; var newPortal = portals[guid]; // Restore style of unselected portal if(!update && oldPortal) setMarkerStyle(oldPortal,false); // Change style of selected portal if(newPortal) { setMarkerStyle(newPortal, true); if (map.hasLayer(newPortal)) { newPortal.bringToFront(); } } setPortalIndicators(newPortal); runHooks('portalSelected', {selectedPortalGuid: guid, <API key>: oldPortalGuid}); return update; }
# <API key>: true module SafeZip class Extract Error = Class.new(StandardError) <API key> = Class.new(Error) <API key> = Class.new(Error) <API key> = Class.new(Error) AlreadyExistsError = Class.new(Error) NoMatchingError = Class.new(Error) ExtractError = Class.new(Error) attr_reader :archive_path def initialize(archive_file) @archive_path = archive_file end def extract(opts = {}) params = SafeZip::ExtractParams.new(**opts) if Feature.enabled?(:safezip_use_rubyzip, default_enabled: true) <API key>(params) else <API key>(params) end end private def <API key>(params) ::Zip::File.open(archive_path) do |zip_archive| # Extract all files in the following order: # 1. Directories first, # 2. Files next, # 3. Symlinks last (or anything else) extracted = extract_all_entries(zip_archive, params, zip_archive.lazy.select(&:directory?)) extracted += extract_all_entries(zip_archive, params, zip_archive.lazy.select(&:file?)) extracted += extract_all_entries(zip_archive, params, zip_archive.lazy.reject(&:directory?).reject(&:file?)) raise NoMatchingError, 'No entries extracted' unless extracted > 0 end end def extract_all_entries(zip_archive, params, entries) entries.count do |zip_entry| SafeZip::Entry.new(zip_archive, zip_entry, params) .extract end end def <API key>(params) # Requires UnZip at least 6.00 Info-ZIP. # -n never overwrite existing files args = %W(unzip -n -qq #{archive_path}) # We add * to end of directory, because we want to extract directory and all subdirectories args += params.<API key> # Target directory where we extract args += %W(-d #{params.extract_path}) unless system(*args) raise Error, 'archive failed to extract' end end end end
<?php namespace App\Http\Controllers\Application;

use App\Http\Controllers\Controller;
use Session;
use Input;
use Redirect;

class LanguageController extends Controller {

    /**
     * Persist the user's chosen application language in the session and
     * send them back to the page they came from.
     *
     * @return Response
     */
    public function postChange()
    {
        $language = Input::get('language');
        Session::put('language', $language);

        return Redirect::back();
    }

}
<?php namespace Zend\Ldap\Node\Schema; use Zend\Ldap; use Zend\Ldap\Converter; use Zend\Ldap\Node; /** * Zend\Ldap\Node\Schema\OpenLDAP provides a simple data-container for the Schema node of * an OpenLDAP server. */ class OpenLdap extends Node\Schema { /** * The attribute Types * * @var array */ protected $attributeTypes = null; /** * The object classes * * @var array */ protected $objectClasses = null; /** * The LDAP syntaxes * * @var array */ protected $ldapSyntaxes = null; /** * The matching rules * * @var array */ protected $matchingRules = null; /** * The matching rule use * * @var array */ protected $matchingRuleUse = null; /** * Parses the schema * * @param \Zend\Ldap\Dn $dn * @param \Zend\Ldap\Ldap $ldap * @return OpenLdap Provides a fluid interface */ protected function parseSchema(Ldap\Dn $dn, Ldap\Ldap $ldap) { parent::parseSchema($dn, $ldap); $this->loadAttributeTypes(); $this->loadLdapSyntaxes(); $this->loadMatchingRules(); $this->loadMatchingRuleUse(); $this->loadObjectClasses(); return $this; } /** * Gets the attribute Types * * @return array */ public function getAttributeTypes() { return $this->attributeTypes; } /** * Gets the object classes * * @return array */ public function getObjectClasses() { return $this->objectClasses; } /** * Gets the LDAP syntaxes * * @return array */ public function getLdapSyntaxes() { return $this->ldapSyntaxes; } /** * Gets the matching rules * * @return array */ public function getMatchingRules() { return $this->matchingRules; } /** * Gets the matching rule use * * @return array */ public function getMatchingRuleUse() { return $this->matchingRuleUse; } /** * Loads the attribute Types * * @return void */ protected function loadAttributeTypes() { $this->attributeTypes = array(); foreach ($this->getAttribute('attributeTypes') as $value) { $val = $this->parseAttributeType($value); $val = new AttributeType\OpenLdap($val); $this->attributeTypes[$val->getName()] = $val; } foreach ($this->attributeTypes as $val) { if 
(count($val->sup) > 0) { $this->resolveInheritance($val, $this->attributeTypes); } foreach ($val->aliases as $alias) { $this->attributeTypes[$alias] = $val; } } ksort($this->attributeTypes, SORT_STRING); } /** * Parses an attributeType value * * @param string $value * @return array */ protected function parseAttributeType($value) { $attributeType = array( 'oid' => null, 'name' => null, 'desc' => null, 'obsolete' => false, 'sup' => null, 'equality' => null, 'ordering' => null, 'substr' => null, 'syntax' => null, 'max-length' => null, 'single-value' => false, 'collective' => false, '<API key>' => false, 'usage' => 'userApplications', '_string' => $value, '_parents' => array()); $tokens = $this->tokenizeString($value); $attributeType['oid'] = array_shift($tokens); // first token is the oid $this-><API key>($attributeType, $tokens); if (array_key_exists('syntax', $attributeType)) { // get max length from syntax if (preg_match('/^(.+){(\d+)}$/', $attributeType['syntax'], $matches)) { $attributeType['syntax'] = $matches[1]; $attributeType['max-length'] = $matches[2]; } } $this->ensureNameAttribute($attributeType); return $attributeType; } /** * Loads the object classes * * @return void */ protected function loadObjectClasses() { $this->objectClasses = array(); foreach ($this->getAttribute('objectClasses') as $value) { $val = $this->parseObjectClass($value); $val = new ObjectClass\OpenLdap($val); $this->objectClasses[$val->getName()] = $val; } foreach ($this->objectClasses as $val) { if (count($val->sup) > 0) { $this->resolveInheritance($val, $this->objectClasses); } foreach ($val->aliases as $alias) { $this->objectClasses[$alias] = $val; } } ksort($this->objectClasses, SORT_STRING); } /** * Parses an objectClasses value * * @param string $value * @return array */ protected function parseObjectClass($value) { $objectClass = array( 'oid' => null, 'name' => null, 'desc' => null, 'obsolete' => false, 'sup' => array(), 'abstract' => false, 'structural' => false, 'auxiliary' 
=> false, 'must' => array(), 'may' => array(), '_string' => $value, '_parents' => array()); $tokens = $this->tokenizeString($value); $objectClass['oid'] = array_shift($tokens); // first token is the oid $this-><API key>($objectClass, $tokens); $this->ensureNameAttribute($objectClass); return $objectClass; } /** * Resolves inheritance in objectClasses and attributes * * @param AbstractItem $node * @param array $repository */ protected function resolveInheritance(AbstractItem $node, array $repository) { $data = $node->getData(); $parents = $data['sup']; if ($parents === null || !is_array($parents) || count($parents) < 1) { return; } foreach ($parents as $parent) { if (!array_key_exists($parent, $repository)) { continue; } if (!array_key_exists('_parents', $data) || !is_array($data['_parents'])) { $data['_parents'] = array(); } $data['_parents'][] = $repository[$parent]; } $node->setData($data); } /** * Loads the LDAP syntaxes * * @return void */ protected function loadLdapSyntaxes() { $this->ldapSyntaxes = array(); foreach ($this->getAttribute('ldapSyntaxes') as $value) { $val = $this->parseLdapSyntax($value); $this->ldapSyntaxes[$val['oid']] = $val; } ksort($this->ldapSyntaxes, SORT_STRING); } /** * Parses an ldapSyntaxes value * * @param string $value * @return array */ protected function parseLdapSyntax($value) { $ldapSyntax = array( 'oid' => null, 'desc' => null, '_string' => $value); $tokens = $this->tokenizeString($value); $ldapSyntax['oid'] = array_shift($tokens); // first token is the oid $this-><API key>($ldapSyntax, $tokens); return $ldapSyntax; } /** * Loads the matching rules * * @return void */ protected function loadMatchingRules() { $this->matchingRules = array(); foreach ($this->getAttribute('matchingRules') as $value) { $val = $this->parseMatchingRule($value); $this->matchingRules[$val['name']] = $val; } ksort($this->matchingRules, SORT_STRING); } /** * Parses a matchingRules value * * @param string $value * @return array */ protected function 
parseMatchingRule($value) { $matchingRule = array( 'oid' => null, 'name' => null, 'desc' => null, 'obsolete' => false, 'syntax' => null, '_string' => $value); $tokens = $this->tokenizeString($value); $matchingRule['oid'] = array_shift($tokens); // first token is the oid $this-><API key>($matchingRule, $tokens); $this->ensureNameAttribute($matchingRule); return $matchingRule; } /** * Loads the matching rule use * * @return void */ protected function loadMatchingRuleUse() { $this->matchingRuleUse = array(); foreach ($this->getAttribute('matchingRuleUse') as $value) { $val = $this-><API key>($value); $this->matchingRuleUse[$val['name']] = $val; } ksort($this->matchingRuleUse, SORT_STRING); } /** * Parses a matchingRuleUse value * * @param string $value * @return array */ protected function <API key>($value) { $matchingRuleUse = array( 'oid' => null, 'name' => null, 'desc' => null, 'obsolete' => false, 'applies' => array(), '_string' => $value); $tokens = $this->tokenizeString($value); $matchingRuleUse['oid'] = array_shift($tokens); // first token is the oid $this-><API key>($matchingRuleUse, $tokens); $this->ensureNameAttribute($matchingRuleUse); return $matchingRuleUse; } /** * Ensures that a name element is present and that it is single-values. 
* * @param array $data */ protected function ensureNameAttribute(array &$data) { if (!array_key_exists('name', $data) || empty($data['name'])) { // force a name $data['name'] = $data['oid']; } if (is_array($data['name'])) { // make one name the default and put the other ones into aliases $aliases = $data['name']; $data['name'] = array_shift($aliases); $data['aliases'] = $aliases; } else { $data['aliases'] = array(); } } /** * Parse the given tokens into a data structure * * @param array $data * @param array $tokens * @return void */ protected function <API key>(array &$data, array $tokens) { // tokens that have no value associated $noValue = array('single-value', 'obsolete', 'collective', '<API key>', 'abstract', 'structural', 'auxiliary'); // tokens that can have multiple values $multiValue = array('must', 'may', 'sup'); while (count($tokens) > 0) { $token = strtolower(array_shift($tokens)); if (in_array($token, $noValue)) { $data[$token] = true; // single value token } else { $data[$token] = array_shift($tokens); // this one follows a string or a list if it is multivalued if ($data[$token] == '(') { // this creates the list of values and cycles through the tokens // until the end of the list is reached ')' $data[$token] = array(); $tmp = array_shift($tokens); while ($tmp) { if ($tmp == ')') { break; } if ($tmp != '$') { $data[$token][] = Converter\Converter::fromLdap($tmp); } $tmp = array_shift($tokens); } } else { $data[$token] = Converter\Converter::fromLdap($data[$token]); } // create an array if the value should be multivalued but was not if (in_array($token, $multiValue) && !is_array($data[$token])) { $data[$token] = array($data[$token]); } } } } /** * Tokenizes the given value into an array * * @param string $value * @return array tokens */ protected function tokenizeString($value) { $tokens = array(); $matches = array(); // this one is taken from PEAR::Net_LDAP2 $pattern = "/\\s* (?:([()]) | ([^'\\s()]+) | '((?:[^']+|'[^\\s)])*)') \\s*/x"; 
preg_match_all($pattern, $value, $matches); $cMatches = count($matches[0]); $cPattern = count($matches); for ($i = 0; $i < $cMatches; $i++) { // number of tokens (full pattern match) for ($j = 1; $j < $cPattern; $j++) { // each subpattern $tok = trim($matches[$j][$i]); if (!empty($tok)) { // pattern match in this subpattern $tokens[$i] = $tok; // this is the token } } } if ($tokens[0] == '(') { array_shift($tokens); } if ($tokens[count($tokens) - 1] == ')') { array_pop($tokens); } return $tokens; } }
layout: page
title: AccountDoesNotExist
number: 172
categories: [AvaTax Error Codes]
disqus: 1

## Summary

You attempted to modify an account that does not exist.

## Example json

    {
      "code": "AccountDoesNotExist",
      "target": "Unknown",
      "details": [
        {
          "code": "AccountDoesNotExist",
          "number": 172,
          "message": "Account not found.",
          "description": "The AccountId: -0- doesn't exist. Please provide an account that exists.",
          "faultCode": "Client",
          "helpLink": "http://developer.avalara.com/avatax/errors/AccountDoesNotExist",
          "severity": "Error"
        }
      ]
    }

## Explanation

This error occurs when working with registrar credentials: the request referenced an account that does not exist. Check the account ID and retry the API call with a corrected value.
// of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // all copies or substantial portions of the Software. // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. #pragma once #include "../Graphics/Texture.h" #include "../Resource/Image.h" #include "../UI/BorderImage.h" struct SDL_Cursor; namespace Urho3D { %Cursor shapes recognized by the UI subsystem. enum CursorShape { CS_NORMAL = 0, CS_IBEAM, CS_CROSS, CS_RESIZEVERTICAL, <API key>, CS_RESIZEHORIZONTAL, <API key>, CS_RESIZE_ALL, CS_ACCEPTDROP, CS_REJECTDROP, CS_BUSY, CS_BUSY_ARROW, CS_MAX_SHAPES }; %Cursor image and hotspot information. struct URHO3D_API CursorShapeInfo { Construct with defaults. CursorShapeInfo() : imageRect_(IntRect::ZERO), hotSpot_(IntVector2::ZERO), osCursor_(0), systemDefined_(false), systemCursor_(-1) { } Construct with system cursor. CursorShapeInfo(int systemCursor) : imageRect_(IntRect::ZERO), hotSpot_(IntVector2::ZERO), osCursor_(0), systemDefined_(false), systemCursor_(systemCursor) { } Image. SharedPtr<Image> image_; Texture. SharedPtr<Texture> texture_; Image rectangle. IntRect imageRect_; Hotspot coordinates. IntVector2 hotSpot_; OS cursor. SDL_Cursor* osCursor_; Whether the OS cursor is system defined. bool systemDefined_; System cursor index. int systemCursor_; }; Mouse cursor %UI element. class URHO3D_API Cursor : public BorderImage { URHO3D_OBJECT(Cursor, BorderImage); public: Construct. Cursor(Context* context); Destruct. virtual ~Cursor(); Register object factory. 
static void RegisterObject(Context* context); Return UI rendering batches. virtual void GetBatches(PODVector<UIBatch>& batches, PODVector<float>& vertexData, const IntRect& currentScissor); Define a shape. void DefineShape(const String& shape, Image* image, const IntRect& imageRect, const IntVector2& hotSpot); Define a shape. void DefineShape(CursorShape shape, Image* image, const IntRect& imageRect, const IntVector2& hotSpot); Set current shape. void SetShape(const String& shape); Set current shape. void SetShape(CursorShape shape); Set whether to use system default shapes. Is only possible when the OS mouse cursor has been set visible from the Input subsystem. void SetUseSystemShapes(bool enable); Get current shape. const String& GetShape() const { return shape_; } Return whether is using system default shapes. bool GetUseSystemShapes() const { return useSystemShapes_; } Set shapes attribute. void SetShapesAttr(const VariantVector& value); Return shapes attribute. VariantVector GetShapesAttr() const; Apply pending OS cursor shape. Called by UI. No-op when the OS mouse pointer is not used. void ApplyOSCursorShape(); protected: Handle operating system mouse cursor visibility change event. void <API key>(StringHash eventType, VariantMap& eventData); Current shape definition. String shape_; Shape definitions. HashMap<String, CursorShapeInfo> shapeInfos_; Use system default shapes flag. bool useSystemShapes_; OS cursor shape needs update flag. bool osShapeDirty_; }; }
/*! `latex` grammar compiled for Highlight.js 11.3.1 */ (()=>{var e=(()=>{"use strict";return e=>{const n=[{begin:/\^{6}[0-9a-f]{6}/},{ begin:/\^{5}[0-9a-f]{5}/},{begin:/\^{4}[0-9a-f]{4}/},{begin:/\^{3}[0-9a-f]{3}/ },{begin:/\^{2}[0-9a-f]{2}/},{begin:/\^{2}[\u0000-\u007f]/}],a=[{ className:"keyword",begin:/\\/,relevance:0,contains:[{endsParent:!0, begin:e.regex.either(...["(?:NeedsTeXFormat|RequirePackage|GetIdInfo)","Provides(?:Expl)?(?:Package|Class|File)","(?:DeclareOption|ProcessOptions)","(?:documentclass|usepackage|input|include)","makeat(?:letter|other)","ExplSyntax(?:On|Off)","(?:new|renew|provide)?command","(?:re)newenvironment","(?:New|Renew|Provide|Declare)(?:Expandable)?DocumentCommand","(?:New|Renew|Provide|Declare)DocumentEnvironment","(?:(?:e|g|x)?def|let)","(?:begin|end)","(?:part|chapter|(?:sub){0,2}section|(?:sub)?paragraph)","caption","(?:label|(?:eq|page|name)?ref|(?:paren|foot|super)?cite)","(?:alpha|beta|[Gg]amma|[Dd]elta|(?:var)?epsilon|zeta|eta|[Tt]heta|vartheta)","(?:iota|(?:var)?kappa|[Ll]ambda|mu|nu|[Xx]i|[Pp]i|varpi|(?:var)rho)","(?:[Ss]igma|varsigma|tau|[Uu]psilon|[Pp]hi|varphi|chi|[Pp]si|[Oo]mega)","(?:frac|sum|prod|lim|infty|times|sqrt|leq|geq|left|right|middle|[bB]igg?)","(?:[lr]angle|q?quad|[lcvdi]?dots|d?dot|hat|tilde|bar)"].map((e=>e+"(?![a-zA-Z@:_])"))) },{endsParent:!0, begin:RegExp(["(?:__)?[a-zA-Z]{2,}_[a-zA-Z](?:_?[a-zA-Z])+:[a-zA-Z]*","[lgc]__?[a-zA-Z](?:_?[a-zA-Z])*_[a-zA-Z]{2,}","[qs]__?[a-zA-Z](?:_?[a-zA-Z])+","use(?:_i)?:[a-zA-Z]*","(?:else|fi|or):","(?:if|cs|exp):w","(?:hbox|vbox):n","::[a-zA-Z]_unbraced","::[a-zA-Z:]"].map((e=>e+"(?![a-zA-Z:_])")).join("|")) },{endsParent:!0,variants:n},{endsParent:!0,relevance:0,variants:[{ begin:/[a-zA-Z@]+/},{begin:/[^a-zA-Z@]?/}]}]},{className:"params",relevance:0, begin:/#+\d?/},{variants:n},{className:"built_in",relevance:0,begin:/[$&^_]/},{ className:"meta",begin:/% ?!(T[eE]X|tex|BIB|bib)/,end:"$",relevance:10 
},e.COMMENT("%","$",{relevance:0})],i={begin:/\{/,end:/\}/,relevance:0, contains:["self",...a]},t=e.inherit(i,{relevance:0,endsParent:!0, contains:[i,...a]}),r={begin:/\[/,end:/\]/,endsParent:!0,relevance:0, contains:[i,...a]},s={begin:/\s+/,relevance:0},c=[t],l=[r],o=(e,n)=>({ contains:[s],starts:{relevance:0,contains:e,starts:n}}),d=(e,n)=>({ begin:"\\\\"+e+"(?![a-zA-Z@:_])",keywords:{$pattern:/\\[a-zA-Z]+/,keyword:"\\"+e },relevance:0,contains:[s],starts:n}),g=(n,a)=>e.inherit({ begin:"\\\\begin(?=[ \t]*(\\r?\\n[ \t]*)?\\{"+n+"\\})",keywords:{ $pattern:/\\[a-zA-Z]+/,keyword:"\\begin"},relevance:0 },o(c,a)),m=(n="string")=>e.END_SAME_AS_BEGIN({className:n,begin:/(.|\r?\n)/, end:/(.|\r?\n)/,excludeBegin:!0,excludeEnd:!0,endsParent:!0}),b=e=>({ className:"string",end:"(?=\\\\end\\{"+e+"\\})"}),p=(e="string")=>({relevance:0, begin:/\{/,starts:{endsParent:!0,contains:[{className:e,end:/(?=\})/, endsParent:!0,contains:[{begin:/\{/,end:/\}/,relevance:0,contains:["self"]}]}]} });return{name:"LaTeX",aliases:["tex"], contains:[...["verb","lstinline"].map((e=>d(e,{contains:[m()]}))),d("mint",o(c,{ contains:[m()]})),d("mintinline",o(c,{contains:[p(),m()]})),d("url",{ contains:[p("link"),p("link")]}),d("hyperref",{contains:[p("link")] }),d("href",o(l,{contains:[p("link")] })),...[].concat(...["","\\*"].map((e=>[g("verbatim"+e,b("verbatim"+e)),g("filecontents"+e,o(c,b("filecontents"+e))),...["","B","L"].map((n=>g(n+"Verbatim"+e,o(l,b(n+"Verbatim"+e)))))]))),g("minted",o(l,o(c,b("minted")))),...a] }}})();hljs.registerLanguage("latex",e)})();