code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
import numpy as np from scipy.linalg import norm from .base import AppearanceLucasKanade class SimultaneousForwardAdditive(AppearanceLucasKanade): @property def algorithm(self): return 'Simultaneous-FA' def _fit(self, lk_fitting, max_iters=20, project=True): # Initial error > eps error = self.eps + 1 image = lk_fitting.image lk_fitting.weights = [] n_iters = 0 # Number of shape weights n_params = self.transform.n_parameters # Initial appearance weights if project: # Obtained weights by projection IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) weights = self.appearance_model.project(IWxp) # Reset template self.template = self.appearance_model.instance(weights) else: # Set all weights to 0 (yielding the mean) weights = np.zeros(self.appearance_model.n_active_components) lk_fitting.weights.append(weights) # Compute appearance model Jacobian wrt weights appearance_jacobian = self.appearance_model._jacobian.T # Forward Additive Algorithm while n_iters < max_iters and error > self.eps: # Compute warped image with current weights IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) # Compute warp Jacobian dW_dp = self.transform.jacobian( self.template.mask.true_indices) # Compute steepest descent images, VI_dW_dp J = self.residual.steepest_descent_images( image, dW_dp, forward=(self.template, self.transform, self.interpolator)) # Concatenate VI_dW_dp with appearance model Jacobian self._J = np.hstack((J, appearance_jacobian)) # Compute Hessian and inverse self._H = self.residual.calculate_hessian(self._J) # Compute steepest descent parameter updates sd_delta_p = self.residual.steepest_descent_update( self._J, self.template, IWxp) # Compute gradient descent parameter updates delta_p = np.real(self._calculate_delta_p(sd_delta_p)) # Update warp weights parameters = self.transform.as_vector() + delta_p[:n_params] self.transform.from_vector_inplace(parameters) lk_fitting.parameters.append(parameters) 
# Update appearance weights weights -= delta_p[n_params:] self.template = self.appearance_model.instance(weights) lk_fitting.weights.append(weights) # Test convergence error = np.abs(norm(delta_p)) n_iters += 1 lk_fitting.fitted = True return lk_fitting class SimultaneousForwardCompositional(AppearanceLucasKanade): @property def algorithm(self): return 'Simultaneous-FC' def _set_up(self): # Compute warp Jacobian self._dW_dp = self.transform.jacobian( self.template.mask.true_indices) def _fit(self, lk_fitting, max_iters=20, project=True): # Initial error > eps error = self.eps + 1 image = lk_fitting.image lk_fitting.weights = [] n_iters = 0 # Number of shape weights n_params = self.transform.n_parameters # Initial appearance weights if project: # Obtained weights by projection IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) weights = self.appearance_model.project(IWxp) # Reset template self.template = self.appearance_model.instance(weights) else: # Set all weights to 0 (yielding the mean) weights = np.zeros(self.appearance_model.n_active_components) lk_fitting.weights.append(weights) # Compute appearance model Jacobian wrt weights appearance_jacobian = self.appearance_model._jacobian.T # Forward Additive Algorithm while n_iters < max_iters and error > self.eps: # Compute warped image with current weights IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) # Compute steepest descent images, VI_dW_dp J = self.residual.steepest_descent_images(IWxp, self._dW_dp) # Concatenate VI_dW_dp with appearance model Jacobian self._J = np.hstack((J, appearance_jacobian)) # Compute Hessian and inverse self._H = self.residual.calculate_hessian(self._J) # Compute steepest descent parameter updates sd_delta_p = self.residual.steepest_descent_update( self._J, self.template, IWxp) # Compute gradient descent parameter updates delta_p = np.real(self._calculate_delta_p(sd_delta_p)) # Update warp weights 
self.transform.compose_after_from_vector_inplace(delta_p[:n_params]) lk_fitting.parameters.append(self.transform.as_vector()) # Update appearance weights weights -= delta_p[n_params:] self.template = self.appearance_model.instance(weights) lk_fitting.weights.append(weights) # Test convergence error = np.abs(norm(delta_p)) n_iters += 1 lk_fitting.fitted = True return lk_fitting class SimultaneousInverseCompositional(AppearanceLucasKanade): @property def algorithm(self): return 'Simultaneous-IA' def _set_up(self): # Compute the Jacobian of the warp self._dW_dp = self.transform.jacobian( self.appearance_model.mean.mask.true_indices) def _fit(self, lk_fitting, max_iters=20, project=True): # Initial error > eps error = self.eps + 1 image = lk_fitting.image lk_fitting.weights = [] n_iters = 0 # Number of shape weights n_params = self.transform.n_parameters # Initial appearance weights if project: # Obtained weights by projection IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) weights = self.appearance_model.project(IWxp) # Reset template self.template = self.appearance_model.instance(weights) else: # Set all weights to 0 (yielding the mean) weights = np.zeros(self.appearance_model.n_active_components) lk_fitting.weights.append(weights) # Compute appearance model Jacobian wrt weights appearance_jacobian = -self.appearance_model._jacobian.T # Baker-Matthews, Inverse Compositional Algorithm while n_iters < max_iters and error > self.eps: # Compute warped image with current weights IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) # Compute steepest descent images, VT_dW_dp J = self.residual.steepest_descent_images(self.template, self._dW_dp) # Concatenate VI_dW_dp with appearance model Jacobian self._J = np.hstack((J, appearance_jacobian)) # Compute Hessian and inverse self._H = self.residual.calculate_hessian(self._J) # Compute steepest descent parameter updates sd_delta_p = 
self.residual.steepest_descent_update( self._J, IWxp, self.template) # Compute gradient descent parameter updates delta_p = -np.real(self._calculate_delta_p(sd_delta_p)) # Update warp weights self.transform.compose_after_from_vector_inplace(delta_p[:n_params]) lk_fitting.parameters.append(self.transform.as_vector()) # Update appearance weights weights -= delta_p[n_params:] self.template = self.appearance_model.instance(weights) lk_fitting.weights.append(weights) # Test convergence error = np.abs(norm(delta_p)) n_iters += 1 lk_fitting.fitted = True return lk_fitting
jabooth/menpo-archive
menpo/fit/lucaskanade/appearance/simultaneous.py
Python
bsd-3-clause
8,583
package org.cagrid.gme.common.exceptions; import java.io.IOException; @SuppressWarnings("serial") public class SchemaParsingException extends IOException { public SchemaParsingException() { super(); } public SchemaParsingException(String s) { super(s); } }
NCIP/cagrid
cagrid/Software/core/caGrid/projects/globalModelExchange/src/org/cagrid/gme/common/exceptions/SchemaParsingException.java
Java
bsd-3-clause
296
require 'erb' describe "ERB.new" do before :all do @eruby_str = <<'END' <ul> <% list = [1,2,3] %> <% for item in list %> <% if item %> <li><%= item %></li> <% end %> <% end %> </ul> END @eruby_str2 = <<'END' <ul> % list = [1,2,3] %for item in list % if item <li><%= item %> <% end %> <% end %> </ul> %%% END end it "compiles eRuby script into ruby code when trim mode is 0 or not specified" do expected = "<ul>\n\n\n\n<li>1</li>\n\n\n\n<li>2</li>\n\n\n\n<li>3</li>\n\n\n</ul>\n" [0, '', nil].each do |trim_mode| ERB.new(@eruby_str, nil, trim_mode).result.should == expected end end it "removes '\n' when trim_mode is 1 or '>'" do expected = "<ul>\n<li>1</li>\n<li>2</li>\n<li>3</li>\n</ul>\n" [1, '>'].each do |trim_mode| ERB.new(@eruby_str, nil, trim_mode).result.should == expected end end it "removes spaces at beginning of line and '\n' when trim_mode is 2 or '<>'" do expected = "<ul>\n<li>1</li>\n<li>2</li>\n<li>3</li>\n</ul>\n" [2, '<>'].each do |trim_mode| ERB.new(@eruby_str, nil, trim_mode).result.should == expected end end it "removes spaces around '<%- -%>' when trim_mode is '-'" do expected = "<ul>\n <li>1 <li>2 <li>3</ul>\n" input = <<'END' <ul> <%- for item in [1,2,3] -%> <%- if item -%> <li><%= item -%> <%- end -%> <%- end -%> </ul> END ERB.new(input, nil, '-').result.should == expected end it "not support '<%-= expr %> even when trim_mode is '-'" do input = <<'END' <p> <%= expr -%> <%-= expr -%> </p> END lambda { ERB.new(input, nil, '-').result }.should raise_error end ruby_bug "#213", "1.8.7" do it "regards lines starting with '%' as '<% ... %>' when trim_mode is '%'" do expected = "<ul>\n <li>1\n \n <li>2\n \n <li>3\n \n\n</ul>\n%%\n" ERB.new(@eruby_str2, nil, "%").result.should == expected end end it "regards lines starting with '%' as '<% ... %>' and remove \"\\n\" when trim_mode is '%>'" do expected = "<ul>\n <li>1 <li>2 <li>3 </ul>\n%%\n" ERB.new(@eruby_str2, nil, '%>').result.should == expected end it "regard lines starting with '%' as '<% ... 
%>' and remove \"\\n\" when trim_mode is '%<>'" do expected = "<ul>\n <li>1\n \n <li>2\n \n <li>3\n \n</ul>\n%%\n" ERB.new(@eruby_str2, nil, '%<>').result.should == expected end it "regard lines starting with '%' as '<% ... %>' and spaces around '<%- -%>' when trim_mode is '%-'" do expected = "<ul>\n<li>1</li>\n<li>2</li>\n</ul>\n%%\n" input = <<'END' <ul> %list = [1,2] %for item in list <li><%= item %></li> <% end %></ul> %%% END trim_mode = '%-' ERB.new(input, nil, '%-').result.should == expected end not_compliant_on :rubinius do it "accepts a safe level as second argument" do input = "<b><%=- 2+2 %>" safe_level = 3 lambda { ERB.new(input, safe_level).result }.should_not raise_error end end it "changes '_erbout' variable name in the produced source" do input = @eruby_str match_erbout = ERB.new(input, nil, nil).src match_buf = ERB.new(input, nil, nil, 'buf').src match_erbout.gsub("_erbout", "buf").should == match_buf end it "ignores '<%# ... %>'" do input = <<'END' <%# for item in list %> <b><%#= item %></b> <%# end %> END ERB.new(input).result.should == "\n<b></b>\n\n" ERB.new(input, nil, '<>').result.should == "<b></b>\n" end ruby_version_is ""..."2.0" do it "remember local variables defined previous one" do ERB.new(@eruby_str).result ERB.new("<%= list.inspect %>").result.should == "[1, 2, 3]" end end ruby_version_is "2.0" do it "forget local variables defined previous one" do ERB.new(@eruby_str).result lambda{ ERB.new("<%= list %>").result }.should raise_error(NameError) end end end
rubysl/rubysl-erb
spec/new_spec.rb
Ruby
bsd-3-clause
3,851
/* * Copyright (C) 2013 Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "config.h" #include "HTMLMediaSource.h" namespace WebCore { URLRegistry* HTMLMediaSource::s_registry = 0; void HTMLMediaSource::setRegistry(URLRegistry* registry) { ASSERT(!s_registry); s_registry = registry; } }
klim-iv/phantomjs-qt5
src/webkit/Source/WebCore/html/HTMLMediaSource.cpp
C++
bsd-3-clause
1,798
/* Copyright (c) 2009, University of Oslo, Norway * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of the University of Oslo nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package vtk.text.tl.expr; import java.math.BigDecimal; import vtk.text.tl.Symbol; public class Multiply extends NumericOperator { public Multiply(Symbol symbol) { super(symbol); } @Override protected Object evalNumeric(BigDecimal n1, BigDecimal n2) { return n1.multiply(n2); } }
vtkio/vtk
src/main/java/vtk/text/tl/expr/Multiply.java
Java
bsd-3-clause
1,929
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Provides fakes for several of Telemetry's internal objects. These allow code like story_runner and Benchmark to be run and tested without compiling or starting a browser. Class names prepended with an underscore are intended to be implementation details, and should not be subclassed; however, some, like _FakeBrowser, have public APIs that may need to be called in tests. """ from telemetry.internal.backends.chrome_inspector import websocket from telemetry.internal.browser import browser_options from telemetry.internal.platform import system_info from telemetry.page import shared_page_state from telemetry.util import image_util from telemetry.testing.internal import fake_gpu_info from types import ModuleType # Classes and functions which are intended to be part of the public # fakes API. class FakePlatform(object): def __init__(self): self._network_controller = None self._tracing_controller = None self._has_battor = False self._os_name = 'FakeOS' self._device_type_name = 'abc' self._is_svelte = False self._is_aosp = True @property def is_host_platform(self): raise NotImplementedError @property def network_controller(self): if self._network_controller is None: self._network_controller = _FakeNetworkController() return self._network_controller @property def tracing_controller(self): if self._tracing_controller is None: self._tracing_controller = _FakeTracingController() return self._tracing_controller def Initialize(self): pass def CanMonitorThermalThrottling(self): return False def IsThermallyThrottled(self): return False def HasBeenThermallyThrottled(self): return False def GetArchName(self): raise NotImplementedError def SetOSName(self, name): self._os_name = name def GetOSName(self): return self._os_name def GetOSVersionName(self): raise NotImplementedError def GetOSVersionDetailString(self): raise 
NotImplementedError def StopAllLocalServers(self): pass def WaitForBatteryTemperature(self, _): pass def HasBattOrConnected(self): return self._has_battor def SetBattOrDetected(self, b): assert isinstance(b, bool) self._has_battor = b # TODO(rnephew): Investigate moving from setters to @property. def SetDeviceTypeName(self, name): self._device_type_name = name def GetDeviceTypeName(self): return self._device_type_name def SetIsSvelte(self, b): assert isinstance(b, bool) self._is_svelte = b def IsSvelte(self): if self._os_name != 'android': raise NotImplementedError return self._is_svelte def SetIsAosp(self, b): assert isinstance(b, bool) self._is_aosp = b def IsAosp(self): return self._is_aosp and self._os_name == 'android' class FakeLinuxPlatform(FakePlatform): def __init__(self): super(FakeLinuxPlatform, self).__init__() self.screenshot_png_data = None self.http_server_directories = [] self.http_server = FakeHTTPServer() @property def is_host_platform(self): return True def GetDeviceTypeName(self): return 'Desktop' def GetArchName(self): return 'x86_64' def GetOSName(self): return 'linux' def GetOSVersionName(self): return 'trusty' def GetOSVersionDetailString(self): return '' def CanTakeScreenshot(self): return bool(self.screenshot_png_data) def TakeScreenshot(self, file_path): if not self.CanTakeScreenshot(): raise NotImplementedError img = image_util.FromBase64Png(self.screenshot_png_data) image_util.WritePngFile(img, file_path) return True def SetHTTPServerDirectories(self, paths): self.http_server_directories.append(paths) class FakeHTTPServer(object): def UrlOf(self, url): del url # unused return 'file:///foo' class FakePossibleBrowser(object): def __init__(self, execute_on_startup=None, execute_after_browser_creation=None): self._returned_browser = _FakeBrowser(FakeLinuxPlatform()) self.browser_type = 'linux' self.supports_tab_control = False self.is_remote = False self.execute_on_startup = execute_on_startup self.execute_after_browser_creation = 
execute_after_browser_creation @property def returned_browser(self): """The browser object that will be returned through later API calls.""" return self._returned_browser def Create(self, finder_options): if self.execute_on_startup is not None: self.execute_on_startup() del finder_options # unused if self.execute_after_browser_creation is not None: self.execute_after_browser_creation(self._returned_browser) return self.returned_browser @property def platform(self): """The platform object from the returned browser. To change this or set it up, change the returned browser's platform. """ return self.returned_browser.platform def IsRemote(self): return self.is_remote def SetCredentialsPath(self, _): pass class FakeSharedPageState(shared_page_state.SharedPageState): def __init__(self, test, finder_options, story_set): super(FakeSharedPageState, self).__init__(test, finder_options, story_set) def _GetPossibleBrowser(self, test, finder_options): p = FakePossibleBrowser() self.ConfigurePossibleBrowser(p) return p def ConfigurePossibleBrowser(self, possible_browser): """Override this to configure the PossibleBrowser. Can make changes to the browser's configuration here via e.g.: possible_browser.returned_browser.returned_system_info = ... """ pass def DidRunStory(self, results): # TODO(kbr): add a test which throws an exception from DidRunStory # to verify the fix from https://crrev.com/86984d5fc56ce00e7b37ebe . 
super(FakeSharedPageState, self).DidRunStory(results) class FakeSystemInfo(system_info.SystemInfo): def __init__(self, model_name='', gpu_dict=None, command_line=''): if gpu_dict == None: gpu_dict = fake_gpu_info.FAKE_GPU_INFO super(FakeSystemInfo, self).__init__(model_name, gpu_dict, command_line) class _FakeBrowserFinderOptions(browser_options.BrowserFinderOptions): def __init__(self, execute_on_startup=None, execute_after_browser_creation=None, *args, **kwargs): browser_options.BrowserFinderOptions.__init__(self, *args, **kwargs) self.fake_possible_browser = \ FakePossibleBrowser( execute_on_startup=execute_on_startup, execute_after_browser_creation=execute_after_browser_creation) def CreateBrowserFinderOptions(browser_type=None, execute_on_startup=None, execute_after_browser_creation=None): """Creates fake browser finder options for discovering a browser.""" return _FakeBrowserFinderOptions( browser_type=browser_type, execute_on_startup=execute_on_startup, execute_after_browser_creation=execute_after_browser_creation) # Internal classes. Note that end users may still need to both call # and mock out methods of these classes, but they should not be # subclassed. class _FakeBrowser(object): def __init__(self, platform): self._tabs = _FakeTabList(self) # Fake the creation of the first tab. self._tabs.New() self._returned_system_info = FakeSystemInfo() self._platform = platform self._browser_type = 'release' self._is_crashed = False @property def platform(self): return self._platform @platform.setter def platform(self, incoming): """Allows overriding of the fake browser's platform object.""" assert isinstance(incoming, FakePlatform) self._platform = incoming @property def returned_system_info(self): """The object which will be returned from calls to GetSystemInfo.""" return self._returned_system_info @returned_system_info.setter def returned_system_info(self, incoming): """Allows overriding of the returned SystemInfo object. 
Incoming argument must be an instance of FakeSystemInfo.""" assert isinstance(incoming, FakeSystemInfo) self._returned_system_info = incoming @property def browser_type(self): """The browser_type this browser claims to be ('debug', 'release', etc.)""" return self._browser_type @browser_type.setter def browser_type(self, incoming): """Allows setting of the browser_type.""" self._browser_type = incoming @property def credentials(self): return _FakeCredentials() def Close(self): self._is_crashed = False @property def supports_system_info(self): return True def GetSystemInfo(self): return self.returned_system_info @property def supports_tab_control(self): return True @property def tabs(self): return self._tabs def DumpStateUponFailure(self): pass class _FakeCredentials(object): def WarnIfMissingCredentials(self, _): pass class _FakeTracingController(object): def __init__(self): self._is_tracing = False def StartTracing(self, tracing_config, timeout=10): self._is_tracing = True del tracing_config del timeout def StopTracing(self): self._is_tracing = False @property def is_tracing_running(self): return self._is_tracing def ClearStateIfNeeded(self): pass def IsChromeTracingSupported(self): return True class _FakeNetworkController(object): def __init__(self): self.wpr_mode = None self.extra_wpr_args = None self.is_initialized = False self.is_open = False self.use_live_traffic = None def InitializeIfNeeded(self, use_live_traffic=False): self.use_live_traffic = use_live_traffic def UpdateTrafficSettings(self, round_trip_latency_ms=None, download_bandwidth_kbps=None, upload_bandwidth_kbps=None): pass def Open(self, wpr_mode, extra_wpr_args, use_wpr_go=False): del use_wpr_go # Unused. 
self.wpr_mode = wpr_mode self.extra_wpr_args = extra_wpr_args self.is_open = True def Close(self): self.wpr_mode = None self.extra_wpr_args = None self.is_initialized = False self.is_open = False def StartReplay(self, archive_path, make_javascript_deterministic=False): del make_javascript_deterministic # Unused. assert self.is_open self.is_initialized = archive_path is not None def StopReplay(self): self.is_initialized = False class _FakeTab(object): def __init__(self, browser, tab_id): self._browser = browser self._tab_id = str(tab_id) self._collect_garbage_count = 0 self.test_png = None @property def collect_garbage_count(self): return self._collect_garbage_count @property def id(self): return self._tab_id @property def browser(self): return self._browser def WaitForDocumentReadyStateToBeComplete(self, timeout=0): pass def Navigate(self, url, script_to_evaluate_on_commit=None, timeout=0): del script_to_evaluate_on_commit, timeout # unused if url == 'chrome://crash': self.browser._is_crashed = True raise Exception def WaitForDocumentReadyStateToBeInteractiveOrBetter(self, timeout=0): pass def WaitForFrameToBeDisplayed(self, timeout=0): pass def IsAlive(self): return True def CloseConnections(self): pass def CollectGarbage(self): self._collect_garbage_count += 1 def Close(self): pass @property def screenshot_supported(self): return self.test_png is not None def Screenshot(self): assert self.screenshot_supported, 'Screenshot is not supported' return image_util.FromBase64Png(self.test_png) class _FakeTabList(object): _current_tab_id = 0 def __init__(self, browser): self._tabs = [] self._browser = browser def New(self, timeout=300): del timeout # unused type(self)._current_tab_id += 1 t = _FakeTab(self._browser, type(self)._current_tab_id) self._tabs.append(t) return t def __iter__(self): return self._tabs.__iter__() def __len__(self): return len(self._tabs) def __getitem__(self, index): if self._tabs[index].browser._is_crashed: raise Exception else: return 
self._tabs[index] def GetTabById(self, identifier): """The identifier of a tab can be accessed with tab.id.""" for tab in self._tabs: if tab.id == identifier: return tab return None class FakeInspectorWebsocket(object): _NOTIFICATION_EVENT = 1 _NOTIFICATION_CALLBACK = 2 """A fake InspectorWebsocket. A fake that allows tests to send pregenerated data. Normal InspectorWebsockets allow for any number of domain handlers. This fake only allows up to 1 domain handler, and assumes that the domain of the response always matches that of the handler. """ def __init__(self, mock_timer): self._mock_timer = mock_timer self._notifications = [] self._response_handlers = {} self._pending_callbacks = {} self._handler = None def RegisterDomain(self, _, handler): self._handler = handler def AddEvent(self, method, params, time): if self._notifications: assert self._notifications[-1][1] < time, ( 'Current response is scheduled earlier than previous response.') response = {'method': method, 'params': params} self._notifications.append((response, time, self._NOTIFICATION_EVENT)) def AddAsyncResponse(self, method, result, time): if self._notifications: assert self._notifications[-1][1] < time, ( 'Current response is scheduled earlier than previous response.') response = {'method': method, 'result': result} self._notifications.append((response, time, self._NOTIFICATION_CALLBACK)) def AddResponseHandler(self, method, handler): self._response_handlers[method] = handler def SyncRequest(self, request, *args, **kwargs): del args, kwargs # unused handler = self._response_handlers[request['method']] return handler(request) if handler else None def AsyncRequest(self, request, callback): self._pending_callbacks.setdefault(request['method'], []).append(callback) def SendAndIgnoreResponse(self, request): pass def Connect(self, _): pass def DispatchNotifications(self, timeout): current_time = self._mock_timer.time() if not self._notifications: self._mock_timer.SetTime(current_time + timeout + 1) raise 
websocket.WebSocketTimeoutException() response, time, kind = self._notifications[0] if time - current_time > timeout: self._mock_timer.SetTime(current_time + timeout + 1) raise websocket.WebSocketTimeoutException() self._notifications.pop(0) self._mock_timer.SetTime(time + 1) if kind == self._NOTIFICATION_EVENT: self._handler(response) elif kind == self._NOTIFICATION_CALLBACK: callback = self._pending_callbacks.get(response['method']).pop(0) callback(response) else: raise Exception('Unexpected response type') class FakeTimer(object): """ A fake timer to fake out the timing for a module. Args: module: module to fake out the time """ def __init__(self, module=None): self._elapsed_time = 0 self._module = module self._actual_time = None if module: assert isinstance(module, ModuleType) self._actual_time = module.time self._module.time = self def sleep(self, time): self._elapsed_time += time def time(self): return self._elapsed_time def SetTime(self, time): self._elapsed_time = time def __del__(self): self.Restore() def Restore(self): if self._module: self._module.time = self._actual_time self._module = None self._actual_time = None
benschmaus/catapult
telemetry/telemetry/testing/fakes/__init__.py
Python
bsd-3-clause
15,827
/*----------------------------------------------------------------------------*/ /* Copyright (c) FIRST 2008. All Rights Reserved. */ /* Open Source Software - may be modified and shared by FRC teams. The code */ /* must be accompanied by the FIRST BSD license file in the root directory of */ /* the project. */ /*----------------------------------------------------------------------------*/ package edu.wpi.first.wpilibj.templates; import edu.wpi.first.wpilibj.Compressor; import edu.wpi.first.wpilibj.Jaguar; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.Servo; import edu.wpi.first.wpilibj.SimpleRobot; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the SimpleRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ public class RobotTemplate extends SimpleRobot { private Joystick joystick = new Joystick(1); private Drivetrain drivetrain; private BowlerArm arm; Compressor compressor; Pan pan; //int port_1 = 7; //these ports were placeholders, no longer applicable //int port_2 = 7; public RobotTemplate() { drivetrain = new Drivetrain(); arm = new BowlerArm(); pan = new Pan(); compressor = new Compressor(7, 7);//7 for the switch, 7 for the relay } /** * This function is called once each time the robot enters autonomous mode. */ public void autonomous() { drivetrain.set(1, 1); sleep(5000); drivetrain.set(0,0); // arm.auto(); } /** * This function is called once each time the robot enters operator control. 
*/ public void operatorControl() { compressor.start(); arm.setSolenoid(-1); while (isOperatorControl()) { //drivetrain updates double lstick = -joystick.getRawAxis(2); double rstick = -joystick.getRawAxis(4); drivetrain.set(Math.abs(lstick) * lstick, Math.abs(rstick) * rstick); //If I'm not mistaken, this is the most convenient way to square in Java? //pan updates version 2 (Amita); this is basic and can be used for backup if(joystick.getRawButton(10)){ pan.endGame(); } else{ pan.resetServo(); } //bowler arm updates if (joystick.getRawButton(7)) { arm.rampDown(); } else if (joystick.getRawButton(5)) { arm.rampUp(); } else { arm.setRamp(0); } arm.setSolenoid((int) joystick.getRawAxis(6)); } } /* *changes the servo state based on the button being pressed. *once it is pressed, it is set to the opposite of what is was at the start, ditto for release. */ /** * This function is called once each time the robot enters test mode. */ public void test() { } public void updateDrivetrain(){ } public void updateArm(){ } public void updatePan(){ } public static void sleep(long ms){ long t=System.currentTimeMillis()+ms; while(System.currentTimeMillis()<t){ //do nothing! } } }
2374/chris
src/edu/wpi/first/wpilibj/templates/RobotTemplate.java
Java
bsd-3-clause
3,586
/* * Copyright (c) 2010, Anima Games, Benjamin Karaban, Laurent Schneider, * Jérémie Comarmond, Didier Colin. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * - Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * - Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include "PtyPartEffectTranslate.moc.h" #include <QtToolbox/CollapsibleWidget.moc.h> #include <QtToolbox/SingleSlidingValue.moc.h> #include <QtToolbox/SingleSlidingHDR.moc.h> #include <QGridLayout> #include <QPushButton> namespace EPI { //----------------------------------------------------------------------------- //----------------------------------------------------------------------------- PtyPartEffectTranslate::PtyPartEffectTranslate( const Ptr<Universe::NodeEmitter>& pNodeE, const Ptr<Universe::PartEffectTranslate>& pEffect, const Core::String& title) : PtyPartEffect(pNodeE, pEffect, title) { updateProperty(); } //----------------------------------------------------------------------------- PtyPartEffectTranslate::PtyPartEffectTranslate(const Ptr<Universe::NodeEmitter>& pNodeE, const Core::String& title) : PtyPartEffect( pNodeE, Ptr<Universe::PartEffectTranslate>(new Universe::PartEffectTranslate()), title) { updateProperty(); } //----------------------------------------------------------------------------- PtyPartEffectTranslate::~PtyPartEffectTranslate() {} //----------------------------------------------------------------------------- Ptr<PropertyWidget> PtyPartEffectTranslate::internalCreatePropertyWidget(const Ptr<PropertyWidgetDataProxy>& pDataProxy, QWidget * parent) { Ptr<PtyWidgetPartEffectTranslate> pPW (new PtyWidgetPartEffectTranslate(pDataProxy, parent)); return pPW; } //----------------------------------------------------------------------------- void PtyPartEffectTranslate::updateData() { Ptr<Universe::PartEffectTranslate> pEffet = LM_DEBUG_PTR_CAST<Universe::PartEffectTranslate> (getEffect()); pEffet->setConstSpeed(_constSpeed); pEffet->setRandSpeed(_randSpeed); } //----------------------------------------------------------------------------- void PtyPartEffectTranslate::updateProperty() { Ptr<Universe::PartEffectTranslate> pEffet = LM_DEBUG_PTR_CAST<Universe::PartEffectTranslate> (getEffect()); _constSpeed = pEffet->getConstSpeed(); 
_randSpeed = pEffet->getRandSpeed(); } //----------------------------------------------------------------------------- void PtyPartEffectTranslate::internalResurrect(const Ptr<Universe::World>& pWorld, const Ptr<Universe::World>& pWorldInfoContent, const Ptr<Property>& pty) { LM_ASSERT(getEffect()==null); Ptr<Universe::IPartEffect> pEffet = Ptr<Universe::PartEffectTranslate>(new Universe::PartEffectTranslate()); setEffect(pEffet); getUniverseNodeEmitter()->addEffect(getEffect()); updateData(); } //----------------------------------------------------------------------------- Ptr<Property> PtyPartEffectTranslate::clone() const { return Ptr<Property>(new PtyPartEffectTranslate( *this )); } //----------------------------------------------------------------------------- void PtyPartEffectTranslate::internalCopy(const Ptr<Property>& pSrc) { PtyPartEffect::internalCopy(pSrc); Ptr<PtyPartEffectTranslate> pPty = LM_DEBUG_PTR_CAST<PtyPartEffectTranslate>(pSrc); _constSpeed = pPty->_constSpeed; _randSpeed = pPty->_randSpeed; updateData(); } //----------------------------------------------------------------------------- //----------------------------------------------------------------------------- PtyWidgetPartEffectTranslate::PtyWidgetPartEffectTranslate(const Ptr<PropertyWidgetDataProxy>& data, QWidget * parent) : PropertyWidget(data, parent) { setupUi(); } //----------------------------------------------------------------------------- PtyWidgetPartEffectTranslate::~PtyWidgetPartEffectTranslate() {} //----------------------------------------------------------------------------- void PtyWidgetPartEffectTranslate::readProperty() { Ptr<PtyPartEffectTranslate> pP = LM_DEBUG_PTR_CAST<PtyPartEffectTranslate>(getDataProxy()->getProperty()); _constSpeedX->setSingleValue(pP->_constSpeed.x); _constSpeedY->setSingleValue(pP->_constSpeed.y); _constSpeedZ->setSingleValue(pP->_constSpeed.z); _randSpeedX->setSingleValue(pP->_randSpeed.x); _randSpeedY->setSingleValue(pP->_randSpeed.y); 
_randSpeedZ->setSingleValue(pP->_randSpeed.z); } //----------------------------------------------------------------------------- void PtyWidgetPartEffectTranslate::writeProperty(QWidget* pWidget) { Ptr<PtyPartEffectTranslate> pP = LM_DEBUG_PTR_CAST<PtyPartEffectTranslate>(getDataProxy()->getProperty()); double x = 0.0; double y = 0.0; double z = 0.0; _constSpeedX->getSingleValue(x); _constSpeedY->getSingleValue(y); _constSpeedZ->getSingleValue(z); pP->_constSpeed = Core::Vector3f(float(x), float(y), float(z)); _randSpeedX->getSingleValue(x); _randSpeedY->getSingleValue(y); _randSpeedZ->getSingleValue(z); pP->_randSpeed = Core::Vector3f(float(x), float(y), float(z)); } //----------------------------------------------------------------------------- void PtyWidgetPartEffectTranslate::setupUi() { _layout = new QGridLayout(this); _layout->setContentsMargins(0, 0, 0, 0); _layout->setSpacing(0); _groupBox = new QtToolbox::CollapsibleWidget(this, "Translate effect"); _del = new QPushButton(QIcon(":/icons/smallClearBW.png"), "", this); _constSpeedX = new QtToolbox::SingleSlidingHDR(this, "Const X", true); _constSpeedY = new QtToolbox::SingleSlidingHDR(this, "Const Y", true); _constSpeedZ = new QtToolbox::SingleSlidingHDR(this, "Const Z", true); _randSpeedX = new QtToolbox::SingleSlidingHDR(this, "Rand X", true); _randSpeedY = new QtToolbox::SingleSlidingHDR(this, "Rand Y", true); _randSpeedZ = new QtToolbox::SingleSlidingHDR(this, "Rand Z", true); _groupBox->addWidgetToTitle(_del); _groupBox->getLayout()->addWidget(_constSpeedX); _groupBox->getLayout()->addWidget(_constSpeedY); _groupBox->getLayout()->addWidget(_constSpeedZ); _groupBox->getLayout()->addWidget(_randSpeedX); _groupBox->getLayout()->addWidget(_randSpeedY); _groupBox->getLayout()->addWidget(_randSpeedZ); _layout->addWidget(_groupBox); setLayout(_layout); getWidgetsForUndoRedo().push_back(_constSpeedX); getWidgetsForUndoRedo().push_back(_constSpeedY); getWidgetsForUndoRedo().push_back(_constSpeedZ); 
getWidgetsForUndoRedo().push_back(_randSpeedX); getWidgetsForUndoRedo().push_back(_randSpeedY); getWidgetsForUndoRedo().push_back(_randSpeedZ); PropertyWidget::setupUi(); connect(_del, SIGNAL(clicked()), this, SLOT(deleteWidget())); } //----------------------------------------------------------------------------- void PtyWidgetPartEffectTranslate::deleteWidget() { emit deletePtyWidgetEffect(this); } //----------------------------------------------------------------------------- //----------------------------------------------------------------------------- }//namespace EPI
benkaraban/anima-games-engine
Sources/Tools/EPI/Document/Properties/PtyNodeEmitter/PtyPartEffectTranslate.moc.cpp
C++
bsd-3-clause
8,876
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Quality Control based on fuzzy logic. """ import logging import numpy as np from .core import QCCheckVar from .gradient import gradient from .spike import spike from .woa_normbias import woa_normbias from cotede.fuzzy import fuzzy_uncertainty module_logger = logging.getLogger(__name__) def fuzzylogic(features, cfg, require="all"): """ FIXME: Think about, should I return 0, or have an assert, and at qc.py all qc tests are applied with a try, and in case it fails it flag 0s. """ require = cfg.get("require", require) if (require == "all") and not np.all([f in features for f in cfg["features"]]): module_logger.warning( "Not all features (%s) required by fuzzy logic are available".format( cfg["features"].keys() ) ) raise KeyError uncertainty = fuzzy_uncertainty( data=features, features=cfg["features"], output=cfg["output"], require=require ) return uncertainty class FuzzyLogic(QCCheckVar): def set_features(self): self.features = {} for v in [f for f in self.cfg["features"] if f not in self.features]: if v == "woa_bias": woa_comparison = woa_normbias(self.data, self.varname, self.attrs) self.features[v] = woa_comparison["woa_bias"] elif v == "woa_normbias": woa_comparison = woa_normbias(self.data, self.varname, self.attrs) self.features[v] = woa_comparison["woa_normbias"] elif v == "spike": self.features[v] = spike(self.data[self.varname]) elif v == "gradient": self.features[v] = gradient(self.data[self.varname]) self.features["fuzzylogic"] = fuzzylogic(self.features, self.cfg) def test(self): self.flags = {} cfg = self.cfg flag = np.zeros(np.shape(self.data[self.varname]), dtype="i1") uncertainty = self.features["fuzzylogic"] # FIXME: As it is now, it will have no zero flag value. Think about cases # where some values in a profile would not be estimated, hence flag=0 # I needed to use np.nonzeros because now uncertainty is a masked array, # to accept when a feature is masked. 
flag[np.nonzero(uncertainty <= 0.29)] = 1 flag[np.nonzero((uncertainty > 0.29) & (uncertainty <= 0.34))] = 2 flag[np.nonzero((uncertainty > 0.34) & (uncertainty <= 0.72))] = 3 flag[np.nonzero(uncertainty > 0.72)] = 4 self.flags["fuzzylogic"] = flag
castelao/CoTeDe
cotede/qctests/fuzzylogic.py
Python
bsd-3-clause
2,680
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.ui.widget; import android.content.Context; import android.util.AttributeSet; import android.util.SparseArray; import android.view.View; import android.view.ViewGroup; import androidx.annotation.IdRes; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import org.chromium.base.BuildConfig; import java.lang.ref.WeakReference; /** * An {@link OptimizedFrameLayout} that increases the speed of frequent view lookup by ID by caching * the result of the lookup. Adding or removing a view with the same ID as a cached version will * cause the cache to be invalidated for that view and cause a re-lookup the next time it is * queried. The goal of this view type is to be used in cases where child views are frequently * accessed or reused, for example as part of a {@link androidx.recyclerview.widget.RecyclerView}. * The logic in the {@link #fastFindViewById(int)} method would be in {@link #findViewById(int)} if * it weren't final on the {@link View} class. * * {@link android.view.ViewGroup.OnHierarchyChangeListener}s cannot be used on ViewGroups that are * children of this group since they would overwrite the listeners that are critical to this class' * functionality. * * Usage: * Use the same way that you would use a normal {@link android.widget.FrameLayout}, but instead * of using {@link #findViewById(int)} to access views, use {@link #fastFindViewById(int)}. */ public class ViewLookupCachingFrameLayout extends OptimizedFrameLayout { /** A map containing views that have had lookup performed on them for quicker access. */ private final SparseArray<WeakReference<View>> mCachedViews = new SparseArray<>(); /** The hierarchy listener responsible for notifying the cache that the tree has changed. 
*/ @VisibleForTesting final OnHierarchyChangeListener mListener = new OnHierarchyChangeListener() { @Override public void onChildViewAdded(View parent, View child) { mCachedViews.remove(child.getId()); setHierarchyListenerOnTree(child, this); } @Override public void onChildViewRemoved(View parent, View child) { mCachedViews.remove(child.getId()); setHierarchyListenerOnTree(child, null); } }; /** Default constructor for use in XML. */ public ViewLookupCachingFrameLayout(Context context, AttributeSet atts) { super(context, atts); setOnHierarchyChangeListener(mListener); } @Override public void setOnHierarchyChangeListener(OnHierarchyChangeListener listener) { assert listener == mListener : "Hierarchy change listeners cannot be set for this group!"; super.setOnHierarchyChangeListener(listener); } /** * Set the hierarchy listener that invalidates relevant parts of the cache when subtrees change. * @param view The root of the tree to attach listeners to. * @param listener The listener to attach (null to unset). */ private void setHierarchyListenerOnTree(View view, OnHierarchyChangeListener listener) { if (!(view instanceof ViewGroup)) return; ViewGroup group = (ViewGroup) view; group.setOnHierarchyChangeListener(listener); for (int i = 0; i < group.getChildCount(); i++) { setHierarchyListenerOnTree(group.getChildAt(i), listener); } } /** * Does the same thing as {@link #findViewById(int)} but caches the result if not null. * Subsequent lookups are cheaper as a result. Adding or removing a child view invalidates * the cache for the ID of the view removed and causes a re-lookup. * @param id The ID of the view to lookup. * @return The view if it exists. 
*/ @Nullable public View fastFindViewById(@IdRes int id) { WeakReference<View> ref = mCachedViews.get(id); View view = null; if (ref != null) view = ref.get(); if (view == null) view = findViewById(id); if (BuildConfig.DCHECK_IS_ON) { assert view == findViewById(id) : "View caching logic is broken!"; assert ref == null || ref.get() != null : "Cache held reference to garbage collected view!"; } if (view != null) mCachedViews.put(id, new WeakReference<>(view)); return view; } @VisibleForTesting SparseArray<WeakReference<View>> getCache() { return mCachedViews; } }
endlessm/chromium-browser
ui/android/java/src/org/chromium/ui/widget/ViewLookupCachingFrameLayout.java
Java
bsd-3-clause
4,641
/* -*- mode: c++; fill-column: 132; c-basic-offset: 4; indent-tabs-mode: nil -*- */ #include "irods_auth_object.hpp" namespace irods { auth_object::auth_object( rError_t* _r_error ) : r_error_( _r_error ) { // TODO - stub } auth_object::~auth_object() { // TODO - stub } auth_object::auth_object( const auth_object& _rhs ) { r_error_ = _rhs.r_error(); request_result_ = _rhs.request_result(); context_ = _rhs.context(); } auth_object& auth_object::operator=( const auth_object& _rhs ) { r_error_ = _rhs.r_error(); request_result_ = _rhs.request_result(); context_ = _rhs.context(); return *this; } bool auth_object::operator==( const auth_object& _rhs ) const { // For the base class just always return true return ( r_error_ == _rhs.r_error() && request_result_ == _rhs.request_result() && context_ == _rhs.context() ); } }; // namespace irods
leesab/irods
iRODS/lib/core/src/irods_auth_object.cpp
C++
bsd-3-clause
1,097
package operation import ( "fmt" "os" "github.com/runabove/sail/internal" "github.com/spf13/cobra" ) var cmdOperationAttach = &cobra.Command{ Use: "attach", Short: "Attach to an ongoing operation output: sail operation attach [applicationName] <operationId>", Long: `Attach to an ongoing operation output: sail operation attach [applicationName] <operationId> Example: sail operation attach devel/redis fa853ede-6c05-4823-8b20-46a5389fe0de If the applicationName is not passed, the default application name will be used (the user's username). `, Run: func(cmd *cobra.Command, args []string) { switch len(args) { case 1: // applicationName was not passed. Using default one. applicationName := internal.GetUserName() operationAttach(applicationName, args[0]) case 2: operationAttach(args[0], args[1]) default: fmt.Fprintln(os.Stderr, "Invalid usage. sail operation attach [applicationName] <operationId>. Please see sail operation attach --help") } }, } func operationAttach(app, operationID string) { // Split namespace and service internal.StreamPrint("GET", fmt.Sprintf("/applications/%s/operation/%s/attach", app, operationID), nil) internal.ExitAfterCtrlC() }
runabove/sail
operation/attach.go
GO
bsd-3-clause
1,209
/*------------------------------------------------------------------------- * OpenGL Conformance Test Suite * ----------------------------- * * Copyright (c) 2016 Google Inc. * Copyright (c) 2016 The Khronos Group Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /*! * \file * \brief CTS runner main(). */ /*-------------------------------------------------------------------*/ #include "deString.h" #include "deUniquePtr.hpp" #include "glcTestRunner.hpp" #include "tcuPlatform.hpp" #include "tcuResource.hpp" #include <cstdio> // See tcuMain.cpp tcu::Platform* createPlatform(void); struct CommandLine { CommandLine(void) : runType(glu::ApiType::es(2, 0)), flags(0) { } glu::ApiType runType; std::string dstLogDir; deUint32 flags; }; static bool parseCommandLine(CommandLine& cmdLine, int argc, const char* const* argv) { for (int argNdx = 1; argNdx < argc; argNdx++) { const char* arg = argv[argNdx]; if (deStringBeginsWith(arg, "--type=")) { static const struct { const char* name; glu::ApiType runType; } runTypes[] = { { "es2", glu::ApiType::es(2, 0) }, { "es3", glu::ApiType::es(3, 0) }, { "es31", glu::ApiType::es(3, 1) }, { "es32", glu::ApiType::es(3, 2) }, { "gl30", glu::ApiType::core(3, 0) }, { "gl31", glu::ApiType::core(3, 1) }, { "gl32", glu::ApiType::core(3, 2) }, { "gl33", glu::ApiType::core(3, 3) }, { "gl40", glu::ApiType::core(4, 0) }, { "gl41", glu::ApiType::core(4, 1) }, { "gl42", glu::ApiType::core(4, 2) }, { "gl43", glu::ApiType::core(4, 3) }, { "gl44", 
glu::ApiType::core(4, 4) }, { "gl45", glu::ApiType::core(4, 5) }, { "gl46", glu::ApiType::core(4, 6) } }; const char* value = arg + 7; int ndx = 0; for (; ndx < DE_LENGTH_OF_ARRAY(runTypes); ndx++) { if (deStringEqual(runTypes[ndx].name, value)) { cmdLine.runType = runTypes[ndx].runType; break; } } if (ndx >= DE_LENGTH_OF_ARRAY(runTypes)) return false; } else if (deStringBeginsWith(arg, "--logdir=")) { const char* value = arg + 9; cmdLine.dstLogDir = value; } else if (deStringBeginsWith(arg, "--summary")) { cmdLine.flags = glcts::TestRunner::PRINT_SUMMARY; } else if (deStringEqual(arg, "--verbose")) cmdLine.flags = glcts::TestRunner::VERBOSE_ALL; else return false; } return true; } static void printHelp(const char* binName) { printf("%s:\n", binName); printf(" --type=[esN[M]|glNM] Conformance test run type. Choose from\n"); printf(" ES: es2, es3, es31, es32\n"); printf(" GL: gl30, gl31, gl32, gl33, gl40, gl41, gl42, gl43, gl44, gl45, gl46\n"); printf(" --logdir=[path] Destination directory for log files\n"); printf(" --summary Print summary without running the tests\n"); printf(" --verbose Print out and log more information\n"); } int main(int argc, char** argv) { CommandLine cmdLine; int exitStatus = EXIT_SUCCESS; if (!parseCommandLine(cmdLine, argc, argv)) { printHelp(argv[0]); return -1; } try { de::UniquePtr<tcu::Platform> platform(createPlatform()); tcu::DirArchive archive("."); glcts::TestRunner runner(static_cast<tcu::Platform&>(*platform.get()), archive, cmdLine.dstLogDir.c_str(), cmdLine.runType, cmdLine.flags); for (;;) { if (!runner.iterate()) { if (!runner.isConformant()) { exitStatus = EXIT_FAILURE; } break; } } } catch (const std::exception& e) { printf("ERROR: %s\n", e.what()); return -1; } return exitStatus; }
endlessm/chromium-browser
third_party/angle/third_party/VK-GL-CTS/src/external/openglcts/modules/runner/glcTestRunnerMain.cpp
C++
bsd-3-clause
4,116
import sys import warnings try: import itertools.izip as zip except ImportError: pass from itertools import product import numpy as np from .. import util from ..dimension import dimension_name from ..element import Element from ..ndmapping import NdMapping, item_check, sorted_context from .interface import DataError, Interface from .pandas import PandasInterface from .util import finite_range class cuDFInterface(PandasInterface): """ The cuDFInterface allows a Dataset objects to wrap a cuDF DataFrame object. Using cuDF allows working with columnar data on a GPU. Most operations leave the data in GPU memory, however to plot the data it has to be loaded into memory. The cuDFInterface covers almost the complete API exposed by the PandasInterface with two notable exceptions: 1) Aggregation and groupby do not have a consistent sort order (see https://github.com/rapidsai/cudf/issues/4237) 3) Not all functions can be easily applied to a cuDF so some functions applied with aggregate and reduce will not work. 
""" datatype = 'cuDF' types = () @classmethod def loaded(cls): return 'cudf' in sys.modules @classmethod def applies(cls, obj): if not cls.loaded(): return False import cudf return isinstance(obj, (cudf.DataFrame, cudf.Series)) @classmethod def init(cls, eltype, data, kdims, vdims): import cudf import pandas as pd element_params = eltype.param.objects() kdim_param = element_params['kdims'] vdim_param = element_params['vdims'] if isinstance(data, (cudf.Series, pd.Series)): data = data.to_frame() if not isinstance(data, cudf.DataFrame): data, _, _ = PandasInterface.init(eltype, data, kdims, vdims) data = cudf.from_pandas(data) columns = list(data.columns) ncols = len(columns) index_names = [data.index.name] if index_names == [None]: index_names = ['index'] if eltype._auto_indexable_1d and ncols == 1 and kdims is None: kdims = list(index_names) if isinstance(kdim_param.bounds[1], int): ndim = min([kdim_param.bounds[1], len(kdim_param.default)]) else: ndim = None nvdim = vdim_param.bounds[1] if isinstance(vdim_param.bounds[1], int) else None if kdims and vdims is None: vdims = [c for c in columns if c not in kdims] elif vdims and kdims is None: kdims = [c for c in columns if c not in vdims][:ndim] elif kdims is None: kdims = list(columns[:ndim]) if vdims is None: vdims = [d for d in columns[ndim:((ndim+nvdim) if nvdim else None)] if d not in kdims] elif kdims == [] and vdims is None: vdims = list(columns[:nvdim if nvdim else None]) # Handle reset of index if kdims reference index by name for kd in kdims: kd = dimension_name(kd) if kd in columns: continue if any(kd == ('index' if name is None else name) for name in index_names): data = data.reset_index() break if any(isinstance(d, (np.int64, int)) for d in kdims+vdims): raise DataError("cudf DataFrame column names used as dimensions " "must be strings not integers.", cls) if kdims: kdim = dimension_name(kdims[0]) if eltype._auto_indexable_1d and ncols == 1 and kdim not in columns: data = data.copy() data.insert(0, kdim, 
np.arange(len(data))) for d in kdims+vdims: d = dimension_name(d) if len([c for c in columns if c == d]) > 1: raise DataError('Dimensions may not reference duplicated DataFrame ' 'columns (found duplicate %r columns). If you want to plot ' 'a column against itself simply declare two dimensions ' 'with the same name. '% d, cls) return data, {'kdims':kdims, 'vdims':vdims}, {} @classmethod def range(cls, dataset, dimension): dimension = dataset.get_dimension(dimension, strict=True) column = dataset.data[dimension.name] if dimension.nodata is not None: column = cls.replace_value(column, dimension.nodata) if column.dtype.kind == 'O': return np.NaN, np.NaN else: return finite_range(column, column.min(), column.max()) @classmethod def values(cls, dataset, dim, expanded=True, flat=True, compute=True, keep_index=False): dim = dataset.get_dimension(dim, strict=True) data = dataset.data[dim.name] if not expanded: data = data.unique() return data.values_host if compute else data.values elif keep_index: return data elif compute: return data.values_host try: return data.values except Exception: return data.values_host @classmethod def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): # Get dimensions information dimensions = [dataset.get_dimension(d).name for d in dimensions] kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions] # Update the kwargs appropriately for Element group types group_kwargs = {} group_type = dict if group_type == 'raw' else group_type if issubclass(group_type, Element): group_kwargs.update(util.get_param_values(dataset)) group_kwargs['kdims'] = kdims group_kwargs.update(kwargs) # Propagate dataset group_kwargs['dataset'] = dataset.dataset # Find all the keys along supplied dimensions keys = product(*(dataset.data[dimensions[0]].unique().values_host for d in dimensions)) # Iterate over the unique entries applying selection masks grouped_data = [] for unique_key in util.unique_iterator(keys): group_data = 
dataset.select(**dict(zip(dimensions, unique_key))) if not len(group_data): continue group_data = group_type(group_data, **group_kwargs) grouped_data.append((unique_key, group_data)) if issubclass(container_type, NdMapping): with item_check(False), sorted_context(False): kdims = [dataset.get_dimension(d) for d in dimensions] return container_type(grouped_data, kdims=kdims) else: return container_type(grouped_data) @classmethod def select_mask(cls, dataset, selection): """ Given a Dataset object and a dictionary with dimension keys and selection keys (i.e. tuple ranges, slices, sets, lists, or literals) return a boolean mask over the rows in the Dataset object that have been selected. """ mask = None for dim, sel in selection.items(): if isinstance(sel, tuple): sel = slice(*sel) arr = cls.values(dataset, dim, keep_index=True) if util.isdatetime(arr) and util.pd: try: sel = util.parse_datetime_selection(sel) except: pass new_masks = [] if isinstance(sel, slice): with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'invalid value encountered') if sel.start is not None: new_masks.append(sel.start <= arr) if sel.stop is not None: new_masks.append(arr < sel.stop) if not new_masks: continue new_mask = new_masks[0] for imask in new_masks[1:]: new_mask &= imask elif isinstance(sel, (set, list)): for v in sel: new_masks.append(arr==v) if not new_masks: continue new_mask = new_masks[0] for imask in new_masks[1:]: new_mask |= imask elif callable(sel): new_mask = sel(arr) else: new_mask = arr == sel if mask is None: mask = new_mask else: mask &= new_mask return mask @classmethod def select(cls, dataset, selection_mask=None, **selection): df = dataset.data if selection_mask is None: selection_mask = cls.select_mask(dataset, selection) indexed = cls.indexed(dataset, selection) if selection_mask is not None: df = df.loc[selection_mask] if indexed and len(df) == 1 and len(dataset.vdims) == 1: return df[dataset.vdims[0].name].iloc[0] return df @classmethod def 
concat_fn(cls, dataframes, **kwargs): import cudf return cudf.concat(dataframes, **kwargs) @classmethod def add_dimension(cls, dataset, dimension, dim_pos, values, vdim): data = dataset.data.copy() if dimension.name not in data: data[dimension.name] = values return data @classmethod def aggregate(cls, dataset, dimensions, function, **kwargs): data = dataset.data cols = [d.name for d in dataset.kdims if d in dimensions] vdims = dataset.dimensions('value', label='name') reindexed = data[cols+vdims] agg = function.__name__ if len(dimensions): agg_map = {'amin': 'min', 'amax': 'max'} agg = agg_map.get(agg, agg) grouped = reindexed.groupby(cols, sort=False) if not hasattr(grouped, agg): raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg) df = getattr(grouped, agg)().reset_index() else: agg_map = {'amin': 'min', 'amax': 'max', 'size': 'count'} agg = agg_map.get(agg, agg) if not hasattr(reindexed, agg): raise ValueError('%s aggregation is not supported on cudf DataFrame.' 
% agg) agg = getattr(reindexed, agg)() data = dict(((col, [v]) for col, v in zip(agg.index.values_host, agg.to_array()))) df = util.pd.DataFrame(data, columns=list(agg.index.values_host)) dropped = [] for vd in vdims: if vd not in df.columns: dropped.append(vd) return df, dropped @classmethod def iloc(cls, dataset, index): import cudf rows, cols = index scalar = False columns = list(dataset.data.columns) if isinstance(cols, slice): cols = [d.name for d in dataset.dimensions()][cols] elif np.isscalar(cols): scalar = np.isscalar(rows) cols = [dataset.get_dimension(cols).name] else: cols = [dataset.get_dimension(d).name for d in index[1]] col_index = [columns.index(c) for c in cols] if np.isscalar(rows): rows = [rows] if scalar: return dataset.data[cols[0]].iloc[rows[0]] result = dataset.data.iloc[rows, col_index] # cuDF does not handle single rows and cols indexing correctly # as of cudf=0.10.0 so we have to convert Series back to DataFrame if isinstance(result, cudf.Series): if len(cols) == 1: result = result.to_frame(cols[0]) else: result = result.to_frame().T return result @classmethod def sort(cls, dataset, by=[], reverse=False): cols = [dataset.get_dimension(d, strict=True).name for d in by] return dataset.data.sort_values(by=cols, ascending=not reverse) @classmethod def dframe(cls, dataset, dimensions): if dimensions: return dataset.data[dimensions].to_pandas() else: return dataset.data.to_pandas() Interface.register(cuDFInterface)
ioam/holoviews
holoviews/core/data/cudf.py
Python
bsd-3-clause
12,346
/* * Copyright (c) Contributors, http://openviewer.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenViewer Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ using System; using OpenViewer.Model; namespace OpenViewer.UI { public abstract class UIBase : IUI { protected MetaverseSession m_model; public abstract string GetName(); public abstract void Initialize(MetaverseSession model, string renderingEngine, string loginURI, string username, string password); public abstract void Run(); } }
jimmygkr/openviewer
OpenViewer/UI/UIBase.cs
C#
bsd-3-clause
2,040
<?php /** * Zend Framework (http://framework.zend.com/) * * @link http://github.com/zendframework/ZendSkeletonApplication for the canonical source repository * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ return array( 'router' => array( 'routes' => array( // The following is a route to simplify getting started creating // new controllers and actions without needing to create a new // module. Simply drop new controllers in, and you can access them // using the path /api/:controller/:action 'api' => array( 'type' => 'Hostname', 'options' => array( 'route' => 'api.vuongquocbalo.com', ), ), ), ), );
thailvn/dev
module/Api/config/api.vuongquocbalo.com.php
PHP
bsd-3-clause
893
<?php namespace Jazzee\Element; /** * Phonenumber Element * * @author Jon Johnson <jon.johnson@ucsf.edu> * @license http://jazzee.org/license BSD-3-Clause */ class Phonenumber extends TextInput { const PAGEBUILDER_SCRIPT = 'resource/scripts/element_types/JazzeeElementPhonenumber.js'; public function addToField(\Foundation\Form\Field $field) { $element = $field->newElement('TextInput', 'el' . $this->_element->getId()); $element->setLabel($this->_element->getTitle()); $element->setInstructions($this->_element->getInstructions()); $element->setFormat($this->_element->getFormat()); $element->setDefaultValue($this->_element->getDefaultValue()); if ($this->_element->isRequired()) { $validator = new \Foundation\Form\Validator\NotEmpty($element); $element->addValidator($validator); } $validator = new \Foundation\Form\Validator\Phonenumber($element); $element->addValidator($validator); $filter = new \Foundation\Form\Filter\Phonenumber($element); $element->addFilter($filter); return $element; } }
Jazzee/Jazzee
src/Jazzee/Element/Phonenumber.php
PHP
bsd-3-clause
1,083
# for for i in 1..10 do puts i end
TJ-Hidetaka-Takano/mrubyc
sample_ruby/basic_sample05.rb
Ruby
bsd-3-clause
39
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/media/webrtc/permission_bubble_media_access_handler.h" #include <memory> #include <utility> #include "base/bind.h" #include "base/callback_helpers.h" #include "base/metrics/field_trial.h" #include "base/task/post_task.h" #include "build/build_config.h" #include "chrome/browser/media/webrtc/media_capture_devices_dispatcher.h" #include "chrome/browser/media/webrtc/media_stream_capture_indicator.h" #include "chrome/browser/media/webrtc/media_stream_device_permissions.h" #include "chrome/browser/permissions/permission_manager_factory.h" #include "chrome/browser/profiles/profile.h" #include "chrome/common/pref_names.h" #include "components/content_settings/browser/tab_specific_content_settings.h" #include "components/content_settings/core/browser/host_content_settings_map.h" #include "components/permissions/permission_manager.h" #include "components/permissions/permission_result.h" #include "components/pref_registry/pref_registry_syncable.h" #include "components/prefs/pref_service.h" #include "components/webrtc/media_stream_devices_controller.h" #include "content/public/browser/browser_task_traits.h" #include "content/public/browser/browser_thread.h" #include "content/public/browser/notification_service.h" #include "content/public/browser/notification_types.h" #include "content/public/browser/web_contents.h" #if defined(OS_ANDROID) #include <vector> #include "chrome/browser/flags/android/chrome_feature_list.h" #include "chrome/browser/media/webrtc/screen_capture_infobar_delegate_android.h" #include "components/permissions/permission_uma_util.h" #include "components/permissions/permission_util.h" #endif // defined(OS_ANDROID) #if defined(OS_MACOSX) #include "base/metrics/histogram_macros.h" #include "chrome/browser/content_settings/chrome_content_settings_utils.h" #include 
"chrome/browser/media/webrtc/system_media_capture_permissions_mac.h" #include "chrome/browser/media/webrtc/system_media_capture_permissions_stats_mac.h" #endif using content::BrowserThread; using RepeatingMediaResponseCallback = base::RepeatingCallback<void(const blink::MediaStreamDevices& devices, blink::mojom::MediaStreamRequestResult result, std::unique_ptr<content::MediaStreamUI> ui)>; #if defined(OS_MACOSX) using system_media_permissions::SystemPermission; #endif namespace { void UpdateTabSpecificContentSettings( content::WebContents* web_contents, const content::MediaStreamRequest& request, ContentSetting audio_setting, ContentSetting video_setting) { if (!web_contents) return; auto* content_settings = content_settings::TabSpecificContentSettings::FromWebContents( web_contents); if (!content_settings) return; content_settings::TabSpecificContentSettings::MicrophoneCameraState microphone_camera_state = content_settings::TabSpecificContentSettings:: MICROPHONE_CAMERA_NOT_ACCESSED; std::string selected_audio_device; std::string selected_video_device; std::string requested_audio_device = request.requested_audio_device_id; std::string requested_video_device = request.requested_video_device_id; // TODO(raymes): Why do we use the defaults here for the selected devices? // Shouldn't we just use the devices that were actually selected? Profile* profile = Profile::FromBrowserContext(web_contents->GetBrowserContext()); if (audio_setting != CONTENT_SETTING_DEFAULT) { selected_audio_device = requested_audio_device.empty() ? profile->GetPrefs()->GetString(prefs::kDefaultAudioCaptureDevice) : requested_audio_device; microphone_camera_state |= content_settings::TabSpecificContentSettings::MICROPHONE_ACCESSED | (audio_setting == CONTENT_SETTING_ALLOW ? 0 : content_settings::TabSpecificContentSettings:: MICROPHONE_BLOCKED); } if (video_setting != CONTENT_SETTING_DEFAULT) { selected_video_device = requested_video_device.empty() ? 
profile->GetPrefs()->GetString(prefs::kDefaultVideoCaptureDevice) : requested_video_device; microphone_camera_state |= content_settings::TabSpecificContentSettings::CAMERA_ACCESSED | (video_setting == CONTENT_SETTING_ALLOW ? 0 : content_settings::TabSpecificContentSettings::CAMERA_BLOCKED); } content_settings->OnMediaStreamPermissionSet( PermissionManagerFactory::GetForProfile(profile)->GetCanonicalOrigin( ContentSettingsType::MEDIASTREAM_CAMERA, request.security_origin, web_contents->GetLastCommittedURL()), microphone_camera_state, selected_audio_device, selected_video_device, requested_audio_device, requested_video_device); } } // namespace struct PermissionBubbleMediaAccessHandler::PendingAccessRequest { PendingAccessRequest(const content::MediaStreamRequest& request, RepeatingMediaResponseCallback callback) : request(request), callback(callback) {} ~PendingAccessRequest() {} // TODO(gbillock): make the MediaStreamDevicesController owned by // this object when we're using bubbles. content::MediaStreamRequest request; RepeatingMediaResponseCallback callback; }; PermissionBubbleMediaAccessHandler::PermissionBubbleMediaAccessHandler() { // PermissionBubbleMediaAccessHandler should be created on UI thread. // Otherwise, it will not receive // content::NOTIFICATION_WEB_CONTENTS_DESTROYED, and that will result in // possible use after free. 
DCHECK_CURRENTLY_ON(BrowserThread::UI); notifications_registrar_.Add(this, content::NOTIFICATION_WEB_CONTENTS_DESTROYED, content::NotificationService::AllSources()); } PermissionBubbleMediaAccessHandler::~PermissionBubbleMediaAccessHandler() {} bool PermissionBubbleMediaAccessHandler::SupportsStreamType( content::WebContents* web_contents, const blink::mojom::MediaStreamType type, const extensions::Extension* extension) { #if defined(OS_ANDROID) return type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE || type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE || type == blink::mojom::MediaStreamType::GUM_DESKTOP_VIDEO_CAPTURE || type == blink::mojom::MediaStreamType::DISPLAY_VIDEO_CAPTURE; #else return type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE || type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE; #endif } bool PermissionBubbleMediaAccessHandler::CheckMediaAccessPermission( content::RenderFrameHost* render_frame_host, const GURL& security_origin, blink::mojom::MediaStreamType type, const extensions::Extension* extension) { content::WebContents* web_contents = content::WebContents::FromRenderFrameHost(render_frame_host); Profile* profile = Profile::FromBrowserContext(web_contents->GetBrowserContext()); ContentSettingsType content_settings_type = type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE ? 
ContentSettingsType::MEDIASTREAM_MIC : ContentSettingsType::MEDIASTREAM_CAMERA; DCHECK(!security_origin.is_empty()); GURL embedding_origin = web_contents->GetLastCommittedURL().GetOrigin(); permissions::PermissionManager* permission_manager = PermissionManagerFactory::GetForProfile(profile); return permission_manager ->GetPermissionStatusForFrame(content_settings_type, render_frame_host, security_origin) .content_setting == CONTENT_SETTING_ALLOW; } void PermissionBubbleMediaAccessHandler::HandleRequest( content::WebContents* web_contents, const content::MediaStreamRequest& request, content::MediaResponseCallback callback, const extensions::Extension* extension) { DCHECK_CURRENTLY_ON(BrowserThread::UI); #if defined(OS_ANDROID) if (blink::IsScreenCaptureMediaType(request.video_type) && !base::FeatureList::IsEnabled( chrome::android::kUserMediaScreenCapturing)) { // If screen capturing isn't enabled on Android, we'll use "invalid state" // as result, same as on desktop. std::move(callback).Run( blink::MediaStreamDevices(), blink::mojom::MediaStreamRequestResult::INVALID_STATE, nullptr); return; } #endif // defined(OS_ANDROID) RequestsMap& requests_map = pending_requests_[web_contents]; requests_map.emplace( next_request_id_++, PendingAccessRequest( request, base::AdaptCallbackForRepeating(std::move(callback)))); // If this is the only request then show the infobar. if (requests_map.size() == 1) ProcessQueuedAccessRequest(web_contents); } void PermissionBubbleMediaAccessHandler::ProcessQueuedAccessRequest( content::WebContents* web_contents) { DCHECK_CURRENTLY_ON(BrowserThread::UI); auto it = pending_requests_.find(web_contents); if (it == pending_requests_.end() || it->second.empty()) { // Don't do anything if the tab was closed. 
return; } DCHECK(!it->second.empty()); const int request_id = it->second.begin()->first; const content::MediaStreamRequest& request = it->second.begin()->second.request; #if defined(OS_ANDROID) if (blink::IsScreenCaptureMediaType(request.video_type)) { ScreenCaptureInfoBarDelegateAndroid::Create( web_contents, request, base::BindOnce( &PermissionBubbleMediaAccessHandler::OnAccessRequestResponse, base::Unretained(this), web_contents, request_id)); return; } #endif webrtc::MediaStreamDevicesController::RequestPermissions( request, MediaCaptureDevicesDispatcher::GetInstance(), base::BindOnce( &PermissionBubbleMediaAccessHandler::OnMediaStreamRequestResponse, base::Unretained(this), web_contents, request_id, request)); } void PermissionBubbleMediaAccessHandler::UpdateMediaRequestState( int render_process_id, int render_frame_id, int page_request_id, blink::mojom::MediaStreamType stream_type, content::MediaRequestState state) { DCHECK_CURRENTLY_ON(BrowserThread::UI); if (state != content::MEDIA_REQUEST_STATE_CLOSING) return; bool found = false; for (auto requests_it = pending_requests_.begin(); requests_it != pending_requests_.end(); ++requests_it) { RequestsMap& requests_map = requests_it->second; for (RequestsMap::iterator it = requests_map.begin(); it != requests_map.end(); ++it) { if (it->second.request.render_process_id == render_process_id && it->second.request.render_frame_id == render_frame_id && it->second.request.page_request_id == page_request_id) { requests_map.erase(it); found = true; break; } } if (found) break; } } // static void PermissionBubbleMediaAccessHandler::RegisterProfilePrefs( user_prefs::PrefRegistrySyncable* prefs) { prefs->RegisterBooleanPref(prefs::kVideoCaptureAllowed, true); prefs->RegisterBooleanPref(prefs::kAudioCaptureAllowed, true); prefs->RegisterListPref(prefs::kVideoCaptureAllowedUrls); prefs->RegisterListPref(prefs::kAudioCaptureAllowedUrls); } void PermissionBubbleMediaAccessHandler::OnMediaStreamRequestResponse( 
content::WebContents* web_contents, int request_id, content::MediaStreamRequest request, const blink::MediaStreamDevices& devices, blink::mojom::MediaStreamRequestResult result, bool blocked_by_feature_policy, ContentSetting audio_setting, ContentSetting video_setting) { if (pending_requests_.find(web_contents) == pending_requests_.end()) { // WebContents has been destroyed. Don't need to do anything. return; } // If the kill switch is, or the request was blocked because of feature // policy we don't update the tab context. if (result != blink::mojom::MediaStreamRequestResult::KILL_SWITCH_ON && !blocked_by_feature_policy) { UpdateTabSpecificContentSettings(web_contents, request, audio_setting, video_setting); } std::unique_ptr<content::MediaStreamUI> ui; if (!devices.empty()) { ui = MediaCaptureDevicesDispatcher::GetInstance() ->GetMediaStreamCaptureIndicator() ->RegisterMediaStream(web_contents, devices); } OnAccessRequestResponse(web_contents, request_id, devices, result, std::move(ui)); } void PermissionBubbleMediaAccessHandler::OnAccessRequestResponse( content::WebContents* web_contents, int request_id, const blink::MediaStreamDevices& devices, blink::mojom::MediaStreamRequestResult result, std::unique_ptr<content::MediaStreamUI> ui) { DCHECK_CURRENTLY_ON(BrowserThread::UI); auto request_maps_it = pending_requests_.find(web_contents); if (request_maps_it == pending_requests_.end()) { // WebContents has been destroyed. Don't need to do anything. return; } RequestsMap& requests_map(request_maps_it->second); if (requests_map.empty()) return; auto request_it = requests_map.find(request_id); DCHECK(request_it != requests_map.end()); if (request_it == requests_map.end()) return; blink::mojom::MediaStreamRequestResult final_result = result; #if defined(OS_MACOSX) // If the request was approved, ask for system permissions if needed, and run // this function again when done. 
if (result == blink::mojom::MediaStreamRequestResult::OK) { const content::MediaStreamRequest& request = request_it->second.request; if (request.audio_type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE) { const SystemPermission system_audio_permission = system_media_permissions::CheckSystemAudioCapturePermission(); UMA_HISTOGRAM_ENUMERATION( "Media.Audio.Capture.Mac.MicSystemPermission.UserMedia", system_audio_permission); if (system_audio_permission == SystemPermission::kNotDetermined) { // Using WeakPtr since callback can come at any time and we might be // destroyed. system_media_permissions::RequestSystemAudioCapturePermisson( base::BindOnce( &PermissionBubbleMediaAccessHandler::OnAccessRequestResponse, weak_factory_.GetWeakPtr(), web_contents, request_id, devices, result, std::move(ui)), {content::BrowserThread::UI}); return; } else if (system_audio_permission == SystemPermission::kRestricted || system_audio_permission == SystemPermission::kDenied) { content_settings::UpdateLocationBarUiForWebContents(web_contents); final_result = blink::mojom::MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED; system_media_permissions::SystemAudioCapturePermissionBlocked(); } else { DCHECK_EQ(system_audio_permission, SystemPermission::kAllowed); content_settings::UpdateLocationBarUiForWebContents(web_contents); } } if (request.video_type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE) { const SystemPermission system_video_permission = system_media_permissions::CheckSystemVideoCapturePermission(); UMA_HISTOGRAM_ENUMERATION( "Media.Video.Capture.Mac.CameraSystemPermission.UserMedia", system_video_permission); if (system_video_permission == SystemPermission::kNotDetermined) { // Using WeakPtr since callback can come at any time and we might be // destroyed. 
system_media_permissions::RequestSystemVideoCapturePermisson( base::BindOnce( &PermissionBubbleMediaAccessHandler::OnAccessRequestResponse, weak_factory_.GetWeakPtr(), web_contents, request_id, devices, result, std::move(ui)), {content::BrowserThread::UI}); return; } else if (system_video_permission == SystemPermission::kRestricted || system_video_permission == SystemPermission::kDenied) { content_settings::UpdateLocationBarUiForWebContents(web_contents); final_result = blink::mojom::MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED; system_media_permissions::SystemVideoCapturePermissionBlocked(); } else { DCHECK_EQ(system_video_permission, SystemPermission::kAllowed); content_settings::UpdateLocationBarUiForWebContents(web_contents); } } } #endif // defined(OS_MACOSX) RepeatingMediaResponseCallback callback = std::move(request_it->second.callback); requests_map.erase(request_it); if (!requests_map.empty()) { // Post a task to process next queued request. It has to be done // asynchronously to make sure that calling infobar is not destroyed until // after this function returns. base::PostTask( FROM_HERE, {BrowserThread::UI}, base::BindOnce( &PermissionBubbleMediaAccessHandler::ProcessQueuedAccessRequest, base::Unretained(this), web_contents)); } std::move(callback).Run(devices, final_result, std::move(ui)); } void PermissionBubbleMediaAccessHandler::Observe( int type, const content::NotificationSource& source, const content::NotificationDetails& details) { DCHECK_CURRENTLY_ON(BrowserThread::UI); DCHECK_EQ(content::NOTIFICATION_WEB_CONTENTS_DESTROYED, type); pending_requests_.erase(content::Source<content::WebContents>(source).ptr()); }
endlessm/chromium-browser
chrome/browser/media/webrtc/permission_bubble_media_access_handler.cc
C++
bsd-3-clause
17,709
# Copyright (c) 2006-2009 The Trustees of Indiana University. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # - Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # - Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # - Neither the Indiana University nor the names of its contributors may be used # to endorse or promote products derived from this software without specific # prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from corepy.spre.spe import Instruction, DispatchInstruction, Register from spu_insts import * __doc__=""" ISA for the Cell Broadband Engine's SPU. 
""" class lqx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':452} cycles = (1, 6, 0) class stqx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':324} cycles = (1, 6, 0) class cbx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':468} cycles = (1, 4, 0) class chx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':469} cycles = (1, 4, 0) class cwx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':470} cycles = (1, 4, 0) class cdx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':471} cycles = (1, 4, 0) class ah(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':200} cycles = (0, 2, 0) class a(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':192} cycles = (0, 2, 0) class sfh(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':72} cycles = (0, 2, 0) class sf(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':64} cycles = (0, 2, 0) class addx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':832} cycles = (0, 2, 0) class cg(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':194} cycles = (0, 2, 0) class cgx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':834} cycles = (0, 2, 0) class sfx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':833} cycles = (0, 2, 0) class bg(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':66} cycles = (0, 2, 0) class bgx(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':835} cycles = (0, 2, 0) class mpy(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':964} cycles = (0, 7, 0) class mpyu(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':972} cycles = (0, 7, 0) class mpyh(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':965} cycles = (0, 7, 0) class mpys(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':967} cycles = (0, 7, 0) class mpyhh(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':966} cycles = (0, 7, 0) class mpyhha(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':838} cycles = 
(0, 7, 0) class mpyhhu(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':974} cycles = (0, 7, 0) class mpyhhau(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':846} cycles = (0, 7, 0) class clz(Instruction): machine_inst = OPCD_A_T params = {'OPCD':677} cycles = (0, 2, 0) class cntb(Instruction): machine_inst = OPCD_A_T params = {'OPCD':692} cycles = (0, 4, 0) class fsmb(Instruction): machine_inst = OPCD_A_T params = {'OPCD':438} cycles = (1, 4, 0) class fsmh(Instruction): machine_inst = OPCD_A_T params = {'OPCD':437} cycles = (1, 4, 0) class fsm(Instruction): machine_inst = OPCD_A_T params = {'OPCD':436} cycles = (1, 4, 0) class gbb(Instruction): machine_inst = OPCD_A_T params = {'OPCD':434} cycles = (1, 4, 0) class gbh(Instruction): machine_inst = OPCD_A_T params = {'OPCD':433} cycles = (1, 4, 0) class gb(Instruction): machine_inst = OPCD_A_T params = {'OPCD':432} cycles = (1, 4, 0) class avgb(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':211} cycles = (0, 4, 0) class absdb(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':83} cycles = (0, 4, 0) class sumb(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':595} cycles = (0, 4, 0) class xsbh(Instruction): machine_inst = OPCD_A_T params = {'OPCD':694} cycles = (0, 2, 0) class xshw(Instruction): machine_inst = OPCD_A_T params = {'OPCD':686} cycles = (0, 2, 0) class xswd(Instruction): machine_inst = OPCD_A_T params = {'OPCD':678} cycles = (0, 2, 0) class and_(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':193} cycles = (0, 2, 0) class andc(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':705} cycles = (0, 2, 0) class or_(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':65} cycles = (0, 2, 0) class orc(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':713} cycles = (0, 2, 0) class orx(Instruction): machine_inst = OPCD_A_T params = {'OPCD':496} cycles = (1, 4, 0) class xor(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':577} cycles = (0, 
2, 0) class nand(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':201} cycles = (0, 2, 0) class nor(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':73} cycles = (0, 2, 0) class eqv(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':585} cycles = (0, 2, 0) class shlh(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':95} cycles = (0, 4, 0) class shl(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':91} cycles = (0, 4, 0) class shlqbi(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':475} cycles = (1, 4, 0) class shlqby(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':479} cycles = (1, 4, 0) class shlqbybi(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':463} cycles = (1, 4, 0) class roth(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':92} cycles = (0, 4, 0) class rot(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':88} cycles = (0, 4, 0) class rotqby(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':476} cycles = (1, 4, 0) class rotqbybi(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':460} cycles = (1, 4, 0) class rotqbi(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':472} cycles = (1, 4, 0) class rothm(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':93} cycles = (0, 4, 0) class rotm(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':89} cycles = (0, 4, 0) class rotqmby(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':477} cycles = (1, 4, 0) class rotqmbybi(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':461} cycles = (1, 4, 0) class rotqmbi(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':473} cycles = (1, 4, 0) class rotmah(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':94} cycles = (0, 4, 0) class rotma(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':90} cycles = (0, 4, 0) class heq(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':984} cycles = (0, 2, 0) class hgt(Instruction): machine_inst = 
OPCD_B_A_T params = {'OPCD':600} cycles = (0, 2, 0) class hlgt(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':728} cycles = (0, 2, 0) class ceqb(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':976} cycles = (0, 2, 0) class ceqh(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':968} cycles = (0, 2, 0) class ceq(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':960} cycles = (0, 2, 0) class cgtb(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':592} cycles = (0, 2, 0) class cgth(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':584} cycles = (0, 2, 0) class cgt(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':576} cycles = (0, 2, 0) class clgtb(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':720} cycles = (0, 2, 0) class clgth(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':712} cycles = (0, 2, 0) class clgt(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':704} cycles = (0, 2, 0) class bi(Instruction): machine_inst = OPCD_A_D_E params = {'OPCD':424} cycles = (1, 4, 0) class iret(Instruction): machine_inst = OPCD_A_D_E params = {'OPCD':426} cycles = (1, 4, 0) class bisled(Instruction): machine_inst = OPCD_A_T_D_E params = {'OPCD':427} cycles = (1, 4, 0) class bisl(Instruction): machine_inst = OPCD_A_T_D_E params = {'OPCD':425} cycles = (1, 4, 0) class biz(Instruction): machine_inst = OPCD_A_T_D_E params = {'OPCD':296} cycles = (1, 4, 0) class binz(Instruction): machine_inst = OPCD_A_T_D_E params = {'OPCD':297} cycles = (1, 4, 0) class bihz(Instruction): machine_inst = OPCD_A_T_D_E params = {'OPCD':294} cycles = (1, 4, 0) class bihnz(Instruction): machine_inst = OPCD_A_T_D_E params = {'OPCD':299} cycles = (1, 4, 0) # TODO - can we check that if P is set then RO is zero as required? 
class hbr(DispatchInstruction): cycles = (1, 15, 0) dispatch = ( (OPCD_RO_A_P, {'OPCD':428}), (OPCD_LBL9_A_P, {'OPCD':428})) class fa(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':708} cycles = (0, 6, 0) class dfa(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':716} cycles = (0, 13, 6) class fs(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':709} cycles = (0, 6, 0) class dfs(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':717} cycles = (0, 13, 6) class fm(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':710} cycles = (0, 6, 0) class dfm(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':718} cycles = (0, 13, 6) class dfma(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':860} cycles = (0, 13, 6) class dfnms(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':862} cycles = (0, 13, 6) class dfms(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':861} cycles = (0, 13, 6) class dfnma(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':863} cycles = (0, 13, 6) class frest(Instruction): machine_inst = OPCD_A_T params = {'OPCD':440} cycles = (1, 4, 0) class frsqest(Instruction): machine_inst = OPCD_A_T params = {'OPCD':441} cycles = (1, 4, 0) class fi(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':980} cycles = (0, 7, 0) class frds(Instruction): machine_inst = OPCD_A_T params = {'OPCD':953} cycles = (0, 13, 6) class fesd(Instruction): machine_inst = OPCD_A_T params = {'OPCD':952} cycles = (0, 13, 6) class fceq(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':962} cycles = (0, 2, 0) class fcmeq(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':970} cycles = (0, 2, 0) class fcgt(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':706} cycles = (0, 2, 0) class fcmgt(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':714} cycles = (0, 2, 0) class fscrwr(Instruction): machine_inst = OPCD_A_T params = {'OPCD':954} cycles = (0, 7, 0) class fscrrd(Instruction): 
machine_inst = OPCD_T params = {'OPCD':920} cycles = (0, 13, 6) class stop(Instruction): machine_inst = OPCD_STOP_SIG params = {'OPCD':0} cycles = (1, 4, 0) class stopd(Instruction): machine_inst = OPCD_B_A_T params = {'OPCD':320} cycles = (1, 4, 0) class lnop(Instruction): machine_inst = OPCD params = {'OPCD':1} cycles = (1, 0, 0) class nop(Instruction): machine_inst = OPCD_T params = {'OPCD':513} cycles = (0, 0, 0) class sync(Instruction): machine_inst = OPCD_CF params = {'OPCD':2} cycles = (1, 4, 0) class dsync(Instruction): machine_inst = OPCD params = {'OPCD':3} cycles = (1, 4, 0) class mfspr(Instruction): machine_inst = OPCD_SA_T params = {'OPCD':12} cycles = (1, 6, 0) class mtspr(Instruction): machine_inst = OPCD_SA_T params = {'OPCD':268} cycles = (1, 6, 0) class rdch(Instruction): machine_inst = OPCD_A_T params = {'OPCD':13} cycles = (1, 6, 0) class rchcnt(Instruction): machine_inst = OPCD_A_T params = {'OPCD':15} cycles = (1, 6, 0) class wrch(Instruction): machine_inst = OPCD_A_T params = {'OPCD':269} cycles = (1, 6, 0) class mpya(Instruction): machine_inst = OPCD_T_B_A_C params = {'OPCD':12} cycles = (0, 7, 0) class selb(Instruction): machine_inst = OPCD_T_B_A_C params = {'OPCD':8} cycles = (0, 2, 0) class shufb(Instruction): machine_inst = OPCD_T_B_A_C params = {'OPCD':11} cycles = (1, 4, 0) class fma(Instruction): machine_inst = OPCD_T_B_A_C params = {'OPCD':14} cycles = (0, 6, 0) class fnms(Instruction): machine_inst = OPCD_T_B_A_C params = {'OPCD':13} cycles = (0, 6, 0) class fms(Instruction): machine_inst = OPCD_T_B_A_C params = {'OPCD':15} cycles = (0, 6, 0) class cbd(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':500} cycles = (1, 4, 0) class chd(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':501} cycles = (1, 4, 0) class cwd(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':502} cycles = (1, 4, 0) class cdd(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':503} cycles = (1, 4, 0) class shlhi(Instruction): 
machine_inst = OPCD_I7_A_T params = {'OPCD':127} cycles = (0, 4, 0) class shli(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':123} cycles = (0, 4, 0) class shlqbii(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':507} cycles = (1, 4, 0) class shlqbyi(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':511} cycles = (1, 4, 0) class rothi(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':124} cycles = (0, 4, 0) class roti(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':120} cycles = (0, 4, 0) class rotqbyi(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':508} cycles = (1, 4, 0) class rotqbii(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':504} cycles = (1, 4, 0) class rothmi(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':125} cycles = (0, 4, 0) class rotmi(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':121} cycles = (0, 4, 0) class rotqmbyi(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':509} cycles = (1, 4, 0) class rotqmbii(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':505} cycles = (1, 4, 0) class rotmahi(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':126} cycles = (0, 4, 0) class rotmai(Instruction): machine_inst = OPCD_I7_A_T params = {'OPCD':122} cycles = (0, 4, 0) class csflt(Instruction): machine_inst = OPCD_I8_A_T params = {'OPCD':474} cycles = (0, 7, 0) class cflts(Instruction): machine_inst = OPCD_I8_A_T params = {'OPCD':472} cycles = (0, 7, 0) class cuflt(Instruction): machine_inst = OPCD_I8_A_T params = {'OPCD':475} cycles = (0, 7, 0) class cfltu(Instruction): machine_inst = OPCD_I8_A_T params = {'OPCD':473} cycles = (0, 7, 0) class lqd(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':52} cycles = (1, 6, 0) class stqd(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':36} cycles = (1, 6, 0) class ahi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':29} cycles = (0, 2, 0) class ai(Instruction): machine_inst = 
OPCD_I10_A_T params = {'OPCD':28} cycles = (0, 2, 0) class sfhi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':13} cycles = (0, 2, 0) class sfi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':12} cycles = (0, 2, 0) class mpyi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':116} cycles = (0, 7, 0) class mpyui(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':117} cycles = (0, 7, 0) class andbi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':22} cycles = (0, 2, 0) class andhi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':21} cycles = (0, 2, 0) class andi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':20} cycles = (0, 2, 0) class orbi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':6} cycles = (0, 2, 0) class orhi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':5} cycles = (0, 2, 0) class ori(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':4} cycles = (0, 2, 0) class xorbi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':70} cycles = (0, 2, 0) class xorhi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':69} cycles = (0, 2, 0) class xori(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':68} cycles = (0, 2, 0) class heqi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':127} cycles = (0, 2, 0) class hgti(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':79} cycles = (0, 2, 0) class hlgti(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':95} cycles = (0, 2, 0) class ceqbi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':126} cycles = (0, 2, 0) class ceqhi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':125} cycles = (0, 2, 0) class ceqi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':124} cycles = (0, 2, 0) class cgtbi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':78} cycles = (0, 2, 0) class cgthi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':77} 
cycles = (0, 2, 0) class cgti(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':76} cycles = (0, 2, 0) class clgtbi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':94} cycles = (0, 2, 0) class clgthi(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':93} cycles = (0, 2, 0) class clgti(Instruction): machine_inst = OPCD_I10_A_T params = {'OPCD':92} cycles = (0, 2, 0) class lqa(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':97} cycles = (1, 6, 0) class lqr(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':103} cycles = (1, 6, 0) class stqa(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':65} cycles = (1, 6, 0) class stqr(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':71} cycles = (1, 6, 0) class ilh(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':131} cycles = (0, 2, 0) class ilhu(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':130} cycles = (0, 2, 0) class il(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':129} cycles = (0, 2, 0) class iohl(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':193} cycles = (0, 2, 0) class fsmbi(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':101} cycles = (1, 4, 0) class br(DispatchInstruction): cycles = (1, 4, 0) dispatch = ( (OPCD_I16, {'OPCD':100}), (OPCD_LBL16, {'OPCD':100})) # TODO - how can I do absolute branches? class bra(Instruction): machine_inst = OPCD_I16 params = {'OPCD':96} cycles = (1, 4, 0) # TODO - I16 has two zero bits appended, do I handle this correctly? # What is the correct way, anyway? 
class brsl(DispatchInstruction): cycles = (1, 4, 0) dispatch = ( (OPCD_I16_T, {'OPCD':102}), (OPCD_LBL16_T, {'OPCD':102})) class brasl(Instruction): machine_inst = OPCD_I16_T params = {'OPCD':98} cycles = (1, 4, 0) class brnz(DispatchInstruction): cycles = (1, 4, 0) dispatch = ( (OPCD_I16_T, {'OPCD':66}), (OPCD_LBL16_T, {'OPCD':66})) class brz(DispatchInstruction): cycles = (1, 4, 0) dispatch = ( (OPCD_I16_T, {'OPCD':64}), (OPCD_LBL16_T, {'OPCD':64})) class brhnz(DispatchInstruction): cycles = (1, 4, 0) dispatch = ( (OPCD_I16, {'OPCD':70}), (OPCD_LBL16, {'OPCD':70})) class brhz(DispatchInstruction): cycles = (1, 4, 0) dispatch = ( (OPCD_I16, {'OPCD':68}), (OPCD_LBL16, {'OPCD':68})) class hbra(Instruction): machine_inst = OPCD_LBL9_I16 params = {'OPCD':8} cycles = (1, 15, 0) class hbrr(DispatchInstruction): cycles = (1, 15, 0) dispatch = ( (OPCD_ROA_I16, {'OPCD':9}), (OPCD_LBL9_LBL16, {'OPCD':9})) class ila(Instruction): machine_inst = OPCD_I18_T params = {'OPCD':33} cycles = (0, 2, 0)
matthiaskramm/corepy
corepy/arch/spu/isa/spu_isa.py
Python
bsd-3-clause
22,294
/* * Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the United States Government nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package gov.hhs.fha.nhinc.patientdiscovery.inbound.deferred.request; import gov.hhs.fha.nhinc.aspect.InboundProcessingEvent; import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType; import gov.hhs.fha.nhinc.nhinclib.NhincConstants; import gov.hhs.fha.nhinc.patientdiscovery.PatientDiscoveryAuditor; import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.request.proxy.AdapterPatientDiscoveryDeferredReqProxy; import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.request.proxy.AdapterPatientDiscoveryDeferredReqProxyObjectFactory; import gov.hhs.fha.nhinc.patientdiscovery.aspect.MCCIIN000002UV01EventDescriptionBuilder; import gov.hhs.fha.nhinc.patientdiscovery.aspect.PRPAIN201305UV02EventDescriptionBuilder; import org.hl7.v3.MCCIIN000002UV01; import org.hl7.v3.PRPAIN201305UV02; public abstract class AbstractInboundPatientDiscoveryDeferredRequest implements InboundPatientDiscoveryDeferredRequest { private final AdapterPatientDiscoveryDeferredReqProxyObjectFactory adapterFactory; public AbstractInboundPatientDiscoveryDeferredRequest(AdapterPatientDiscoveryDeferredReqProxyObjectFactory factory) { adapterFactory = factory; } abstract MCCIIN000002UV01 process(PRPAIN201305UV02 request, AssertionType assertion); abstract PatientDiscoveryAuditor getAuditLogger(); /** * Processes the PD Deferred request message. This call will audit the message and send it to the Nhin. 
* * @param request * @param assertion * @return MCCIIN000002UV01 */ @InboundProcessingEvent(beforeBuilder = PRPAIN201305UV02EventDescriptionBuilder.class, afterReturningBuilder = MCCIIN000002UV01EventDescriptionBuilder.class, serviceType = "Patient Discovery Deferred Request", version = "1.0") public MCCIIN000002UV01 respondingGatewayPRPAIN201305UV02(PRPAIN201305UV02 request, AssertionType assertion) { auditRequestFromNhin(request, assertion); MCCIIN000002UV01 response = process(request, assertion); auditResponseToNhin(response, assertion); return response; } protected MCCIIN000002UV01 sendToAdapter(PRPAIN201305UV02 request, AssertionType assertion) { AdapterPatientDiscoveryDeferredReqProxy proxy = adapterFactory.getAdapterPatientDiscoveryDeferredReqProxy(); return proxy.processPatientDiscoveryAsyncReq(request, assertion); } private void auditRequestFromNhin(PRPAIN201305UV02 request, AssertionType assertion) { getAuditLogger().auditNhinDeferred201305(request, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION); } private void auditResponseToNhin(MCCIIN000002UV01 response, AssertionType assertion) { getAuditLogger().auditAck(response, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION, NhincConstants.AUDIT_LOG_NHIN_INTERFACE); } protected void auditRequestToAdapter(PRPAIN201305UV02 request, AssertionType assertion) { getAuditLogger().auditAdapterDeferred201305(request, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION); } protected void auditResponseFromAdapter(MCCIIN000002UV01 response, AssertionType assertion) { getAuditLogger().auditAck(response, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION, NhincConstants.AUDIT_LOG_ADAPTER_INTERFACE); } }
sailajaa/CONNECT
Product/Production/Services/PatientDiscoveryCore/src/main/java/gov/hhs/fha/nhinc/patientdiscovery/inbound/deferred/request/AbstractInboundPatientDiscoveryDeferredRequest.java
Java
bsd-3-clause
5,048
/* -- MAGMA (version 2.1.0) -- Univ. of Tennessee, Knoxville Univ. of California, Berkeley Univ. of Colorado, Denver @date August 2016 @author Stan Tomov @author Hartwig Anzt @precisions normal z -> s d c */ #include "magmasparse_internal.h" #define PRECISION_z #define COMPLEX #define RTOLERANCE lapackf77_dlamch( "E" ) #define ATOLERANCE lapackf77_dlamch( "E" ) /** Purpose ------- Solves an eigenvalue problem A * X = evalues X where A is a complex sparse matrix stored in the GPU memory. X and B are complex vectors stored on the GPU memory. This is a GPU implementation of the LOBPCG method. This method allocates all required memory space inside the routine. Also, the memory is not allocated as one big chunk, but seperatly for the different blocks. This allows to use texture also for large matrices. Arguments --------- @param[in] A magma_z_matrix input matrix A @param[in,out] solver_par magma_z_solver_par* solver parameters @param[in,out] precond_par magma_z_precond_par* preconditioner parameters @param[in] queue magma_queue_t Queue to execute in. 
@ingroup magmasparse_zheev ********************************************************************/ extern "C" magma_int_t magma_zlobpcg( magma_z_matrix A, magma_z_solver_par *solver_par, magma_z_preconditioner *precond_par, magma_queue_t queue ) { magma_int_t info = 0; #define residualNorms(i,iter) ( residualNorms + (i) + (iter)*n ) #define SWAP(x, y) { pointer = x; x = y; y = pointer; } #define hresidualNorms(i,iter) (hresidualNorms + (i) + (iter)*n ) #define gramA( m, n) (gramA + (m) + (n)*ldgram) #define gramB( m, n) (gramB + (m) + (n)*ldgram) #define gevectors(m, n) (gevectors + (m) + (n)*ldgram) #define h_gramB( m, n) (h_gramB + (m) + (n)*ldgram) #define magma_z_bspmv_tuned(m, n, alpha, A, X, beta, AX, queue) { \ magma_z_matrix x={Magma_CSR}, ax={Magma_CSR}; \ x.memory_location = Magma_DEV; x.num_rows = m; x.num_cols = n; x.major = MagmaColMajor; x.nnz = m*n; x.dval = X; x.storage_type = Magma_DENSE; \ ax.memory_location= Magma_DEV; ax.num_rows = m; ax.num_cols = n; ax.major = MagmaColMajor; ax.nnz = m*n; ax.dval = AX; ax.storage_type = Magma_DENSE; \ CHECK( magma_z_spmv(alpha, A, x, beta, ax, queue )); \ } //************************************************************** // %Memory allocation for the eigenvectors, eigenvalues, and workspace solver_par->solver = Magma_LOBPCG; magma_int_t m = A.num_rows; magma_int_t n = (solver_par->num_eigenvalues); magmaDoubleComplex *blockX = solver_par->eigenvectors; double *evalues = solver_par->eigenvalues; solver_par->numiter = 0; solver_par->spmv_count = 0; magmaDoubleComplex *dwork=NULL, *hwork=NULL; magmaDoubleComplex *blockP=NULL, *blockAP=NULL, *blockR=NULL, *blockAR=NULL, *blockAX=NULL, *blockW=NULL; magmaDoubleComplex *gramA=NULL, *gramB=NULL, *gramM=NULL; magmaDoubleComplex *gevectors=NULL, *h_gramB=NULL; dwork = NULL; hwork = NULL; blockP = NULL; blockR = NULL; blockAP = NULL; blockAR = NULL; blockAX = NULL; blockW = NULL; gramA = NULL; gramB = NULL; gramM = NULL; gevectors = NULL; h_gramB = NULL; 
magmaDoubleComplex *pointer, *origX = blockX; double *eval_gpu=NULL; magma_int_t iterationNumber, cBlockSize, restart = 1, iter; //Chronometry real_Double_t tempo1, tempo2; magma_int_t lwork = max( 2*n+n*magma_get_dsytrd_nb(n), 1 + 6*3*n + 2* 3*n* 3*n); magma_int_t *iwork={0}, liwork = 15*n+9; magma_int_t gramDim, ldgram = 3*n, ikind = 3; magmaDoubleComplex *hW={0}; // === Set solver parameters === double residualTolerance = solver_par->rtol; magma_int_t maxIterations = solver_par->maxiter; double tmp; double r0=0; // set in 1st iteration // === Set some constants & defaults === magmaDoubleComplex c_zero = MAGMA_Z_ZERO; magmaDoubleComplex c_one = MAGMA_Z_ONE; magmaDoubleComplex c_neg_one = MAGMA_Z_NEG_ONE; double *residualNorms={0}, *condestGhistory={0}, condestG={0}; double *gevalues={0}; magma_int_t *activeMask={0}; double *hresidualNorms={0}; #ifdef COMPLEX double *rwork={0}; magma_int_t lrwork = 1 + 5*(3*n) + 2*(3*n)*(3*n); CHECK( magma_dmalloc_cpu(&rwork, lrwork)); #endif CHECK( magma_zmalloc_pinned( &hwork , lwork )); CHECK( magma_zmalloc( &blockAX , m*n )); CHECK( magma_zmalloc( &blockAR , m*n )); CHECK( magma_zmalloc( &blockAP , m*n )); CHECK( magma_zmalloc( &blockR , m*n )); CHECK( magma_zmalloc( &blockP , m*n )); CHECK( magma_zmalloc( &blockW , m*n )); CHECK( magma_zmalloc( &dwork , m*n )); CHECK( magma_dmalloc( &eval_gpu , 3*n )); //**********************************************************+ // === Check some parameters for possible quick exit === solver_par->info = MAGMA_SUCCESS; if (m < 2) info = MAGMA_DIVERGENCE; else if (n > m) info = MAGMA_SLOW_CONVERGENCE; if (solver_par->info != 0) { magma_xerbla( __func__, -(info) ); goto cleanup; } solver_par->info = info; // local info variable; // === Allocate GPU memory for the residual norms' history === CHECK( magma_dmalloc(&residualNorms, (maxIterations+1) * n)); CHECK( magma_malloc( (void **)&activeMask, (n+1) * sizeof(magma_int_t) )); // === Allocate CPU work space === CHECK( 
magma_dmalloc_cpu(&condestGhistory, maxIterations+1)); CHECK( magma_dmalloc_cpu(&gevalues, 3 * n)); CHECK( magma_malloc_cpu((void **)&iwork, liwork * sizeof(magma_int_t))); CHECK( magma_zmalloc_pinned(&hW, n*n)); CHECK( magma_zmalloc_pinned(&gevectors, 9*n*n)); CHECK( magma_zmalloc_pinned(&h_gramB , 9*n*n)); // === Allocate GPU workspace === CHECK( magma_zmalloc(&gramM, n * n)); CHECK( magma_zmalloc(&gramA, 9 * n * n)); CHECK( magma_zmalloc(&gramB, 9 * n * n)); // === Set activemask to one === for(magma_int_t k =0; k<n; k++){ iwork[k]=1; } magma_setmatrix(n, 1, sizeof(magma_int_t), iwork, n , activeMask, n, queue); #if defined(PRECISION_s) ikind = 3; #endif // === Make the initial vectors orthonormal === magma_zgegqr_gpu(ikind, m, n, blockX, m, dwork, hwork, &info ); //magma_zorthomgs( m, n, blockX, queue ); magma_z_bspmv_tuned(m, n, c_one, A, blockX, c_zero, blockAX, queue ); solver_par->spmv_count++; // === Compute the Gram matrix = (X, AX) & its eigenstates === magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, n, m, c_one, blockX, m, blockAX, m, c_zero, gramM, n, queue ); magma_zheevd_gpu( MagmaVec, MagmaUpper, n, gramM, n, evalues, hW, n, hwork, lwork, #ifdef COMPLEX rwork, lrwork, #endif iwork, liwork, &info ); // === Update X = X * evectors === magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n, c_one, blockX, m, gramM, n, c_zero, blockW, m, queue ); SWAP(blockW, blockX); // === Update AX = AX * evectors === magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n, c_one, blockAX, m, gramM, n, c_zero, blockW, m, queue ); SWAP(blockW, blockAX); condestGhistory[1] = 7.82; tempo1 = magma_sync_wtime( queue ); // === Main LOBPCG loop ============================================================ for(iterationNumber = 1; iterationNumber < maxIterations; iterationNumber++) { // === compute the residuals (R = Ax - x evalues ) magmablas_zlacpy( MagmaFull, m, n, blockAX, m, blockR, m, queue ); /* for(magma_int_t i=0; i<n; i++) { magma_zaxpy( m, MAGMA_Z_MAKE(-evalues[i],0), blockX+i*m, 
1, blockR+i*m, 1, queue ); } */ magma_dsetmatrix( 3*n, 1, evalues, 3*n, eval_gpu, 3*n, queue ); CHECK( magma_zlobpcg_res( m, n, eval_gpu, blockX, blockR, eval_gpu, queue )); magmablas_dznrm2_cols( m, n, blockR, m, residualNorms(0, iterationNumber), queue ); // === remove the residuals corresponding to already converged evectors CHECK( magma_zcompact(m, n, blockR, m, residualNorms(0, iterationNumber), residualTolerance, activeMask, &cBlockSize, queue )); if (cBlockSize == 0) break; // === apply a preconditioner P to the active residulas: R_new = P R_old // === for now set P to be identity (no preconditioner => nothing to be done ) //magmablas_zlacpy( MagmaFull, m, cBlockSize, blockR, m, blockW, m, queue ); //SWAP(blockW, blockR); // preconditioner magma_z_matrix bWv={Magma_CSR}, bRv={Magma_CSR}; bWv.memory_location = Magma_DEV; bWv.num_rows = m; bWv.num_cols = cBlockSize; bWv.major = MagmaColMajor; bWv.nnz = m*cBlockSize; bWv.dval = blockW; bRv.memory_location = Magma_DEV; bRv.num_rows = m; bRv.num_cols = cBlockSize; bRv.major = MagmaColMajor; bRv.nnz = m*cBlockSize; bRv.dval = blockR; CHECK( magma_z_applyprecond_left( MagmaNoTrans, A, bRv, &bWv, precond_par, queue )); CHECK( magma_z_applyprecond_right( MagmaNoTrans, A, bWv, &bRv, precond_par, queue )); // === make the preconditioned residuals orthogonal to X if( precond_par->solver != Magma_NONE){ magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, cBlockSize, m, c_one, blockX, m, blockR, m, c_zero, gramB(0,0), ldgram, queue ); magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, cBlockSize, n, c_neg_one, blockX, m, gramB(0,0), ldgram, c_one, blockR, m, queue ); } // === make the active preconditioned residuals orthonormal magma_zgegqr_gpu(ikind, m, cBlockSize, blockR, m, dwork, hwork, &info ); #if defined(PRECISION_s) // re-orthogonalization SWAP(blockX, dwork); magma_zgegqr_gpu(ikind, m, cBlockSize, blockR, m, dwork, hwork, &info ); #endif //magma_zorthomgs( m, cBlockSize, blockR, queue ); // === compute AR 
magma_z_bspmv_tuned(m, cBlockSize, c_one, A, blockR, c_zero, blockAR, queue ); solver_par->spmv_count++; if (!restart) { // === compact P & AP as well CHECK( magma_zcompactActive(m, n, blockP, m, activeMask, queue )); CHECK( magma_zcompactActive(m, n, blockAP, m, activeMask, queue )); /* // === make P orthogonal to X ? magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, cBlockSize, m, c_one, blockX, m, blockP, m, c_zero, gramB(0,0), ldgram, queue ); magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, cBlockSize, n, c_neg_one, blockX, m, gramB(0,0), ldgram, c_one, blockP, m, queue ); // === make P orthogonal to R ? magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m, c_one, blockR, m, blockP, m, c_zero, gramB(0,0), ldgram, queue ); magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, cBlockSize, cBlockSize, c_neg_one, blockR, m, gramB(0,0), ldgram, c_one, blockP, m, queue ); */ // === Make P orthonormal & properly change AP (without multiplication by A) magma_zgegqr_gpu(ikind, m, cBlockSize, blockP, m, dwork, hwork, &info ); #if defined(PRECISION_s) // re-orthogonalization SWAP(blockX, dwork); magma_zgegqr_gpu(ikind, m, cBlockSize, blockP, m, dwork, hwork, &info ); #endif //magma_zorthomgs( m, cBlockSize, blockP, queue ); //magma_z_bspmv_tuned(m, cBlockSize, c_one, A, blockP, c_zero, blockAP, queue ); magma_zsetmatrix( cBlockSize, cBlockSize, hwork, cBlockSize, dwork, cBlockSize, queue ); // replacement according to Stan #if defined(PRECISION_s) || defined(PRECISION_d) magmablas_ztrsm( MagmaRight, MagmaUpper, MagmaNoTrans, MagmaNonUnit, m, cBlockSize, c_one, dwork, cBlockSize, blockAP, m, queue ); #else magma_ztrsm( MagmaRight, MagmaUpper, MagmaNoTrans, MagmaNonUnit, m, cBlockSize, c_one, dwork, cBlockSize, blockAP, m, queue ); #endif } iter = max( 1, iterationNumber - 10 - int(log(1.*cBlockSize)) ); double condestGmean = 0.; for(magma_int_t i = 0; i<iterationNumber-iter+1; i++){ condestGmean += condestGhistory[i]; } condestGmean = condestGmean / (iterationNumber-iter+1); 
if (restart) gramDim = n+cBlockSize; else gramDim = n+2*cBlockSize; /* --- The Raileight-Ritz method for [X R P] ----------------------- [ X R P ]' [AX AR AP] y = evalues [ X R P ]' [ X R P ], i.e., GramA GramB / X'AX X'AR X'AP \ / X'X X'R X'P \ | R'AX R'AR R'AP | y = evalues | R'X R'R R'P | \ P'AX P'AR P'AP / \ P'X P'R P'P / ----------------------------------------------------------------- */ // === assemble GramB; first, set it to I magmablas_zlaset( MagmaFull, ldgram, ldgram, c_zero, c_one, gramB, ldgram, queue ); // identity if (!restart) { magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m, c_one, blockP, m, blockX, m, c_zero, gramB(n+cBlockSize,0), ldgram, queue ); magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m, c_one, blockP, m, blockR, m, c_zero, gramB(n+cBlockSize,n), ldgram, queue ); } magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m, c_one, blockR, m, blockX, m, c_zero, gramB(n,0), ldgram, queue ); // === get GramB from the GPU to the CPU and compute its eigenvalues only magma_zgetmatrix( gramDim, gramDim, gramB, ldgram, h_gramB, ldgram, queue ); lapackf77_zheev("N", "L", &gramDim, h_gramB, &ldgram, gevalues, hwork, &lwork, #ifdef COMPLEX rwork, #endif &info); // === check stability criteria if we need to restart condestG = log10( gevalues[gramDim-1]/gevalues[0] ) + 1.; if ((condestG/condestGmean>2 && condestG>2) || condestG>8) { // Steepest descent restart for stability restart=1; printf("restart at step #%d\n", int(iterationNumber)); } // === assemble GramA; first, set it to I magmablas_zlaset( MagmaFull, ldgram, ldgram, c_zero, c_one, gramA, ldgram, queue ); // identity magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m, c_one, blockR, m, blockAX, m, c_zero, gramA(n,0), ldgram, queue ); magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m, c_one, blockR, m, blockAR, m, c_zero, gramA(n,n), ldgram, queue ); if (!restart) { magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m, 
c_one, blockP, m, blockAX, m, c_zero, gramA(n+cBlockSize,0), ldgram, queue ); magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m, c_one, blockP, m, blockAR, m, c_zero, gramA(n+cBlockSize,n), ldgram, queue ); magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m, c_one, blockP, m, blockAP, m, c_zero, gramA(n+cBlockSize,n+cBlockSize), ldgram, queue ); } /* // === Compute X' AX or just use the eigenvalues below ? magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, n, m, c_one, blockX, m, blockAX, m, c_zero, gramA(0,0), ldgram, queue ); */ if (restart==0) { magma_zgetmatrix( gramDim, gramDim, gramA, ldgram, gevectors, ldgram, queue ); } else { gramDim = n+cBlockSize; magma_zgetmatrix( gramDim, gramDim, gramA, ldgram, gevectors, ldgram, queue ); } for(magma_int_t k=0; k<n; k++) *gevectors(k,k) = MAGMA_Z_MAKE(evalues[k], 0); // === the previous eigensolver destroyed what is in h_gramB => must copy it again magma_zgetmatrix( gramDim, gramDim, gramB, ldgram, h_gramB, ldgram, queue ); magma_int_t itype = 1; lapackf77_zhegvd(&itype, "V", "L", &gramDim, gevectors, &ldgram, h_gramB, &ldgram, gevalues, hwork, &lwork, #ifdef COMPLEX rwork, &lrwork, #endif iwork, &liwork, &info); for(magma_int_t k =0; k<n; k++) evalues[k] = gevalues[k]; // === copy back the result to gramA on the GPU and use it for the updates magma_zsetmatrix( gramDim, gramDim, gevectors, ldgram, gramA, ldgram, queue ); if (restart == 0) { // === contribution from P to the new X (in new search direction P) magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize, c_one, blockP, m, gramA(n+cBlockSize,0), ldgram, c_zero, dwork, m, queue ); SWAP(dwork, blockP); // === contribution from R to the new X (in new search direction P) magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize, c_one, blockR, m, gramA(n,0), ldgram, c_one, blockP, m, queue ); // === corresponding contribution from AP to the new AX (in AP) magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize, c_one, blockAP, 
m, gramA(n+cBlockSize,0), ldgram, c_zero, dwork, m, queue ); SWAP(dwork, blockAP); // === corresponding contribution from AR to the new AX (in AP) magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize, c_one, blockAR, m, gramA(n,0), ldgram, c_one, blockAP, m, queue ); } else { // === contribution from R (only) to the new X magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize, c_one, blockR, m, gramA(n,0), ldgram, c_zero, blockP, m, queue ); // === corresponding contribution from AR (only) to the new AX magma_zgemm( MagmaNoTrans, MagmaNoTrans,m, n, cBlockSize, c_one, blockAR, m, gramA(n,0), ldgram, c_zero, blockAP, m, queue ); } // === contribution from old X to the new X + the new search direction P magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n, c_one, blockX, m, gramA, ldgram, c_zero, dwork, m, queue ); SWAP(dwork, blockX); //magma_zaxpy( m*n, c_one, blockP, 1, blockX, 1, queue ); CHECK( magma_zlobpcg_maxpy( m, n, blockP, blockX, queue )); // === corresponding contribution from old AX to new AX + AP magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n, c_one, blockAX, m, gramA, ldgram, c_zero, dwork, m, queue ); SWAP(dwork, blockAX); //magma_zaxpy( m*n, c_one, blockAP, 1, blockAX, 1, queue ); CHECK( magma_zlobpcg_maxpy( m, n, blockAP, blockAX, queue )); condestGhistory[iterationNumber+1]=condestG; magma_dgetmatrix( 1, 1, residualNorms(0, iterationNumber), 1, &tmp, 1, queue ); if ( iterationNumber == 1 ) { solver_par->init_res = tmp; r0 = tmp * solver_par->rtol; if ( r0 < ATOLERANCE ) r0 = ATOLERANCE; } solver_par->final_res = tmp; if ( tmp < r0 ) { break; } if (cBlockSize == 0) { break; } if ( solver_par->verbose!=0 ) { if ( iterationNumber%solver_par->verbose == 0 ) { // double res; // magma_zgetmatrix( 1, 1, // (magmaDoubleComplex*)residualNorms(0, iterationNumber), 1, // (magmaDoubleComplex*)&res, 1, queue ); // // printf("Iteration %4d, CBS %4d, Residual: %10.7f\n", // iterationNumber, cBlockSize, res); printf("%4d-%2d ", int(iterationNumber), 
int(cBlockSize)); magma_dprint_gpu(1, n, residualNorms(0, iterationNumber), 1); } } restart = 0; } // === end for iterationNumber = 1,maxIterations ======================= // fill solver info tempo2 = magma_sync_wtime( queue ); solver_par->runtime = (real_Double_t) tempo2-tempo1; solver_par->numiter = iterationNumber; if ( solver_par->numiter < solver_par->maxiter) { info = MAGMA_SUCCESS; } else if ( solver_par->init_res > solver_par->final_res ) info = MAGMA_SLOW_CONVERGENCE; else info = MAGMA_DIVERGENCE; // ============================================================================= // === postprocessing; // ============================================================================= // === compute the real AX and corresponding eigenvalues magma_z_bspmv_tuned(m, n, c_one, A, blockX, c_zero, blockAX, queue ); magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, n, m, c_one, blockX, m, blockAX, m, c_zero, gramM, n, queue ); magma_zheevd_gpu( MagmaVec, MagmaUpper, n, gramM, n, gevalues, dwork, n, hwork, lwork, #ifdef COMPLEX rwork, lrwork, #endif iwork, liwork, &info ); for(magma_int_t k =0; k<n; k++) evalues[k] = gevalues[k]; // === update X = X * evectors SWAP(blockX, dwork); magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n, c_one, dwork, m, gramM, n, c_zero, blockX, m, queue ); // === update AX = AX * evectors to compute the final residual SWAP(blockAX, dwork); magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n, c_one, dwork, m, gramM, n, c_zero, blockAX, m, queue ); // === compute R = AX - evalues X magmablas_zlacpy( MagmaFull, m, n, blockAX, m, blockR, m, queue ); for(magma_int_t i=0; i<n; i++) magma_zaxpy( m, MAGMA_Z_MAKE(-evalues[i], 0), blockX+i*m, 1, blockR+i*m, 1, queue ); // === residualNorms[iterationNumber] = || R || magmablas_dznrm2_cols( m, n, blockR, m, residualNorms(0, iterationNumber), queue ); // === restore blockX if needed if (blockX != origX) magmablas_zlacpy( MagmaFull, m, n, blockX, m, origX, m, queue ); printf("Eigenvalues:\n"); for(magma_int_t i 
=0; i<n; i++) printf("%e ", evalues[i]); printf("\n\n"); printf("Final residuals:\n"); magma_dprint_gpu(1, n, residualNorms(0, iterationNumber), 1); printf("\n\n"); //=== Prmagma_int_t residual history in a file for plotting ==== CHECK( magma_dmalloc_cpu(&hresidualNorms, (iterationNumber+1) * n)); magma_dgetmatrix( n, iterationNumber, residualNorms, n, hresidualNorms, n, queue ); solver_par->iter_res = *hresidualNorms(0, iterationNumber-1); printf("Residuals are stored in file residualNorms\n"); printf("Plot the residuals using: myplot \n"); FILE *residuals_file; residuals_file = fopen("residualNorms", "w"); for(magma_int_t i =1; i<iterationNumber; i++) { for(magma_int_t j = 0; j<n; j++) fprintf(residuals_file, "%f ", *hresidualNorms(j,i)); fprintf(residuals_file, "\n"); } fclose(residuals_file); cleanup: magma_free_cpu(hresidualNorms); // === free work space magma_free( residualNorms ); magma_free_cpu( condestGhistory ); magma_free_cpu( gevalues ); magma_free_cpu( iwork ); magma_free_pinned( hW ); magma_free_pinned( gevectors ); magma_free_pinned( h_gramB ); magma_free( gramM ); magma_free( gramA ); magma_free( gramB ); magma_free( activeMask ); if (blockX != (solver_par->eigenvectors)) magma_free( blockX ); if (blockAX != (solver_par->eigenvectors)) magma_free( blockAX ); if (blockAR != (solver_par->eigenvectors)) magma_free( blockAR ); if (blockAP != (solver_par->eigenvectors)) magma_free( blockAP ); if (blockR != (solver_par->eigenvectors)) magma_free( blockR ); if (blockP != (solver_par->eigenvectors)) magma_free( blockP ); if (blockW != (solver_par->eigenvectors)) magma_free( blockW ); if (dwork != (solver_par->eigenvectors)) magma_free( dwork ); magma_free( eval_gpu ); magma_free_pinned( hwork ); #ifdef COMPLEX magma_free_cpu( rwork ); rwork = NULL; #endif return info; }
maxhutch/magma
sparse-iter/src/zlobpcg.cpp
C++
bsd-3-clause
27,130
// Out-of-process compiler used by bokeh: compiles one TypeScript /
// CoffeeScript / JavaScript / Less source (request comes from --file
// arguments or as a JSON object on stdin), collects its module
// dependencies, and writes a single JSON reply to stdout.

import * as fs from "fs"
import * as path from "path"
import * as ts from "typescript"
const coffee = require("coffeescript")
const less = require("less")

import {argv} from "yargs"

import {collect_deps} from "./dependencies"

// Convert a CoffeeScript compiler error into a plain serializable object,
// adding an annotated source excerpt when location info is available.
const mkCoffeescriptError = (error: any, file?: string) => {
  const message = error.message

  if (error.location == null) {
    const text = [file || "<string>", message].join(":")
    return {message, text}
  } else {
    const location = error.location

    // CoffeeScript locations are 0-based; report 1-based line/column.
    const line = location.first_line + 1
    const column = location.first_column + 1

    const text = [file || "<string>", line, column, message].join(":")

    // Width of the "^^^" marker; widened to cover the error span when it
    // lies on a single line.
    let markerLen = 2
    if (location.first_line === location.last_line)
      markerLen += location.last_column - location.first_column

    const extract = error.code.split('\n')[line - 1]

    const annotated = [
      text,
      " " + extract,
      " " + Array(column).join(' ') + Array(markerLen).join('^'),
    ].join('\n')

    return {message, line, column, text, extract, annotated}
  }
}

// Convert a Less render error into a plain serializable object.
const mkLessError = (error: any, file?: string) => {
  const message = error.message
  const line = error.line
  const column = error.column + 1 // Less columns are 0-based
  const text = [file || "<string>", line, column, message].join(":")
  // NOTE(review): extract is indexed by the absolute line number here —
  // confirm against less's documented error.extract shape (it is usually a
  // 3-element window around the error).
  const extract = error.extract[line]
  const annotated = [text, " " + extract].join("\n")
  return {message, line, column, text, extract, annotated}
}

// Write one JSON message to stdout (the reply protocol of this process).
const reply = (data: any) => {
  process.stdout.write(JSON.stringify(data))
  process.stdout.write("\n")
}

// Map of (virtual) file name -> file contents, used as the in-memory file
// system for the TypeScript compiler.
type Files = {[name: string]: string}

// Compile a set of in-memory TypeScript sources against bokehjs' bundled
// type definitions. Returns emitted outputs, plus a formatted diagnostics
// string when there were compile errors (outputs are still returned because
// noEmitOnError is false).
function compile_typescript(inputs: Files, bokehjs_dir: string): {outputs: Files, error?: string} {
  const options: ts.CompilerOptions = {
    noImplicitAny: true,
    noImplicitThis: true,
    noImplicitReturns: true,
    noUnusedLocals: true,
    noUnusedParameters: true,
    strictNullChecks: true,
    strictBindCallApply: false,
    strictFunctionTypes: false,
    strictPropertyInitialization: false,
    alwaysStrict: true,
    noErrorTruncation: true,
    noEmitOnError: false,
    declaration: false,
    sourceMap: false,
    importHelpers: false,
    experimentalDecorators: true,
    module: ts.ModuleKind.CommonJS,
    moduleResolution: ts.ModuleResolutionKind.NodeJs,
    target: ts.ScriptTarget.ES5,
    lib: [
      "lib.es5.d.ts",
      "lib.dom.d.ts",
      "lib.es2015.core.d.ts",
      "lib.es2015.promise.d.ts",
      "lib.es2015.symbol.d.ts",
      "lib.es2015.iterable.d.ts",
    ],
    types: [],
    baseUrl: ".",
    // Resolve bare imports against bokehjs' shipped modules and typings.
    paths: {
      "*": [
        path.join(bokehjs_dir, "js/lib/*"),
        path.join(bokehjs_dir, "js/types/*"),
      ],
    },
  }

  // CompilerHost that prefers the in-memory `inputs` over the real file
  // system, falling back to ts.sys for everything else (lib files, typings).
  const host: ts.CompilerHost = {
    getDefaultLibFileName: () => "lib.d.ts",
    getDefaultLibLocation: () => {
      // bokeh/server/static or bokehjs/build
      if (path.basename(bokehjs_dir) == "static")
        return path.join(bokehjs_dir, "lib")
      else
        return path.join(path.dirname(bokehjs_dir), "node_modules/typescript/lib")
    },
    getCurrentDirectory: () => ts.sys.getCurrentDirectory(),
    getDirectories: (path) => ts.sys.getDirectories(path),
    getCanonicalFileName: (name) => ts.sys.useCaseSensitiveFileNames ? name : name.toLowerCase(),
    useCaseSensitiveFileNames: () => ts.sys.useCaseSensitiveFileNames,
    getNewLine: () => ts.sys.newLine,
    fileExists(name: string): boolean {
      return inputs[name] != null || ts.sys.fileExists(name)
    },
    readFile(name: string): string | undefined {
      return inputs[name] != null ? inputs[name] : ts.sys.readFile(name)
    },
    writeFile(name, content): void {
      ts.sys.writeFile(name, content)
    },
    getSourceFile(name: string, target: ts.ScriptTarget, _onError?: (message: string) => void) {
      const source = inputs[name] != null ? inputs[name] : ts.sys.readFile(name)
      return source !== undefined ? ts.createSourceFile(name, source, target) : undefined
    },
  }

  const program = ts.createProgram(Object.keys(inputs), options, host)

  const outputs: Files = {}
  const emitted = program.emit(undefined, (name, output) => outputs[name] = output)
  // Combine type-check diagnostics with emit diagnostics.
  const diagnostics = ts.getPreEmitDiagnostics(program).concat(emitted.diagnostics)

  if (diagnostics.length == 0)
    return {outputs}
  else {
    const format_host: ts.FormatDiagnosticsHost = {
      getCanonicalFileName: (path) => path,
      getCurrentDirectory: ts.sys.getCurrentDirectory,
      getNewLine: () => ts.sys.newLine,
    }
    const error = ts.formatDiagnosticsWithColorAndContext(
      ts.sortAndDeduplicateDiagnostics(diagnostics), format_host)
    return {outputs, error}
  }
}

// Transpile (no type checking beyond syntax) a single JavaScript source to
// ES5/CommonJS. Returns the transpiled text, plus formatted diagnostics if
// any were produced.
function compile_javascript(file: string, code: string): {output: string, error?: string} {
  const result = ts.transpileModule(code, {
    fileName: file,
    reportDiagnostics: true,
    compilerOptions: {
      target: ts.ScriptTarget.ES5,
      module: ts.ModuleKind.CommonJS,
    },
  })

  const format_host: ts.FormatDiagnosticsHost = {
    getCanonicalFileName: (path) => path,
    getCurrentDirectory: ts.sys.getCurrentDirectory,
    getNewLine: () => ts.sys.newLine,
  }

  const {outputText, diagnostics} = result
  if (diagnostics == null || diagnostics.length == 0)
    return {output: outputText}
  else {
    const error = ts.formatDiagnosticsWithColorAndContext(
      ts.sortAndDeduplicateDiagnostics(diagnostics), format_host)
    return {output: outputText, error}
  }
}

// Rebuild a path with an optionally replaced directory and/or extension.
function rename(p: string, options: {dir?: string, ext?: string}): string {
  let {dir, name, ext} = path.parse(p)
  if (options.dir != null)
    dir = options.dir
  if (options.ext != null)
    ext = options.ext
  return path.format({dir, name, ext})
}

// Use forward slashes regardless of platform, so map keys are stable.
function normalize(path: string): string {
  return path.replace(/\\/g, "/")
}

// Compile `input.code` according to `input.lang`, collect the resulting
// module's dependencies, and reply on stdout. Replies exactly once, either
// with {code, deps} or with {error}.
const compile_and_resolve_deps = (input: {code: string, lang: string, file: string, bokehjs_dir: string}) => {
  const {file, lang, bokehjs_dir} = input
  let {code} = input

  let output: string
  switch (lang) {
    case "typescript":
      const inputs = {[normalize(file)]: code}
      const result = compile_typescript(inputs, bokehjs_dir)

      if (result.error == null)
        output = result.outputs[normalize(rename(file, {ext: ".js"}))]
      else
        return reply({error: result.error})
      break
    case "coffeescript":
      try {
        code = coffee.compile(code, {bare: true, shiftLine: true})
      } catch (error) {
        return reply({error: mkCoffeescriptError(error, file)})
      }
      // NOTE: intentional fall-through — the CoffeeScript output is plain
      // JavaScript and still needs the "javascript" transpile step below.
    case "javascript": {
      const result = compile_javascript(file, code)
      if (result.error == null)
        output = result.output
      else
        return reply({error: result.error})
      break
    }
    case "less":
      const options = {
        paths: [path.dirname(file)],
        compress: true,
        ieCompat: false,
      }
      // Less renders asynchronously; reply from the callback and return
      // early — the dependency collection below is for JS modules only.
      less.render(code, options, (error: any, output: any) => {
        if (error != null)
          reply({error: mkLessError(error, file)})
        else
          reply({code: output.css})
      })
      return
    default:
      throw new Error(`unsupported input type: ${lang}`)
  }

  // Parse the compiled JS to extract its require()'d dependencies.
  const source = ts.createSourceFile(file, output, ts.ScriptTarget.ES5, true, ts.ScriptKind.JS)
  const deps = collect_deps(source)

  return reply({code: output, deps})
}

if (argv.file != null) {
  // Command-line mode: read the source from disk.
  const input = {
    code: fs.readFileSync(argv.file as string, "utf-8"),
    lang: (argv.lang as string | undefined) || "coffeescript",
    file: argv.file as string,
    bokehjs_dir: (argv.bokehjsDir as string | undefined) || "./build", // this is what bokeh.settings defaults to
  }
  compile_and_resolve_deps(input)
} else {
  // Server mode: read one JSON request from stdin.
  const stdin = process.stdin

  stdin.resume()
  stdin.setEncoding("utf-8")

  let data = ""
  stdin.on("data", (chunk: string) => data += chunk)
  stdin.on("end", () => compile_and_resolve_deps(JSON.parse(data)))
}
stonebig/bokeh
bokehjs/src/compiler/compile.ts
TypeScript
bsd-3-clause
7,841
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of the copyright holder nor the names of its contributors
#       may be used to endorse or promote products derived from this software without
#       specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# NOTE(review): this module follows vspk's generated-code layout
# (expose_attribute + property boilerplate) — presumably auto-generated;
# prefer regenerating over hand-editing.

from .fetchers import NUPermissionsFetcher


from .fetchers import NUMetadatasFetcher


from .fetchers import NUGlobalMetadatasFetcher

from bambou import NURESTObject


class NUVMResync(NURESTObject):
    """ Represents a VMResync in the VSD

        Notes:
            Provide information about the state of a VM resync request.
    """

    # REST endpoint names used by bambou when building request URLs.
    __rest_name__ = "resync"
    __resource_name__ = "resync"

    ## Constants

    CONST_STATUS_IN_PROGRESS = "IN_PROGRESS"

    CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"

    CONST_STATUS_SUCCESS = "SUCCESS"

    CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"

    def __init__(self, **kwargs):
        """ Initializes a VMResync instance

            Notes:
                You can specify all parameters while calling this methods.
                A special argument named `data` will enable you to load the
                object from a Python dictionary

            Examples:
                >>> vmresync = NUVMResync(id=u'xxxx-xxx-xxx-xxx', name=u'VMResync')
                >>> vmresync = NUVMResync(data=my_dict)
        """

        super(NUVMResync, self).__init__()

        # Read/Write Attributes
        # (backing fields for the properties below; registered with bambou
        # via expose_attribute so they round-trip to the VSD REST API)

        self._last_request_timestamp = None
        self._last_time_resync_initiated = None
        self._last_updated_by = None
        self._last_updated_date = None
        self._embedded_metadata = None
        self._entity_scope = None
        self._creation_date = None
        self._status = None
        self._owner = None
        self._external_id = None

        self.expose_attribute(local_name="last_request_timestamp", remote_name="lastRequestTimestamp", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_time_resync_initiated", remote_name="lastTimeResyncInitiated", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
        self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
        self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="status", remote_name="status", attribute_type=str, is_required=False, is_unique=False, choices=[u'IN_PROGRESS', u'SUCCESS'])
        self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)

        # Fetchers
        # (lazy accessors for child collections of this REST object)

        self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self._compute_args(**kwargs)

    # Properties

    @property
    def last_request_timestamp(self):
        """ Get last_request_timestamp value.

            Notes:
                Time of the last timestamp received

                This attribute is named `lastRequestTimestamp` in VSD API.
        """
        return self._last_request_timestamp

    @last_request_timestamp.setter
    def last_request_timestamp(self, value):
        """ Set last_request_timestamp value.

            Notes:
                Time of the last timestamp received

                This attribute is named `lastRequestTimestamp` in VSD API.
        """
        self._last_request_timestamp = value

    @property
    def last_time_resync_initiated(self):
        """ Get last_time_resync_initiated value.

            Notes:
                Time that the resync was initiated

                This attribute is named `lastTimeResyncInitiated` in VSD API.
        """
        return self._last_time_resync_initiated

    @last_time_resync_initiated.setter
    def last_time_resync_initiated(self, value):
        """ Set last_time_resync_initiated value.

            Notes:
                Time that the resync was initiated

                This attribute is named `lastTimeResyncInitiated` in VSD API.
        """
        self._last_time_resync_initiated = value

    @property
    def last_updated_by(self):
        """ Get last_updated_by value.

            Notes:
                ID of the user who last updated the object.

                This attribute is named `lastUpdatedBy` in VSD API.
        """
        return self._last_updated_by

    @last_updated_by.setter
    def last_updated_by(self, value):
        """ Set last_updated_by value.

            Notes:
                ID of the user who last updated the object.

                This attribute is named `lastUpdatedBy` in VSD API.
        """
        self._last_updated_by = value

    @property
    def last_updated_date(self):
        """ Get last_updated_date value.

            Notes:
                Time stamp when this object was last updated.

                This attribute is named `lastUpdatedDate` in VSD API.
        """
        return self._last_updated_date

    @last_updated_date.setter
    def last_updated_date(self, value):
        """ Set last_updated_date value.

            Notes:
                Time stamp when this object was last updated.

                This attribute is named `lastUpdatedDate` in VSD API.
        """
        self._last_updated_date = value

    @property
    def embedded_metadata(self):
        """ Get embedded_metadata value.

            Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.

                This attribute is named `embeddedMetadata` in VSD API.
        """
        return self._embedded_metadata

    @embedded_metadata.setter
    def embedded_metadata(self, value):
        """ Set embedded_metadata value.

            Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.

                This attribute is named `embeddedMetadata` in VSD API.
        """
        self._embedded_metadata = value

    @property
    def entity_scope(self):
        """ Get entity_scope value.

            Notes:
                Specify if scope of entity is Data center or Enterprise level

                This attribute is named `entityScope` in VSD API.
        """
        return self._entity_scope

    @entity_scope.setter
    def entity_scope(self, value):
        """ Set entity_scope value.

            Notes:
                Specify if scope of entity is Data center or Enterprise level

                This attribute is named `entityScope` in VSD API.
        """
        self._entity_scope = value

    @property
    def creation_date(self):
        """ Get creation_date value.

            Notes:
                Time stamp when this object was created.

                This attribute is named `creationDate` in VSD API.
        """
        return self._creation_date

    @creation_date.setter
    def creation_date(self, value):
        """ Set creation_date value.

            Notes:
                Time stamp when this object was created.

                This attribute is named `creationDate` in VSD API.
        """
        self._creation_date = value

    @property
    def status(self):
        """ Get status value.

            Notes:
                Status of the resync
        """
        return self._status

    @status.setter
    def status(self, value):
        """ Set status value.

            Notes:
                Status of the resync
        """
        self._status = value

    @property
    def owner(self):
        """ Get owner value.

            Notes:
                Identifies the user that has created this object.
        """
        return self._owner

    @owner.setter
    def owner(self, value):
        """ Set owner value.

            Notes:
                Identifies the user that has created this object.
        """
        self._owner = value

    @property
    def external_id(self):
        """ Get external_id value.

            Notes:
                External object ID. Used for integration with third party systems

                This attribute is named `externalID` in VSD API.
        """
        return self._external_id

    @external_id.setter
    def external_id(self, value):
        """ Set external_id value.

            Notes:
                External object ID. Used for integration with third party systems

                This attribute is named `externalID` in VSD API.
        """
        self._external_id = value
nuagenetworks/vspk-python
vspk/v6/nuvmresync.py
Python
bsd-3-clause
11,928
package com.mistraltech.smogen.codegenerator.javabuilder; public class InterfaceMethodBuilder extends MethodSignatureBuilder<InterfaceMethodBuilder> { private InterfaceMethodBuilder() { } public static InterfaceMethodBuilder anInterfaceMethod() { return new InterfaceMethodBuilder(); } @Override public String build(JavaBuilderContext context) { return super.build(context) + ";"; } }
mistraltechnologies/smogen
src/main/java/com/mistraltech/smogen/codegenerator/javabuilder/InterfaceMethodBuilder.java
Java
bsd-3-clause
432
class ProductTag < ActiveRecord::Base has_attached_file :icon, :url => "/assets/product_tags/:id/:basename.:extension", :path => ":rails_root/public/assets/product_tags/:id/:basename.:extension" validates :name, :presence => true, :uniqueness => true has_many :products end
secoint/spree_products_tags
app/models/product_tag.rb
Ruby
bsd-3-clause
323
"use strict" function checkEnvironmentForConfig(config:Object) : Object { let mentionBotEnvConfig; try { mentionBotEnvConfig = JSON.parse(process.env.MENTION_BOT_CONFIG); } catch(e) { mentionBotEnvConfig = {}; } return Object.keys(config).reduce((previousValue, key) => { let defaultConfigValue = config[key]; let environmentVariable = mentionBotEnvConfig[key]; let configElement = {}; configElement[key] = environmentVariable === undefined ? defaultConfigValue : environmentVariable; return {...previousValue, ...configElement}; }, {}); } module.exports = { checkEnvironmentForConfig }
ifuller1/mention-bot
environment.js
JavaScript
bsd-3-clause
641
// Copyright 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/resources/content_layer_updater.h"

#include "base/debug/trace_event.h"
#include "base/time.h"
#include "cc/debug/rendering_stats_instrumentation.h"
#include "cc/resources/layer_painter.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkPaint.h"
#include "third_party/skia/include/core/SkRect.h"
#include "third_party/skia/include/core/SkScalar.h"
#include "ui/gfx/rect_conversions.h"
#include "ui/gfx/rect_f.h"

namespace cc {

// Takes ownership of |painter|; |stats_instrumentation| is borrowed and
// must outlive this updater.
ContentLayerUpdater::ContentLayerUpdater(
    scoped_ptr<LayerPainter> painter,
    RenderingStatsInstrumentation* stats_instrumentation)
    : rendering_stats_instrumentation_(stats_instrumentation),
      painter_(painter.Pass()) {}

ContentLayerUpdater::~ContentLayerUpdater() {}

// Paints |content_rect| (in content space) into |canvas| by delegating to
// the wrapped LayerPainter, which paints in layer space. The canvas is
// translated/scaled so layer-space painting lands on the requested content
// pixels. On return, |resulting_opaque_rect| is the opaque region reported
// by the painter, converted back to content space, and |content_rect_| is
// recorded for subclasses.
void ContentLayerUpdater::PaintContents(SkCanvas* canvas,
                                        gfx::Rect content_rect,
                                        float contents_width_scale,
                                        float contents_height_scale,
                                        gfx::Rect* resulting_opaque_rect,
                                        RenderingStats* stats) {
  TRACE_EVENT0("cc", "ContentLayerUpdater::PaintContents");
  canvas->save();
  // Map the canvas origin to the top-left of the content rect.
  canvas->translate(SkFloatToScalar(-content_rect.x()),
                    SkFloatToScalar(-content_rect.y()));

  // Compute the layer-space rect that corresponds to |content_rect| and set
  // up the content-scale transform, if any.
  gfx::Rect layer_rect = content_rect;

  if (contents_width_scale != 1.f || contents_height_scale != 1.f) {
    canvas->scale(SkFloatToScalar(contents_width_scale),
                  SkFloatToScalar(contents_height_scale));

    // Enclosing rect: rounds outward so every content pixel is covered.
    gfx::RectF rect = gfx::ScaleRect(
        content_rect, 1.f / contents_width_scale,
        1.f / contents_height_scale);
    layer_rect = gfx::ToEnclosingRect(rect);
  }

  // Clear the target area first (kClear_Mode writes transparent black),
  // then clip so the painter cannot draw outside it.
  SkPaint paint;
  paint.setAntiAlias(false);
  paint.setXfermodeMode(SkXfermode::kClear_Mode);
  SkRect layer_sk_rect = SkRect::MakeXYWH(
      layer_rect.x(), layer_rect.y(), layer_rect.width(), layer_rect.height());
  canvas->drawRect(layer_sk_rect, paint);
  canvas->clipRect(layer_sk_rect);

  gfx::RectF opaque_layer_rect;

  // Time the paint only when the caller asked for stats.
  base::TimeTicks paint_begin_time;
  if (stats)
    paint_begin_time = base::TimeTicks::Now();
  painter_->Paint(canvas, layer_rect, &opaque_layer_rect);
  if (stats) {
    stats->total_paint_time += base::TimeTicks::Now() - paint_begin_time;
    stats->total_pixels_painted +=
        content_rect.width() * content_rect.height();
  }
  canvas->restore();

  // Opaque region back to content space; enclosed (rounds inward) because
  // an opaque claim must be conservative.
  gfx::RectF opaque_content_rect = gfx::ScaleRect(
      opaque_layer_rect, contents_width_scale, contents_height_scale);
  *resulting_opaque_rect = gfx::ToEnclosedRect(opaque_content_rect);

  content_rect_ = content_rect;
}

}  // namespace cc
codenote/chromium-test
cc/resources/content_layer_updater.cc
C++
bsd-3-clause
2,848
# Copyright (C) 2010 CENATIC: Centro Nacional de Referencia de # Aplicacion de las TIC basadas en Fuentes Abiertas, Spain. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # Neither the name of the CENATIC nor the names of its contributors # may be used to endorse or promote products derived from this # software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# # You may contact the copyright holder at: Fundacion CENATIC, Edificio # de Servicios Sociales: C/ Vistahermosa, 1, 3ra planta, 06200 # Almendralejo (Badajoz), Spain from DBSlayer import Query def get_type_name (type_id): l = get_type (type_id) if not l: return None return l['name'] def get_type (type_id): q = "SELECT id, type "\ "FROM asset_types WHERE id=%(type_id)s;" % locals() query = Query(q) if len(query) != 1: return None ret = {'id': type_id, 'name': query['type'][0]} return ret def get_types (): q = "SELECT id, type "\ "FROM asset_types;" % locals() query = Query(q) if not len(query): return None ret = [] for x in query: d={'id': query[x]['id'], 'name': query[x]['type']} ret.append(d) return ret def test (): import sys try: type_id = sys.argv[1] except IndexError: print 'Required test parameters: type_id' sys.exit(1) print 'Types:', get_types() print 'type_id %s, type_name %s' % (type_id, get_type_name(type_id)) print get_type(type_id), if __name__ == '__main__': test()
helix84/activae
src/Type.py
Python
bsd-3-clause
2,833
/* -- MAGMA (version 1.5.0-beta3) -- Univ. of Tennessee, Knoxville Univ. of California, Berkeley Univ. of Colorado, Denver @date July 2014 @generated from magma_z_init.cpp normal z -> s, Fri Jul 18 17:34:30 2014 @author Hartwig Anzt */ #include <fstream> #include <stdlib.h> #include <string> #include <sstream> #include <iostream> #include <ostream> #include <assert.h> #include <stdio.h> #include "../include/magmasparse_s.h" #include "../../include/magma.h" #include "../include/mmio.h" using namespace std; /** Purpose ------- Initialize a magma_s_vector. Arguments --------- @param x magma_s_vector vector to initialize @param mem_loc magma_location_t memory for vector @param num_rows magma_int_t desired length of vector @param values float entries in vector @ingroup magmasparse_saux ********************************************************************/ magma_int_t magma_s_vinit( magma_s_vector *x, magma_location_t mem_loc, magma_int_t num_rows, float values ){ x->memory_location = Magma_CPU; x->num_rows = num_rows; x->nnz = num_rows; if( mem_loc == Magma_CPU ){ x->memory_location = Magma_CPU; magma_smalloc_cpu( &x->val, num_rows ); if ( x->val == NULL ) return MAGMA_ERR_HOST_ALLOC; for( magma_int_t i=0; i<num_rows; i++) x->val[i] = values; return MAGMA_SUCCESS; } else if( mem_loc == Magma_DEV ){ x->memory_location = Magma_DEV; float *tmp; magma_smalloc_cpu( &tmp, num_rows ); if ( tmp == NULL ) return MAGMA_ERR_HOST_ALLOC; for( magma_int_t i=0; i<num_rows; i++) tmp[i] = values; if (MAGMA_SUCCESS != magma_smalloc( &x->val, x->num_rows)) return MAGMA_ERR_DEVICE_ALLOC; // data transfer magma_ssetvector( x->num_rows, tmp, 1, x->val, 1 ); magma_free_cpu(tmp); return MAGMA_SUCCESS; } return MAGMA_SUCCESS; }
EmergentOrder/magma
sparse-iter/control/magma_s_init.cpp
C++
bsd-3-clause
2,249
//To Test:http://localhost:8080/nbia-auth/services/v3/getProtectionGrpList?format=html package gov.nih.nci.nbia.restAPI; import gov.nih.nci.nbia.dao.TrialDataProvenanceDAO; import gov.nih.nci.nbia.util.SpringApplicationContext; import gov.nih.nci.security.SecurityServiceProvider; import gov.nih.nci.security.UserProvisioningManager; import gov.nih.nci.security.authorization.domainobjects.ProtectionGroup; import gov.nih.nci.security.authorization.domainobjects.ProtectionElement; import gov.nih.nci.security.authorization.domainobjects.Role; import gov.nih.nci.security.dao.RoleSearchCriteria; import gov.nih.nci.security.dao.SearchCriteria; import gov.nih.nci.security.exceptions.CSConfigurationException; import gov.nih.nci.security.exceptions.CSException; import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Path; import javax.ws.rs.GET; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.springframework.dao.DataAccessException; @Path("/v3/getProtectionGrpList") public class V3_getProtectionGrpList extends getData{ private static final String[] columns={"pgName", "description", "dataSetName"}; public final static String TEXT_CSV = "text/csv"; @Context private HttpServletRequest httpRequest; /** * This method get a list of names of protection group * * @return String - list of names of protection group */ @GET @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON, MediaType.TEXT_HTML, TEXT_CSV}) public Response constructResponse(@QueryParam("format") String format) { List<Object[]> data = null; try { UserProvisioningManager upm = getUpm(); java.util.List<ProtectionGroup> protectionGrpLst = upm.getProtectionGroups(); if ( protectionGrpLst != null) { data = new ArrayList<Object []>(); for(ProtectionGroup pg : protectionGrpLst) { List<ProtectionElement> pes = new 
ArrayList<ProtectionElement>(upm.getProtectionElements(pg.getProtectionGroupId().toString())); for (ProtectionElement pe : pes) { Object [] objs = {pg.getProtectionGroupName(), pg.getProtectionGroupDescription(), pe.getProtectionElementName()}; data.add(objs); } } } else { Object [] objs = {"Warning: No Protection Group has defined yet!", "NA", "NA"}; data.add(objs); } } catch (CSConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (CSException e) { // TODO Auto-generated catch block e.printStackTrace(); } return formatResponse(format, data, columns); } }
NCIP/national-biomedical-image-archive
software/nbia-api/src/gov/nih/nci/nbia/restAPI/V3_getProtectionGrpList.java
Java
bsd-3-clause
2,741
# Copyright (c) 2013 LE GOFF Vincent # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Package containing the different outputs. Each output type is defined inside a module. """
v-legoff/croissant
croissant/output/__init__.py
Python
bsd-3-clause
1,636
/* * Copyright (c) 2013-2013, KNOPFLERFISH project * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following * conditions are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * - Neither the name of the KNOPFLERFISH project nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.knopflerfish.service.repositorymanager; import org.osgi.framework.Constants; import org.osgi.framework.ServiceReference; import org.osgi.service.repository.Repository; public class RepositoryInfo implements Comparable<RepositoryInfo> { final private long id; final private int rank; final ServiceReference<Repository> sr; public RepositoryInfo(ServiceReference<Repository> sr) { this.id = ((Long)sr.getProperty(Constants.SERVICE_ID)).longValue(); Object r = sr.getProperty(Constants.SERVICE_RANKING); if (r != null && r instanceof Integer) { this.rank = ((Integer)r).intValue(); } else { this.rank = 0; } this.sr = sr; } public RepositoryInfo(RepositoryInfo old, int rank) { this.id = old.id; this.rank = rank; this.sr = old.sr; } public long getId() { return id; } public int getRank() { return rank; } public Object getProperty(String prop) { return sr.getProperty(prop); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + (int) (id ^ (id >>> 32)); return result; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null) return false; if (getClass() != o.getClass()) return false; RepositoryInfo rio = (RepositoryInfo) o; if (id != rio.id || rank != rio.rank) return false; return true; } @Override public int compareTo(RepositoryInfo o) { if (equals(o)) { return 0; } if (rank != o.rank) { return o.rank - rank; } else { return id < o.id ? -1 : 1; } } public ServiceReference<Repository> getServiceReference() { return sr; } @Override public String toString() { return "RepositoryInfo [id=" + id + ", rank=" + rank + "]"; } }
knopflerfish/knopflerfish.org
osgi/bundles/repository/repositorymanager/src/org/knopflerfish/service/repositorymanager/RepositoryInfo.java
Java
bsd-3-clause
3,464
package org.chasen.mecab.wrapper;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.junit.Test;

/**
 * Exercises concurrent use of {@code Tagger#iterator(String)} from raw
 * threads, Callables and Runnables. Each worker creates its own Tagger,
 * since the test is about concurrent native iteration, not sharing.
 */
public class NodeIteratorTest {

    @Test
    public void threads() throws InterruptedException {
        List<Thread> threads = new ArrayList<Thread>();
        threads.add(new Thread(){
            public void run(){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator("本日は晴天なり")){
                    System.out.println(node.getSurface());
                }
            }
        });
        threads.add(new Thread(){
            public void run(){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator("本日は雨です")){
                    System.out.println(node.getSurface());
                }
            }
        });
        threads.add(new Thread(){
            public void run(){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator("昨日は曇りでした")){
                    System.out.println(node.getSurface());
                }
            }
        });
        for(Thread th: threads){
            th.start();
        }
        // Wait for every worker so failures surface inside the test method.
        for(Thread th: threads){
            th.join();
        }
    }

    @Test
    public void executors() throws InterruptedException, ExecutionException {
        class Hoge {
            public void parse(String str){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator(str)){
                    System.out.println(node.getSurface());
                }
            }
        }
        final Hoge hoge = new Hoge();

        ExecutorService executors = Executors.newCachedThreadPool();
        try {
            List<Future<?>> futures = new ArrayList<Future<?>>();
            futures.add(executors.submit(new Callable<Void>(){
                public Void call() throws Exception {
                    hoge.parse("本日は晴天なり");
                    return null;
                }
            }));
            futures.add(executors.submit(new Callable<Void>(){
                public Void call() throws Exception {
                    hoge.parse("本日は雨です");
                    return null;
                }
            }));
            futures.add(executors.submit(new Callable<Void>(){
                public Void call() throws Exception {
                    hoge.parse("昨日は曇りでした");
                    return null;
                }
            }));
            // get() re-throws any worker exception as ExecutionException.
            for(Future<?> f: futures){
                f.get();
            }
        } finally {
            // Fix: the pool was never shut down, leaking its worker
            // threads after the test finished.
            executors.shutdown();
        }
    }

    @Test
    public void executors_runnable() throws InterruptedException, ExecutionException {
        class Hoge implements Runnable {
            String str;
            Hoge(String str){
                this.str = str;
            }
            public void run(){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator(str)){
                    System.out.println(node.getSurface());
                }
            }
        }

        ExecutorService executors = Executors.newCachedThreadPool();
        try {
            List<Future<?>> futures = new ArrayList<Future<?>>();
            futures.add(executors.submit(new Hoge("本日は晴天なり")));
            futures.add(executors.submit(new Hoge("本日は雨です")));
            futures.add(executors.submit(new Hoge("昨日は曇りでした")));
            for(Future<?> f: futures){
                f.get();
            }
        } finally {
            // Fix: release the pool threads (same leak as executors()).
            executors.shutdown();
        }
    }
}
nowelium/jna-libmecab
test/org/chasen/mecab/wrapper/NodeIteratorTest.java
Java
bsd-3-clause
3,838
#include <stdio.h> #include <stdlib.h> #include <string.h> #include <time.h> #include <iostream> #include <string> #include "parser.h" #include "CLI/wrapper.h" #include "Libraries/linenoise.h" #include "CLI/interface.h" #define HIST_FILENAME ".polyBobHistory" int main(int argc, char **argv) { char* line; unsigned int promptNb = 1; char promptMsg[100]; srand(time(NULL)); printLogo(); /* Set the completion callback. This will be called every time the * user uses the <tab> key. */ linenoiseSetCompletionCallback(completion); /* Load history from file.*/ linenoiseHistoryLoad(HIST_FILENAME); /* Load the history at startup */ snprintf(promptMsg, 100, "%s[%d]: ", "\033[0m", promptNb); while((line = linenoise(promptMsg)) != NULL) { linenoiseHistoryAdd(line); /* Add to the history. */ linenoiseHistorySave(HIST_FILENAME); /* Save the history on disk. */ /* Do something with the string. */ rmSuperscript(line); if(line[0] == '/') parseCommand(&(line[1])); else if(!strcmp(line, "exit") || !strcmp(line, "quit") || (line[1] == 0 && (line[0] == 'e' || line[0] == 'q'))) break; else if(line[0] != '\0') { simpleParserAPI(line); } snprintf(promptMsg, 100, "[%d]: ", ++promptNb); } finalProcessing(); return 0; }
Taiki-San/Polybob
entrypoint.cpp
C++
bsd-3-clause
1,286
# -*- coding: utf-8 -*-

# NOTE(review): this is a web2py controller. Names such as db, auth,
# request, session, T, A, URL, SQLFORM, redirect, MARKMIN, keystore_read,
# get_user_email, is_user_admin and the represent_* helpers are injected
# by the framework / model files — they are not defined here.

import access
import util


@auth.requires_login()
def index():
    """Produces a list of the feedback obtained for a given venue,
    or for all venues."""
    venue_id = request.args(0)
    # 'all' lists every submission of the current user; otherwise
    # restrict to the requested venue.
    if venue_id == 'all':
        q = (db.submission.user == get_user_email())
    else:
        q = ((db.submission.user == get_user_email())
             & (db.submission.venue_id == venue_id))
    # Render the id column as a "View" button linking to the user's own
    # submission page.
    db.submission.id.represent = lambda x, r: A(T('View'), _class='btn',
        _href=URL('submission', 'view_own_submission', args=['v', r.id]))
    db.submission.id.label = T('Submission')
    db.submission.id.readable = True
    db.submission.venue_id.readable = True
    grid = SQLFORM.grid(q,
        fields=[db.submission.id, db.submission.venue_id,
                db.submission.date_created, db.submission.date_updated, ],
        csv=False, details=False, create=False, editable=False, deletable=False,
        args=request.args[:1],
        maxtextlength=24,
        )
    return dict(grid=grid)


@auth.requires_login()
def view_feedback():
    """Shows detailed feedback for a user in a venue.
    This controller accepts various types of arguments:
    * 's', submission_id
    * 'u', venue_id, username
    * 'v', venue_id (in which case, shows own submission to that venue)
    """
    if len(request.args) == 0:
        redirect(URL('default', 'index'))
    if request.args(0) == 's':
        # submission_id: resolve the submission, then its venue and owner.
        n_args = 2
        subm = db.submission(request.args(1)) or redirect(URL('default', 'index'))
        c = db.venue(subm.venue_id) or redirect(URL('default', 'index'))
        username = subm.user
    elif request.args(0) == 'v':
        # venue_id: current user's own submission in that venue.
        n_args = 2
        c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
        username = get_user_email()
        subm = db((db.submission.user == username)
                  & (db.submission.venue_id == c.id)).select().first()
    else:
        # venue_id, username: someone else's submission in the venue.
        n_args = 3
        c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
        username = request.args(2) or redirect(URL('default', 'index'))
        subm = db((db.submission.user == username)
                  & (db.submission.venue_id == c.id)).select().first()
    # Checks permissions.
    props = db(db.user_properties.user == get_user_email()).select().first()
    if props == None:
        session.flash = T('Not authorized.')
        redirect(URL('default', 'index'))
    is_author = (username == get_user_email())
    # Authors may always see their own feedback; others need the
    # access-layer permission.
    can_view_feedback = access.can_view_feedback(c, props) or is_author
    if (not can_view_feedback):
        session.flash = T('Not authorized.')
        redirect(URL('default', 'index'))
    # NOTE(review): datetime is not imported in this file; presumably a
    # model file does `from datetime import datetime` — confirm.
    if not (access.can_view_feedback(c, props)
            or datetime.utcnow() > c.rate_close_date):
        session.flash = T('The ratings are not yet available.')
        redirect(URL('feedback', 'index', args=['all']))
    # Produces the link to edit the feedback.
    edit_feedback_link = None
    if subm is not None and access.can_observe(c, props):
        edit_feedback_link = A(T('Edit feedback'), _class='btn',
            _href=URL('submission', 'edit_feedback', args=[subm.id]))
    # Produces the download link; authors and managers use different
    # download endpoints.
    download_link = None
    if subm is not None and c.allow_file_upload and subm.content is not None:
        if is_author:
            download_link = A(T('Download'), _class='btn',
                _href=URL('submission', 'download_author', args=[subm.id, subm.content]))
        else:
            download_link = A(T('Download'), _class='btn',
                _href=URL('submission', 'download_manager', args=[subm.id, subm.content]))
    venue_link = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
    # Submission link.
    subm_link = None
    if subm is not None and c.allow_link_submission:
        subm_link = A(subm.link, _href=subm.link)
    # Submission content and feedback, rendered as MARKMIN markup when
    # present and non-empty.
    subm_comment = None
    subm_feedback = None
    if subm is not None:
        raw_subm_comment = keystore_read(subm.comment)
        if raw_subm_comment is not None and len(raw_subm_comment) > 0:
            subm_comment = MARKMIN(keystore_read(subm.comment))
        raw_feedback = keystore_read(subm.feedback)
        if raw_feedback is not None and len(raw_feedback) > 0:
            subm_feedback = MARKMIN(raw_feedback)
    # Display settings.
    db.submission.percentile.readable = True
    db.submission.comment.readable = True
    db.submission.feedback.readable = True
    # Observers additionally see quality/error estimates.
    if access.can_observe(c, props):
        db.submission.quality.readable = True
        db.submission.error.readable = True
    # Reads the grade information (only once grades are released).
    submission_grade = submission_percentile = None
    review_grade = review_percentile = user_reputation = None
    final_grade = final_percentile = None
    assigned_grade = None
    if c.grades_released:
        grade_info = db((db.grades.user == username)
                        & (db.grades.venue_id == c.id)).select().first()
        if grade_info is not None:
            submission_grade = represent_quality(grade_info.submission_grade, None)
            submission_percentile = represent_percentage(grade_info.submission_percentile, None)
            review_grade = represent_quality_10(grade_info.accuracy, None)
            review_percentile = represent_percentage(grade_info.accuracy_percentile, None)
            user_reputation = represent_01_as_percentage(grade_info.reputation, None)
            final_grade = represent_quality(grade_info.grade, None)
            final_percentile = represent_percentage(grade_info.percentile, None)
            assigned_grade = represent_quality(grade_info.assigned_grade, None)
    # Makes a grid of comments.
    db.task.submission_name.readable = False
    db.task.assigned_date.readable = False
    db.task.completed_date.readable = False
    db.task.rejected.readable = True
    db.task.helpfulness.readable = db.task.helpfulness.writable = True
    # Prevent editing the comments; the only thing editable should be
    # the "is bogus" field.
    db.task.comments.writable = False
    db.task.comments.readable = True
    ranking_link = None
    if access.can_observe(c, props):
        # Observer view: show reviewer identities and comparison details.
        db.task.user.readable = True
        db.task.completed_date.readable = True
        links = [
            dict(header=T('Review details'), body= lambda r:
                A(T('View'), _class='btn',
                  _href=URL('ranking', 'view_comparison', args=[r.id]))),
            ]
        details = False
        if subm is not None:
            ranking_link = A(T('details'),
                _href=URL('ranking', 'view_comparisons_given_submission', args=[subm.id]))
        reviews_link = A(T('details'),
            _href=URL('ranking', 'view_comparisons_given_user', args=[username, c.id]))
        db.task.user.represent = lambda v, r: A(v,
            _href=URL('ranking', 'view_comparisons_given_user',
                      args=[v, c.id], user_signature=True))
    else:
        # Author view: anonymous reviews plus a "give feedback" action.
        user_reputation = None
        links = [
            dict(header=T('Review feedback'), body = lambda r:
                A(T('Give feedback'), _class='btn',
                  _href=URL('feedback', 'reply_to_review', args=[r.id],
                            user_signature=True))),
            ]
        details = False
        ranking_link = None
        reviews_link = None
    # Only completed review tasks are listed; with no submission the
    # query is made deliberately empty (id == -1 never matches).
    if subm is not None:
        q = ((db.task.submission_id == subm.id)
             & (db.task.is_completed == True))
        # q = (db.task.submission_id == subm.id)
    else:
        q = (db.task.id == -1)
    grid = SQLFORM.grid(q,
        fields=[db.task.id, db.task.user, db.task.rejected,
                db.task.comments, db.task.helpfulness, ],
        details = details,
        csv=False, create=False, editable=False, deletable=False, searchable=False,
        links=links,
        args=request.args[:n_args],
        maxtextlength=24,
        )
    return dict(subm=subm, download_link=download_link, subm_link=subm_link,
                username=username,
                subm_comment=subm_comment, subm_feedback=subm_feedback,
                edit_feedback_link=edit_feedback_link,
                is_admin=is_user_admin(),
                submission_grade=submission_grade,
                submission_percentile=submission_percentile,
                review_grade=review_grade, review_percentile=review_percentile,
                user_reputation=user_reputation,
                final_grade=final_grade, final_percentile=final_percentile,
                assigned_grade=assigned_grade,
                venue_link=venue_link,
                grid=grid, ranking_link=ranking_link,
                reviews_link=reviews_link)
@auth.requires_signature()
def reply_to_review():
    """Lets the author of a submission rate and reply to one review task.

    Expects the task id as request.args(0); the URL must carry a valid
    user signature (see @auth.requires_signature).
    """
    t = db.task(request.args(0)) or redirect(URL('default', 'index'))
    # Hide everything except the two fields the author may set:
    # helpfulness rating and the textual feedback to the reviewer.
    db.task.submission_name.readable = False
    db.task.assigned_date.readable = False
    db.task.completed_date.readable = False
    db.task.comments.readable = False
    db.task.helpfulness.readable = db.task.helpfulness.writable = True
    db.task.feedback.readable = db.task.feedback.writable = True
    form = SQLFORM(db.task, record=t)
    # Pre-fill the form with the feedback text stored in the keystore.
    form.vars.feedback = keystore_read(t.feedback)
    if form.process(onvalidation=validate_review_feedback(t)).accepted:
        session.flash = T('Updated.')
        redirect(URL('feedback', 'view_feedback', args=['s', t.submission_id]))
    link_to_submission = A(T('View submission'),
        _href=URL('submission', 'view_own_submission', args=['v', t.submission_id]))
    review_comments = MARKMIN(keystore_read(t.comments))
    return dict(form=form, link_to_submission=link_to_submission,
                review_comments=review_comments)


def validate_review_feedback(t):
    """Returns an onvalidation callback that persists the submitted
    feedback text to the keystore and replaces the form value with the
    resulting keystore id (the task row stores the id, not the text)."""
    def f(form):
        if not form.errors:
            feedback_id = keystore_update(t.feedback, form.vars.feedback)
            form.vars.feedback = feedback_id
    return f


@auth.requires_login()
def view_my_reviews():
    """This controller displays the reviews a user has written for a
    venue, along with the feedback they received."""
    c = db.venue(request.args(0)) or redirect(URL('rating', 'review_index'))
    link_to_venue = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
    link_to_eval = A(T('My evaluation in this venue'), _class='btn',
        _href=URL('feedback', 'view_feedback', args=['v', c.id]))
    # Only this user's tasks in this venue.
    q = ((db.task.user == get_user_email()) & (db.task.venue_id == c.id))
    db.task.rejected.readable = True
    db.task.helpfulness.readable = True
    db.task.comments.readable = True
    db.task.feedback.readable = True
    # To prevent chopping
    db.task.submission_name.represent = represent_text_field
    grid = SQLFORM.grid(q,
        fields=[db.task.submission_name, db.task.rejected, db.task.helpfulness],
        details=True,
        editable=False, deletable=False, create=False, searchable=False,
        csv=False,
        args=request.args[:1],
        maxtextlength=24,
        )
    return dict(grid=grid, link_to_venue=link_to_venue, link_to_eval=link_to_eval)
lucadealfaro/crowdranker
controllers/feedback.py
Python
bsd-3-clause
10,966
<?php

use yii\helpers\Html;
use yii\grid\GridView;

/* @var $this yii\web\View */
/* @var $searchModel app\models\search\UserSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */

$this->title = 'Users';
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="user-index">

    <h1><?= Html::encode($this->title) ?></h1>
    <?php // echo $this->render('_search', ['model' => $searchModel]); ?>

    <p>
        <?= Html::a('Create User', ['create'], ['class' => 'btn btn-success']) ?>
    </p>

    <?= GridView::widget([
        'dataProvider' => $dataProvider,
        'filterModel' => $searchModel,
        'columns' => [
            ['class' => 'yii\grid\SerialColumn'],

            'id',
            'username',
            // SECURITY: the 'password' column was removed from this grid.
            // Rendering the password attribute (even a hash) in a user
            // listing leaks credentials to anyone who can view the page.
            'fullname',
            'is_seller',
            // 'lat',
            // 'lng',
            // 'category_id',
            // 'description:ntext',

            ['class' => 'yii\grid\ActionColumn'],
        ],
    ]); ?>

</div>
vincentsthe/market-on
views/user/index.php
PHP
bsd-3-clause
999
from __future__ import print_function import shutil import os, sys import time import logging from .loaders import PythonLoader, YAMLLoader from .bundle import get_all_bundle_files from .exceptions import BuildError from .updater import TimestampUpdater from .merge import MemoryHunk from .version import get_manifest from .cache import FilesystemCache from .utils import set, StringIO __all__ = ('CommandError', 'CommandLineEnvironment', 'main') # logging has WARNING as default level, for the CLI we want INFO. Set this # as early as possible, so that user customizations will not be overwritten. logging.getLogger('webassets.script').setLevel(logging.INFO) class CommandError(Exception): pass class Command(object): """Base-class for a command used by :class:`CommandLineEnvironment`. Each command being a class opens up certain possibilities with respect to subclassing and customizing the default CLI. """ def __init__(self, cmd_env): self.cmd = cmd_env def __getattr__(self, name): # Make stuff from cmd environment easier to access return getattr(self.cmd, name) def __call__(self, *args, **kwargs): raise NotImplementedError() class BuildCommand(Command): def __call__(self, bundles=None, output=None, directory=None, no_cache=None, manifest=None, production=None): """Build assets. ``bundles`` A list of bundle names. If given, only this list of bundles should be built. ``output`` List of (bundle, filename) 2-tuples. If given, only these bundles will be built, using the custom output filenames. Cannot be used with ``bundles``. ``directory`` Custom output directory to use for the bundles. The original basenames defined in the bundle ``output`` attribute will be used. If the ``output`` of the bundles are pointing to different directories, they will be offset by their common prefix. Cannot be used with ``output``. ``no_cache`` If set, a cache (if one is configured) will not be used. 
``manifest`` If set, the given manifest instance will be used, instead of any that might have been configured in the Environment. The value passed will be resolved through ``get_manifest()``. If this fails, a file-based manifest will be used using the given value as the filename. ``production`` If set to ``True``, then :attr:`Environment.debug`` will forcibly be disabled (set to ``False``) during the build. """ # Validate arguments if bundles and output: raise CommandError( 'When specifying explicit output filenames you must ' 'do so for all bundles you want to build.') if directory and output: raise CommandError('A custom output directory cannot be ' 'combined with explicit output filenames ' 'for individual bundles.') if production: # TODO: Reset again (refactor commands to be classes) self.environment.debug = False # TODO: Oh how nice it would be to use the future options stack. if manifest is not None: try: manifest = get_manifest(manifest, env=self.environment) except ValueError: manifest = get_manifest( # abspath() is important, or this will be considered # relative to Environment.directory. "file:%s" % os.path.abspath(manifest), env=self.environment) self.environment.manifest = manifest # Use output as a dict. if output: output = dict(output) # Validate bundle names bundle_names = bundles if bundles else (output.keys() if output else []) for name in bundle_names: if not name in self.environment: raise CommandError( 'I do not know a bundle name named "%s".' % name) # Make a list of bundles to build, and the filename to write to. if bundle_names: # TODO: It's not ok to use an internal property here. bundles = [(n,b) for n, b in self.environment._named_bundles.items() if n in bundle_names] else: # Includes unnamed bundles as well. bundles = [(None, b) for b in self.environment] # Determine common prefix for use with ``directory`` option. 
if directory: prefix = os.path.commonprefix( [os.path.normpath(b.resolve_output()) for _, b in bundles if b.output]) # dirname() gives the right value for a single file. prefix = os.path.dirname(prefix) to_build = [] for name, bundle in bundles: # TODO: We really should support this. This error here # is just in place of a less understandable error that would # otherwise occur. if bundle.is_container and directory: raise CommandError( 'A custom output directory cannot currently be ' 'used with container bundles.') # Determine which filename to use, if not the default. overwrite_filename = None if output: overwrite_filename = output[name] elif directory: offset = os.path.normpath( bundle.resolve_output())[len(prefix)+1:] overwrite_filename = os.path.join(directory, offset) to_build.append((bundle, overwrite_filename, name,)) # Build. built = [] for bundle, overwrite_filename, name in to_build: if name: # A name is not necessary available of the bundle was # registered without one. self.log.info("Building bundle: %s (to %s)" % ( name, overwrite_filename or bundle.output)) else: self.log.info("Building bundle: %s" % bundle.output) try: if not overwrite_filename: with bundle.bind(self.environment): bundle.build(force=True, disable_cache=no_cache) else: # TODO: Rethink how we deal with container bundles here. # As it currently stands, we write all child bundles # to the target output, merged (which is also why we # create and force writing to a StringIO instead of just # using the ``Hunk`` objects that build() would return # anyway. output = StringIO() with bundle.bind(self.environment): bundle.build(force=True, output=output, disable_cache=no_cache) if directory: # Only auto-create directories in this mode. 
output_dir = os.path.dirname(overwrite_filename) if not os.path.exists(output_dir): os.makedirs(output_dir) MemoryHunk(output.getvalue()).save(overwrite_filename) built.append(bundle) except BuildError as e: self.log.error("Failed, error was: %s" % e) if len(built): self.event_handlers['post_build']() if len(built) != len(to_build): return 2 class WatchCommand(Command): def __call__(self, loop=None): """Watch assets for changes. ``loop`` A callback, taking no arguments, to be called once every loop iteration. Can be useful to integrate the command with other code. If not specified, the loop wil call ``time.sleep()``. """ # TODO: This should probably also restart when the code changes. mtimes = {} try: # Before starting to watch for changes, also recognize changes # made while we did not run, and apply those immediately. for bundle in self.environment: print('Bringing up to date: %s' % bundle.output) bundle.build(force=False) self.log.info("Watching %d bundles for changes..." % len(self.environment)) while True: changed_bundles = self.check_for_changes(mtimes) built = [] for bundle in changed_bundles: print("Building bundle: %s ..." % bundle.output, end=' ') sys.stdout.flush() try: bundle.build(force=True) built.append(bundle) except BuildError as e: print("") print("Failed: %s" % e) else: print("done") if len(built): self.event_handlers['post_build']() do_end = loop() if loop else time.sleep(0.1) if do_end: break except KeyboardInterrupt: pass def check_for_changes(self, mtimes): # Do not update original mtimes dict right away, so that we detect # all bundle changes if a file is in multiple bundles. _new_mtimes = mtimes.copy() changed_bundles = set() # TODO: An optimization was lost here, skipping a bundle once # a single file has been found to have changed. Bring back. 
for filename, bundles_to_update in self.yield_files_to_watch(): stat = os.stat(filename) mtime = stat.st_mtime if sys.platform == "win32": mtime -= stat.st_ctime if mtimes.get(filename, mtime) != mtime: if callable(bundles_to_update): # Hook for when file has changed try: bundles_to_update = bundles_to_update() except EnvironmentError: # EnvironmentError is what the hooks is allowed to # raise for a temporary problem, like an invalid config import traceback traceback.print_exc() # Don't update anything, wait for another change bundles_to_update = set() if bundles_to_update is True: # Indicates all bundles should be rebuilt for the change bundles_to_update = set(self.environment) changed_bundles |= bundles_to_update _new_mtimes[filename] = mtime _new_mtimes[filename] = mtime mtimes.update(_new_mtimes) return changed_bundles def yield_files_to_watch(self): for bundle in self.environment: for filename in get_all_bundle_files(bundle): yield filename, set([bundle]) class CleanCommand(Command): def __call__(self): """Delete generated assets. """ self.log.info('Cleaning generated assets...') for bundle in self.environment: if not bundle.output: continue file_path = bundle.resolve_output(self.environment) if os.path.exists(file_path): os.unlink(file_path) self.log.info("Deleted asset: %s" % bundle.output) if isinstance(self.environment.cache, FilesystemCache): shutil.rmtree(self.environment.cache.directory) class CheckCommand(Command): def __call__(self): """Check to see if assets need to be rebuilt. A non-zero exit status will be returned if any of the input files are newer (based on mtime) than their output file. This is intended to be used in pre-commit hooks. 
""" needsupdate = False updater = self.environment.updater if not updater: self.log.debug('no updater configured, using TimestampUpdater') updater = TimestampUpdater() for bundle in self.environment: self.log.info('Checking asset: %s', bundle.output) if updater.needs_rebuild(bundle, self.environment): self.log.info(' needs update') needsupdate = True if needsupdate: sys.exit(-1) class CommandLineEnvironment(object): """Implements the core functionality for a command line frontend to ``webassets``, abstracted in a way to allow frameworks to integrate the functionality into their own tools, for example, as a Django management command, or a command for ``Flask-Script``. """ def __init__(self, env, log, post_build=None, commands=None): self.environment = env self.log = log self.event_handlers = dict(post_build=lambda: True) if callable(post_build): self.event_handlers['post_build'] = post_build # Instantiate each command command_def = self.DefaultCommands.copy() command_def.update(commands or {}) self.commands = {} for name, construct in command_def.items(): if not construct: continue if not isinstance(construct, (list, tuple)): construct = [construct, (), {}] self.commands[name] = construct[0]( self, *construct[1], **construct[2]) def __getattr__(self, item): # Allow method-like access to commands. if item in self.commands: return self.commands[item] raise AttributeError(item) def invoke(self, command, args): """Invoke ``command``, or throw a CommandError. This is essentially a simple validation mechanism. Feel free to call the individual command methods manually. """ try: function = self.commands[command] except KeyError as e: raise CommandError('unknown command: %s' % e) else: return function(**args) # List of commands installed DefaultCommands = { 'build': BuildCommand, 'watch': WatchCommand, 'clean': CleanCommand, 'check': CheckCommand } class GenericArgparseImplementation(object): """Generic command line utility to interact with an webassets environment. 
This is effectively a reference implementation of a command line utility based on the ``CommandLineEnvironment`` class. Implementers may find it feasible to simple base their own command line utility on this, rather than implementing something custom on top of ``CommandLineEnvironment``. In fact, if that is possible, you are encouraged to do so for greater consistency across implementations. """ class WatchCommand(WatchCommand): """Extended watch command that also looks at the config file itself.""" def __init__(self, cmd_env, argparse_ns): WatchCommand.__init__(self, cmd_env) self.ns = argparse_ns def yield_files_to_watch(self): for result in WatchCommand.yield_files_to_watch(self): yield result # If the config changes, rebuild all bundles if getattr(self.ns, 'config', None): yield self.ns.config, self.reload_config def reload_config(self): try: self.cmd.environment = YAMLLoader(self.ns.config).load_environment() except Exception as e: raise EnvironmentError(e) return True def __init__(self, env=None, log=None, prog=None, no_global_options=False): try: import argparse except ImportError: raise RuntimeError( 'The webassets command line now requires the ' '"argparse" library on Python versions <= 2.6.') else: self.argparse = argparse self.env = env self.log = log self._construct_parser(prog, no_global_options) def _construct_parser(self, prog=None, no_global_options=False): self.parser = parser = self.argparse.ArgumentParser( description="Manage assets.", prog=prog) if not no_global_options: # Start with the base arguments that are valid for any command. # XXX: Add those to the subparser? 
parser.add_argument("-v", dest="verbose", action="store_true", help="be verbose") parser.add_argument("-q", action="store_true", dest="quiet", help="be quiet") if self.env is None: loadenv = parser.add_mutually_exclusive_group() loadenv.add_argument("-c", "--config", dest="config", help="read environment from a YAML file") loadenv.add_argument("-m", "--module", dest="module", help="read environment from a Python module") # Add subparsers. subparsers = parser.add_subparsers(dest='command') for command in CommandLineEnvironment.DefaultCommands.keys(): command_parser = subparsers.add_parser(command) maker = getattr(self, 'make_%s_parser' % command, False) if maker: maker(command_parser) @staticmethod def make_build_parser(parser): parser.add_argument( 'bundles', nargs='*', metavar='BUNDLE', help='Optional bundle names to process. If none are ' 'specified, then all known bundles will be built.') parser.add_argument( '--output', '-o', nargs=2, action='append', metavar=('BUNDLE', 'FILE'), help='Build the given bundle, and use a custom output ' 'file. Can be given multiple times.') parser.add_argument( '--directory', '-d', help='Write built files to this directory, using the ' 'basename defined by the bundle. Will offset ' 'the original bundle output paths on their common ' 'prefix. Cannot be used with --output.') parser.add_argument( '--no-cache', action='store_true', help='Do not use a cache that might be configured.') parser.add_argument( '--manifest', help='Write a manifest to the given file. Also supports ' 'the id:arg format, if you want to use a different ' 'manifest implementation.') parser.add_argument( '--production', action='store_true', help='Forcably turn off debug mode for the build. This ' 'only has an effect if debug is set to "merge".') def _setup_logging(self, ns): if self.log: log = self.log else: log = logging.getLogger('webassets.script') if not log.handlers: # In theory, this could run multiple times (e.g. 
tests) handler = logging.StreamHandler() log.addHandler(handler) # Note that setting the level filter at the handler level is # better than the logger level, since this is "our" handler, # we create it, for the purposes of having a default output. # The logger itself the user may be modifying. handler.setLevel(logging.DEBUG if ns.verbose else ( logging.WARNING if ns.quiet else logging.INFO)) return log def _setup_assets_env(self, ns, log): env = self.env if env is None: assert not (ns.module and ns.config) if ns.module: env = PythonLoader(ns.module).load_environment() if ns.config: env = YAMLLoader(ns.config).load_environment() return env def _setup_cmd_env(self, assets_env, log, ns): return CommandLineEnvironment(assets_env, log, commands={ 'watch': (GenericArgparseImplementation.WatchCommand, (ns,), {}) }) def _prepare_command_args(self, ns): # Prepare a dict of arguments cleaned of values that are not # command-specific, and which the command method would not accept. args = vars(ns).copy() for action in self.parser._actions: dest = action.dest if dest in args: del args[dest] return args def run_with_ns(self, ns): log = self._setup_logging(ns) env = self._setup_assets_env(ns, log) if env is None: raise CommandError( "Error: No environment given or found. Maybe use -m?") cmd = self._setup_cmd_env(env, log, ns) # Run the selected command args = self._prepare_command_args(ns) return cmd.invoke(ns.command, args) def run_with_argv(self, argv): try: ns = self.parser.parse_args(argv) except SystemExit as e: # We do not want the main() function to exit the program. # See run() instead. return e.args[0] return self.run_with_ns(ns) def main(self, argv): """Parse the given command line. The commandline is expected to NOT including what would be sys.argv[0]. """ try: return self.run_with_argv(argv) except CommandError as e: print(e) return 1 def main(argv, env=None): """Execute the generic version of the command line interface. 
You only need to work directly with ``GenericArgparseImplementation`` if you desire to customize things. If no environment is given, additional arguments will be supported to allow the user to specify/construct the environment on the command line. """ return GenericArgparseImplementation(env).main(argv) def run(): """Runs the command line interface via ``main``, then exits the process with a proper return code.""" sys.exit(main(sys.argv[1:]) or 0) if __name__ == '__main__': run()
gi0baro/weppy-assets
weppy_assets/webassets/script.py
Python
bsd-3-clause
22,478
/* Copyright (c) 2016, Alexander Entinger / LXRobotics
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * * Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright notice,
 *   this list of conditions and the following disclaimer in the documentation
 *   and/or other materials provided with the distribution.
 *
 * * Neither the name of motor-controller-highpower-motorshield nor the names of its
 *   contributors may be used to endorse or promote products derived from
 *   this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "ioentity.h"

#include <assert.h>

#include <algorithm>

namespace arduinoio {

/**
 * @brief Constructor
 * @param serial shared serial connection handed to this IO entity; it is
 *        stored in m_serial. The entity starts out unconfigured
 *        (m_isConfigured = false).
 */
ioentity::ioentity(boost::shared_ptr<serial> const &serial) : m_serial(serial), m_isConfigured(false) {

}

/**
 * @brief Destructor
 * Clears the pin vector (m_pinVect — declared in the header, presumably
 * the pins claimed by this entity; confirm against ioentity.h).
 */
ioentity::~ioentity() {
  m_pinVect.clear();
}

} // end of namespace arduinoio
lxrobotics/arduinoio
framework/ioentity.cpp
C++
bsd-3-clause
1,958
#!/usr/bin/env python # # Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from google.net.proto import ProtocolBuffer import array import dummy_thread as thread __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit unusednames=printElemNumber,debug_strs no-special""" if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'): _extension_runtime = True _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage else: _extension_runtime = False _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage from google.appengine.api.api_base_pb import * import google.appengine.api.api_base_pb from google.appengine.datastore.action_pb import * import google.appengine.datastore.action_pb from google.appengine.datastore.entity_pb import * import google.appengine.datastore.entity_pb from google.appengine.datastore.snapshot_pb import * import google.appengine.datastore.snapshot_pb class InternalHeader(ProtocolBuffer.ProtocolMessage): has_requesting_app_id_ = 0 requesting_app_id_ = "" has_requesting_project_id_ = 0 requesting_project_id_ = "" has_requesting_version_id_ = 0 requesting_version_id_ = "" has_api_settings_ = 0 api_settings_ = "" def __init__(self, contents=None): if contents is not None: self.MergeFromString(contents) def requesting_app_id(self): return self.requesting_app_id_ def set_requesting_app_id(self, x): self.has_requesting_app_id_ = 1 self.requesting_app_id_ = x def clear_requesting_app_id(self): if self.has_requesting_app_id_: 
self.has_requesting_app_id_ = 0 self.requesting_app_id_ = "" def has_requesting_app_id(self): return self.has_requesting_app_id_ def requesting_project_id(self): return self.requesting_project_id_ def set_requesting_project_id(self, x): self.has_requesting_project_id_ = 1 self.requesting_project_id_ = x def clear_requesting_project_id(self): if self.has_requesting_project_id_: self.has_requesting_project_id_ = 0 self.requesting_project_id_ = "" def has_requesting_project_id(self): return self.has_requesting_project_id_ def requesting_version_id(self): return self.requesting_version_id_ def set_requesting_version_id(self, x): self.has_requesting_version_id_ = 1 self.requesting_version_id_ = x def clear_requesting_version_id(self): if self.has_requesting_version_id_: self.has_requesting_version_id_ = 0 self.requesting_version_id_ = "" def has_requesting_version_id(self): return self.has_requesting_version_id_ def api_settings(self): return self.api_settings_ def set_api_settings(self, x): self.has_api_settings_ = 1 self.api_settings_ = x def clear_api_settings(self): if self.has_api_settings_: self.has_api_settings_ = 0 self.api_settings_ = "" def has_api_settings(self): return self.has_api_settings_ def MergeFrom(self, x): assert x is not self if (x.has_requesting_app_id()): self.set_requesting_app_id(x.requesting_app_id()) if (x.has_requesting_project_id()): self.set_requesting_project_id(x.requesting_project_id()) if (x.has_requesting_version_id()): self.set_requesting_version_id(x.requesting_version_id()) if (x.has_api_settings()): self.set_api_settings(x.api_settings()) def Equals(self, x): if x is self: return 1 if self.has_requesting_app_id_ != x.has_requesting_app_id_: return 0 if self.has_requesting_app_id_ and self.requesting_app_id_ != x.requesting_app_id_: return 0 if self.has_requesting_project_id_ != x.has_requesting_project_id_: return 0 if self.has_requesting_project_id_ and self.requesting_project_id_ != x.requesting_project_id_: return 0 if 
self.has_requesting_version_id_ != x.has_requesting_version_id_: return 0 if self.has_requesting_version_id_ and self.requesting_version_id_ != x.requesting_version_id_: return 0 if self.has_api_settings_ != x.has_api_settings_: return 0 if self.has_api_settings_ and self.api_settings_ != x.api_settings_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 return initialized def ByteSize(self): n = 0 if (self.has_requesting_app_id_): n += 1 + self.lengthString(len(self.requesting_app_id_)) if (self.has_requesting_project_id_): n += 1 + self.lengthString(len(self.requesting_project_id_)) if (self.has_requesting_version_id_): n += 1 + self.lengthString(len(self.requesting_version_id_)) if (self.has_api_settings_): n += 1 + self.lengthString(len(self.api_settings_)) return n def ByteSizePartial(self): n = 0 if (self.has_requesting_app_id_): n += 1 + self.lengthString(len(self.requesting_app_id_)) if (self.has_requesting_project_id_): n += 1 + self.lengthString(len(self.requesting_project_id_)) if (self.has_requesting_version_id_): n += 1 + self.lengthString(len(self.requesting_version_id_)) if (self.has_api_settings_): n += 1 + self.lengthString(len(self.api_settings_)) return n def Clear(self): self.clear_requesting_app_id() self.clear_requesting_project_id() self.clear_requesting_version_id() self.clear_api_settings() def OutputUnchecked(self, out): if (self.has_requesting_app_id_): out.putVarInt32(18) out.putPrefixedString(self.requesting_app_id_) if (self.has_api_settings_): out.putVarInt32(26) out.putPrefixedString(self.api_settings_) if (self.has_requesting_project_id_): out.putVarInt32(34) out.putPrefixedString(self.requesting_project_id_) if (self.has_requesting_version_id_): out.putVarInt32(42) out.putPrefixedString(self.requesting_version_id_) def OutputPartial(self, out): if (self.has_requesting_app_id_): out.putVarInt32(18) out.putPrefixedString(self.requesting_app_id_) if (self.has_api_settings_): out.putVarInt32(26) 
out.putPrefixedString(self.api_settings_) if (self.has_requesting_project_id_): out.putVarInt32(34) out.putPrefixedString(self.requesting_project_id_) if (self.has_requesting_version_id_): out.putVarInt32(42) out.putPrefixedString(self.requesting_version_id_) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 18: self.set_requesting_app_id(d.getPrefixedString()) continue if tt == 26: self.set_api_settings(d.getPrefixedString()) continue if tt == 34: self.set_requesting_project_id(d.getPrefixedString()) continue if tt == 42: self.set_requesting_version_id(d.getPrefixedString()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_requesting_app_id_: res+=prefix+("requesting_app_id: %s\n" % self.DebugFormatString(self.requesting_app_id_)) if self.has_requesting_project_id_: res+=prefix+("requesting_project_id: %s\n" % self.DebugFormatString(self.requesting_project_id_)) if self.has_requesting_version_id_: res+=prefix+("requesting_version_id: %s\n" % self.DebugFormatString(self.requesting_version_id_)) if self.has_api_settings_: res+=prefix+("api_settings: %s\n" % self.DebugFormatString(self.api_settings_)) return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) krequesting_app_id = 2 krequesting_project_id = 4 krequesting_version_id = 5 kapi_settings = 3 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 2: "requesting_app_id", 3: "api_settings", 4: "requesting_project_id", 5: "requesting_version_id", }, 5) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 2: ProtocolBuffer.Encoder.STRING, 3: ProtocolBuffer.Encoder.STRING, 4: ProtocolBuffer.Encoder.STRING, 5: ProtocolBuffer.Encoder.STRING, }, 5, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.InternalHeader' class 
Transaction(ProtocolBuffer.ProtocolMessage): has_header_ = 0 header_ = None has_handle_ = 0 handle_ = 0 has_app_ = 0 app_ = "" has_mark_changes_ = 0 mark_changes_ = 0 def __init__(self, contents=None): self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def header(self): if self.header_ is None: self.lazy_init_lock_.acquire() try: if self.header_ is None: self.header_ = InternalHeader() finally: self.lazy_init_lock_.release() return self.header_ def mutable_header(self): self.has_header_ = 1; return self.header() def clear_header(self): if self.has_header_: self.has_header_ = 0; if self.header_ is not None: self.header_.Clear() def has_header(self): return self.has_header_ def handle(self): return self.handle_ def set_handle(self, x): self.has_handle_ = 1 self.handle_ = x def clear_handle(self): if self.has_handle_: self.has_handle_ = 0 self.handle_ = 0 def has_handle(self): return self.has_handle_ def app(self): return self.app_ def set_app(self, x): self.has_app_ = 1 self.app_ = x def clear_app(self): if self.has_app_: self.has_app_ = 0 self.app_ = "" def has_app(self): return self.has_app_ def mark_changes(self): return self.mark_changes_ def set_mark_changes(self, x): self.has_mark_changes_ = 1 self.mark_changes_ = x def clear_mark_changes(self): if self.has_mark_changes_: self.has_mark_changes_ = 0 self.mark_changes_ = 0 def has_mark_changes(self): return self.has_mark_changes_ def MergeFrom(self, x): assert x is not self if (x.has_header()): self.mutable_header().MergeFrom(x.header()) if (x.has_handle()): self.set_handle(x.handle()) if (x.has_app()): self.set_app(x.app()) if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes()) def Equals(self, x): if x is self: return 1 if self.has_header_ != x.has_header_: return 0 if self.has_header_ and self.header_ != x.header_: return 0 if self.has_handle_ != x.has_handle_: return 0 if self.has_handle_ and self.handle_ != x.handle_: return 0 if self.has_app_ != 
x.has_app_: return 0 if self.has_app_ and self.app_ != x.app_: return 0 if self.has_mark_changes_ != x.has_mark_changes_: return 0 if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0 if (not self.has_handle_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: handle not set.') if (not self.has_app_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: app not set.') return initialized def ByteSize(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize()) n += self.lengthString(len(self.app_)) if (self.has_mark_changes_): n += 2 return n + 10 def ByteSizePartial(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial()) if (self.has_handle_): n += 9 if (self.has_app_): n += 1 n += self.lengthString(len(self.app_)) if (self.has_mark_changes_): n += 2 return n def Clear(self): self.clear_header() self.clear_handle() self.clear_app() self.clear_mark_changes() def OutputUnchecked(self, out): out.putVarInt32(9) out.put64(self.handle_) out.putVarInt32(18) out.putPrefixedString(self.app_) if (self.has_mark_changes_): out.putVarInt32(24) out.putBoolean(self.mark_changes_) if (self.has_header_): out.putVarInt32(34) out.putVarInt32(self.header_.ByteSize()) self.header_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_handle_): out.putVarInt32(9) out.put64(self.handle_) if (self.has_app_): out.putVarInt32(18) out.putPrefixedString(self.app_) if (self.has_mark_changes_): out.putVarInt32(24) out.putBoolean(self.mark_changes_) if (self.has_header_): out.putVarInt32(34) out.putVarInt32(self.header_.ByteSizePartial()) self.header_.OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 9: self.set_handle(d.get64()) continue if tt == 18: 
self.set_app(d.getPrefixedString()) continue if tt == 24: self.set_mark_changes(d.getBoolean()) continue if tt == 34: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_header().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_header_: res+=prefix+"header <\n" res+=self.header_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_handle_: res+=prefix+("handle: %s\n" % self.DebugFormatFixed64(self.handle_)) if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_)) if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_)) return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kheader = 4 khandle = 1 kapp = 2 kmark_changes = 3 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "handle", 2: "app", 3: "mark_changes", 4: "header", }, 4) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.DOUBLE, 2: ProtocolBuffer.Encoder.STRING, 3: ProtocolBuffer.Encoder.NUMERIC, 4: ProtocolBuffer.Encoder.STRING, }, 4, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Transaction' class Query_Filter(ProtocolBuffer.ProtocolMessage): LESS_THAN = 1 LESS_THAN_OR_EQUAL = 2 GREATER_THAN = 3 GREATER_THAN_OR_EQUAL = 4 EQUAL = 5 IN = 6 EXISTS = 7 _Operator_NAMES = { 1: "LESS_THAN", 2: "LESS_THAN_OR_EQUAL", 3: "GREATER_THAN", 4: "GREATER_THAN_OR_EQUAL", 5: "EQUAL", 6: "IN", 7: "EXISTS", } def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "") Operator_Name = classmethod(Operator_Name) has_op_ = 0 op_ = 0 def __init__(self, contents=None): self.property_ = [] if contents is not None: self.MergeFromString(contents) def 
op(self): return self.op_ def set_op(self, x): self.has_op_ = 1 self.op_ = x def clear_op(self): if self.has_op_: self.has_op_ = 0 self.op_ = 0 def has_op(self): return self.has_op_ def property_size(self): return len(self.property_) def property_list(self): return self.property_ def property(self, i): return self.property_[i] def mutable_property(self, i): return self.property_[i] def add_property(self): x = Property() self.property_.append(x) return x def clear_property(self): self.property_ = [] def MergeFrom(self, x): assert x is not self if (x.has_op()): self.set_op(x.op()) for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i)) def Equals(self, x): if x is self: return 1 if self.has_op_ != x.has_op_: return 0 if self.has_op_ and self.op_ != x.op_: return 0 if len(self.property_) != len(x.property_): return 0 for e1, e2 in zip(self.property_, x.property_): if e1 != e2: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (not self.has_op_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: op not set.') for p in self.property_: if not p.IsInitialized(debug_strs): initialized=0 return initialized def ByteSize(self): n = 0 n += self.lengthVarInt64(self.op_) n += 1 * len(self.property_) for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize()) return n + 1 def ByteSizePartial(self): n = 0 if (self.has_op_): n += 1 n += self.lengthVarInt64(self.op_) n += 1 * len(self.property_) for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial()) return n def Clear(self): self.clear_op() self.clear_property() def OutputUnchecked(self, out): out.putVarInt32(48) out.putVarInt32(self.op_) for i in xrange(len(self.property_)): out.putVarInt32(114) out.putVarInt32(self.property_[i].ByteSize()) self.property_[i].OutputUnchecked(out) def OutputPartial(self, out): if (self.has_op_): out.putVarInt32(48) out.putVarInt32(self.op_) for i 
in xrange(len(self.property_)): out.putVarInt32(114) out.putVarInt32(self.property_[i].ByteSizePartial()) self.property_[i].OutputPartial(out) def TryMerge(self, d): while 1: tt = d.getVarInt32() if tt == 36: break if tt == 48: self.set_op(d.getVarInt32()) continue if tt == 114: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_property().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_)) cnt=0 for e in self.property_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("property%s <\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt+=1 return res class Query_Order(ProtocolBuffer.ProtocolMessage): ASCENDING = 1 DESCENDING = 2 _Direction_NAMES = { 1: "ASCENDING", 2: "DESCENDING", } def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "") Direction_Name = classmethod(Direction_Name) has_property_ = 0 property_ = "" has_direction_ = 0 direction_ = 1 def __init__(self, contents=None): if contents is not None: self.MergeFromString(contents) def property(self): return self.property_ def set_property(self, x): self.has_property_ = 1 self.property_ = x def clear_property(self): if self.has_property_: self.has_property_ = 0 self.property_ = "" def has_property(self): return self.has_property_ def direction(self): return self.direction_ def set_direction(self, x): self.has_direction_ = 1 self.direction_ = x def clear_direction(self): if self.has_direction_: self.has_direction_ = 0 self.direction_ = 1 def has_direction(self): return self.has_direction_ def MergeFrom(self, x): assert x is not self if (x.has_property()): self.set_property(x.property()) if (x.has_direction()): self.set_direction(x.direction()) def Equals(self, x): if x is self: return 1 if self.has_property_ != x.has_property_: 
return 0 if self.has_property_ and self.property_ != x.property_: return 0 if self.has_direction_ != x.has_direction_: return 0 if self.has_direction_ and self.direction_ != x.direction_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (not self.has_property_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: property not set.') return initialized def ByteSize(self): n = 0 n += self.lengthString(len(self.property_)) if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_) return n + 1 def ByteSizePartial(self): n = 0 if (self.has_property_): n += 1 n += self.lengthString(len(self.property_)) if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_) return n def Clear(self): self.clear_property() self.clear_direction() def OutputUnchecked(self, out): out.putVarInt32(82) out.putPrefixedString(self.property_) if (self.has_direction_): out.putVarInt32(88) out.putVarInt32(self.direction_) def OutputPartial(self, out): if (self.has_property_): out.putVarInt32(82) out.putPrefixedString(self.property_) if (self.has_direction_): out.putVarInt32(88) out.putVarInt32(self.direction_) def TryMerge(self, d): while 1: tt = d.getVarInt32() if tt == 76: break if tt == 82: self.set_property(d.getPrefixedString()) continue if tt == 88: self.set_direction(d.getVarInt32()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_)) if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_)) return res class Query(ProtocolBuffer.ProtocolMessage): ORDER_FIRST = 1 ANCESTOR_FIRST = 2 FILTER_FIRST = 3 _Hint_NAMES = { 1: "ORDER_FIRST", 2: "ANCESTOR_FIRST", 3: "FILTER_FIRST", } def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "") Hint_Name = classmethod(Hint_Name) has_header_ = 0 header_ = 
None has_app_ = 0 app_ = "" has_name_space_ = 0 name_space_ = "" has_kind_ = 0 kind_ = "" has_ancestor_ = 0 ancestor_ = None has_search_query_ = 0 search_query_ = "" has_hint_ = 0 hint_ = 0 has_count_ = 0 count_ = 0 has_offset_ = 0 offset_ = 0 has_limit_ = 0 limit_ = 0 has_compiled_cursor_ = 0 compiled_cursor_ = None has_end_compiled_cursor_ = 0 end_compiled_cursor_ = None has_require_perfect_plan_ = 0 require_perfect_plan_ = 0 has_keys_only_ = 0 keys_only_ = 0 has_transaction_ = 0 transaction_ = None has_compile_ = 0 compile_ = 0 has_failover_ms_ = 0 failover_ms_ = 0 has_strong_ = 0 strong_ = 0 has_distinct_ = 0 distinct_ = 0 has_min_safe_time_seconds_ = 0 min_safe_time_seconds_ = 0 has_persist_offset_ = 0 persist_offset_ = 1 def __init__(self, contents=None): self.filter_ = [] self.order_ = [] self.composite_index_ = [] self.property_name_ = [] self.group_by_property_name_ = [] self.safe_replica_name_ = [] self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def header(self): if self.header_ is None: self.lazy_init_lock_.acquire() try: if self.header_ is None: self.header_ = InternalHeader() finally: self.lazy_init_lock_.release() return self.header_ def mutable_header(self): self.has_header_ = 1; return self.header() def clear_header(self): if self.has_header_: self.has_header_ = 0; if self.header_ is not None: self.header_.Clear() def has_header(self): return self.has_header_ def app(self): return self.app_ def set_app(self, x): self.has_app_ = 1 self.app_ = x def clear_app(self): if self.has_app_: self.has_app_ = 0 self.app_ = "" def has_app(self): return self.has_app_ def name_space(self): return self.name_space_ def set_name_space(self, x): self.has_name_space_ = 1 self.name_space_ = x def clear_name_space(self): if self.has_name_space_: self.has_name_space_ = 0 self.name_space_ = "" def has_name_space(self): return self.has_name_space_ def kind(self): return self.kind_ def set_kind(self, x): self.has_kind_ = 
1 self.kind_ = x def clear_kind(self): if self.has_kind_: self.has_kind_ = 0 self.kind_ = "" def has_kind(self): return self.has_kind_ def ancestor(self): if self.ancestor_ is None: self.lazy_init_lock_.acquire() try: if self.ancestor_ is None: self.ancestor_ = Reference() finally: self.lazy_init_lock_.release() return self.ancestor_ def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor() def clear_ancestor(self): if self.has_ancestor_: self.has_ancestor_ = 0; if self.ancestor_ is not None: self.ancestor_.Clear() def has_ancestor(self): return self.has_ancestor_ def filter_size(self): return len(self.filter_) def filter_list(self): return self.filter_ def filter(self, i): return self.filter_[i] def mutable_filter(self, i): return self.filter_[i] def add_filter(self): x = Query_Filter() self.filter_.append(x) return x def clear_filter(self): self.filter_ = [] def search_query(self): return self.search_query_ def set_search_query(self, x): self.has_search_query_ = 1 self.search_query_ = x def clear_search_query(self): if self.has_search_query_: self.has_search_query_ = 0 self.search_query_ = "" def has_search_query(self): return self.has_search_query_ def order_size(self): return len(self.order_) def order_list(self): return self.order_ def order(self, i): return self.order_[i] def mutable_order(self, i): return self.order_[i] def add_order(self): x = Query_Order() self.order_.append(x) return x def clear_order(self): self.order_ = [] def hint(self): return self.hint_ def set_hint(self, x): self.has_hint_ = 1 self.hint_ = x def clear_hint(self): if self.has_hint_: self.has_hint_ = 0 self.hint_ = 0 def has_hint(self): return self.has_hint_ def count(self): return self.count_ def set_count(self, x): self.has_count_ = 1 self.count_ = x def clear_count(self): if self.has_count_: self.has_count_ = 0 self.count_ = 0 def has_count(self): return self.has_count_ def offset(self): return self.offset_ def set_offset(self, x): self.has_offset_ = 1 self.offset_ 
= x def clear_offset(self): if self.has_offset_: self.has_offset_ = 0 self.offset_ = 0 def has_offset(self): return self.has_offset_ def limit(self): return self.limit_ def set_limit(self, x): self.has_limit_ = 1 self.limit_ = x def clear_limit(self): if self.has_limit_: self.has_limit_ = 0 self.limit_ = 0 def has_limit(self): return self.has_limit_ def compiled_cursor(self): if self.compiled_cursor_ is None: self.lazy_init_lock_.acquire() try: if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor() finally: self.lazy_init_lock_.release() return self.compiled_cursor_ def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor() def clear_compiled_cursor(self): if self.has_compiled_cursor_: self.has_compiled_cursor_ = 0; if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear() def has_compiled_cursor(self): return self.has_compiled_cursor_ def end_compiled_cursor(self): if self.end_compiled_cursor_ is None: self.lazy_init_lock_.acquire() try: if self.end_compiled_cursor_ is None: self.end_compiled_cursor_ = CompiledCursor() finally: self.lazy_init_lock_.release() return self.end_compiled_cursor_ def mutable_end_compiled_cursor(self): self.has_end_compiled_cursor_ = 1; return self.end_compiled_cursor() def clear_end_compiled_cursor(self): if self.has_end_compiled_cursor_: self.has_end_compiled_cursor_ = 0; if self.end_compiled_cursor_ is not None: self.end_compiled_cursor_.Clear() def has_end_compiled_cursor(self): return self.has_end_compiled_cursor_ def composite_index_size(self): return len(self.composite_index_) def composite_index_list(self): return self.composite_index_ def composite_index(self, i): return self.composite_index_[i] def mutable_composite_index(self, i): return self.composite_index_[i] def add_composite_index(self): x = CompositeIndex() self.composite_index_.append(x) return x def clear_composite_index(self): self.composite_index_ = [] def require_perfect_plan(self): return 
self.require_perfect_plan_ def set_require_perfect_plan(self, x): self.has_require_perfect_plan_ = 1 self.require_perfect_plan_ = x def clear_require_perfect_plan(self): if self.has_require_perfect_plan_: self.has_require_perfect_plan_ = 0 self.require_perfect_plan_ = 0 def has_require_perfect_plan(self): return self.has_require_perfect_plan_ def keys_only(self): return self.keys_only_ def set_keys_only(self, x): self.has_keys_only_ = 1 self.keys_only_ = x def clear_keys_only(self): if self.has_keys_only_: self.has_keys_only_ = 0 self.keys_only_ = 0 def has_keys_only(self): return self.has_keys_only_ def transaction(self): if self.transaction_ is None: self.lazy_init_lock_.acquire() try: if self.transaction_ is None: self.transaction_ = Transaction() finally: self.lazy_init_lock_.release() return self.transaction_ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction() def clear_transaction(self): if self.has_transaction_: self.has_transaction_ = 0; if self.transaction_ is not None: self.transaction_.Clear() def has_transaction(self): return self.has_transaction_ def compile(self): return self.compile_ def set_compile(self, x): self.has_compile_ = 1 self.compile_ = x def clear_compile(self): if self.has_compile_: self.has_compile_ = 0 self.compile_ = 0 def has_compile(self): return self.has_compile_ def failover_ms(self): return self.failover_ms_ def set_failover_ms(self, x): self.has_failover_ms_ = 1 self.failover_ms_ = x def clear_failover_ms(self): if self.has_failover_ms_: self.has_failover_ms_ = 0 self.failover_ms_ = 0 def has_failover_ms(self): return self.has_failover_ms_ def strong(self): return self.strong_ def set_strong(self, x): self.has_strong_ = 1 self.strong_ = x def clear_strong(self): if self.has_strong_: self.has_strong_ = 0 self.strong_ = 0 def has_strong(self): return self.has_strong_ def property_name_size(self): return len(self.property_name_) def property_name_list(self): return self.property_name_ def 
property_name(self, i): return self.property_name_[i] def set_property_name(self, i, x): self.property_name_[i] = x def add_property_name(self, x): self.property_name_.append(x) def clear_property_name(self): self.property_name_ = [] def group_by_property_name_size(self): return len(self.group_by_property_name_) def group_by_property_name_list(self): return self.group_by_property_name_ def group_by_property_name(self, i): return self.group_by_property_name_[i] def set_group_by_property_name(self, i, x): self.group_by_property_name_[i] = x def add_group_by_property_name(self, x): self.group_by_property_name_.append(x) def clear_group_by_property_name(self): self.group_by_property_name_ = [] def distinct(self): return self.distinct_ def set_distinct(self, x): self.has_distinct_ = 1 self.distinct_ = x def clear_distinct(self): if self.has_distinct_: self.has_distinct_ = 0 self.distinct_ = 0 def has_distinct(self): return self.has_distinct_ def min_safe_time_seconds(self): return self.min_safe_time_seconds_ def set_min_safe_time_seconds(self, x): self.has_min_safe_time_seconds_ = 1 self.min_safe_time_seconds_ = x def clear_min_safe_time_seconds(self): if self.has_min_safe_time_seconds_: self.has_min_safe_time_seconds_ = 0 self.min_safe_time_seconds_ = 0 def has_min_safe_time_seconds(self): return self.has_min_safe_time_seconds_ def safe_replica_name_size(self): return len(self.safe_replica_name_) def safe_replica_name_list(self): return self.safe_replica_name_ def safe_replica_name(self, i): return self.safe_replica_name_[i] def set_safe_replica_name(self, i, x): self.safe_replica_name_[i] = x def add_safe_replica_name(self, x): self.safe_replica_name_.append(x) def clear_safe_replica_name(self): self.safe_replica_name_ = [] def persist_offset(self): return self.persist_offset_ def set_persist_offset(self, x): self.has_persist_offset_ = 1 self.persist_offset_ = x def clear_persist_offset(self): if self.has_persist_offset_: self.has_persist_offset_ = 0 
self.persist_offset_ = 1 def has_persist_offset(self): return self.has_persist_offset_ def MergeFrom(self, x): assert x is not self if (x.has_header()): self.mutable_header().MergeFrom(x.header()) if (x.has_app()): self.set_app(x.app()) if (x.has_name_space()): self.set_name_space(x.name_space()) if (x.has_kind()): self.set_kind(x.kind()) if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor()) for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i)) if (x.has_search_query()): self.set_search_query(x.search_query()) for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i)) if (x.has_hint()): self.set_hint(x.hint()) if (x.has_count()): self.set_count(x.count()) if (x.has_offset()): self.set_offset(x.offset()) if (x.has_limit()): self.set_limit(x.limit()) if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor()) if (x.has_end_compiled_cursor()): self.mutable_end_compiled_cursor().MergeFrom(x.end_compiled_cursor()) for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i)) if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan()) if (x.has_keys_only()): self.set_keys_only(x.keys_only()) if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction()) if (x.has_compile()): self.set_compile(x.compile()) if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms()) if (x.has_strong()): self.set_strong(x.strong()) for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i)) for i in xrange(x.group_by_property_name_size()): self.add_group_by_property_name(x.group_by_property_name(i)) if (x.has_distinct()): self.set_distinct(x.distinct()) if (x.has_min_safe_time_seconds()): self.set_min_safe_time_seconds(x.min_safe_time_seconds()) for i in xrange(x.safe_replica_name_size()): self.add_safe_replica_name(x.safe_replica_name(i)) if (x.has_persist_offset()): 
self.set_persist_offset(x.persist_offset()) def Equals(self, x): if x is self: return 1 if self.has_header_ != x.has_header_: return 0 if self.has_header_ and self.header_ != x.header_: return 0 if self.has_app_ != x.has_app_: return 0 if self.has_app_ and self.app_ != x.app_: return 0 if self.has_name_space_ != x.has_name_space_: return 0 if self.has_name_space_ and self.name_space_ != x.name_space_: return 0 if self.has_kind_ != x.has_kind_: return 0 if self.has_kind_ and self.kind_ != x.kind_: return 0 if self.has_ancestor_ != x.has_ancestor_: return 0 if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0 if len(self.filter_) != len(x.filter_): return 0 for e1, e2 in zip(self.filter_, x.filter_): if e1 != e2: return 0 if self.has_search_query_ != x.has_search_query_: return 0 if self.has_search_query_ and self.search_query_ != x.search_query_: return 0 if len(self.order_) != len(x.order_): return 0 for e1, e2 in zip(self.order_, x.order_): if e1 != e2: return 0 if self.has_hint_ != x.has_hint_: return 0 if self.has_hint_ and self.hint_ != x.hint_: return 0 if self.has_count_ != x.has_count_: return 0 if self.has_count_ and self.count_ != x.count_: return 0 if self.has_offset_ != x.has_offset_: return 0 if self.has_offset_ and self.offset_ != x.offset_: return 0 if self.has_limit_ != x.has_limit_: return 0 if self.has_limit_ and self.limit_ != x.limit_: return 0 if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0 if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0 if self.has_end_compiled_cursor_ != x.has_end_compiled_cursor_: return 0 if self.has_end_compiled_cursor_ and self.end_compiled_cursor_ != x.end_compiled_cursor_: return 0 if len(self.composite_index_) != len(x.composite_index_): return 0 for e1, e2 in zip(self.composite_index_, x.composite_index_): if e1 != e2: return 0 if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0 if self.has_require_perfect_plan_ and 
self.require_perfect_plan_ != x.require_perfect_plan_: return 0 if self.has_keys_only_ != x.has_keys_only_: return 0 if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0 if self.has_transaction_ != x.has_transaction_: return 0 if self.has_transaction_ and self.transaction_ != x.transaction_: return 0 if self.has_compile_ != x.has_compile_: return 0 if self.has_compile_ and self.compile_ != x.compile_: return 0 if self.has_failover_ms_ != x.has_failover_ms_: return 0 if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0 if self.has_strong_ != x.has_strong_: return 0 if self.has_strong_ and self.strong_ != x.strong_: return 0 if len(self.property_name_) != len(x.property_name_): return 0 for e1, e2 in zip(self.property_name_, x.property_name_): if e1 != e2: return 0 if len(self.group_by_property_name_) != len(x.group_by_property_name_): return 0 for e1, e2 in zip(self.group_by_property_name_, x.group_by_property_name_): if e1 != e2: return 0 if self.has_distinct_ != x.has_distinct_: return 0 if self.has_distinct_ and self.distinct_ != x.distinct_: return 0 if self.has_min_safe_time_seconds_ != x.has_min_safe_time_seconds_: return 0 if self.has_min_safe_time_seconds_ and self.min_safe_time_seconds_ != x.min_safe_time_seconds_: return 0 if len(self.safe_replica_name_) != len(x.safe_replica_name_): return 0 for e1, e2 in zip(self.safe_replica_name_, x.safe_replica_name_): if e1 != e2: return 0 if self.has_persist_offset_ != x.has_persist_offset_: return 0 if self.has_persist_offset_ and self.persist_offset_ != x.persist_offset_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0 if (not self.has_app_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: app not set.') if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0 for p in self.filter_: if not 
p.IsInitialized(debug_strs): initialized=0 for p in self.order_: if not p.IsInitialized(debug_strs): initialized=0 if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0 if (self.has_end_compiled_cursor_ and not self.end_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0 for p in self.composite_index_: if not p.IsInitialized(debug_strs): initialized=0 if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSize()) n += self.lengthString(len(self.app_)) if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_)) if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_)) if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize()) n += 2 * len(self.filter_) for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSize() if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_)) n += 2 * len(self.order_) for i in xrange(len(self.order_)): n += self.order_[i].ByteSize() if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_) if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_) if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_) if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_) if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSize()) if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSize()) n += 2 * len(self.composite_index_) for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize()) if (self.has_require_perfect_plan_): n += 3 if (self.has_keys_only_): n += 3 if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSize()) if (self.has_compile_): n += 3 if (self.has_failover_ms_): n += 2 + 
self.lengthVarInt64(self.failover_ms_) if (self.has_strong_): n += 3 n += 2 * len(self.property_name_) for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i])) n += 2 * len(self.group_by_property_name_) for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i])) if (self.has_distinct_): n += 3 if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_) n += 2 * len(self.safe_replica_name_) for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i])) if (self.has_persist_offset_): n += 3 return n + 1 def ByteSizePartial(self): n = 0 if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSizePartial()) if (self.has_app_): n += 1 n += self.lengthString(len(self.app_)) if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_)) if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_)) if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial()) n += 2 * len(self.filter_) for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSizePartial() if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_)) n += 2 * len(self.order_) for i in xrange(len(self.order_)): n += self.order_[i].ByteSizePartial() if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_) if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_) if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_) if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_) if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSizePartial()) if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSizePartial()) n += 2 * len(self.composite_index_) for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial()) if 
(self.has_require_perfect_plan_): n += 3 if (self.has_keys_only_): n += 3 if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSizePartial()) if (self.has_compile_): n += 3 if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_) if (self.has_strong_): n += 3 n += 2 * len(self.property_name_) for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i])) n += 2 * len(self.group_by_property_name_) for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i])) if (self.has_distinct_): n += 3 if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_) n += 2 * len(self.safe_replica_name_) for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i])) if (self.has_persist_offset_): n += 3 return n def Clear(self): self.clear_header() self.clear_app() self.clear_name_space() self.clear_kind() self.clear_ancestor() self.clear_filter() self.clear_search_query() self.clear_order() self.clear_hint() self.clear_count() self.clear_offset() self.clear_limit() self.clear_compiled_cursor() self.clear_end_compiled_cursor() self.clear_composite_index() self.clear_require_perfect_plan() self.clear_keys_only() self.clear_transaction() self.clear_compile() self.clear_failover_ms() self.clear_strong() self.clear_property_name() self.clear_group_by_property_name() self.clear_distinct() self.clear_min_safe_time_seconds() self.clear_safe_replica_name() self.clear_persist_offset() def OutputUnchecked(self, out): out.putVarInt32(10) out.putPrefixedString(self.app_) if (self.has_kind_): out.putVarInt32(26) out.putPrefixedString(self.kind_) for i in xrange(len(self.filter_)): out.putVarInt32(35) self.filter_[i].OutputUnchecked(out) out.putVarInt32(36) if (self.has_search_query_): out.putVarInt32(66) out.putPrefixedString(self.search_query_) for i in xrange(len(self.order_)): out.putVarInt32(75) 
self.order_[i].OutputUnchecked(out) out.putVarInt32(76) if (self.has_offset_): out.putVarInt32(96) out.putVarInt32(self.offset_) if (self.has_limit_): out.putVarInt32(128) out.putVarInt32(self.limit_) if (self.has_ancestor_): out.putVarInt32(138) out.putVarInt32(self.ancestor_.ByteSize()) self.ancestor_.OutputUnchecked(out) if (self.has_hint_): out.putVarInt32(144) out.putVarInt32(self.hint_) for i in xrange(len(self.composite_index_)): out.putVarInt32(154) out.putVarInt32(self.composite_index_[i].ByteSize()) self.composite_index_[i].OutputUnchecked(out) if (self.has_require_perfect_plan_): out.putVarInt32(160) out.putBoolean(self.require_perfect_plan_) if (self.has_keys_only_): out.putVarInt32(168) out.putBoolean(self.keys_only_) if (self.has_transaction_): out.putVarInt32(178) out.putVarInt32(self.transaction_.ByteSize()) self.transaction_.OutputUnchecked(out) if (self.has_count_): out.putVarInt32(184) out.putVarInt32(self.count_) if (self.has_distinct_): out.putVarInt32(192) out.putBoolean(self.distinct_) if (self.has_compile_): out.putVarInt32(200) out.putBoolean(self.compile_) if (self.has_failover_ms_): out.putVarInt32(208) out.putVarInt64(self.failover_ms_) if (self.has_name_space_): out.putVarInt32(234) out.putPrefixedString(self.name_space_) if (self.has_compiled_cursor_): out.putVarInt32(242) out.putVarInt32(self.compiled_cursor_.ByteSize()) self.compiled_cursor_.OutputUnchecked(out) if (self.has_end_compiled_cursor_): out.putVarInt32(250) out.putVarInt32(self.end_compiled_cursor_.ByteSize()) self.end_compiled_cursor_.OutputUnchecked(out) if (self.has_strong_): out.putVarInt32(256) out.putBoolean(self.strong_) for i in xrange(len(self.property_name_)): out.putVarInt32(266) out.putPrefixedString(self.property_name_[i]) for i in xrange(len(self.group_by_property_name_)): out.putVarInt32(274) out.putPrefixedString(self.group_by_property_name_[i]) if (self.has_min_safe_time_seconds_): out.putVarInt32(280) out.putVarInt64(self.min_safe_time_seconds_) for i in 
xrange(len(self.safe_replica_name_)): out.putVarInt32(290) out.putPrefixedString(self.safe_replica_name_[i]) if (self.has_persist_offset_): out.putVarInt32(296) out.putBoolean(self.persist_offset_) if (self.has_header_): out.putVarInt32(314) out.putVarInt32(self.header_.ByteSize()) self.header_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_app_): out.putVarInt32(10) out.putPrefixedString(self.app_) if (self.has_kind_): out.putVarInt32(26) out.putPrefixedString(self.kind_) for i in xrange(len(self.filter_)): out.putVarInt32(35) self.filter_[i].OutputPartial(out) out.putVarInt32(36) if (self.has_search_query_): out.putVarInt32(66) out.putPrefixedString(self.search_query_) for i in xrange(len(self.order_)): out.putVarInt32(75) self.order_[i].OutputPartial(out) out.putVarInt32(76) if (self.has_offset_): out.putVarInt32(96) out.putVarInt32(self.offset_) if (self.has_limit_): out.putVarInt32(128) out.putVarInt32(self.limit_) if (self.has_ancestor_): out.putVarInt32(138) out.putVarInt32(self.ancestor_.ByteSizePartial()) self.ancestor_.OutputPartial(out) if (self.has_hint_): out.putVarInt32(144) out.putVarInt32(self.hint_) for i in xrange(len(self.composite_index_)): out.putVarInt32(154) out.putVarInt32(self.composite_index_[i].ByteSizePartial()) self.composite_index_[i].OutputPartial(out) if (self.has_require_perfect_plan_): out.putVarInt32(160) out.putBoolean(self.require_perfect_plan_) if (self.has_keys_only_): out.putVarInt32(168) out.putBoolean(self.keys_only_) if (self.has_transaction_): out.putVarInt32(178) out.putVarInt32(self.transaction_.ByteSizePartial()) self.transaction_.OutputPartial(out) if (self.has_count_): out.putVarInt32(184) out.putVarInt32(self.count_) if (self.has_distinct_): out.putVarInt32(192) out.putBoolean(self.distinct_) if (self.has_compile_): out.putVarInt32(200) out.putBoolean(self.compile_) if (self.has_failover_ms_): out.putVarInt32(208) out.putVarInt64(self.failover_ms_) if (self.has_name_space_): out.putVarInt32(234) 
out.putPrefixedString(self.name_space_) if (self.has_compiled_cursor_): out.putVarInt32(242) out.putVarInt32(self.compiled_cursor_.ByteSizePartial()) self.compiled_cursor_.OutputPartial(out) if (self.has_end_compiled_cursor_): out.putVarInt32(250) out.putVarInt32(self.end_compiled_cursor_.ByteSizePartial()) self.end_compiled_cursor_.OutputPartial(out) if (self.has_strong_): out.putVarInt32(256) out.putBoolean(self.strong_) for i in xrange(len(self.property_name_)): out.putVarInt32(266) out.putPrefixedString(self.property_name_[i]) for i in xrange(len(self.group_by_property_name_)): out.putVarInt32(274) out.putPrefixedString(self.group_by_property_name_[i]) if (self.has_min_safe_time_seconds_): out.putVarInt32(280) out.putVarInt64(self.min_safe_time_seconds_) for i in xrange(len(self.safe_replica_name_)): out.putVarInt32(290) out.putPrefixedString(self.safe_replica_name_[i]) if (self.has_persist_offset_): out.putVarInt32(296) out.putBoolean(self.persist_offset_) if (self.has_header_): out.putVarInt32(314) out.putVarInt32(self.header_.ByteSizePartial()) self.header_.OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: self.set_app(d.getPrefixedString()) continue if tt == 26: self.set_kind(d.getPrefixedString()) continue if tt == 35: self.add_filter().TryMerge(d) continue if tt == 66: self.set_search_query(d.getPrefixedString()) continue if tt == 75: self.add_order().TryMerge(d) continue if tt == 96: self.set_offset(d.getVarInt32()) continue if tt == 128: self.set_limit(d.getVarInt32()) continue if tt == 138: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_ancestor().TryMerge(tmp) continue if tt == 144: self.set_hint(d.getVarInt32()) continue if tt == 154: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_composite_index().TryMerge(tmp) continue if tt == 160: 
self.set_require_perfect_plan(d.getBoolean()) continue if tt == 168: self.set_keys_only(d.getBoolean()) continue if tt == 178: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_transaction().TryMerge(tmp) continue if tt == 184: self.set_count(d.getVarInt32()) continue if tt == 192: self.set_distinct(d.getBoolean()) continue if tt == 200: self.set_compile(d.getBoolean()) continue if tt == 208: self.set_failover_ms(d.getVarInt64()) continue if tt == 234: self.set_name_space(d.getPrefixedString()) continue if tt == 242: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_compiled_cursor().TryMerge(tmp) continue if tt == 250: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_end_compiled_cursor().TryMerge(tmp) continue if tt == 256: self.set_strong(d.getBoolean()) continue if tt == 266: self.add_property_name(d.getPrefixedString()) continue if tt == 274: self.add_group_by_property_name(d.getPrefixedString()) continue if tt == 280: self.set_min_safe_time_seconds(d.getVarInt64()) continue if tt == 290: self.add_safe_replica_name(d.getPrefixedString()) continue if tt == 296: self.set_persist_offset(d.getBoolean()) continue if tt == 314: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_header().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_header_: res+=prefix+"header <\n" res+=self.header_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_)) if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_)) if self.has_kind_: res+=prefix+("kind: %s\n" % 
self.DebugFormatString(self.kind_)) if self.has_ancestor_: res+=prefix+"ancestor <\n" res+=self.ancestor_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt=0 for e in self.filter_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("Filter%s {\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+"}\n" cnt+=1 if self.has_search_query_: res+=prefix+("search_query: %s\n" % self.DebugFormatString(self.search_query_)) cnt=0 for e in self.order_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("Order%s {\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+"}\n" cnt+=1 if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_)) if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_)) if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_)) if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_)) if self.has_compiled_cursor_: res+=prefix+"compiled_cursor <\n" res+=self.compiled_cursor_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_end_compiled_cursor_: res+=prefix+"end_compiled_cursor <\n" res+=self.end_compiled_cursor_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt=0 for e in self.composite_index_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("composite_index%s <\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt+=1 if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_)) if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_)) if self.has_transaction_: res+=prefix+"transaction <\n" res+=self.transaction_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_)) if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % 
self.DebugFormatInt64(self.failover_ms_)) if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_)) cnt=0 for e in self.property_name_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e))) cnt+=1 cnt=0 for e in self.group_by_property_name_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("group_by_property_name%s: %s\n" % (elm, self.DebugFormatString(e))) cnt+=1 if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_)) if self.has_min_safe_time_seconds_: res+=prefix+("min_safe_time_seconds: %s\n" % self.DebugFormatInt64(self.min_safe_time_seconds_)) cnt=0 for e in self.safe_replica_name_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("safe_replica_name%s: %s\n" % (elm, self.DebugFormatString(e))) cnt+=1 if self.has_persist_offset_: res+=prefix+("persist_offset: %s\n" % self.DebugFormatBool(self.persist_offset_)) return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kheader = 39 kapp = 1 kname_space = 29 kkind = 3 kancestor = 17 kFilterGroup = 4 kFilterop = 6 kFilterproperty = 14 ksearch_query = 8 kOrderGroup = 9 kOrderproperty = 10 kOrderdirection = 11 khint = 18 kcount = 23 koffset = 12 klimit = 16 kcompiled_cursor = 30 kend_compiled_cursor = 31 kcomposite_index = 19 krequire_perfect_plan = 20 kkeys_only = 21 ktransaction = 22 kcompile = 25 kfailover_ms = 26 kstrong = 32 kproperty_name = 33 kgroup_by_property_name = 34 kdistinct = 24 kmin_safe_time_seconds = 35 ksafe_replica_name = 36 kpersist_offset = 37 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "app", 3: "kind", 4: "Filter", 6: "op", 8: "search_query", 9: "Order", 10: "property", 11: "direction", 12: "offset", 14: "property", 16: "limit", 17: "ancestor", 18: "hint", 19: "composite_index", 20: "require_perfect_plan", 21: "keys_only", 22: "transaction", 23: "count", 24: "distinct", 
25: "compile", 26: "failover_ms", 29: "name_space", 30: "compiled_cursor", 31: "end_compiled_cursor", 32: "strong", 33: "property_name", 34: "group_by_property_name", 35: "min_safe_time_seconds", 36: "safe_replica_name", 37: "persist_offset", 39: "header", }, 39) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 3: ProtocolBuffer.Encoder.STRING, 4: ProtocolBuffer.Encoder.STARTGROUP, 6: ProtocolBuffer.Encoder.NUMERIC, 8: ProtocolBuffer.Encoder.STRING, 9: ProtocolBuffer.Encoder.STARTGROUP, 10: ProtocolBuffer.Encoder.STRING, 11: ProtocolBuffer.Encoder.NUMERIC, 12: ProtocolBuffer.Encoder.NUMERIC, 14: ProtocolBuffer.Encoder.STRING, 16: ProtocolBuffer.Encoder.NUMERIC, 17: ProtocolBuffer.Encoder.STRING, 18: ProtocolBuffer.Encoder.NUMERIC, 19: ProtocolBuffer.Encoder.STRING, 20: ProtocolBuffer.Encoder.NUMERIC, 21: ProtocolBuffer.Encoder.NUMERIC, 22: ProtocolBuffer.Encoder.STRING, 23: ProtocolBuffer.Encoder.NUMERIC, 24: ProtocolBuffer.Encoder.NUMERIC, 25: ProtocolBuffer.Encoder.NUMERIC, 26: ProtocolBuffer.Encoder.NUMERIC, 29: ProtocolBuffer.Encoder.STRING, 30: ProtocolBuffer.Encoder.STRING, 31: ProtocolBuffer.Encoder.STRING, 32: ProtocolBuffer.Encoder.NUMERIC, 33: ProtocolBuffer.Encoder.STRING, 34: ProtocolBuffer.Encoder.STRING, 35: ProtocolBuffer.Encoder.NUMERIC, 36: ProtocolBuffer.Encoder.STRING, 37: ProtocolBuffer.Encoder.NUMERIC, 39: ProtocolBuffer.Encoder.STRING, }, 39, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Query' class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage): has_index_name_ = 0 index_name_ = "" has_start_key_ = 0 start_key_ = "" has_start_inclusive_ = 0 start_inclusive_ = 0 has_end_key_ = 0 end_key_ = "" has_end_inclusive_ = 0 end_inclusive_ = 0 has_end_unapplied_log_timestamp_us_ = 0 end_unapplied_log_timestamp_us_ = 0 def __init__(self, contents=None): self.start_postfix_value_ = [] self.end_postfix_value_ = 
[] if contents is not None: self.MergeFromString(contents) def index_name(self): return self.index_name_ def set_index_name(self, x): self.has_index_name_ = 1 self.index_name_ = x def clear_index_name(self): if self.has_index_name_: self.has_index_name_ = 0 self.index_name_ = "" def has_index_name(self): return self.has_index_name_ def start_key(self): return self.start_key_ def set_start_key(self, x): self.has_start_key_ = 1 self.start_key_ = x def clear_start_key(self): if self.has_start_key_: self.has_start_key_ = 0 self.start_key_ = "" def has_start_key(self): return self.has_start_key_ def start_inclusive(self): return self.start_inclusive_ def set_start_inclusive(self, x): self.has_start_inclusive_ = 1 self.start_inclusive_ = x def clear_start_inclusive(self): if self.has_start_inclusive_: self.has_start_inclusive_ = 0 self.start_inclusive_ = 0 def has_start_inclusive(self): return self.has_start_inclusive_ def end_key(self): return self.end_key_ def set_end_key(self, x): self.has_end_key_ = 1 self.end_key_ = x def clear_end_key(self): if self.has_end_key_: self.has_end_key_ = 0 self.end_key_ = "" def has_end_key(self): return self.has_end_key_ def end_inclusive(self): return self.end_inclusive_ def set_end_inclusive(self, x): self.has_end_inclusive_ = 1 self.end_inclusive_ = x def clear_end_inclusive(self): if self.has_end_inclusive_: self.has_end_inclusive_ = 0 self.end_inclusive_ = 0 def has_end_inclusive(self): return self.has_end_inclusive_ def start_postfix_value_size(self): return len(self.start_postfix_value_) def start_postfix_value_list(self): return self.start_postfix_value_ def start_postfix_value(self, i): return self.start_postfix_value_[i] def set_start_postfix_value(self, i, x): self.start_postfix_value_[i] = x def add_start_postfix_value(self, x): self.start_postfix_value_.append(x) def clear_start_postfix_value(self): self.start_postfix_value_ = [] def end_postfix_value_size(self): return len(self.end_postfix_value_) def 
end_postfix_value_list(self): return self.end_postfix_value_ def end_postfix_value(self, i): return self.end_postfix_value_[i] def set_end_postfix_value(self, i, x): self.end_postfix_value_[i] = x def add_end_postfix_value(self, x): self.end_postfix_value_.append(x) def clear_end_postfix_value(self): self.end_postfix_value_ = [] def end_unapplied_log_timestamp_us(self): return self.end_unapplied_log_timestamp_us_ def set_end_unapplied_log_timestamp_us(self, x): self.has_end_unapplied_log_timestamp_us_ = 1 self.end_unapplied_log_timestamp_us_ = x def clear_end_unapplied_log_timestamp_us(self): if self.has_end_unapplied_log_timestamp_us_: self.has_end_unapplied_log_timestamp_us_ = 0 self.end_unapplied_log_timestamp_us_ = 0 def has_end_unapplied_log_timestamp_us(self): return self.has_end_unapplied_log_timestamp_us_ def MergeFrom(self, x): assert x is not self if (x.has_index_name()): self.set_index_name(x.index_name()) if (x.has_start_key()): self.set_start_key(x.start_key()) if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive()) if (x.has_end_key()): self.set_end_key(x.end_key()) if (x.has_end_inclusive()): self.set_end_inclusive(x.end_inclusive()) for i in xrange(x.start_postfix_value_size()): self.add_start_postfix_value(x.start_postfix_value(i)) for i in xrange(x.end_postfix_value_size()): self.add_end_postfix_value(x.end_postfix_value(i)) if (x.has_end_unapplied_log_timestamp_us()): self.set_end_unapplied_log_timestamp_us(x.end_unapplied_log_timestamp_us()) def Equals(self, x): if x is self: return 1 if self.has_index_name_ != x.has_index_name_: return 0 if self.has_index_name_ and self.index_name_ != x.index_name_: return 0 if self.has_start_key_ != x.has_start_key_: return 0 if self.has_start_key_ and self.start_key_ != x.start_key_: return 0 if self.has_start_inclusive_ != x.has_start_inclusive_: return 0 if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0 if self.has_end_key_ != x.has_end_key_: return 0 
if self.has_end_key_ and self.end_key_ != x.end_key_: return 0 if self.has_end_inclusive_ != x.has_end_inclusive_: return 0 if self.has_end_inclusive_ and self.end_inclusive_ != x.end_inclusive_: return 0 if len(self.start_postfix_value_) != len(x.start_postfix_value_): return 0 for e1, e2 in zip(self.start_postfix_value_, x.start_postfix_value_): if e1 != e2: return 0 if len(self.end_postfix_value_) != len(x.end_postfix_value_): return 0 for e1, e2 in zip(self.end_postfix_value_, x.end_postfix_value_): if e1 != e2: return 0 if self.has_end_unapplied_log_timestamp_us_ != x.has_end_unapplied_log_timestamp_us_: return 0 if self.has_end_unapplied_log_timestamp_us_ and self.end_unapplied_log_timestamp_us_ != x.end_unapplied_log_timestamp_us_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 return initialized def ByteSize(self): n = 0 if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_)) if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_)) if (self.has_start_inclusive_): n += 2 if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_)) if (self.has_end_inclusive_): n += 2 n += 2 * len(self.start_postfix_value_) for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i])) n += 2 * len(self.end_postfix_value_) for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i])) if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_) return n def ByteSizePartial(self): n = 0 if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_)) if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_)) if (self.has_start_inclusive_): n += 2 if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_)) if (self.has_end_inclusive_): n += 2 n += 2 * len(self.start_postfix_value_) for i in 
xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i])) n += 2 * len(self.end_postfix_value_) for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i])) if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_) return n def Clear(self): self.clear_index_name() self.clear_start_key() self.clear_start_inclusive() self.clear_end_key() self.clear_end_inclusive() self.clear_start_postfix_value() self.clear_end_postfix_value() self.clear_end_unapplied_log_timestamp_us() def OutputUnchecked(self, out): if (self.has_index_name_): out.putVarInt32(18) out.putPrefixedString(self.index_name_) if (self.has_start_key_): out.putVarInt32(26) out.putPrefixedString(self.start_key_) if (self.has_start_inclusive_): out.putVarInt32(32) out.putBoolean(self.start_inclusive_) if (self.has_end_key_): out.putVarInt32(42) out.putPrefixedString(self.end_key_) if (self.has_end_inclusive_): out.putVarInt32(48) out.putBoolean(self.end_inclusive_) if (self.has_end_unapplied_log_timestamp_us_): out.putVarInt32(152) out.putVarInt64(self.end_unapplied_log_timestamp_us_) for i in xrange(len(self.start_postfix_value_)): out.putVarInt32(178) out.putPrefixedString(self.start_postfix_value_[i]) for i in xrange(len(self.end_postfix_value_)): out.putVarInt32(186) out.putPrefixedString(self.end_postfix_value_[i]) def OutputPartial(self, out): if (self.has_index_name_): out.putVarInt32(18) out.putPrefixedString(self.index_name_) if (self.has_start_key_): out.putVarInt32(26) out.putPrefixedString(self.start_key_) if (self.has_start_inclusive_): out.putVarInt32(32) out.putBoolean(self.start_inclusive_) if (self.has_end_key_): out.putVarInt32(42) out.putPrefixedString(self.end_key_) if (self.has_end_inclusive_): out.putVarInt32(48) out.putBoolean(self.end_inclusive_) if (self.has_end_unapplied_log_timestamp_us_): out.putVarInt32(152) 
out.putVarInt64(self.end_unapplied_log_timestamp_us_) for i in xrange(len(self.start_postfix_value_)): out.putVarInt32(178) out.putPrefixedString(self.start_postfix_value_[i]) for i in xrange(len(self.end_postfix_value_)): out.putVarInt32(186) out.putPrefixedString(self.end_postfix_value_[i]) def TryMerge(self, d): while 1: tt = d.getVarInt32() if tt == 12: break if tt == 18: self.set_index_name(d.getPrefixedString()) continue if tt == 26: self.set_start_key(d.getPrefixedString()) continue if tt == 32: self.set_start_inclusive(d.getBoolean()) continue if tt == 42: self.set_end_key(d.getPrefixedString()) continue if tt == 48: self.set_end_inclusive(d.getBoolean()) continue if tt == 152: self.set_end_unapplied_log_timestamp_us(d.getVarInt64()) continue if tt == 178: self.add_start_postfix_value(d.getPrefixedString()) continue if tt == 186: self.add_end_postfix_value(d.getPrefixedString()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_)) if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_)) if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_)) if self.has_end_key_: res+=prefix+("end_key: %s\n" % self.DebugFormatString(self.end_key_)) if self.has_end_inclusive_: res+=prefix+("end_inclusive: %s\n" % self.DebugFormatBool(self.end_inclusive_)) cnt=0 for e in self.start_postfix_value_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("start_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e))) cnt+=1 cnt=0 for e in self.end_postfix_value_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("end_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e))) cnt+=1 if self.has_end_unapplied_log_timestamp_us_: res+=prefix+("end_unapplied_log_timestamp_us: %s\n" % 
self.DebugFormatInt64(self.end_unapplied_log_timestamp_us_)) return res class CompiledQuery_MergeJoinScan(ProtocolBuffer.ProtocolMessage): has_index_name_ = 0 index_name_ = "" has_value_prefix_ = 0 value_prefix_ = 0 def __init__(self, contents=None): self.prefix_value_ = [] if contents is not None: self.MergeFromString(contents) def index_name(self): return self.index_name_ def set_index_name(self, x): self.has_index_name_ = 1 self.index_name_ = x def clear_index_name(self): if self.has_index_name_: self.has_index_name_ = 0 self.index_name_ = "" def has_index_name(self): return self.has_index_name_ def prefix_value_size(self): return len(self.prefix_value_) def prefix_value_list(self): return self.prefix_value_ def prefix_value(self, i): return self.prefix_value_[i] def set_prefix_value(self, i, x): self.prefix_value_[i] = x def add_prefix_value(self, x): self.prefix_value_.append(x) def clear_prefix_value(self): self.prefix_value_ = [] def value_prefix(self): return self.value_prefix_ def set_value_prefix(self, x): self.has_value_prefix_ = 1 self.value_prefix_ = x def clear_value_prefix(self): if self.has_value_prefix_: self.has_value_prefix_ = 0 self.value_prefix_ = 0 def has_value_prefix(self): return self.has_value_prefix_ def MergeFrom(self, x): assert x is not self if (x.has_index_name()): self.set_index_name(x.index_name()) for i in xrange(x.prefix_value_size()): self.add_prefix_value(x.prefix_value(i)) if (x.has_value_prefix()): self.set_value_prefix(x.value_prefix()) def Equals(self, x): if x is self: return 1 if self.has_index_name_ != x.has_index_name_: return 0 if self.has_index_name_ and self.index_name_ != x.index_name_: return 0 if len(self.prefix_value_) != len(x.prefix_value_): return 0 for e1, e2 in zip(self.prefix_value_, x.prefix_value_): if e1 != e2: return 0 if self.has_value_prefix_ != x.has_value_prefix_: return 0 if self.has_value_prefix_ and self.value_prefix_ != x.value_prefix_: return 0 return 1 def IsInitialized(self, debug_strs=None): 
initialized = 1 if (not self.has_index_name_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: index_name not set.') return initialized def ByteSize(self): n = 0 n += self.lengthString(len(self.index_name_)) n += 1 * len(self.prefix_value_) for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i])) if (self.has_value_prefix_): n += 3 return n + 1 def ByteSizePartial(self): n = 0 if (self.has_index_name_): n += 1 n += self.lengthString(len(self.index_name_)) n += 1 * len(self.prefix_value_) for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i])) if (self.has_value_prefix_): n += 3 return n def Clear(self): self.clear_index_name() self.clear_prefix_value() self.clear_value_prefix() def OutputUnchecked(self, out): out.putVarInt32(66) out.putPrefixedString(self.index_name_) for i in xrange(len(self.prefix_value_)): out.putVarInt32(74) out.putPrefixedString(self.prefix_value_[i]) if (self.has_value_prefix_): out.putVarInt32(160) out.putBoolean(self.value_prefix_) def OutputPartial(self, out): if (self.has_index_name_): out.putVarInt32(66) out.putPrefixedString(self.index_name_) for i in xrange(len(self.prefix_value_)): out.putVarInt32(74) out.putPrefixedString(self.prefix_value_[i]) if (self.has_value_prefix_): out.putVarInt32(160) out.putBoolean(self.value_prefix_) def TryMerge(self, d): while 1: tt = d.getVarInt32() if tt == 60: break if tt == 66: self.set_index_name(d.getPrefixedString()) continue if tt == 74: self.add_prefix_value(d.getPrefixedString()) continue if tt == 160: self.set_value_prefix(d.getBoolean()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_)) cnt=0 for e in self.prefix_value_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("prefix_value%s: %s\n" % 
(elm, self.DebugFormatString(e))) cnt+=1 if self.has_value_prefix_: res+=prefix+("value_prefix: %s\n" % self.DebugFormatBool(self.value_prefix_)) return res class CompiledQuery_EntityFilter(ProtocolBuffer.ProtocolMessage): has_distinct_ = 0 distinct_ = 0 has_kind_ = 0 kind_ = "" has_ancestor_ = 0 ancestor_ = None def __init__(self, contents=None): self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def distinct(self): return self.distinct_ def set_distinct(self, x): self.has_distinct_ = 1 self.distinct_ = x def clear_distinct(self): if self.has_distinct_: self.has_distinct_ = 0 self.distinct_ = 0 def has_distinct(self): return self.has_distinct_ def kind(self): return self.kind_ def set_kind(self, x): self.has_kind_ = 1 self.kind_ = x def clear_kind(self): if self.has_kind_: self.has_kind_ = 0 self.kind_ = "" def has_kind(self): return self.has_kind_ def ancestor(self): if self.ancestor_ is None: self.lazy_init_lock_.acquire() try: if self.ancestor_ is None: self.ancestor_ = Reference() finally: self.lazy_init_lock_.release() return self.ancestor_ def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor() def clear_ancestor(self): if self.has_ancestor_: self.has_ancestor_ = 0; if self.ancestor_ is not None: self.ancestor_.Clear() def has_ancestor(self): return self.has_ancestor_ def MergeFrom(self, x): assert x is not self if (x.has_distinct()): self.set_distinct(x.distinct()) if (x.has_kind()): self.set_kind(x.kind()) if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor()) def Equals(self, x): if x is self: return 1 if self.has_distinct_ != x.has_distinct_: return 0 if self.has_distinct_ and self.distinct_ != x.distinct_: return 0 if self.has_kind_ != x.has_kind_: return 0 if self.has_kind_ and self.kind_ != x.kind_: return 0 if self.has_ancestor_ != x.has_ancestor_: return 0 if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0 return 1 def IsInitialized(self, 
debug_strs=None): initialized = 1 if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_distinct_): n += 2 if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_)) if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize()) return n def ByteSizePartial(self): n = 0 if (self.has_distinct_): n += 2 if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_)) if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial()) return n def Clear(self): self.clear_distinct() self.clear_kind() self.clear_ancestor() def OutputUnchecked(self, out): if (self.has_distinct_): out.putVarInt32(112) out.putBoolean(self.distinct_) if (self.has_kind_): out.putVarInt32(138) out.putPrefixedString(self.kind_) if (self.has_ancestor_): out.putVarInt32(146) out.putVarInt32(self.ancestor_.ByteSize()) self.ancestor_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_distinct_): out.putVarInt32(112) out.putBoolean(self.distinct_) if (self.has_kind_): out.putVarInt32(138) out.putPrefixedString(self.kind_) if (self.has_ancestor_): out.putVarInt32(146) out.putVarInt32(self.ancestor_.ByteSizePartial()) self.ancestor_.OutputPartial(out) def TryMerge(self, d): while 1: tt = d.getVarInt32() if tt == 108: break if tt == 112: self.set_distinct(d.getBoolean()) continue if tt == 138: self.set_kind(d.getPrefixedString()) continue if tt == 146: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_ancestor().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_)) if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_)) if self.has_ancestor_: res+=prefix+"ancestor <\n" 
res+=self.ancestor_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" return res class CompiledQuery(ProtocolBuffer.ProtocolMessage): has_primaryscan_ = 0 has_index_def_ = 0 index_def_ = None has_offset_ = 0 offset_ = 0 has_limit_ = 0 limit_ = 0 has_keys_only_ = 0 keys_only_ = 0 has_distinct_infix_size_ = 0 distinct_infix_size_ = 0 has_entityfilter_ = 0 entityfilter_ = None has_plan_label_ = 0 plan_label_ = "" def __init__(self, contents=None): self.primaryscan_ = CompiledQuery_PrimaryScan() self.mergejoinscan_ = [] self.property_name_ = [] self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def primaryscan(self): return self.primaryscan_ def mutable_primaryscan(self): self.has_primaryscan_ = 1; return self.primaryscan_ def clear_primaryscan(self):self.has_primaryscan_ = 0; self.primaryscan_.Clear() def has_primaryscan(self): return self.has_primaryscan_ def mergejoinscan_size(self): return len(self.mergejoinscan_) def mergejoinscan_list(self): return self.mergejoinscan_ def mergejoinscan(self, i): return self.mergejoinscan_[i] def mutable_mergejoinscan(self, i): return self.mergejoinscan_[i] def add_mergejoinscan(self): x = CompiledQuery_MergeJoinScan() self.mergejoinscan_.append(x) return x def clear_mergejoinscan(self): self.mergejoinscan_ = [] def index_def(self): if self.index_def_ is None: self.lazy_init_lock_.acquire() try: if self.index_def_ is None: self.index_def_ = Index() finally: self.lazy_init_lock_.release() return self.index_def_ def mutable_index_def(self): self.has_index_def_ = 1; return self.index_def() def clear_index_def(self): if self.has_index_def_: self.has_index_def_ = 0; if self.index_def_ is not None: self.index_def_.Clear() def has_index_def(self): return self.has_index_def_ def offset(self): return self.offset_ def set_offset(self, x): self.has_offset_ = 1 self.offset_ = x def clear_offset(self): if self.has_offset_: self.has_offset_ = 0 self.offset_ = 0 def has_offset(self): 
return self.has_offset_ def limit(self): return self.limit_ def set_limit(self, x): self.has_limit_ = 1 self.limit_ = x def clear_limit(self): if self.has_limit_: self.has_limit_ = 0 self.limit_ = 0 def has_limit(self): return self.has_limit_ def keys_only(self): return self.keys_only_ def set_keys_only(self, x): self.has_keys_only_ = 1 self.keys_only_ = x def clear_keys_only(self): if self.has_keys_only_: self.has_keys_only_ = 0 self.keys_only_ = 0 def has_keys_only(self): return self.has_keys_only_ def property_name_size(self): return len(self.property_name_) def property_name_list(self): return self.property_name_ def property_name(self, i): return self.property_name_[i] def set_property_name(self, i, x): self.property_name_[i] = x def add_property_name(self, x): self.property_name_.append(x) def clear_property_name(self): self.property_name_ = [] def distinct_infix_size(self): return self.distinct_infix_size_ def set_distinct_infix_size(self, x): self.has_distinct_infix_size_ = 1 self.distinct_infix_size_ = x def clear_distinct_infix_size(self): if self.has_distinct_infix_size_: self.has_distinct_infix_size_ = 0 self.distinct_infix_size_ = 0 def has_distinct_infix_size(self): return self.has_distinct_infix_size_ def entityfilter(self): if self.entityfilter_ is None: self.lazy_init_lock_.acquire() try: if self.entityfilter_ is None: self.entityfilter_ = CompiledQuery_EntityFilter() finally: self.lazy_init_lock_.release() return self.entityfilter_ def mutable_entityfilter(self): self.has_entityfilter_ = 1; return self.entityfilter() def clear_entityfilter(self): if self.has_entityfilter_: self.has_entityfilter_ = 0; if self.entityfilter_ is not None: self.entityfilter_.Clear() def has_entityfilter(self): return self.has_entityfilter_ def plan_label(self): return self.plan_label_ def set_plan_label(self, x): self.has_plan_label_ = 1 self.plan_label_ = x def clear_plan_label(self): if self.has_plan_label_: self.has_plan_label_ = 0 self.plan_label_ = "" def 
has_plan_label(self): return self.has_plan_label_ def MergeFrom(self, x): assert x is not self if (x.has_primaryscan()): self.mutable_primaryscan().MergeFrom(x.primaryscan()) for i in xrange(x.mergejoinscan_size()): self.add_mergejoinscan().CopyFrom(x.mergejoinscan(i)) if (x.has_index_def()): self.mutable_index_def().MergeFrom(x.index_def()) if (x.has_offset()): self.set_offset(x.offset()) if (x.has_limit()): self.set_limit(x.limit()) if (x.has_keys_only()): self.set_keys_only(x.keys_only()) for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i)) if (x.has_distinct_infix_size()): self.set_distinct_infix_size(x.distinct_infix_size()) if (x.has_entityfilter()): self.mutable_entityfilter().MergeFrom(x.entityfilter()) if (x.has_plan_label()): self.set_plan_label(x.plan_label()) def Equals(self, x): if x is self: return 1 if self.has_primaryscan_ != x.has_primaryscan_: return 0 if self.has_primaryscan_ and self.primaryscan_ != x.primaryscan_: return 0 if len(self.mergejoinscan_) != len(x.mergejoinscan_): return 0 for e1, e2 in zip(self.mergejoinscan_, x.mergejoinscan_): if e1 != e2: return 0 if self.has_index_def_ != x.has_index_def_: return 0 if self.has_index_def_ and self.index_def_ != x.index_def_: return 0 if self.has_offset_ != x.has_offset_: return 0 if self.has_offset_ and self.offset_ != x.offset_: return 0 if self.has_limit_ != x.has_limit_: return 0 if self.has_limit_ and self.limit_ != x.limit_: return 0 if self.has_keys_only_ != x.has_keys_only_: return 0 if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0 if len(self.property_name_) != len(x.property_name_): return 0 for e1, e2 in zip(self.property_name_, x.property_name_): if e1 != e2: return 0 if self.has_distinct_infix_size_ != x.has_distinct_infix_size_: return 0 if self.has_distinct_infix_size_ and self.distinct_infix_size_ != x.distinct_infix_size_: return 0 if self.has_entityfilter_ != x.has_entityfilter_: return 0 if self.has_entityfilter_ and 
self.entityfilter_ != x.entityfilter_: return 0 if self.has_plan_label_ != x.has_plan_label_: return 0 if self.has_plan_label_ and self.plan_label_ != x.plan_label_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (not self.has_primaryscan_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: primaryscan not set.') elif not self.primaryscan_.IsInitialized(debug_strs): initialized = 0 for p in self.mergejoinscan_: if not p.IsInitialized(debug_strs): initialized=0 if (self.has_index_def_ and not self.index_def_.IsInitialized(debug_strs)): initialized = 0 if (not self.has_keys_only_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: keys_only not set.') if (self.has_entityfilter_ and not self.entityfilter_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 n += self.primaryscan_.ByteSize() n += 2 * len(self.mergejoinscan_) for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSize() if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSize()) if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_) if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_) n += 2 * len(self.property_name_) for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i])) if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_) if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSize() if (self.has_plan_label_): n += 2 + self.lengthString(len(self.plan_label_)) return n + 4 def ByteSizePartial(self): n = 0 if (self.has_primaryscan_): n += 2 n += self.primaryscan_.ByteSizePartial() n += 2 * len(self.mergejoinscan_) for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSizePartial() if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSizePartial()) if (self.has_offset_): n += 1 + 
self.lengthVarInt64(self.offset_) if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_) if (self.has_keys_only_): n += 2 n += 2 * len(self.property_name_) for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i])) if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_) if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSizePartial() if (self.has_plan_label_): n += 2 + self.lengthString(len(self.plan_label_)) return n def Clear(self): self.clear_primaryscan() self.clear_mergejoinscan() self.clear_index_def() self.clear_offset() self.clear_limit() self.clear_keys_only() self.clear_property_name() self.clear_distinct_infix_size() self.clear_entityfilter() self.clear_plan_label() def OutputUnchecked(self, out): out.putVarInt32(11) self.primaryscan_.OutputUnchecked(out) out.putVarInt32(12) for i in xrange(len(self.mergejoinscan_)): out.putVarInt32(59) self.mergejoinscan_[i].OutputUnchecked(out) out.putVarInt32(60) if (self.has_offset_): out.putVarInt32(80) out.putVarInt32(self.offset_) if (self.has_limit_): out.putVarInt32(88) out.putVarInt32(self.limit_) out.putVarInt32(96) out.putBoolean(self.keys_only_) if (self.has_entityfilter_): out.putVarInt32(107) self.entityfilter_.OutputUnchecked(out) out.putVarInt32(108) if (self.has_index_def_): out.putVarInt32(170) out.putVarInt32(self.index_def_.ByteSize()) self.index_def_.OutputUnchecked(out) for i in xrange(len(self.property_name_)): out.putVarInt32(194) out.putPrefixedString(self.property_name_[i]) if (self.has_distinct_infix_size_): out.putVarInt32(200) out.putVarInt32(self.distinct_infix_size_) if (self.has_plan_label_): out.putVarInt32(210) out.putPrefixedString(self.plan_label_) def OutputPartial(self, out): if (self.has_primaryscan_): out.putVarInt32(11) self.primaryscan_.OutputPartial(out) out.putVarInt32(12) for i in xrange(len(self.mergejoinscan_)): out.putVarInt32(59) self.mergejoinscan_[i].OutputPartial(out) 
out.putVarInt32(60) if (self.has_offset_): out.putVarInt32(80) out.putVarInt32(self.offset_) if (self.has_limit_): out.putVarInt32(88) out.putVarInt32(self.limit_) if (self.has_keys_only_): out.putVarInt32(96) out.putBoolean(self.keys_only_) if (self.has_entityfilter_): out.putVarInt32(107) self.entityfilter_.OutputPartial(out) out.putVarInt32(108) if (self.has_index_def_): out.putVarInt32(170) out.putVarInt32(self.index_def_.ByteSizePartial()) self.index_def_.OutputPartial(out) for i in xrange(len(self.property_name_)): out.putVarInt32(194) out.putPrefixedString(self.property_name_[i]) if (self.has_distinct_infix_size_): out.putVarInt32(200) out.putVarInt32(self.distinct_infix_size_) if (self.has_plan_label_): out.putVarInt32(210) out.putPrefixedString(self.plan_label_) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 11: self.mutable_primaryscan().TryMerge(d) continue if tt == 59: self.add_mergejoinscan().TryMerge(d) continue if tt == 80: self.set_offset(d.getVarInt32()) continue if tt == 88: self.set_limit(d.getVarInt32()) continue if tt == 96: self.set_keys_only(d.getBoolean()) continue if tt == 107: self.mutable_entityfilter().TryMerge(d) continue if tt == 170: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_index_def().TryMerge(tmp) continue if tt == 194: self.add_property_name(d.getPrefixedString()) continue if tt == 200: self.set_distinct_infix_size(d.getVarInt32()) continue if tt == 210: self.set_plan_label(d.getPrefixedString()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_primaryscan_: res+=prefix+"PrimaryScan {\n" res+=self.primaryscan_.__str__(prefix + " ", printElemNumber) res+=prefix+"}\n" cnt=0 for e in self.mergejoinscan_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("MergeJoinScan%s {\n" % elm) res+=e.__str__(prefix + " ", 
printElemNumber) res+=prefix+"}\n" cnt+=1 if self.has_index_def_: res+=prefix+"index_def <\n" res+=self.index_def_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_)) if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_)) if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_)) cnt=0 for e in self.property_name_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e))) cnt+=1 if self.has_distinct_infix_size_: res+=prefix+("distinct_infix_size: %s\n" % self.DebugFormatInt32(self.distinct_infix_size_)) if self.has_entityfilter_: res+=prefix+"EntityFilter {\n" res+=self.entityfilter_.__str__(prefix + " ", printElemNumber) res+=prefix+"}\n" if self.has_plan_label_: res+=prefix+("plan_label: %s\n" % self.DebugFormatString(self.plan_label_)) return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kPrimaryScanGroup = 1 kPrimaryScanindex_name = 2 kPrimaryScanstart_key = 3 kPrimaryScanstart_inclusive = 4 kPrimaryScanend_key = 5 kPrimaryScanend_inclusive = 6 kPrimaryScanstart_postfix_value = 22 kPrimaryScanend_postfix_value = 23 kPrimaryScanend_unapplied_log_timestamp_us = 19 kMergeJoinScanGroup = 7 kMergeJoinScanindex_name = 8 kMergeJoinScanprefix_value = 9 kMergeJoinScanvalue_prefix = 20 kindex_def = 21 koffset = 10 klimit = 11 kkeys_only = 12 kproperty_name = 24 kdistinct_infix_size = 25 kEntityFilterGroup = 13 kEntityFilterdistinct = 14 kEntityFilterkind = 17 kEntityFilterancestor = 18 kplan_label = 26 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "PrimaryScan", 2: "index_name", 3: "start_key", 4: "start_inclusive", 5: "end_key", 6: "end_inclusive", 7: "MergeJoinScan", 8: "index_name", 9: "prefix_value", 10: "offset", 11: "limit", 12: "keys_only", 13: "EntityFilter", 14: 
"distinct", 17: "kind", 18: "ancestor", 19: "end_unapplied_log_timestamp_us", 20: "value_prefix", 21: "index_def", 22: "start_postfix_value", 23: "end_postfix_value", 24: "property_name", 25: "distinct_infix_size", 26: "plan_label", }, 26) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STARTGROUP, 2: ProtocolBuffer.Encoder.STRING, 3: ProtocolBuffer.Encoder.STRING, 4: ProtocolBuffer.Encoder.NUMERIC, 5: ProtocolBuffer.Encoder.STRING, 6: ProtocolBuffer.Encoder.NUMERIC, 7: ProtocolBuffer.Encoder.STARTGROUP, 8: ProtocolBuffer.Encoder.STRING, 9: ProtocolBuffer.Encoder.STRING, 10: ProtocolBuffer.Encoder.NUMERIC, 11: ProtocolBuffer.Encoder.NUMERIC, 12: ProtocolBuffer.Encoder.NUMERIC, 13: ProtocolBuffer.Encoder.STARTGROUP, 14: ProtocolBuffer.Encoder.NUMERIC, 17: ProtocolBuffer.Encoder.STRING, 18: ProtocolBuffer.Encoder.STRING, 19: ProtocolBuffer.Encoder.NUMERIC, 20: ProtocolBuffer.Encoder.NUMERIC, 21: ProtocolBuffer.Encoder.STRING, 22: ProtocolBuffer.Encoder.STRING, 23: ProtocolBuffer.Encoder.STRING, 24: ProtocolBuffer.Encoder.STRING, 25: ProtocolBuffer.Encoder.NUMERIC, 26: ProtocolBuffer.Encoder.STRING, }, 26, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledQuery' class CompiledCursor_PositionIndexValue(ProtocolBuffer.ProtocolMessage): has_property_ = 0 property_ = "" has_value_ = 0 def __init__(self, contents=None): self.value_ = PropertyValue() if contents is not None: self.MergeFromString(contents) def property(self): return self.property_ def set_property(self, x): self.has_property_ = 1 self.property_ = x def clear_property(self): if self.has_property_: self.has_property_ = 0 self.property_ = "" def has_property(self): return self.has_property_ def value(self): return self.value_ def mutable_value(self): self.has_value_ = 1; return self.value_ def clear_value(self):self.has_value_ = 0; self.value_.Clear() def has_value(self): return 
self.has_value_ def MergeFrom(self, x): assert x is not self if (x.has_property()): self.set_property(x.property()) if (x.has_value()): self.mutable_value().MergeFrom(x.value()) def Equals(self, x): if x is self: return 1 if self.has_property_ != x.has_property_: return 0 if self.has_property_ and self.property_ != x.property_: return 0 if self.has_value_ != x.has_value_: return 0 if self.has_value_ and self.value_ != x.value_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (not self.has_value_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: value not set.') elif not self.value_.IsInitialized(debug_strs): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_property_): n += 2 + self.lengthString(len(self.property_)) n += self.lengthString(self.value_.ByteSize()) return n + 2 def ByteSizePartial(self): n = 0 if (self.has_property_): n += 2 + self.lengthString(len(self.property_)) if (self.has_value_): n += 2 n += self.lengthString(self.value_.ByteSizePartial()) return n def Clear(self): self.clear_property() self.clear_value() def OutputUnchecked(self, out): if (self.has_property_): out.putVarInt32(242) out.putPrefixedString(self.property_) out.putVarInt32(250) out.putVarInt32(self.value_.ByteSize()) self.value_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_property_): out.putVarInt32(242) out.putPrefixedString(self.property_) if (self.has_value_): out.putVarInt32(250) out.putVarInt32(self.value_.ByteSizePartial()) self.value_.OutputPartial(out) def TryMerge(self, d): while 1: tt = d.getVarInt32() if tt == 236: break if tt == 242: self.set_property(d.getPrefixedString()) continue if tt == 250: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_value().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): 
res="" if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_)) if self.has_value_: res+=prefix+"value <\n" res+=self.value_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" return res class CompiledCursor_Position(ProtocolBuffer.ProtocolMessage): has_start_key_ = 0 start_key_ = "" has_key_ = 0 key_ = None has_start_inclusive_ = 0 start_inclusive_ = 1 has_before_ascending_ = 0 before_ascending_ = 0 def __init__(self, contents=None): self.indexvalue_ = [] self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def start_key(self): return self.start_key_ def set_start_key(self, x): self.has_start_key_ = 1 self.start_key_ = x def clear_start_key(self): if self.has_start_key_: self.has_start_key_ = 0 self.start_key_ = "" def has_start_key(self): return self.has_start_key_ def indexvalue_size(self): return len(self.indexvalue_) def indexvalue_list(self): return self.indexvalue_ def indexvalue(self, i): return self.indexvalue_[i] def mutable_indexvalue(self, i): return self.indexvalue_[i] def add_indexvalue(self): x = CompiledCursor_PositionIndexValue() self.indexvalue_.append(x) return x def clear_indexvalue(self): self.indexvalue_ = [] def key(self): if self.key_ is None: self.lazy_init_lock_.acquire() try: if self.key_ is None: self.key_ = Reference() finally: self.lazy_init_lock_.release() return self.key_ def mutable_key(self): self.has_key_ = 1; return self.key() def clear_key(self): if self.has_key_: self.has_key_ = 0; if self.key_ is not None: self.key_.Clear() def has_key(self): return self.has_key_ def start_inclusive(self): return self.start_inclusive_ def set_start_inclusive(self, x): self.has_start_inclusive_ = 1 self.start_inclusive_ = x def clear_start_inclusive(self): if self.has_start_inclusive_: self.has_start_inclusive_ = 0 self.start_inclusive_ = 1 def has_start_inclusive(self): return self.has_start_inclusive_ def before_ascending(self): return 
self.before_ascending_ def set_before_ascending(self, x): self.has_before_ascending_ = 1 self.before_ascending_ = x def clear_before_ascending(self): if self.has_before_ascending_: self.has_before_ascending_ = 0 self.before_ascending_ = 0 def has_before_ascending(self): return self.has_before_ascending_ def MergeFrom(self, x): assert x is not self if (x.has_start_key()): self.set_start_key(x.start_key()) for i in xrange(x.indexvalue_size()): self.add_indexvalue().CopyFrom(x.indexvalue(i)) if (x.has_key()): self.mutable_key().MergeFrom(x.key()) if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive()) if (x.has_before_ascending()): self.set_before_ascending(x.before_ascending()) def Equals(self, x): if x is self: return 1 if self.has_start_key_ != x.has_start_key_: return 0 if self.has_start_key_ and self.start_key_ != x.start_key_: return 0 if len(self.indexvalue_) != len(x.indexvalue_): return 0 for e1, e2 in zip(self.indexvalue_, x.indexvalue_): if e1 != e2: return 0 if self.has_key_ != x.has_key_: return 0 if self.has_key_ and self.key_ != x.key_: return 0 if self.has_start_inclusive_ != x.has_start_inclusive_: return 0 if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0 if self.has_before_ascending_ != x.has_before_ascending_: return 0 if self.has_before_ascending_ and self.before_ascending_ != x.before_ascending_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 for p in self.indexvalue_: if not p.IsInitialized(debug_strs): initialized=0 if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_)) n += 4 * len(self.indexvalue_) for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSize() if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSize()) if (self.has_start_inclusive_): n += 3 if (self.has_before_ascending_): n += 
3 return n def ByteSizePartial(self): n = 0 if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_)) n += 4 * len(self.indexvalue_) for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSizePartial() if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSizePartial()) if (self.has_start_inclusive_): n += 3 if (self.has_before_ascending_): n += 3 return n def Clear(self): self.clear_start_key() self.clear_indexvalue() self.clear_key() self.clear_start_inclusive() self.clear_before_ascending() def OutputUnchecked(self, out): if (self.has_start_key_): out.putVarInt32(218) out.putPrefixedString(self.start_key_) if (self.has_start_inclusive_): out.putVarInt32(224) out.putBoolean(self.start_inclusive_) for i in xrange(len(self.indexvalue_)): out.putVarInt32(235) self.indexvalue_[i].OutputUnchecked(out) out.putVarInt32(236) if (self.has_key_): out.putVarInt32(258) out.putVarInt32(self.key_.ByteSize()) self.key_.OutputUnchecked(out) if (self.has_before_ascending_): out.putVarInt32(264) out.putBoolean(self.before_ascending_) def OutputPartial(self, out): if (self.has_start_key_): out.putVarInt32(218) out.putPrefixedString(self.start_key_) if (self.has_start_inclusive_): out.putVarInt32(224) out.putBoolean(self.start_inclusive_) for i in xrange(len(self.indexvalue_)): out.putVarInt32(235) self.indexvalue_[i].OutputPartial(out) out.putVarInt32(236) if (self.has_key_): out.putVarInt32(258) out.putVarInt32(self.key_.ByteSizePartial()) self.key_.OutputPartial(out) if (self.has_before_ascending_): out.putVarInt32(264) out.putBoolean(self.before_ascending_) def TryMerge(self, d): while 1: tt = d.getVarInt32() if tt == 20: break if tt == 218: self.set_start_key(d.getPrefixedString()) continue if tt == 224: self.set_start_inclusive(d.getBoolean()) continue if tt == 235: self.add_indexvalue().TryMerge(d) continue if tt == 258: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) 
self.mutable_key().TryMerge(tmp) continue if tt == 264: self.set_before_ascending(d.getBoolean()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_)) cnt=0 for e in self.indexvalue_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("IndexValue%s {\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+"}\n" cnt+=1 if self.has_key_: res+=prefix+"key <\n" res+=self.key_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_)) if self.has_before_ascending_: res+=prefix+("before_ascending: %s\n" % self.DebugFormatBool(self.before_ascending_)) return res class CompiledCursor(ProtocolBuffer.ProtocolMessage): has_position_ = 0 position_ = None has_postfix_position_ = 0 postfix_position_ = None has_absolute_position_ = 0 absolute_position_ = None def __init__(self, contents=None): self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def position(self): if self.position_ is None: self.lazy_init_lock_.acquire() try: if self.position_ is None: self.position_ = CompiledCursor_Position() finally: self.lazy_init_lock_.release() return self.position_ def mutable_position(self): self.has_position_ = 1; return self.position() def clear_position(self): if self.has_position_: self.has_position_ = 0; if self.position_ is not None: self.position_.Clear() def has_position(self): return self.has_position_ def postfix_position(self): if self.postfix_position_ is None: self.lazy_init_lock_.acquire() try: if self.postfix_position_ is None: self.postfix_position_ = IndexPostfix() finally: self.lazy_init_lock_.release() return self.postfix_position_ def mutable_postfix_position(self): self.has_postfix_position_ = 1; return 
self.postfix_position() def clear_postfix_position(self): if self.has_postfix_position_: self.has_postfix_position_ = 0; if self.postfix_position_ is not None: self.postfix_position_.Clear() def has_postfix_position(self): return self.has_postfix_position_ def absolute_position(self): if self.absolute_position_ is None: self.lazy_init_lock_.acquire() try: if self.absolute_position_ is None: self.absolute_position_ = IndexPosition() finally: self.lazy_init_lock_.release() return self.absolute_position_ def mutable_absolute_position(self): self.has_absolute_position_ = 1; return self.absolute_position() def clear_absolute_position(self): if self.has_absolute_position_: self.has_absolute_position_ = 0; if self.absolute_position_ is not None: self.absolute_position_.Clear() def has_absolute_position(self): return self.has_absolute_position_ def MergeFrom(self, x): assert x is not self if (x.has_position()): self.mutable_position().MergeFrom(x.position()) if (x.has_postfix_position()): self.mutable_postfix_position().MergeFrom(x.postfix_position()) if (x.has_absolute_position()): self.mutable_absolute_position().MergeFrom(x.absolute_position()) def Equals(self, x): if x is self: return 1 if self.has_position_ != x.has_position_: return 0 if self.has_position_ and self.position_ != x.position_: return 0 if self.has_postfix_position_ != x.has_postfix_position_: return 0 if self.has_postfix_position_ and self.postfix_position_ != x.postfix_position_: return 0 if self.has_absolute_position_ != x.has_absolute_position_: return 0 if self.has_absolute_position_ and self.absolute_position_ != x.absolute_position_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_position_ and not self.position_.IsInitialized(debug_strs)): initialized = 0 if (self.has_postfix_position_ and not self.postfix_position_.IsInitialized(debug_strs)): initialized = 0 if (self.has_absolute_position_ and not self.absolute_position_.IsInitialized(debug_strs)): 
initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_position_): n += 2 + self.position_.ByteSize() if (self.has_postfix_position_): n += 1 + self.lengthString(self.postfix_position_.ByteSize()) if (self.has_absolute_position_): n += 1 + self.lengthString(self.absolute_position_.ByteSize()) return n def ByteSizePartial(self): n = 0 if (self.has_position_): n += 2 + self.position_.ByteSizePartial() if (self.has_postfix_position_): n += 1 + self.lengthString(self.postfix_position_.ByteSizePartial()) if (self.has_absolute_position_): n += 1 + self.lengthString(self.absolute_position_.ByteSizePartial()) return n def Clear(self): self.clear_position() self.clear_postfix_position() self.clear_absolute_position() def OutputUnchecked(self, out): if (self.has_postfix_position_): out.putVarInt32(10) out.putVarInt32(self.postfix_position_.ByteSize()) self.postfix_position_.OutputUnchecked(out) if (self.has_position_): out.putVarInt32(19) self.position_.OutputUnchecked(out) out.putVarInt32(20) if (self.has_absolute_position_): out.putVarInt32(26) out.putVarInt32(self.absolute_position_.ByteSize()) self.absolute_position_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_postfix_position_): out.putVarInt32(10) out.putVarInt32(self.postfix_position_.ByteSizePartial()) self.postfix_position_.OutputPartial(out) if (self.has_position_): out.putVarInt32(19) self.position_.OutputPartial(out) out.putVarInt32(20) if (self.has_absolute_position_): out.putVarInt32(26) out.putVarInt32(self.absolute_position_.ByteSizePartial()) self.absolute_position_.OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_postfix_position().TryMerge(tmp) continue if tt == 19: self.mutable_position().TryMerge(d) continue if tt == 26: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), 
d.pos() + length) d.skip(length) self.mutable_absolute_position().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_position_: res+=prefix+"Position {\n" res+=self.position_.__str__(prefix + " ", printElemNumber) res+=prefix+"}\n" if self.has_postfix_position_: res+=prefix+"postfix_position <\n" res+=self.postfix_position_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_absolute_position_: res+=prefix+"absolute_position <\n" res+=self.absolute_position_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kPositionGroup = 2 kPositionstart_key = 27 kPositionIndexValueGroup = 29 kPositionIndexValueproperty = 30 kPositionIndexValuevalue = 31 kPositionkey = 32 kPositionstart_inclusive = 28 kPositionbefore_ascending = 33 kpostfix_position = 1 kabsolute_position = 3 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "postfix_position", 2: "Position", 3: "absolute_position", 27: "start_key", 28: "start_inclusive", 29: "IndexValue", 30: "property", 31: "value", 32: "key", 33: "before_ascending", }, 33) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 2: ProtocolBuffer.Encoder.STARTGROUP, 3: ProtocolBuffer.Encoder.STRING, 27: ProtocolBuffer.Encoder.STRING, 28: ProtocolBuffer.Encoder.NUMERIC, 29: ProtocolBuffer.Encoder.STARTGROUP, 30: ProtocolBuffer.Encoder.STRING, 31: ProtocolBuffer.Encoder.STRING, 32: ProtocolBuffer.Encoder.STRING, 33: ProtocolBuffer.Encoder.NUMERIC, }, 33, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledCursor' class Cursor(ProtocolBuffer.ProtocolMessage): has_cursor_ = 0 cursor_ = 0 has_app_ = 0 app_ = "" def __init__(self, contents=None): if 
contents is not None: self.MergeFromString(contents) def cursor(self): return self.cursor_ def set_cursor(self, x): self.has_cursor_ = 1 self.cursor_ = x def clear_cursor(self): if self.has_cursor_: self.has_cursor_ = 0 self.cursor_ = 0 def has_cursor(self): return self.has_cursor_ def app(self): return self.app_ def set_app(self, x): self.has_app_ = 1 self.app_ = x def clear_app(self): if self.has_app_: self.has_app_ = 0 self.app_ = "" def has_app(self): return self.has_app_ def MergeFrom(self, x): assert x is not self if (x.has_cursor()): self.set_cursor(x.cursor()) if (x.has_app()): self.set_app(x.app()) def Equals(self, x): if x is self: return 1 if self.has_cursor_ != x.has_cursor_: return 0 if self.has_cursor_ and self.cursor_ != x.cursor_: return 0 if self.has_app_ != x.has_app_: return 0 if self.has_app_ and self.app_ != x.app_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (not self.has_cursor_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: cursor not set.') return initialized def ByteSize(self): n = 0 if (self.has_app_): n += 1 + self.lengthString(len(self.app_)) return n + 9 def ByteSizePartial(self): n = 0 if (self.has_cursor_): n += 9 if (self.has_app_): n += 1 + self.lengthString(len(self.app_)) return n def Clear(self): self.clear_cursor() self.clear_app() def OutputUnchecked(self, out): out.putVarInt32(9) out.put64(self.cursor_) if (self.has_app_): out.putVarInt32(18) out.putPrefixedString(self.app_) def OutputPartial(self, out): if (self.has_cursor_): out.putVarInt32(9) out.put64(self.cursor_) if (self.has_app_): out.putVarInt32(18) out.putPrefixedString(self.app_) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 9: self.set_cursor(d.get64()) continue if tt == 18: self.set_app(d.getPrefixedString()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_cursor_: 
res+=prefix+("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_)) if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_)) return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kcursor = 1 kapp = 2 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "cursor", 2: "app", }, 2) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.DOUBLE, 2: ProtocolBuffer.Encoder.STRING, }, 2, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cursor' class Error(ProtocolBuffer.ProtocolMessage): BAD_REQUEST = 1 CONCURRENT_TRANSACTION = 2 INTERNAL_ERROR = 3 NEED_INDEX = 4 TIMEOUT = 5 PERMISSION_DENIED = 6 BIGTABLE_ERROR = 7 COMMITTED_BUT_STILL_APPLYING = 8 CAPABILITY_DISABLED = 9 TRY_ALTERNATE_BACKEND = 10 SAFE_TIME_TOO_OLD = 11 _ErrorCode_NAMES = { 1: "BAD_REQUEST", 2: "CONCURRENT_TRANSACTION", 3: "INTERNAL_ERROR", 4: "NEED_INDEX", 5: "TIMEOUT", 6: "PERMISSION_DENIED", 7: "BIGTABLE_ERROR", 8: "COMMITTED_BUT_STILL_APPLYING", 9: "CAPABILITY_DISABLED", 10: "TRY_ALTERNATE_BACKEND", 11: "SAFE_TIME_TOO_OLD", } def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "") ErrorCode_Name = classmethod(ErrorCode_Name) def __init__(self, contents=None): pass if contents is not None: self.MergeFromString(contents) def MergeFrom(self, x): assert x is not self def Equals(self, x): if x is self: return 1 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 return initialized def ByteSize(self): n = 0 return n def ByteSizePartial(self): n = 0 return n def Clear(self): pass def OutputUnchecked(self, out): pass def OutputPartial(self, out): pass def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" return res def 
_BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", }, 0) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, }, 0, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Error' class Cost_CommitCost(ProtocolBuffer.ProtocolMessage): has_requested_entity_puts_ = 0 requested_entity_puts_ = 0 has_requested_entity_deletes_ = 0 requested_entity_deletes_ = 0 def __init__(self, contents=None): if contents is not None: self.MergeFromString(contents) def requested_entity_puts(self): return self.requested_entity_puts_ def set_requested_entity_puts(self, x): self.has_requested_entity_puts_ = 1 self.requested_entity_puts_ = x def clear_requested_entity_puts(self): if self.has_requested_entity_puts_: self.has_requested_entity_puts_ = 0 self.requested_entity_puts_ = 0 def has_requested_entity_puts(self): return self.has_requested_entity_puts_ def requested_entity_deletes(self): return self.requested_entity_deletes_ def set_requested_entity_deletes(self, x): self.has_requested_entity_deletes_ = 1 self.requested_entity_deletes_ = x def clear_requested_entity_deletes(self): if self.has_requested_entity_deletes_: self.has_requested_entity_deletes_ = 0 self.requested_entity_deletes_ = 0 def has_requested_entity_deletes(self): return self.has_requested_entity_deletes_ def MergeFrom(self, x): assert x is not self if (x.has_requested_entity_puts()): self.set_requested_entity_puts(x.requested_entity_puts()) if (x.has_requested_entity_deletes()): self.set_requested_entity_deletes(x.requested_entity_deletes()) def Equals(self, x): if x is self: return 1 if self.has_requested_entity_puts_ != x.has_requested_entity_puts_: return 0 if self.has_requested_entity_puts_ and self.requested_entity_puts_ != x.requested_entity_puts_: return 0 if self.has_requested_entity_deletes_ != 
x.has_requested_entity_deletes_: return 0 if self.has_requested_entity_deletes_ and self.requested_entity_deletes_ != x.requested_entity_deletes_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 return initialized def ByteSize(self): n = 0 if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_) if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_) return n def ByteSizePartial(self): n = 0 if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_) if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_) return n def Clear(self): self.clear_requested_entity_puts() self.clear_requested_entity_deletes() def OutputUnchecked(self, out): if (self.has_requested_entity_puts_): out.putVarInt32(48) out.putVarInt32(self.requested_entity_puts_) if (self.has_requested_entity_deletes_): out.putVarInt32(56) out.putVarInt32(self.requested_entity_deletes_) def OutputPartial(self, out): if (self.has_requested_entity_puts_): out.putVarInt32(48) out.putVarInt32(self.requested_entity_puts_) if (self.has_requested_entity_deletes_): out.putVarInt32(56) out.putVarInt32(self.requested_entity_deletes_) def TryMerge(self, d): while 1: tt = d.getVarInt32() if tt == 44: break if tt == 48: self.set_requested_entity_puts(d.getVarInt32()) continue if tt == 56: self.set_requested_entity_deletes(d.getVarInt32()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_requested_entity_puts_: res+=prefix+("requested_entity_puts: %s\n" % self.DebugFormatInt32(self.requested_entity_puts_)) if self.has_requested_entity_deletes_: res+=prefix+("requested_entity_deletes: %s\n" % self.DebugFormatInt32(self.requested_entity_deletes_)) return res class Cost(ProtocolBuffer.ProtocolMessage): has_index_writes_ = 0 
index_writes_ = 0 has_index_write_bytes_ = 0 index_write_bytes_ = 0 has_entity_writes_ = 0 entity_writes_ = 0 has_entity_write_bytes_ = 0 entity_write_bytes_ = 0 has_commitcost_ = 0 commitcost_ = None has_approximate_storage_delta_ = 0 approximate_storage_delta_ = 0 has_id_sequence_updates_ = 0 id_sequence_updates_ = 0 def __init__(self, contents=None): self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def index_writes(self): return self.index_writes_ def set_index_writes(self, x): self.has_index_writes_ = 1 self.index_writes_ = x def clear_index_writes(self): if self.has_index_writes_: self.has_index_writes_ = 0 self.index_writes_ = 0 def has_index_writes(self): return self.has_index_writes_ def index_write_bytes(self): return self.index_write_bytes_ def set_index_write_bytes(self, x): self.has_index_write_bytes_ = 1 self.index_write_bytes_ = x def clear_index_write_bytes(self): if self.has_index_write_bytes_: self.has_index_write_bytes_ = 0 self.index_write_bytes_ = 0 def has_index_write_bytes(self): return self.has_index_write_bytes_ def entity_writes(self): return self.entity_writes_ def set_entity_writes(self, x): self.has_entity_writes_ = 1 self.entity_writes_ = x def clear_entity_writes(self): if self.has_entity_writes_: self.has_entity_writes_ = 0 self.entity_writes_ = 0 def has_entity_writes(self): return self.has_entity_writes_ def entity_write_bytes(self): return self.entity_write_bytes_ def set_entity_write_bytes(self, x): self.has_entity_write_bytes_ = 1 self.entity_write_bytes_ = x def clear_entity_write_bytes(self): if self.has_entity_write_bytes_: self.has_entity_write_bytes_ = 0 self.entity_write_bytes_ = 0 def has_entity_write_bytes(self): return self.has_entity_write_bytes_ def commitcost(self): if self.commitcost_ is None: self.lazy_init_lock_.acquire() try: if self.commitcost_ is None: self.commitcost_ = Cost_CommitCost() finally: self.lazy_init_lock_.release() return self.commitcost_ def 
mutable_commitcost(self): self.has_commitcost_ = 1; return self.commitcost() def clear_commitcost(self): if self.has_commitcost_: self.has_commitcost_ = 0; if self.commitcost_ is not None: self.commitcost_.Clear() def has_commitcost(self): return self.has_commitcost_ def approximate_storage_delta(self): return self.approximate_storage_delta_ def set_approximate_storage_delta(self, x): self.has_approximate_storage_delta_ = 1 self.approximate_storage_delta_ = x def clear_approximate_storage_delta(self): if self.has_approximate_storage_delta_: self.has_approximate_storage_delta_ = 0 self.approximate_storage_delta_ = 0 def has_approximate_storage_delta(self): return self.has_approximate_storage_delta_ def id_sequence_updates(self): return self.id_sequence_updates_ def set_id_sequence_updates(self, x): self.has_id_sequence_updates_ = 1 self.id_sequence_updates_ = x def clear_id_sequence_updates(self): if self.has_id_sequence_updates_: self.has_id_sequence_updates_ = 0 self.id_sequence_updates_ = 0 def has_id_sequence_updates(self): return self.has_id_sequence_updates_ def MergeFrom(self, x): assert x is not self if (x.has_index_writes()): self.set_index_writes(x.index_writes()) if (x.has_index_write_bytes()): self.set_index_write_bytes(x.index_write_bytes()) if (x.has_entity_writes()): self.set_entity_writes(x.entity_writes()) if (x.has_entity_write_bytes()): self.set_entity_write_bytes(x.entity_write_bytes()) if (x.has_commitcost()): self.mutable_commitcost().MergeFrom(x.commitcost()) if (x.has_approximate_storage_delta()): self.set_approximate_storage_delta(x.approximate_storage_delta()) if (x.has_id_sequence_updates()): self.set_id_sequence_updates(x.id_sequence_updates()) def Equals(self, x): if x is self: return 1 if self.has_index_writes_ != x.has_index_writes_: return 0 if self.has_index_writes_ and self.index_writes_ != x.index_writes_: return 0 if self.has_index_write_bytes_ != x.has_index_write_bytes_: return 0 if self.has_index_write_bytes_ and 
self.index_write_bytes_ != x.index_write_bytes_: return 0 if self.has_entity_writes_ != x.has_entity_writes_: return 0 if self.has_entity_writes_ and self.entity_writes_ != x.entity_writes_: return 0 if self.has_entity_write_bytes_ != x.has_entity_write_bytes_: return 0 if self.has_entity_write_bytes_ and self.entity_write_bytes_ != x.entity_write_bytes_: return 0 if self.has_commitcost_ != x.has_commitcost_: return 0 if self.has_commitcost_ and self.commitcost_ != x.commitcost_: return 0 if self.has_approximate_storage_delta_ != x.has_approximate_storage_delta_: return 0 if self.has_approximate_storage_delta_ and self.approximate_storage_delta_ != x.approximate_storage_delta_: return 0 if self.has_id_sequence_updates_ != x.has_id_sequence_updates_: return 0 if self.has_id_sequence_updates_ and self.id_sequence_updates_ != x.id_sequence_updates_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_commitcost_ and not self.commitcost_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_) if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_) if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_) if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_) if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSize() if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_) if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_) return n def ByteSizePartial(self): n = 0 if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_) if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_) if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_) if (self.has_entity_write_bytes_): n += 1 
+ self.lengthVarInt64(self.entity_write_bytes_) if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSizePartial() if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_) if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_) return n def Clear(self): self.clear_index_writes() self.clear_index_write_bytes() self.clear_entity_writes() self.clear_entity_write_bytes() self.clear_commitcost() self.clear_approximate_storage_delta() self.clear_id_sequence_updates() def OutputUnchecked(self, out): if (self.has_index_writes_): out.putVarInt32(8) out.putVarInt32(self.index_writes_) if (self.has_index_write_bytes_): out.putVarInt32(16) out.putVarInt32(self.index_write_bytes_) if (self.has_entity_writes_): out.putVarInt32(24) out.putVarInt32(self.entity_writes_) if (self.has_entity_write_bytes_): out.putVarInt32(32) out.putVarInt32(self.entity_write_bytes_) if (self.has_commitcost_): out.putVarInt32(43) self.commitcost_.OutputUnchecked(out) out.putVarInt32(44) if (self.has_approximate_storage_delta_): out.putVarInt32(64) out.putVarInt32(self.approximate_storage_delta_) if (self.has_id_sequence_updates_): out.putVarInt32(72) out.putVarInt32(self.id_sequence_updates_) def OutputPartial(self, out): if (self.has_index_writes_): out.putVarInt32(8) out.putVarInt32(self.index_writes_) if (self.has_index_write_bytes_): out.putVarInt32(16) out.putVarInt32(self.index_write_bytes_) if (self.has_entity_writes_): out.putVarInt32(24) out.putVarInt32(self.entity_writes_) if (self.has_entity_write_bytes_): out.putVarInt32(32) out.putVarInt32(self.entity_write_bytes_) if (self.has_commitcost_): out.putVarInt32(43) self.commitcost_.OutputPartial(out) out.putVarInt32(44) if (self.has_approximate_storage_delta_): out.putVarInt32(64) out.putVarInt32(self.approximate_storage_delta_) if (self.has_id_sequence_updates_): out.putVarInt32(72) out.putVarInt32(self.id_sequence_updates_) def TryMerge(self, d): while 
d.avail() > 0: tt = d.getVarInt32() if tt == 8: self.set_index_writes(d.getVarInt32()) continue if tt == 16: self.set_index_write_bytes(d.getVarInt32()) continue if tt == 24: self.set_entity_writes(d.getVarInt32()) continue if tt == 32: self.set_entity_write_bytes(d.getVarInt32()) continue if tt == 43: self.mutable_commitcost().TryMerge(d) continue if tt == 64: self.set_approximate_storage_delta(d.getVarInt32()) continue if tt == 72: self.set_id_sequence_updates(d.getVarInt32()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_index_writes_: res+=prefix+("index_writes: %s\n" % self.DebugFormatInt32(self.index_writes_)) if self.has_index_write_bytes_: res+=prefix+("index_write_bytes: %s\n" % self.DebugFormatInt32(self.index_write_bytes_)) if self.has_entity_writes_: res+=prefix+("entity_writes: %s\n" % self.DebugFormatInt32(self.entity_writes_)) if self.has_entity_write_bytes_: res+=prefix+("entity_write_bytes: %s\n" % self.DebugFormatInt32(self.entity_write_bytes_)) if self.has_commitcost_: res+=prefix+"CommitCost {\n" res+=self.commitcost_.__str__(prefix + " ", printElemNumber) res+=prefix+"}\n" if self.has_approximate_storage_delta_: res+=prefix+("approximate_storage_delta: %s\n" % self.DebugFormatInt32(self.approximate_storage_delta_)) if self.has_id_sequence_updates_: res+=prefix+("id_sequence_updates: %s\n" % self.DebugFormatInt32(self.id_sequence_updates_)) return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kindex_writes = 1 kindex_write_bytes = 2 kentity_writes = 3 kentity_write_bytes = 4 kCommitCostGroup = 5 kCommitCostrequested_entity_puts = 6 kCommitCostrequested_entity_deletes = 7 kapproximate_storage_delta = 8 kid_sequence_updates = 9 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "index_writes", 2: "index_write_bytes", 3: "entity_writes", 4: "entity_write_bytes", 
5: "CommitCost", 6: "requested_entity_puts", 7: "requested_entity_deletes", 8: "approximate_storage_delta", 9: "id_sequence_updates", }, 9) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.NUMERIC, 2: ProtocolBuffer.Encoder.NUMERIC, 3: ProtocolBuffer.Encoder.NUMERIC, 4: ProtocolBuffer.Encoder.NUMERIC, 5: ProtocolBuffer.Encoder.STARTGROUP, 6: ProtocolBuffer.Encoder.NUMERIC, 7: ProtocolBuffer.Encoder.NUMERIC, 8: ProtocolBuffer.Encoder.NUMERIC, 9: ProtocolBuffer.Encoder.NUMERIC, }, 9, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cost' class GetRequest(ProtocolBuffer.ProtocolMessage): has_header_ = 0 header_ = None has_transaction_ = 0 transaction_ = None has_failover_ms_ = 0 failover_ms_ = 0 has_strong_ = 0 strong_ = 0 has_allow_deferred_ = 0 allow_deferred_ = 0 def __init__(self, contents=None): self.key_ = [] self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def header(self): if self.header_ is None: self.lazy_init_lock_.acquire() try: if self.header_ is None: self.header_ = InternalHeader() finally: self.lazy_init_lock_.release() return self.header_ def mutable_header(self): self.has_header_ = 1; return self.header() def clear_header(self): if self.has_header_: self.has_header_ = 0; if self.header_ is not None: self.header_.Clear() def has_header(self): return self.has_header_ def key_size(self): return len(self.key_) def key_list(self): return self.key_ def key(self, i): return self.key_[i] def mutable_key(self, i): return self.key_[i] def add_key(self): x = Reference() self.key_.append(x) return x def clear_key(self): self.key_ = [] def transaction(self): if self.transaction_ is None: self.lazy_init_lock_.acquire() try: if self.transaction_ is None: self.transaction_ = Transaction() finally: self.lazy_init_lock_.release() return self.transaction_ def mutable_transaction(self): 
self.has_transaction_ = 1; return self.transaction() def clear_transaction(self): if self.has_transaction_: self.has_transaction_ = 0; if self.transaction_ is not None: self.transaction_.Clear() def has_transaction(self): return self.has_transaction_ def failover_ms(self): return self.failover_ms_ def set_failover_ms(self, x): self.has_failover_ms_ = 1 self.failover_ms_ = x def clear_failover_ms(self): if self.has_failover_ms_: self.has_failover_ms_ = 0 self.failover_ms_ = 0 def has_failover_ms(self): return self.has_failover_ms_ def strong(self): return self.strong_ def set_strong(self, x): self.has_strong_ = 1 self.strong_ = x def clear_strong(self): if self.has_strong_: self.has_strong_ = 0 self.strong_ = 0 def has_strong(self): return self.has_strong_ def allow_deferred(self): return self.allow_deferred_ def set_allow_deferred(self, x): self.has_allow_deferred_ = 1 self.allow_deferred_ = x def clear_allow_deferred(self): if self.has_allow_deferred_: self.has_allow_deferred_ = 0 self.allow_deferred_ = 0 def has_allow_deferred(self): return self.has_allow_deferred_ def MergeFrom(self, x): assert x is not self if (x.has_header()): self.mutable_header().MergeFrom(x.header()) for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i)) if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction()) if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms()) if (x.has_strong()): self.set_strong(x.strong()) if (x.has_allow_deferred()): self.set_allow_deferred(x.allow_deferred()) def Equals(self, x): if x is self: return 1 if self.has_header_ != x.has_header_: return 0 if self.has_header_ and self.header_ != x.header_: return 0 if len(self.key_) != len(x.key_): return 0 for e1, e2 in zip(self.key_, x.key_): if e1 != e2: return 0 if self.has_transaction_ != x.has_transaction_: return 0 if self.has_transaction_ and self.transaction_ != x.transaction_: return 0 if self.has_failover_ms_ != x.has_failover_ms_: return 0 if self.has_failover_ms_ and 
self.failover_ms_ != x.failover_ms_: return 0 if self.has_strong_ != x.has_strong_: return 0 if self.has_strong_ and self.strong_ != x.strong_: return 0 if self.has_allow_deferred_ != x.has_allow_deferred_: return 0 if self.has_allow_deferred_ and self.allow_deferred_ != x.allow_deferred_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0 for p in self.key_: if not p.IsInitialized(debug_strs): initialized=0 if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize()) n += 1 * len(self.key_) for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize()) if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize()) if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_) if (self.has_strong_): n += 2 if (self.has_allow_deferred_): n += 2 return n def ByteSizePartial(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial()) n += 1 * len(self.key_) for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial()) if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial()) if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_) if (self.has_strong_): n += 2 if (self.has_allow_deferred_): n += 2 return n def Clear(self): self.clear_header() self.clear_key() self.clear_transaction() self.clear_failover_ms() self.clear_strong() self.clear_allow_deferred() def OutputUnchecked(self, out): for i in xrange(len(self.key_)): out.putVarInt32(10) out.putVarInt32(self.key_[i].ByteSize()) self.key_[i].OutputUnchecked(out) if (self.has_transaction_): out.putVarInt32(18) out.putVarInt32(self.transaction_.ByteSize()) self.transaction_.OutputUnchecked(out) 
if (self.has_failover_ms_): out.putVarInt32(24) out.putVarInt64(self.failover_ms_) if (self.has_strong_): out.putVarInt32(32) out.putBoolean(self.strong_) if (self.has_allow_deferred_): out.putVarInt32(40) out.putBoolean(self.allow_deferred_) if (self.has_header_): out.putVarInt32(50) out.putVarInt32(self.header_.ByteSize()) self.header_.OutputUnchecked(out) def OutputPartial(self, out): for i in xrange(len(self.key_)): out.putVarInt32(10) out.putVarInt32(self.key_[i].ByteSizePartial()) self.key_[i].OutputPartial(out) if (self.has_transaction_): out.putVarInt32(18) out.putVarInt32(self.transaction_.ByteSizePartial()) self.transaction_.OutputPartial(out) if (self.has_failover_ms_): out.putVarInt32(24) out.putVarInt64(self.failover_ms_) if (self.has_strong_): out.putVarInt32(32) out.putBoolean(self.strong_) if (self.has_allow_deferred_): out.putVarInt32(40) out.putBoolean(self.allow_deferred_) if (self.has_header_): out.putVarInt32(50) out.putVarInt32(self.header_.ByteSizePartial()) self.header_.OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_key().TryMerge(tmp) continue if tt == 18: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_transaction().TryMerge(tmp) continue if tt == 24: self.set_failover_ms(d.getVarInt64()) continue if tt == 32: self.set_strong(d.getBoolean()) continue if tt == 40: self.set_allow_deferred(d.getBoolean()) continue if tt == 50: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_header().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_header_: res+=prefix+"header <\n" res+=self.header_.__str__(prefix + " ", printElemNumber) 
res+=prefix+">\n" cnt=0 for e in self.key_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("key%s <\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt+=1 if self.has_transaction_: res+=prefix+"transaction <\n" res+=self.transaction_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_)) if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_)) if self.has_allow_deferred_: res+=prefix+("allow_deferred: %s\n" % self.DebugFormatBool(self.allow_deferred_)) return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kheader = 6 kkey = 1 ktransaction = 2 kfailover_ms = 3 kstrong = 4 kallow_deferred = 5 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "key", 2: "transaction", 3: "failover_ms", 4: "strong", 5: "allow_deferred", 6: "header", }, 6) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 2: ProtocolBuffer.Encoder.STRING, 3: ProtocolBuffer.Encoder.NUMERIC, 4: ProtocolBuffer.Encoder.NUMERIC, 5: ProtocolBuffer.Encoder.NUMERIC, 6: ProtocolBuffer.Encoder.STRING, }, 6, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetRequest' class GetResponse_Entity(ProtocolBuffer.ProtocolMessage): has_entity_ = 0 entity_ = None has_key_ = 0 key_ = None has_version_ = 0 version_ = 0 def __init__(self, contents=None): self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def entity(self): if self.entity_ is None: self.lazy_init_lock_.acquire() try: if self.entity_ is None: self.entity_ = EntityProto() finally: self.lazy_init_lock_.release() return self.entity_ def mutable_entity(self): self.has_entity_ = 1; return self.entity() def clear_entity(self): if 
self.has_entity_: self.has_entity_ = 0; if self.entity_ is not None: self.entity_.Clear() def has_entity(self): return self.has_entity_ def key(self): if self.key_ is None: self.lazy_init_lock_.acquire() try: if self.key_ is None: self.key_ = Reference() finally: self.lazy_init_lock_.release() return self.key_ def mutable_key(self): self.has_key_ = 1; return self.key() def clear_key(self): if self.has_key_: self.has_key_ = 0; if self.key_ is not None: self.key_.Clear() def has_key(self): return self.has_key_ def version(self): return self.version_ def set_version(self, x): self.has_version_ = 1 self.version_ = x def clear_version(self): if self.has_version_: self.has_version_ = 0 self.version_ = 0 def has_version(self): return self.has_version_ def MergeFrom(self, x): assert x is not self if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity()) if (x.has_key()): self.mutable_key().MergeFrom(x.key()) if (x.has_version()): self.set_version(x.version()) def Equals(self, x): if x is self: return 1 if self.has_entity_ != x.has_entity_: return 0 if self.has_entity_ and self.entity_ != x.entity_: return 0 if self.has_key_ != x.has_key_: return 0 if self.has_key_ and self.key_ != x.key_: return 0 if self.has_version_ != x.has_version_: return 0 if self.has_version_ and self.version_ != x.version_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0 if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize()) if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize()) if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_) return n def ByteSizePartial(self): n = 0 if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSizePartial()) if (self.has_key_): n += 1 + 
class GetResponse(ProtocolBuffer.ProtocolMessage):
  """Datastore Get response (apphosting_datastore_v3.GetResponse).

  Fields: repeated group Entity (field 1, see GetResponse_Entity),
  repeated deferred keys (Reference, field 5), and in_order (bool,
  field 6, default true).  Generated protocol-buffer code — do not
  hand-edit logic; changes will be lost on regeneration.
  """

  has_in_order_ = 0
  in_order_ = 1  # proto default is true

  def __init__(self, contents=None):
    self.entity_ = []
    self.deferred_ = []
    if contents is not None: self.MergeFromString(contents)

  def entity_size(self): return len(self.entity_)
  def entity_list(self): return self.entity_

  def entity(self, i): return self.entity_[i]
  def mutable_entity(self, i): return self.entity_[i]

  def add_entity(self):
    # Append a fresh element and return it for in-place population.
    x = GetResponse_Entity()
    self.entity_.append(x)
    return x

  def clear_entity(self):
    self.entity_ = []
  def deferred_size(self): return len(self.deferred_)
  def deferred_list(self): return self.deferred_

  def deferred(self, i): return self.deferred_[i]
  def mutable_deferred(self, i): return self.deferred_[i]

  def add_deferred(self):
    x = Reference()
    self.deferred_.append(x)
    return x

  def clear_deferred(self):
    self.deferred_ = []
  def in_order(self): return self.in_order_

  def set_in_order(self, x):
    self.has_in_order_ = 1
    self.in_order_ = x

  def clear_in_order(self):
    # Reset to the proto default (true).
    if self.has_in_order_:
      self.has_in_order_ = 0
      self.in_order_ = 1

  def has_in_order(self): return self.has_in_order_

  def MergeFrom(self, x):
    """Append x's repeated elements and copy its set scalar fields."""
    assert x is not self
    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
    for i in xrange(x.deferred_size()): self.add_deferred().CopyFrom(x.deferred(i))
    if (x.has_in_order()): self.set_in_order(x.in_order())

  def Equals(self, x):
    if x is self: return 1
    if len(self.entity_) != len(x.entity_): return 0
    for e1, e2 in zip(self.entity_, x.entity_):
      if e1 != e2: return 0
    if len(self.deferred_) != len(x.deferred_): return 0
    for e1, e2 in zip(self.deferred_, x.deferred_):
      if e1 != e2: return 0
    if self.has_in_order_ != x.has_in_order_: return 0
    if self.has_in_order_ and self.in_order_ != x.in_order_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.entity_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.deferred_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # Each entity is a group: START_GROUP + END_GROUP tags cost 2 bytes
    # per element on top of the element's own size.
    n = 0
    n += 2 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSize()
    n += 1 * len(self.deferred_)
    for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSize())
    if (self.has_in_order_): n += 2
    return n

  def ByteSizePartial(self):
    # Like ByteSize but tolerates uninitialized sub-messages.
    n = 0
    n += 2 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSizePartial()
    n += 1 * len(self.deferred_)
    for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSizePartial())
    if (self.has_in_order_): n += 2
    return n

  def Clear(self):
    self.clear_entity()
    self.clear_deferred()
    self.clear_in_order()

  def OutputUnchecked(self, out):
    # Wire tags: 11/12 = START/END of group 1 (Entity),
    # 42 = field 5 (deferred), 48 = field 6 (in_order).
    for i in xrange(len(self.entity_)):
      out.putVarInt32(11)
      self.entity_[i].OutputUnchecked(out)
      out.putVarInt32(12)
    for i in xrange(len(self.deferred_)):
      out.putVarInt32(42)
      out.putVarInt32(self.deferred_[i].ByteSize())
      self.deferred_[i].OutputUnchecked(out)
    if (self.has_in_order_):
      out.putVarInt32(48)
      out.putBoolean(self.in_order_)

  def OutputPartial(self, out):
    for i in xrange(len(self.entity_)):
      out.putVarInt32(11)
      self.entity_[i].OutputPartial(out)
      out.putVarInt32(12)
    for i in xrange(len(self.deferred_)):
      out.putVarInt32(42)
      out.putVarInt32(self.deferred_[i].ByteSizePartial())
      self.deferred_[i].OutputPartial(out)
    if (self.has_in_order_):
      out.putVarInt32(48)
      out.putBoolean(self.in_order_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        # Group element: the nested TryMerge consumes up to its own
        # END_GROUP tag (12) directly from this decoder.
        self.add_entity().TryMerge(d)
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_deferred().TryMerge(tmp)
        continue
      if tt == 48:
        self.set_in_order(d.getBoolean())
        continue

      # Tag 0 is never valid; any other unknown field is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Render a debug text dump; groups use {}, sub-messages use <>."""
    res=""
    cnt=0
    for e in self.entity_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Entity%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    cnt=0
    for e in self.deferred_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("deferred%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_in_order_: res+=prefix+("in_order: %s\n" % self.DebugFormatBool(self.in_order_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple indexed by tag number, filled from a sparse dict.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants (k<field> = tag number).
  kEntityGroup = 1
  kEntityentity = 2
  kEntitykey = 4
  kEntityversion = 3
  kdeferred = 5
  kin_order = 6

  # Debug name per tag number.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "Entity",
    2: "entity",
    3: "version",
    4: "key",
    5: "deferred",
    6: "in_order",
  }, 6)

  # Wire type per tag number.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.NUMERIC,
  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetResponse'
class PutRequest(ProtocolBuffer.ProtocolMessage):
  """Datastore Put request (apphosting_datastore_v3.PutRequest).

  Fields: repeated entity (EntityProto, 1), transaction (Transaction, 2),
  repeated composite_index (CompositeIndex, 3), trusted (bool, 4),
  force (bool, 7), mark_changes (bool, 8), repeated snapshot
  (Snapshot, 9), auto_id_policy (enum, 10), header (InternalHeader, 11).
  Generated protocol-buffer code — do not hand-edit logic; changes will
  be lost on regeneration.
  """

  # AutoIdPolicy enum values.
  CURRENT = 0
  SEQUENTIAL = 1

  _AutoIdPolicy_NAMES = {
    0: "CURRENT",
    1: "SEQUENTIAL",
  }

  def AutoIdPolicy_Name(cls, x): return cls._AutoIdPolicy_NAMES.get(x, "")
  AutoIdPolicy_Name = classmethod(AutoIdPolicy_Name)

  # Presence flag + storage for each optional field.
  has_header_ = 0
  header_ = None
  has_transaction_ = 0
  transaction_ = None
  has_trusted_ = 0
  trusted_ = 0
  has_force_ = 0
  force_ = 0
  has_mark_changes_ = 0
  mark_changes_ = 0
  has_auto_id_policy_ = 0
  auto_id_policy_ = 0

  def __init__(self, contents=None):
    self.entity_ = []
    self.composite_index_ = []
    self.snapshot_ = []
    # Lock serializes the lazy construction of sub-message fields.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def header(self):
    # Lazily build the InternalHeader under the lock (double-checked).
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_

  def mutable_header(self): self.has_header_ = 1; return self.header()

  def clear_header(self):
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()

  def has_header(self): return self.has_header_

  def entity_size(self): return len(self.entity_)
  def entity_list(self): return self.entity_

  def entity(self, i): return self.entity_[i]
  def mutable_entity(self, i): return self.entity_[i]

  def add_entity(self):
    # Append a fresh element and return it for in-place population.
    x = EntityProto()
    self.entity_.append(x)
    return x

  def clear_entity(self):
    self.entity_ = []
  def transaction(self):
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_

  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()

  def clear_transaction(self):
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()

  def has_transaction(self): return self.has_transaction_

  def composite_index_size(self): return len(self.composite_index_)
  def composite_index_list(self): return self.composite_index_

  def composite_index(self, i): return self.composite_index_[i]
  def mutable_composite_index(self, i): return self.composite_index_[i]

  def add_composite_index(self):
    x = CompositeIndex()
    self.composite_index_.append(x)
    return x

  def clear_composite_index(self):
    self.composite_index_ = []
  def trusted(self): return self.trusted_

  def set_trusted(self, x):
    self.has_trusted_ = 1
    self.trusted_ = x

  def clear_trusted(self):
    if self.has_trusted_:
      self.has_trusted_ = 0
      self.trusted_ = 0

  def has_trusted(self): return self.has_trusted_

  def force(self): return self.force_

  def set_force(self, x):
    self.has_force_ = 1
    self.force_ = x

  def clear_force(self):
    if self.has_force_:
      self.has_force_ = 0
      self.force_ = 0

  def has_force(self): return self.has_force_

  def mark_changes(self): return self.mark_changes_

  def set_mark_changes(self, x):
    self.has_mark_changes_ = 1
    self.mark_changes_ = x

  def clear_mark_changes(self):
    if self.has_mark_changes_:
      self.has_mark_changes_ = 0
      self.mark_changes_ = 0

  def has_mark_changes(self): return self.has_mark_changes_

  def snapshot_size(self): return len(self.snapshot_)
  def snapshot_list(self): return self.snapshot_

  def snapshot(self, i): return self.snapshot_[i]
  def mutable_snapshot(self, i): return self.snapshot_[i]

  def add_snapshot(self):
    x = Snapshot()
    self.snapshot_.append(x)
    return x

  def clear_snapshot(self):
    self.snapshot_ = []
  def auto_id_policy(self): return self.auto_id_policy_

  def set_auto_id_policy(self, x):
    self.has_auto_id_policy_ = 1
    self.auto_id_policy_ = x

  def clear_auto_id_policy(self):
    if self.has_auto_id_policy_:
      self.has_auto_id_policy_ = 0
      self.auto_id_policy_ = 0

  def has_auto_id_policy(self): return self.has_auto_id_policy_

  def MergeFrom(self, x):
    """Append x's repeated elements and merge/copy its set fields."""
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
    if (x.has_trusted()): self.set_trusted(x.trusted())
    if (x.has_force()): self.set_force(x.force())
    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
    if (x.has_auto_id_policy()): self.set_auto_id_policy(x.auto_id_policy())

  def Equals(self, x):
    # Field-by-field equality: presence flags must match, and values are
    # compared only when present.
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if len(self.entity_) != len(x.entity_): return 0
    for e1, e2 in zip(self.entity_, x.entity_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    if len(self.composite_index_) != len(x.composite_index_): return 0
    for e1, e2 in zip(self.composite_index_, x.composite_index_):
      if e1 != e2: return 0
    if self.has_trusted_ != x.has_trusted_: return 0
    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
    if self.has_force_ != x.has_force_: return 0
    if self.has_force_ and self.force_ != x.force_: return 0
    if self.has_mark_changes_ != x.has_mark_changes_: return 0
    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
    if len(self.snapshot_) != len(x.snapshot_): return 0
    for e1, e2 in zip(self.snapshot_, x.snapshot_):
      if e1 != e2: return 0
    if self.has_auto_id_policy_ != x.has_auto_id_policy_: return 0
    if self.has_auto_id_policy_ and self.auto_id_policy_ != x.auto_id_policy_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # Initialized iff every present/contained sub-message is initialized.
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    for p in self.entity_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    for p in self.composite_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.snapshot_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # Per present field: tag byte(s) + encoded payload size.  Booleans
    # cost a fixed 2 bytes (tag + value).
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += 1 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
    if (self.has_trusted_): n += 2
    if (self.has_force_): n += 2
    if (self.has_mark_changes_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
    if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
    return n

  def ByteSizePartial(self):
    # Like ByteSize but tolerates uninitialized sub-messages.
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    n += 1 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSizePartial())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
    if (self.has_trusted_): n += 2
    if (self.has_force_): n += 2
    if (self.has_mark_changes_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
    if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
    return n

  def Clear(self):
    self.clear_header()
    self.clear_entity()
    self.clear_transaction()
    self.clear_composite_index()
    self.clear_trusted()
    self.clear_force()
    self.clear_mark_changes()
    self.clear_snapshot()
    self.clear_auto_id_policy()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (entity), 18 = field 2 (transaction),
    # 26 = field 3 (composite_index), 32 = field 4 (trusted),
    # 56 = field 7 (force), 64 = field 8 (mark_changes),
    # 74 = field 9 (snapshot), 80 = field 10 (auto_id_policy),
    # 90 = field 11 (header).
    for i in xrange(len(self.entity_)):
      out.putVarInt32(10)
      out.putVarInt32(self.entity_[i].ByteSize())
      self.entity_[i].OutputUnchecked(out)
    if (self.has_transaction_):
      out.putVarInt32(18)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(26)
      out.putVarInt32(self.composite_index_[i].ByteSize())
      self.composite_index_[i].OutputUnchecked(out)
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_force_):
      out.putVarInt32(56)
      out.putBoolean(self.force_)
    if (self.has_mark_changes_):
      out.putVarInt32(64)
      out.putBoolean(self.mark_changes_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSize())
      self.snapshot_[i].OutputUnchecked(out)
    if (self.has_auto_id_policy_):
      out.putVarInt32(80)
      out.putVarInt32(self.auto_id_policy_)
    if (self.has_header_):
      out.putVarInt32(90)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)

  def OutputPartial(self, out):
    # Same wire layout as OutputUnchecked, using partial sizes.
    for i in xrange(len(self.entity_)):
      out.putVarInt32(10)
      out.putVarInt32(self.entity_[i].ByteSizePartial())
      self.entity_[i].OutputPartial(out)
    if (self.has_transaction_):
      out.putVarInt32(18)
      out.putVarInt32(self.transaction_.ByteSizePartial())
      self.transaction_.OutputPartial(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(26)
      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
      self.composite_index_[i].OutputPartial(out)
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_force_):
      out.putVarInt32(56)
      out.putBoolean(self.force_)
    if (self.has_mark_changes_):
      out.putVarInt32(64)
      out.putBoolean(self.mark_changes_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
      self.snapshot_[i].OutputPartial(out)
    if (self.has_auto_id_policy_):
      out.putVarInt32(80)
      out.putVarInt32(self.auto_id_policy_)
    if (self.has_header_):
      out.putVarInt32(90)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        # Length-delimited sub-message: decode from a bounded view.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_entity().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if tt == 32:
        self.set_trusted(d.getBoolean())
        continue
      if tt == 56:
        self.set_force(d.getBoolean())
        continue
      if tt == 64:
        self.set_mark_changes(d.getBoolean())
        continue
      if tt == 74:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_snapshot().TryMerge(tmp)
        continue
      if tt == 80:
        self.set_auto_id_policy(d.getVarInt32())
        continue
      if tt == 90:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue

      # Tag 0 is never valid; any other unknown field is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Render a debug text dump; sub-messages are indented one level."""
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.entity_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("entity%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
    cnt=0
    for e in self.snapshot_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("snapshot%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_auto_id_policy_: res+=prefix+("auto_id_policy: %s\n" % self.DebugFormatInt32(self.auto_id_policy_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple indexed by tag number, filled from a sparse dict.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants (k<field> = tag number).
  kheader = 11
  kentity = 1
  ktransaction = 2
  kcomposite_index = 3
  ktrusted = 4
  kforce = 7
  kmark_changes = 8
  ksnapshot = 9
  kauto_id_policy = 10

  # Debug name per tag number.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "entity",
    2: "transaction",
    3: "composite_index",
    4: "trusted",
    7: "force",
    8: "mark_changes",
    9: "snapshot",
    10: "auto_id_policy",
    11: "header",
  }, 11)

  # Wire type per tag number.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    7: ProtocolBuffer.Encoder.NUMERIC,
    8: ProtocolBuffer.Encoder.NUMERIC,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.NUMERIC,
    11: ProtocolBuffer.Encoder.STRING,
  }, 11, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutRequest'
class PutResponse(ProtocolBuffer.ProtocolMessage):
  """Datastore Put response (apphosting_datastore_v3.PutResponse).

  Fields: repeated key (Reference, 1), cost (Cost, 2), repeated
  version (int64, 3).  Generated protocol-buffer code — do not
  hand-edit logic; changes will be lost on regeneration.
  """

  has_cost_ = 0
  cost_ = None

  def __init__(self, contents=None):
    self.key_ = []
    self.version_ = []
    # Lock serializes the lazy construction of the cost sub-message.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_

  def key(self, i): return self.key_[i]
  def mutable_key(self, i): return self.key_[i]

  def add_key(self):
    # Append a fresh element and return it for in-place population.
    x = Reference()
    self.key_.append(x)
    return x

  def clear_key(self):
    self.key_ = []
  def cost(self):
    # Lazily build the Cost under the lock (double-checked).
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_

  def mutable_cost(self): self.has_cost_ = 1; return self.cost()

  def clear_cost(self):
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()

  def has_cost(self): return self.has_cost_

  def version_size(self): return len(self.version_)
  def version_list(self): return self.version_

  def version(self, i): return self.version_[i]

  def set_version(self, i, x):
    self.version_[i] = x

  def add_version(self, x):
    self.version_.append(x)

  def clear_version(self):
    self.version_ = []

  def MergeFrom(self, x):
    """Append x's repeated elements and merge its cost if set."""
    assert x is not self
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
    for i in xrange(x.version_size()): self.add_version(x.version(i))

  def Equals(self, x):
    if x is self: return 1
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    if len(self.version_) != len(x.version_): return 0
    for e1, e2 in zip(self.version_, x.version_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    # 1 tag byte per element/field plus the encoded payload size.
    n = 0
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    return n

  def ByteSizePartial(self):
    # Like ByteSize but tolerates uninitialized sub-messages.
    n = 0
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    return n

  def Clear(self):
    self.clear_key()
    self.clear_cost()
    self.clear_version()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (key), 18 = field 2 (cost),
    # 24 = field 3 (version).
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    if (self.has_cost_):
      out.putVarInt32(18)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(24)
      out.putVarInt64(self.version_[i])

  def OutputPartial(self, out):
    # Same wire layout as OutputUnchecked, using partial sizes.
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSizePartial())
      self.key_[i].OutputPartial(out)
    if (self.has_cost_):
      out.putVarInt32(18)
      out.putVarInt32(self.cost_.ByteSizePartial())
      self.cost_.OutputPartial(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(24)
      out.putVarInt64(self.version_[i])

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        # Length-delimited sub-message: decode from a bounded view.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if tt == 24:
        self.add_version(d.getVarInt64())
        continue

      # Tag 0 is never valid; any other unknown field is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Render a debug text dump; sub-messages are indented one level."""
    res=""
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.version_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
      cnt+=1
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple indexed by tag number, filled from a sparse dict.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants (k<field> = tag number).
  kkey = 1
  kcost = 2
  kversion = 3

  # Debug name per tag number.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "cost",
    3: "version",
  }, 3)

  # Wire type per tag number.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutResponse'
class TouchRequest(ProtocolBuffer.ProtocolMessage):
  """Datastore Touch request (apphosting_datastore_v3.TouchRequest).

  Fields: repeated key (Reference, 1), repeated composite_index
  (CompositeIndex, 2), force (bool, 3), repeated snapshot (Snapshot, 9),
  header (InternalHeader, 10).  Generated protocol-buffer code — do not
  hand-edit logic; changes will be lost on regeneration.
  """

  has_header_ = 0
  header_ = None
  has_force_ = 0
  force_ = 0

  def __init__(self, contents=None):
    self.key_ = []
    self.composite_index_ = []
    self.snapshot_ = []
    # Lock serializes the lazy construction of the header sub-message.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def header(self):
    # Lazily build the InternalHeader under the lock (double-checked).
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_

  def mutable_header(self): self.has_header_ = 1; return self.header()

  def clear_header(self):
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()

  def has_header(self): return self.has_header_

  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_

  def key(self, i): return self.key_[i]
  def mutable_key(self, i): return self.key_[i]

  def add_key(self):
    # Append a fresh element and return it for in-place population.
    x = Reference()
    self.key_.append(x)
    return x

  def clear_key(self):
    self.key_ = []
  def composite_index_size(self): return len(self.composite_index_)
  def composite_index_list(self): return self.composite_index_

  def composite_index(self, i): return self.composite_index_[i]
  def mutable_composite_index(self, i): return self.composite_index_[i]

  def add_composite_index(self):
    x = CompositeIndex()
    self.composite_index_.append(x)
    return x

  def clear_composite_index(self):
    self.composite_index_ = []
  def force(self): return self.force_

  def set_force(self, x):
    self.has_force_ = 1
    self.force_ = x

  def clear_force(self):
    if self.has_force_:
      self.has_force_ = 0
      self.force_ = 0

  def has_force(self): return self.has_force_

  def snapshot_size(self): return len(self.snapshot_)
  def snapshot_list(self): return self.snapshot_

  def snapshot(self, i): return self.snapshot_[i]
  def mutable_snapshot(self, i): return self.snapshot_[i]

  def add_snapshot(self):
    x = Snapshot()
    self.snapshot_.append(x)
    return x

  def clear_snapshot(self):
    self.snapshot_ = []

  def MergeFrom(self, x):
    """Append x's repeated elements and merge/copy its set fields."""
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
    if (x.has_force()): self.set_force(x.force())
    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))

  def Equals(self, x):
    # Field-by-field equality: presence flags must match, and values are
    # compared only when present.
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if len(self.composite_index_) != len(x.composite_index_): return 0
    for e1, e2 in zip(self.composite_index_, x.composite_index_):
      if e1 != e2: return 0
    if self.has_force_ != x.has_force_: return 0
    if self.has_force_ and self.force_ != x.force_: return 0
    if len(self.snapshot_) != len(x.snapshot_): return 0
    for e1, e2 in zip(self.snapshot_, x.snapshot_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # Initialized iff every present/contained sub-message is initialized.
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.composite_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.snapshot_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # 1 tag byte per element/field plus payload; bool force costs a
    # fixed 2 bytes (tag + value).
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
    if (self.has_force_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
    return n

  def ByteSizePartial(self):
    # Like ByteSize but tolerates uninitialized sub-messages.
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
    if (self.has_force_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
    return n

  def Clear(self):
    self.clear_header()
    self.clear_key()
    self.clear_composite_index()
    self.clear_force()
    self.clear_snapshot()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (key), 18 = field 2 (composite_index),
    # 24 = field 3 (force), 74 = field 9 (snapshot),
    # 82 = field 10 (header).
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(18)
      out.putVarInt32(self.composite_index_[i].ByteSize())
      self.composite_index_[i].OutputUnchecked(out)
    if (self.has_force_):
      out.putVarInt32(24)
      out.putBoolean(self.force_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSize())
      self.snapshot_[i].OutputUnchecked(out)
    if (self.has_header_):
      out.putVarInt32(82)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)

  def OutputPartial(self, out):
    # Same wire layout as OutputUnchecked, using partial sizes.
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSizePartial())
      self.key_[i].OutputPartial(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(18)
      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
      self.composite_index_[i].OutputPartial(out)
    if (self.has_force_):
      out.putVarInt32(24)
      out.putBoolean(self.force_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
      self.snapshot_[i].OutputPartial(out)
    if (self.has_header_):
      out.putVarInt32(82)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        # Length-delimited sub-message: decode from a bounded view.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_force(d.getBoolean())
        continue
      if tt == 74:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_snapshot().TryMerge(tmp)
        continue
      if tt == 82:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue

      # Tag 0 is never valid; any other unknown field is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Render a debug text dump; sub-messages are indented one level."""
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
    cnt=0
    for e in self.snapshot_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("snapshot%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple indexed by tag number, filled from a sparse dict.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants (k<field> = tag number).
  kheader = 10
  kkey = 1
  kcomposite_index = 2
  kforce = 3
  ksnapshot = 9

  # Debug name per tag number.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "composite_index",
    3: "force",
    9: "snapshot",
    10: "header",
  }, 10)

  # Wire type per tag number.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.STRING,
  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchRequest'
self.lazy_init_lock_.acquire() try: if self.cost_ is None: self.cost_ = Cost() finally: self.lazy_init_lock_.release() return self.cost_ def mutable_cost(self): self.has_cost_ = 1; return self.cost() def clear_cost(self): if self.has_cost_: self.has_cost_ = 0; if self.cost_ is not None: self.cost_.Clear() def has_cost(self): return self.has_cost_ def MergeFrom(self, x): assert x is not self if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost()) def Equals(self, x): if x is self: return 1 if self.has_cost_ != x.has_cost_: return 0 if self.has_cost_ and self.cost_ != x.cost_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize()) return n def ByteSizePartial(self): n = 0 if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial()) return n def Clear(self): self.clear_cost() def OutputUnchecked(self, out): if (self.has_cost_): out.putVarInt32(10) out.putVarInt32(self.cost_.ByteSize()) self.cost_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_cost_): out.putVarInt32(10) out.putVarInt32(self.cost_.ByteSizePartial()) self.cost_.OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_cost().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_cost_: res+=prefix+"cost <\n" res+=self.cost_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kcost = 1 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "cost", }, 1) 
_TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, }, 1, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchResponse' class DeleteRequest(ProtocolBuffer.ProtocolMessage): has_header_ = 0 header_ = None has_transaction_ = 0 transaction_ = None has_trusted_ = 0 trusted_ = 0 has_force_ = 0 force_ = 0 has_mark_changes_ = 0 mark_changes_ = 0 def __init__(self, contents=None): self.key_ = [] self.composite_index_ = [] self.snapshot_ = [] self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def header(self): if self.header_ is None: self.lazy_init_lock_.acquire() try: if self.header_ is None: self.header_ = InternalHeader() finally: self.lazy_init_lock_.release() return self.header_ def mutable_header(self): self.has_header_ = 1; return self.header() def clear_header(self): if self.has_header_: self.has_header_ = 0; if self.header_ is not None: self.header_.Clear() def has_header(self): return self.has_header_ def key_size(self): return len(self.key_) def key_list(self): return self.key_ def key(self, i): return self.key_[i] def mutable_key(self, i): return self.key_[i] def add_key(self): x = Reference() self.key_.append(x) return x def clear_key(self): self.key_ = [] def transaction(self): if self.transaction_ is None: self.lazy_init_lock_.acquire() try: if self.transaction_ is None: self.transaction_ = Transaction() finally: self.lazy_init_lock_.release() return self.transaction_ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction() def clear_transaction(self): if self.has_transaction_: self.has_transaction_ = 0; if self.transaction_ is not None: self.transaction_.Clear() def has_transaction(self): return self.has_transaction_ def composite_index_size(self): return len(self.composite_index_) def composite_index_list(self): return self.composite_index_ def 
composite_index(self, i): return self.composite_index_[i] def mutable_composite_index(self, i): return self.composite_index_[i] def add_composite_index(self): x = CompositeIndex() self.composite_index_.append(x) return x def clear_composite_index(self): self.composite_index_ = [] def trusted(self): return self.trusted_ def set_trusted(self, x): self.has_trusted_ = 1 self.trusted_ = x def clear_trusted(self): if self.has_trusted_: self.has_trusted_ = 0 self.trusted_ = 0 def has_trusted(self): return self.has_trusted_ def force(self): return self.force_ def set_force(self, x): self.has_force_ = 1 self.force_ = x def clear_force(self): if self.has_force_: self.has_force_ = 0 self.force_ = 0 def has_force(self): return self.has_force_ def mark_changes(self): return self.mark_changes_ def set_mark_changes(self, x): self.has_mark_changes_ = 1 self.mark_changes_ = x def clear_mark_changes(self): if self.has_mark_changes_: self.has_mark_changes_ = 0 self.mark_changes_ = 0 def has_mark_changes(self): return self.has_mark_changes_ def snapshot_size(self): return len(self.snapshot_) def snapshot_list(self): return self.snapshot_ def snapshot(self, i): return self.snapshot_[i] def mutable_snapshot(self, i): return self.snapshot_[i] def add_snapshot(self): x = Snapshot() self.snapshot_.append(x) return x def clear_snapshot(self): self.snapshot_ = [] def MergeFrom(self, x): assert x is not self if (x.has_header()): self.mutable_header().MergeFrom(x.header()) for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i)) if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction()) for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i)) if (x.has_trusted()): self.set_trusted(x.trusted()) if (x.has_force()): self.set_force(x.force()) if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes()) for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i)) def Equals(self, x): if x is self: 
return 1 if self.has_header_ != x.has_header_: return 0 if self.has_header_ and self.header_ != x.header_: return 0 if len(self.key_) != len(x.key_): return 0 for e1, e2 in zip(self.key_, x.key_): if e1 != e2: return 0 if self.has_transaction_ != x.has_transaction_: return 0 if self.has_transaction_ and self.transaction_ != x.transaction_: return 0 if len(self.composite_index_) != len(x.composite_index_): return 0 for e1, e2 in zip(self.composite_index_, x.composite_index_): if e1 != e2: return 0 if self.has_trusted_ != x.has_trusted_: return 0 if self.has_trusted_ and self.trusted_ != x.trusted_: return 0 if self.has_force_ != x.has_force_: return 0 if self.has_force_ and self.force_ != x.force_: return 0 if self.has_mark_changes_ != x.has_mark_changes_: return 0 if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0 if len(self.snapshot_) != len(x.snapshot_): return 0 for e1, e2 in zip(self.snapshot_, x.snapshot_): if e1 != e2: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0 for p in self.key_: if not p.IsInitialized(debug_strs): initialized=0 if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0 for p in self.composite_index_: if not p.IsInitialized(debug_strs): initialized=0 for p in self.snapshot_: if not p.IsInitialized(debug_strs): initialized=0 return initialized def ByteSize(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize()) n += 1 * len(self.key_) for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize()) if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize()) n += 1 * len(self.composite_index_) for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize()) if (self.has_trusted_): n += 2 if (self.has_force_): n += 2 if (self.has_mark_changes_): n += 2 
n += 1 * len(self.snapshot_) for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize()) return n def ByteSizePartial(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial()) n += 1 * len(self.key_) for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial()) if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial()) n += 1 * len(self.composite_index_) for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial()) if (self.has_trusted_): n += 2 if (self.has_force_): n += 2 if (self.has_mark_changes_): n += 2 n += 1 * len(self.snapshot_) for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial()) return n def Clear(self): self.clear_header() self.clear_key() self.clear_transaction() self.clear_composite_index() self.clear_trusted() self.clear_force() self.clear_mark_changes() self.clear_snapshot() def OutputUnchecked(self, out): if (self.has_trusted_): out.putVarInt32(32) out.putBoolean(self.trusted_) if (self.has_transaction_): out.putVarInt32(42) out.putVarInt32(self.transaction_.ByteSize()) self.transaction_.OutputUnchecked(out) for i in xrange(len(self.key_)): out.putVarInt32(50) out.putVarInt32(self.key_[i].ByteSize()) self.key_[i].OutputUnchecked(out) if (self.has_force_): out.putVarInt32(56) out.putBoolean(self.force_) if (self.has_mark_changes_): out.putVarInt32(64) out.putBoolean(self.mark_changes_) for i in xrange(len(self.snapshot_)): out.putVarInt32(74) out.putVarInt32(self.snapshot_[i].ByteSize()) self.snapshot_[i].OutputUnchecked(out) if (self.has_header_): out.putVarInt32(82) out.putVarInt32(self.header_.ByteSize()) self.header_.OutputUnchecked(out) for i in xrange(len(self.composite_index_)): out.putVarInt32(90) out.putVarInt32(self.composite_index_[i].ByteSize()) self.composite_index_[i].OutputUnchecked(out) def OutputPartial(self, out): 
if (self.has_trusted_): out.putVarInt32(32) out.putBoolean(self.trusted_) if (self.has_transaction_): out.putVarInt32(42) out.putVarInt32(self.transaction_.ByteSizePartial()) self.transaction_.OutputPartial(out) for i in xrange(len(self.key_)): out.putVarInt32(50) out.putVarInt32(self.key_[i].ByteSizePartial()) self.key_[i].OutputPartial(out) if (self.has_force_): out.putVarInt32(56) out.putBoolean(self.force_) if (self.has_mark_changes_): out.putVarInt32(64) out.putBoolean(self.mark_changes_) for i in xrange(len(self.snapshot_)): out.putVarInt32(74) out.putVarInt32(self.snapshot_[i].ByteSizePartial()) self.snapshot_[i].OutputPartial(out) if (self.has_header_): out.putVarInt32(82) out.putVarInt32(self.header_.ByteSizePartial()) self.header_.OutputPartial(out) for i in xrange(len(self.composite_index_)): out.putVarInt32(90) out.putVarInt32(self.composite_index_[i].ByteSizePartial()) self.composite_index_[i].OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 32: self.set_trusted(d.getBoolean()) continue if tt == 42: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_transaction().TryMerge(tmp) continue if tt == 50: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_key().TryMerge(tmp) continue if tt == 56: self.set_force(d.getBoolean()) continue if tt == 64: self.set_mark_changes(d.getBoolean()) continue if tt == 74: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_snapshot().TryMerge(tmp) continue if tt == 82: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_header().TryMerge(tmp) continue if tt == 90: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_composite_index().TryMerge(tmp) 
continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_header_: res+=prefix+"header <\n" res+=self.header_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt=0 for e in self.key_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("key%s <\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt+=1 if self.has_transaction_: res+=prefix+"transaction <\n" res+=self.transaction_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt=0 for e in self.composite_index_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("composite_index%s <\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt+=1 if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_)) if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_)) if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_)) cnt=0 for e in self.snapshot_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("snapshot%s <\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt+=1 return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kheader = 10 kkey = 6 ktransaction = 5 kcomposite_index = 11 ktrusted = 4 kforce = 7 kmark_changes = 8 ksnapshot = 9 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 4: "trusted", 5: "transaction", 6: "key", 7: "force", 8: "mark_changes", 9: "snapshot", 10: "header", 11: "composite_index", }, 11) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 4: ProtocolBuffer.Encoder.NUMERIC, 5: ProtocolBuffer.Encoder.STRING, 6: ProtocolBuffer.Encoder.STRING, 7: ProtocolBuffer.Encoder.NUMERIC, 8: ProtocolBuffer.Encoder.NUMERIC, 9: ProtocolBuffer.Encoder.STRING, 10: ProtocolBuffer.Encoder.STRING, 11: 
ProtocolBuffer.Encoder.STRING, }, 11, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteRequest' class DeleteResponse(ProtocolBuffer.ProtocolMessage): has_cost_ = 0 cost_ = None def __init__(self, contents=None): self.version_ = [] self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def cost(self): if self.cost_ is None: self.lazy_init_lock_.acquire() try: if self.cost_ is None: self.cost_ = Cost() finally: self.lazy_init_lock_.release() return self.cost_ def mutable_cost(self): self.has_cost_ = 1; return self.cost() def clear_cost(self): if self.has_cost_: self.has_cost_ = 0; if self.cost_ is not None: self.cost_.Clear() def has_cost(self): return self.has_cost_ def version_size(self): return len(self.version_) def version_list(self): return self.version_ def version(self, i): return self.version_[i] def set_version(self, i, x): self.version_[i] = x def add_version(self, x): self.version_.append(x) def clear_version(self): self.version_ = [] def MergeFrom(self, x): assert x is not self if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost()) for i in xrange(x.version_size()): self.add_version(x.version(i)) def Equals(self, x): if x is self: return 1 if self.has_cost_ != x.has_cost_: return 0 if self.has_cost_ and self.cost_ != x.cost_: return 0 if len(self.version_) != len(x.version_): return 0 for e1, e2 in zip(self.version_, x.version_): if e1 != e2: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize()) n += 1 * len(self.version_) for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i]) return n def ByteSizePartial(self): n = 0 if (self.has_cost_): n += 1 + 
self.lengthString(self.cost_.ByteSizePartial()) n += 1 * len(self.version_) for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i]) return n def Clear(self): self.clear_cost() self.clear_version() def OutputUnchecked(self, out): if (self.has_cost_): out.putVarInt32(10) out.putVarInt32(self.cost_.ByteSize()) self.cost_.OutputUnchecked(out) for i in xrange(len(self.version_)): out.putVarInt32(24) out.putVarInt64(self.version_[i]) def OutputPartial(self, out): if (self.has_cost_): out.putVarInt32(10) out.putVarInt32(self.cost_.ByteSizePartial()) self.cost_.OutputPartial(out) for i in xrange(len(self.version_)): out.putVarInt32(24) out.putVarInt64(self.version_[i]) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_cost().TryMerge(tmp) continue if tt == 24: self.add_version(d.getVarInt64()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_cost_: res+=prefix+"cost <\n" res+=self.cost_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt=0 for e in self.version_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e))) cnt+=1 return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kcost = 1 kversion = 3 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "cost", 3: "version", }, 3) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 3: ProtocolBuffer.Encoder.NUMERIC, }, 3, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteResponse' class NextRequest(ProtocolBuffer.ProtocolMessage): has_header_ = 0 header_ = None has_cursor_ = 0 
has_count_ = 0 count_ = 0 has_offset_ = 0 offset_ = 0 has_compile_ = 0 compile_ = 0 def __init__(self, contents=None): self.cursor_ = Cursor() self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def header(self): if self.header_ is None: self.lazy_init_lock_.acquire() try: if self.header_ is None: self.header_ = InternalHeader() finally: self.lazy_init_lock_.release() return self.header_ def mutable_header(self): self.has_header_ = 1; return self.header() def clear_header(self): if self.has_header_: self.has_header_ = 0; if self.header_ is not None: self.header_.Clear() def has_header(self): return self.has_header_ def cursor(self): return self.cursor_ def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor_ def clear_cursor(self):self.has_cursor_ = 0; self.cursor_.Clear() def has_cursor(self): return self.has_cursor_ def count(self): return self.count_ def set_count(self, x): self.has_count_ = 1 self.count_ = x def clear_count(self): if self.has_count_: self.has_count_ = 0 self.count_ = 0 def has_count(self): return self.has_count_ def offset(self): return self.offset_ def set_offset(self, x): self.has_offset_ = 1 self.offset_ = x def clear_offset(self): if self.has_offset_: self.has_offset_ = 0 self.offset_ = 0 def has_offset(self): return self.has_offset_ def compile(self): return self.compile_ def set_compile(self, x): self.has_compile_ = 1 self.compile_ = x def clear_compile(self): if self.has_compile_: self.has_compile_ = 0 self.compile_ = 0 def has_compile(self): return self.has_compile_ def MergeFrom(self, x): assert x is not self if (x.has_header()): self.mutable_header().MergeFrom(x.header()) if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor()) if (x.has_count()): self.set_count(x.count()) if (x.has_offset()): self.set_offset(x.offset()) if (x.has_compile()): self.set_compile(x.compile()) def Equals(self, x): if x is self: return 1 if self.has_header_ != x.has_header_: return 0 if 
self.has_header_ and self.header_ != x.header_: return 0 if self.has_cursor_ != x.has_cursor_: return 0 if self.has_cursor_ and self.cursor_ != x.cursor_: return 0 if self.has_count_ != x.has_count_: return 0 if self.has_count_ and self.count_ != x.count_: return 0 if self.has_offset_ != x.has_offset_: return 0 if self.has_offset_ and self.offset_ != x.offset_: return 0 if self.has_compile_ != x.has_compile_: return 0 if self.has_compile_ and self.compile_ != x.compile_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0 if (not self.has_cursor_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: cursor not set.') elif not self.cursor_.IsInitialized(debug_strs): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize()) n += self.lengthString(self.cursor_.ByteSize()) if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_) if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_) if (self.has_compile_): n += 2 return n + 1 def ByteSizePartial(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial()) if (self.has_cursor_): n += 1 n += self.lengthString(self.cursor_.ByteSizePartial()) if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_) if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_) if (self.has_compile_): n += 2 return n def Clear(self): self.clear_header() self.clear_cursor() self.clear_count() self.clear_offset() self.clear_compile() def OutputUnchecked(self, out): out.putVarInt32(10) out.putVarInt32(self.cursor_.ByteSize()) self.cursor_.OutputUnchecked(out) if (self.has_count_): out.putVarInt32(16) out.putVarInt32(self.count_) if (self.has_compile_): out.putVarInt32(24) out.putBoolean(self.compile_) if (self.has_offset_): out.putVarInt32(32) out.putVarInt32(self.offset_) 
if (self.has_header_): out.putVarInt32(42) out.putVarInt32(self.header_.ByteSize()) self.header_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_cursor_): out.putVarInt32(10) out.putVarInt32(self.cursor_.ByteSizePartial()) self.cursor_.OutputPartial(out) if (self.has_count_): out.putVarInt32(16) out.putVarInt32(self.count_) if (self.has_compile_): out.putVarInt32(24) out.putBoolean(self.compile_) if (self.has_offset_): out.putVarInt32(32) out.putVarInt32(self.offset_) if (self.has_header_): out.putVarInt32(42) out.putVarInt32(self.header_.ByteSizePartial()) self.header_.OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_cursor().TryMerge(tmp) continue if tt == 16: self.set_count(d.getVarInt32()) continue if tt == 24: self.set_compile(d.getBoolean()) continue if tt == 32: self.set_offset(d.getVarInt32()) continue if tt == 42: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_header().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_header_: res+=prefix+"header <\n" res+=self.header_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_cursor_: res+=prefix+"cursor <\n" res+=self.cursor_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_)) if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_)) if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_)) return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kheader = 5 kcursor = 1 kcount = 2 koffset = 4 
kcompile = 3 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "cursor", 2: "count", 3: "compile", 4: "offset", 5: "header", }, 5) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 2: ProtocolBuffer.Encoder.NUMERIC, 3: ProtocolBuffer.Encoder.NUMERIC, 4: ProtocolBuffer.Encoder.NUMERIC, 5: ProtocolBuffer.Encoder.STRING, }, 5, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.NextRequest' class QueryResult(ProtocolBuffer.ProtocolMessage): has_cursor_ = 0 cursor_ = None has_skipped_results_ = 0 skipped_results_ = 0 has_more_results_ = 0 more_results_ = 0 has_keys_only_ = 0 keys_only_ = 0 has_index_only_ = 0 index_only_ = 0 has_small_ops_ = 0 small_ops_ = 0 has_compiled_query_ = 0 compiled_query_ = None has_compiled_cursor_ = 0 compiled_cursor_ = None has_skipped_results_compiled_cursor_ = 0 skipped_results_compiled_cursor_ = None def __init__(self, contents=None): self.result_ = [] self.index_ = [] self.version_ = [] self.result_compiled_cursor_ = [] self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def cursor(self): if self.cursor_ is None: self.lazy_init_lock_.acquire() try: if self.cursor_ is None: self.cursor_ = Cursor() finally: self.lazy_init_lock_.release() return self.cursor_ def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor() def clear_cursor(self): if self.has_cursor_: self.has_cursor_ = 0; if self.cursor_ is not None: self.cursor_.Clear() def has_cursor(self): return self.has_cursor_ def result_size(self): return len(self.result_) def result_list(self): return self.result_ def result(self, i): return self.result_[i] def mutable_result(self, i): return self.result_[i] def add_result(self): x = EntityProto() self.result_.append(x) return x def clear_result(self): self.result_ = [] def skipped_results(self): return self.skipped_results_ def set_skipped_results(self, 
x): self.has_skipped_results_ = 1 self.skipped_results_ = x def clear_skipped_results(self): if self.has_skipped_results_: self.has_skipped_results_ = 0 self.skipped_results_ = 0 def has_skipped_results(self): return self.has_skipped_results_ def more_results(self): return self.more_results_ def set_more_results(self, x): self.has_more_results_ = 1 self.more_results_ = x def clear_more_results(self): if self.has_more_results_: self.has_more_results_ = 0 self.more_results_ = 0 def has_more_results(self): return self.has_more_results_ def keys_only(self): return self.keys_only_ def set_keys_only(self, x): self.has_keys_only_ = 1 self.keys_only_ = x def clear_keys_only(self): if self.has_keys_only_: self.has_keys_only_ = 0 self.keys_only_ = 0 def has_keys_only(self): return self.has_keys_only_ def index_only(self): return self.index_only_ def set_index_only(self, x): self.has_index_only_ = 1 self.index_only_ = x def clear_index_only(self): if self.has_index_only_: self.has_index_only_ = 0 self.index_only_ = 0 def has_index_only(self): return self.has_index_only_ def small_ops(self): return self.small_ops_ def set_small_ops(self, x): self.has_small_ops_ = 1 self.small_ops_ = x def clear_small_ops(self): if self.has_small_ops_: self.has_small_ops_ = 0 self.small_ops_ = 0 def has_small_ops(self): return self.has_small_ops_ def compiled_query(self): if self.compiled_query_ is None: self.lazy_init_lock_.acquire() try: if self.compiled_query_ is None: self.compiled_query_ = CompiledQuery() finally: self.lazy_init_lock_.release() return self.compiled_query_ def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query() def clear_compiled_query(self): if self.has_compiled_query_: self.has_compiled_query_ = 0; if self.compiled_query_ is not None: self.compiled_query_.Clear() def has_compiled_query(self): return self.has_compiled_query_ def compiled_cursor(self): if self.compiled_cursor_ is None: self.lazy_init_lock_.acquire() try: if 
self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor() finally: self.lazy_init_lock_.release() return self.compiled_cursor_ def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor() def clear_compiled_cursor(self): if self.has_compiled_cursor_: self.has_compiled_cursor_ = 0; if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear() def has_compiled_cursor(self): return self.has_compiled_cursor_ def index_size(self): return len(self.index_) def index_list(self): return self.index_ def index(self, i): return self.index_[i] def mutable_index(self, i): return self.index_[i] def add_index(self): x = CompositeIndex() self.index_.append(x) return x def clear_index(self): self.index_ = [] def version_size(self): return len(self.version_) def version_list(self): return self.version_ def version(self, i): return self.version_[i] def set_version(self, i, x): self.version_[i] = x def add_version(self, x): self.version_.append(x) def clear_version(self): self.version_ = [] def result_compiled_cursor_size(self): return len(self.result_compiled_cursor_) def result_compiled_cursor_list(self): return self.result_compiled_cursor_ def result_compiled_cursor(self, i): return self.result_compiled_cursor_[i] def mutable_result_compiled_cursor(self, i): return self.result_compiled_cursor_[i] def add_result_compiled_cursor(self): x = CompiledCursor() self.result_compiled_cursor_.append(x) return x def clear_result_compiled_cursor(self): self.result_compiled_cursor_ = [] def skipped_results_compiled_cursor(self): if self.skipped_results_compiled_cursor_ is None: self.lazy_init_lock_.acquire() try: if self.skipped_results_compiled_cursor_ is None: self.skipped_results_compiled_cursor_ = CompiledCursor() finally: self.lazy_init_lock_.release() return self.skipped_results_compiled_cursor_ def mutable_skipped_results_compiled_cursor(self): self.has_skipped_results_compiled_cursor_ = 1; return 
self.skipped_results_compiled_cursor() def clear_skipped_results_compiled_cursor(self): if self.has_skipped_results_compiled_cursor_: self.has_skipped_results_compiled_cursor_ = 0; if self.skipped_results_compiled_cursor_ is not None: self.skipped_results_compiled_cursor_.Clear() def has_skipped_results_compiled_cursor(self): return self.has_skipped_results_compiled_cursor_ def MergeFrom(self, x): assert x is not self if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor()) for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i)) if (x.has_skipped_results()): self.set_skipped_results(x.skipped_results()) if (x.has_more_results()): self.set_more_results(x.more_results()) if (x.has_keys_only()): self.set_keys_only(x.keys_only()) if (x.has_index_only()): self.set_index_only(x.index_only()) if (x.has_small_ops()): self.set_small_ops(x.small_ops()) if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query()) if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor()) for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i)) for i in xrange(x.version_size()): self.add_version(x.version(i)) for i in xrange(x.result_compiled_cursor_size()): self.add_result_compiled_cursor().CopyFrom(x.result_compiled_cursor(i)) if (x.has_skipped_results_compiled_cursor()): self.mutable_skipped_results_compiled_cursor().MergeFrom(x.skipped_results_compiled_cursor()) def Equals(self, x): if x is self: return 1 if self.has_cursor_ != x.has_cursor_: return 0 if self.has_cursor_ and self.cursor_ != x.cursor_: return 0 if len(self.result_) != len(x.result_): return 0 if self.has_skipped_results_ != x.has_skipped_results_: return 0 if self.has_skipped_results_ and self.skipped_results_ != x.skipped_results_: return 0 if self.has_more_results_ != x.has_more_results_: return 0 if self.has_more_results_ and self.more_results_ != 
x.more_results_: return 0 if self.has_keys_only_ != x.has_keys_only_: return 0 if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0 if self.has_index_only_ != x.has_index_only_: return 0 if self.has_index_only_ and self.index_only_ != x.index_only_: return 0 if self.has_small_ops_ != x.has_small_ops_: return 0 if self.has_small_ops_ and self.small_ops_ != x.small_ops_: return 0 if self.has_compiled_query_ != x.has_compiled_query_: return 0 if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0 if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0 if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0 if len(self.index_) != len(x.index_): return 0 for e1, e2 in zip(self.index_, x.index_): if e1 != e2: return 0 if len(self.version_) != len(x.version_): return 0 for e1, e2 in zip(self.version_, x.version_): if e1 != e2: return 0 if len(self.result_compiled_cursor_) != len(x.result_compiled_cursor_): return 0 for e1, e2 in zip(self.result_compiled_cursor_, x.result_compiled_cursor_): if e1 != e2: return 0 if self.has_skipped_results_compiled_cursor_ != x.has_skipped_results_compiled_cursor_: return 0 if self.has_skipped_results_compiled_cursor_ and self.skipped_results_compiled_cursor_ != x.skipped_results_compiled_cursor_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_cursor_ and not self.cursor_.IsInitialized(debug_strs)): initialized = 0 for p in self.result_: if not p.IsInitialized(debug_strs): initialized=0 if (not self.has_more_results_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: more_results not set.') if (self.has_compiled_query_ and not self.compiled_query_.IsInitialized(debug_strs)): initialized = 0 if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0 for p in self.index_: if not p.IsInitialized(debug_strs): initialized=0 for p in 
self.result_compiled_cursor_: if not p.IsInitialized(debug_strs): initialized=0 if (self.has_skipped_results_compiled_cursor_ and not self.skipped_results_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize()) n += 1 * len(self.result_) for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize()) if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_) if (self.has_keys_only_): n += 2 if (self.has_index_only_): n += 2 if (self.has_small_ops_): n += 2 if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSize()) if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSize()) n += 1 * len(self.index_) for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize()) n += 1 * len(self.version_) for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i]) n += 1 * len(self.result_compiled_cursor_) for i in xrange(len(self.result_compiled_cursor_)): n += self.lengthString(self.result_compiled_cursor_[i].ByteSize()) if (self.has_skipped_results_compiled_cursor_): n += 1 + self.lengthString(self.skipped_results_compiled_cursor_.ByteSize()) return n + 2 def ByteSizePartial(self): n = 0 if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSizePartial()) n += 1 * len(self.result_) for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSizePartial()) if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_) if (self.has_more_results_): n += 2 if (self.has_keys_only_): n += 2 if (self.has_index_only_): n += 2 if (self.has_small_ops_): n += 2 if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSizePartial()) if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSizePartial()) n += 1 * 
len(self.index_) for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial()) n += 1 * len(self.version_) for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i]) n += 1 * len(self.result_compiled_cursor_) for i in xrange(len(self.result_compiled_cursor_)): n += self.lengthString(self.result_compiled_cursor_[i].ByteSizePartial()) if (self.has_skipped_results_compiled_cursor_): n += 1 + self.lengthString(self.skipped_results_compiled_cursor_.ByteSizePartial()) return n def Clear(self): self.clear_cursor() self.clear_result() self.clear_skipped_results() self.clear_more_results() self.clear_keys_only() self.clear_index_only() self.clear_small_ops() self.clear_compiled_query() self.clear_compiled_cursor() self.clear_index() self.clear_version() self.clear_result_compiled_cursor() self.clear_skipped_results_compiled_cursor() def OutputUnchecked(self, out): if (self.has_cursor_): out.putVarInt32(10) out.putVarInt32(self.cursor_.ByteSize()) self.cursor_.OutputUnchecked(out) for i in xrange(len(self.result_)): out.putVarInt32(18) out.putVarInt32(self.result_[i].ByteSize()) self.result_[i].OutputUnchecked(out) out.putVarInt32(24) out.putBoolean(self.more_results_) if (self.has_keys_only_): out.putVarInt32(32) out.putBoolean(self.keys_only_) if (self.has_compiled_query_): out.putVarInt32(42) out.putVarInt32(self.compiled_query_.ByteSize()) self.compiled_query_.OutputUnchecked(out) if (self.has_compiled_cursor_): out.putVarInt32(50) out.putVarInt32(self.compiled_cursor_.ByteSize()) self.compiled_cursor_.OutputUnchecked(out) if (self.has_skipped_results_): out.putVarInt32(56) out.putVarInt32(self.skipped_results_) for i in xrange(len(self.index_)): out.putVarInt32(66) out.putVarInt32(self.index_[i].ByteSize()) self.index_[i].OutputUnchecked(out) if (self.has_index_only_): out.putVarInt32(72) out.putBoolean(self.index_only_) if (self.has_small_ops_): out.putVarInt32(80) out.putBoolean(self.small_ops_) for i in 
xrange(len(self.version_)): out.putVarInt32(88) out.putVarInt64(self.version_[i]) for i in xrange(len(self.result_compiled_cursor_)): out.putVarInt32(98) out.putVarInt32(self.result_compiled_cursor_[i].ByteSize()) self.result_compiled_cursor_[i].OutputUnchecked(out) if (self.has_skipped_results_compiled_cursor_): out.putVarInt32(106) out.putVarInt32(self.skipped_results_compiled_cursor_.ByteSize()) self.skipped_results_compiled_cursor_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_cursor_): out.putVarInt32(10) out.putVarInt32(self.cursor_.ByteSizePartial()) self.cursor_.OutputPartial(out) for i in xrange(len(self.result_)): out.putVarInt32(18) out.putVarInt32(self.result_[i].ByteSizePartial()) self.result_[i].OutputPartial(out) if (self.has_more_results_): out.putVarInt32(24) out.putBoolean(self.more_results_) if (self.has_keys_only_): out.putVarInt32(32) out.putBoolean(self.keys_only_) if (self.has_compiled_query_): out.putVarInt32(42) out.putVarInt32(self.compiled_query_.ByteSizePartial()) self.compiled_query_.OutputPartial(out) if (self.has_compiled_cursor_): out.putVarInt32(50) out.putVarInt32(self.compiled_cursor_.ByteSizePartial()) self.compiled_cursor_.OutputPartial(out) if (self.has_skipped_results_): out.putVarInt32(56) out.putVarInt32(self.skipped_results_) for i in xrange(len(self.index_)): out.putVarInt32(66) out.putVarInt32(self.index_[i].ByteSizePartial()) self.index_[i].OutputPartial(out) if (self.has_index_only_): out.putVarInt32(72) out.putBoolean(self.index_only_) if (self.has_small_ops_): out.putVarInt32(80) out.putBoolean(self.small_ops_) for i in xrange(len(self.version_)): out.putVarInt32(88) out.putVarInt64(self.version_[i]) for i in xrange(len(self.result_compiled_cursor_)): out.putVarInt32(98) out.putVarInt32(self.result_compiled_cursor_[i].ByteSizePartial()) self.result_compiled_cursor_[i].OutputPartial(out) if (self.has_skipped_results_compiled_cursor_): out.putVarInt32(106) 
# NOTE(review): generated protocol-buffer code (apphosting_datastore_v3.QueryResult, per _PROTO_DESCRIPTOR_NAME below) — do not hand-edit; regenerate from the .proto instead.
# TryMerge below decodes the wire format by field tag (10=cursor, 18=result, 24=more_results, 32=keys_only, 42=compiled_query, 50=compiled_cursor, 56=skipped_results, 66=index, 72=index_only, 80=small_ops, 88=version, 98=result_compiled_cursor, 106=skipped_results_compiled_cursor); tag 0 is malformed input and raises ProtocolBufferDecodeError.
out.putVarInt32(self.skipped_results_compiled_cursor_.ByteSizePartial()) self.skipped_results_compiled_cursor_.OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_cursor().TryMerge(tmp) continue if tt == 18: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_result().TryMerge(tmp) continue if tt == 24: self.set_more_results(d.getBoolean()) continue if tt == 32: self.set_keys_only(d.getBoolean()) continue if tt == 42: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_compiled_query().TryMerge(tmp) continue if tt == 50: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_compiled_cursor().TryMerge(tmp) continue if tt == 56: self.set_skipped_results(d.getVarInt32()) continue if tt == 66: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_index().TryMerge(tmp) continue if tt == 72: self.set_index_only(d.getBoolean()) continue if tt == 80: self.set_small_ops(d.getBoolean()) continue if tt == 88: self.add_version(d.getVarInt64()) continue if tt == 98: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_result_compiled_cursor().TryMerge(tmp) continue if tt == 106: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_skipped_results_compiled_cursor().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_cursor_: res+=prefix+"cursor <\n" res+=self.cursor_.__str__(prefix + "  ", printElemNumber) 
# __str__ continues: renders each present field as an indented debug dump; the k* names below are the per-field tag-number constants used by the _TEXT/_TYPES lookup tables.
res+=prefix+">\n" cnt=0 for e in self.result_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("result%s <\n" % elm) res+=e.__str__(prefix + "  ", printElemNumber) res+=prefix+">\n" cnt+=1 if self.has_skipped_results_: res+=prefix+("skipped_results: %s\n" % self.DebugFormatInt32(self.skipped_results_)) if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_)) if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_)) if self.has_index_only_: res+=prefix+("index_only: %s\n" % self.DebugFormatBool(self.index_only_)) if self.has_small_ops_: res+=prefix+("small_ops: %s\n" % self.DebugFormatBool(self.small_ops_)) if self.has_compiled_query_: res+=prefix+"compiled_query <\n" res+=self.compiled_query_.__str__(prefix + "  ", printElemNumber) res+=prefix+">\n" if self.has_compiled_cursor_: res+=prefix+"compiled_cursor <\n" res+=self.compiled_cursor_.__str__(prefix + "  ", printElemNumber) res+=prefix+">\n" cnt=0 for e in self.index_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("index%s <\n" % elm) res+=e.__str__(prefix + "  ", printElemNumber) res+=prefix+">\n" cnt+=1 cnt=0 for e in self.version_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e))) cnt+=1 cnt=0 for e in self.result_compiled_cursor_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("result_compiled_cursor%s <\n" % elm) res+=e.__str__(prefix + "  ", printElemNumber) res+=prefix+">\n" cnt+=1 if self.has_skipped_results_compiled_cursor_: res+=prefix+"skipped_results_compiled_cursor <\n" res+=self.skipped_results_compiled_cursor_.__str__(prefix + "  ", printElemNumber) res+=prefix+">\n" return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kcursor = 1 kresult = 2 kskipped_results = 7 kmore_results = 3 kkeys_only = 4 kindex_only = 9 ksmall_ops = 10 kcompiled_query = 5 
# Remaining QueryResult tag constants and _TEXT/_TYPES tag-lookup tables, then the generated message class for apphosting_datastore_v3.AllocateIdsRequest (fields: header, model_key, size, max, repeated reserve, trusted; header/model_key are lazily constructed under lazy_init_lock_). Generated protocol-buffer code — regenerate rather than hand-edit.
kcompiled_cursor = 6 kindex = 8 kversion = 11 kresult_compiled_cursor = 12 kskipped_results_compiled_cursor = 13 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "cursor", 2: "result", 3: "more_results", 4: "keys_only", 5: "compiled_query", 6: "compiled_cursor", 7: "skipped_results", 8: "index", 9: "index_only", 10: "small_ops", 11: "version", 12: "result_compiled_cursor", 13: "skipped_results_compiled_cursor", }, 13) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 2: ProtocolBuffer.Encoder.STRING, 3: ProtocolBuffer.Encoder.NUMERIC, 4: ProtocolBuffer.Encoder.NUMERIC, 5: ProtocolBuffer.Encoder.STRING, 6: ProtocolBuffer.Encoder.STRING, 7: ProtocolBuffer.Encoder.NUMERIC, 8: ProtocolBuffer.Encoder.STRING, 9: ProtocolBuffer.Encoder.NUMERIC, 10: ProtocolBuffer.Encoder.NUMERIC, 11: ProtocolBuffer.Encoder.NUMERIC, 12: ProtocolBuffer.Encoder.STRING, 13: ProtocolBuffer.Encoder.STRING, }, 13, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.QueryResult' class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage): has_header_ = 0 header_ = None has_model_key_ = 0 model_key_ = None has_size_ = 0 size_ = 0 has_max_ = 0 max_ = 0 has_trusted_ = 0 trusted_ = 0 def __init__(self, contents=None): self.reserve_ = [] self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def header(self): if self.header_ is None: self.lazy_init_lock_.acquire() try: if self.header_ is None: self.header_ = InternalHeader() finally: self.lazy_init_lock_.release() return self.header_ def mutable_header(self): self.has_header_ = 1; return self.header() def clear_header(self): if self.has_header_: self.has_header_ = 0; if self.header_ is not None: self.header_.Clear() def has_header(self): return self.has_header_ def model_key(self): if self.model_key_ is None: self.lazy_init_lock_.acquire() try: if self.model_key_ is None: 
self.model_key_ = Reference() finally: self.lazy_init_lock_.release() return self.model_key_ def mutable_model_key(self): self.has_model_key_ = 1; return self.model_key() def clear_model_key(self): if self.has_model_key_: self.has_model_key_ = 0; if self.model_key_ is not None: self.model_key_.Clear() def has_model_key(self): return self.has_model_key_ def size(self): return self.size_ def set_size(self, x): self.has_size_ = 1 self.size_ = x def clear_size(self): if self.has_size_: self.has_size_ = 0 self.size_ = 0 def has_size(self): return self.has_size_ def max(self): return self.max_ def set_max(self, x): self.has_max_ = 1 self.max_ = x def clear_max(self): if self.has_max_: self.has_max_ = 0 self.max_ = 0 def has_max(self): return self.has_max_ def reserve_size(self): return len(self.reserve_) def reserve_list(self): return self.reserve_ def reserve(self, i): return self.reserve_[i] def mutable_reserve(self, i): return self.reserve_[i] def add_reserve(self): x = Reference() self.reserve_.append(x) return x def clear_reserve(self): self.reserve_ = [] def trusted(self): return self.trusted_ def set_trusted(self, x): self.has_trusted_ = 1 self.trusted_ = x def clear_trusted(self): if self.has_trusted_: self.has_trusted_ = 0 self.trusted_ = 0 def has_trusted(self): return self.has_trusted_ def MergeFrom(self, x): assert x is not self if (x.has_header()): self.mutable_header().MergeFrom(x.header()) if (x.has_model_key()): self.mutable_model_key().MergeFrom(x.model_key()) if (x.has_size()): self.set_size(x.size()) if (x.has_max()): self.set_max(x.max()) for i in xrange(x.reserve_size()): self.add_reserve().CopyFrom(x.reserve(i)) if (x.has_trusted()): self.set_trusted(x.trusted()) def Equals(self, x): if x is self: return 1 if self.has_header_ != x.has_header_: return 0 if self.has_header_ and self.header_ != x.header_: return 0 if self.has_model_key_ != x.has_model_key_: return 0 if self.has_model_key_ and self.model_key_ != x.model_key_: return 0 if self.has_size_ 
!= x.has_size_: return 0 if self.has_size_ and self.size_ != x.size_: return 0 if self.has_max_ != x.has_max_: return 0 if self.has_max_ and self.max_ != x.max_: return 0 if len(self.reserve_) != len(x.reserve_): return 0 for e1, e2 in zip(self.reserve_, x.reserve_): if e1 != e2: return 0 if self.has_trusted_ != x.has_trusted_: return 0 if self.has_trusted_ and self.trusted_ != x.trusted_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0 if (self.has_model_key_ and not self.model_key_.IsInitialized(debug_strs)): initialized = 0 for p in self.reserve_: if not p.IsInitialized(debug_strs): initialized=0 return initialized def ByteSize(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize()) if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSize()) if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_) if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_) n += 1 * len(self.reserve_) for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSize()) if (self.has_trusted_): n += 2 return n def ByteSizePartial(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial()) if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSizePartial()) if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_) if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_) n += 1 * len(self.reserve_) for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSizePartial()) if (self.has_trusted_): n += 2 return n def Clear(self): self.clear_header() self.clear_model_key() self.clear_size() self.clear_max() self.clear_reserve() self.clear_trusted() def OutputUnchecked(self, out): if (self.has_model_key_): out.putVarInt32(10) out.putVarInt32(self.model_key_.ByteSize()) self.model_key_.OutputUnchecked(out) if 
(self.has_size_): out.putVarInt32(16) out.putVarInt64(self.size_) if (self.has_max_): out.putVarInt32(24) out.putVarInt64(self.max_) if (self.has_header_): out.putVarInt32(34) out.putVarInt32(self.header_.ByteSize()) self.header_.OutputUnchecked(out) for i in xrange(len(self.reserve_)): out.putVarInt32(42) out.putVarInt32(self.reserve_[i].ByteSize()) self.reserve_[i].OutputUnchecked(out) if (self.has_trusted_): out.putVarInt32(48) out.putBoolean(self.trusted_) def OutputPartial(self, out): if (self.has_model_key_): out.putVarInt32(10) out.putVarInt32(self.model_key_.ByteSizePartial()) self.model_key_.OutputPartial(out) if (self.has_size_): out.putVarInt32(16) out.putVarInt64(self.size_) if (self.has_max_): out.putVarInt32(24) out.putVarInt64(self.max_) if (self.has_header_): out.putVarInt32(34) out.putVarInt32(self.header_.ByteSizePartial()) self.header_.OutputPartial(out) for i in xrange(len(self.reserve_)): out.putVarInt32(42) out.putVarInt32(self.reserve_[i].ByteSizePartial()) self.reserve_[i].OutputPartial(out) if (self.has_trusted_): out.putVarInt32(48) out.putBoolean(self.trusted_) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_model_key().TryMerge(tmp) continue if tt == 16: self.set_size(d.getVarInt64()) continue if tt == 24: self.set_max(d.getVarInt64()) continue if tt == 34: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_header().TryMerge(tmp) continue if tt == 42: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_reserve().TryMerge(tmp) continue if tt == 48: self.set_trusted(d.getBoolean()) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_header_: 
res+=prefix+"header <\n" res+=self.header_.__str__(prefix + "  ", printElemNumber) res+=prefix+">\n" if self.has_model_key_: res+=prefix+"model_key <\n" res+=self.model_key_.__str__(prefix + "  ", printElemNumber) res+=prefix+">\n" if self.has_size_: res+=prefix+("size: %s\n" % self.DebugFormatInt64(self.size_)) if self.has_max_: res+=prefix+("max: %s\n" % self.DebugFormatInt64(self.max_)) cnt=0 for e in self.reserve_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("reserve%s <\n" % elm) res+=e.__str__(prefix + "  ", printElemNumber) res+=prefix+">\n" cnt+=1 if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_)) return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kheader = 4 kmodel_key = 1 ksize = 2 kmax = 3 kreserve = 5 ktrusted = 6 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "model_key", 2: "size", 3: "max", 4: "header", 5: "reserve", 6: "trusted", }, 6) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 2: ProtocolBuffer.Encoder.NUMERIC, 3: ProtocolBuffer.Encoder.NUMERIC, 4: ProtocolBuffer.Encoder.STRING, 5: ProtocolBuffer.Encoder.STRING, 6: ProtocolBuffer.Encoder.NUMERIC, }, 6, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsRequest' class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage): has_start_ = 0 start_ = 0 has_end_ = 0 end_ = 0 has_cost_ = 0 cost_ = None def __init__(self, contents=None): self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def start(self): return self.start_ def set_start(self, x): self.has_start_ = 1 self.start_ = x def clear_start(self): if self.has_start_: self.has_start_ = 0 self.start_ = 0 def has_start(self): return self.has_start_ def end(self): return self.end_ def set_end(self, x): self.has_end_ = 1 self.end_ 
out.putVarInt64(self.start_) out.putVarInt32(16) out.putVarInt64(self.end_) if (self.has_cost_): out.putVarInt32(26) out.putVarInt32(self.cost_.ByteSize()) self.cost_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_start_): out.putVarInt32(8) out.putVarInt64(self.start_) if (self.has_end_): out.putVarInt32(16) out.putVarInt64(self.end_) if (self.has_cost_): out.putVarInt32(26) out.putVarInt32(self.cost_.ByteSizePartial()) self.cost_.OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 8: self.set_start(d.getVarInt64()) continue if tt == 16: self.set_end(d.getVarInt64()) continue if tt == 26: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_cost().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_start_: res+=prefix+("start: %s\n" % self.DebugFormatInt64(self.start_)) if self.has_end_: res+=prefix+("end: %s\n" % self.DebugFormatInt64(self.end_)) if self.has_cost_: res+=prefix+"cost <\n" res+=self.cost_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kstart = 1 kend = 2 kcost = 3 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "start", 2: "end", 3: "cost", }, 3) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.NUMERIC, 2: ProtocolBuffer.Encoder.NUMERIC, 3: ProtocolBuffer.Encoder.STRING, }, 3, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsResponse' class CompositeIndices(ProtocolBuffer.ProtocolMessage): def __init__(self, contents=None): self.index_ = [] if contents is not None: self.MergeFromString(contents) def index_size(self): return 
len(self.index_) def index_list(self): return self.index_ def index(self, i): return self.index_[i] def mutable_index(self, i): return self.index_[i] def add_index(self): x = CompositeIndex() self.index_.append(x) return x def clear_index(self): self.index_ = [] def MergeFrom(self, x): assert x is not self for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i)) def Equals(self, x): if x is self: return 1 if len(self.index_) != len(x.index_): return 0 for e1, e2 in zip(self.index_, x.index_): if e1 != e2: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 for p in self.index_: if not p.IsInitialized(debug_strs): initialized=0 return initialized def ByteSize(self): n = 0 n += 1 * len(self.index_) for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize()) return n def ByteSizePartial(self): n = 0 n += 1 * len(self.index_) for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial()) return n def Clear(self): self.clear_index() def OutputUnchecked(self, out): for i in xrange(len(self.index_)): out.putVarInt32(10) out.putVarInt32(self.index_[i].ByteSize()) self.index_[i].OutputUnchecked(out) def OutputPartial(self, out): for i in xrange(len(self.index_)): out.putVarInt32(10) out.putVarInt32(self.index_[i].ByteSizePartial()) self.index_[i].OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_index().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" cnt=0 for e in self.index_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("index%s <\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt+=1 return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, 
default) for i in xrange(0, 1+maxtag)]) kindex = 1 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "index", }, 1) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, }, 1, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompositeIndices' class AddActionsRequest(ProtocolBuffer.ProtocolMessage): has_header_ = 0 header_ = None has_transaction_ = 0 def __init__(self, contents=None): self.transaction_ = Transaction() self.action_ = [] self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def header(self): if self.header_ is None: self.lazy_init_lock_.acquire() try: if self.header_ is None: self.header_ = InternalHeader() finally: self.lazy_init_lock_.release() return self.header_ def mutable_header(self): self.has_header_ = 1; return self.header() def clear_header(self): if self.has_header_: self.has_header_ = 0; if self.header_ is not None: self.header_.Clear() def has_header(self): return self.has_header_ def transaction(self): return self.transaction_ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction_ def clear_transaction(self):self.has_transaction_ = 0; self.transaction_.Clear() def has_transaction(self): return self.has_transaction_ def action_size(self): return len(self.action_) def action_list(self): return self.action_ def action(self, i): return self.action_[i] def mutable_action(self, i): return self.action_[i] def add_action(self): x = Action() self.action_.append(x) return x def clear_action(self): self.action_ = [] def MergeFrom(self, x): assert x is not self if (x.has_header()): self.mutable_header().MergeFrom(x.header()) if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction()) for i in xrange(x.action_size()): self.add_action().CopyFrom(x.action(i)) def Equals(self, x): if x is self: return 1 if self.has_header_ != 
x.has_header_: return 0 if self.has_header_ and self.header_ != x.header_: return 0 if self.has_transaction_ != x.has_transaction_: return 0 if self.has_transaction_ and self.transaction_ != x.transaction_: return 0 if len(self.action_) != len(x.action_): return 0 for e1, e2 in zip(self.action_, x.action_): if e1 != e2: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0 if (not self.has_transaction_): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: transaction not set.') elif not self.transaction_.IsInitialized(debug_strs): initialized = 0 for p in self.action_: if not p.IsInitialized(debug_strs): initialized=0 return initialized def ByteSize(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize()) n += self.lengthString(self.transaction_.ByteSize()) n += 1 * len(self.action_) for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSize()) return n + 1 def ByteSizePartial(self): n = 0 if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial()) if (self.has_transaction_): n += 1 n += self.lengthString(self.transaction_.ByteSizePartial()) n += 1 * len(self.action_) for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSizePartial()) return n def Clear(self): self.clear_header() self.clear_transaction() self.clear_action() def OutputUnchecked(self, out): out.putVarInt32(10) out.putVarInt32(self.transaction_.ByteSize()) self.transaction_.OutputUnchecked(out) for i in xrange(len(self.action_)): out.putVarInt32(18) out.putVarInt32(self.action_[i].ByteSize()) self.action_[i].OutputUnchecked(out) if (self.has_header_): out.putVarInt32(26) out.putVarInt32(self.header_.ByteSize()) self.header_.OutputUnchecked(out) def OutputPartial(self, out): if (self.has_transaction_): out.putVarInt32(10) 
out.putVarInt32(self.transaction_.ByteSizePartial()) self.transaction_.OutputPartial(out) for i in xrange(len(self.action_)): out.putVarInt32(18) out.putVarInt32(self.action_[i].ByteSizePartial()) self.action_[i].OutputPartial(out) if (self.has_header_): out.putVarInt32(26) out.putVarInt32(self.header_.ByteSizePartial()) self.header_.OutputPartial(out) def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_transaction().TryMerge(tmp) continue if tt == 18: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.add_action().TryMerge(tmp) continue if tt == 26: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_header().TryMerge(tmp) continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" if self.has_header_: res+=prefix+"header <\n" res+=self.header_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" if self.has_transaction_: res+=prefix+"transaction <\n" res+=self.transaction_.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt=0 for e in self.action_: elm="" if printElemNumber: elm="(%d)" % cnt res+=prefix+("action%s <\n" % elm) res+=e.__str__(prefix + " ", printElemNumber) res+=prefix+">\n" cnt+=1 return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) kheader = 3 ktransaction = 1 kaction = 2 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "transaction", 2: "action", 3: "header", }, 3) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 2: ProtocolBuffer.Encoder.STRING, 3: ProtocolBuffer.Encoder.STRING, }, 3, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" 
_STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsRequest' class AddActionsResponse(ProtocolBuffer.ProtocolMessage): def __init__(self, contents=None): pass if contents is not None: self.MergeFromString(contents) def MergeFrom(self, x): assert x is not self def Equals(self, x): if x is self: return 1 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 return initialized def ByteSize(self): n = 0 return n def ByteSizePartial(self): n = 0 return n def Clear(self): pass def OutputUnchecked(self, out): pass def OutputPartial(self, out): pass def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) def __str__(self, prefix="", printElemNumber=0): res="" return res def _BuildTagLookupTable(sparse, maxtag, default=None): return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", }, 0) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, }, 0, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsResponse' class BeginTransactionRequest(ProtocolBuffer.ProtocolMessage): has_header_ = 0 header_ = None has_app_ = 0 app_ = "" has_allow_multiple_eg_ = 0 allow_multiple_eg_ = 0 def __init__(self, contents=None): self.lazy_init_lock_ = thread.allocate_lock() if contents is not None: self.MergeFromString(contents) def header(self): if self.header_ is None: self.lazy_init_lock_.acquire() try: if self.header_ is None: self.header_ = InternalHeader() finally: self.lazy_init_lock_.release() return self.header_ def mutable_header(self): self.has_header_ = 1; return self.header() def clear_header(self): if self.has_header_: self.has_header_ = 0; if self.header_ is not None: self.header_.Clear() def has_header(self): return self.has_header_ def app(self): return self.app_ def set_app(self, x): 
    self.has_app_ = 1
    self.app_ = x

  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""

  def has_app(self): return self.has_app_

  def allow_multiple_eg(self): return self.allow_multiple_eg_

  def set_allow_multiple_eg(self, x):
    self.has_allow_multiple_eg_ = 1
    self.allow_multiple_eg_ = x

  def clear_allow_multiple_eg(self):
    if self.has_allow_multiple_eg_:
      self.has_allow_multiple_eg_ = 0
      self.allow_multiple_eg_ = 0

  def has_allow_multiple_eg(self): return self.has_allow_multiple_eg_

  def MergeFrom(self, x):
    # Field-wise merge; presence bits gate each copy.
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    if (x.has_app()): self.set_app(x.app())
    if (x.has_allow_multiple_eg()): self.set_allow_multiple_eg(x.allow_multiple_eg())

  def Equals(self, x):
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_allow_multiple_eg_ != x.has_allow_multiple_eg_: return 0
    if self.has_allow_multiple_eg_ and self.allow_multiple_eg_ != x.allow_multiple_eg_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # app (field 1) is required; header is optional but must itself be valid.
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    return initialized

  def ByteSize(self):
    # Final +1 accounts for the tag byte of the required app field.
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += self.lengthString(len(self.app_))
    if (self.has_allow_multiple_eg_): n += 2
    return n + 1

  def ByteSizePartial(self):
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    if (self.has_app_):
      n += 1
      n += self.lengthString(len(self.app_))
    if (self.has_allow_multiple_eg_): n += 2
    return n

  def Clear(self):
    self.clear_header()
    self.clear_app()
    self.clear_allow_multiple_eg()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (app, length-delimited), 16 = field 2
    # (allow_multiple_eg, varint), 26 = field 3 (header, length-delimited).
    out.putVarInt32(10)
    out.putPrefixedString(self.app_)
    if (self.has_allow_multiple_eg_):
      out.putVarInt32(16)
      out.putBoolean(self.allow_multiple_eg_)
    if (self.has_header_):
      out.putVarInt32(26)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)

  def OutputPartial(self, out):
    if (self.has_app_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_)
    if (self.has_allow_multiple_eg_):
      out.putVarInt32(16)
      out.putBoolean(self.allow_multiple_eg_)
    if (self.has_header_):
      out.putVarInt32(26)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_allow_multiple_eg(d.getBoolean())
        continue
      if tt == 26:
        # Decode the nested header from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_allow_multiple_eg_: res+=prefix+("allow_multiple_eg: %s\n" % self.DebugFormatBool(self.allow_multiple_eg_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  kheader = 3
  kapp = 1
  kallow_multiple_eg = 2

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app",
    2: "allow_multiple_eg",
    3: "header",
  }, 3)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.STRING,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.BeginTransactionRequest'

# NOTE(review): the source chunk cuts mid-statement here; the class name
# continues on the next original line, joined via line continuation.
class \
CommitResponse_Version(ProtocolBuffer.ProtocolMessage):
  # Generated group (Version, group tag 3) nested in CommitResponse:
  # required Reference root_entity_key = 4; required int64 version = 5.
  has_root_entity_key_ = 0
  has_version_ = 0
  version_ = 0

  def __init__(self, contents=None):
    # root_entity_key is required, so its submessage is built eagerly.
    self.root_entity_key_ = Reference()
    if contents is not None: self.MergeFromString(contents)

  def root_entity_key(self): return self.root_entity_key_

  def mutable_root_entity_key(self): self.has_root_entity_key_ = 1; return self.root_entity_key_

  def clear_root_entity_key(self):self.has_root_entity_key_ = 0; self.root_entity_key_.Clear()

  def has_root_entity_key(self): return self.has_root_entity_key_

  def version(self): return self.version_

  def set_version(self, x):
    self.has_version_ = 1
    self.version_ = x

  def clear_version(self):
    if self.has_version_:
      self.has_version_ = 0
      self.version_ = 0

  def has_version(self): return self.has_version_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_root_entity_key()): self.mutable_root_entity_key().MergeFrom(x.root_entity_key())
    if (x.has_version()): self.set_version(x.version())

  def Equals(self, x):
    if x is self: return 1
    if self.has_root_entity_key_ != x.has_root_entity_key_: return 0
    if self.has_root_entity_key_ and self.root_entity_key_ != x.root_entity_key_: return 0
    if self.has_version_ != x.has_version_: return 0
    if self.has_version_ and self.version_ != x.version_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # Both fields are required.
    initialized = 1
    if (not self.has_root_entity_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: root_entity_key not set.')
    elif not self.root_entity_key_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_version_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: version not set.')
    return initialized

  def ByteSize(self):
    # Final +2 covers the two required-field tag bytes.
    n = 0
    n += self.lengthString(self.root_entity_key_.ByteSize())
    n += self.lengthVarInt64(self.version_)
    return n + 2

  def ByteSizePartial(self):
    n = 0
    if (self.has_root_entity_key_):
      n += 1
      n += self.lengthString(self.root_entity_key_.ByteSizePartial())
    if (self.has_version_):
      n += 1
      n += self.lengthVarInt64(self.version_)
    return n

  def Clear(self):
    self.clear_root_entity_key()
    self.clear_version()

  def OutputUnchecked(self, out):
    # Wire tags: 34 = field 4 (length-delimited), 40 = field 5 (varint).
    out.putVarInt32(34)
    out.putVarInt32(self.root_entity_key_.ByteSize())
    self.root_entity_key_.OutputUnchecked(out)
    out.putVarInt32(40)
    out.putVarInt64(self.version_)

  def OutputPartial(self, out):
    if (self.has_root_entity_key_):
      out.putVarInt32(34)
      out.putVarInt32(self.root_entity_key_.ByteSizePartial())
      self.root_entity_key_.OutputPartial(out)
    if (self.has_version_):
      out.putVarInt32(40)
      out.putVarInt64(self.version_)

  def TryMerge(self, d):
    # Group encoding: consume until the END_GROUP tag (28) of group 3.
    while 1:
      tt = d.getVarInt32()
      if tt == 28: break
      if tt == 34:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_root_entity_key().TryMerge(tmp)
        continue
      if tt == 40:
        self.set_version(d.getVarInt64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_root_entity_key_:
      res+=prefix+"root_entity_key <\n"
      res+=self.root_entity_key_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
    return res

class CommitResponse(ProtocolBuffer.ProtocolMessage):
  # Generated message: optional Cost cost = 1 (lazily built) plus a repeated
  # Version group (tag 3, defined above as CommitResponse_Version).
  has_cost_ = 0
  cost_ = None

  def __init__(self, contents=None):
    self.version_ = []
    # Lock guarding lazy construction of the cost_ submessage.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def cost(self):
    # Double-checked lazy init of the submessage under lazy_init_lock_.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None:
          self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_

  def mutable_cost(self): self.has_cost_ = 1; return self.cost()

  def clear_cost(self):
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()

  def has_cost(self): return self.has_cost_

  def version_size(self): return len(self.version_)
  def version_list(self): return self.version_
  def version(self, i): return self.version_[i]

  def mutable_version(self, i): return self.version_[i]

  def add_version(self):
    x = CommitResponse_Version()
    self.version_.append(x)
    return x

  def clear_version(self): self.version_ = []

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
    for i in xrange(x.version_size()): self.add_version().CopyFrom(x.version(i))

  def Equals(self, x):
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    if len(self.version_) != len(x.version_): return 0
    for e1, e2 in zip(self.version_, x.version_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # Valid iff the optional cost and every repeated Version element are valid.
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    for p in self.version_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # 2 bytes per repeated element: START_GROUP (27) + END_GROUP (28) tags.
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    n += 2 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.version_[i].ByteSize()
    return n

  def ByteSizePartial(self):
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
    n += 2 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.version_[i].ByteSizePartial()
    return n

  def Clear(self):
    self.clear_cost()
    self.clear_version()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (cost, length-delimited); 27/28 bracket each
    # repeated Version group (field 3).
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(27)
      self.version_[i].OutputUnchecked(out)
      out.putVarInt32(28)

  def OutputPartial(self, out):
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSizePartial())
      self.cost_.OutputPartial(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(27)
      self.version_[i].OutputPartial(out)
      out.putVarInt32(28)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if tt == 27:
        # Group members decode in place; the element's TryMerge consumes
        # input up to its own END_GROUP tag.
        self.add_version().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.version_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Version%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  kcost = 1
  kVersionGroup = 3
  kVersionroot_entity_key = 4
  kVersionversion = 5

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "cost",
    3: "Version",
    4: "root_entity_key",
    5: "version",
  }, 5)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STARTGROUP,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.NUMERIC,
  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CommitResponse'

if _extension_runtime:
  pass

__all__ = ['InternalHeader','Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_PositionIndexValue','CompiledCursor_Position','Cursor','Error','Cost','Cost_CommitCost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','TouchRequest','TouchResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse','CommitResponse_Version']
ychen820/microblog
y/google-cloud-sdk/platform/google_appengine/google/appengine/datastore/datastore_v3_pb.py
Python
bsd-3-clause
282,355
// Copyright 2012 tsuru authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package provisiontest

import (
	"fmt"
	"io"
	"io/ioutil"
	"net/url"
	"sort"
	"sync"
	"sync/atomic"
	"time"

	docker "github.com/fsouza/go-dockerclient"
	"github.com/pkg/errors"
	"github.com/tsuru/tsuru/action"
	"github.com/tsuru/tsuru/app/bind"
	"github.com/tsuru/tsuru/event"
	"github.com/tsuru/tsuru/net"
	"github.com/tsuru/tsuru/provision"
	"github.com/tsuru/tsuru/provision/dockercommon"
	"github.com/tsuru/tsuru/quota"
	"github.com/tsuru/tsuru/router/routertest"
	appTypes "github.com/tsuru/tsuru/types/app"
)

var (
	// ProvisionerInstance is the shared fake registered under the name "fake"
	// by init below.
	ProvisionerInstance *FakeProvisioner
	errNotProvisioned   = &provision.Error{Reason: "App is not provisioned."}
	// uniqueIpCounter feeds fake unit IPs/ports; always accessed atomically.
	uniqueIpCounter int32 = 0

	// Compile-time interface conformance checks.
	_ provision.NodeProvisioner = &FakeProvisioner{}
	_ provision.Provisioner     = &FakeProvisioner{}
	_ provision.App             = &FakeApp{}
	_ bind.App                  = &FakeApp{}
)

// fakeAppImage is the image name returned by the fake deploy methods.
const fakeAppImage = "app-image"

func init() {
	ProvisionerInstance = NewFakeProvisioner()
	provision.Register("fake", func() (provision.Provisioner, error) {
		return ProvisionerInstance, nil
	})
}

// Fake implementation for provision.App.
type FakeApp struct {
	name           string
	cname          []string
	IP             string
	platform       string
	units          []provision.Unit
	logs           []string
	logMut         sync.Mutex // guards logs
	Commands       []string
	Memory         int64
	Swap           int64
	CpuShare       int
	commMut        sync.Mutex // guards Commands
	Deploys        uint
	env            map[string]bind.EnvVar
	bindCalls      []*provision.Unit
	bindLock       sync.Mutex // guards bindCalls
	serviceEnvs    []bind.ServiceEnvVar
	serviceLock    sync.Mutex // guards serviceEnvs
	Pool           string
	UpdatePlatform bool
	TeamOwner      string
	Teams          []string
	quota.Quota
}

// NewFakeApp builds a FakeApp with the given number of started units, each
// with a unique fake IP/port, and registers it in the fake router.
func NewFakeApp(name, platform string, units int) *FakeApp {
	app := FakeApp{
		name:     name,
		platform: platform,
		units:    make([]provision.Unit, units),
		Quota:    quota.Unlimited,
		Pool:     "test-default",
	}
	routertest.FakeRouter.AddBackend(&app)
	namefmt := "%s-%d"
	for i := 0; i < units; i++ {
		val := atomic.AddInt32(&uniqueIpCounter, 1)
		app.units[i] = provision.Unit{
			ID:     fmt.Sprintf(namefmt, name, i),
			Status: provision.StatusStarted,
			IP:     fmt.Sprintf("10.10.10.%d", val),
			Address: &url.URL{
				Scheme: "http",
				Host:   fmt.Sprintf("10.10.10.%d:%d", val, val),
			},
		}
	}
	return &app
}

func (a *FakeApp) GetMemory() int64 { return a.Memory }

func (a *FakeApp) GetSwap() int64 { return a.Swap }

func (a *FakeApp) GetCpuShare() int { return a.CpuShare }

func (a *FakeApp) GetTeamsName() []string { return a.Teams }

// HasBind reports whether unit was previously passed to BindUnit.
func (a *FakeApp) HasBind(unit *provision.Unit) bool {
	a.bindLock.Lock()
	defer a.bindLock.Unlock()
	for _, u := range a.bindCalls {
		if u.ID == unit.ID {
			return true
		}
	}
	return false
}

func (a *FakeApp) BindUnit(unit *provision.Unit) error {
	a.bindLock.Lock()
	defer a.bindLock.Unlock()
	a.bindCalls = append(a.bindCalls, unit)
	return nil
}

func (a *FakeApp) UnbindUnit(unit *provision.Unit) error {
	a.bindLock.Lock()
	defer a.bindLock.Unlock()
	index := -1
	for i, u := range a.bindCalls {
		if u.ID == unit.ID {
			index = i
			break
		}
	}
	if index < 0 {
		return errors.New("not bound")
	}
	// Swap-remove: order of bindCalls is not significant.
	length := len(a.bindCalls)
	a.bindCalls[index] = a.bindCalls[length-1]
	a.bindCalls = a.bindCalls[:length-1]
	return nil
}

func (a *FakeApp) GetQuota() quota.Quota { return a.Quota }

// SetQuotaInUse sets the in-use counter, failing if it exceeds the limit.
func (a *FakeApp) SetQuotaInUse(inUse int) error {
	if !a.Quota.Unlimited() && inUse > a.Quota.Limit {
		return &quota.QuotaExceededError{
			Requested: uint(inUse),
			Available: uint(a.Quota.Limit),
		}
	}
	a.Quota.InUse = inUse
	return nil
}

func (a *FakeApp) GetCname() []string { return a.cname }

func (a *FakeApp) GetServiceEnvs() []bind.ServiceEnvVar {
	a.serviceLock.Lock()
	defer a.serviceLock.Unlock()
	return a.serviceEnvs
}

func (a *FakeApp) AddInstance(instanceArgs bind.AddInstanceArgs) error {
	a.serviceLock.Lock()
	defer a.serviceLock.Unlock()
	a.serviceEnvs = append(a.serviceEnvs, instanceArgs.Envs...)
	if instanceArgs.Writer != nil {
		instanceArgs.Writer.Write([]byte("add instance"))
	}
	return nil
}

// RemoveInstance deletes every env var matching the service/instance pair,
// erroring when nothing matched.
func (a *FakeApp) RemoveInstance(instanceArgs bind.RemoveInstanceArgs) error {
	a.serviceLock.Lock()
	defer a.serviceLock.Unlock()
	lenBefore := len(a.serviceEnvs)
	for i := 0; i < len(a.serviceEnvs); i++ {
		se := a.serviceEnvs[i]
		if se.ServiceName == instanceArgs.ServiceName && se.InstanceName == instanceArgs.InstanceName {
			a.serviceEnvs = append(a.serviceEnvs[:i], a.serviceEnvs[i+1:]...)
			// Re-check the element shifted into slot i.
			i--
		}
	}
	if len(a.serviceEnvs) == lenBefore {
		return errors.New("instance not found")
	}
	if instanceArgs.Writer != nil {
		instanceArgs.Writer.Write([]byte("remove instance"))
	}
	return nil
}

// Logs returns a copy of the recorded log entries.
func (a *FakeApp) Logs() []string {
	a.logMut.Lock()
	defer a.logMut.Unlock()
	logs := make([]string, len(a.logs))
	copy(logs, a.logs)
	return logs
}

// HasLog reports whether Log was called with exactly these arguments.
func (a *FakeApp) HasLog(source, unit, message string) bool {
	log := source + unit + message
	a.logMut.Lock()
	defer a.logMut.Unlock()
	for _, l := range a.logs {
		if l == log {
			return true
		}
	}
	return false
}

func (a *FakeApp) GetCommands() []string {
	a.commMut.Lock()
	defer a.commMut.Unlock()
	return a.Commands
}

func (a *FakeApp) Log(message, source, unit string) error {
	a.logMut.Lock()
	a.logs = append(a.logs, source+unit+message)
	a.logMut.Unlock()
	return nil
}

func (a *FakeApp) GetName() string { return a.name }

func (a *FakeApp) GetPool() string { return a.Pool }

func (a *FakeApp) GetPlatform() string { return a.platform }

func (a *FakeApp) GetDeploys() uint { return a.Deploys }

func (a *FakeApp) GetTeamOwner() string { return a.TeamOwner }

func (a *FakeApp) Units() ([]provision.Unit, error) {
	return a.units, nil
}

func (a *FakeApp) AddUnit(u provision.Unit) {
	a.units = append(a.units, u)
}

func (a *FakeApp) SetEnv(env bind.EnvVar) {
	if a.env == nil {
		a.env = map[string]bind.EnvVar{}
	}
	a.env[env.Name] = env
}

func (a *FakeApp) SetEnvs(setEnvs bind.SetEnvArgs) error {
	for _, env := range setEnvs.Envs {
		a.SetEnv(env)
	}
	return nil
}

func (a *FakeApp) UnsetEnvs(unsetEnvs bind.UnsetEnvArgs) error {
	for _, env := range unsetEnvs.VariableNames {
		delete(a.env, env)
	}
	return nil
}

func (a *FakeApp) GetLock() provision.AppLock {
	return nil
}

func (a *FakeApp) GetUnits() ([]bind.Unit, error) {
	units := make([]bind.Unit, len(a.units))
	for i := range a.units {
		units[i] = &a.units[i]
	}
	return units, nil
}

func (a *FakeApp) Envs() map[string]bind.EnvVar {
	return a.env
}

// Run records the executed command; the writer and args are ignored.
func (a *FakeApp) Run(cmd string, w io.Writer, args provision.RunArgs) error {
	a.commMut.Lock()
	a.Commands = append(a.Commands, fmt.Sprintf("ran %s", cmd))
	a.commMut.Unlock()
	return nil
}

func (a *FakeApp) GetUpdatePlatform() bool { return a.UpdatePlatform }

func (app *FakeApp) GetRouters() []appTypes.AppRouter {
	return []appTypes.AppRouter{{Name: "fake"}}
}

func (app *FakeApp) GetAddresses() ([]string, error) {
	addr, err := routertest.FakeRouter.Addr(app.GetName())
	if err != nil {
		return nil, err
	}
	return []string{addr}, nil
}

// Cmd records one command executed through the fake provisioner.
type Cmd struct {
	Cmd  string
	Args []string
	App  provision.App
}

// failure is a queued error to be returned by the named method.
type failure struct {
	method string
	err    error
}

// Fake implementation for provision.Provisioner.
type FakeProvisioner struct {
	Name           string
	cmds           []Cmd
	cmdMut         sync.Mutex // guards cmds
	outputs        chan []byte
	failures       chan failure
	apps           map[string]provisionedApp
	mut            sync.RWMutex // guards apps and nodes
	shells         map[string][]provision.ShellOptions
	shellMut       sync.Mutex // guards shells
	nodes          map[string]FakeNode
	nodeContainers map[string]int
}

func NewFakeProvisioner() *FakeProvisioner {
	p := FakeProvisioner{Name: "fake"}
	p.outputs = make(chan []byte, 8)
	p.failures = make(chan failure, 8)
	p.apps = make(map[string]provisionedApp)
	p.shells = make(map[string][]provision.ShellOptions)
	p.nodes = make(map[string]FakeNode)
	p.nodeContainers = make(map[string]int)
	return &p
}

// getError pops a prepared failure for method; a non-matching failure is
// pushed back onto the queue.
func (p *FakeProvisioner) getError(method string) error {
	select {
	case fail := <-p.failures:
		if fail.method == method {
			return fail.err
		}
		p.failures <- fail
	default:
	}
	return nil
}

type FakeNode struct {
	ID         string
	Addr       string
	PoolName   string
	Meta       map[string]string
	status     string
	p          *FakeProvisioner
	failures   int
	hasSuccess bool
}

func (n *FakeNode) IaaSID() string { return n.ID }

func (n *FakeNode) Pool() string { return n.PoolName }

func (n *FakeNode) Address() string { return n.Addr }

func (n *FakeNode) Metadata() map[string]string { return n.Meta }

func (n *FakeNode) MetadataNoPrefix() map[string]string { return n.Meta }

// Units returns the units whose address host matches this node's host.
func (n *FakeNode) Units() ([]provision.Unit, error) {
	n.p.mut.Lock()
	defer n.p.mut.Unlock()
	return n.unitsLocked()
}

func (n *FakeNode)
unitsLocked() ([]provision.Unit, error) {
	// Caller must hold n.p.mut.
	var units []provision.Unit
	for _, a := range n.p.apps {
		for _, u := range a.units {
			if net.URLToHost(u.Address.String()) == net.URLToHost(n.Addr) {
				units = append(units, u)
			}
		}
	}
	return units, nil
}

func (n *FakeNode) Status() string { return n.status }

func (n *FakeNode) FailureCount() int { return n.failures }

func (n *FakeNode) HasSuccess() bool { return n.hasSuccess }

func (n *FakeNode) ResetFailures() { n.failures = 0 }

func (n *FakeNode) Provisioner() provision.NodeProvisioner { return n.p }

// SetHealth overrides the node's health counters for tests.
func (n *FakeNode) SetHealth(failures int, hasSuccess bool) {
	n.failures = failures
	n.hasSuccess = hasSuccess
}

func (p *FakeProvisioner) AddNode(opts provision.AddNodeOptions) error {
	p.mut.Lock()
	defer p.mut.Unlock()
	if err := p.getError("AddNode"); err != nil {
		return err
	}
	// Failures can also be prepared for one specific address.
	if err := p.getError("AddNode:" + opts.Address); err != nil {
		return err
	}
	metadata := opts.Metadata
	if metadata == nil {
		metadata = map[string]string{}
	}
	if _, ok := p.nodes[opts.Address]; ok {
		return errors.New("fake node already exists")
	}
	p.nodes[opts.Address] = FakeNode{
		ID:       opts.IaaSID,
		Addr:     opts.Address,
		PoolName: opts.Pool,
		Meta:     metadata,
		p:        p,
		status:   "enabled",
	}
	return nil
}

// GetNode returns a pointer to a copy of the stored node value.
func (p *FakeProvisioner) GetNode(address string) (provision.Node, error) {
	p.mut.RLock()
	defer p.mut.RUnlock()
	if err := p.getError("GetNode"); err != nil {
		return nil, err
	}
	if n, ok := p.nodes[address]; ok {
		return &n, nil
	}
	return nil, provision.ErrNodeNotFound
}

func (p *FakeProvisioner) RemoveNode(opts provision.RemoveNodeOptions) error {
	p.mut.Lock()
	defer p.mut.Unlock()
	if err := p.getError("RemoveNode"); err != nil {
		return err
	}
	_, ok := p.nodes[opts.Address]
	if !ok {
		return provision.ErrNodeNotFound
	}
	delete(p.nodes, opts.Address)
	if opts.Writer != nil {
		if opts.Rebalance {
			opts.Writer.Write([]byte("rebalancing..."))
			p.rebalanceNodesLocked(provision.RebalanceNodesOptions{
				Force: true,
			})
		}
		opts.Writer.Write([]byte("remove done!"))
	}
	return nil
}

func (p *FakeProvisioner) UpdateNode(opts provision.UpdateNodeOptions) error {
	p.mut.Lock()
	defer p.mut.Unlock()
	if err := p.getError("UpdateNode"); err != nil {
		return err
	}
	n, ok := p.nodes[opts.Address]
	if !ok {
		return provision.ErrNodeNotFound
	}
	if opts.Pool != "" {
		n.PoolName = opts.Pool
	}
	if opts.Metadata != nil {
		n.Meta = opts.Metadata
	}
	if opts.Enable {
		n.status = "enabled"
	}
	if opts.Disable {
		n.status = "disabled"
	}
	p.nodes[opts.Address] = n
	return nil
}

// nodeList sorts nodes by address for deterministic listings.
type nodeList []provision.Node

func (l nodeList) Len() int           { return len(l) }
func (l nodeList) Swap(i, j int)      { l[i], l[j] = l[j], l[i] }
func (l nodeList) Less(i, j int) bool { return l[i].Address() < l[j].Address() }

func (p *FakeProvisioner) ListNodes(addressFilter []string) ([]provision.Node, error) {
	p.mut.RLock()
	defer p.mut.RUnlock()
	if err := p.getError("ListNodes"); err != nil {
		return nil, err
	}
	var result []provision.Node
	if addressFilter != nil {
		// NOTE(review): an unknown address yields the FakeNode zero value
		// here rather than an error — confirm callers expect that.
		result = make([]provision.Node, 0, len(addressFilter))
		for _, a := range addressFilter {
			n := p.nodes[a]
			result = append(result, &n)
		}
	} else {
		result = make([]provision.Node, 0, len(p.nodes))
		for a := range p.nodes {
			n := p.nodes[a]
			result = append(result, &n)
		}
	}
	sort.Sort(nodeList(result))
	return result, nil
}

func (p *FakeProvisioner) NodeForNodeData(nodeData provision.NodeStatusData) (provision.Node, error) {
	return provision.FindNodeByAddrs(p, nodeData.Addrs)
}

func (p *FakeProvisioner) RebalanceNodes(opts provision.RebalanceNodesOptions) (bool, error) {
	p.mut.Lock()
	defer p.mut.Unlock()
	return p.rebalanceNodesLocked(opts)
}

// rebalanceNodesLocked redistributes units round-robin over nodes of each
// app's pool when the max/min unit-count spread is >= 2 (or Force is set).
// Caller must hold p.mut.
func (p *FakeProvisioner) rebalanceNodesLocked(opts provision.RebalanceNodesOptions) (bool, error) {
	if err := p.getError("RebalanceNodes"); err != nil {
		return true, err
	}
	var w io.Writer
	if opts.Event == nil {
		w = ioutil.Discard
	} else {
		w = opts.Event
	}
	fmt.Fprintf(w, "rebalancing - dry: %v, force: %v\n", opts.Dry, opts.Force)
	if len(opts.AppFilter) != 0 {
		fmt.Fprintf(w, "filtering apps: %v\n", opts.AppFilter)
	}
	if len(opts.MetadataFilter) != 0 {
		fmt.Fprintf(w, "filtering metadata: %v\n", opts.MetadataFilter)
	}
	if opts.Pool != "" {
		fmt.Fprintf(w, "filtering pool: %v\n", opts.Pool)
	}
	if len(p.nodes) == 0 || opts.Dry {
		return true, nil
	}
	max := 0
	min := -1
	var nodes []FakeNode
	for _, n := range p.nodes {
		nodes = append(nodes, n)
		units, err := n.unitsLocked()
		if err != nil {
			return true, err
		}
		unitCount := len(units)
		if unitCount > max {
			max = unitCount
		}
		if min == -1 || unitCount < min {
			min = unitCount
		}
	}
	if max-min < 2 && !opts.Force {
		return false, nil
	}
	gi := 0
	for _, a := range p.apps {
		nodeIdx := 0
		for i := range a.units {
			u := &a.units[i]
			firstIdx := nodeIdx
			var hostAddr string
			// Find the next node belonging to this app's pool; error out
			// after a full cycle with no match.
			for {
				idx := nodeIdx
				nodeIdx = (nodeIdx + 1) % len(nodes)
				if nodes[idx].Pool() == a.app.GetPool() {
					hostAddr = net.URLToHost(nodes[idx].Address())
					break
				}
				if nodeIdx == firstIdx {
					return true, errors.Errorf("unable to find node for pool %s", a.app.GetPool())
				}
			}
			u.IP = hostAddr
			u.Address = &url.URL{
				Scheme: "http",
				Host:   fmt.Sprintf("%s:%d", hostAddr, gi),
			}
			gi++
		}
	}
	return true, nil
}

// Restarts returns the number of restarts for a given app.
func (p *FakeProvisioner) Restarts(a provision.App, process string) int {
	p.mut.RLock()
	defer p.mut.RUnlock()
	return p.apps[a.GetName()].restarts[process]
}

// Starts returns the number of starts for a given app.
func (p *FakeProvisioner) Starts(app provision.App, process string) int {
	p.mut.RLock()
	defer p.mut.RUnlock()
	return p.apps[app.GetName()].starts[process]
}

// Stops returns the number of stops for a given app.
func (p *FakeProvisioner) Stops(app provision.App, process string) int {
	p.mut.RLock()
	defer p.mut.RUnlock()
	return p.apps[app.GetName()].stops[process]
}

// Sleeps returns the number of sleeps for a given app.
func (p *FakeProvisioner) Sleeps(app provision.App, process string) int { p.mut.RLock() defer p.mut.RUnlock() return p.apps[app.GetName()].sleeps[process] } func (p *FakeProvisioner) CustomData(app provision.App) map[string]interface{} { p.mut.RLock() defer p.mut.RUnlock() return p.apps[app.GetName()].lastData } // Shells return all shell calls to the given unit. func (p *FakeProvisioner) Shells(unit string) []provision.ShellOptions { p.shellMut.Lock() defer p.shellMut.Unlock() return p.shells[unit] } // Returns the number of calls to restart. // GetCmds returns a list of commands executed in an app. If you don't specify // the command (an empty string), it will return all commands executed in the // given app. func (p *FakeProvisioner) GetCmds(cmd string, app provision.App) []Cmd { var cmds []Cmd p.cmdMut.Lock() for _, c := range p.cmds { if (cmd == "" || c.Cmd == cmd) && app.GetName() == c.App.GetName() { cmds = append(cmds, c) } } p.cmdMut.Unlock() return cmds } // Provisioned checks whether the given app has been provisioned. func (p *FakeProvisioner) Provisioned(app provision.App) bool { p.mut.RLock() defer p.mut.RUnlock() _, ok := p.apps[app.GetName()] return ok } func (p *FakeProvisioner) GetUnits(app provision.App) []provision.Unit { p.mut.RLock() pApp := p.apps[app.GetName()] p.mut.RUnlock() return pApp.units } // GetAppFromUnitID returns an app from unitID func (p *FakeProvisioner) GetAppFromUnitID(unitID string) (provision.App, error) { p.mut.RLock() defer p.mut.RUnlock() for _, a := range p.apps { for _, u := range a.units { if u.GetID() == unitID { return a.app, nil } } } return nil, errors.New("app not found") } // PrepareOutput sends the given slice of bytes to a queue of outputs. // // Each prepared output will be used in the ExecuteCommand. It might be sent to // the standard output or standard error. See ExecuteCommand docs for more // details. 
func (p *FakeProvisioner) PrepareOutput(b []byte) { p.outputs <- b } // PrepareFailure prepares a failure for the given method name. // // For instance, PrepareFailure("GitDeploy", errors.New("GitDeploy failed")) will // cause next Deploy call to return the given error. Multiple calls to this // method will enqueue failures, i.e. three calls to // PrepareFailure("GitDeploy"...) means that the three next GitDeploy call will // fail. func (p *FakeProvisioner) PrepareFailure(method string, err error) { p.failures <- failure{method, err} } // Reset cleans up the FakeProvisioner, deleting all apps and their data. It // also deletes prepared failures and output. It's like calling // NewFakeProvisioner again, without all the allocations. func (p *FakeProvisioner) Reset() { p.cmdMut.Lock() p.cmds = nil p.cmdMut.Unlock() p.mut.Lock() p.apps = make(map[string]provisionedApp) p.mut.Unlock() p.shellMut.Lock() p.shells = make(map[string][]provision.ShellOptions) p.shellMut.Unlock() p.mut.Lock() p.nodes = make(map[string]FakeNode) p.mut.Unlock() uniqueIpCounter = 0 p.nodeContainers = make(map[string]int) for { select { case <-p.outputs: case <-p.failures: default: return } } } func (p *FakeProvisioner) Swap(app1, app2 provision.App, cnameOnly bool) error { return routertest.FakeRouter.Swap(app1.GetName(), app2.GetName(), cnameOnly) } func (p *FakeProvisioner) Deploy(app provision.App, img string, evt *event.Event) (string, error) { if err := p.getError("Deploy"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } pApp.image = img evt.Write([]byte("Builder deploy called")) p.apps[app.GetName()] = pApp return fakeAppImage, nil } func (p *FakeProvisioner) GetClient(app provision.App) (provision.BuilderDockerClient, error) { for _, node := range p.nodes { client, err := docker.NewClient(node.Addr) if err != nil { return nil, err } return &dockercommon.PullAndCreateClient{Client: client}, nil } 
return nil, errors.New("No node found") } func (p *FakeProvisioner) CleanImage(appName, imgName string) error { for _, node := range p.nodes { c, err := docker.NewClient(node.Addr) if err != nil { return err } err = c.RemoveImage(imgName) if err != nil && err != docker.ErrNoSuchImage { return err } } return nil } func (p *FakeProvisioner) ArchiveDeploy(app provision.App, archiveURL string, evt *event.Event) (string, error) { if err := p.getError("ArchiveDeploy"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } evt.Write([]byte("Archive deploy called")) pApp.lastArchive = archiveURL p.apps[app.GetName()] = pApp return fakeAppImage, nil } func (p *FakeProvisioner) UploadDeploy(app provision.App, file io.ReadCloser, fileSize int64, build bool, evt *event.Event) (string, error) { if err := p.getError("UploadDeploy"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } evt.Write([]byte("Upload deploy called")) pApp.lastFile = file p.apps[app.GetName()] = pApp return fakeAppImage, nil } func (p *FakeProvisioner) ImageDeploy(app provision.App, img string, evt *event.Event) (string, error) { if err := p.getError("ImageDeploy"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } pApp.image = img evt.Write([]byte("Image deploy called")) p.apps[app.GetName()] = pApp return img, nil } func (p *FakeProvisioner) Rollback(app provision.App, img string, evt *event.Event) (string, error) { if err := p.getError("Rollback"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } evt.Write([]byte("Rollback deploy called")) p.apps[app.GetName()] = pApp return img, nil } func (p *FakeProvisioner) Rebuild(app provision.App, evt *event.Event) 
(string, error) { if err := p.getError("Rebuild"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } evt.Write([]byte("Rebuild deploy called")) p.apps[app.GetName()] = pApp return fakeAppImage, nil } func (p *FakeProvisioner) Provision(app provision.App) error { if err := p.getError("Provision"); err != nil { return err } if p.Provisioned(app) { return &provision.Error{Reason: "App already provisioned."} } p.mut.Lock() defer p.mut.Unlock() p.apps[app.GetName()] = provisionedApp{ app: app, restarts: make(map[string]int), starts: make(map[string]int), stops: make(map[string]int), sleeps: make(map[string]int), } return nil } func (p *FakeProvisioner) Restart(app provision.App, process string, w io.Writer) error { if err := p.getError("Restart"); err != nil { return err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.restarts[process]++ p.apps[app.GetName()] = pApp if w != nil { fmt.Fprintf(w, "restarting app") } return nil } func (p *FakeProvisioner) Start(app provision.App, process string) error { p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.starts[process]++ p.apps[app.GetName()] = pApp return nil } func (p *FakeProvisioner) Destroy(app provision.App) error { if err := p.getError("Destroy"); err != nil { return err } if !p.Provisioned(app) { return errNotProvisioned } p.mut.Lock() defer p.mut.Unlock() delete(p.apps, app.GetName()) return nil } func (p *FakeProvisioner) AddUnits(app provision.App, n uint, process string, w io.Writer) error { _, err := p.AddUnitsToNode(app, n, process, w, "") return err } func (p *FakeProvisioner) AddUnitsToNode(app provision.App, n uint, process string, w io.Writer, nodeAddr string) ([]provision.Unit, error) { if err := p.getError("AddUnits"); err != nil { return nil, err } if n == 0 { return nil, errors.New("Cannot add 0 
units.") } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return nil, errNotProvisioned } name := app.GetName() platform := app.GetPlatform() length := uint(len(pApp.units)) var addresses []*url.URL for i := uint(0); i < n; i++ { val := atomic.AddInt32(&uniqueIpCounter, 1) var hostAddr string if nodeAddr != "" { hostAddr = net.URLToHost(nodeAddr) } else if len(p.nodes) > 0 { for _, n := range p.nodes { hostAddr = net.URLToHost(n.Address()) break } } else { hostAddr = fmt.Sprintf("10.10.10.%d", val) } unit := provision.Unit{ ID: fmt.Sprintf("%s-%d", name, pApp.unitLen), AppName: name, Type: platform, Status: provision.StatusStarted, IP: hostAddr, ProcessName: process, Address: &url.URL{ Scheme: "http", Host: fmt.Sprintf("%s:%d", hostAddr, val), }, } addresses = append(addresses, unit.Address) pApp.units = append(pApp.units, unit) pApp.unitLen++ } err := routertest.FakeRouter.AddRoutes(name, addresses) if err != nil { return nil, err } result := make([]provision.Unit, int(n)) copy(result, pApp.units[length:]) p.apps[app.GetName()] = pApp if w != nil { fmt.Fprintf(w, "added %d units", n) } return result, nil } func (p *FakeProvisioner) RemoveUnits(app provision.App, n uint, process string, w io.Writer) error { if err := p.getError("RemoveUnits"); err != nil { return err } if n == 0 { return errors.New("cannot remove 0 units") } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } var newUnits []provision.Unit removedCount := n var addresses []*url.URL for _, u := range pApp.units { if removedCount > 0 && u.ProcessName == process { removedCount-- addresses = append(addresses, u.Address) continue } newUnits = append(newUnits, u) } err := routertest.FakeRouter.RemoveRoutes(app.GetName(), addresses) if err != nil { return err } if removedCount > 0 { return errors.New("too many units to remove") } if w != nil { fmt.Fprintf(w, "removing %d units", n) } pApp.units = newUnits pApp.unitLen = 
len(newUnits) p.apps[app.GetName()] = pApp return nil } // ExecuteCommand will pretend to execute the given command, recording data // about it. // // The output of the command must be prepared with PrepareOutput, and failures // must be prepared with PrepareFailure. In case of failure, the prepared // output will be sent to the standard error stream, otherwise, it will be sent // to the standard error stream. // // When there is no output nor failure prepared, ExecuteCommand will return a // timeout error. func (p *FakeProvisioner) ExecuteCommand(stdout, stderr io.Writer, app provision.App, cmd string, args ...string) error { var ( output []byte err error ) command := Cmd{ Cmd: cmd, Args: args, App: app, } p.cmdMut.Lock() p.cmds = append(p.cmds, command) p.cmdMut.Unlock() units, err := p.Units(app) if err != nil { return err } for range units { select { case output = <-p.outputs: select { case fail := <-p.failures: if fail.method == "ExecuteCommand" { stderr.Write(output) return fail.err } p.failures <- fail default: stdout.Write(output) } case fail := <-p.failures: if fail.method == "ExecuteCommand" { err = fail.err select { case output = <-p.outputs: stderr.Write(output) default: } } else { p.failures <- fail } case <-time.After(2e9): return errors.New("FakeProvisioner timed out waiting for output.") } } return err } func (p *FakeProvisioner) ExecuteCommandOnce(stdout, stderr io.Writer, app provision.App, cmd string, args ...string) error { var output []byte command := Cmd{ Cmd: cmd, Args: args, App: app, } p.cmdMut.Lock() p.cmds = append(p.cmds, command) p.cmdMut.Unlock() select { case output = <-p.outputs: stdout.Write(output) case fail := <-p.failures: if fail.method == "ExecuteCommandOnce" { select { case output = <-p.outputs: stderr.Write(output) default: } return fail.err } else { p.failures <- fail } case <-time.After(2e9): return errors.New("FakeProvisioner timed out waiting for output.") } return nil } func (p *FakeProvisioner) 
ExecuteCommandIsolated(stdout, stderr io.Writer, app provision.App, cmd string, args ...string) error { var output []byte command := Cmd{ Cmd: cmd, Args: args, App: app, } p.cmdMut.Lock() p.cmds = append(p.cmds, command) p.cmdMut.Unlock() select { case output = <-p.outputs: stdout.Write(output) case fail := <-p.failures: if fail.method == "ExecuteCommandIsolated" { select { case output = <-p.outputs: stderr.Write(output) default: } return fail.err } else { p.failures <- fail } case <-time.After(2e9): return errors.New("FakeProvisioner timed out waiting for output.") } return nil } func (p *FakeProvisioner) AddUnit(app provision.App, unit provision.Unit) { p.mut.Lock() defer p.mut.Unlock() a := p.apps[app.GetName()] a.units = append(a.units, unit) a.unitLen++ p.apps[app.GetName()] = a } func (p *FakeProvisioner) Units(apps ...provision.App) ([]provision.Unit, error) { if err := p.getError("Units"); err != nil { return nil, err } p.mut.Lock() defer p.mut.Unlock() var allUnits []provision.Unit for _, a := range apps { allUnits = append(allUnits, p.apps[a.GetName()].units...) 
} return allUnits, nil } func (p *FakeProvisioner) RoutableAddresses(app provision.App) ([]url.URL, error) { p.mut.Lock() defer p.mut.Unlock() units := p.apps[app.GetName()].units addrs := make([]url.URL, len(units)) for i := range units { addrs[i] = *units[i].Address } return addrs, nil } func (p *FakeProvisioner) SetUnitStatus(unit provision.Unit, status provision.Status) error { p.mut.Lock() defer p.mut.Unlock() var units []provision.Unit if unit.AppName == "" { units = p.getAllUnits() } else { app, ok := p.apps[unit.AppName] if !ok { return errNotProvisioned } units = app.units } index := -1 for i, unt := range units { if unt.ID == unit.ID { index = i unit.AppName = unt.AppName break } } if index < 0 { return &provision.UnitNotFoundError{ID: unit.ID} } app := p.apps[unit.AppName] app.units[index].Status = status p.apps[unit.AppName] = app return nil } func (p *FakeProvisioner) getAllUnits() []provision.Unit { var units []provision.Unit for _, app := range p.apps { units = append(units, app.units...) 
} return units } func (p *FakeProvisioner) Addr(app provision.App) (string, error) { if err := p.getError("Addr"); err != nil { return "", err } return routertest.FakeRouter.Addr(app.GetName()) } func (p *FakeProvisioner) SetCName(app provision.App, cname string) error { if err := p.getError("SetCName"); err != nil { return err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.cnames = append(pApp.cnames, cname) p.apps[app.GetName()] = pApp return routertest.FakeRouter.SetCName(cname, app.GetName()) } func (p *FakeProvisioner) UnsetCName(app provision.App, cname string) error { if err := p.getError("UnsetCName"); err != nil { return err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.cnames = []string{} p.apps[app.GetName()] = pApp return routertest.FakeRouter.UnsetCName(cname, app.GetName()) } func (p *FakeProvisioner) HasCName(app provision.App, cname string) bool { p.mut.RLock() pApp, ok := p.apps[app.GetName()] p.mut.RUnlock() for _, cnameApp := range pApp.cnames { if cnameApp == cname { return ok && true } } return false } func (p *FakeProvisioner) Stop(app provision.App, process string) error { p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.stops[process]++ for i, u := range pApp.units { u.Status = provision.StatusStopped pApp.units[i] = u } p.apps[app.GetName()] = pApp return nil } func (p *FakeProvisioner) Sleep(app provision.App, process string) error { p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.sleeps[process]++ for i, u := range pApp.units { u.Status = provision.StatusAsleep pApp.units[i] = u } p.apps[app.GetName()] = pApp return nil } func (p *FakeProvisioner) RegisterUnit(a provision.App, unitId string, customData map[string]interface{}) error { p.mut.Lock() defer p.mut.Unlock() pa, ok := 
p.apps[a.GetName()] if !ok { return errors.New("app not found") } pa.lastData = customData for i, u := range pa.units { if u.ID == unitId { u.IP = u.IP + "-updated" pa.units[i] = u p.apps[a.GetName()] = pa return nil } } return &provision.UnitNotFoundError{ID: unitId} } func (p *FakeProvisioner) Shell(opts provision.ShellOptions) error { var unit provision.Unit units, err := p.Units(opts.App) if err != nil { return err } if len(units) == 0 { return errors.New("app has no units") } else if opts.Unit != "" { for _, u := range units { if u.ID == opts.Unit { unit = u break } } } else { unit = units[0] } if unit.ID == "" { return errors.New("unit not found") } p.shellMut.Lock() defer p.shellMut.Unlock() p.shells[unit.ID] = append(p.shells[unit.ID], opts) return nil } func (p *FakeProvisioner) FilterAppsByUnitStatus(apps []provision.App, status []string) ([]provision.App, error) { filteredApps := []provision.App{} for i := range apps { units, _ := p.Units(apps[i]) for _, u := range units { if stringInArray(u.Status.String(), status) { filteredApps = append(filteredApps, apps[i]) break } } } return filteredApps, nil } func (p *FakeProvisioner) GetName() string { return p.Name } func (p *FakeProvisioner) UpgradeNodeContainer(name string, pool string, writer io.Writer) error { p.nodeContainers[name+"-"+pool]++ return nil } func (p *FakeProvisioner) RemoveNodeContainer(name string, pool string, writer io.Writer) error { p.nodeContainers[name+"-"+pool] = 0 return nil } func (p *FakeProvisioner) HasNodeContainer(name string, pool string) bool { return p.nodeContainers[name+"-"+pool] > 0 } func stringInArray(value string, array []string) bool { for _, str := range array { if str == value { return true } } return false } type PipelineFakeProvisioner struct { *FakeProvisioner executedPipeline bool } func (p *PipelineFakeProvisioner) ExecutedPipeline() bool { return p.executedPipeline } func (p *PipelineFakeProvisioner) DeployPipeline() *action.Pipeline { act := action.Action{ 
Name: "change-executed-pipeline", Forward: func(ctx action.FWContext) (action.Result, error) { p.executedPipeline = true return nil, nil }, Backward: func(ctx action.BWContext) { }, } actions := []*action.Action{&act} pipeline := action.NewPipeline(actions...) return pipeline } type PipelineErrorFakeProvisioner struct { *FakeProvisioner } func (p *PipelineErrorFakeProvisioner) DeployPipeline() *action.Pipeline { act := action.Action{ Name: "error-pipeline", Forward: func(ctx action.FWContext) (action.Result, error) { return nil, errors.New("deploy error") }, Backward: func(ctx action.BWContext) { }, } actions := []*action.Action{&act} pipeline := action.NewPipeline(actions...) return pipeline } type provisionedApp struct { units []provision.Unit app provision.App restarts map[string]int starts map[string]int stops map[string]int sleeps map[string]int lastArchive string lastFile io.ReadCloser cnames []string unitLen int lastData map[string]interface{} image string }
ggarnier/tsuru
provision/provisiontest/fake_provisioner.go
GO
bsd-3-clause
35,129
module.exports = { env: { mocha: true }, plugins: [ 'mocha' ] };
pghalliday/recursive-semver
test/.eslintrc.js
JavaScript
isc
81
/* * Copyright (c) 2006, 2007 ThoughtWorks, Inc. * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. * */ package com.thoughtworks.cozmos; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.io.OutputStream; import java.io.PrintWriter; import java.net.MalformedURLException; import java.net.Socket; import java.net.URL; import java.util.StringTokenizer; public class ModDavSvnProxyServlet2 extends HttpServlet { private String targetURL; private String newPageTemplate; public void init(ServletConfig servletConfig) throws ServletException { targetURL = servletConfig.getInitParameter("mod_dav_svn_url"); newPageTemplate = servletConfig.getInitParameter("new_page_template_file"); super.init(servletConfig); } protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String path = req.getServletPath(); Socket socket = startGet(new URL(targetURL + path)); InputStream is = socket.getInputStream(); LineNumberReader lnr = new LineNumberReader(new InputStreamReader(is)); boolean ok = 
isOk(lnr); if (!ok) { socket = startGet(new URL(targetURL + newPageTemplate)); lnr = new LineNumberReader(new InputStreamReader(is)); ok = isOk(lnr); } if (ok) { lnr.readLine(); // Date: lnr.readLine(); // Server: lnr.readLine(); // ETag: lnr.readLine(); // Accept-Ranges: int contentLength = getContentLen(lnr.readLine()); lnr.readLine(); // Content-Type: lnr.readLine(); // end of header resp.setContentType(getServletContext().getMimeType(path)); OutputStream os = resp.getOutputStream(); int done = 0; while (done < contentLength) { int i = lnr.read(); done++; os.write(i); } socket.close(); } } private int getContentLen(String s) { StringTokenizer st = new StringTokenizer(s); st.nextToken(); return Integer.parseInt(st.nextToken()); } private boolean isOk(LineNumberReader lnr) throws IOException { return "HTTP/1.1 200 OK".equals(lnr.readLine()); } private Socket startGet(URL url) throws IOException { Socket socket = new Socket(url.getHost(), 80); PrintWriter pw = new PrintWriter(socket.getOutputStream(), true); pw.println("GET " + url.getPath() + " HTTP/1.1"); pw.println("Host: " + url.getHost()); pw.println(); return socket; } }
codehaus/cozmos
src/main/java/com/thoughtworks/cozmos/ModDavSvnProxyServlet2.java
Java
isc
3,638
var doNothing = function () {} /** * The `Base` log defines methods that transports will share. */ var Base = module.exports = function (config, defaults) { var cedar = require('../../cedar') // A log is a shorthand for `log.log`, among other things. var log = function () { log.log.apply(log, arguments) } // Don't run `setMethods` until all config properties are set. var setMethods = doNothing // Define properties that trigger `setMethods`. Base.resetters.forEach(function (property) { var value Object.defineProperty(log, property, { get: function () { return value }, set: function (newValue) { value = newValue setMethods.apply(log) } }) }) // Copy `config` properties to the `log`. Base.decorate(log, config, true) // Apply default properties. Base.decorate(log, defaults || Base.defaults) // Set up logging methods. Base.setMethods.apply(log) // Re-run `setMethods` if `resetters` change. setMethods = Base.setMethods // Return the fully-decorated log function. return log } /** * Some properties will reset methods if changed. */ Base.resetters = ['level', 'prefixes', 'format', 'showTrace'] /** * Cedar supports 7 levels of logging. */ Base.levels = ['trace', 'debug', 'log', 'info', 'warn', 'error', 'fatal'] /** * Share defaults between log objects. */ Base.defaults = { // Show all log messages by default. level: 'trace', // Stream to `stdout` (using `write`). stream: process.stdout, // Don't add any space to JSON. space: '', // Stringify with `JSON.stringify`. stringify: JSON.stringify, // Join arguments together as an array. join: function (args) { var list = [] for (var index = 0, length = args.length; index < length; index++) { var arg = args[index] if (arg instanceof Error) { arg = '"' + (arg.stack || arg.toString()).replace(/\n/, '\\n') + '"' } else { arg = JSON.stringify(arg, null, this.space) } list.push(arg) } return '[' + list.join(',') + ']' }, // Start messages with a prefix for each log method. 
prefixes: { trace: 'TRACE ', debug: 'DEBUG ', log: 'LOG ', info: 'INFO ', warn: 'WARN ', error: 'ERROR ', fatal: 'FATAL ' }, // Format a log message. format: function (message, type, prefix) { return prefix + message + '\n' } } /** * Decorate an object with the properties of another. */ Base.decorate = function (object, defaults, shouldOverwrite) { object = object || {} for (var key in defaults) { if (shouldOverwrite || (typeof object[key] === 'undefined')) { object[key] = defaults[key] } } return object } /** * Create logging methods based on the configured `level`. */ Base.setMethods = function () { var self = this var found = false if ((Base.levels.indexOf(self.level) < 0) && self.level !== 'nothing') { self.error('Unknown log level: "' + self.level + '".') } else { Base.levels.forEach(function (methodName, index) { if (methodName === self.level) { found = true } var prefix = self.prefixes[methodName] || '' var format = self.format // If this log is an Emitter, we can catch and emit errors. if (self.emit) { self[methodName] = found ? function () { var message = self.join(arguments) message = format.call(self, message, methodName, prefix) try { self.stream.write(message) } catch (e) { self.emit('error', e) } } : doNothing // Otherwise, they'll just throw. } else { self[methodName] = found ? function () { var message = self.join(arguments) message = format.call(self, message, methodName, prefix) self.stream.write(message) } : doNothing } }) // Wrap the trace method with a stack tracer. if (self.trace !== doNothing) { var traceMethod = self.trace self.trace = function () { var e = new Error('') Error.captureStackTrace(e, self.trace) var l = arguments.length arguments[l] = e.stack.split('\n').splice(2).join('\n') arguments.length = ++l traceMethod.apply(self, arguments) } } } }
zerious/cedar
lib/transports/base.js
JavaScript
isc
4,332
using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace GAPPSF.OKAPI { public class SiteInfoNetherlands: SiteInfo { public const string STR_INFO = "opencaching.nl"; public SiteInfoNetherlands() { ID = "2"; Info = STR_INFO; OKAPIBaseUrl = "http://www.opencaching.nl/okapi/"; GeocodePrefix = "OB"; } public override void LoadSettings() { Username = Core.ApplicationData.Instance.AccountInfos.GetAccountInfo(GeocodePrefix).AccountName ?? ""; UserID = Core.Settings.Default.OKAPISiteInfoNetherlandsUserID ?? ""; Token = Core.Settings.Default.OKAPISiteInfoNetherlandsToken ?? ""; TokenSecret = Core.Settings.Default.OKAPISiteInfoNetherlandsTokenSecret ?? ""; base.LoadSettings(); } public override void SaveSettings() { Core.ApplicationData.Instance.AccountInfos.GetAccountInfo(GeocodePrefix).AccountName = Username ?? ""; Core.Settings.Default.OKAPISiteInfoNetherlandsUserID = UserID ?? ""; Core.Settings.Default.OKAPISiteInfoNetherlandsToken = Token ?? ""; Core.Settings.Default.OKAPISiteInfoNetherlandsTokenSecret = TokenSecret ?? ""; } } }
RH-Code/GAPP
GAPPSF/OKAPI/SiteInfoNetherlands.cs
C#
mit
1,336
// using System; if (double.TryParse(aaa, out var bbb)) { // ... } //
general-language-syntax/GLS
test/integration/IfStringToDoubleEnd/if string to double end.cs
C#
mit
75
using System; using System.Collections.Generic; using KellermanSoftware.CompareNetObjects; using KellermanSoftware.CompareNetObjects.TypeComparers; using ProtoBuf; namespace Abc.Zebus.Testing.Comparison { internal static class ComparisonExtensions { public static bool DeepCompare<T>(this T firstObj, T secondObj, params string[] elementsToIgnore) { var comparer = CreateComparer(); comparer.Config.MembersToIgnore.AddRange(elementsToIgnore); return comparer.Compare(firstObj, secondObj).AreEqual; } public static CompareLogic CreateComparer() { return new CompareLogic { Config = { CompareStaticProperties = false, CompareStaticFields = false, CustomComparers = { // TODO : Is this still used? new EquatableComparer() }, AttributesToIgnore = new List<Type> { typeof(ProtoIgnoreAttribute) }, } }; } private class EquatableComparer : BaseTypeComparer { public EquatableComparer() : base(RootComparerFactory.GetRootComparer()) { } public override bool IsTypeMatch(Type type1, Type type2) { if (type1 != type2) return false; return typeof(IEquatable<>).MakeGenericType(type1).IsAssignableFrom(type1); } public override void CompareType(CompareParms parms) { if (!Equals(parms.Object1, parms.Object2)) AddDifference(parms); } } } }
biarne-a/Zebus
src/Abc.Zebus.Testing/Comparison/ComparisonExtensions.cs
C#
mit
1,822
// Generated on 12/11/2014 19:01:22 using System; using System.Collections.Generic; using System.Linq; using BlueSheep.Common.Protocol.Types; using BlueSheep.Common.IO; using BlueSheep.Engine.Types; namespace BlueSheep.Common.Protocol.Messages { public class SequenceNumberMessage : Message { public new const uint ID =6317; public override uint ProtocolID { get { return ID; } } public ushort number; public SequenceNumberMessage() { } public SequenceNumberMessage(ushort number) { this.number = number; } public override void Serialize(BigEndianWriter writer) { writer.WriteUShort(number); } public override void Deserialize(BigEndianReader reader) { number = reader.ReadUShort(); if (number < 0 || number > 65535) throw new Exception("Forbidden value on number = " + number + ", it doesn't respect the following condition : number < 0 || number > 65535"); } } }
Sadikk/BlueSheep
BlueSheep/Common/Protocol/messages/game/basic/SequenceNumberMessage.cs
C#
mit
1,158
#include <iostream> using namespace std; #include <omp.h> #define SIZE 8 int main(void){ int x[SIZE]; int sum=0; for(int i=0;i<SIZE;i++){ x[i]=i; } #pragma omp parallel for reduction (+:sum) for(int i=0;i<SIZE;i++){ sum+=x[i]; } cout<<sum<<std::endl; return 0; }
wasit7/cs426
lectures/week02_omp2/week022_forReduction/for.cpp
C++
mit
300
from .image import Image from .product_category import ProductCategory from .supplier import Supplier, PaymentMethod from .product import Product from .product import ProductImage from .enum_values import EnumValues from .related_values import RelatedValues from .customer import Customer from .expense import Expense from .incoming import Incoming from .shipping import Shipping, ShippingLine from .receiving import Receiving, ReceivingLine from .inventory_transaction import InventoryTransaction, InventoryTransactionLine from .purchase_order import PurchaseOrder, PurchaseOrderLine from .sales_order import SalesOrder, SalesOrderLine from .user import User from .role import Role, roles_users from .organization import Organization from .inventory_in_out_link import InventoryInOutLink from .aspects import update_menemonic from .product_inventory import ProductInventory
betterlife/psi
psi/app/models/__init__.py
Python
mit
875
import type {ResponseType} from "./base.type"; function parseJSON(response: ResponseType): Object { return response.json(); } export {parseJSON};
lingui/everest
src/parsers.js
JavaScript
mit
150
// This code contains NVIDIA Confidential Information and is disclosed to you // under a form of NVIDIA software license agreement provided separately to you. // // Notice // NVIDIA Corporation and its licensors retain all intellectual property and // proprietary rights in and to this software and related documentation and // any modifications thereto. Any use, reproduction, disclosure, or // distribution of this software and related documentation without an express // license agreement from NVIDIA Corporation is strictly prohibited. // // ALL NVIDIA DESIGN SPECIFICATIONS, CODE ARE PROVIDED "AS IS.". NVIDIA MAKES // NO WARRANTIES, EXPRESSED, IMPLIED, STATUTORY, OR OTHERWISE WITH RESPECT TO // THE MATERIALS, AND EXPRESSLY DISCLAIMS ALL IMPLIED WARRANTIES OF NONINFRINGEMENT, // MERCHANTABILITY, AND FITNESS FOR A PARTICULAR PURPOSE. // // Information and code furnished is believed to be accurate and reliable. // However, NVIDIA Corporation assumes no responsibility for the consequences of use of such // information or for any infringement of patents or other rights of third parties that may // result from its use. No license is granted by implication or otherwise under any patent // or patent rights of NVIDIA Corporation. Details are subject to change without notice. // This code supersedes and replaces all information previously supplied. // NVIDIA Corporation products are not authorized for use as critical // components in life support devices or systems without express written approval of // NVIDIA Corporation. // // Copyright (c) 2008-2013 NVIDIA Corporation. All rights reserved. 
#include <RendererTextureDesc.h> using namespace SampleRenderer; RendererTextureDesc::RendererTextureDesc(void) { format = RendererTexture::NUM_FORMATS; filter = RendererTexture::FILTER_LINEAR; addressingU = RendererTexture::ADDRESSING_WRAP; addressingV = RendererTexture::ADDRESSING_WRAP; addressingW = RendererTexture::ADDRESSING_WRAP; width = 0; height = 0; depth = 1; numLevels = 0; renderTarget = false; data = NULL; } bool RendererTextureDesc::isValid(void) const { bool ok = true; if(format >= RendererTexture2D::NUM_FORMATS) ok = false; if(filter >= RendererTexture2D::NUM_FILTERS) ok = false; if(addressingU >= RendererTexture2D::NUM_ADDRESSING) ok = false; if(addressingV >= RendererTexture2D::NUM_ADDRESSING) ok = false; if(width <= 0 || height <= 0 || depth <= 0) ok = false; // TODO: check for power of two. if(numLevels <= 0) ok = false; if(renderTarget) { if(depth > 1) ok = false; if(format == RendererTexture2D::FORMAT_DXT1) ok = false; if(format == RendererTexture2D::FORMAT_DXT3) ok = false; if(format == RendererTexture2D::FORMAT_DXT5) ok = false; } return ok; }
jjuiddong/KarlSims
SampleFramework/renderer/src/RendererTextureDesc.cpp
C++
mit
2,787
var request = require('request'), mongoose = require('mongoose'), util = require('util'), url = require('url'), helpers = require('./helpers'), sync = require('./sync') // turn off request pooling request.defaults({ agent:false }) // cache elasticsearch url options for elmongo.search() to use var elasticUrlOptions = null /** * Attach mongoose plugin for elasticsearch indexing * * @param {Object} schema mongoose schema * @param {Object} options elasticsearch options object. Keys: host, port, index, type */ module.exports = elmongo = function (schema, options) { // attach methods to schema schema.methods.index = index schema.methods.unindex = unindex schema.statics.sync = function (cb) { options = helpers.mergeModelOptions(options, this) return sync.call(this, schema, options, cb) } schema.statics.search = function (searchOpts, cb) { options = helpers.mergeModelOptions(options, this) var searchUri = helpers.makeTypeUri(options) + '/_search?search_type=dfs_query_then_fetch&preference=_primary_first' return helpers.doSearchAndNormalizeResults(searchUri, searchOpts, cb) } // attach mongoose middleware hooks schema.post('save', function () { options = helpers.mergeModelOptions(options, this) this.index(options) }) schema.post('remove', function () { options = helpers.mergeModelOptions(options, this) this.unindex(options) }) } /** * Search across multiple collections. 
Same usage as model search, but with an extra key on `searchOpts` - `collections` * @param {Object} searchOpts * @param {Function} cb */ elmongo.search = function (searchOpts, cb) { // merge elasticsearch url config options elasticUrlOptions = helpers.mergeOptions(elasticUrlOptions) // determine collections to search on var collections = searchOpts.collections; if (elasticUrlOptions.prefix) { // prefix was specified - namespace the index names to use the prefix for each collection's index if (searchOpts.collections && searchOpts.collections.length) { // collections were specified - prepend the prefix on each collection name collections = collections.map(function (collection) { return elasticUrlOptions.prefix + '-' + collection }) } else { // no collections specified, but prefix specified - use wildcard index with prefix collections = [ elasticUrlOptions.prefix + '*' ] } } else { // no prefix used // if collections specified, just use their names without the prefix if (!collections) { // no collections were specified so use _all (searches all collections), without prefix searchOpts.collections = [ '_all' ] } } var searchUri = helpers.makeDomainUri(elasticUrlOptions) + '/' + collections.join(',') + '/_search?search_type=dfs_query_then_fetch&preference=_primary_first' return helpers.doSearchAndNormalizeResults(searchUri, searchOpts, cb) } /** * Configure the Elasticsearch url options for `elmongo.search()`. * * @param {Object} options - keys: host, port, prefix (optional) */ elmongo.search.config = function (options) { // only overwrite `options` values that are being specified in this call to `config` if (elasticUrlOptions) { Object .keys(elasticUrlOptions) .forEach(function (key) { elasticUrlOptions[key] = options[key] || elasticUrlOptions[key] }) } // normalize the `options` object elasticUrlOptions = helpers.mergeOptions(options) } /** * Index a document in elasticsearch (create if not existing) * * @param {Object} options elasticsearch options object. 
Keys: host, port, index, type */ function index (options) { var self = this // strip mongoose-added functions, depopulate any populated fields, and serialize the doc var esearchDoc = helpers.serializeModel(this) var indexUri = helpers.makeDocumentUri(options, self) var reqOpts = { method: 'PUT', url: indexUri, body: JSON.stringify(esearchDoc) } // console.log('index:', indexUri) helpers.backOffRequest(reqOpts, function (err, res, body) { if (err) { var error = new Error('Elasticsearch document indexing error: '+util.inspect(err, true, 10, true)) error.details = err self.emit('error', error) return } self.emit('elmongo-indexed', body) }) } /** * Remove a document from elasticsearch * * @param {Object} options elasticsearch options object. Keys: host, port, index, type */ function unindex (options) { var self = this var unindexUri = helpers.makeDocumentUri(options, self) // console.log('unindex:', unindexUri) var reqOpts = { method: 'DELETE', url: unindexUri } helpers.backOffRequest(reqOpts, function (err, res, body) { if (err) { var error = new Error('Elasticsearch document index deletion error: '+util.inspect(err, true, 10, true)) error.details = err self.emit('error', error) return } self.emit('elmongo-unindexed', body) }) }
regini/inSquare
inSquareBackend/cloud.insquare/node_modules/elmongo/lib/elmongo.js
JavaScript
mit
5,351
/** * Copyright 2015 Telerik AD * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ (function(f, define){ define([], f); })(function(){ (function( window, undefined ) { var kendo = window.kendo || (window.kendo = { cultures: {} }); kendo.cultures["qut"] = { name: "qut", numberFormat: { pattern: ["-n"], decimals: 2, ",": ",", ".": ".", groupSize: [3], percent: { pattern: ["-n %","n %"], decimals: 2, ",": ",", ".": ".", groupSize: [3], symbol: "%" }, currency: { name: "", abbr: "", pattern: ["($n)","$n"], decimals: 2, ",": ",", ".": ".", groupSize: [3], symbol: "Q" } }, calendars: { standard: { days: { names: ["juq\u0027ij","kaq\u0027ij","oxq\u0027ij","kajq\u0027ij","joq\u0027ij","waqq\u0027ij","wuqq\u0027ij"], namesAbbr: ["juq\u0027","kaq\u0027","oxq\u0027","kajq\u0027","joq\u0027","waqq\u0027","wuqq\u0027"], namesShort: ["ju","ka","ox","kj","jo","wa","wu"] }, months: { names: ["nab\u0027e ik\u0027","ukab\u0027 ik\u0027","urox ik\u0027","ukaj ik\u0027","uro ik\u0027","uwaq ik\u0027","uwuq ik\u0027","uwajxaq ik\u0027","ub\u0027elej ik\u0027","ulaj ik\u0027","ujulaj ik\u0027","ukab\u0027laj ik\u0027"], namesAbbr: ["nab\u0027e","ukab\u0027","urox","ukaj","uro","uwaq","uwuq","uwajxaq","ub\u0027elej","ulaj","ujulaj","ukab\u0027laj"] }, AM: ["a.m.","a.m.","A.M."], PM: ["p.m.","p.m.","P.M."], patterns: { d: "dd/MM/yyyy", D: "dddd, dd' rech 'MMMM' rech 'yyyy", F: "dddd, dd' rech 'MMMM' rech 'yyyy h:mm:ss tt", g: "dd/MM/yyyy h:mm tt", G: "dd/MM/yyyy h:mm:ss tt", m: "d' rech 'MMMM", M: "d' 
rech 'MMMM", s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss", t: "h:mm tt", T: "h:mm:ss tt", u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'", y: "MMMM' rech 'yyyy", Y: "MMMM' rech 'yyyy" }, "/": "/", ":": ":", firstDay: 1 } } } })(this); return window.kendo; }, typeof define == 'function' && define.amd ? define : function(_, f){ f(); });
assunluis80/Web-Starter-Template
assets/scripts/vendors/kendo_ui/cultures/kendo.culture.qut.js
JavaScript
mit
3,281
using System; using System.Collections.Generic; using System.Text; using FlatRedBall; using FlatRedBall.Gui; using FlatRedBall.AI.Pathfinding; #if FRB_MDX using Color = System.Drawing.Color; #else using Color = Microsoft.Xna.Framework.Graphics.Color; #endif using CameraPropertyGrid = EditorObjects.Gui.CameraPropertyGrid; using EditorObjects.Gui; using EditorObjects; namespace AIEditor.Gui { public static class GuiData { #region Fields static int mFramesSinceLastExpensiveGuiUpdate = 0; static Menu mMenuStrip; static CameraPropertyGrid mCameraPropertyGrid; static NodeNetworkPropertyGrid mNodeNetworkPropertyGrid; static ToolsWindow mToolsWindow; static CommandDisplay mCommandDisplay; static ScenePropertyGrid mScenePropertyGrid; static ShapeCollectionPropertyGrid mShapeCollectionPropertyGrid; public static EditorPropertiesGrid mEditorPropertiesGrid; #endregion #region Properties public static CameraPropertyGrid CameraPropertyGrid { get { return mCameraPropertyGrid; } } public static CommandDisplay CommandDisplay { get { return mCommandDisplay; } } public static EditorPropertiesGrid EditorPropertiesGrid { get { return mEditorPropertiesGrid; } } public static NodeNetworkPropertyGrid NodeNetworkPropertyGrid { get { return mNodeNetworkPropertyGrid; } set { mNodeNetworkPropertyGrid = value; } } public static ScenePropertyGrid ScenePropertyGrid { get { return mScenePropertyGrid; } } public static ShapeCollectionPropertyGrid ShapeCollectionPropertyGrid { get { return mShapeCollectionPropertyGrid; } } public static ToolsWindow ToolsWindow { get { return mToolsWindow; } } #endregion #region Events private static void CreateColorPropertyGrid(Window callingWindow) { ((PropertyGrid<Color>)callingWindow).ExcludeAllMembers(); ((PropertyGrid<Color>)callingWindow).IncludeMember("A"); ((PropertyGrid<Color>)callingWindow).IncludeMember("R"); ((PropertyGrid<Color>)callingWindow).IncludeMember("G"); ((PropertyGrid<Color>)callingWindow).IncludeMember("B"); callingWindow.Y = 40; } private 
static void CreatePositionedNodePropertyGrid(Window callingWindow) { PropertyGrid<PositionedNode> asPropertyGrid = callingWindow as PropertyGrid<PositionedNode>; asPropertyGrid.ExcludeMember("CostToGetHere"); asPropertyGrid.ExcludeMember("Links"); asPropertyGrid.ExcludeMember("X"); asPropertyGrid.ExcludeMember("Y"); asPropertyGrid.ExcludeMember("Z"); asPropertyGrid.Name = "Positioned Node"; } #endregion #region Methods #region Public Methods public static void Initialize() { mMenuStrip = new Menu(); mToolsWindow = new ToolsWindow(); CreatePropertyGrids(); mCommandDisplay = new CommandDisplay(); CreateListDisplayWindows(); } public static void Update() { if (EditorData.Scene != mScenePropertyGrid.SelectedObject) { mScenePropertyGrid.SelectedObject = EditorData.Scene; } mScenePropertyGrid.UpdateDisplayedProperties(); mNodeNetworkPropertyGrid.Update(); mCameraPropertyGrid.UpdateDisplayedProperties(); // This can be slow. We can speed it up by only doing it every X frames const int updateEveryXFrames = 30; mFramesSinceLastExpensiveGuiUpdate++; if (mFramesSinceLastExpensiveGuiUpdate >= updateEveryXFrames) { mNodeNetworkPropertyGrid.UpdateDisplayedProperties(); mFramesSinceLastExpensiveGuiUpdate = 0; } #region Update the ShapeCollection PropertyGrid if (mShapeCollectionPropertyGrid.Visible) { if (mShapeCollectionPropertyGrid.SelectedObject != EditorData.ShapeCollection) { mShapeCollectionPropertyGrid.SelectedObject = EditorData.ShapeCollection; } mShapeCollectionPropertyGrid.UpdateDisplayedProperties(); } #endregion } #endregion #region Private Methods private static void CreateListDisplayWindows() { } private static void CreatePropertyGrids() { #region CamerPropertyGrid mCameraPropertyGrid = new CameraPropertyGrid(GuiManager.Cursor); GuiManager.AddWindow(mCameraPropertyGrid); mCameraPropertyGrid.SelectedObject = SpriteManager.Camera; mCameraPropertyGrid.X = mCameraPropertyGrid.ScaleX; mCameraPropertyGrid.Y = 40; mCameraPropertyGrid.HasCloseButton = true; 
mCameraPropertyGrid.UndoInstructions = UndoManager.Instructions; #endregion #region NodeNetwork PropertyGrid mNodeNetworkPropertyGrid = new NodeNetworkPropertyGrid(); mNodeNetworkPropertyGrid.SelectedObject = EditorData.NodeNetwork; mNodeNetworkPropertyGrid.X = mNodeNetworkPropertyGrid.ScaleX; mNodeNetworkPropertyGrid.Y = 61; mNodeNetworkPropertyGrid.HasCloseButton = true; mNodeNetworkPropertyGrid.UndoInstructions = UndoManager.Instructions; #endregion #region ScenePropertyGrid mScenePropertyGrid = new ScenePropertyGrid(GuiManager.Cursor); GuiManager.AddWindow(mScenePropertyGrid); mScenePropertyGrid.X = mScenePropertyGrid.ScaleX; mScenePropertyGrid.Y = 75.7f; mScenePropertyGrid.ShowPropertyGridOnStrongSelect = true; mScenePropertyGrid.HasCloseButton = true; mScenePropertyGrid.Visible = false; mScenePropertyGrid.UndoInstructions = UndoManager.Instructions; #endregion #region ShapeCollectionPropertyGrid mShapeCollectionPropertyGrid = new ShapeCollectionPropertyGrid(GuiManager.Cursor); GuiManager.AddWindow(mShapeCollectionPropertyGrid); mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectAxisAlignedCube = true; mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectAxisAlignedRectangle = true; mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectCircle = true; mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectPolygon = true; mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectSphere = true; mShapeCollectionPropertyGrid.HasCloseButton = true; mShapeCollectionPropertyGrid.Visible = false; mShapeCollectionPropertyGrid.UndoInstructions = UndoManager.Instructions; #endregion PropertyGrid.SetNewWindowEvent<FlatRedBall.AI.Pathfinding.PositionedNode>(CreatePositionedNodePropertyGrid); PropertyGrid.SetNewWindowEvent<Color>(CreateColorPropertyGrid); #region EditorPropertiesGrid mEditorPropertiesGrid = new EditorPropertiesGrid(); mEditorPropertiesGrid.Visible = false; #endregion } #endregion #endregion } }
GorillaOne/FlatRedBall
FRBDK/AIEditor/AIEditor/AIEditor/Gui/GuiData.cs
C#
mit
7,748
// flow-typed signature: 573c576fe34eb3c3c65dd7a9c90a46d2 // flow-typed version: b43dff3e0e/http-errors_v1.x.x/flow_>=v0.25.x declare module 'http-errors' { declare class SpecialHttpError extends HttpError { constructor(): SpecialHttpError; } declare class HttpError extends Error { expose: bool; message: string; status: number; statusCode: number; } declare module.exports: { (status?: number, message?: string, props?: Object): HttpError; HttpError: typeof HttpError; BadRequest: typeof SpecialHttpError; Unauthorized: typeof SpecialHttpError; PaymentRequired: typeof SpecialHttpError; Forbidden: typeof SpecialHttpError; NotFound: typeof SpecialHttpError; MethodNotAllowed: typeof SpecialHttpError; NotAcceptable: typeof SpecialHttpError; ProxyAuthenticationRequired: typeof SpecialHttpError; RequestTimeout: typeof SpecialHttpError; Conflict: typeof SpecialHttpError; Gone: typeof SpecialHttpError; LengthRequired: typeof SpecialHttpError; PreconditionFailed: typeof SpecialHttpError; PayloadTooLarge: typeof SpecialHttpError; URITooLong: typeof SpecialHttpError; UnsupportedMediaType: typeof SpecialHttpError; RangeNotStatisfiable: typeof SpecialHttpError; ExpectationFailed: typeof SpecialHttpError; ImATeapot: typeof SpecialHttpError; MisdirectedRequest: typeof SpecialHttpError; UnprocessableEntity: typeof SpecialHttpError; Locked: typeof SpecialHttpError; FailedDependency: typeof SpecialHttpError; UnorderedCollection: typeof SpecialHttpError; UpgradeRequired: typeof SpecialHttpError; PreconditionRequired: typeof SpecialHttpError; TooManyRequests: typeof SpecialHttpError; RequestHeaderFieldsTooLarge: typeof SpecialHttpError; UnavailableForLegalReasons: typeof SpecialHttpError; InternalServerError: typeof SpecialHttpError; NotImplemented: typeof SpecialHttpError; BadGateway: typeof SpecialHttpError; ServiceUnavailable: typeof SpecialHttpError; GatewayTimeout: typeof SpecialHttpError; HTTPVersionNotSupported: typeof SpecialHttpError; VariantAlsoNegotiates: typeof 
SpecialHttpError; InsufficientStorage: typeof SpecialHttpError; LoopDetected: typeof SpecialHttpError; BandwidthLimitExceeded: typeof SpecialHttpError; NotExtended: typeof SpecialHttpError; NetworkAuthenticationRequired: typeof SpecialHttpError; } }
conveyal/scenario-editor
flow-typed/npm/http-errors_v1.x.x.js
JavaScript
mit
2,439
using System; namespace Microsoft.eShopOnContainers.Services.Catalog.API.Model { public class CatalogItem { public int Id { get; set; } public string Name { get; set; } public string Description { get; set; } public decimal Price { get; set; } public string PictureUri { get; set; } public int CatalogTypeId { get; set; } public CatalogType CatalogType { get; set; } public int CatalogBrandId { get; set; } public CatalogBrand CatalogBrand { get; set; } public CatalogItem() { } } }
oferns/eShopOnContainers
src/Services/Catalog/Catalog.API/Model/CatalogItem.cs
C#
mit
593
require 'cgi' require 'nkf' class Mechanize::Util # default mime type data for Page::Image#mime_type. # You can use another Apache-compatible mimetab. # mimetab = WEBrick::HTTPUtils.load_mime_types('/etc/mime.types') # Mechanize::Util::DefaultMimeTypes.replace(mimetab) DefaultMimeTypes = WEBrick::HTTPUtils::DefaultMimeTypes class << self # Builds a query string from a given enumerable object # +parameters+. This method uses Mechanize::Util.each_parameter # as preprocessor, which see. def build_query_string(parameters, enc = nil) each_parameter(parameters).inject(nil) { |s, (k, v)| # WEBrick::HTTP.escape* has some problems about m17n on ruby-1.9.*. (s.nil? ? '' : s << '&') << [CGI.escape(k.to_s), CGI.escape(v.to_s)].join('=') } || '' end # Parses an enumerable object +parameters+ and iterates over the # key-value pairs it contains. # # +parameters+ may be a hash, or any enumerable object which # iterates over [key, value] pairs, typically an array of arrays. # # If a key is paired with an array-like object, the pair is # expanded into multiple occurrences of the key, one for each # element of the array. e.g. { a: [1, 2] } => [:a, 1], [:a, 2] # # If a key is paired with a hash-like object, the pair is expanded # into hash-like multiple pairs, one for each pair of the hash. # e.g. { a: { x: 1, y: 2 } } => ['a[x]', 1], ['a[y]', 2] # # An array-like value is allowed to be specified as hash value. # e.g. { a: { q: [1, 2] } } => ['a[q]', 1], ['a[q]', 2] # # For a non-array-like, non-hash-like value, the key-value pair is # yielded as is. def each_parameter(parameters, &block) return to_enum(__method__, parameters) if block.nil? parameters.each { |key, value| each_parameter_1(key, value, &block) } end private def each_parameter_1(key, value, &block) return if key.nil? 
case when s = String.try_convert(value) yield [key, s] when a = Array.try_convert(value) a.each { |avalue| yield [key, avalue] } when h = Hash.try_convert(value) h.each { |hkey, hvalue| each_parameter_1('%s[%s]' % [key, hkey], hvalue, &block) } else yield [key, value] end end end # Converts string +s+ from +code+ to UTF-8. def self.from_native_charset(s, code, ignore_encoding_error = false, log = nil) return s unless s && code return s unless Mechanize.html_parser == Nokogiri::HTML begin s.encode(code) rescue EncodingError => ex log.debug("from_native_charset: #{ex.class}: form encoding: #{code.inspect} string: #{s}") if log if ignore_encoding_error s else raise end end end def self.html_unescape(s) return s unless s s.gsub(/&(\w+|#[0-9]+);/) { |match| number = case match when /&(\w+);/ Mechanize.html_parser::NamedCharacters[$1] when /&#([0-9]+);/ $1.to_i end number ? ([number].pack('U') rescue match) : match } end case NKF::BINARY when Encoding def self.guess_encoding(src) # NKF.guess of JRuby may return nil NKF.guess(src) || Encoding::US_ASCII end else # Old NKF from 1.8, still bundled with Rubinius NKF_ENCODING_MAP = { NKF::UNKNOWN => Encoding::US_ASCII, NKF::BINARY => Encoding::ASCII_8BIT, NKF::ASCII => Encoding::US_ASCII, NKF::JIS => Encoding::ISO_2022_JP, NKF::EUC => Encoding::EUC_JP, NKF::SJIS => Encoding::Shift_JIS, NKF::UTF8 => Encoding::UTF_8, NKF::UTF16 => Encoding::UTF_16BE, NKF::UTF32 => Encoding::UTF_32BE, } def self.guess_encoding(src) NKF_ENCODING_MAP[NKF.guess(src)] end end def self.detect_charset(src) if src guess_encoding(src).name.upcase else Encoding::ISO8859_1.name end end def self.uri_escape str, unsafe = nil @parser ||= begin URI::Parser.new rescue NameError URI end if URI == @parser then unsafe ||= URI::UNSAFE else unsafe ||= @parser.regexp[:UNSAFE] end @parser.escape str, unsafe end def self.uri_unescape str @parser ||= begin URI::Parser.new rescue NameError URI end @parser.unescape str end end
eligoenergy/mechanize
lib/mechanize/util.rb
Ruby
mit
4,585
require 'english/class' class English # = Noun Number Inflections # # This module provides english singular <-> plural noun inflections. module Inflect @singular_of = {} @plural_of = {} @singular_rules = [] @plural_rules = [] # This class provides the DSL for creating inflections, you can add additional rules. # Examples: # # word "ox", "oxen" # word "octopus", "octopi" # word "man", "men" # # rule "lf", "lves" # # word "equipment" # # Rules are evaluated by size, so rules you add to override specific cases should be longer than the rule # it overrides. For instance, if you want "pta" to pluralize to "ptas", even though a general purpose rule # for "ta" => "tum" already exists, simply add a new rule for "pta" => "ptas", and it will automatically win # since it is longer than the old rule. # # Also, single-word exceptions win over general words ("ox" pluralizes to "oxen", because it's a single word # exception, even though "fox" pluralizes to "foxes") class << self # Define a general two-way exception. # # This also defines a general rule, so foo_child will correctly become # foo_children. # # Whole words also work if they are capitalized (Goose => Geese). def word(singular, plural=nil) plural = singular unless plural singular_word(singular, plural) plural_word(singular, plural) rule(singular, plural) end # Define a singularization exception. def singular_word(singular, plural) @singular_of[plural] = singular @singular_of[plural.capitalize] = singular.capitalize end # Define a pluralization exception. def plural_word(singular, plural) @plural_of[singular] = plural @plural_of[singular.capitalize] = plural.capitalize end # Define a general rule. def rule(singular, plural) singular_rule(singular, plural) plural_rule(singular, plural) end # Define a singularization rule. def singular_rule(singular, plural) @singular_rules << [singular, plural] end # Define a plurualization rule. 
def plural_rule(singular, plural) @plural_rules << [singular, plural] end # Read prepared singularization rules. def singularization_rules if defined?(@singularization_regex) && @singularization_regex return [@singularization_regex, @singularization_hash] end # No sorting needed: Regexen match on longest string @singularization_regex = Regexp.new("(" + @singular_rules.map {|s,p| p}.join("|") + ")$", "i") @singularization_hash = Hash[*@singular_rules.flatten].invert [@singularization_regex, @singularization_hash] end # Read prepared singularization rules. #def singularization_rules # return @singularization_rules if @singularization_rules # sorted = @singular_rules.sort_by{ |s, p| "#{p}".size }.reverse # @singularization_rules = sorted.collect do |s, p| # [ /#{p}$/, "#{s}" ] # end #end # Read prepared pluralization rules. def pluralization_rules if defined?(@pluralization_regex) && @pluralization_regex return [@pluralization_regex, @pluralization_hash] end @pluralization_regex = Regexp.new("(" + @plural_rules.map {|s,p| s}.join("|") + ")$", "i") @pluralization_hash = Hash[*@plural_rules.flatten] [@pluralization_regex, @pluralization_hash] end # Read prepared pluralization rules. #def pluralization_rules # return @pluralization_rules if @pluralization_rules # sorted = @plural_rules.sort_by{ |s, p| "#{s}".size }.reverse # @pluralization_rules = sorted.collect do |s, p| # [ /#{s}$/, "#{p}" ] # end #end # def singular_of ; @singular_of ; end # def plural_of ; @plural_of ; end # Convert an English word from plurel to singular. # # "boys".singular #=> boy # "tomatoes".singular #=> tomato # def singular(word) return "" if word == "" if result = singular_of[word] return result.dup end result = word.dup regex, hash = singularization_rules result.sub!(regex) {|m| hash[m]} singular_of[word] = result return result #singularization_rules.each do |(match, replacement)| # break if result.gsub!(match, replacement) #end #return result end # Alias for #singular (a Railism). 
# alias_method(:singularize, :singular) # Convert an English word from singular to plurel. # # "boy".plural #=> boys # "tomato".plural #=> tomatoes # def plural(word) return "" if word == "" if result = plural_of[word] return result.dup end #return self.dup if /s$/ =~ self # ??? result = word.dup regex, hash = pluralization_rules result.sub!(regex) {|m| hash[m]} plural_of[word] = result return result #pluralization_rules.each do |(match, replacement)| # break if result.gsub!(match, replacement) #end #return result end # Alias for #plural (a Railism). alias_method(:pluralize, :plural) # Clear all rules. def clear(type = :all) if type == :singular || type == :all @singular_of = {} @singular_rules = [] @singularization_rules, @singularization_regex = nil, nil end if type == :plural || type == :all @singular_of = {} @singular_rules = [] @singularization_rules, @singularization_regex = nil, nil end end end # One argument means singular and plural are the same. word 'equipment' word 'information' word 'money' word 'species' word 'series' word 'fish' word 'sheep' word 'moose' word 'hovercraft' word 'news' word 'rice' word 'plurals' # Two arguments defines a singular and plural exception. 
word 'Swiss' , 'Swiss' word 'alias' , 'aliases' word 'analysis' , 'analyses' #word 'axis' , 'axes' word 'basis' , 'bases' word 'buffalo' , 'buffaloes' word 'child' , 'children' #word 'cow' , 'kine' word 'crisis' , 'crises' word 'criterion' , 'criteria' word 'datum' , 'data' word 'goose' , 'geese' word 'hive' , 'hives' word 'index' , 'indices' word 'life' , 'lives' word 'louse' , 'lice' word 'man' , 'men' word 'matrix' , 'matrices' word 'medium' , 'media' word 'mouse' , 'mice' word 'movie' , 'movies' word 'octopus' , 'octopi' word 'ox' , 'oxen' word 'person' , 'people' word 'potato' , 'potatoes' word 'quiz' , 'quizzes' word 'shoe' , 'shoes' word 'status' , 'statuses' word 'testis' , 'testes' word 'thesis' , 'theses' word 'thief' , 'thieves' word 'tomato' , 'tomatoes' word 'torpedo' , 'torpedoes' word 'vertex' , 'vertices' word 'virus' , 'viri' word 'wife' , 'wives' # One-way singularization exception (convert plural to singular). singular_word 'cactus', 'cacti' # One-way pluralizaton exception (convert singular to plural). plural_word 'axis', 'axes' # General rules. rule 'rf' , 'rves' rule 'ero' , 'eroes' rule 'ch' , 'ches' rule 'sh' , 'shes' rule 'ss' , 'sses' #rule 'ess' , 'esses' rule 'ta' , 'tum' rule 'ia' , 'ium' rule 'ra' , 'rum' rule 'ay' , 'ays' rule 'ey' , 'eys' rule 'oy' , 'oys' rule 'uy' , 'uys' rule 'y' , 'ies' rule 'x' , 'xes' rule 'lf' , 'lves' rule 'ffe' , 'ffes' rule 'af' , 'aves' rule 'us' , 'uses' rule 'ouse' , 'ouses' rule 'osis' , 'oses' rule 'ox' , 'oxes' rule '' , 's' # One-way singular rules. singular_rule 'of' , 'ofs' # proof singular_rule 'o' , 'oes' # hero, heroes #singular_rule 'f' , 'ves' # One-way plural rules. plural_rule 's' , 'ses' plural_rule 'ive' , 'ives' # don't want to snag wife plural_rule 'fe' , 'ves' # don't want to snag perspectives end # def self.singular(string) English::Inflect.singular(string) end # def self.plural(string) English::Inflect.plural(string) end # Convert an English word from plurel to singular. 
# # "boys".singular #=> boy # "tomatoes".singular #=> tomato # def singular self.class.singular(@self) end # Alias for #singular. alias_method(:singularize, :singular) # Convert an English word from plurel to singular. # # "boys".singular #=> boy # "tomatoes".singular #=> tomato # def plural self.class.plural(@self) end # Alias for #plural. alias_method(:pluralize, :plural) end
rubygengo/english
lib/english/inflect.rb
Ruby
mit
9,263
var PixiText = require('../../lib/pixi/src/core/text/Text'), utils = require('../core/utils'), math = require('../../lib/pixi/src/core/math'), Sprite = require('../display/Sprite'), CONST = require('../core/const'); function Text(text, style, resolution){ this._init(text, style, resolution); } Text.prototype = Object.create(PixiText.prototype); Text.prototype.constructor = Text; Text.fontPropertiesCache = {}; Text.fontPropertiesCanvas = document.createElement('canvas'); Text.fontPropertiesContext = Text.fontPropertiesCanvas.getContext('2d'); Text.prototype._init = function(text, style, resolution){ text = text || ' '; PixiText.call(this, text, style, resolution); this.speed = new math.Point(); this.anchor = new math.Point(0.5, 0.5); this.pivot = new math.Point(0.5, 0.5); }; Text.prototype.displayObjectUpdateTransform = function(){ // create some matrix refs for easy access var pt = this.parent.worldTransform; var wt = this.worldTransform; //anchor, pivot, and flip variables var sx = (this.flipX) ? -this.scale.x : this.scale.x, sy = (this.flipY) ? -this.scale.y : this.scale.y, ax = (this.flipX) ? 1-this.anchor.x : this.anchor.x, ay = (this.flipY) ? 1-this.anchor.y : this.anchor.y, px = (this.flipX) ? 1-this.pivot.x : this.pivot.x, py = (this.flipY) ? 1-this.pivot.y : this.pivot.y; // temporary matrix variables var a, b, c, d, tx, ty; //Avoid use _width or _height when are 0 if(!this._width||!this._height){ this._width = this.width/this.scale.x; this._height = this.height/this.scale.y; } var anchorWidth = ax * this._width * sx, anchorHeight = ay * this._height * sy, pivotWidth = px * this._width * sx, pivotHeight = py * this._height * sy; // so if rotation is between 0 then we can simplify the multiplication process... if (this.rotation % CONST.PI_2) { // check to see if the rotation is the same as the previous render. 
This means we only need to use sin and cos when rotation actually changes if (this.rotation !== this.rotationCache) { this.rotationCache = this.rotation; this._sr = Math.sin(this.rotation); this._cr = Math.cos(this.rotation); } // get the matrix values of the displayobject based on its transform properties.. a = this._cr * sx; b = this._sr * sx; c = -this._sr * sy; d = this._cr * sy; tx = this.position.x + pivotWidth - anchorWidth; ty = this.position.y + pivotHeight - anchorHeight; if (pivotWidth || pivotHeight) { tx -= pivotWidth * this._cr + pivotHeight * -this._sr; ty -= pivotWidth * this._sr + pivotHeight * this._cr; } // concat the parent matrix with the objects transform. wt.a = a * pt.a + b * pt.c; wt.b = a * pt.b + b * pt.d; wt.c = c * pt.a + d * pt.c; wt.d = c * pt.b + d * pt.d; wt.tx = tx * pt.a + ty * pt.c + pt.tx; wt.ty = tx * pt.b + ty * pt.d + pt.ty; } else { // lets do the fast version as we know there is no rotation.. a = sx; d = sy; tx = this.position.x - anchorWidth; ty = this.position.y - anchorHeight; wt.a = a * pt.a; wt.b = a * pt.b; wt.c = d * pt.c; wt.d = d * pt.d; wt.tx = tx * pt.a + ty * pt.c + pt.tx; wt.ty = tx * pt.b + ty * pt.d + pt.ty; } // multiply the alphas.. this.worldAlpha = this.alpha * this.parent.worldAlpha; // reset the bounds each time this is called! 
this._currentBounds = null; }; Text.prototype._renderCanvas = function (renderer) { if (this.dirty) { // this.resolution = 1//renderer.resolution; this.updateText(); } //Sprite.prototype._renderCanvas.call(this, renderer); this._customRenderCanvas(renderer); }; Text.prototype._customRenderCanvas = function(renderer){ if (this.texture.crop.width <= 0 || this.texture.crop.height <= 0) { return; } if (this.blendMode !== renderer.currentBlendMode) { renderer.currentBlendMode = this.blendMode; renderer.context.globalCompositeOperation = renderer.blendModes[renderer.currentBlendMode]; } // Ignore null sources if (this.texture.valid) { var texture = this._texture, wt = this.worldTransform, dx, dy, width, height; var resolution = texture.baseTexture.resolution / renderer.resolution; renderer.context.globalAlpha = this.worldAlpha; // If smoothingEnabled is supported and we need to change the smoothing property for this texture if (renderer.smoothProperty && renderer.currentScaleMode !== texture.baseTexture.scaleMode) { renderer.currentScaleMode = texture.baseTexture.scaleMode; renderer.context[renderer.smoothProperty] = (renderer.currentScaleMode === CONST.SCALE_MODES.LINEAR); } // If the texture is trimmed we offset by the trim x/y, otherwise we use the frame dimensions if(texture.rotate) { // cheeky rotation! var a = wt.a; var b = wt.b; wt.a = -wt.c; wt.b = -wt.d; wt.c = a; wt.d = b; width = texture.crop.height; //TODO: Width assigned to height??? height = texture.crop.width; dx = (texture.trim) ? texture.trim.y - this.anchor.y * texture.trim.height : this.anchor.y * -texture._frame.height; dy = (texture.trim) ? texture.trim.x - this.anchor.x * texture.trim.width : this.anchor.x * -texture._frame.width; } else { width = texture.crop.width; height = texture.crop.height; dx = (texture.trim) ? texture.trim.x - this.anchor.x * texture.trim.width : this.anchor.x * -texture._frame.width; dy = (texture.trim) ? 
texture.trim.y - this.anchor.y * texture.trim.height : this.anchor.y * -texture._frame.height; } // Allow for pixel rounding if (renderer.roundPixels) { renderer.context.setTransform( wt.a, wt.b, wt.c, wt.d, (wt.tx * renderer.resolution) | 0, (wt.ty * renderer.resolution) | 0 ); dx = dx | 0; dy = dy | 0; } else { renderer.context.setTransform( wt.a, wt.b, wt.c, wt.d, wt.tx * renderer.resolution, wt.ty * renderer.resolution ); } var anchorWidth = this.anchor.x * this._width/resolution, anchorHeight = this.anchor.y * this._height/resolution; if (this.tint !== 0xFFFFFF) { if (this.cachedTint !== this.tint) { this.cachedTint = this.tint; // TODO clean up caching - how to clean up the caches? // TODO: dont works with spritesheets this.tintedTexture = CanvasTinter.getTintedTexture(this, this.tint); } renderer.context.drawImage( this.tintedTexture, 0, 0, width * resolution * renderer.resolution, height * resolution * renderer.resolution, dx / resolution, dy / resolution, width * renderer.resolution, height * renderer.resolution ); } else { //TODO: cuando la resolución del renderer es mayor a 1 los sprites se muestran mal renderer.context.drawImage( texture.baseTexture.source, texture.crop.x * resolution, texture.crop.y * resolution, width * resolution * renderer.resolution, height * resolution * renderer.resolution, dx / resolution + anchorWidth, dy / resolution + anchorHeight, width * renderer.resolution, height * renderer.resolution ); } } }; Text.prototype.renderWebGL = function (renderer) { if (this.dirty) { //this.resolution = 1//renderer.resolution; this.updateText(); } Sprite.prototype.renderWebGL.call(this, renderer); }; Text.prototype.updateText = function (){ var style = this._style; this.context.font = style.font; // word wrap // preserve original text var outputText = style.wordWrap ? 
this.wordWrap(this._text) : this._text; // split text into lines var lines = outputText.split(/(?:\r\n|\r|\n)/); // calculate text width var lineWidths = new Array(lines.length); var maxLineWidth = 0; var fontProperties = this.determineFontProperties(style.font); for (var i = 0; i < lines.length; i++) { var lineWidth = this.context.measureText(lines[i]).width; lineWidths[i] = lineWidth; maxLineWidth = Math.max(maxLineWidth, lineWidth); } var width = maxLineWidth + style.strokeThickness; if (style.dropShadow) { width += style.dropShadowDistance; } this.canvas.width = ( width + this.context.lineWidth ) * this.resolution; // calculate text height var lineHeight = this.style.lineHeight || fontProperties.fontSize + style.strokeThickness; var height = lineHeight * lines.length; if (style.dropShadow) { height += style.dropShadowDistance; } this.canvas.height = ( height + this._style.padding * 2 ) * this.resolution; this.context.scale( this.resolution, this.resolution); if (navigator.isCocoonJS) { this.context.clearRect(0, 0, this.canvas.width, this.canvas.height); } //this.context.fillStyle="#FF0000"; //this.context.fillRect(0, 0, this.canvas.width, this.canvas.height); this.context.font = style.font; this.context.strokeStyle = (typeof style.stroke === "number") ? 
utils.hex2string(style.stroke) : style.stroke; this.context.lineWidth = style.strokeThickness; this.context.textBaseline = style.textBaseline; this.context.lineJoin = style.lineJoin; this.context.miterLimit = style.miterLimit; var linePositionX; var linePositionY; if (style.dropShadow) { this.context.fillStyle = style.dropShadowColor; var xShadowOffset = Math.cos(style.dropShadowAngle) * style.dropShadowDistance; var yShadowOffset = Math.sin(style.dropShadowAngle) * style.dropShadowDistance; for (i = 0; i < lines.length; i++) { linePositionX = style.strokeThickness / 2; linePositionY = (style.strokeThickness / 2 + i * lineHeight) + fontProperties.ascent; if (style.align === 'right') { linePositionX += maxLineWidth - lineWidths[i]; } else if (style.align === 'center') { linePositionX += (maxLineWidth - lineWidths[i]) / 2; } if (style.fill) { this.context.fillText(lines[i], linePositionX + xShadowOffset, linePositionY + yShadowOffset + this._style.padding); } } } //set canvas text styles this.context.fillStyle = (typeof style.fill === "number") ? 
utils.hex2string(style.fill) : style.fill; //draw lines line by line for (i = 0; i < lines.length; i++) { linePositionX = style.strokeThickness / 2; linePositionY = (style.strokeThickness / 2 + i * lineHeight) + fontProperties.ascent; if (style.align === 'right') { linePositionX += maxLineWidth - lineWidths[i]; } else if (style.align === 'center') { linePositionX += (maxLineWidth - lineWidths[i]) / 2; } if (style.stroke && style.strokeThickness) { this.context.strokeText(lines[i], linePositionX, linePositionY + this._style.padding); } if (style.fill) { this.context.fillText(lines[i], linePositionX, linePositionY + this._style.padding); } } this.updateTexture(); }; Text.prototype.setStyle = function(style){ this.style = style; return this; }; Text.prototype.setText = function(text, keys){ if(keys)text = utils.parseTextKeys(text, keys); this.text = text; return this; }; Text.prototype.setWordWrap = function(value){ if(value === false){ this.style.wordWrap = value; }else{ this.style.wordWrap = true; this.style.wordWrapWidth = value; } this.dirty = true; return this; }; Text.prototype.containsPoint = Sprite.prototype.containsPoint; Text.prototype.getLocalBounds = Sprite.prototype.getLocalBounds; module.exports = Text;
TarentolaDigital/perenquen
src/display/Text.js
JavaScript
mit
13,176
/** * A wrapper around JSLint to drop things into the console * * Copyright (C) 2011 Nikolay Nemshilov */ var RightJS = require('./right-server.js'); var JSLint = require('./jslint').JSLINT; var fs = require('fs'); exports.Linter = new RightJS.Class({ extend: { Options: { debug: false, // no debug devel: false, // no console.log s evil: false, // no evals passfail: false, // don't stop on errors onevar: false, // allow more than one 'var' definition forin: true , // allow for in without ownershipt checks indent: 2 , // enforce 2 spaces indent maxerr: 12 , // max number of errors }, Okays: [ "Move 'var' declarations to the top of the function.", "Do not use 'new' for side effects.", "The Function constructor is eval." ] }, /** * Basic constructor * * @param {String} the source * @param {String} the linter options * @return void */ initialize: function(src, options) { this.source = src; this.options = options; }, /** * Runs the linter * * @return {Linter} this */ run: function() { var options = {}, okays = [], patches = ''; // extracting the additional options try { // skipping non-existing patch files patches = fs.readFileSync(this.options).toString(); } catch(e) {} eval(patches); JSLint.okays = this.constructor.Okays.concat(okays); JSLint( fs.readFileSync(this.source).toString(), Object.merge(this.constructor.Options, options) ); this.errors = JSLint.errors.compact(); this.failed = this.errors.length > 0; return this; }, /** * Prints out the check report * * @return {Linter} this */ report: function() { if (this.errors.empty()) { console.log("\u001B[32m - JSLint check successfully passed\u001B[0m"); } else { console.log("\u001B[31m - JSLint check failed in: "+ this.source + "\u001B[0m"); this.errors.each(function(error) { var report = "\n", j=0, pointer=''; for (; j < error.character-1; j++) { pointer += '-'; } report += " \u001B[35m"+ error.reason +"\u001B[0m "; if (error.evidence) { report += "Line: "+ error.line + ", Char: "+ error.character + "\n"; report += " 
"+ error.evidence + "\n"; report += " \u001B[33m"+ pointer + "^\u001B[0m"; } console.log(report); }); console.log("\n") } return this; } });
rightjs/rightjs-ui
util/linter.js
JavaScript
mit
2,541
# coding=utf-8 import pygame import pygame.locals class Board(object): """ Plansza do gry. Odpowiada za rysowanie okna gry. """ def __init__(self, width, height): """ Konstruktor planszy do gry. Przygotowuje okienko gry. :param width: szerokość w pikselach :param height: wysokość w pikselach """ self.surface = pygame.display.set_mode((width, height), 0, 32) pygame.display.set_caption('Game of life') def draw(self, *args): """ Rysuje okno gry :param args: lista obiektów do narysowania """ background = (0, 0, 0) self.surface.fill(background) for drawable in args: drawable.draw_on(self.surface) # dopiero w tym miejscu następuje fatyczne rysowanie # w oknie gry, wcześniej tylko ustalaliśmy co i jak ma zostać narysowane pygame.display.update() class GameOfLife(object): """ Łączy wszystkie elementy gry w całość. """ def __init__(self, width, height, cell_size=10): """ Przygotowanie ustawień gry :param width: szerokość planszy mierzona liczbą komórek :param height: wysokość planszy mierzona liczbą komórek :param cell_size: bok komórki w pikselach """ pygame.init() self.board = Board(width * cell_size, height * cell_size) # zegar którego użyjemy do kontrolowania szybkości rysowania # kolejnych klatek gry self.fps_clock = pygame.time.Clock() def run(self): """ Główna pętla gry """ while not self.handle_events(): # działaj w pętli do momentu otrzymania sygnału do wyjścia self.board.draw() self.fps_clock.tick(15) def handle_events(self): """ Obsługa zdarzeń systemowych, tutaj zinterpretujemy np. 
ruchy myszką :return True jeżeli pygame przekazał zdarzenie wyjścia z gry """ for event in pygame.event.get(): if event.type == pygame.locals.QUIT: pygame.quit() return True # magiczne liczby używane do określenia czy komórka jest żywa DEAD = 0 ALIVE = 1 class Population(object): """ Populacja komórek """ def __init__(self, width, height, cell_size=10): """ Przygotowuje ustawienia populacji :param width: szerokość planszy mierzona liczbą komórek :param height: wysokość planszy mierzona liczbą komórek :param cell_size: bok komórki w pikselach """ self.box_size = cell_size self.height = height self.width = width self.generation = self.reset_generation() def reset_generation(self): """ Tworzy i zwraca macierz pustej populacji """ # w pętli wypełnij listę kolumnami # które także w pętli zostają wypełnione wartością 0 (DEAD) return [[DEAD for y in xrange(self.height)] for x in xrange(self.width)] def handle_mouse(self): # pobierz stan guzików myszki z wykorzystaniem funcji pygame buttons = pygame.mouse.get_pressed() if not any(buttons): # ignoruj zdarzenie jeśli żaden z guzików nie jest wciśnięty return # dodaj żywą komórką jeśli wciśnięty jest pierwszy guzik myszki # będziemy mogli nie tylko dodawać żywe komórki ale także je usuwać alive = True if buttons[0] else False # pobierz pozycję kursora na planszy mierzoną w pikselach x, y = pygame.mouse.get_pos() # przeliczamy współrzędne komórki z pikseli na współrzędne komórki w macierz # gracz może kliknąć w kwadracie o szerokości box_size by wybrać komórkę x /= self.box_size y /= self.box_size # ustaw stan komórki na macierzy self.generation[x][y] = ALIVE if alive else DEAD def draw_on(self, surface): """ Rysuje komórki na planszy """ for x, y in self.alive_cells(): size = (self.box_size, self.box_size) position = (x * self.box_size, y * self.box_size) color = (255, 255, 255) thickness = 1 pygame.draw.rect(surface, color, pygame.locals.Rect(position, size), thickness) def alive_cells(self): """ Generator zwracający 
współrzędne żywych komórek. """ for x in range(len(self.generation)): column = self.generation[x] for y in range(len(column)): if column[y] == ALIVE: # jeśli komórka jest żywa zwrócimy jej współrzędne yield x, y # Ta część powinna być zawsze na końcu modułu (ten plik jest modułem) # chcemy uruchomić naszą grę dopiero po tym jak wszystkie klasy zostaną zadeklarowane if __name__ == "__main__": game = GameOfLife(80, 40) game.run()
roninek/python101
docs/pygame/life/code1a.py
Python
mit
5,039
using System.Linq.Expressions; using System.Collections.Generic; namespace Bermuda.ExpressionGeneration { public partial class ValueExpression : ExpressionTreeBase { public long Value { get; private set; } public ValueExpression(long value) { Value = value; } public override string ToString() { return string.Format("@{0}", Value.ToString()); } public override IEnumerable<ExpressionTreeBase> GetChildren() { yield break; } public override Expression CreateExpression(object context) { return Expression.Constant(Value); } } }
melnx/Bermuda
Bermuda.ExpressionGenerator/QL/ValueExpression.cs
C#
mit
730
// String literal types are only valid in overload signatures function foo(x: any); function foo(x: 'hi') { } class C { foo(x: string); foo(x: 'hi') { } } interface I { (x: 'a'); (x: 'hi'); foo(x: 'a', y: 'a'); foo(x: 'hi', y: 'hi'); } var a: { (x: 'hi'); (x: 'a'); foo(x: 'hi'); foo(x: 'a'); } var b = { foo(x: 'hi') { }, foo(x: 'a') { }, }
Pajn/prettier
tests/typescript/conformance/types/objectTypeLiteral/callSignatures/stringLiteralTypesInImplementationSignatures2.ts
TypeScript
mit
395
""" File-based Checkpoints implementations. """ import os import shutil from tornado.web import HTTPError from .checkpoints import ( Checkpoints, GenericCheckpointsMixin, ) from .fileio import FileManagerMixin from IPython.utils import tz from IPython.utils.path import ensure_dir_exists from IPython.utils.py3compat import getcwd from IPython.utils.traitlets import Unicode class FileCheckpoints(FileManagerMixin, Checkpoints): """ A Checkpoints that caches checkpoints for files in adjacent directories. Only works with FileContentsManager. Use GenericFileCheckpoints if you want file-based checkpoints with another ContentsManager. """ checkpoint_dir = Unicode( '.ipynb_checkpoints', config=True, help="""The directory name in which to keep file checkpoints This is a path relative to the file's own directory. By default, it is .ipynb_checkpoints """, ) root_dir = Unicode(config=True) def _root_dir_default(self): try: return self.parent.root_dir except AttributeError: return getcwd() # ContentsManager-dependent checkpoint API def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" checkpoint_id = u'checkpoint' src_path = contents_mgr._get_os_path(path) dest_path = self.checkpoint_path(checkpoint_id, path) self._copy(src_path, dest_path) return self.checkpoint_model(checkpoint_id, dest_path) def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" src_path = self.checkpoint_path(checkpoint_id, path) dest_path = contents_mgr._get_os_path(path) self._copy(src_path, dest_path) # ContentsManager-independent checkpoint API def rename_checkpoint(self, checkpoint_id, old_path, new_path): """Rename a checkpoint from old_path to new_path.""" old_cp_path = self.checkpoint_path(checkpoint_id, old_path) new_cp_path = self.checkpoint_path(checkpoint_id, new_path) if os.path.isfile(old_cp_path): self.log.debug( "Renaming checkpoint %s -> %s", old_cp_path, new_cp_path, ) with self.perm_to_403(): shutil.move(old_cp_path, new_cp_path) 
def delete_checkpoint(self, checkpoint_id, path): """delete a file's checkpoint""" path = path.strip('/') cp_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(cp_path): self.no_such_checkpoint(path, checkpoint_id) self.log.debug("unlinking %s", cp_path) with self.perm_to_403(): os.unlink(cp_path) def list_checkpoints(self, path): """list the checkpoints for a given file This contents manager currently only supports one checkpoint per file. """ path = path.strip('/') checkpoint_id = "checkpoint" os_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_path): return [] else: return [self.checkpoint_model(checkpoint_id, os_path)] # Checkpoint-related utilities def checkpoint_path(self, checkpoint_id, path): """find the path to a checkpoint""" path = path.strip('/') parent, name = ('/' + path).rsplit('/', 1) parent = parent.strip('/') basename, ext = os.path.splitext(name) filename = u"{name}-{checkpoint_id}{ext}".format( name=basename, checkpoint_id=checkpoint_id, ext=ext, ) os_path = self._get_os_path(path=parent) cp_dir = os.path.join(os_path, self.checkpoint_dir) with self.perm_to_403(): ensure_dir_exists(cp_dir) cp_path = os.path.join(cp_dir, filename) return cp_path def checkpoint_model(self, checkpoint_id, os_path): """construct the info dict for a given checkpoint""" stats = os.stat(os_path) last_modified = tz.utcfromtimestamp(stats.st_mtime) info = dict( id=checkpoint_id, last_modified=last_modified, ) return info # Error Handling def no_such_checkpoint(self, path, checkpoint_id): raise HTTPError( 404, u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id) ) class GenericFileCheckpoints(GenericCheckpointsMixin, FileCheckpoints): """ Local filesystem Checkpoints that works with any conforming ContentsManager. 
""" def create_file_checkpoint(self, content, format, path): """Create a checkpoint from the current content of a file.""" path = path.strip('/') # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): self._save_file(os_checkpoint_path, content, format=format) # return the checkpoint info return self.checkpoint_model(checkpoint_id, os_checkpoint_path) def create_notebook_checkpoint(self, nb, path): """Create a checkpoint from the current content of a notebook.""" path = path.strip('/') # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): self._save_notebook(os_checkpoint_path, nb) # return the checkpoint info return self.checkpoint_model(checkpoint_id, os_checkpoint_path) def get_notebook_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a notebook.""" path = path.strip('/') self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) return { 'type': 'notebook', 'content': self._read_notebook( os_checkpoint_path, as_version=4, ), } def get_file_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a file.""" path = path.strip('/') self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) content, format = self._read_file(os_checkpoint_path, format=None) return { 'type': 'file', 'content': content, 'format': format, }
wolfram74/numerical_methods_iserles_notes
venv/lib/python2.7/site-packages/IPython/html/services/contents/filecheckpoints.py
Python
mit
6,954
import React, { PropTypes } from 'react' import ActionDelete from 'material-ui/svg-icons/action/delete' import { colors } from '/styles' import moduleStyles from '/styles/fileTree' const RemoveBtn = ({ onClick }) => ( <ActionDelete onClick={onClick} style={moduleStyles.listIcon.base} color={colors.light} hoverColor={colors.hover.red} /> ) export default RemoveBtn
heshihao0813/registerScan
src/components/fileTree/RemoveBtn.js
JavaScript
mit
426
// Source : https://leetcode.com/problems/longest-substring-without-repeating-characters/ // Author : codeyu // Date : 2016-09-20 /********************************************************************************** * * Given a string, find the length of the longest substring without repeating characters. * * Examples: * * Given "abcabcbb", the answer is "abc", which the length is 3. * * Given "bbbbb", the answer is "b", with the length of 1. * * Given "pwwkew", the answer is "wke", with the length of 3. * * Note that the answer must be a substring, "pwke" is a subsequence and not a substring. * **********************************************************************************/ using System.Collections.Generic; using System; namespace Algorithms { public class Solution003 { public static int LengthOfLongestSubstring(string s) { var n = s.Length; var charSet = new HashSet<char>(); int maxLength = 0, i = 0, j = 0; while (i < n && j < n) { if (charSet.Add(s[j])) { j++; maxLength = Math.Max(maxLength, j - i); } else { charSet.Remove(s[i]); i++; } } return maxLength; } } }
AzureLocalizationTeam/leetcode
Algorithms/LengthOfLongestSubstring.cs
C#
mit
1,408
using System.Collections.ObjectModel; namespace SimpleBackgroundUploadWebAPI.Areas.HelpPage.ModelDescriptions { public class ComplexTypeModelDescription : ModelDescription { public ComplexTypeModelDescription() { Properties = new Collection<ParameterDescription>(); } public Collection<ParameterDescription> Properties { get; private set; } } }
dannycabrera/SimpleBackgroundUpload
SimpleBackgroundUploadWebAPI/SimpleBackgroundUploadWebAPI/Areas/HelpPage/ModelDescriptions/ComplexTypeModelDescription.cs
C#
mit
402
#require File.dirname(__FILE__) + '/formats/email' module DataMapper module Validate ## # # @author Guy van den Berg # @since 0.9 class CustomValidator < GenericValidator def initialize(field_name, options = {}, &b) #super(field_name, options) #@field_name, @options = field_name, options #@options[:allow_nil] = false unless @options.has_key?(:allow_nil) end def call(target) #field_value = target.instance_variable_get("@#{@field_name}") #return true if @options[:allow_nil] && field_value.nil? #validation = (@options[:as] || @options[:with]) #error_message = nil # Figure out what to use as the actual validator. # If a symbol is passed to :as, look up # the canned validation in FORMATS. # #validator = if validation.is_a? Symbol # if FORMATS[validation].is_a? Array # error_message = FORMATS[validation][1] # FORMATS[validation][0] # else # FORMATS[validation] || validation # end #else # validation #end #valid = case validator #when Proc then validator.call(field_value) #when Regexp then validator =~ field_value #else raise UnknownValidationFormat, "Can't determine how to validate #{target.class}##{@field_name} with #{validator.inspect}" #end #unless valid # field = Inflector.humanize(@field_name) # value = field_value # # error_message = @options[:message] || error_message || '%s is invalid'.t(field) # error_message = error_message.call(field, value) if Proc === error_message # # add_error(target, error_message , @field_name) #end #return valid end #class UnknownValidationFormat < StandardError; end end # class CustomValidator module ValidatesFormatOf #def validates_format_of(*fields) #opts = opts_from_validator_args(fields) #add_validator_to_context(opts, fields, DataMapper::Validate::FormatValidator) #end end # module ValidatesFormatOf end # module Validate end # module DataMapper
kad3nce/collective
gems/gems/dm-validations-0.9.5/lib/dm-validations/custom_validator.rb
Ruby
mit
2,225
#!/usr/bin/ruby require "fileutils" require 'json' require_relative "BibleReader.rb" reader = BibleReader.new translation_names = ['개역개정', '새번역', 'NIV']#BibleInfo.translation_name_to_code.keys translation_names.each do |translation_name| translation_code = BibleInfo.translation_name_to_code[translation_name] BibleInfo.bible_name_to_chapter_len.each do |bible_name, chapter_len| # puts "#{translation_name}, #{bible_name}, #{chapter_len}" bible_shortname = BibleInfo.bible_name_to_shortname[bible_name] db_name = "db/#{translation_name}/#{"%02d_" % BibleInfo.bible_name_to_code[bible_name]}#{bible_name}" unless Dir.exist? db_name FileUtils.mkdir_p(db_name) end (1..chapter_len).each do |chapter| file = File.open "#{db_name}/#{chapter}", 'w' chapter_content = reader.get_chapter_from_web(translation_name, bible_shortname, chapter) file.write(chapter_content.to_json) file.close() end end end
nofearbutlove/bible-clipper
BibleCroller.rb
Ruby
mit
976
<?php /** * TOP API: taobao.hotel.order.face.deal request * * @author auto create * @since 1.0, 2013-09-13 16:51:03 */ class Taobao_Request_HotelOrderFaceDealRequest { /** * 酒店订单oid **/ private $oid; /** * 操作类型,1:确认预订,2:取消订单 **/ private $operType; /** * 取消订单时的取消原因备注信息 **/ private $reasonText; /** * 取消订单时的取消原因,可选值:1,2,3,4; * 1:无房,2:价格变动,3:买家原因,4:其它原因 **/ private $reasonType; private $apiParas = array(); public function setOid($oid) { $this->oid = $oid; $this->apiParas["oid"] = $oid; } public function getOid() { return $this->oid; } public function setOperType($operType) { $this->operType = $operType; $this->apiParas["oper_type"] = $operType; } public function getOperType() { return $this->operType; } public function setReasonText($reasonText) { $this->reasonText = $reasonText; $this->apiParas["reason_text"] = $reasonText; } public function getReasonText() { return $this->reasonText; } public function setReasonType($reasonType) { $this->reasonType = $reasonType; $this->apiParas["reason_type"] = $reasonType; } public function getReasonType() { return $this->reasonType; } public function getApiMethodName() { return "taobao.hotel.order.face.deal"; } public function getApiParas() { return $this->apiParas; } public function check() { Taobao_RequestCheckUtil::checkNotNull($this->oid, "oid"); Taobao_RequestCheckUtil::checkNotNull($this->operType, "operType"); Taobao_RequestCheckUtil::checkMaxLength($this->operType, 1, "operType"); Taobao_RequestCheckUtil::checkMaxLength($this->reasonText, 500, "reasonText"); Taobao_RequestCheckUtil::checkMaxLength($this->reasonType, 1, "reasonType"); } public function putOtherTextParam($key, $value) { $this->apiParas[$key] = $value; $this->$key = $value; } }
musicsnap/LearnCode
php/code/yaf/application/library/Taobao/Request/HotelOrderFaceDealRequest.php
PHP
mit
1,971
import { Observable } from './observable' export default function drop(count, source) { return Observable(add => { let dropped = 0 return source.subscribe((val, name) => { if (dropped++ >= count) add(val, name) }) }) }
AlexGalays/dompteuse
src/observable/drop.js
JavaScript
mit
244
package schoolprojects; import java.util.Random; import java.util.Scanner; /** * Piedra, papel o tijera es un juego infantil. * Un juego de manos en el cual existen tres elementos. * La piedra que vence a la tijera rompiéndola; la tijera que vencen al papel cortándolo; * y el papel que vence a la piedra envolviéndola. Esto representa un ciclo, el cual * le da su esencia al juego. Este juego es muy utilizado para decidir quien de dos * personas hará algo, tal y como a veces se hace usando una moneda, o para dirimir algún asunto. * * En esta version del juego habra un Jugador Humano y un jugador artificial ( es decir el ordenador ) * * @author Velik Georgiev Chelebiev * @version 0.0.1 */ public class Juego { /** * @param args Argumentos de la linea de comandos */ public static void main(String[] args) { Scanner scan = new Scanner(System.in); Random rand = new Random(); /** * Movimientos disponibles en forma de cadena. */ String[] movimientos = {"Piedra", "Papel", "Tijera"}; /** * Moviemiento elegido por el usuario en forma de numero entero. */ int entradaUsuario = 0; /** * Un numero aleatorio que representara el movimiento del ordenador. */ int movimientoAleatorio = 0; /** * Los resultados posibles de la partida. 0 EMPATE 1 El jugador gana 2 * El jugador pierde */ String[] resultados = {"Empate", "Ganas", "Pierdes"}; /** * El resultado de la partida respecto el jugador. */ int resultadoJugador = -1; /** * Aqui es donde epieza el juego. * * Pedimos al usuario que elija uno de los movimientos disponibles * y generamos un movimiento aleatorio, que sera el movimiento del ordenador. * Despues comptrobamos si el jugador gana al ordenador , si pierde o si hay un empate. * Mostramos el resultado en la pantalla y el bucle se repite hasta que * el jugador no introduce -1 como movimiento. */ do { // Mostramos informacion sobre los movimientos validos y // los numeros que le corresponden. 
for (int i = 0; i < movimientos.length; i++) { System.out.print("(" + (i + 1) + ") " + movimientos[i] + "\n"); } // Valor predeterminado ( o entrada no valida, por si el usuario no introduce ningun valor ) entradaUsuario = 0; // Leemos la entrada ( el moviemiento ) del usuario try { System.out.print("Movimiento: "); entradaUsuario = Integer.parseInt(scan.nextLine()); } catch (NumberFormatException ex) { // Si la entrada no tiene un formato valido, mostraremos un mensaje de error // y le pediremos al usuario que introduzca un movimiento nuevamente. entradaUsuario = 0; } // Si la opcion elegida por el usuario no es valida imprimimos un // mensaje de error y le volvemos a pedir que introduzca una opcion if (entradaUsuario < 1 || entradaUsuario > 3) { System.out.println("\n*** El movimiento elegido no es valido. ***"); continue; } // Restamos 1 a la entrada del usuario. // Esto lo hacemos para que sea un indice de vector valido. entradaUsuario -= 1; // Generamos un movimiento aleatorio movimientoAleatorio = rand.nextInt(movimientos.length); // Para separar el "menu" de moviemientos y la entrada del usuario // con la salida/resultado de la partida marco System.out.println("\n*******************************\n"); // Imprimimos las jugadas del jugador y del ordenador System.out.println("Tu: (" + movimientos[entradaUsuario] + ") [VS] PC: (" + movimientos[movimientoAleatorio] + ")"); // Comprobamos si el jugador gana if ((entradaUsuario == 0 && movimientoAleatorio == 2) || (entradaUsuario == 1 && movimientoAleatorio == 0) || (entradaUsuario == 2 && movimientoAleatorio == 1)) { resultadoJugador = 1; } else if(entradaUsuario == movimientoAleatorio) { // Comprobamos si es un empate resultadoJugador = 0; } else { // en el resto de los casos el jugador pierde resultadoJugador = 2; } // Imprimimos el resultado de la partida System.out.println("Resultado: " + resultados[resultadoJugador]); // Para separar el "menu" de moviemientos y la entrada del usuario // con la salida/resultado 
de la partida marco System.out.println("\n*******************************\n"); } while (entradaUsuario != -1); } }
velikGeorgiev/School
PRG/PiedraPapelTijera/Juego.java
Java
mit
5,080
"""Support for monitoring emoncms feeds.""" from __future__ import annotations from datetime import timedelta from http import HTTPStatus import logging import requests import voluptuous as vol from homeassistant.components.sensor import ( PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorStateClass, ) from homeassistant.const import ( CONF_API_KEY, CONF_ID, CONF_SCAN_INTERVAL, CONF_UNIT_OF_MEASUREMENT, CONF_URL, CONF_VALUE_TEMPLATE, POWER_WATT, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import template import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) ATTR_FEEDID = "FeedId" ATTR_FEEDNAME = "FeedName" ATTR_LASTUPDATETIME = "LastUpdated" ATTR_LASTUPDATETIMESTR = "LastUpdatedStr" ATTR_SIZE = "Size" ATTR_TAG = "Tag" ATTR_USERID = "UserId" CONF_EXCLUDE_FEEDID = "exclude_feed_id" CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id" CONF_SENSOR_NAMES = "sensor_names" DECIMALS = 2 DEFAULT_UNIT = POWER_WATT MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5) ONLY_INCL_EXCL_NONE = "only_include_exclude_or_none" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_URL): cv.string, vol.Required(CONF_ID): cv.positive_int, vol.Exclusive(CONF_ONLY_INCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Exclusive(CONF_EXCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Optional(CONF_SENSOR_NAMES): vol.All( {cv.positive_int: vol.All(cv.string, vol.Length(min=1))} ), vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=DEFAULT_UNIT): cv.string, } ) def get_id(sensorid, feedtag, feedname, feedid, feeduserid): """Return unique identifier for feed / sensor.""" return 
f"emoncms{sensorid}_{feedtag}_{feedname}_{feedid}_{feeduserid}" def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Emoncms sensor.""" apikey = config.get(CONF_API_KEY) url = config.get(CONF_URL) sensorid = config.get(CONF_ID) value_template = config.get(CONF_VALUE_TEMPLATE) config_unit = config.get(CONF_UNIT_OF_MEASUREMENT) exclude_feeds = config.get(CONF_EXCLUDE_FEEDID) include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID) sensor_names = config.get(CONF_SENSOR_NAMES) interval = config.get(CONF_SCAN_INTERVAL) if value_template is not None: value_template.hass = hass data = EmonCmsData(hass, url, apikey, interval) data.update() if data.data is None: return sensors = [] for elem in data.data: if exclude_feeds is not None and int(elem["id"]) in exclude_feeds: continue if include_only_feeds is not None and int(elem["id"]) not in include_only_feeds: continue name = None if sensor_names is not None: name = sensor_names.get(int(elem["id"]), None) if unit := elem.get("unit"): unit_of_measurement = unit else: unit_of_measurement = config_unit sensors.append( EmonCmsSensor( hass, data, name, value_template, unit_of_measurement, str(sensorid), elem, ) ) add_entities(sensors) class EmonCmsSensor(SensorEntity): """Implementation of an Emoncms sensor.""" def __init__( self, hass, data, name, value_template, unit_of_measurement, sensorid, elem ): """Initialize the sensor.""" if name is None: # Suppress ID in sensor name if it's 1, since most people won't # have more than one EmonCMS source and it's redundant to show the # ID if there's only one. 
id_for_name = "" if str(sensorid) == "1" else sensorid # Use the feed name assigned in EmonCMS or fall back to the feed ID feed_name = elem.get("name") or f"Feed {elem['id']}" self._name = f"EmonCMS{id_for_name} {feed_name}" else: self._name = name self._identifier = get_id( sensorid, elem["tag"], elem["name"], elem["id"], elem["userid"] ) self._hass = hass self._data = data self._value_template = value_template self._unit_of_measurement = unit_of_measurement self._sensorid = sensorid self._elem = elem if unit_of_measurement == "kWh": self._attr_device_class = SensorDeviceClass.ENERGY self._attr_state_class = SensorStateClass.TOTAL_INCREASING elif unit_of_measurement == "W": self._attr_device_class = SensorDeviceClass.POWER self._attr_state_class = SensorStateClass.MEASUREMENT if self._value_template is not None: self._state = self._value_template.render_with_possible_json_value( elem["value"], STATE_UNKNOWN ) else: self._state = round(float(elem["value"]), DECIMALS) @property def name(self): """Return the name of the sensor.""" return self._name @property def native_unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def native_value(self): """Return the state of the device.""" return self._state @property def extra_state_attributes(self): """Return the attributes of the sensor.""" return { ATTR_FEEDID: self._elem["id"], ATTR_TAG: self._elem["tag"], ATTR_FEEDNAME: self._elem["name"], ATTR_SIZE: self._elem["size"], ATTR_USERID: self._elem["userid"], ATTR_LASTUPDATETIME: self._elem["time"], ATTR_LASTUPDATETIMESTR: template.timestamp_local(float(self._elem["time"])), } def update(self): """Get the latest data and updates the state.""" self._data.update() if self._data.data is None: return elem = next( ( elem for elem in self._data.data if get_id( self._sensorid, elem["tag"], elem["name"], elem["id"], elem["userid"], ) == self._identifier ), None, ) if elem is None: return self._elem = elem 
if self._value_template is not None: self._state = self._value_template.render_with_possible_json_value( elem["value"], STATE_UNKNOWN ) else: self._state = round(float(elem["value"]), DECIMALS) class EmonCmsData: """The class for handling the data retrieval.""" def __init__(self, hass, url, apikey, interval): """Initialize the data object.""" self._apikey = apikey self._url = f"{url}/feed/list.json" self._interval = interval self._hass = hass self.data = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from Emoncms.""" try: parameters = {"apikey": self._apikey} req = requests.get( self._url, params=parameters, allow_redirects=True, timeout=5 ) except requests.exceptions.RequestException as exception: _LOGGER.error(exception) return else: if req.status_code == HTTPStatus.OK: self.data = req.json() else: _LOGGER.error( "Please verify if the specified configuration value " "'%s' is correct! (HTTP Status_code = %d)", CONF_URL, req.status_code, )
rohitranjan1991/home-assistant
homeassistant/components/emoncms/sensor.py
Python
mit
8,487
<?php /* * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * This software consists of voluntary contributions made by many individuals * and is licensed under the MIT license. For more information, see * <http://www.doctrine-project.org>. */ namespace DoctrineMongoODMModuleTest\Service; use DoctrineMongoODMModule\Service\ConfigurationFactory; use DoctrineMongoODMModuleTest\AbstractTest; class ConfigurationFactoryTest extends AbstractTest { public function testRetryConnectValueIsSetFromConfigurationOptions() { $config = $this->getDocumentManager()->getConfiguration(); $this->assertSame(123, $config->getRetryConnect()); } public function testRetryQueryValueIsSetFromConfigurationOptions() { $config = $this->getDocumentManager()->getConfiguration(); $this->assertSame(456, $config->getRetryQuery()); } public function testCreation() { $logger = $this->getMockForAbstractClass('DoctrineMongoODMModule\Logging\Logger'); $metadataCache = $this->getMockForAbstractClass('Doctrine\Common\Cache\Cache'); $mappingDriver = $this->getMockForAbstractClass('Doctrine\Common\Persistence\Mapping\Driver\MappingDriver'); $serviceLocator = $this->getMockForAbstractClass('Zend\ServiceManager\ServiceLocatorInterface'); 
$serviceLocator->expects($this->exactly(4))->method('get')->withConsecutive( array('Configuration'), array('stubbed_logger'), array('doctrine.cache.stubbed_metadatacache'), array('doctrine.driver.stubbed_driver') )->willReturnOnConsecutiveCalls( array( 'doctrine' => array( 'configuration' => array( 'odm_test' => array( 'logger' => 'stubbed_logger', 'metadata_cache' => 'stubbed_metadatacache', 'driver' => 'stubbed_driver', 'generate_proxies' => true, 'proxy_dir' => 'data/DoctrineMongoODMModule/Proxy', 'proxy_namespace' => 'DoctrineMongoODMModule\Proxy', 'generate_hydrators' => true, 'hydrator_dir' => 'data/DoctrineMongoODMModule/Hydrator', 'hydrator_namespace' => 'DoctrineMongoODMModule\Hydrator', 'default_db' => 'default_db', 'filters' => array(), // array('filterName' => 'BSON\Filter\Class') // custom types 'types' => array( 'CustomType' => 'DoctrineMongoODMModuleTest\Assets\CustomType' ), 'classMetadataFactoryName' => 'stdClass' ) ) ) ), $logger, $metadataCache, $mappingDriver ); $factory = new ConfigurationFactory('odm_test'); $config = $factory->createService($serviceLocator); $this->assertInstanceOf('Doctrine\ODM\MongoDB\Configuration', $config); $this->assertNotNull($config->getLoggerCallable()); $this->assertSame($metadataCache, $config->getMetadataCacheImpl()); $this->assertSame($mappingDriver, $config->getMetadataDriverImpl()); $this->assertInstanceOf( 'DoctrineMongoODMModuleTest\Assets\CustomType', \Doctrine\ODM\MongoDB\Types\Type::getType('CustomType') ); } }
prolic/DoctrineMongoODMModule
tests/DoctrineMongoODMModuleTest/Doctrine/ConfigurationFactoryTest.php
PHP
mit
4,440
import React, { Component } from 'react' import { FlexGrid, Content, Container, AdminItemsViewTable, Table, Button } from 'components' import { AddItemAboutContainer, AddItemPhotoContainer, SpecialSetupContainer } from 'containers' import { adminLink } from 'config' import s from './AdminItemsView.sass' class ToggleButton extends Component { state = {value: this.props.value}; componentWillMount() { this.setState({value: this.props.value}) } componentWillReceiveProps(nextProps) { if (nextProps.value !== this.props.value) { this.setState({value: nextProps.value}); } } onClick = () => { if (!this.props.onChange) return; const { field, id, value } = this.props; const newValue = !value; this.props.onChange({ [field]: newValue, id }); this.setState({value: newValue}) }; render() { const { children } = this.props; const { onClick } = this; const { value } = this.state; return ( <FlexGrid className={s.toggle} direction="column" justify="center" align="center"> <Content size="5" center gray>{children}</Content> <Table.RowItem className={s.toggle__icon} tag="span" onClick={onClick} circle circleActive={value}/> </FlexGrid> ) } } export default class AdminItemsView extends Component { render() { const { data, parent_id, brands, categories, onChange, onDelete, onAboutChange, aboutData, onSave, onColorChange, onSpecialChange } = this.props; if (!data || data.id == null) return null; const { id } = data; return ( <div className={s.wrapper}> <AdminItemsViewTable data={data} brands={brands} onChange={onChange} categories={categories} /> <div className={s.line} /> <Container className={s.content}> <FlexGrid justify="start" align="start"> <div className={s.grid}> <FlexGrid className={s.toggle__wrapper} justify="start" align="start"> <ToggleButton id={id} onChange={onChange} field="is_top" value={data.is_top}>Топ</ToggleButton> <ToggleButton id={id} onChange={onChange} field="is_new" value={data.is_new}>Новинка</ToggleButton> <ToggleButton id={id} onChange={onChange} 
field="is_special_active" value={data.is_special_active}>Акция</ToggleButton> <ToggleButton id={id} onChange={onChange} field="warranty" value={data.warranty}>Гарантия</ToggleButton> </FlexGrid> <Content className={s.title} regular size="5"> Изображения </Content> <AddItemPhotoContainer __onChange={onColorChange} color={data.color} custom/> <SpecialSetupContainer onChange={onSpecialChange} /> </div> <div className={s.grid}> <AddItemAboutContainer data={aboutData} custom onSave={onSave} __onChange={onAboutChange}/> </div> </FlexGrid> <div className={s.actions}> <Button type="blue" to={`${adminLink.path}/items/${parent_id}`}> Отредактировать модель </Button> <Button className={s.btn} type="pink" onClick={onDelete}> Удалить </Button> </div> </Container> </div> ) } }
expdevelop/ultrastore
src/components/AdminItemsView/AdminItemsView.js
JavaScript
mit
3,762
package main import ( "reflect" "strconv" "unsafe" "github.com/STNS/STNS/stns" "github.com/STNS/libnss_stns/libstns" ) /* #include <grp.h> #include <sys/types.h> */ import "C" type Group struct { grp *C.struct_group result **C.struct_group } func (s Group) Set(groups stns.Attributes) int { for n, g := range groups { if g.ID != 0 { s.grp.gr_gid = C.__gid_t(g.ID) s.grp.gr_name = C.CString(n) s.grp.gr_passwd = C.CString("x") if g.Group != nil && !reflect.ValueOf(g.Group).IsNil() { work := make([]*C.char, len(g.Users)+1) if len(g.Users) > 0 { for i, u := range g.Users { work[i] = C.CString(u) } } s.grp.gr_mem = (**C.char)(unsafe.Pointer(&work[0])) } else { work := make([]*C.char, 1) s.grp.gr_mem = (**C.char)(unsafe.Pointer(&work[0])) } s.result = &s.grp return libstns.NSS_STATUS_SUCCESS } } return libstns.NSS_STATUS_NOTFOUND } //export _nss_stns_getgrnam_r func _nss_stns_getgrnam_r(name *C.char, grp *C.struct_group, buffer *C.char, bufsize C.size_t, result **C.struct_group) C.int { g := Group{grp, result} return set(grpNss, g, "name", C.GoString(name)) } //export _nss_stns_getgrgid_r func _nss_stns_getgrgid_r(gid C.__gid_t, grp *C.struct_group, buffer *C.char, bufsize C.size_t, result **C.struct_group) C.int { g := Group{grp, result} return set(grpNss, g, "id", strconv.Itoa(int(gid))) } //export _nss_stns_setgrent func _nss_stns_setgrent() C.int { return initList(grpNss, libstns.NSS_LIST_PRESET) } //export _nss_stns_endgrent func _nss_stns_endgrent() { initList(grpNss, libstns.NSS_LIST_PURGE) } //export _nss_stns_getgrent_r func _nss_stns_getgrent_r(grp *C.struct_group, buffer *C.char, bufsize C.size_t, result **C.struct_group) C.int { g := Group{grp, result} return setByList(grpNss, g) }
STNS/libnss_stns
nss/group.go
GO
mit
1,815
using System; using System.Collections.Generic; using System.Text; using Microsoft.Win32; namespace NetOffice.DeveloperToolbox.Utils.Registry { public class UtilsRegistryEntry { #region Fields private UtilsRegistryKey _parent; private string _valueName; private UtilsRegistryEntryType _type; #endregion #region Construction internal UtilsRegistryEntry(UtilsRegistryKey parent, string valueName) { _parent = parent; _valueName = valueName; _type = UtilsRegistryEntryType.Normal; } internal UtilsRegistryEntry(UtilsRegistryKey parent, string valueName, UtilsRegistryEntryType type) { _parent = parent; _valueName = valueName; _type = type; } #endregion #region Properties public UtilsRegistryEntryType Type { get { return _type; } } public string Name { get { if(string.IsNullOrEmpty(_valueName)) return "(Standard)"; else return _valueName; } set { RegistryKey key = _parent.Open(true); RegistryValueKind regKind = key.GetValueKind(_valueName); object regValue = key.GetValue(_valueName); key.DeleteValue(_valueName); key.SetValue(value, regValue, regKind); key.Close(); _valueName = value; } } public object Value { get { if (_type == UtilsRegistryEntryType.Faked) return null; RegistryKey key = _parent.Open(); object regValue = key.GetValue(_valueName); key.Close(); return regValue; } set { if (_type == UtilsRegistryEntryType.Faked) { RegistryKey key = _parent.Open(true); key.SetValue(_valueName, value, ValueKind); key.Close(); _type = UtilsRegistryEntryType.Default; } else { RegistryKey key = _parent.Open(true); key.SetValue(_valueName, value, ValueKind); key.Close(); } } } public RegistryValueKind ValueKind { get { if (_type == UtilsRegistryEntryType.Faked) return RegistryValueKind.String; RegistryKey key = _parent.Open(); RegistryValueKind kind = key.GetValueKind(_valueName); key.Close(); return kind; } } #endregion #region Methods public static string ByteArrayToBinaryString(byte[] byteArray) { StringBuilder builder = new StringBuilder(byteArray.Length * 2); foreach (byte value 
in byteArray) { builder.AppendFormat("{0:X2}", value); builder.Append(" "); } return builder.ToString(); } public static string ShiftHexValue(string value) { int lenght = value.Length; if ((10 - lenght) >= 2) value = "0x" + value; lenght = value.Length; if ((10 - lenght) > 0) { for (int i = 0; i < (10 - lenght); i++) { string first = value.Substring(0, 2); string last = value.Substring(2); value = first + "0" + last; } } return value; } public string GetValue(int lcid = 1033) { RegistryValueKind kind = ValueKind; switch (kind) { case RegistryValueKind.DWord: case RegistryValueKind.QWord: return ShiftHexValue(String.Format("{0:x4}", Value)) + " (" + Convert.ToString(Value) + ")"; case RegistryValueKind.Binary: return ByteArrayToBinaryString((Value as byte[])).ToLower(); case RegistryValueKind.ExpandString: case RegistryValueKind.MultiString: case RegistryValueKind.String: case RegistryValueKind.Unknown: if (_type == UtilsRegistryEntryType.Faked) return lcid == 1033 ? "(Value not set)" : "(Wert nicht gesetzt)"; else if ((_type == UtilsRegistryEntryType.Default) && (null == Value)) return lcid == 1033 ? "(Value not set)" : "(Wert nicht gesetzt)"; return Value as string; default: throw new ArgumentException(kind.ToString() + " is out of range"); } } public void Delete() { RegistryKey key = _parent.Open(true); key.DeleteValue(_valueName); key.Close(); } #endregion #region Static Methods private static byte[] StringToByteArray(string str) { if (null == str) return null; System.Text.UnicodeEncoding enc = new System.Text.UnicodeEncoding(); return enc.GetBytes(str); } private static string ByteArrayToString(byte[] arr) { if (null == arr) return null; System.Text.UnicodeEncoding enc = new System.Text.UnicodeEncoding(); return enc.GetString(arr); } #endregion #region Overrides public override string ToString() { return String.Format("UtilsRegistryEntry {0}", Name); } #endregion } }
NetOfficeFw/NetOffice
Toolbox/Toolbox/Utils/Registry/UtilsRegistryEntry.cs
C#
mit
6,140
import { expect } from 'chai' import browser from '../../src/util/browser' describe('util (node)', () => { describe('browser', () => { it('is false', () => { expect(browser).to.be.false }) }) })
reactjs/react-a11y
test/node/util.js
JavaScript
mit
214
// Seriously awesome GLSL noise functions. (C) Credits and kudos go to // Copyright (C) Stefan Gustavson, Ian McEwan Ashima Arts // MIT License. define(function(require, exports){ exports.permute1 = function(x){ return mod((34.0 * x + 1.0) * x, 289.0) } exports.permute3 = function(x){ return mod((34.0 * x + 1.0) * x, 289.0) } exports.permute4 = function(x){ return mod((34.0 * x + 1.0) * x, 289.0) } exports.isqrtT1 = function(r){ return 1.79284291400159 - 0.85373472095314 * r } exports.isqrtT4 = function(r){ return vec4(1.79284291400159 - 0.85373472095314 * r) } exports.snoise2 = function(x, y){ return snoise2v(vec2(x,y,z)) } exports.noise2d = exports.s2d = exports.snoise2v = function(v){ var C = vec4(0.211324865405187,0.366025403784439,-0.577350269189626,0.024390243902439) var i = floor(v + dot(v, C.yy) ) var x0 = v - i + dot(i, C.xx) var i1 = (x0.x > x0.y) ? vec2(1.0, 0.0) : vec2(0.0, 1.0) var x12 = x0.xyxy + C.xxzz x12.xy -= i1 i = mod(i, 289.0) // Avoid truncation effects in permutation var p = permute3(permute3(i.y + vec3(0.0, i1.y, 1.0)) + i.x + vec3(0.0, i1.x, 1.0 )) var m = max(0.5 - vec3(dot(x0,x0), dot(x12.xy,x12.xy), dot(x12.zw,x12.zw)), 0.0) m = m*m m = m*m var x = 2.0 * fract(p * C.www) - 1.0 var h = abs(x) - 0.5 var ox = floor(x + 0.5) var a0 = x - ox m *= (1.79284291400159 - 0.85373472095314 * ( a0*a0 + h*h )) var g = vec3() g.x = a0.x * x0.x + h.x * x0.y g.yz = a0.yz * x12.xz + h.yz * x12.yw return 130.0 * dot(m, g) } exports.snoise3 = function(x, y, z){ return snoise3v(vec3(x,y,z)) } exports.noise3d = exports.snoise3v = function(v){ var C = vec2(1.0/6.0, 1.0/3.0) var D = vec4(0.0, 0.5, 1.0, 2.0) // First corner var i = floor(v + dot(v, C.yyy)) var x0 = v - i + dot(i, C.xxx) var g = step(x0.yzx, x0.xyz) var l = 1.0 - g var i1 = min(g.xyz, l.zxy) var i2 = max(g.xyz, l.zxy) var x1 = x0 - i1 + 1.0 * C.xxx var x2 = x0 - i2 + 2.0 * C.xxx var x3 = x0 - 1. 
+ 3.0 * C.xxx // Permutations i = mod(i, 289.0) var p = permute4(permute4(permute4( i.z + vec4(0.0, i1.z, i2.z, 1.0)) + i.y + vec4(0.0, i1.y, i2.y, 1.0)) + i.x + vec4(0.0, i1.x, i2.x, 1.0)) // ( N*N points uniformly over a square, mapped onto an octahedron.) var n_ = 1.0/7.0 var ns = n_ * D.wyz - D.xzx var j = p - 49.0 * floor(p * ns.z *ns.z) var x_ = floor(j * ns.z) var y_ = floor(j - 7.0 * x_) var x = x_ * ns.x + ns.yyyy var y = y_ * ns.x + ns.yyyy var h = 1.0 - abs(x) - abs(y) var b0 = vec4( x.xy, y.xy ) var b1 = vec4( x.zw, y.zw ) var s0 = floor(b0)*2.0 + 1.0 var s1 = floor(b1)*2.0 + 1.0 var sh = -step(h, vec4(0.0)) var a0 = b0.xzyw + s0.xzyw*sh.xxyy var a1 = b1.xzyw + s1.xzyw*sh.zzww var p0 = vec3(a0.xy, h.x) var p1 = vec3(a0.zw, h.y) var p2 = vec3(a1.xy, h.z) var p3 = vec3(a1.zw, h.w) //Normalise gradients var norm = isqrtT4(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3))) p0 *= norm.x; p1 *= norm.y; p2 *= norm.z; p3 *= norm.w; // Mix final noise value var m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0) m = m * m return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1), dot(p2,x2), dot(p3,x3) ) ) } exports.snoise4_g = function(j, ip){ var p = vec4() p.xyz = floor( fract (vec3(j) * ip.xyz) * 7.0) * ip.z - 1.0 p.w = 1.5 - dot(abs(p.xyz), vec3(1.0,1.0,1.0)) var s = vec4(lessThan(p, vec4(0.0))) p.xyz = p.xyz + (s.xyz*2.0 - 1.0) * s.www return p } exports.snoise4 = function(x, y, z, w){ return snoise4v(vec4(x,y,z,w)) } exports.snoise4v = function(v){ var C = vec4(0.138196601125011,0.276393202250021,0.414589803375032,-0.447213595499958) // First corner var i = floor(v + dot(v, vec4(0.309016994374947451)) ) var x0 = v - i + dot(i, C.xxxx) var i0 = vec4() var isX = step( x0.yzw, x0.xxx ) var isYZ = step( x0.zww, x0.yyz ) i0.x = isX.x + isX.y + isX.z i0.yzw = 1.0 - isX i0.y += isYZ.x + isYZ.y i0.zw += 1.0 - isYZ.xy i0.z += isYZ.z i0.w += 1.0 - isYZ.z var i3 = clamp( i0, 0.0, 1.0 ) var i2 = clamp( i0-1.0, 0.0, 1.0 ) var i1 = clamp( i0-2.0, 
0.0, 1.0 ) var x1 = x0 - i1 + C.xxxx var x2 = x0 - i2 + C.yyyy var x3 = x0 - i3 + C.zzzz var x4 = x0 + C.wwww // Permutations i = mod(i, 289.0 ) var j0 = permute1( permute1( permute1( permute1(i.w) + i.z) + i.y) + i.x) var j1 = permute4( permute4( permute4( permute4( i.w + vec4(i1.w, i2.w, i3.w, 1.0 )) + i.z + vec4(i1.z, i2.z, i3.z, 1.0 )) + i.y + vec4(i1.y, i2.y, i3.y, 1.0 )) + i.x + vec4(i1.x, i2.x, i3.x, 1.0 )) // Gradients: 7x7x6 points over a cube, mapped onto a 4-cross polytope // 7*7*6 = 294, which is close to the ring size 17*17 = 289. var ip = vec4(1.0/294.0, 1.0/49.0, 1.0/7.0, 0.0) var p0 = snoise4_g(j0, ip) var p1 = snoise4_g(j1.x, ip) var p2 = snoise4_g(j1.y, ip) var p3 = snoise4_g(j1.z, ip) var p4 = snoise4_g(j1.w, ip) // Normalise gradients var nr = isqrtT4(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3))) p0 *= nr.x p1 *= nr.y p2 *= nr.z p3 *= nr.w p4 *= isqrtT1(dot(p4,p4)) // Mix contributions from the five corners var m0 = max(0.6 - vec3(dot(x0,x0), dot(x1,x1), dot(x2,x2)), 0.0) var m1 = max(0.6 - vec2(dot(x3,x3), dot(x4,x4)), 0.0) m0 = m0 * m0 m1 = m1 * m1 return 49.0 * (dot(m0*m0, vec3(dot( p0, x0 ), dot(p1, x1), dot(p2, x2))) + dot(m1*m1, vec2( dot(p3, x3), dot(p4, x4)))) } exports.cell2v = function(v){ return cell3v(vec3(v.x, v.y,0)) } exports.cell3v = function(P){ var K = 0.142857142857 // 1/7 var Ko = 0.428571428571 // 1/2-K/2 var K2 = 0.020408163265306 // 1/(7*7) var Kz = 0.166666666667 // 1/6 var Kzo = 0.416666666667 // 1/2-1/6*2 var ji = 0.8 // smaller jitter gives less errors in F2 var Pi = mod(floor(P), 289.0) var Pf = fract(P) var Pfx = Pf.x + vec4(0.0, -1.0, 0.0, -1.0) var Pfy = Pf.y + vec4(0.0, 0.0, -1.0, -1.0) var p = permute4(Pi.x + vec4(0.0, 1.0, 0.0, 1.0)) p = permute4(p + Pi.y + vec4(0.0, 0.0, 1.0, 1.0)) var p1 = permute4(p + Pi.z) // z+0 var p2 = permute4(p + Pi.z + vec4(1.0)) // z+1 var ox1 = fract(p1*K) - Ko var oy1 = mod(floor(p1*K), 7.0)*K - Ko var oz1 = floor(p1*K2)*Kz - Kzo // p1 < 289 guaranteed var ox2 = fract(p2*K) 
- Ko var oy2 = mod(floor(p2*K), 7.0)*K - Ko var oz2 = floor(p2*K2)*Kz - Kzo var dx1 = Pfx + ji*ox1 var dy1 = Pfy + ji*oy1 var dz1 = Pf.z + ji*oz1 var dx2 = Pfx + ji*ox2 var dy2 = Pfy + ji*oy2 var dz2 = Pf.z - 1.0 + ji*oz2 var d1 = dx1 * dx1 + dy1 * dy1 + dz1 * dz1 // z+0 var d2 = dx2 * dx2 + dy2 * dy2 + dz2 * dz2 // z+1 var d = min(d1,d2) // F1 is now in d d2 = max(d1,d2) // Make sure we keep all candidates for F2 d.xy = (d.x < d.y) ? d.xy : d.yx // Swap smallest to d.x d.xz = (d.x < d.z) ? d.xz : d.zx d.xw = (d.x < d.w) ? d.xw : d.wx // F1 is now in d.x d.yzw = min(d.yzw, d2.yzw) // F2 now not in d2.yzw d.y = min(d.y, d.z) // nor in d.z d.y = min(d.y, d.w) // nor in d.w d.y = min(d.y, d2.x) // F2 is now in d.y return sqrt(d.xy) // F1 and F2 }, exports.cell3w = function(P){ var K = 0.142857142857 var Ko = 0.428571428571 // 1/2-K/2 var K2 = 0.020408163265306// 1/(7*7) var Kz = 0.166666666667// 1/6 var Kzo = 0.416666666667// 1/2-1/6*2 var ji = 1.0// smaller jitter gives more regular pattern var Pi = mod(floor(P), 289.0) var Pf = fract(P) - 0.5 var Pfx = Pf.x + vec3(1.0, 0.0, -1.0) var Pfy = Pf.y + vec3(1.0, 0.0, -1.0) var Pfz = Pf.z + vec3(1.0, 0.0, -1.0) var p = permute3(Pi.x + vec3(-1.0, 0.0, 1.0)) var p1 = permute3(p + Pi.y - 1.0) var p2 = permute3(p + Pi.y) var p3 = permute3(p + Pi.y + 1.0) var p11 = permute3(p1 + Pi.z - 1.0) var p12 = permute3(p1 + Pi.z) var p13 = permute3(p1 + Pi.z + 1.0) var p21 = permute3(p2 + Pi.z - 1.0) var p22 = permute3(p2 + Pi.z) var p23 = permute3(p2 + Pi.z + 1.0) var p31 = permute3(p3 + Pi.z - 1.0) var p32 = permute3(p3 + Pi.z) var p33 = permute3(p3 + Pi.z + 1.0) var ox11 = fract(p11*K) - Ko var oy11 = mod(floor(p11*K), 7.0)*K - Ko var oz11 = floor(p11*K2)*Kz - Kzo // p11 < 289 guaranteed var ox12 = fract(p12*K) - Ko var oy12 = mod(floor(p12*K), 7.0)*K - Ko var oz12 = floor(p12*K2)*Kz - Kzo var ox13 = fract(p13*K) - Ko var oy13 = mod(floor(p13*K), 7.0)*K - Ko var oz13 = floor(p13*K2)*Kz - Kzo var ox21 = fract(p21*K) - Ko var oy21 = 
mod(floor(p21*K), 7.0)*K - Ko var oz21 = floor(p21*K2)*Kz - Kzo var ox22 = fract(p22*K) - Ko var oy22 = mod(floor(p22*K), 7.0)*K - Ko var oz22 = floor(p22*K2)*Kz - Kzo var ox23 = fract(p23*K) - Ko var oy23 = mod(floor(p23*K), 7.0)*K - Ko var oz23 = floor(p23*K2)*Kz - Kzo var ox31 = fract(p31*K) - Ko var oy31 = mod(floor(p31*K), 7.0)*K - Ko var oz31 = floor(p31*K2)*Kz - Kzo var ox32 = fract(p32*K) - Ko var oy32 = mod(floor(p32*K), 7.0)*K - Ko var oz32 = floor(p32*K2)*Kz - Kzo var ox33 = fract(p33*K) - Ko var oy33 = mod(floor(p33*K), 7.0)*K - Ko var oz33 = floor(p33*K2)*Kz - Kzo var dx11 = Pfx + ji*ox11 var dy11 = Pfy.x + ji*oy11 var dz11 = Pfz.x + ji*oz11 var dx12 = Pfx + ji*ox12 var dy12 = Pfy.x + ji*oy12 var dz12 = Pfz.y + ji*oz12 var dx13 = Pfx + ji*ox13 var dy13 = Pfy.x + ji*oy13 var dz13 = Pfz.z + ji*oz13 var dx21 = Pfx + ji*ox21 var dy21 = Pfy.y + ji*oy21 var dz21 = Pfz.x + ji*oz21 var dx22 = Pfx + ji*ox22 var dy22 = Pfy.y + ji*oy22 var dz22 = Pfz.y + ji*oz22 var dx23 = Pfx + ji*ox23 var dy23 = Pfy.y + ji*oy23 var dz23 = Pfz.z + ji*oz23 var dx31 = Pfx + ji*ox31 var dy31 = Pfy.z + ji*oy31 var dz31 = Pfz.x + ji*oz31 var dx32 = Pfx + ji*ox32 var dy32 = Pfy.z + ji*oy32 var dz32 = Pfz.y + ji*oz32 var dx33 = Pfx + ji*ox33 var dy33 = Pfy.z + ji*oy33 var dz33 = Pfz.z + ji*oz33 var d11 = dx11 * dx11 + dy11 * dy11 + dz11 * dz11 var d12 = dx12 * dx12 + dy12 * dy12 + dz12 * dz12 var d13 = dx13 * dx13 + dy13 * dy13 + dz13 * dz13 var d21 = dx21 * dx21 + dy21 * dy21 + dz21 * dz21 var d22 = dx22 * dx22 + dy22 * dy22 + dz22 * dz22 var d23 = dx23 * dx23 + dy23 * dy23 + dz23 * dz23 var d31 = dx31 * dx31 + dy31 * dy31 + dz31 * dz31 var d32 = dx32 * dx32 + dy32 * dy32 + dz32 * dz32 var d33 = dx33 * dx33 + dy33 * dy33 + dz33 * dz33 var d1a = min(d11, d12) d12 = max(d11, d12) d11 = min(d1a, d13) // Smallest now not in d12 or d13 d13 = max(d1a, d13) d12 = min(d12, d13) // 2nd smallest now not in d13 var d2a = min(d21, d22) d22 = max(d21, d22) d21 = min(d2a, d23) // Smallest now not 
in d22 or d23 d23 = max(d2a, d23) d22 = min(d22, d23) // 2nd smallest now not in d23 var d3a = min(d31, d32) d32 = max(d31, d32) d31 = min(d3a, d33) // Smallest now not in d32 or d33 d33 = max(d3a, d33) d32 = min(d32, d33) // 2nd smallest now not in d33 var da = min(d11, d21) d21 = max(d11, d21) d11 = min(da, d31) // Smallest now in d11 d31 = max(da, d31) // 2nd smallest now not in d31 d11.xy = (d11.x < d11.y) ? d11.xy : d11.yx d11.xz = (d11.x < d11.z) ? d11.xz : d11.zx // d11.x now smallest d12 = min(d12, d21) // 2nd smallest now not in d21 d12 = min(d12, d22) // nor in d22 d12 = min(d12, d31) // nor in d31 d12 = min(d12, d32) // nor in d32 d11.yz = min(d11.yz, d12.xy) // nor in d12.yz d11.y = min(d11.y, d12.z) // Only two more to go d11.y = min(d11.y, d11.z) // Done! (Phew!) return sqrt(d11.xy) // F1, F2 } })
teem2/dreem2.1
core/gl/glnoise.js
JavaScript
mit
11,369
var $ = require('jquery'); var keymaster = require('keymaster'); var ChartEditor = require('./component-chart-editor.js'); var DbInfo = require('./component-db-info.js'); var AceSqlEditor = require('./component-ace-sql-editor.js'); var DataGrid = require('./component-data-grid.js'); var QueryEditor = function () { var chartEditor = new ChartEditor(); var dbInfo = new DbInfo(); var aceSqlEditor = new AceSqlEditor("ace-editor"); var dataGrid = new DataGrid(); function runQuery () { $('#server-run-time').html(''); $('#rowcount').html(''); dataGrid.emptyDataGrid(); var data = { queryText: aceSqlEditor.getSelectedOrAllText(), connectionId: $('#connection').val(), cacheKey: $('#cache-key').val(), queryName: getQueryName() }; dataGrid.startRunningTimer(); $.ajax({ type: "POST", url: "/run-query", data: data }).done(function (data) { chartEditor.setData(data); // TODO - if vis tab is active, render chart dataGrid.stopRunningTimer(); $('#server-run-time').html(data.serverMs/1000 + " sec."); if (data.success) { $('.hide-while-running').show(); if (data.incomplete) { $('.incomplete-notification').removeClass("hidden"); } else { $('.incomplete-notification').addClass("hidden"); } dataGrid.renderGridData(data); } else { dataGrid.renderError(data.error); } }).fail(function () { dataGrid.stopRunningTimer(); dataGrid.renderError("Something is broken :("); }); } function getQueryName () { return $('#header-query-name').val(); } function getQueryTags () { return $.map($('#tags').val().split(','), $.trim); } function saveQuery () { var $queryId = $('#query-id'); var query = { name: getQueryName(), queryText: aceSqlEditor.getEditorText(), tags: getQueryTags(), connectionId: dbInfo.getConnectionId(), chartConfiguration: chartEditor.getChartConfiguration() }; $('#btn-save-result').text('saving...').show(); $.ajax({ type: "POST", url: "/queries/" + $queryId.val(), data: query }).done(function (data) { if (data.success) { window.history.replaceState({}, "query " + data.query._id, 
"/queries/" + data.query._id); $queryId.val(data.query._id); $('#btn-save-result').removeClass('label-info').addClass('label-success').text('Success'); setTimeout(function () { $('#btn-save-result').fadeOut(400, function () { $('#btn-save-result').removeClass('label-success').addClass('label-info').text(''); }); }, 1000); } else { $('#btn-save-result').removeClass('label-info').addClass('label-danger').text('Failed'); } }).fail(function () { alert('ajax fail'); }); } $('#btn-save').click(function (event) { event.preventDefault(); event.stopPropagation(); saveQuery(); }); $('#btn-run-query').click(function (event) { event.preventDefault(); event.stopPropagation(); runQuery(); }); /* (re-)render the chart when the viz tab is pressed, TODO: only do this if necessary ==============================================================================*/ $('a[data-toggle="tab"]').on('shown.bs.tab', function (e) { // if shown tab was the chart tab, rerender the chart // e.target is the activated tab if (e.target.getAttribute("href") == "#tab-content-visualize") { chartEditor.rerenderChart(); } else if (e.target.getAttribute("href") == "#tab-content-sql") { dataGrid.resize(); } }); /* get query again, because not all the data is in the HTML TODO: do most the workflow this way? 
==============================================================================*/ var $queryId = $('#query-id'); $.ajax({ type: "GET", url: "/queries/" + $queryId.val() + "?format=json" }).done(function (data) { console.log(data); chartEditor.loadChartConfiguration(data.chartConfiguration); }).fail(function () { alert('Failed to get additional Query info'); }); /* Tags Typeahead ==============================================================================*/ var Bloodhound = require('Bloodhound'); var bloodhoundTags = new Bloodhound({ datumTokenizer: Bloodhound.tokenizers.obj.whitespace('name'), queryTokenizer: Bloodhound.tokenizers.whitespace, prefetch: { url: '/tags', // array of tagnames ttl: 0, filter: function(list) { return $.map(list, function(tag) { return { name: tag }; }); } } }); bloodhoundTags.initialize(); $('#tags').tagsinput({ typeaheadjs: { //name: 'tags', displayKey: 'name', valueKey: 'name', source: bloodhoundTags.ttAdapter() } }); /* Shortcuts ==============================================================================*/ // keymaster doesn't fire on input/textarea events by default // since we are only using command/ctrl shortcuts, // we want the event to fire all the time for any element keymaster.filter = function (event) { return true; }; keymaster('ctrl+s, command+s', function() { saveQuery(); return false; }); keymaster('ctrl+r, command+r, ctrl+e, command+e', function() { runQuery(); return false; }); }; module.exports = function () { if ($('#ace-editor').length) { new QueryEditor(); } };
l371559739/sqlpad
client-js/query-editor.js
JavaScript
mit
6,203
package PracticeLeetCode; import java.util.HashSet; import java.util.LinkedList; import java.util.Queue; import java.util.Set; public class _127WordLadder { psvm }
darshanhs90/Java-InterviewPrep
src/PracticeLeetCode/_127WordLadder.java
Java
mit
169
import glob import logging import os import subprocess from plugins import BaseAligner from yapsy.IPlugin import IPlugin from assembly import get_qual_encoding class Bowtie2Aligner(BaseAligner, IPlugin): def run(self): """ Map READS to CONTIGS and return alignment. Set MERGED_PAIR to True if reads[1] is a merged paired end file """ contig_file = self.data.contigfiles[0] reads = self.data.readfiles ## Index contigs prefix = os.path.join(self.outpath, 'bt2') cmd_args = [self.build_bin, '-f', contig_file, prefix] self.arast_popen(cmd_args, overrides=False) ### Align reads bamfiles = [] for i, readset in enumerate(self.data.readsets): samfile = os.path.join(self.outpath, 'align.sam') reads = readset.files cmd_args = [self.executable, '-x', prefix, '-S', samfile, '-p', self.process_threads_allowed] if len(reads) == 2: cmd_args += ['-1', reads[0], '-2', reads[1]] elif len(reads) == 1: cmd_args += ['-U', reads[0]] else: raise Exception('Bowtie plugin error') self.arast_popen(cmd_args, overrides=False) if not os.path.exists(samfile): raise Exception('Unable to complete alignment') ## Convert to BAM bamfile = samfile.replace('.sam', '.bam') cmd_args = ['samtools', 'view', '-bSho', bamfile, samfile] self.arast_popen(cmd_args) bamfiles.append(bamfile) ### Merge samfiles if multiple if len(bamfiles) > 1: bamfile = os.path.join(self.outpath, '{}_{}.bam'.format(os.path.basename(contig_file), i)) self.arast_popen(['samtools', 'merge', bamfile] + bamfiles) if not os.path.exists(bamfile): raise Exception('Unable to complete alignment') else: bamfile = bamfiles[0] if not os.path.exists(bamfile): raise Exception('Unable to complete alignment') ## Convert back to sam samfile = bamfile.replace('.bam', '.sam') self.arast_popen(['samtools', 'view', '-h', '-o', samfile, bamfile]) return {'alignment': samfile, 'alignment_bam': bamfile}
levinas/assembly
lib/assembly/plugins/bowtie2.py
Python
mit
2,404
namespace EventCloud.Events.Dtos { public class GetEventListInput { public bool IncludeCanceledEvents { get; set; } } }
aspnetboilerplate/sample-eventcloud
mvc-angularjs/src/EventCloud.Application/Events/Dtos/GetEventListInput.cs
C#
mit
142
// # Frontend Route tests // As it stands, these tests depend on the database, and as such are integration tests. // Mocking out the models to not touch the DB would turn these into unit tests, and should probably be done in future, // But then again testing real code, rather than mock code, might be more useful... const should = require('should'); const sinon = require('sinon'); const supertest = require('supertest'); const moment = require('moment'); const cheerio = require('cheerio'); const _ = require('lodash'); const testUtils = require('../../utils'); const configUtils = require('../../utils/configUtils'); const urlUtils = require('../../utils/urlUtils'); const config = require('../../../core/shared/config'); const settingsCache = require('../../../core/server/services/settings/cache'); const origCache = _.cloneDeep(settingsCache); const ghost = testUtils.startGhost; let request; describe('Frontend Routing', function () { function doEnd(done) { return function (err, res) { if (err) { return done(err); } should.not.exist(res.headers['x-cache-invalidate']); should.not.exist(res.headers['X-CSRF-Token']); should.not.exist(res.headers['set-cookie']); should.exist(res.headers.date); done(); }; } function addPosts(done) { testUtils.clearData().then(function () { return testUtils.initData(); }).then(function () { return testUtils.fixtures.insertPostsAndTags(); }).then(function () { done(); }); } afterEach(function () { sinon.restore(); }); before(function () { return ghost() .then(function () { request = supertest.agent(config.get('url')); }); }); describe('Test with Initial Fixtures', function () { describe('Error', function () { it('should 404 for unknown post with invalid characters', function (done) { request.get('/$pec+acular~/') .expect('Cache-Control', testUtils.cacheRules.private) .expect(404) .expect(/Page not found/) .end(doEnd(done)); }); it('should 404 for unknown frontend route', function (done) { request.get('/spectacular/marvellous/') .set('Accept', 
'application/json') .expect('Cache-Control', testUtils.cacheRules.private) .expect(404) .expect(/Page not found/) .end(doEnd(done)); }); it('should 404 for encoded char not 301 from uncapitalise', function (done) { request.get('/|/') .expect('Cache-Control', testUtils.cacheRules.private) .expect(404) .expect(/Page not found/) .end(doEnd(done)); }); }); describe('Default Redirects (clean URLS)', function () { it('Single post should redirect without slash', function (done) { request.get('/welcome') .expect('Location', '/welcome/') .expect('Cache-Control', testUtils.cacheRules.year) .expect(301) .end(doEnd(done)); }); it('Single post should redirect uppercase', function (done) { request.get('/Welcome/') .expect('Location', '/welcome/') .expect('Cache-Control', testUtils.cacheRules.year) .expect(301) .end(doEnd(done)); }); it('Single post should sanitize double slashes when redirecting uppercase', function (done) { request.get('///Google.com/') .expect('Location', '/google.com/') .expect('Cache-Control', testUtils.cacheRules.year) .expect(301) .end(doEnd(done)); }); it('AMP post should redirect without slash', function (done) { request.get('/welcome/amp') .expect('Location', '/welcome/amp/') .expect('Cache-Control', testUtils.cacheRules.year) .expect(301) .end(doEnd(done)); }); it('AMP post should redirect uppercase', function (done) { request.get('/Welcome/AMP/') .expect('Location', '/welcome/amp/') .expect('Cache-Control', testUtils.cacheRules.year) .expect(301) .end(doEnd(done)); }); }); }); describe('Test with added posts', function () { before(addPosts); describe('Static page', function () { it('should respond with html', function (done) { request.get('/static-page-test/') .expect('Content-Type', /html/) .expect('Cache-Control', testUtils.cacheRules.public) .expect(200) .end(function (err, res) { const $ = cheerio.load(res.text); should.not.exist(res.headers['x-cache-invalidate']); should.not.exist(res.headers['X-CSRF-Token']); 
should.not.exist(res.headers['set-cookie']); should.exist(res.headers.date); $('title').text().should.equal('This is a static page'); $('body.page-template').length.should.equal(1); $('article.post').length.should.equal(1); doEnd(done)(err, res); }); }); it('should redirect without slash', function (done) { request.get('/static-page-test') .expect('Location', '/static-page-test/') .expect('Cache-Control', testUtils.cacheRules.year) .expect(301) .end(doEnd(done)); }); describe('edit', function () { it('should redirect without slash', function (done) { request.get('/static-page-test/edit') .expect('Location', '/static-page-test/edit/') .expect('Cache-Control', testUtils.cacheRules.year) .expect(301) .end(doEnd(done)); }); it('should redirect to editor', function (done) { request.get('/static-page-test/edit/') .expect('Location', /ghost\/#\/editor\/\w+/) .expect('Cache-Control', testUtils.cacheRules.public) .expect(302) .end(doEnd(done)); }); it('should 404 for non-edit parameter', function (done) { request.get('/static-page-test/notedit/') .expect('Cache-Control', testUtils.cacheRules.private) .expect(404) .expect(/Page not found/) .end(doEnd(done)); }); }); describe('edit with admin redirects disabled', function () { before(function (done) { configUtils.set('admin:redirects', false); ghost({forceStart: true}) .then(function () { request = supertest.agent(config.get('url')); addPosts(done); }); }); after(function (done) { configUtils.restore(); ghost({forceStart: true}) .then(function () { request = supertest.agent(config.get('url')); addPosts(done); }); }); it('should redirect without slash', function (done) { request.get('/static-page-test/edit') .expect('Location', '/static-page-test/edit/') .expect('Cache-Control', testUtils.cacheRules.year) .expect(301) .end(doEnd(done)); }); it('should not redirect to editor', function (done) { request.get('/static-page-test/edit/') .expect(404) .expect('Cache-Control', testUtils.cacheRules.private) .end(doEnd(done)); }); }); 
describe('amp', function () { it('should 404 for amp parameter', function (done) { // NOTE: only post pages are supported so the router doesn't have a way to distinguish if // the request was done after AMP 'Page' or 'Post' request.get('/static-page-test/amp/') .expect('Cache-Control', testUtils.cacheRules.private) .expect(404) .expect(/Post not found/) .end(doEnd(done)); }); }); }); describe('Post preview', function () { it('should display draft posts accessed via uuid', function (done) { request.get('/p/d52c42ae-2755-455c-80ec-70b2ec55c903/') .expect('Content-Type', /html/) .expect(200) .end(function (err, res) { if (err) { return done(err); } const $ = cheerio.load(res.text); should.not.exist(res.headers['x-cache-invalidate']); should.not.exist(res.headers['X-CSRF-Token']); should.not.exist(res.headers['set-cookie']); should.exist(res.headers.date); $('title').text().should.equal('Not finished yet'); // @TODO: use theme from fixtures and don't rely on content/themes/casper // $('.content .post').length.should.equal(1); // $('.poweredby').text().should.equal('Proudly published with Ghost'); // $('body.post-template').length.should.equal(1); // $('article.post').length.should.equal(1); done(); }); }); it('should redirect published posts to their live url', function (done) { request.get('/p/2ac6b4f6-e1f3-406c-9247-c94a0496d39d/') .expect(301) .expect('Location', '/short-and-sweet/') .expect('Cache-Control', testUtils.cacheRules.year) .end(doEnd(done)); }); it('404s unknown uuids', function (done) { request.get('/p/aac6b4f6-e1f3-406c-9247-c94a0496d39f/') .expect(404) .end(doEnd(done)); }); }); describe('Post with Ghost in the url', function () { // All of Ghost's admin depends on the /ghost/ in the url to work properly // Badly formed regexs can cause breakage if a post slug starts with the 5 letters ghost it('should retrieve a blog post with ghost at the start of the url', function (done) { request.get('/ghostly-kitchen-sink/') .expect('Cache-Control', 
testUtils.cacheRules.public) .expect(200) .end(doEnd(done)); }); }); }); describe('Subdirectory (no slash)', function () { let ghostServer; before(function () { configUtils.set('url', 'http://localhost/blog'); urlUtils.stubUrlUtilsFromConfig(); return ghost({forceStart: true, subdir: true}) .then(function (_ghostServer) { ghostServer = _ghostServer; request = supertest.agent(config.get('server:host') + ':' + config.get('server:port')); }); }); after(function () { configUtils.restore(); urlUtils.restore(); }); it('http://localhost should 404', function (done) { request.get('/') .expect(404) .end(doEnd(done)); }); it('http://localhost/ should 404', function (done) { request.get('/') .expect(404) .end(doEnd(done)); }); it('http://localhost/blog should 301 to http://localhost/blog/', function (done) { request.get('/blog') .expect(301) .expect('Location', '/blog/') .end(doEnd(done)); }); it('http://localhost/blog/ should 200', function (done) { request.get('/blog/') .expect(200) .end(doEnd(done)); }); it('http://localhost/blog/welcome should 301 to http://localhost/blog/welcome/', function (done) { request.get('/blog/welcome') .expect(301) .expect('Location', '/blog/welcome/') .expect('Cache-Control', testUtils.cacheRules.year) .end(doEnd(done)); }); it('http://localhost/blog/welcome/ should 200', function (done) { request.get('/blog/welcome/') .expect(200) .end(doEnd(done)); }); it('/blog/tag/getting-started should 301 to /blog/tag/getting-started/', function (done) { request.get('/blog/tag/getting-started') .expect(301) .expect('Location', '/blog/tag/getting-started/') .expect('Cache-Control', testUtils.cacheRules.year) .end(doEnd(done)); }); it('/blog/tag/getting-started/ should 200', function (done) { request.get('/blog/tag/getting-started/') .expect(200) .end(doEnd(done)); }); it('/blog/welcome/amp/ should 200', function (done) { request.get('/blog/welcome/amp/') .expect(200) .end(doEnd(done)); }); }); describe('Subdirectory (with slash)', function () { let 
ghostServer; before(function () { configUtils.set('url', 'http://localhost/blog/'); urlUtils.stubUrlUtilsFromConfig(); return ghost({forceStart: true, subdir: true}) .then(function (_ghostServer) { ghostServer = _ghostServer; request = supertest.agent(config.get('server:host') + ':' + config.get('server:port')); }); }); after(function () { configUtils.restore(); urlUtils.restore(); }); it('http://localhost should 404', function (done) { request.get('/') .expect(404) .end(doEnd(done)); }); it('http://localhost/ should 404', function (done) { request.get('/') .expect(404) .end(doEnd(done)); }); it('/blog should 301 to /blog/', function (done) { request.get('/blog') .expect(301) .expect('Location', '/blog/') .end(doEnd(done)); }); it('/blog/ should 200', function (done) { request.get('/blog/') .expect(200) .end(doEnd(done)); }); it('/blog/welcome should 301 to /blog/welcome/', function (done) { request.get('/blog/welcome') .expect(301) .expect('Location', '/blog/welcome/') .expect('Cache-Control', testUtils.cacheRules.year) .end(doEnd(done)); }); it('/blog/welcome/ should 200', function (done) { request.get('/blog/welcome/') .expect(200) .end(doEnd(done)); }); it('/blog/tag/getting-started should 301 to /blog/tag/getting-started/', function (done) { request.get('/blog/tag/getting-started') .expect(301) .expect('Location', '/blog/tag/getting-started/') .expect('Cache-Control', testUtils.cacheRules.year) .end(doEnd(done)); }); it('/blog/tag/getting-started/ should 200', function (done) { request.get('/blog/tag/getting-started/') .expect(200) .end(doEnd(done)); }); it('/blog/welcome/amp/ should 200', function (done) { request.get('/blog/welcome/amp/') .expect(200) .end(doEnd(done)); }); it('should uncapitalise correctly with 301 to subdir', function (done) { request.get('/blog/AAA/') .expect('Location', '/blog/aaa/') .expect('Cache-Control', testUtils.cacheRules.year) .expect(301) .end(doEnd(done)); }); }); // we'll use X-Forwarded-Proto: https to simulate an 'https://' 
request behind a proxy describe('HTTPS', function () { let ghostServer; before(function () { configUtils.set('url', 'http://localhost:2370/'); urlUtils.stubUrlUtilsFromConfig(); return ghost({forceStart: true}) .then(function (_ghostServer) { ghostServer = _ghostServer; request = supertest.agent(config.get('server:host') + ':' + config.get('server:port')); }); }); after(function () { configUtils.restore(); urlUtils.restore(); }); it('should set links to url over non-HTTPS', function (done) { request.get('/') .expect(200) .expect(/<link rel="canonical" href="http:\/\/localhost:2370\/" \/\>/) .expect(/<a href="http:\/\/localhost:2370">Ghost<\/a\>/) .end(doEnd(done)); }); it('should set links over HTTPS besides canonical', function (done) { request.get('/') .set('X-Forwarded-Proto', 'https') .expect(200) .expect(/<link rel="canonical" href="http:\/\/localhost:2370\/" \/\>/) .expect(/<a href="https:\/\/localhost:2370">Ghost<\/a\>/) .end(doEnd(done)); }); }); // TODO: convert to unit tests describe('Redirects (use redirects.json from test/utils/fixtures/data)', function () { let ghostServer; before(function () { configUtils.set('url', 'http://localhost:2370/'); urlUtils.stubUrlUtilsFromConfig(); return ghost({forceStart: true}) .then(function (_ghostServer) { ghostServer = _ghostServer; request = supertest.agent(config.get('server:host') + ':' + config.get('server:port')); }); }); after(function () { configUtils.restore(); urlUtils.restore(); }); describe('1 case', function () { it('with trailing slash', function (done) { request.get('/post/10/a-nice-blog-post') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/a-nice-blog-post'); doEnd(done)(err, res); }); }); it('without trailing slash', function (done) { request.get('/post/10/a-nice-blog-post/') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/a-nice-blog-post'); 
doEnd(done)(err, res); }); }); it('with query params', function (done) { request.get('/topic?something=good') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/?something=good'); doEnd(done)(err, res); }); }); it('with query params', function (done) { request.get('/post/10/a-nice-blog-post?a=b') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/a-nice-blog-post?a=b'); doEnd(done)(err, res); }); }); it('with case insensitive', function (done) { request.get('/CaSe-InSeNsItIvE') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/redirected-insensitive'); doEnd(done)(err, res); }); }); it('with case sensitive', function (done) { request.get('/Case-Sensitive') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/redirected-sensitive'); doEnd(done)(err, res); }); }); it('defaults to case sensitive', function (done) { request.get('/Default-Sensitive') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/redirected-default'); doEnd(done)(err, res); }); }); it('should not redirect with case sensitive', function (done) { request.get('/casE-sensitivE') .end(function (err, res) { res.headers.location.should.not.eql('/redirected-sensitive'); res.statusCode.should.not.eql(302); doEnd(done)(err, res); }); }); it('should not redirect with default case sensitive', function (done) { request.get('/defaulT-sensitivE') .end(function (err, res) { res.headers.location.should.not.eql('/redirected-default'); res.statusCode.should.not.eql(302); doEnd(done)(err, res); }); }); }); describe('2 case', function () { it('with trailing slash', function (done) { request.get('/my-old-blog-post/') .expect(301) 
.expect('Cache-Control', testUtils.cacheRules.year) .end(function (err, res) { res.headers.location.should.eql('/revamped-url/'); doEnd(done)(err, res); }); }); it('without trailing slash', function (done) { request.get('/my-old-blog-post') .expect(301) .expect('Cache-Control', testUtils.cacheRules.year) .end(function (err, res) { res.headers.location.should.eql('/revamped-url/'); doEnd(done)(err, res); }); }); }); describe('3 case', function () { it('with trailing slash', function (done) { request.get('/what/') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/what-does-god-say'); doEnd(done)(err, res); }); }); it('without trailing slash', function (done) { request.get('/what') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/what-does-god-say'); doEnd(done)(err, res); }); }); }); describe('4 case', function () { it('with trailing slash', function (done) { request.get('/search/label/&&&/') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/tag/&&&/'); doEnd(done)(err, res); }); }); it('without trailing slash', function (done) { request.get('/search/label/&&&/') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/tag/&&&/'); doEnd(done)(err, res); }); }); }); describe('5 case', function () { it('with trailing slash', function (done) { request.get('/topic/') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/'); doEnd(done)(err, res); }); }); it('without trailing slash', function (done) { request.get('/topic') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/'); doEnd(done)(err, res); }); }); }); describe('6 
case', function () { it('with trailing slash', function (done) { request.get('/resources/download/') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/shubal-stearns'); doEnd(done)(err, res); }); }); it('without trailing slash', function (done) { request.get('/resources/download') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/shubal-stearns'); doEnd(done)(err, res); }); }); }); describe('7 case', function () { it('with trailing slash', function (done) { request.get('/2016/11/welcome.html') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/welcome'); doEnd(done)(err, res); }); }); }); describe('last case', function () { it('default', function (done) { request.get('/prefix/') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/blog/'); doEnd(done)(err, res); }); }); it('with a custom path', function (done) { request.get('/prefix/expect-redirect') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/blog/expect-redirect'); doEnd(done)(err, res); }); }); }); describe('external url redirect', function () { it('with trailing slash', function (done) { request.get('/external-url/') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('https://ghost.org/'); doEnd(done)(err, res); }); }); it('without trailing slash', function (done) { request.get('/external-url') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('https://ghost.org/'); doEnd(done)(err, res); }); }); it('with capturing group', function (done) { request.get('/external-url/docs') 
.expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('https://ghost.org/docs'); doEnd(done)(err, res); }); }); }); }); describe('Subdirectory redirects (use redirects.json from test/utils/fixtures/data)', function () { var ghostServer; before(function () { configUtils.set('url', 'http://localhost:2370/blog/'); urlUtils.stubUrlUtilsFromConfig(); return ghost({forceStart: true, subdir: true}) .then(function (_ghostServer) { ghostServer = _ghostServer; request = supertest.agent(config.get('server:host') + ':' + config.get('server:port')); }); }); after(function () { configUtils.restore(); urlUtils.restore(); }); describe('internal url redirect', function () { it('should include the subdirectory', function (done) { request.get('/blog/my-old-blog-post/') .expect(301) .expect('Cache-Control', testUtils.cacheRules.year) .end(function (err, res) { res.headers.location.should.eql('/blog/revamped-url/'); doEnd(done)(err, res); }); }); it('should work with regex "from" redirects', function (done) { request.get('/blog/capture1/whatever') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('/blog/whatever'); doEnd(done)(err, res); }); }); }); describe('external url redirect', function () { it('should not include the subdirectory', function (done) { request.get('/blog/external-url/docs') .expect(302) .expect('Cache-Control', testUtils.cacheRules.public) .end(function (err, res) { res.headers.location.should.eql('https://ghost.org/docs'); doEnd(done)(err, res); }); }); }); }); });
JohnONolan/Ghost
test/regression/site/frontend_spec.js
JavaScript
mit
32,924
import { browser, by, element } from 'protractor'; describe('App', () => { beforeEach(() => { // change hash depending on router LocationStrategy browser.get('/#/home'); }); it('should have a title', () => { let subject = browser.getTitle(); let result = 'Chroma An Interactive Palette tool'; expect(subject).toEqual(result); }); it('should have `your content here` x-large', () => { let subject = element(by.css('[x-large]')).getText(); let result = 'Your Content Here'; expect(subject).toEqual(result); }); });
andalex/Chroma
src/app/sidepanel/side-panel.e2e.ts
TypeScript
mit
564
"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon")); var _jsxRuntime = require("react/jsx-runtime"); var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", { d: "M21 5H3c-1.1 0-2 .9-2 2v10c0 1.1.9 2 2 2h18c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm-2 12H5V7h14v10zm-9-1h4c.55 0 1-.45 1-1v-3c0-.55-.45-1-1-1v-1c0-1.11-.9-2-2-2-1.11 0-2 .9-2 2v1c-.55 0-1 .45-1 1v3c0 .55.45 1 1 1zm.8-6c0-.66.54-1.2 1.2-1.2s1.2.54 1.2 1.2v1h-2.4v-1z" }), 'ScreenLockLandscapeOutlined'); exports.default = _default;
oliviertassinari/material-ui
packages/mui-icons-material/lib/ScreenLockLandscapeOutlined.js
JavaScript
mit
742
using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.ComponentModel; using System.Linq; using System.Net; using System.Net.Http; using System.Net.Http.Formatting; using System.Web.Http; using Umbraco.Core; using Umbraco.Core.Logging; using Umbraco.Core.Models; using Umbraco.Core.Models.EntityBase; using Umbraco.Core.Persistence; using Umbraco.Web.Models.Trees; using Umbraco.Web.WebApi.Filters; using umbraco; using umbraco.BusinessLogic.Actions; using System.Globalization; namespace Umbraco.Web.Trees { public abstract class ContentTreeControllerBase : TreeController { #region Actions /// <summary> /// Gets an individual tree node /// </summary> /// <param name="id"></param> /// <param name="queryStrings"></param> /// <returns></returns> [HttpQueryStringFilter("queryStrings")] public TreeNode GetTreeNode(string id, FormDataCollection queryStrings) { int asInt; Guid asGuid = Guid.Empty; if (int.TryParse(id, out asInt) == false) { if (Guid.TryParse(id, out asGuid) == false) { throw new HttpResponseException(Request.CreateResponse(HttpStatusCode.NotFound)); } } var entity = asGuid == Guid.Empty ? 
Services.EntityService.Get(asInt, UmbracoObjectType) : Services.EntityService.GetByKey(asGuid, UmbracoObjectType); if (entity == null) { throw new HttpResponseException(Request.CreateResponse(HttpStatusCode.NotFound)); } var node = GetSingleTreeNode(entity, entity.ParentId.ToInvariantString(), queryStrings); //add the tree alias to the node since it is standalone (has no root for which this normally belongs) node.AdditionalData["treeAlias"] = TreeAlias; return node; } #endregion /// <summary> /// Ensure the noAccess metadata is applied for the root node if in dialog mode and the user doesn't have path access to it /// </summary> /// <param name="queryStrings"></param> /// <returns></returns> protected override TreeNode CreateRootNode(FormDataCollection queryStrings) { var node = base.CreateRootNode(queryStrings); if (IsDialog(queryStrings) && UserStartNodes.Contains(Constants.System.Root) == false) { node.AdditionalData["noAccess"] = true; } return node; } protected abstract TreeNode GetSingleTreeNode(IUmbracoEntity e, string parentId, FormDataCollection queryStrings); /// <summary> /// Returns a <see cref="TreeNode"/> for the <see cref="IUmbracoEntity"/> and /// attaches some meta data to the node if the user doesn't have start node access to it when in dialog mode /// </summary> /// <param name="e"></param> /// <param name="parentId"></param> /// <param name="queryStrings"></param> /// <returns></returns> internal TreeNode GetSingleTreeNodeWithAccessCheck(IUmbracoEntity e, string parentId, FormDataCollection queryStrings) { bool hasPathAccess; var entityIsAncestorOfStartNodes = Security.CurrentUser.IsInBranchOfStartNode(e, Services.EntityService, RecycleBinId, out hasPathAccess); if (entityIsAncestorOfStartNodes == false) return null; var treeNode = GetSingleTreeNode(e, parentId, queryStrings); if (treeNode == null) { //this means that the user has NO access to this node via permissions! 
They at least need to have browse permissions to see //the node so we need to return null; return null; } if (hasPathAccess == false) { treeNode.AdditionalData["noAccess"] = true; } return treeNode; } /// <summary> /// Returns the /// </summary> protected abstract int RecycleBinId { get; } /// <summary> /// Returns true if the recycle bin has items in it /// </summary> protected abstract bool RecycleBinSmells { get; } /// <summary> /// Returns the user's start node for this tree /// </summary> protected abstract int[] UserStartNodes { get; } protected virtual TreeNodeCollection PerformGetTreeNodes(string id, FormDataCollection queryStrings) { var nodes = new TreeNodeCollection(); var rootIdString = Constants.System.Root.ToString(CultureInfo.InvariantCulture); var hasAccessToRoot = UserStartNodes.Contains(Constants.System.Root); var startNodeId = queryStrings.HasKey(TreeQueryStringParameters.StartNodeId) ? queryStrings.GetValue<string>(TreeQueryStringParameters.StartNodeId) : string.Empty; if (string.IsNullOrEmpty(startNodeId) == false && startNodeId != "undefined" && startNodeId != rootIdString) { // request has been made to render from a specific, non-root, start node id = startNodeId; // ensure that the user has access to that node, otherwise return the empty tree nodes collection // TODO: in the future we could return a validation statement so we can have some UI to notify the user they don't have access if (HasPathAccess(id, queryStrings) == false) { LogHelper.Warn<ContentTreeControllerBase>("User " + Security.CurrentUser.Username + " does not have access to node with id " + id); return nodes; } // if the tree is rendered... // - in a dialog: render only the children of the specific start node, nothing to do // - in a section: if the current user's start nodes do not contain the root node, we need // to include these start nodes in the tree too, to provide some context - i.e. 
change // start node back to root node, and then GetChildEntities method will take care of the rest. if (IsDialog(queryStrings) == false && hasAccessToRoot == false) id = rootIdString; } // get child entities - if id is root, but user's start nodes do not contain the // root node, this returns the start nodes instead of root's children var entities = GetChildEntities(id).ToList(); nodes.AddRange(entities.Select(x => GetSingleTreeNodeWithAccessCheck(x, id, queryStrings)).Where(x => x != null)); // if the user does not have access to the root node, what we have is the start nodes, // but to provide some context we also need to add their topmost nodes when they are not // topmost nodes themselves (level > 1). if (id == rootIdString && hasAccessToRoot == false) { var topNodeIds = entities.Where(x => x.Level > 1).Select(GetTopNodeId).Where(x => x != 0).Distinct().ToArray(); if (topNodeIds.Length > 0) { var topNodes = Services.EntityService.GetAll(UmbracoObjectType, topNodeIds.ToArray()); nodes.AddRange(topNodes.Select(x => GetSingleTreeNodeWithAccessCheck(x, id, queryStrings)).Where(x => x != null)); } } return nodes; } private static readonly char[] Comma = { ',' }; private int GetTopNodeId(IUmbracoEntity entity) { int id; var parts = entity.Path.Split(Comma, StringSplitOptions.RemoveEmptyEntries); return parts.Length >= 2 && int.TryParse(parts[1], out id) ? 
id : 0; } protected abstract MenuItemCollection PerformGetMenuForNode(string id, FormDataCollection queryStrings); protected abstract UmbracoObjectTypes UmbracoObjectType { get; } protected IEnumerable<IUmbracoEntity> GetChildEntities(string id) { // try to parse id as an integer else use GetEntityFromId // which will grok Guids, Udis, etc and let use obtain the id if (int.TryParse(id, out var entityId) == false) { var entity = GetEntityFromId(id); if (entity == null) throw new HttpResponseException(HttpStatusCode.NotFound); entityId = entity.Id; } return Services.EntityService.GetChildren(entityId, UmbracoObjectType).ToArray(); } /// <summary> /// Returns true or false if the current user has access to the node based on the user's allowed start node (path) access /// </summary> /// <param name="id"></param> /// <param name="queryStrings"></param> /// <returns></returns> //we should remove this in v8, it's now here for backwards compat only protected abstract bool HasPathAccess(string id, FormDataCollection queryStrings); /// <summary> /// Returns true or false if the current user has access to the node based on the user's allowed start node (path) access /// </summary> /// <param name="entity"></param> /// <param name="queryStrings"></param> /// <returns></returns> protected bool HasPathAccess(IUmbracoEntity entity, FormDataCollection queryStrings) { if (entity == null) return false; return Security.CurrentUser.HasPathAccess(entity, Services.EntityService, RecycleBinId); } /// <summary> /// Ensures the recycle bin is appended when required (i.e. 
user has access to the root and it's not in dialog mode) /// </summary> /// <param name="id"></param> /// <param name="queryStrings"></param> /// <returns></returns> /// <remarks> /// This method is overwritten strictly to render the recycle bin, it should serve no other purpose /// </remarks> protected sealed override TreeNodeCollection GetTreeNodes(string id, FormDataCollection queryStrings) { //check if we're rendering the root if (id == Constants.System.Root.ToInvariantString() && UserStartNodes.Contains(Constants.System.Root)) { var altStartId = string.Empty; if (queryStrings.HasKey(TreeQueryStringParameters.StartNodeId)) altStartId = queryStrings.GetValue<string>(TreeQueryStringParameters.StartNodeId); //check if a request has been made to render from a specific start node if (string.IsNullOrEmpty(altStartId) == false && altStartId != "undefined" && altStartId != Constants.System.Root.ToString(CultureInfo.InvariantCulture)) { id = altStartId; } var nodes = GetTreeNodesInternal(id, queryStrings); //only render the recycle bin if we are not in dialog and the start id id still the root if (IsDialog(queryStrings) == false && id == Constants.System.Root.ToInvariantString()) { nodes.Add(CreateTreeNode( RecycleBinId.ToInvariantString(), id, queryStrings, ui.GetText("general", "recycleBin"), "icon-trash", RecycleBinSmells, queryStrings.GetValue<string>("application") + TreeAlias.EnsureStartsWith('/') + "/recyclebin")); } return nodes; } return GetTreeNodesInternal(id, queryStrings); } /// <summary> /// Before we make a call to get the tree nodes we have to check if they can actually be rendered /// </summary> /// <param name="id"></param> /// <param name="queryStrings"></param> /// <returns></returns> /// <remarks> /// Currently this just checks if it is a container type, if it is we cannot render children. In the future this might check for other things. 
/// </remarks> private TreeNodeCollection GetTreeNodesInternal(string id, FormDataCollection queryStrings) { var current = GetEntityFromId(id); //before we get the children we need to see if this is a container node //test if the parent is a listview / container if (current != null && current.IsContainer()) { //no children! return new TreeNodeCollection(); } return PerformGetTreeNodes(id, queryStrings); } /// <summary> /// Checks if the menu requested is for the recycle bin and renders that, otherwise renders the result of PerformGetMenuForNode /// </summary> /// <param name="id"></param> /// <param name="queryStrings"></param> /// <returns></returns> protected sealed override MenuItemCollection GetMenuForNode(string id, FormDataCollection queryStrings) { if (RecycleBinId.ToInvariantString() == id) { var menu = new MenuItemCollection(); menu.Items.Add<ActionEmptyTranscan>(ui.Text("actions", "emptyTrashcan")); menu.Items.Add<ActionRefresh>(ui.Text("actions", ActionRefresh.Instance.Alias), true); return menu; } return PerformGetMenuForNode(id, queryStrings); } /// <summary> /// Based on the allowed actions, this will filter the ones that the current user is allowed /// </summary> /// <param name="menuWithAllItems"></param> /// <param name="userAllowedMenuItems"></param> /// <returns></returns> protected void FilterUserAllowedMenuItems(MenuItemCollection menuWithAllItems, IEnumerable<MenuItem> userAllowedMenuItems) { var userAllowedActions = userAllowedMenuItems.Where(x => x.Action != null).Select(x => x.Action).ToArray(); var notAllowed = menuWithAllItems.Items.Where( a => (a.Action != null && a.Action.CanBePermissionAssigned && (a.Action.CanBePermissionAssigned == false || userAllowedActions.Contains(a.Action) == false))) .ToArray(); //remove the ones that aren't allowed. 
foreach (var m in notAllowed) { menuWithAllItems.Items.Remove(m); } } internal IEnumerable<MenuItem> GetAllowedUserMenuItemsForNode(IUmbracoEntity dd) { var actions = ActionsResolver.Current.FromActionSymbols(Security.CurrentUser.GetPermissions(dd.Path, Services.UserService)) .ToList(); // A user is allowed to delete their own stuff if (dd.CreatorId == Security.GetUserId() && actions.Contains(ActionDelete.Instance) == false) actions.Add(ActionDelete.Instance); return actions.Select(x => new MenuItem(x)); } /// <summary> /// Determins if the user has access to view the node/document /// </summary> /// <param name="doc">The Document to check permissions against</param> /// <param name="allowedUserOptions">A list of MenuItems that the user has permissions to execute on the current document</param> /// <remarks>By default the user must have Browse permissions to see the node in the Content tree</remarks> /// <returns></returns> internal bool CanUserAccessNode(IUmbracoEntity doc, IEnumerable<MenuItem> allowedUserOptions) { return allowedUserOptions.Select(x => x.Action).OfType<ActionBrowse>().Any(); } /// <summary> /// this will parse the string into either a GUID or INT /// </summary> /// <param name="id"></param> /// <returns></returns> internal Tuple<Guid?, int?> GetIdentifierFromString(string id) { Guid idGuid; int idInt; Udi idUdi; if (Guid.TryParse(id, out idGuid)) { return new Tuple<Guid?, int?>(idGuid, null); } if (int.TryParse(id, out idInt)) { return new Tuple<Guid?, int?>(null, idInt); } if (Udi.TryParse(id, out idUdi)) { var guidUdi = idUdi as GuidUdi; if (guidUdi != null) return new Tuple<Guid?, int?>(guidUdi.Guid, null); } return null; } /// <summary> /// Get an entity via an id that can be either an integer, Guid or UDI /// </summary> /// <param name="id"></param> /// <returns></returns> /// <remarks> /// This object has it's own contextual cache for these lookups /// </remarks> internal IUmbracoEntity GetEntityFromId(string id) { return 
_entityCache.GetOrAdd(id, s => { IUmbracoEntity entity; Guid idGuid; int idInt; Udi idUdi; if (Guid.TryParse(s, out idGuid)) { entity = Services.EntityService.GetByKey(idGuid, UmbracoObjectType); } else if (int.TryParse(s, out idInt)) { entity = Services.EntityService.Get(idInt, UmbracoObjectType); } else if (Udi.TryParse(s, out idUdi)) { var guidUdi = idUdi as GuidUdi; entity = guidUdi != null ? Services.EntityService.GetByKey(guidUdi.Guid, UmbracoObjectType) : null; } else { return null; } return entity; }); } private readonly ConcurrentDictionary<string, IUmbracoEntity> _entityCache = new ConcurrentDictionary<string, IUmbracoEntity>(); } }
base33/Umbraco-CMS
src/Umbraco.Web/Trees/ContentTreeControllerBase.cs
C#
mit
18,998
/*! * iScroll v4.1.8 ~ Copyright (c) 2011 Matteo Spinelli, http://cubiq.org * Released under MIT license, http://cubiq.org/license */ (function(){ var m = Math, vendor = (/webkit/i).test(navigator.appVersion) ? 'webkit' : (/firefox/i).test(navigator.userAgent) ? 'Moz' : 'opera' in window ? 'O' : '', // Browser capabilities has3d = 'WebKitCSSMatrix' in window && 'm11' in new WebKitCSSMatrix(), hasTouch = 'ontouchstart' in window, hasTransform = vendor + 'Transform' in document.documentElement.style, isAndroid = (/android/gi).test(navigator.appVersion), isIDevice = (/iphone|ipad/gi).test(navigator.appVersion), isPlaybook = (/playbook/gi).test(navigator.appVersion), hasTransitionEnd = isIDevice || isPlaybook, nextFrame = (function() { return window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.oRequestAnimationFrame || window.msRequestAnimationFrame || function(callback) { return setTimeout(callback, 1); } })(), cancelFrame = (function () { return window.cancelRequestAnimationFrame || window.webkitCancelRequestAnimationFrame || window.mozCancelRequestAnimationFrame || window.oCancelRequestAnimationFrame || window.msCancelRequestAnimationFrame || clearTimeout })(), // Events RESIZE_EV = 'onorientationchange' in window ? 'orientationchange' : 'resize', START_EV = hasTouch ? 'touchstart' : 'mousedown', MOVE_EV = hasTouch ? 'touchmove' : 'mousemove', END_EV = hasTouch ? 'touchend' : 'mouseup', CANCEL_EV = hasTouch ? 'touchcancel' : 'mouseup', WHEEL_EV = vendor == 'Moz' ? 'DOMMouseScroll' : 'mousewheel', // Helpers trnOpen = 'translate' + (has3d ? '3d(' : '('), trnClose = has3d ? ',0)' : ')', // Constructor iScroll = function (el, options) { var that = this, doc = document, i; that.wrapper = typeof el == 'object' ? 
el : doc.getElementById(el); that.wrapper.style.overflow = 'hidden'; that.scroller = that.wrapper.children[0]; // Default options that.options = { hScroll: true, vScroll: true, bounce: true, bounceLock: false, momentum: true, lockDirection: true, useTransform: true, useTransition: false, topOffset: 0, checkDOMChanges: false, // Experimental // Scrollbar hScrollbar: true, vScrollbar: true, fixedScrollbar: isAndroid, hideScrollbar: isIDevice, fadeScrollbar: isIDevice && has3d, scrollbarClass: '', // Zoom zoom: false, zoomMin: 1, zoomMax: 4, doubleTapZoom: 2, wheelAction: 'scroll', // Snap snap: false, snapThreshold: 1, // Events onRefresh: null, onBeforeScrollStart: function (e) { e.preventDefault(); }, onScrollStart: null, onBeforeScrollMove: null, onScrollMove: null, onBeforeScrollEnd: null, onScrollEnd: null, onTouchEnd: null, onDestroy: null, onZoomStart: null, onZoom: null, onZoomEnd: null, // Added by Lissa scrollOffsetLeft: 0, scrollOffsetTop: 0 }; // User defined options for (i in options) that.options[i] = options[i]; // Normalize options that.options.useTransform = hasTransform ? that.options.useTransform : false; that.options.hScrollbar = that.options.hScroll && that.options.hScrollbar; that.options.vScrollbar = that.options.vScroll && that.options.vScrollbar; that.options.zoom = that.options.useTransform && that.options.zoom; that.options.useTransition = hasTransitionEnd && that.options.useTransition; // Set some default styles that.scroller.style[vendor + 'TransitionProperty'] = that.options.useTransform ? 
'-' + vendor.toLowerCase() + '-transform' : 'top left'; that.scroller.style[vendor + 'TransitionDuration'] = '0'; that.scroller.style[vendor + 'TransformOrigin'] = '0 0'; if (that.options.useTransition) that.scroller.style[vendor + 'TransitionTimingFunction'] = 'cubic-bezier(0.33,0.66,0.66,1)'; if (that.options.useTransform) that.scroller.style[vendor + 'Transform'] = trnOpen + '0,0' + trnClose; else that.scroller.style.cssText += ';position:absolute;top:0;left:0'; if (that.options.useTransition) that.options.fixedScrollbar = true; that.refresh(); that._bind(RESIZE_EV, window); that._bind(START_EV); if (!hasTouch) { that._bind('mouseout', that.wrapper); that._bind(WHEEL_EV); } if (that.options.checkDOMChanges) that.checkDOMTime = setInterval(function () { that._checkDOMChanges(); }, 500); }; // Prototype iScroll.prototype = { enabled: true, x: 0, y: 0, steps: [], scale: 1, currPageX: 0, currPageY: 0, pagesX: [], pagesY: [], aniTime: null, wheelZoomCount: 0, handleEvent: function (e) { var that = this; switch(e.type) { case START_EV: if (!hasTouch && e.button !== 0) return; that._start(e); break; case MOVE_EV: that._move(e); break; case END_EV: case CANCEL_EV: that._end(e); break; case RESIZE_EV: that._resize(); break; case WHEEL_EV: that._wheel(e); break; case 'mouseout': that._mouseout(e); break; case 'webkitTransitionEnd': that._transitionEnd(e); break; } }, _checkDOMChanges: function () { if (this.moved || this.zoomed || this.animating || (this.scrollerW == this.scroller.offsetWidth * this.scale && this.scrollerH == this.scroller.offsetHeight * this.scale)) return; this.refresh(); }, _scrollbar: function (dir) { var that = this, doc = document, bar; if (!that[dir + 'Scrollbar']) { if (that[dir + 'ScrollbarWrapper']) { if (hasTransform) that[dir + 'ScrollbarIndicator'].style[vendor + 'Transform'] = ''; that[dir + 'ScrollbarWrapper'].parentNode.removeChild(that[dir + 'ScrollbarWrapper']); that[dir + 'ScrollbarWrapper'] = null; that[dir + 'ScrollbarIndicator'] = 
null; } return; } if (!that[dir + 'ScrollbarWrapper']) { // Create the scrollbar wrapper bar = doc.createElement('div'); if (that.options.scrollbarClass) bar.className = that.options.scrollbarClass + dir.toUpperCase(); else bar.style.cssText = 'position:absolute;z-index:100;' + (dir == 'h' ? 'height:7px;bottom:1px;left:2px;right:' + (that.vScrollbar ? '7' : '2') + 'px' : 'width:7px;bottom:' + (that.hScrollbar ? '7' : '2') + 'px;top:2px;right:1px'); bar.style.cssText += ';pointer-events:none;-' + vendor + '-transition-property:opacity;-' + vendor + '-transition-duration:' + (that.options.fadeScrollbar ? '350ms' : '0') + ';overflow:hidden;opacity:' + (that.options.hideScrollbar ? '0' : '1'); that.wrapper.appendChild(bar); that[dir + 'ScrollbarWrapper'] = bar; // Create the scrollbar indicator bar = doc.createElement('div'); if (!that.options.scrollbarClass) { bar.style.cssText = 'position:absolute;z-index:100;background:rgba(0,0,0,0.5);border:1px solid rgba(255,255,255,0.9);-' + vendor + '-background-clip:padding-box;-' + vendor + '-box-sizing:border-box;' + (dir == 'h' ? 
'height:100%' : 'width:100%') + ';-' + vendor + '-border-radius:3px;border-radius:3px'; } bar.style.cssText += ';pointer-events:none;-' + vendor + '-transition-property:-' + vendor + '-transform;-' + vendor + '-transition-timing-function:cubic-bezier(0.33,0.66,0.66,1);-' + vendor + '-transition-duration:0;-' + vendor + '-transform:' + trnOpen + '0,0' + trnClose; if (that.options.useTransition) bar.style.cssText += ';-' + vendor + '-transition-timing-function:cubic-bezier(0.33,0.66,0.66,1)'; that[dir + 'ScrollbarWrapper'].appendChild(bar); that[dir + 'ScrollbarIndicator'] = bar; } if (dir == 'h') { that.hScrollbarSize = that.hScrollbarWrapper.clientWidth; that.hScrollbarIndicatorSize = m.max(m.round(that.hScrollbarSize * that.hScrollbarSize / that.scrollerW), 8); that.hScrollbarIndicator.style.width = that.hScrollbarIndicatorSize + 'px'; that.hScrollbarMaxScroll = that.hScrollbarSize - that.hScrollbarIndicatorSize; that.hScrollbarProp = that.hScrollbarMaxScroll / that.maxScrollX; } else { that.vScrollbarSize = that.vScrollbarWrapper.clientHeight; that.vScrollbarIndicatorSize = m.max(m.round(that.vScrollbarSize * that.vScrollbarSize / that.scrollerH), 8); that.vScrollbarIndicator.style.height = that.vScrollbarIndicatorSize + 'px'; that.vScrollbarMaxScroll = that.vScrollbarSize - that.vScrollbarIndicatorSize; that.vScrollbarProp = that.vScrollbarMaxScroll / that.maxScrollY; } // Reset position that._scrollbarPos(dir, true); }, _resize: function () { var that = this; setTimeout(function () { that.refresh(); }, isAndroid ? 200 : 0); }, _pos: function (x, y) { x = this.hScroll ? x : 0; y = this.vScroll ? 
y : 0; if (this.options.useTransform) { this.scroller.style[vendor + 'Transform'] = trnOpen + x + 'px,' + y + 'px' + trnClose + ' scale(' + this.scale + ')'; } else { x = m.round(x); y = m.round(y); this.scroller.style.left = x + 'px'; this.scroller.style.top = y + 'px'; } this.x = x; this.y = y; this._scrollbarPos('h'); this._scrollbarPos('v'); }, _scrollbarPos: function (dir, hidden) { var that = this, pos = dir == 'h' ? that.x : that.y, size; if (!that[dir + 'Scrollbar']) return; pos = that[dir + 'ScrollbarProp'] * pos; if (pos < 0) { if (!that.options.fixedScrollbar) { size = that[dir + 'ScrollbarIndicatorSize'] + m.round(pos * 3); if (size < 8) size = 8; that[dir + 'ScrollbarIndicator'].style[dir == 'h' ? 'width' : 'height'] = size + 'px'; } pos = 0; } else if (pos > that[dir + 'ScrollbarMaxScroll']) { if (!that.options.fixedScrollbar) { size = that[dir + 'ScrollbarIndicatorSize'] - m.round((pos - that[dir + 'ScrollbarMaxScroll']) * 3); if (size < 8) size = 8; that[dir + 'ScrollbarIndicator'].style[dir == 'h' ? 'width' : 'height'] = size + 'px'; pos = that[dir + 'ScrollbarMaxScroll'] + (that[dir + 'ScrollbarIndicatorSize'] - size); } else { pos = that[dir + 'ScrollbarMaxScroll']; } } that[dir + 'ScrollbarWrapper'].style[vendor + 'TransitionDelay'] = '0'; that[dir + 'ScrollbarWrapper'].style.opacity = hidden && that.options.hideScrollbar ? '0' : '1'; that[dir + 'ScrollbarIndicator'].style[vendor + 'Transform'] = trnOpen + (dir == 'h' ? pos + 'px,0' : '0,' + pos + 'px') + trnClose; }, _start: function (e) { var that = this, point = hasTouch ? 
e.touches[0] : e, matrix, x, y, c1, c2; if (!that.enabled) return; if (that.options.onBeforeScrollStart) that.options.onBeforeScrollStart.call(that, e); if (that.options.useTransition || that.options.zoom) that._transitionTime(0); that.moved = false; that.animating = false; that.zoomed = false; that.distX = 0; that.distY = 0; that.absDistX = 0; that.absDistY = 0; that.dirX = 0; that.dirY = 0; // Gesture start if (that.options.zoom && hasTouch && e.touches.length > 1) { c1 = m.abs(e.touches[0].pageX-e.touches[1].pageX); c2 = m.abs(e.touches[0].pageY-e.touches[1].pageY); that.touchesDistStart = m.sqrt(c1 * c1 + c2 * c2); that.originX = m.abs(e.touches[0].pageX + e.touches[1].pageX - that.wrapperOffsetLeft * 2) / 2 - that.x; that.originY = m.abs(e.touches[0].pageY + e.touches[1].pageY - that.wrapperOffsetTop * 2) / 2 - that.y; if (that.options.onZoomStart) that.options.onZoomStart.call(that, e); } if (that.options.momentum) { if (that.options.useTransform) { // Very lame general purpose alternative to CSSMatrix matrix = getComputedStyle(that.scroller, null)[vendor + 'Transform'].replace(/[^0-9-.,]/g, '').split(','); x = matrix[4] * 1; y = matrix[5] * 1; } else { x = getComputedStyle(that.scroller, null).left.replace(/[^0-9-]/g, '') * 1; y = getComputedStyle(that.scroller, null).top.replace(/[^0-9-]/g, '') * 1; } if (x != that.x || y != that.y) { if (that.options.useTransition) that._unbind('webkitTransitionEnd'); else cancelFrame(that.aniTime); that.steps = []; that._pos(x, y); } } that.absStartX = that.x; // Needed by snap threshold that.absStartY = that.y; that.startX = that.x; that.startY = that.y; that.pointX = point.pageX; that.pointY = point.pageY; that.startTime = e.timeStamp || (new Date()).getTime(); if (that.options.onScrollStart) that.options.onScrollStart.call(that, e); that._bind(MOVE_EV); that._bind(END_EV); that._bind(CANCEL_EV); }, _move: function (e) { var that = this, point = hasTouch ? 
e.touches[0] : e, deltaX = point.pageX - that.pointX, deltaY = point.pageY - that.pointY, newX = that.x + deltaX, newY = that.y + deltaY, c1, c2, scale, timestamp = e.timeStamp || (new Date()).getTime(); if (that.options.onBeforeScrollMove) that.options.onBeforeScrollMove.call(that, e); // Zoom if (that.options.zoom && hasTouch && e.touches.length > 1) { c1 = m.abs(e.touches[0].pageX - e.touches[1].pageX); c2 = m.abs(e.touches[0].pageY - e.touches[1].pageY); that.touchesDist = m.sqrt(c1*c1+c2*c2); that.zoomed = true; scale = 1 / that.touchesDistStart * that.touchesDist * this.scale; if (scale < that.options.zoomMin) scale = 0.5 * that.options.zoomMin * Math.pow(2.0, scale / that.options.zoomMin); else if (scale > that.options.zoomMax) scale = 2.0 * that.options.zoomMax * Math.pow(0.5, that.options.zoomMax / scale); that.lastScale = scale / this.scale; newX = this.originX - this.originX * that.lastScale + this.x, newY = this.originY - this.originY * that.lastScale + this.y; this.scroller.style[vendor + 'Transform'] = trnOpen + newX + 'px,' + newY + 'px' + trnClose + ' scale(' + scale + ')'; if (that.options.onZoom) that.options.onZoom.call(that, e); return; } that.pointX = point.pageX; that.pointY = point.pageY; // Slow down if outside of the boundaries if (newX > 0 || newX < that.maxScrollX) { newX = that.options.bounce ? that.x + (deltaX / 2) : newX >= 0 || that.maxScrollX >= 0 ? 0 : that.maxScrollX; } if (newY > that.minScrollY || newY < that.maxScrollY) { newY = that.options.bounce ? that.y + (deltaY / 2) : newY >= that.minScrollY || that.maxScrollY >= 0 ? 
that.minScrollY : that.maxScrollY; } if (that.absDistX < 6 && that.absDistY < 6) { that.distX += deltaX; that.distY += deltaY; that.absDistX = m.abs(that.distX); that.absDistY = m.abs(that.distY); return; } // Lock direction if (that.options.lockDirection) { if (that.absDistX > that.absDistY + 5) { newY = that.y; deltaY = 0; } else if (that.absDistY > that.absDistX + 5) { newX = that.x; deltaX = 0; } } that.moved = true; that._pos(newX, newY); that.dirX = deltaX > 0 ? -1 : deltaX < 0 ? 1 : 0; that.dirY = deltaY > 0 ? -1 : deltaY < 0 ? 1 : 0; if (timestamp - that.startTime > 300) { that.startTime = timestamp; that.startX = that.x; that.startY = that.y; } if (that.options.onScrollMove) that.options.onScrollMove.call(that, e); }, _end: function (e) { if (hasTouch && e.touches.length != 0) return; var that = this, point = hasTouch ? e.changedTouches[0] : e, target, ev, momentumX = { dist:0, time:0 }, momentumY = { dist:0, time:0 }, duration = (e.timeStamp || (new Date()).getTime()) - that.startTime, newPosX = that.x, newPosY = that.y, distX, distY, newDuration, snap, scale; that._unbind(MOVE_EV); that._unbind(END_EV); that._unbind(CANCEL_EV); if (that.options.onBeforeScrollEnd) that.options.onBeforeScrollEnd.call(that, e); if (that.zoomed) { scale = that.scale * that.lastScale; scale = Math.max(that.options.zoomMin, scale); scale = Math.min(that.options.zoomMax, scale); that.lastScale = scale / that.scale; that.scale = scale; that.x = that.originX - that.originX * that.lastScale + that.x; that.y = that.originY - that.originY * that.lastScale + that.y; that.scroller.style[vendor + 'TransitionDuration'] = '200ms'; that.scroller.style[vendor + 'Transform'] = trnOpen + that.x + 'px,' + that.y + 'px' + trnClose + ' scale(' + that.scale + ')'; that.zoomed = false; that.refresh(); if (that.options.onZoomEnd) that.options.onZoomEnd.call(that, e); return; } if (!that.moved) { if (hasTouch) { if (that.doubleTapTimer && that.options.zoom) { // Double tapped 
clearTimeout(that.doubleTapTimer); that.doubleTapTimer = null; if (that.options.onZoomStart) that.options.onZoomStart.call(that, e); that.zoom(that.pointX, that.pointY, that.scale == 1 ? that.options.doubleTapZoom : 1); if (that.options.onZoomEnd) { setTimeout(function() { that.options.onZoomEnd.call(that, e); }, 200); // 200 is default zoom duration } } else { that.doubleTapTimer = setTimeout(function () { that.doubleTapTimer = null; // Find the last touched element target = point.target; while (target.nodeType != 1) target = target.parentNode; if (target.tagName != 'SELECT' && target.tagName != 'INPUT' && target.tagName != 'TEXTAREA') { ev = document.createEvent('MouseEvents'); ev.initMouseEvent('click', true, true, e.view, 1, point.screenX, point.screenY, point.clientX, point.clientY, e.ctrlKey, e.altKey, e.shiftKey, e.metaKey, 0, null); ev._fake = true; target.dispatchEvent(ev); } }, that.options.zoom ? 250 : 0); } } that._resetPos(200); if (that.options.onTouchEnd) that.options.onTouchEnd.call(that, e); return; } if (duration < 300 && that.options.momentum) { momentumX = newPosX ? that._momentum(newPosX - that.startX, duration, -that.x, that.scrollerW - that.wrapperW + that.x, that.options.bounce ? that.wrapperW : 0) : momentumX; momentumY = newPosY ? that._momentum(newPosY - that.startY, duration, -that.y, (that.maxScrollY < 0 ? that.scrollerH - that.wrapperH + that.y - that.minScrollY : 0), that.options.bounce ? that.wrapperH : 0) : momentumY; newPosX = that.x + momentumX.dist; newPosY = that.y + momentumY.dist; if ((that.x > 0 && newPosX > 0) || (that.x < that.maxScrollX && newPosX < that.maxScrollX)) momentumX = { dist:0, time:0 }; if ((that.y > that.minScrollY && newPosY > that.minScrollY) || (that.y < that.maxScrollY && newPosY < that.maxScrollY)) momentumY = { dist:0, time:0 }; } if (momentumX.dist || momentumY.dist) { newDuration = m.max(m.max(momentumX.time, momentumY.time), 10); // Do we need to snap? 
if (that.options.snap) { distX = newPosX - that.absStartX; distY = newPosY - that.absStartY; if (m.abs(distX) < that.options.snapThreshold && m.abs(distY) < that.options.snapThreshold) { that.scrollTo(that.absStartX, that.absStartY, 200); } else { snap = that._snap(newPosX, newPosY); newPosX = snap.x; newPosY = snap.y; newDuration = m.max(snap.time, newDuration); } } that.scrollTo(newPosX, newPosY, newDuration); if (that.options.onTouchEnd) that.options.onTouchEnd.call(that, e); return; } // Do we need to snap? if (that.options.snap) { distX = newPosX - that.absStartX; distY = newPosY - that.absStartY; if (m.abs(distX) < that.options.snapThreshold && m.abs(distY) < that.options.snapThreshold) that.scrollTo(that.absStartX, that.absStartY, 200); else { snap = that._snap(that.x, that.y); if (snap.x != that.x || snap.y != that.y) that.scrollTo(snap.x, snap.y, snap.time); } if (that.options.onTouchEnd) that.options.onTouchEnd.call(that, e); return; } that._resetPos(200); if (that.options.onTouchEnd) that.options.onTouchEnd.call(that, e); }, _resetPos: function (time) { var that = this, resetX = that.x >= 0 ? 0 : that.x < that.maxScrollX ? that.maxScrollX : that.x, resetY = that.y >= that.minScrollY || that.maxScrollY > 0 ? that.minScrollY : that.y < that.maxScrollY ? 
that.maxScrollY : that.y; if (resetX == that.x && resetY == that.y) { if (that.moved) { that.moved = false; if (that.options.onScrollEnd) that.options.onScrollEnd.call(that); // Execute custom code on scroll end } /* if (that.hScrollbar && that.options.hideScrollbar) { if (vendor == 'webkit') that.hScrollbarWrapper.style[vendor + 'TransitionDelay'] = '300ms'; that.hScrollbarWrapper.style.opacity = '0'; } if (that.vScrollbar && that.options.hideScrollbar) { if (vendor == 'webkit') that.vScrollbarWrapper.style[vendor + 'TransitionDelay'] = '300ms'; that.vScrollbarWrapper.style.opacity = '0'; } */ return; } that.scrollTo(resetX, resetY, time || 0); }, _wheel: function (e) { var that = this, wheelDeltaX, wheelDeltaY, deltaX, deltaY, deltaScale; if ('wheelDeltaX' in e) { wheelDeltaX = e.wheelDeltaX / 12; wheelDeltaY = e.wheelDeltaY / 12; } else if ('detail' in e) { wheelDeltaX = wheelDeltaY = -e.detail * 3; } else { wheelDeltaX = wheelDeltaY = -e.wheelDelta; } if (that.options.wheelAction == 'zoom') { deltaScale = that.scale * Math.pow(2, 1/3 * (wheelDeltaY ? 
wheelDeltaY / Math.abs(wheelDeltaY) : 0)); if (deltaScale < that.options.zoomMin) deltaScale = that.options.zoomMin; if (deltaScale > that.options.zoomMax) deltaScale = that.options.zoomMax; if (deltaScale != that.scale) { if (!that.wheelZoomCount && that.options.onZoomStart) that.options.onZoomStart.call(that, e); that.wheelZoomCount++; that.zoom(e.pageX, e.pageY, deltaScale, 400); setTimeout(function() { that.wheelZoomCount--; if (!that.wheelZoomCount && that.options.onZoomEnd) that.options.onZoomEnd.call(that, e); }, 400); } return; } deltaX = that.x + wheelDeltaX; deltaY = that.y + wheelDeltaY; if (deltaX > 0) deltaX = 0; else if (deltaX < that.maxScrollX) deltaX = that.maxScrollX; if (deltaY > that.minScrollY) deltaY = that.minScrollY; else if (deltaY < that.maxScrollY) deltaY = that.maxScrollY; that.scrollTo(deltaX, deltaY, 0); }, _mouseout: function (e) { var t = e.relatedTarget; if (!t) { this._end(e); return; } while (t = t.parentNode) if (t == this.wrapper) return; this._end(e); }, _transitionEnd: function (e) { var that = this; if (e.target != that.scroller) return; that._unbind('webkitTransitionEnd'); that._startAni(); }, /** * * Utilities * */ _startAni: function () { var that = this, startX = that.x, startY = that.y, startTime = (new Date).getTime(), step, easeOut; if (that.animating) return; if (!that.steps.length) { that._resetPos(400); return; } step = that.steps.shift(); if (step.x == startX && step.y == startY) step.time = 0; that.animating = true; that.moved = true; if (that.options.useTransition) { that._transitionTime(step.time); that._pos(step.x, step.y); that.animating = false; if (step.time) that._bind('webkitTransitionEnd'); else that._resetPos(0); return; } (function animate () { var now = (new Date).getTime(), newX, newY; if (now >= startTime + step.time) { that._pos(step.x, step.y); that.animating = false; if (that.options.onAnimationEnd) that.options.onAnimationEnd.call(that); // Execute custom code on animation end that._startAni(); 
return; } now = (now - startTime) / step.time - 1; easeOut = m.sqrt(1 - now * now); newX = (step.x - startX) * easeOut + startX; newY = (step.y - startY) * easeOut + startY; that._pos(newX, newY); if (that.animating) that.aniTime = nextFrame(animate); })(); }, _transitionTime: function (time) { time += 'ms'; this.scroller.style[vendor + 'TransitionDuration'] = time; if (this.hScrollbar) this.hScrollbarIndicator.style[vendor + 'TransitionDuration'] = time; if (this.vScrollbar) this.vScrollbarIndicator.style[vendor + 'TransitionDuration'] = time; }, _momentum: function (dist, time, maxDistUpper, maxDistLower, size) { var deceleration = 0.0006, speed = m.abs(dist) / time, newDist = (speed * speed) / (2 * deceleration), newTime = 0, outsideDist = 0; // Proportinally reduce speed if we are outside of the boundaries if (dist > 0 && newDist > maxDistUpper) { outsideDist = size / (6 / (newDist / speed * deceleration)); maxDistUpper = maxDistUpper + outsideDist; speed = speed * maxDistUpper / newDist; newDist = maxDistUpper; } else if (dist < 0 && newDist > maxDistLower) { outsideDist = size / (6 / (newDist / speed * deceleration)); maxDistLower = maxDistLower + outsideDist; speed = speed * maxDistLower / newDist; newDist = maxDistLower; } newDist = newDist * (dist < 0 ? -1 : 1); newTime = speed / deceleration; return { dist: newDist, time: m.round(newTime) }; }, _offset: function (el) { var left = -el.offsetLeft, top = -el.offsetTop; while (el = el.offsetParent) { left -= el.offsetLeft; top -= el.offsetTop; } if (el != this.wrapper) { left *= this.scale; top *= this.scale; } return { left: left, top: top }; }, _snap: function (x, y) { var that = this, i, l, page, time, sizeX, sizeY; // Check page X page = that.pagesX.length - 1; for (i=0, l=that.pagesX.length; i<l; i++) { if (x >= that.pagesX[i]) { page = i; break; } } if (page == that.currPageX && page > 0 && that.dirX < 0) page--; x = that.pagesX[page]; sizeX = m.abs(x - that.pagesX[that.currPageX]); sizeX = sizeX ? 
m.abs(that.x - x) / sizeX * 500 : 0; that.currPageX = page; // Check page Y page = that.pagesY.length-1; for (i=0; i<page; i++) { if (y >= that.pagesY[i]) { page = i; break; } } if (page == that.currPageY && page > 0 && that.dirY < 0) page--; y = that.pagesY[page]; sizeY = m.abs(y - that.pagesY[that.currPageY]); sizeY = sizeY ? m.abs(that.y - y) / sizeY * 500 : 0; that.currPageY = page; // Snap with constant speed (proportional duration) time = m.round(m.max(sizeX, sizeY)) || 200; return { x: x, y: y, time: time }; }, _bind: function (type, el, bubble) { (el || this.scroller).addEventListener(type, this, !!bubble); }, _unbind: function (type, el, bubble) { (el || this.scroller).removeEventListener(type, this, !!bubble); }, /** * * Public methods * */ destroy: function () { var that = this; that.scroller.style[vendor + 'Transform'] = ''; // Remove the scrollbars that.hScrollbar = false; that.vScrollbar = false; that._scrollbar('h'); that._scrollbar('v'); // Remove the event listeners that._unbind(RESIZE_EV, window); that._unbind(START_EV); that._unbind(MOVE_EV); that._unbind(END_EV); that._unbind(CANCEL_EV); if (that.options.hasTouch) { that._unbind('mouseout', that.wrapper); that._unbind(WHEEL_EV); } if (that.options.useTransition) that._unbind('webkitTransitionEnd'); if (that.options.checkDOMChanges) clearInterval(that.checkDOMTime); if (that.options.onDestroy) that.options.onDestroy.call(that); }, refresh: function () { var that = this, offset, i, l, els, pos = 0, page = 0; if (that.scale < that.options.zoomMin) that.scale = that.options.zoomMin; that.wrapperW = that.wrapper.clientWidth || 1; that.wrapperH = that.wrapper.clientHeight || 1; that.minScrollY = -that.options.topOffset || 0; that.scrollerW = m.round(that.scroller.offsetWidth * that.scale); that.scrollerH = m.round((that.scroller.offsetHeight + that.minScrollY) * that.scale); that.maxScrollX = that.wrapperW - that.scrollerW; that.maxScrollY = that.wrapperH - that.scrollerH + that.minScrollY; that.dirX 
= 0; that.dirY = 0; if (that.options.onRefresh) that.options.onRefresh.call(that); that.hScroll = that.options.hScroll && that.maxScrollX < 0; that.vScroll = that.options.vScroll && (!that.options.bounceLock && !that.hScroll || that.scrollerH > that.wrapperH); that.hScrollbar = that.hScroll && that.options.hScrollbar; that.vScrollbar = that.vScroll && that.options.vScrollbar && that.scrollerH > that.wrapperH; offset = that._offset(that.wrapper); that.wrapperOffsetLeft = -offset.left; that.wrapperOffsetTop = -offset.top; // Prepare snap if (typeof that.options.snap == 'string') { that.pagesX = []; that.pagesY = []; els = that.scroller.querySelectorAll(that.options.snap); for (i=0, l=els.length; i<l; i++) { pos = that._offset(els[i]); pos.left += that.wrapperOffsetLeft; pos.top += that.wrapperOffsetTop; that.pagesX[i] = pos.left < that.maxScrollX ? that.maxScrollX : pos.left * that.scale; that.pagesY[i] = pos.top < that.maxScrollY ? that.maxScrollY : pos.top * that.scale; } } else if (that.options.snap) { that.pagesX = []; while (pos >= that.maxScrollX) { that.pagesX[page] = pos; pos = pos - that.wrapperW; page++; } if (that.maxScrollX%that.wrapperW) that.pagesX[that.pagesX.length] = that.maxScrollX - that.pagesX[that.pagesX.length-1] + that.pagesX[that.pagesX.length-1]; pos = 0; page = 0; that.pagesY = []; while (pos >= that.maxScrollY) { that.pagesY[page] = pos; pos = pos - that.wrapperH; page++; } if (that.maxScrollY%that.wrapperH) that.pagesY[that.pagesY.length] = that.maxScrollY - that.pagesY[that.pagesY.length-1] + that.pagesY[that.pagesY.length-1]; } // Prepare the scrollbars that._scrollbar('h'); that._scrollbar('v'); if (!that.zoomed) { that.scroller.style[vendor + 'TransitionDuration'] = '0'; that._resetPos(200); } }, scrollTo: function (x, y, time, relative) { var that = this, step = x, i, l; that.stop(); if (!step.length) step = [{ x: x, y: y, time: time, relative: relative }]; for (i=0, l=step.length; i<l; i++) { if (step[i].relative) { step[i].x = 
that.x - step[i].x; step[i].y = that.y - step[i].y; } that.steps.push({ x: step[i].x, y: step[i].y, time: step[i].time || 0 }); } that._startAni(); }, scrollToElement: function (el, time) { var that = this, pos; el = el.nodeType ? el : that.scroller.querySelector(el); if (!el) return; pos = that._offset(el); pos.left += that.wrapperOffsetLeft; pos.top += that.wrapperOffsetTop; pos.left = pos.left > 0 ? 0 : pos.left < that.maxScrollX ? that.maxScrollX : pos.left; pos.top = pos.top > that.minScrollY ? that.minScrollY : pos.top < that.maxScrollY ? that.maxScrollY : pos.top; time = time === undefined ? m.max(m.abs(pos.left)*2, m.abs(pos.top)*2) : time; // Added for scroll offset by Lissa pos.left -= that.options.scrollOffsetLeft; pos.top -= that.options.scrollOffsetTop; that.scrollTo(pos.left, pos.top, time); }, scrollToPage: function (pageX, pageY, time) { var that = this, x, y; if (that.options.snap) { pageX = pageX == 'next' ? that.currPageX+1 : pageX == 'prev' ? that.currPageX-1 : pageX; pageY = pageY == 'next' ? that.currPageY+1 : pageY == 'prev' ? that.currPageY-1 : pageY; pageX = pageX < 0 ? 0 : pageX > that.pagesX.length-1 ? that.pagesX.length-1 : pageX; pageY = pageY < 0 ? 0 : pageY > that.pagesY.length-1 ? 
that.pagesY.length-1 : pageY; that.currPageX = pageX; that.currPageY = pageY; x = that.pagesX[pageX]; y = that.pagesY[pageY]; } else { x = -that.wrapperW * pageX; y = -that.wrapperH * pageY; if (x < that.maxScrollX) x = that.maxScrollX; if (y < that.maxScrollY) y = that.maxScrollY; } that.scrollTo(x, y, time || 400); }, disable: function () { this.stop(); this._resetPos(0); this.enabled = false; // If disabled after touchstart we make sure that there are no left over events this._unbind(MOVE_EV); this._unbind(END_EV); this._unbind(CANCEL_EV); }, enable: function () { this.enabled = true; }, stop: function () { if (this.options.useTransition) this._unbind('webkitTransitionEnd'); else cancelFrame(this.aniTime); this.steps = []; this.moved = false; this.animating = false; }, zoom: function (x, y, scale, time) { var that = this, relScale = scale / that.scale; if (!that.options.useTransform) return; that.zoomed = true; time = time === undefined ? 200 : time; x = x - that.wrapperOffsetLeft - that.x; y = y - that.wrapperOffsetTop - that.y; that.x = x - x * relScale + that.x; that.y = y - y * relScale + that.y; that.scale = scale; that.refresh(); that.x = that.x > 0 ? 0 : that.x < that.maxScrollX ? that.maxScrollX : that.x; that.y = that.y > that.minScrollY ? that.minScrollY : that.y < that.maxScrollY ? that.maxScrollY : that.y; that.scroller.style[vendor + 'TransitionDuration'] = time + 'ms'; that.scroller.style[vendor + 'Transform'] = trnOpen + that.x + 'px,' + that.y + 'px' + trnClose + ' scale(' + scale + ')'; that.zoomed = false; }, isReady: function () { return !this.moved && !this.zoomed && !this.animating; } }; if (typeof exports !== 'undefined') exports.iScroll = iScroll; else window.iScroll = iScroll; })();
brunokoga/pathfinder-markdown
prd_original/include/iscroll.js
JavaScript
mit
32,876
<?php /** * Part of the Fuel framework. * * @package Fuel * @version 1.6 * @author Fuel Development Team * @license MIT License * @copyright 2010 - 2013 Fuel Development Team * @link http://fuelphp.com */ namespace Fuel\Core; /** * The Arr class provides a few nice functions for making * dealing with arrays easier * * @package Fuel * @subpackage Core */ class Arr { /** * Gets a dot-notated key from an array, with a default value if it does * not exist. * * @param array $array The search array * @param mixed $key The dot-notated key or array of keys * @param string $default The default value * @return mixed */ public static function get($array, $key, $default = null) { if ( ! is_array($array) and ! $array instanceof \ArrayAccess) { throw new \InvalidArgumentException('First parameter must be an array or ArrayAccess object.'); } if (is_null($key)) { return $array; } if (is_array($key)) { $return = array(); foreach ($key as $k) { $return[$k] = static::get($array, $k, $default); } return $return; } foreach (explode('.', $key) as $key_part) { if (($array instanceof \ArrayAccess and isset($array[$key_part])) === false) { if ( ! is_array($array) or ! array_key_exists($key_part, $array)) { return \Fuel::value($default); } } $array = $array[$key_part]; } return $array; } /** * Set an array item (dot-notated) to the value. * * @param array $array The array to insert it into * @param mixed $key The dot-notated key to set or array of keys * @param mixed $value The value * @return void */ public static function set(&$array, $key, $value = null) { if (is_null($key)) { $array = $value; return; } if (is_array($key)) { foreach ($key as $k => $v) { static::set($array, $k, $v); } } else { $keys = explode('.', $key); while (count($keys) > 1) { $key = array_shift($keys); if ( ! isset($array[$key]) or ! is_array($array[$key])) { $array[$key] = array(); } $array =& $array[$key]; } $array[array_shift($keys)] = $value; } } /** * Pluck an array of values from an array. 
* * @param array $array collection of arrays to pluck from * @param string $key key of the value to pluck * @param string $index optional return array index key, true for original index * @return array array of plucked values */ public static function pluck($array, $key, $index = null) { $return = array(); $get_deep = strpos($key, '.') !== false; if ( ! $index) { foreach ($array as $i => $a) { $return[] = (is_object($a) and ! ($a instanceof \ArrayAccess)) ? $a->{$key} : ($get_deep ? static::get($a, $key) : $a[$key]); } } else { foreach ($array as $i => $a) { $index !== true and $i = (is_object($a) and ! ($a instanceof \ArrayAccess)) ? $a->{$index} : $a[$index]; $return[$i] = (is_object($a) and ! ($a instanceof \ArrayAccess)) ? $a->{$key} : ($get_deep ? static::get($a, $key) : $a[$key]); } } return $return; } /** * Array_key_exists with a dot-notated key from an array. * * @param array $array The search array * @param mixed $key The dot-notated key or array of keys * @return mixed */ public static function key_exists($array, $key) { foreach (explode('.', $key) as $key_part) { if ( ! is_array($array) or ! array_key_exists($key_part, $array)) { return false; } $array = $array[$key_part]; } return true; } /** * Unsets dot-notated key from an array * * @param array $array The search array * @param mixed $key The dot-notated key or array of keys * @return mixed */ public static function delete(&$array, $key) { if (is_null($key)) { return false; } if (is_array($key)) { $return = array(); foreach ($key as $k) { $return[$k] = static::delete($array, $k); } return $return; } $key_parts = explode('.', $key); if ( ! is_array($array) or ! array_key_exists($key_parts[0], $array)) { return false; } $this_key = array_shift($key_parts); if ( ! 
empty($key_parts)) { $key = implode('.', $key_parts); return static::delete($array[$this_key], $key); } else { unset($array[$this_key]); } return true; } /** * Converts a multi-dimensional associative array into an array of key => values with the provided field names * * @param array $assoc the array to convert * @param string $key_field the field name of the key field * @param string $val_field the field name of the value field * @return array * @throws \InvalidArgumentException */ public static function assoc_to_keyval($assoc, $key_field, $val_field) { if ( ! is_array($assoc) and ! $assoc instanceof \Iterator) { throw new \InvalidArgumentException('The first parameter must be an array.'); } $output = array(); foreach ($assoc as $row) { if (isset($row[$key_field]) and isset($row[$val_field])) { $output[$row[$key_field]] = $row[$val_field]; } } return $output; } /** * Converts the given 1 dimensional non-associative array to an associative * array. * * The array given must have an even number of elements or null will be returned. * * Arr::to_assoc(array('foo','bar')); * * @param string $arr the array to change * @return array|null the new array or null * @throws \BadMethodCallException */ public static function to_assoc($arr) { if (($count = count($arr)) % 2 > 0) { throw new \BadMethodCallException('Number of values in to_assoc must be even.'); } $keys = $vals = array(); for ($i = 0; $i < $count - 1; $i += 2) { $keys[] = array_shift($arr); $vals[] = array_shift($arr); } return array_combine($keys, $vals); } /** * Checks if the given array is an assoc array. * * @param array $arr the array to check * @return bool true if its an assoc array, false if not */ public static function is_assoc($arr) { if ( ! is_array($arr)) { throw new \InvalidArgumentException('The parameter must be an array.'); } $counter = 0; foreach ($arr as $key => $unused) { if ( ! 
is_int($key) or $key !== $counter++) { return true; } } return false; } /** * Flattens a multi-dimensional associative array down into a 1 dimensional * associative array. * * @param array the array to flatten * @param string what to glue the keys together with * @param bool whether to reset and start over on a new array * @param bool whether to flatten only associative array's, or also indexed ones * @return array */ public static function flatten($array, $glue = ':', $reset = true, $indexed = true) { static $return = array(); static $curr_key = array(); if ($reset) { $return = array(); $curr_key = array(); } foreach ($array as $key => $val) { $curr_key[] = $key; if (is_array($val) and ($indexed or array_values($val) !== $val)) { static::flatten_assoc($val, $glue, false); } else { $return[implode($glue, $curr_key)] = $val; } array_pop($curr_key); } return $return; } /** * Flattens a multi-dimensional associative array down into a 1 dimensional * associative array. * * @param array the array to flatten * @param string what to glue the keys together with * @param bool whether to reset and start over on a new array * @return array */ public static function flatten_assoc($array, $glue = ':', $reset = true) { return static::flatten($array, $glue, $reset, false); } /** * Reverse a flattened array in its original form. * * @param array $array flattened array * @param string $glue glue used in flattening * @return array the unflattened array */ public static function reverse_flatten($array, $glue = ':') { $return = array(); foreach ($array as $key => $value) { if (stripos($key, $glue) !== false) { $keys = explode($glue, $key); $temp =& $return; while (count($keys) > 1) { $key = array_shift($keys); $key = is_numeric($key) ? (int) $key : $key; if ( ! isset($temp[$key]) or ! is_array($temp[$key])) { $temp[$key] = array(); } $temp =& $temp[$key]; } $key = array_shift($keys); $key = is_numeric($key) ? (int) $key : $key; $temp[$key] = $value; } else { $key = is_numeric($key) ? 
(int) $key : $key; $return[$key] = $value; } } return $return; } /** * Filters an array on prefixed associative keys. * * @param array the array to filter. * @param string prefix to filter on. * @param bool whether to remove the prefix. * @return array */ public static function filter_prefixed($array, $prefix, $remove_prefix = true) { $return = array(); foreach ($array as $key => $val) { if (preg_match('/^'.$prefix.'/', $key)) { if ($remove_prefix === true) { $key = preg_replace('/^'.$prefix.'/','',$key); } $return[$key] = $val; } } return $return; } /** * Recursive version of PHP's array_filter() * * @param array the array to filter. * @param callback the callback that determines whether or not a value is filtered * @return array */ public static function filter_recursive($array, $callback = null) { foreach ($array as &$value) { if (is_array($value)) { $value = $callback === null ? static::filter_recursive($value) : static::filter_recursive($value, $callback); } } return $callback === null ? array_filter($array) : array_filter($array, $callback); } /** * Removes items from an array that match a key prefix. * * @param array the array to remove from * @param string prefix to filter on * @return array */ public static function remove_prefixed($array, $prefix) { foreach ($array as $key => $val) { if (preg_match('/^'.$prefix.'/', $key)) { unset($array[$key]); } } return $array; } /** * Filters an array on suffixed associative keys. * * @param array the array to filter. * @param string suffix to filter on. * @param bool whether to remove the suffix. * @return array */ public static function filter_suffixed($array, $suffix, $remove_suffix = true) { $return = array(); foreach ($array as $key => $val) { if (preg_match('/'.$suffix.'$/', $key)) { if ($remove_suffix === true) { $key = preg_replace('/'.$suffix.'$/','',$key); } $return[$key] = $val; } } return $return; } /** * Removes items from an array that match a key suffix. 
* * @param array the array to remove from * @param string suffix to filter on * @return array */ public static function remove_suffixed($array, $suffix) { foreach ($array as $key => $val) { if (preg_match('/'.$suffix.'$/', $key)) { unset($array[$key]); } } return $array; } /** * Filters an array by an array of keys * * @param array the array to filter. * @param array the keys to filter * @param bool if true, removes the matched elements. * @return array */ public static function filter_keys($array, $keys, $remove = false) { $return = array(); foreach ($keys as $key) { if (array_key_exists($key, $array)) { $remove or $return[$key] = $array[$key]; if($remove) { unset($array[$key]); } } } return $remove ? $array : $return; } /** * Insert value(s) into an array, mostly an array_splice alias * WARNING: original array is edited by reference, only boolean success is returned * * @param array the original array (by reference) * @param array|mixed the value(s) to insert, if you want to insert an array it needs to be in an array itself * @param int the numeric position at which to insert, negative to count from the end backwards * @return bool false when array shorter then $pos, otherwise true */ public static function insert(array &$original, $value, $pos) { if (count($original) < abs($pos)) { \Error::notice('Position larger than number of elements in array in which to insert.'); return false; } array_splice($original, $pos, 0, $value); return true; } /** * Insert value(s) into an array, mostly an array_splice alias * WARNING: original array is edited by reference, only boolean success is returned * * @param array the original array (by reference) * @param array|mixed the value(s) to insert, if you want to insert an array it needs to be in an array itself * @param int the numeric position at which to insert, negative to count from the end backwards * @return bool false when array shorter then $pos, otherwise true */ public static function insert_assoc(array &$original, 
array $values, $pos) { if (count($original) < abs($pos)) { return false; } $original = array_slice($original, 0, $pos, true) + $values + array_slice($original, $pos, null, true); return true; } /** * Insert value(s) into an array before a specific key * WARNING: original array is edited by reference, only boolean success is returned * * @param array the original array (by reference) * @param array|mixed the value(s) to insert, if you want to insert an array it needs to be in an array itself * @param string|int the key before which to insert * @param bool wether the input is an associative array * @return bool false when key isn't found in the array, otherwise true */ public static function insert_before_key(array &$original, $value, $key, $is_assoc = false) { $pos = array_search($key, array_keys($original)); if ($pos === false) { \Error::notice('Unknown key before which to insert the new value into the array.'); return false; } return $is_assoc ? static::insert_assoc($original, $value, $pos) : static::insert($original, $value, $pos); } /** * Insert value(s) into an array after a specific key * WARNING: original array is edited by reference, only boolean success is returned * * @param array the original array (by reference) * @param array|mixed the value(s) to insert, if you want to insert an array it needs to be in an array itself * @param string|int the key after which to insert * @param bool wether the input is an associative array * @return bool false when key isn't found in the array, otherwise true */ public static function insert_after_key(array &$original, $value, $key, $is_assoc = false) { $pos = array_search($key, array_keys($original)); if ($pos === false) { \Error::notice('Unknown key after which to insert the new value into the array.'); return false; } return $is_assoc ? 
static::insert_assoc($original, $value, $pos + 1) : static::insert($original, $value, $pos + 1); } /** * Insert value(s) into an array after a specific value (first found in array) * * @param array the original array (by reference) * @param array|mixed the value(s) to insert, if you want to insert an array it needs to be in an array itself * @param string|int the value after which to insert * @param bool wether the input is an associative array * @return bool false when value isn't found in the array, otherwise true */ public static function insert_after_value(array &$original, $value, $search, $is_assoc = false) { $key = array_search($search, $original); if ($key === false) { \Error::notice('Unknown value after which to insert the new value into the array.'); return false; } return static::insert_after_key($original, $value, $key, $is_assoc); } /** * Insert value(s) into an array before a specific value (first found in array) * * @param array the original array (by reference) * @param array|mixed the value(s) to insert, if you want to insert an array it needs to be in an array itself * @param string|int the value after which to insert * @param bool wether the input is an associative array * @return bool false when value isn't found in the array, otherwise true */ public static function insert_before_value(array &$original, $value, $search, $is_assoc = false) { $key = array_search($search, $original); if ($key === false) { \Error::notice('Unknown value before which to insert the new value into the array.'); return false; } return static::insert_before_key($original, $value, $key, $is_assoc); } /** * Sorts a multi-dimensional array by it's values. * * @access public * @param array The array to fetch from * @param string The key to sort by * @param string The order (asc or desc) * @param int The php sort type flag * @return array */ public static function sort($array, $key, $order = 'asc', $sort_flags = SORT_REGULAR) { if ( ! 
is_array($array)) { throw new \InvalidArgumentException('Arr::sort() - $array must be an array.'); } if (empty($array)) { return $array; } foreach ($array as $k => $v) { $b[$k] = static::get($v, $key); } switch ($order) { case 'asc': asort($b, $sort_flags); break; case 'desc': arsort($b, $sort_flags); break; default: throw new \InvalidArgumentException('Arr::sort() - $order must be asc or desc.'); break; } foreach ($b as $key => $val) { $c[] = $array[$key]; } return $c; } /** * Sorts an array on multitiple values, with deep sorting support. * * @param array $array collection of arrays/objects to sort * @param array $conditions sorting conditions * @param bool @ignore_case wether to sort case insensitive */ public static function multisort($array, $conditions, $ignore_case = false) { $temp = array(); $keys = array_keys($conditions); foreach($keys as $key) { $temp[$key] = static::pluck($array, $key, true); is_array($conditions[$key]) or $conditions[$key] = array($conditions[$key]); } $args = array(); foreach ($keys as $key) { $args[] = $ignore_case ? array_map('strtolower', $temp[$key]) : $temp[$key]; foreach($conditions[$key] as $flag) { $args[] = $flag; } } $args[] = &$array; call_user_func_array('array_multisort', $args); return $array; } /** * Find the average of an array * * @param array the array containing the values * @return numeric the average value */ public static function average($array) { // No arguments passed, lets not divide by 0 if ( ! ($count = count($array)) > 0) { return 0; } return (array_sum($array) / $count); } /** * Replaces key names in an array by names in $replace * * @param array the array containing the key/value combinations * @param array|string key to replace or array containing the replacement keys * @param string the replacement key * @return array the array with the new keys */ public static function replace_key($source, $replace, $new_key = null) { if(is_string($replace)) { $replace = array($replace => $new_key); } if ( ! 
is_array($source) or ! is_array($replace)) { throw new \InvalidArgumentException('Arr::replace_key() - $source must an array. $replace must be an array or string.'); } $result = array(); foreach ($source as $key => $value) { if (array_key_exists($key, $replace)) { $result[$replace[$key]] = $value; } else { $result[$key] = $value; } } return $result; } /** * Merge 2 arrays recursively, differs in 2 important ways from array_merge_recursive() * - When there's 2 different values and not both arrays, the latter value overwrites the earlier * instead of merging both into an array * - Numeric keys that don't conflict aren't changed, only when a numeric key already exists is the * value added using array_push() * * @param array multiple variables all of which must be arrays * @return array * @throws \InvalidArgumentException */ public static function merge() { $array = func_get_arg(0); $arrays = array_slice(func_get_args(), 1); if ( ! is_array($array)) { throw new \InvalidArgumentException('Arr::merge() - all arguments must be arrays.'); } foreach ($arrays as $arr) { if ( ! is_array($arr)) { throw new \InvalidArgumentException('Arr::merge() - all arguments must be arrays.'); } foreach ($arr as $k => $v) { // numeric keys are appended if (is_int($k)) { array_key_exists($k, $array) ? array_push($array, $v) : $array[$k] = $v; } elseif (is_array($v) and array_key_exists($k, $array) and is_array($array[$k])) { $array[$k] = static::merge($array[$k], $v); } else { $array[$k] = $v; } } } return $array; } /** * Prepends a value with an asociative key to an array. * Will overwrite if the value exists. * * @param array $arr the array to prepend to * @param string|array $key the key or array of keys and values * @param mixed $valye the value to prepend */ public static function prepend(&$arr, $key, $value = null) { $arr = (is_array($key) ? 
$key : array($key => $value)) + $arr; } /** * Recursive in_array * * @param mixed $needle what to search for * @param array $haystack array to search in * @return bool wether the needle is found in the haystack. */ public static function in_array_recursive($needle, $haystack, $strict = false) { foreach ($haystack as $value) { if ( ! $strict and $needle == $value) { return true; } elseif ($needle === $value) { return true; } elseif (is_array($value) and static::in_array_recursive($needle, $value, $strict)) { return true; } } return false; } /** * Checks if the given array is a multidimensional array. * * @param array $arr the array to check * @param array $all_keys if true, check that all elements are arrays * @return bool true if its a multidimensional array, false if not */ public static function is_multi($arr, $all_keys = false) { $values = array_filter($arr, 'is_array'); return $all_keys ? count($arr) === count($values) : count($values) > 0; } /** * Searches the array for a given value and returns the * corresponding key or default value. * If $recursive is set to true, then the Arr::search() * function will return a delimiter-notated key using $delimiter. * * @param array $array The search array * @param mixed $value The searched value * @param string $default The default value * @param bool $recursive Whether to get keys recursive * @param string $delimiter The delimiter, when $recursive is true * @return mixed */ public static function search($array, $value, $default = null, $recursive = true, $delimiter = '.') { if ( ! is_array($array) and ! $array instanceof \ArrayAccess) { throw new \InvalidArgumentException('First parameter must be an array or ArrayAccess object.'); } if ( ! is_null($default) and ! is_int($default) and ! is_string($default)) { throw new \InvalidArgumentException('Expects parameter 3 to be an string or integer or null.'); } if ( ! 
is_string($delimiter)) { throw new \InvalidArgumentException('Expects parameter 5 must be an string.'); } $key = array_search($value, $array); if ($recursive and $key === false) { $keys = array(); foreach ($array as $k => $v) { if (is_array($v)) { $rk = static::search($v, $value, $default, true, $delimiter); if ($rk !== $default) { $keys = array($k, $rk); break; } } } $key = count($keys) ? implode($delimiter, $keys) : false; } return $key === false ? $default : $key; } /** * Returns only unique values in an array. It does not sort. First value is used. * * @param array $arr the array to dedup * @return array array with only de-duped values */ public static function unique($arr) { // filter out all duplicate values return array_filter($arr, function($item) { // contrary to popular belief, this is not as static as you think... static $vars = array(); if (in_array($item, $vars, true)) { // duplicate return false; } else { // record we've had this value $vars[] = $item; // unique return true; } }); } /** * Calculate the sum of an array * * @param array $array the array containing the values * @param string $key key of the value to pluck * @return numeric the sum value */ public static function sum($array, $key) { if ( ! is_array($array) and ! $array instanceof \ArrayAccess) { throw new \InvalidArgumentException('First parameter must be an array or ArrayAccess object.'); } return array_sum(static::pluck($array, $key)); } }
dailenearanas/iJMC-WebApp
fuel/core/classes/arr.php
PHP
mit
25,244
<?php

/*
 * Created by tpay.com.
 * Date: 19.06.2017
 * Time: 11:13
 */

namespace tpayLibs\src\_class_tpay\Validators\PaymentTypes;

use tpayLibs\src\_class_tpay\Validators\PaymentTypesInterface;
use tpayLibs\src\Dictionaries\Payments\CardFieldsDictionary;

/**
 * Field definitions for the card payment type.
 *
 * Both lookups simply delegate to the card fields dictionary, so this class
 * is a thin adapter between the generic payment-type interface and the
 * card-specific constants.
 */
class PaymentTypeCard implements PaymentTypesInterface
{
    /**
     * Fields expected in an outgoing card payment request.
     *
     * @return array
     */
    public function getRequestFields()
    {
        return CardFieldsDictionary::REQUEST_FIELDS;
    }

    /**
     * Fields expected in a card payment response.
     *
     * @return array
     */
    public function getResponseFields()
    {
        return CardFieldsDictionary::RESPONSE_FIELDS;
    }
}
tpaycom/transferuj
tpayLibs/src/_class_tpay/Validators/PaymentTypes/PaymentTypeCard.php
PHP
mit
530
from __future__ import unicode_literals

import os
import os.path
import subprocess

from pre_commit.util import cmd_output


class PrefixedCommandRunner(object):
    """Runs subprocess commands with ``{prefix}`` substituted by a directory.

    For instance::

        PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])

    will run ``['/tmp/foo/foo.sh', 'bar', 'baz']``.
    """

    def __init__(
            self,
            prefix_dir,
            popen=subprocess.Popen,
            makedirs=os.makedirs,
    ):
        # Normalize to exactly one trailing separator so '{prefix}cmd'
        # always expands to a path *inside* the prefix directory.
        self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
        self.__popen = popen
        self.__makedirs = makedirs

    def _create_path_if_not_exists(self):
        # Lazily create the prefix directory on first use.
        if not os.path.exists(self.prefix_dir):
            self.__makedirs(self.prefix_dir)

    def run(self, cmd, **kwargs):
        """Run *cmd* after substituting '{prefix}' in every argument."""
        self._create_path_if_not_exists()
        substituted = [arg.replace('{prefix}', self.prefix_dir) for arg in cmd]
        return cmd_output(*substituted, __popen=self.__popen, **kwargs)

    def path(self, *parts):
        """Join *parts* onto the prefix directory and normalize the result."""
        return os.path.normpath(os.path.join(self.prefix_dir, *parts))

    def exists(self, *parts):
        """Whether the path built from *parts* exists on disk."""
        return os.path.exists(self.path(*parts))

    @classmethod
    def from_command_runner(cls, command_runner, path_end):
        """Constructs a new command runner from an existing one by appending
        `path_end` to the command runner's prefix directory.
        """
        # Note: the name-mangled attribute access works here because
        # ``command_runner`` is an instance of this same class.
        return cls(
            command_runner.path(path_end),
            popen=command_runner.__popen,
            makedirs=command_runner.__makedirs,
        )
barrysteyn/pre-commit
pre_commit/prefixed_command_runner.py
Python
mit
1,661
# Mongoid document representing a child record with an optional link to a
# Parent document.
class Child
  include Mongoid::Document
  include Mongoid::Timestamps # adds created_at / updated_at

  # Display name of the child.
  field :name, type: String

  # optional: true allows a Child to be saved before a Parent is assigned.
  belongs_to :parent, optional: true
end
monkbroc/rails_admin_import
spec/dummy_app/app/mongoid/child.rb
Ruby
mit
140
using System;
using System.Text;
using ECommon.Components;
using ECommon.Remoting;
using ECommon.Serializing;
using EQueue.Protocols;
using EQueue.Protocols.Brokers;
using EQueue.Protocols.Brokers.Requests;
using EQueue.Protocols.NameServers.Requests;
using EQueue.Utils;

namespace EQueue.NameServer.RequestHandlers
{
    /// <summary>
    /// Name-server request handler that fans a "set queue consumer visible"
    /// command out to every broker in the requested cluster.
    /// </summary>
    public class SetQueueConsumerVisibleForClusterRequestHandler : IRequestHandler
    {
        private NameServerController _nameServerController;
        private IBinarySerializer _binarySerializer;

        public SetQueueConsumerVisibleForClusterRequestHandler(NameServerController nameServerController)
        {
            // Serializer is resolved from the shared IoC container.
            _binarySerializer = ObjectContainer.Resolve<IBinarySerializer>();
            _nameServerController = nameServerController;
        }

        /// <summary>
        /// Deserializes the cluster-level request and forwards a per-broker
        /// SetQueueConsumerVisible command to each broker in the cluster.
        /// </summary>
        public RemotingResponse HandleRequest(IRequestHandlerContext context, RemotingRequest remotingRequest)
        {
            // Cluster name, topic, queue id and visibility flag come from the request body.
            var request = _binarySerializer.Deserialize<SetQueueConsumerVisibleForClusterRequest>(remotingRequest.Body);
            var requestService = new BrokerRequestService(_nameServerController);
            requestService.ExecuteActionToAllClusterBrokers(request.ClusterName, async remotingClient =>
            {
                var requestData = _binarySerializer.Serialize(new SetQueueConsumerVisibleRequest(request.Topic, request.QueueId, request.Visible));
                // 30s timeout for each broker invocation.
                var remotingResponse = await remotingClient.InvokeAsync(new RemotingRequest((int)BrokerRequestCode.SetQueueConsumerVisible, requestData), 30000);
                // NOTE(review): a response is sent here once per broker while another
                // response is also returned below -- confirm the remoting layer
                // tolerates multiple responses for a single request.
                context.SendRemotingResponse(remotingResponse);
            });
            return RemotingResponseFactory.CreateResponse(remotingRequest);
        }
    }
}
tangxuehua/equeue
src/EQueue/NameServer/RequestHandlers/SetQueueConsumerVisibleForClusterRequestHandler.cs
C#
mit
1,757
using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using LCC.ControlesLCCGestion.filtros;

namespace LCC.WebGestion.presupuestos
{
    /// <summary>
    /// Budget ("presupuesto") filter control: propagates the value selected in
    /// the <c>dv</c> drop-down to every dependent filter control on the page.
    /// </summary>
    public partial class presupuestos_FiltroPresupuesto : RaizFiltroASCX
    {
        /// <summary>Search panel exposed to the filter infrastructure.</summary>
        public override PROT.ControlesEspeciales.PanelBuscador PanelBuscador
        {
            get { return bus; }
        }

        /// <summary>Filter control exposed to the filter infrastructure.</summary>
        public override PROT.ControlesEspeciales.ControlFiltro ControlFiltro
        {
            get { return c; }
        }

        /// <summary>
        /// Handles a selection change in the <c>dv</c> drop-down, re-preparing
        /// the dependent filters and updating the service parameters.
        /// </summary>
        protected void dv_SelectedIndexChanged(object sender, EventArgs e)
        {
            // Read and parse the selected value once instead of once per
            // control (the original parsed Int32.Parse(dv.SelectedValue)
            // four separate times).
            string selectedValue = dv.SelectedValue;
            int selectedId = Int32.Parse(selectedValue);

            suc.Param = selectedId;
            suc.Preparar(bus.FiltroActual, bus.ObjetoNegocio);

            tip.Param = selectedId;
            tip.Preparar(bus.FiltroActual, bus.ObjetoNegocio);

            mat.Param = selectedId;
            mat.Preparar(bus.FiltroActual, bus.ObjetoNegocio);

            us.Param = selectedId;
            us.Preparar(bus.FiltroActual, bus.ObjetoNegocio);

            o1.ServicioParam = selectedValue;
            tar.ServicioParam = selectedValue;
            ta1.ServicioParam = selectedValue;
            ta2.ServicioParam = selectedValue;
            des.ServicioParam = selectedValue;
            obn.ServicioParam = selectedValue;
        }
    }
}
pacoferre/Dosconf
Frontend/PROT/Search/FiltroPresupuesto.ascx.cs
C#
mit
1,367
"""
Python Blueprint
================

Does not install python itself, only develop and setup tools.
Contains pip helper for other blueprints to use.

**Fabric environment:**

.. code-block:: yaml

    blueprints:
      - blues.python

"""
from fabric.decorators import task

from refabric.api import run, info
from refabric.context_managers import sudo

from . import debian

__all__ = ['setup']


pip_log_file = '/tmp/pip.log'


@task
def setup():
    """
    Install python develop tools
    """
    install()


def install():
    """Install pip/setuptools system-wide and prepare the shared pip log."""
    with sudo():
        info('Install python dependencies')
        debian.apt_get('install', 'python-dev', 'python-setuptools')
        run('easy_install pip')
        # Pre-create the log file and open its permissions so non-root
        # blueprint runs can append to it.
        run('touch {}'.format(pip_log_file))
        debian.chmod(pip_log_file, mode=777)
        pip('install', 'setuptools', '--upgrade')


def pip(command, *options):
    """Run a pip *command* with *options*, logging verbosely to the log file."""
    info('Running pip {}', command)
    joined_options = ' '.join(options)
    run('pip {0} {1} -v --log={2} --log-file={2}'.format(
        command, joined_options, pip_log_file))
gelbander/blues
blues/python.py
Python
mit
997
<?php

namespace Alcodo\AsyncCss\Commands;

use Alcodo\AsyncCss\Cache\CssKeys;
use Illuminate\Console\Command;

/**
 * Artisan command that lists every generated async css path held in cache.
 */
class Show extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'alcodo:asynccss:show';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Show all generated css async paths';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Execute the console command.
     *
     * Prints one line per cached key, or a notice when the cache is empty.
     *
     * @return mixed
     */
    public function handle()
    {
        $keys = CssKeys::show();

        if ($keys === null) {
            $this->info('Cache is empty');

            return true;
        }

        if (is_array($keys)) {
            foreach ($keys as $key) {
                $this->info('Path: '.CssKeys::getSinglePath($key));
            }
        }

        return true;
    }
}
alcodo/async-css
Commands/Show.php
PHP
mit
1,079
#!/usr/bin/env ruby $:.unshift File.expand_path('../../../lib', __FILE__) $stdout.sync = true require 'philotic' require 'awesome_print' class NamedQueueConsumer < Philotic::Consumer # subscribe to an existing named queue subscribe_to :test_queue # use acknowledgements auto_acknowledge # REQUEUE the message with RabbitMQ if consume throws these errors. I.e., something went wrong with the consumer # Only valid with ack_messages # requeueable_errors PossiblyTransientErrorOne, PossiblyTransientErrorTwo # REJECT the message with RabbitMQ if consume throws these errors. I.e., The message is malformed/invalid # Only valid with ack_messages # rejectable_errors BadMessageError def consume(message) ap named: message.attributes end end class AnonymousQueueConsumer < Philotic::Consumer # subscribe anonymously to a set of headers: # subscribe_to header_1: 'value_1', # header_2: 'value_2', # header_3: 'value_3' subscribe_to philotic_firehose: true def consume(message) ap anon: message.attributes end end #run the consumers AnonymousQueueConsumer.subscribe NamedQueueConsumer.subscribe # keep the parent thread alive Philotic.endure
nkeyes/philotic
examples/subscribing/consumer.rb
Ruby
mit
1,218
#include "arrow.h"

/**
 * @brief Arrow::Arrow
 * Default constructor; tags the shape as an arrow.
 */
Arrow::Arrow() : Shape(SHAPES::ARROW)
{
}

/**
 * @brief Arrow::Arrow
 * @param col Colour of the new object
 * @param pos Starting point for the new object
 */
Arrow::Arrow(QColor col, QPoint pos) : Shape(SHAPES::ARROW, col, pos)
{
}

/**
 * @brief Arrow::draw
 * Draws the object on top of the specified frame.
 * @param frame Frame to draw on.
 * @return Returns the frame with drawing.
 */
cv::Mat Arrow::draw(cv::Mat &frame)
{
    // Render from the stored start point to the stored end point.
    cv::arrowedLine(frame, draw_start, draw_end, colour, LINE_THICKNESS);
    return frame;
}

/**
 * @brief Arrow::handle_new_pos
 * Function to handle the new position of the mouse.
 * Intentionally a no-op: the arrow only needs start/end points,
 * so intermediate positions are not stored.
 * @param pos
 */
void Arrow::handle_new_pos(QPoint pos)
{
}

/**
 * @brief Arrow::write
 * @param json
 * Writes to a Json object (delegates to the base shape serializer).
 */
void Arrow::write(QJsonObject &json)
{
    write_shape(json);
}

/**
 * @brief Arrow::read
 * @param json
 * Reads from a Json object (delegates to the base shape deserializer).
 */
void Arrow::read(const QJsonObject &json)
{
    read_shape(json);
}
NFCSKL/ViAn
ViAn/Video/shapes/arrow.cpp
C++
mit
1,054
const request = require('request-promise');
const oauth = require('./config').auth;

const rootUrl = 'https://api.twitter.com/1.1';

/* API methods */
const API = {
    /**
     * Search for tweets
     * @param options {Object} Options object containing:
     * - text (Required) : String
     * - count (optional) : Number
     * - result_type (optional) : String
     * - geocode (optional) : String (lat long radius_in_miles)
     * - since_id (optional) : Number - start search from this ID
     * - max_id (optional) : Number - end search on this ID
     * @returns {Promise} resolves with the accumulated array of tweets
     */
    search: (options) => {
        return new Promise((resolve, reject) => {
            const {text, count = 100, result_type = 'popular', since_id = 0, max_id, geocode} = options;

            // Build the query string from the destructured options.
            // (Fix: use the destructured `text` instead of re-reading options.text.)
            let params = `?q=${encodeURIComponent(text)}` +
                `&count=${count}` +
                `&result_type=${result_type}` +
                `&since_id=${since_id}`;
            if (max_id) {
                params += `&max_id=${max_id}`;
            }
            if (geocode) {
                params += `&geocode=${encodeURIComponent(geocode)}`;
            }

            API.searchByStringParam(params)
                .then((items) => resolve(items))
                .catch((err) => reject(err));
        });
    },

    /**
     * Search w/ params
     * @param stringParams {String} Params as string
     * @param accumulated {Array} (optional) accumulator carried across paginated
     *        calls; defaults to a fresh array so concurrent searches no longer
     *        share module-level state.
     * @returns {Promise} resolves with all accumulated tweets, or null if the
     *          response carried no statuses
     */
    searchByStringParam: (stringParams, accumulated = []) => new Promise((resolve, reject) => {
        const searchCallback = (res) => {
            const result = JSON.parse(res);
            if (result && result.statuses) {
                result.statuses.forEach(item => accumulated.push(item));
                console.log('[Search] So far we have', accumulated.length, 'items');

                // If we have the next_results, search again for the rest (sort of a pagination)
                const nextRes = result.search_metadata.next_results;
                if (nextRes) {
                    // Propagate pagination failures too, so the promise never hangs.
                    API.searchByStringParam(nextRes, accumulated)
                        .then((items) => resolve(items))
                        .catch((err) => reject(err));
                } else {
                    resolve(accumulated);
                }
            } else {
                resolve(null);
            }
        };

        request.get({url: `${rootUrl}/search/tweets.json${stringParams}`, oauth})
            .then(res => searchCallback(res))
            .catch(err => reject(err));
    }),

    /**
     * Retweet a tweet
     * @param tweetId {String} identifier for the tweet
     */
    retweet: (tweetId) => new Promise((resolve, reject) =>
        request.post({url: `${rootUrl}/statuses/retweet/${tweetId}.json`, oauth})
            .then((res) => resolve(res))
            .catch((err) => reject(err))
    ),

    /**
     * Like (aka favorite) a tweet
     * @param tweetId {String} identifier for the tweet
     */
    like: (tweetId) => new Promise((resolve, reject) =>
        request.post({url: `${rootUrl}/favorites/create.json?id=${tweetId}`, oauth})
            .then((res) => resolve(res))
            .catch((err) => reject(err))
    ),

    /**
     * Follow a user by user ID
     * @param userId {String} identifier for the user
     */
    follow: (userId) => new Promise((resolve, reject) =>
        request.post({url: `${rootUrl}/friendships/create.json?user_id=${userId}`, oauth})
            .then((res) => resolve(res))
            .catch((err) => reject(err))
    ),

    /**
     * Follow a user by username
     * @param userName {String} username identifier for the user
     */
    followByUsername: (userName) => new Promise((resolve, reject) =>
        request.post({url: `${rootUrl}/friendships/create.json?screen_name=${userName}`, oauth})
            .then((res) => resolve(res))
            .catch((err) => reject(err))
    ),

    /**
     * Block a user
     * @param userId {String} ID of the user to block
     */
    blockUser: (userId) => new Promise((resolve, reject) =>
        request.post({url: `${rootUrl}/blocks/create.json?user_id=${userId}`, oauth})
            .then((res) => resolve(res))
            .catch((err) => reject(err))
    ),

    /**
     * Get list of blocked users for the current user
     * @returns {Promise} resolves with an array of blocked user IDs
     */
    getBlockedUsers: () => new Promise((resolve, reject) =>
        request.get({url: `${rootUrl}/blocks/list.json`, oauth})
            .then((res) => resolve(JSON.parse(res).users.map((user) => user.id)))
            .catch((err) => reject(err))
    ),

    /**
     * Get a user's tweets
     * @param userId {String} identifier for the user
     * @param count {Number} max tweets to retrieve
     * @returns {Promise} resolves with the raw (unparsed) response body
     */
    getTweetsForUser: (userId, count) => new Promise((resolve, reject) =>
        request.get({url: `${rootUrl}/statuses/user_timeline.json?user_id=${userId}&count=${count}`, oauth})
            .then((response) => resolve(response))
            .catch((err) => reject(err))
    ),

    /**
     * Delete a tweet
     * @param tweetId {String} identifier for the tweet
     */
    deleteTweet: (tweetId) => new Promise((resolve, reject) =>
        request.post({url: `${rootUrl}/statuses/destroy/${tweetId}.json`, oauth})
            .then(() => {
                console.log('Deleted tweet', tweetId);
                resolve();
            })
            .catch((err) => reject(err))
    ),

    /**
     * Reply to a tweet
     * (The Reply on Twitter is basically a Status Update containing @username, where username is author of the original tweet)
     * @param tweet {Object} The full Tweet we want to reply to
     */
    replyToTweet: (tweet) => new Promise((resolve, reject) => {
        try {
            const text = encodeURIComponent(`@${tweet.user.screen_name} `);
            request.post({
                url: `${rootUrl}/statuses/update.json?status=${text}&in_reply_to_status_id=${tweet.id}`,
                oauth
            })
                .then(() => resolve())
                .catch(err => reject(err));
        } catch (err) {
            reject(err);
        }
    })
};

module.exports = API;
raulrene/Twitter-ContestJS-bot
api-functions.js
JavaScript
mit
6,401