content stringlengths 7 1.05M | fixed_cases stringlengths 1 1.28M |
|---|---|
# Databricks notebook source
# MAGIC %md
# MAGIC # Project Timesheet Source Data
# COMMAND ----------
# Configure Spark access to the `dmstore1` Azure Blob Storage account.
# SECURITY NOTE(review): this storage-account key is hard-coded in source control and is
# effectively leaked. It should be rotated and then read from a Databricks secret scope
# (dbutils.secrets.get) instead of being embedded here.
spark.conf.set(
    "fs.azure.account.key.dmstore1.blob.core.windows.net",
    "s8aN23JQ1EboPql5lx++0zQOyYrYC2EvT7NbgewR/8yAmQzpPfojntRWrCr4XOuonMowUUXsEzSxP11Jzd3kTg==")
# COMMAND ----------
# MAGIC %sql
# MAGIC create database if not exists samples
# COMMAND ----------
# MAGIC %sql
# MAGIC drop table if exists samples.project_timesheet;
# MAGIC create table samples.project_timesheet
# MAGIC using csv
# MAGIC options (path "wasbs://sample-data@dmstore1.blob.core.windows.net/timesheet/sample_data.csv", header "true", mode "FAILFAST", inferschema "true")
# COMMAND ----------
# MAGIC %sql
# MAGIC describe table samples.project_timesheet
# COMMAND ----------
# MAGIC %sql
# MAGIC select
# MAGIC *
# MAGIC from
# MAGIC samples.project_timesheet
# COMMAND ----------
| spark.conf.set('fs.azure.account.key.dmstore1.blob.core.windows.net', 's8aN23JQ1EboPql5lx++0zQOyYrYC2EvT7NbgewR/8yAmQzpPfojntRWrCr4XOuonMowUUXsEzSxP11Jzd3kTg==') |
# Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the `swift_binary` and `swift_test` rules."""
load(":api.bzl", "swift_common")
load(":derived_files.bzl", "derived_files")
load(":features.bzl", "SWIFT_FEATURE_BUNDLED_XCTESTS", "is_feature_enabled")
load(":linking.bzl", "register_link_action")
load(":providers.bzl", "SwiftBinaryInfo", "SwiftToolchainInfo")
load(":swift_c_module_aspect.bzl", "swift_c_module_aspect")
load(":utils.bzl", "expand_locations")
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:partial.bzl", "partial")
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
load("@bazel_tools//tools/build_defs/cc:action_names.bzl", "CPP_LINK_EXECUTABLE_ACTION_NAME")
def _swift_linking_rule_impl(
        ctx,
        feature_configuration,
        is_test,
        toolchain,
        linkopts = []):
    """The shared implementation function for `swift_{binary,test}`.

    Args:
        ctx: The rule context.
        feature_configuration: A feature configuration obtained from
            `swift_common.configure_features`.
        is_test: A `Boolean` value indicating whether the binary is a test target.
        toolchain: The `SwiftToolchainInfo` provider of the toolchain being used to build the
            target.
        linkopts: Additional rule-specific flags that should be passed to the linker.

    Returns:
        A tuple with two values: the `File` representing the binary that was linked, and a list of
        providers to be propagated by the target being built.
    """

    # Bazel fails the build if you try to query a fragment that hasn't been declared, even
    # dynamically with `hasattr`/`getattr`. Thus, we have to use other information to determine
    # whether we can access the `objc` configuration.
    objc_fragment = (ctx.fragments.objc if toolchain.supports_objc_interop else None)

    # Expand `$(location ...)` placeholders in copts/linkopts against `swiftc_inputs`.
    copts = expand_locations(ctx, ctx.attr.copts, ctx.attr.swiftc_inputs)
    linkopts = list(linkopts) + expand_locations(ctx, ctx.attr.linkopts, ctx.attr.swiftc_inputs)

    additional_inputs = ctx.files.swiftc_inputs
    srcs = ctx.files.srcs
    out_bin = derived_files.executable(ctx.actions, target_name = ctx.label.name)
    objects_to_link = []
    additional_output_groups = {}
    compilation_providers = []

    link_args = ctx.actions.args()
    link_args.add("-o", out_bin)

    if not srcs:
        # No sources to compile: link only objects from deps, but still feed the
        # rule's extra inputs to the linker.
        additional_inputs_to_linker = depset(direct = additional_inputs)
    else:
        module_name = ctx.attr.module_name
        if not module_name:
            module_name = swift_common.derive_module_name(ctx.label)

        compile_results = swift_common.compile_as_objects(
            actions = ctx.actions,
            arguments = [],
            compilation_mode = ctx.var["COMPILATION_MODE"],
            copts = copts,
            defines = ctx.attr.defines,
            feature_configuration = feature_configuration,
            module_name = module_name,
            srcs = srcs,
            swift_fragment = ctx.fragments.swift,
            target_name = ctx.label.name,
            toolchain = toolchain,
            additional_input_depsets = [depset(direct = additional_inputs)],
            configuration = ctx.configuration,
            deps = ctx.attr.deps,
            genfiles_dir = ctx.genfiles_dir,
            objc_fragment = objc_fragment,
        )
        link_args.add_all(compile_results.linker_flags)
        objects_to_link.extend(compile_results.output_objects)
        additional_inputs_to_linker = depset(
            direct = compile_results.linker_inputs,
            transitive = [compile_results.compile_inputs],
        )
        # NOTE(review): skylib's `dicts.add` returns a *new* merged dict and does not mutate its
        # arguments, so this result appears to be discarded and `additional_output_groups` may
        # stay empty -- confirm whether `additional_output_groups = dicts.add(...)` was intended.
        dicts.add(additional_output_groups, compile_results.output_groups)
        compilation_providers.append(
            SwiftBinaryInfo(compile_options = compile_results.compile_options),
        )

    # TODO(b/70228246): Also support mostly-static and fully-dynamic modes, here and for the C++
    # toolchain args below.
    link_args.add_all(partial.call(
        toolchain.linker_opts_producer,
        is_static = True,
        is_test = is_test,
    ))

    # Enable LLVM coverage in CROSSTOOL if this is a coverage build. Note that we explicitly enable
    # LLVM format and disable GCC format because the former is the only one that Swift supports.
    if ctx.configuration.coverage_enabled:
        coverage_features_to_enable = ["llvm_coverage_map_format"]
        coverage_features_to_disable = ["gcc_coverage_map_format"]
    else:
        coverage_features_to_enable = []
        coverage_features_to_disable = []

    # Get additional linker flags from the C++ toolchain.
    cpp_toolchain = find_cpp_toolchain(ctx)
    cc_feature_configuration = cc_common.configure_features(
        cc_toolchain = cpp_toolchain,
        requested_features = (
            swift_common.get_enabled_features(feature_configuration) +
            ["static_linking_mode"] +
            coverage_features_to_enable
        ),
        unsupported_features = (
            swift_common.get_disabled_features(feature_configuration) +
            coverage_features_to_disable
        ),
    )
    variables = cc_common.create_link_variables(
        feature_configuration = cc_feature_configuration,
        cc_toolchain = cpp_toolchain,
        is_static_linking_mode = True,
    )
    link_cpp_toolchain_flags = cc_common.get_memory_inefficient_command_line(
        feature_configuration = cc_feature_configuration,
        action_name = CPP_LINK_EXECUTABLE_ACTION_NAME,
        variables = variables,
    )
    link_args.add_all(link_cpp_toolchain_flags)

    register_link_action(
        actions = ctx.actions,
        action_environment = toolchain.action_environment,
        clang_executable = toolchain.clang_executable,
        deps = ctx.attr.deps + toolchain.implicit_deps,
        expanded_linkopts = linkopts,
        inputs = additional_inputs_to_linker,
        mnemonic = "SwiftLinkExecutable",
        objects = objects_to_link,
        outputs = [out_bin],
        rule_specific_args = link_args,
        toolchain = toolchain,
    )

    return out_bin, compilation_providers + [
        OutputGroupInfo(**additional_output_groups),
    ]
def _create_xctest_runner(name, actions, binary, xctest_runner_template):
    """Generates the launcher script that runs a test binary via the `xctest` helper tool.

    Args:
        name: The name of the target being built; used as the basename of the `.xctest`
            bundle (the bundle extension is appended).
        actions: The rule context's actions object.
        binary: The `File` for the test binary that should be bundled and executed.
        xctest_runner_template: The template `File` from which the runner shell script is
            generated.

    Returns:
        The executable shell-script `File` that launches the test bundle with `xctest`.
    """
    runner_script = derived_files.xctest_runner_script(actions, name)

    # Stamp the test binary's workspace-relative path into the template.
    actions.expand_template(
        template = xctest_runner_template,
        output = runner_script,
        substitutions = {
            "%binary%": binary.short_path,
        },
        is_executable = True,
    )
    return runner_script
def _swift_binary_impl(ctx):
    """Implementation of `swift_binary`: compiles `srcs` and links an executable."""
    toolchain = ctx.attr._toolchain[SwiftToolchainInfo]
    feature_configuration = swift_common.configure_features(
        toolchain = toolchain,
        requested_features = ctx.features,
        unsupported_features = ctx.disabled_features,
    )

    binary, providers = _swift_linking_rule_impl(
        ctx,
        feature_configuration = feature_configuration,
        is_test = False,
        toolchain = toolchain,
    )

    # Surface the linked binary as the rule's executable together with its data runfiles.
    binary_runfiles = ctx.runfiles(
        collect_data = True,
        collect_default = True,
        files = ctx.files.data,
    )
    default_info = DefaultInfo(
        executable = binary,
        runfiles = binary_runfiles,
    )
    return providers + [default_info]
def _swift_test_impl(ctx):
    """Implementation of the `swift_test` rule.

    Links the test binary and, when the toolchain supports Objective-C interop and the
    `swift.bundled_xctests` feature is enabled, wraps it in an xctest runner script.

    Args:
        ctx: The rule context.

    Returns:
        A `struct` carrying the rule's providers plus legacy `instrumented_files` info.
    """
    toolchain = ctx.attr._toolchain[SwiftToolchainInfo]
    feature_configuration = swift_common.configure_features(
        toolchain = toolchain,
        requested_features = ctx.features,
        unsupported_features = ctx.disabled_features,
    )

    # Bundled XCTest execution is only possible when Objective-C interop is available.
    is_bundled = (toolchain.supports_objc_interop and
                  is_feature_enabled(SWIFT_FEATURE_BUNDLED_XCTESTS, feature_configuration))

    # If we need to run the test in an .xctest bundle, the binary must have Mach-O type `MH_BUNDLE`
    # instead of `MH_EXECUTE`.
    # TODO(allevato): This should really be done in the toolchain's linker_opts_producer partial,
    # but it doesn't take the feature_configuration as an argument. We should update it to do so.
    linkopts = ["-Wl,-bundle"] if is_bundled else []

    binary, providers = _swift_linking_rule_impl(
        ctx,
        feature_configuration = feature_configuration,
        is_test = True,
        linkopts = linkopts,
        toolchain = toolchain,
    )

    # If the tests are to be bundled, create the test runner script as the rule's executable and
    # place the binary in runfiles so that it can be copied into place. Otherwise, just use the
    # binary itself as the executable to launch.
    # TODO(b/65413470): Make the output of the rule _itself_ an `.xctest` bundle once some
    # limitations of directory artifacts are resolved.
    if is_bundled:
        xctest_runner = _create_xctest_runner(
            name = ctx.label.name,
            actions = ctx.actions,
            binary = binary,
            xctest_runner_template = ctx.file._xctest_runner_template,
        )
        additional_test_outputs = [binary]
        executable = xctest_runner
    else:
        additional_test_outputs = []
        executable = binary

    # TODO(b/79527231): Replace `instrumented_files` with a declared provider when it is available.
    return struct(
        instrumented_files = struct(
            dependency_attributes = ["deps"],
            extensions = ["swift"],
            source_attributes = ["srcs"],
        ),
        providers = providers + [
            DefaultInfo(
                executable = executable,
                files = depset(direct = [executable] + additional_test_outputs),
                runfiles = ctx.runfiles(
                    collect_data = True,
                    collect_default = True,
                    files = ctx.files.data + additional_test_outputs,
                ),
            ),
            testing.ExecutionInfo(toolchain.execution_requirements),
        ],
    )
# Public rule: compiles and links Swift sources into a single-architecture executable.
swift_binary = rule(
    attrs = dicts.add(
        swift_common.compilation_attrs(additional_deps_aspects = [swift_c_module_aspect]),
        {
            "linkopts": attr.string_list(
                doc = """
Additional linker options that should be passed to `clang`. These strings are subject to
`$(location ...)` expansion.
""",
                mandatory = False,
            ),
            # Do not add references; temporary attribute for C++ toolchain Skylark migration.
            "_cc_toolchain": attr.label(default = Label("@bazel_tools//tools/cpp:current_cc_toolchain")),
        },
    ),
    doc = """
Compiles and links Swift code into an executable binary.
On Linux, this rule produces an executable binary for the desired target architecture.
On Apple platforms, this rule produces a _single-architecture_ binary; it does not produce fat
binaries. As such, this rule is mainly useful for creating Swift tools intended to run on the
local build machine. However, for historical reasons, the default Apple platform in Bazel is
**iOS** instead of macOS. Therefore, if you wish to build a simple single-architecture Swift
binary that can run on macOS, you must specify the correct CPU and platform on the command line as
follows:
```shell
$ bazel build //package:target --cpu=darwin_x86_64 --apple_platform_type=macos
```
If you want to create a multi-architecture binary or a bundled application, please use one of the
platform-specific application rules in [rules_apple](https://github.com/bazelbuild/rules_apple)
instead of `swift_binary`.
""",
    executable = True,
    fragments = [
        "cpp",
        "objc",
        "swift",
    ],
    implementation = _swift_binary_impl,
)
# Public rule: compiles and links Swift sources into an executable test target,
# optionally bundled for XCTest on Apple platforms (see `_swift_test_impl`).
swift_test = rule(
    attrs = dicts.add(
        swift_common.compilation_attrs(additional_deps_aspects = [swift_c_module_aspect]),
        {
            "linkopts": attr.string_list(
                doc = """
Additional linker options that should be passed to `clang`. These strings are subject to
`$(location ...)` expansion.
""",
                mandatory = False,
            ),
            # Do not add references; temporary attribute for C++ toolchain Skylark migration.
            "_cc_toolchain": attr.label(default = Label("@bazel_tools//tools/cpp:current_cc_toolchain")),
            # Template for the shell script that bundles and launches the test via `xctest`.
            "_xctest_runner_template": attr.label(
                allow_single_file = True,
                default = Label(
                    "@build_bazel_rules_swift//tools/xctest_runner:xctest_runner_template",
                ),
            ),
        },
    ),
    doc = """
Compiles and links Swift code into an executable test target.
The behavior of `swift_test` differs slightly for macOS targets, in order to provide seamless
integration with Apple's XCTest framework. The output of the rule is still a binary, but one whose
Mach-O type is `MH_BUNDLE` (a loadable bundle). Thus, the binary cannot be launched directly.
Instead, running `bazel test` on the target will launch a test runner script that copies it into an
`.xctest` bundle directory and then launches the `xctest` helper tool from Xcode, which uses
Objective-C runtime reflection to locate the tests.
On Linux, the output of a `swift_test` is a standard executable binary, because the implementation
of XCTest on that platform currently requires authors to explicitly list the tests that are present
and run them from their main program.
Test bundling on macOS can be disabled on a per-target basis, if desired. You may wish to do this if
you are not using XCTest, but rather a different test framework (or no framework at all) where the
pass/fail outcome is represented as a zero/non-zero exit code (as is the case with other Bazel test
rules like `cc_test`). To do so, disable the `"swift.bundled_xctests"` feature on the target:
```python
swift_test(
    name = "MyTests",
    srcs = [...],
    features = ["-swift.bundled_xctests"],
)
```
You can also disable this feature for all the tests in a package by applying it to your BUILD file's
`package()` declaration instead of the individual targets.
""",
    executable = True,
    fragments = [
        "cpp",
        "objc",
        "swift",
    ],
    test = True,
    implementation = _swift_test_impl,
)
| """Implementation of the `swift_binary` and `swift_test` rules."""
load(':api.bzl', 'swift_common')
load(':derived_files.bzl', 'derived_files')
load(':features.bzl', 'SWIFT_FEATURE_BUNDLED_XCTESTS', 'is_feature_enabled')
load(':linking.bzl', 'register_link_action')
load(':providers.bzl', 'SwiftBinaryInfo', 'SwiftToolchainInfo')
load(':swift_c_module_aspect.bzl', 'swift_c_module_aspect')
load(':utils.bzl', 'expand_locations')
load('@bazel_skylib//lib:dicts.bzl', 'dicts')
load('@bazel_skylib//lib:partial.bzl', 'partial')
load('@bazel_tools//tools/cpp:toolchain_utils.bzl', 'find_cpp_toolchain')
load('@bazel_tools//tools/build_defs/cc:action_names.bzl', 'CPP_LINK_EXECUTABLE_ACTION_NAME')
def _swift_linking_rule_impl(ctx, feature_configuration, is_test, toolchain, linkopts=[]):
"""The shared implementation function for `swift_{binary,test}`.
Args:
ctx: The rule context.
feature_configuration: A feature configuration obtained from
`swift_common.configure_features`.
is_test: A `Boolean` value indicating whether the binary is a test target.
toolchain: The `SwiftToolchainInfo` provider of the toolchain being used to build the
target.
linkopts: Additional rule-specific flags that should be passed to the linker.
Returns:
A tuple with two values: the `File` representing the binary that was linked, and a list of
providers to be propagated by the target being built.
"""
objc_fragment = ctx.fragments.objc if toolchain.supports_objc_interop else None
copts = expand_locations(ctx, ctx.attr.copts, ctx.attr.swiftc_inputs)
linkopts = list(linkopts) + expand_locations(ctx, ctx.attr.linkopts, ctx.attr.swiftc_inputs)
additional_inputs = ctx.files.swiftc_inputs
srcs = ctx.files.srcs
out_bin = derived_files.executable(ctx.actions, target_name=ctx.label.name)
objects_to_link = []
additional_output_groups = {}
compilation_providers = []
link_args = ctx.actions.args()
link_args.add('-o', out_bin)
if not srcs:
additional_inputs_to_linker = depset(direct=additional_inputs)
else:
module_name = ctx.attr.module_name
if not module_name:
module_name = swift_common.derive_module_name(ctx.label)
compile_results = swift_common.compile_as_objects(actions=ctx.actions, arguments=[], compilation_mode=ctx.var['COMPILATION_MODE'], copts=copts, defines=ctx.attr.defines, feature_configuration=feature_configuration, module_name=module_name, srcs=srcs, swift_fragment=ctx.fragments.swift, target_name=ctx.label.name, toolchain=toolchain, additional_input_depsets=[depset(direct=additional_inputs)], configuration=ctx.configuration, deps=ctx.attr.deps, genfiles_dir=ctx.genfiles_dir, objc_fragment=objc_fragment)
link_args.add_all(compile_results.linker_flags)
objects_to_link.extend(compile_results.output_objects)
additional_inputs_to_linker = depset(direct=compile_results.linker_inputs, transitive=[compile_results.compile_inputs])
dicts.add(additional_output_groups, compile_results.output_groups)
compilation_providers.append(swift_binary_info(compile_options=compile_results.compile_options))
link_args.add_all(partial.call(toolchain.linker_opts_producer, is_static=True, is_test=is_test))
if ctx.configuration.coverage_enabled:
coverage_features_to_enable = ['llvm_coverage_map_format']
coverage_features_to_disable = ['gcc_coverage_map_format']
else:
coverage_features_to_enable = []
coverage_features_to_disable = []
cpp_toolchain = find_cpp_toolchain(ctx)
cc_feature_configuration = cc_common.configure_features(cc_toolchain=cpp_toolchain, requested_features=swift_common.get_enabled_features(feature_configuration) + ['static_linking_mode'] + coverage_features_to_enable, unsupported_features=swift_common.get_disabled_features(feature_configuration) + coverage_features_to_disable)
variables = cc_common.create_link_variables(feature_configuration=cc_feature_configuration, cc_toolchain=cpp_toolchain, is_static_linking_mode=True)
link_cpp_toolchain_flags = cc_common.get_memory_inefficient_command_line(feature_configuration=cc_feature_configuration, action_name=CPP_LINK_EXECUTABLE_ACTION_NAME, variables=variables)
link_args.add_all(link_cpp_toolchain_flags)
register_link_action(actions=ctx.actions, action_environment=toolchain.action_environment, clang_executable=toolchain.clang_executable, deps=ctx.attr.deps + toolchain.implicit_deps, expanded_linkopts=linkopts, inputs=additional_inputs_to_linker, mnemonic='SwiftLinkExecutable', objects=objects_to_link, outputs=[out_bin], rule_specific_args=link_args, toolchain=toolchain)
return (out_bin, compilation_providers + [output_group_info(**additional_output_groups)])
def _create_xctest_runner(name, actions, binary, xctest_runner_template):
"""Creates a shell script that will bundle a test binary and launch the `xctest` helper tool.
Args:
name: The name of the target being built, which will be used as the basename of the bundle
(followed by the `.xctest` bundle extension).
actions: The context's actions object.
binary: The `File` representing the test binary that should be bundled and executed.
xctest_runner_template: The `File` that will be used as a template to generate the test
runner shell script.
Returns:
A `File` representing the shell script that will launch the test bundle with the `xctest`
tool.
"""
xctest_runner = derived_files.xctest_runner_script(actions, name)
actions.expand_template(is_executable=True, output=xctest_runner, template=xctest_runner_template, substitutions={'%binary%': binary.short_path})
return xctest_runner
def _swift_binary_impl(ctx):
toolchain = ctx.attr._toolchain[SwiftToolchainInfo]
feature_configuration = swift_common.configure_features(toolchain=toolchain, requested_features=ctx.features, unsupported_features=ctx.disabled_features)
(binary, providers) = _swift_linking_rule_impl(ctx, feature_configuration=feature_configuration, is_test=False, toolchain=toolchain)
return providers + [default_info(executable=binary, runfiles=ctx.runfiles(collect_data=True, collect_default=True, files=ctx.files.data))]
def _swift_test_impl(ctx):
toolchain = ctx.attr._toolchain[SwiftToolchainInfo]
feature_configuration = swift_common.configure_features(toolchain=toolchain, requested_features=ctx.features, unsupported_features=ctx.disabled_features)
is_bundled = toolchain.supports_objc_interop and is_feature_enabled(SWIFT_FEATURE_BUNDLED_XCTESTS, feature_configuration)
linkopts = ['-Wl,-bundle'] if is_bundled else []
(binary, providers) = _swift_linking_rule_impl(ctx, feature_configuration=feature_configuration, is_test=True, linkopts=linkopts, toolchain=toolchain)
if is_bundled:
xctest_runner = _create_xctest_runner(name=ctx.label.name, actions=ctx.actions, binary=binary, xctest_runner_template=ctx.file._xctest_runner_template)
additional_test_outputs = [binary]
executable = xctest_runner
else:
additional_test_outputs = []
executable = binary
return struct(instrumented_files=struct(dependency_attributes=['deps'], extensions=['swift'], source_attributes=['srcs']), providers=providers + [default_info(executable=executable, files=depset(direct=[executable] + additional_test_outputs), runfiles=ctx.runfiles(collect_data=True, collect_default=True, files=ctx.files.data + additional_test_outputs)), testing.ExecutionInfo(toolchain.execution_requirements)])
swift_binary = rule(attrs=dicts.add(swift_common.compilation_attrs(additional_deps_aspects=[swift_c_module_aspect]), {'linkopts': attr.string_list(doc='\nAdditional linker options that should be passed to `clang`. These strings are subject to\n`$(location ...)` expansion.\n', mandatory=False), '_cc_toolchain': attr.label(default=label('@bazel_tools//tools/cpp:current_cc_toolchain'))}), doc='\nCompiles and links Swift code into an executable binary.\n\nOn Linux, this rule produces an executable binary for the desired target architecture.\n\nOn Apple platforms, this rule produces a _single-architecture_ binary; it does not produce fat\nbinaries. As such, this rule is mainly useful for creating Swift tools intended to run on the\nlocal build machine. However, for historical reasons, the default Apple platform in Bazel is\n**iOS** instead of macOS. Therefore, if you wish to build a simple single-architecture Swift\nbinary that can run on macOS, you must specify the correct CPU and platform on the command line as\nfollows:\n\n```shell\n$ bazel build //package:target --cpu=darwin_x86_64 --apple_platform_type=macos\n```\n\nIf you want to create a multi-architecture binary or a bundled application, please use one of the\nplatform-specific application rules in [rules_apple](https://github.com/bazelbuild/rules_apple)\ninstead of `swift_binary`.\n', executable=True, fragments=['cpp', 'objc', 'swift'], implementation=_swift_binary_impl)
swift_test = rule(attrs=dicts.add(swift_common.compilation_attrs(additional_deps_aspects=[swift_c_module_aspect]), {'linkopts': attr.string_list(doc='\nAdditional linker options that should be passed to `clang`. These strings are subject to\n`$(location ...)` expansion.\n', mandatory=False), '_cc_toolchain': attr.label(default=label('@bazel_tools//tools/cpp:current_cc_toolchain')), '_xctest_runner_template': attr.label(allow_single_file=True, default=label('@build_bazel_rules_swift//tools/xctest_runner:xctest_runner_template'))}), doc='\nCompiles and links Swift code into an executable test target.\n\nThe behavior of `swift_test` differs slightly for macOS targets, in order to provide seamless\nintegration with Apple\'s XCTest framework. The output of the rule is still a binary, but one whose\nMach-O type is `MH_BUNDLE` (a loadable bundle). Thus, the binary cannot be launched directly.\nInstead, running `bazel test` on the target will launch a test runner script that copies it into an\n`.xctest` bundle directory and then launches the `xctest` helper tool from Xcode, which uses\nObjective-C runtime reflection to locate the tests.\n\nOn Linux, the output of a `swift_test` is a standard executable binary, because the implementation\nof XCTest on that platform currently requires authors to explicitly list the tests that are present\nand run them from their main program.\n\nTest bundling on macOS can be disabled on a per-target basis, if desired. You may wish to do this if\nyou are not using XCTest, but rather a different test framework (or no framework at all) where the\npass/fail outcome is represented as a zero/non-zero exit code (as is the case with other Bazel test\nrules like `cc_test`). 
To do so, disable the `"swift.bundled_xctests"` feature on the target:\n\n```python\nswift_test(\n name = "MyTests",\n srcs = [...],\n features = ["-swift.bundled_xctests"],\n)\n```\n\nYou can also disable this feature for all the tests in a package by applying it to your BUILD file\'s\n`package()` declaration instead of the individual targets.\n', executable=True, fragments=['cpp', 'objc', 'swift'], test=True, implementation=_swift_test_impl) |
class FormatSingle:
    """Extracts display fields from one cost-report entry.

    The expected keys ("TimePeriod", "Total", "BlendedCost") suggest an AWS Cost
    Explorer `ResultsByTime` element -- structure inferred from key names; confirm
    against the caller.
    """

    def __init__(self, singleData: dict):
        # Raw per-period entry; all accessors read from this dict.
        self.data = singleData

    def getMonthDay(self):
        """Return the period start date with its "YYYY-" prefix stripped (i.e. "MM-DD")."""
        start_date = self.data["TimePeriod"]["Start"]
        return start_date[5:]

    def getAmount(self):
        """Return the blended cost amount converted from string to float."""
        return float(self.data["Total"]["BlendedCost"]["Amount"])

    def getAmountUnit(self):
        """Return the currency unit string of the blended cost."""
        return self.data["Total"]["BlendedCost"]["Unit"]
| class Formatsingle:
def __init__(self, singleData: dict):
self.data = singleData
def get_month_day(self):
full_data = self.data['TimePeriod']['Start']
return fullData[5:]
def get_amount(self):
string_amount_data = self.data['Total']['BlendedCost']['Amount']
return float(stringAmountData)
def get_amount_unit(self):
return self.data['Total']['BlendedCost']['Unit'] |
"""
Minimum Domino version supported by this python-domino library
"""
MINIMUM_SUPPORTED_DOMINO_VERSION = '4.1.0'
"""
Environment variable names used by this python-domino library
"""
DOMINO_TOKEN_FILE_KEY_NAME = 'DOMINO_TOKEN_FILE'
DOMINO_USER_API_KEY_KEY_NAME = 'DOMINO_USER_API_KEY'
DOMINO_HOST_KEY_NAME = 'DOMINO_API_HOST'
| """
Minimum Domino version supported by this python-domino library
"""
minimum_supported_domino_version = '4.1.0'
'\nEnvironment variable names used by this python-domino library\n'
domino_token_file_key_name = 'DOMINO_TOKEN_FILE'
domino_user_api_key_key_name = 'DOMINO_USER_API_KEY'
domino_host_key_name = 'DOMINO_API_HOST' |
def login_to_foxford(driver):
    """Navigate the given WebDriver session to the Foxford login page."""
    blank_page = "about:blank"
    login_url = "https://foxford.ru/user/login/"

    driver.get(blank_page)
    # Refocus the first window; needed when a popup has stolen focus.
    primary_window = driver.window_handles[0]
    driver.switch_to.window(primary_window)
    driver.get(login_url)
| def login_to_foxford(driver):
"""Foxford login"""
driver.get('about:blank')
driver.switch_to.window(driver.window_handles[0])
driver.get('https://foxford.ru/user/login/') |
# https://leetcode.com/problems/coin-change/
#You are given an integer array coins representing coins of different denominations and an integer amount representing a total amount of money.
#Return the fewest number of coins that you need to make up that amount. If that amount of money cannot be made up by any combination of the coins, return -1.
#You may assume that you have an infinite number of each kind of coin.
class Solution(object):
    """LeetCode 322 "Coin Change": fewest coins summing to a target amount."""

    def coinChange(self, coins, amount):
        """
        :type coins: List[int]
        :type amount: int
        :rtype: int  (minimum coin count, or -1 if the amount is unreachable)
        """
        INF = float("inf")
        # best[t] holds the minimum number of coins summing exactly to t.
        best = [0] + [INF] * amount
        for total in range(1, amount + 1):
            # Every coin no larger than the running total offers one candidate count.
            reachable = [best[total - coin] + 1 for coin in coins if coin <= total]
            if reachable:
                best[total] = min(reachable)
        return -1 if best[amount] == INF else best[amount]
def coin_change(self, coins, amount):
"""
:type coins: List[int]
:type amount: int
:rtype: int
"""
dp = [0] + [float('inf')] * amount
for i in range(1, amount + 1):
for coin in coins:
if i >= coin:
dp[i] = min(dp[i], dp[i - coin] + 1)
return dp[-1] if dp[-1] != float('inf') else -1 |
# -*- coding: utf-8 -*-
# Auth0-style log event type codes mapped to a human-readable event name and
# a numeric severity level: 0=Debug, 1=Info, 2=Warning, 3=Error, 4=Critical.
LOG_TYPES = {
    "s": {"event": "Success Login", "level": 1},  # Info
    "seacft": {"event": "Success Exchange", "level": 1},  # Info
    "seccft": {"event": "Success Exchange (Client Credentials)", "level": 1},  # Info
    "feacft": {"event": "Failed Exchange", "level": 3},  # Error
    "feccft": {"event": "Failed Exchange (Client Credentials)", "level": 3},  # Error
    "f": {"event": "Failed Login", "level": 3},  # Error
    "w": {"event": "Warnings During Login", "level": 2},  # Warning
    "du": {"event": "Deleted User", "level": 1},  # Info
    "fu": {"event": "Failed Login (invalid email/username)", "level": 3},  # Error
    "fp": {"event": "Failed Login (wrong password)", "level": 3},  # Error
    "fc": {"event": "Failed by Connector", "level": 3},  # Error
    "fco": {"event": "Failed by CORS", "level": 3},  # Error
    "con": {"event": "Connector Online", "level": 1},  # Info
    "coff": {"event": "Connector Offline", "level": 3},  # Error
    "fcpro": {"event": "Failed Connector Provisioning", "level": 4},  # Critical
    "ss": {"event": "Success Signup", "level": 1},  # Info
    "fs": {"event": "Failed Signup", "level": 3},  # Error
    "cs": {"event": "Code Sent", "level": 0},  # Debug
    "cls": {"event": "Code/Link Sent", "level": 0},  # Debug
    "sv": {"event": "Success Verification Email", "level": 0},  # Debug
    "fv": {"event": "Failed Verification Email", "level": 0},  # Debug
    "scp": {"event": "Success Change Password", "level": 1},  # Info
    "fcp": {"event": "Failed Change Password", "level": 3},  # Error
    "sce": {"event": "Success Change Email", "level": 1},  # Info
    "fce": {"event": "Failed Change Email", "level": 3},  # Error
    "scu": {"event": "Success Change Username", "level": 1},  # Info
    "fcu": {"event": "Failed Change Username", "level": 3},  # Error
    "scpn": {"event": "Success Change Phone Number", "level": 1},  # Info
    "fcpn": {"event": "Failed Change Phone Number", "level": 3},  # Error
    "svr": {"event": "Success Verification Email Request", "level": 0},  # Debug
    "fvr": {"event": "Failed Verification Email Request", "level": 3},  # Error
    "scpr": {"event": "Success Change Password Request", "level": 0},  # Debug
    "fcpr": {"event": "Failed Change Password Request", "level": 3},  # Error
    "fn": {"event": "Failed Sending Notification", "level": 3},  # Error
    "sapi": {"event": "API Operation"},  # NOTE(review): no "level" key here, unlike every other entry — confirm whether intentional
    "limit_wc": {"event": "Blocked Account", "level": 4},  # Critical
    "limit_ui": {"event": "Too Many Calls to /userinfo", "level": 4},  # Critical
    "api_limit": {"event": "Rate Limit On API", "level": 4},  # Critical
    "sdu": {"event": "Successful User Deletion", "level": 1},  # Info
    "fdu": {"event": "Failed User Deletion", "level": 3},  # Error
    "fapi": {"event": "Failed API Operation", "level": 3},  # Error
    "limit_mu": {"event": "Blocked IP Address", "level": 3},  # Error
    "slo": {"event": "Success Logout", "level": 1},  # Info
    "flo": {"event": "Failed Logout", "level": 3},  # Error
    "sd": {"event": "Success Delegation", "level": 1},  # Info
    "fd": {"event": "Failed Delegation", "level": 3},  # Error
}
| log_types = {'s': {'event': 'Success Login', 'level': 1}, 'seacft': {'event': 'Success Exchange', 'level': 1}, 'seccft': {'event': 'Success Exchange (Client Credentials)', 'level': 1}, 'feacft': {'event': 'Failed Exchange', 'level': 3}, 'feccft': {'event': 'Failed Exchange (Client Credentials)', 'level': 3}, 'f': {'event': 'Failed Login', 'level': 3}, 'w': {'event': 'Warnings During Login', 'level': 2}, 'du': {'event': 'Deleted User', 'level': 1}, 'fu': {'event': 'Failed Login (invalid email/username)', 'level': 3}, 'fp': {'event': 'Failed Login (wrong password)', 'level': 3}, 'fc': {'event': 'Failed by Connector', 'level': 3}, 'fco': {'event': 'Failed by CORS', 'level': 3}, 'con': {'event': 'Connector Online', 'level': 1}, 'coff': {'event': 'Connector Offline', 'level': 3}, 'fcpro': {'event': 'Failed Connector Provisioning', 'level': 4}, 'ss': {'event': 'Success Signup', 'level': 1}, 'fs': {'event': 'Failed Signup', 'level': 3}, 'cs': {'event': 'Code Sent', 'level': 0}, 'cls': {'event': 'Code/Link Sent', 'level': 0}, 'sv': {'event': 'Success Verification Email', 'level': 0}, 'fv': {'event': 'Failed Verification Email', 'level': 0}, 'scp': {'event': 'Success Change Password', 'level': 1}, 'fcp': {'event': 'Failed Change Password', 'level': 3}, 'sce': {'event': 'Success Change Email', 'level': 1}, 'fce': {'event': 'Failed Change Email', 'level': 3}, 'scu': {'event': 'Success Change Username', 'level': 1}, 'fcu': {'event': 'Failed Change Username', 'level': 3}, 'scpn': {'event': 'Success Change Phone Number', 'level': 1}, 'fcpn': {'event': 'Failed Change Phone Number', 'level': 3}, 'svr': {'event': 'Success Verification Email Request', 'level': 0}, 'fvr': {'event': 'Failed Verification Email Request', 'level': 3}, 'scpr': {'event': 'Success Change Password Request', 'level': 0}, 'fcpr': {'event': 'Failed Change Password Request', 'level': 3}, 'fn': {'event': 'Failed Sending Notification', 'level': 3}, 'sapi': {'event': 'API Operation'}, 'limit_wc': {'event': 
'Blocked Account', 'level': 4}, 'limit_ui': {'event': 'Too Many Calls to /userinfo', 'level': 4}, 'api_limit': {'event': 'Rate Limit On API', 'level': 4}, 'sdu': {'event': 'Successful User Deletion', 'level': 1}, 'fdu': {'event': 'Failed User Deletion', 'level': 3}, 'fapi': {'event': 'Failed API Operation', 'level': 3}, 'limit_mu': {'event': 'Blocked IP Address', 'level': 3}, 'slo': {'event': 'Success Logout', 'level': 1}, 'flo': {'event': 'Failed Logout', 'level': 3}, 'sd': {'event': 'Success Delegation', 'level': 1}, 'fd': {'event': 'Failed Delegation', 'level': 3}} |
"""
==============
Array indexing
==============
Array indexing refers to any use of the square brackets ([]) to index
array values. There are many options to indexing, which give numpy
indexing great power, but with power comes some complexity and the
potential for confusion. This section is just an overview of the
various options and issues related to indexing. Aside from single
element indexing, the details on most of these options are to be
found in related sections.
Assignment vs referencing
=========================
Most of the following examples show the use of indexing when
referencing data in an array. The examples work just as well
when assigning to an array. See the section at the end for
specific examples and explanations on how assignments work.
Single element indexing
=======================
Single element indexing for a 1-D array is what one expects. It works
exactly like that for other standard Python sequences. It is 0-based,
and accepts negative indices for indexing from the end of the array. ::
>>> x = np.arange(10)
>>> x[2]
2
>>> x[-2]
8
Unlike lists and tuples, numpy arrays support multidimensional indexing
for multidimensional arrays. That means that it is not necessary to
separate each dimension's index into its own set of square brackets. ::
>>> x.shape = (2,5) # now x is 2-dimensional
>>> x[1,3]
8
>>> x[1,-1]
9
Note that if one indexes a multidimensional array with fewer indices
than dimensions, one gets a subdimensional array. For example: ::
>>> x[0]
array([0, 1, 2, 3, 4])
That is, each index specified selects the array corresponding to the
rest of the dimensions selected. In the above example, choosing 0
means that the remaining dimension of length 5 is being left unspecified,
and that what is returned is an array of that dimensionality and size.
It must be noted that the returned array is not a copy of the original,
but points to the same values in memory as does the original array.
In this case, the 1-D array at the first position (0) is returned.
So using a single index on the returned array, results in a single
element being returned. That is: ::
>>> x[0][2]
2
So note that ``x[0,2] = x[0][2]`` though the second case is more
inefficient as a new temporary array is created after the first index
that is subsequently indexed by 2.
Note to those used to IDL or Fortran memory order as it relates to
indexing. NumPy uses C-order indexing. That means that the last
index usually represents the most rapidly changing memory location,
unlike Fortran or IDL, where the first index represents the most
rapidly changing location in memory. This difference represents a
great potential for confusion.
Other indexing options
======================
It is possible to slice and stride arrays to extract arrays of the
same number of dimensions, but of different sizes than the original.
The slicing and striding works exactly the same way it does for lists
and tuples except that they can be applied to multiple dimensions as
well. A few examples illustrate best: ::
>>> x = np.arange(10)
>>> x[2:5]
array([2, 3, 4])
>>> x[:-7]
array([0, 1, 2])
>>> x[1:7:2]
array([1, 3, 5])
>>> y = np.arange(35).reshape(5,7)
>>> y[1:5:2,::3]
array([[ 7, 10, 13],
[21, 24, 27]])
Note that slices of arrays do not copy the internal array data but
only produce new views of the original data. This is different from
list or tuple slicing and an explicit ``copy()`` is recommended if
the original data is not required anymore.
It is possible to index arrays with other arrays for the purposes of
selecting lists of values out of arrays into new arrays. There are
two different ways of accomplishing this. One uses one or more arrays
of index values. The other involves giving a boolean array of the proper
shape to indicate the values to be selected. Index arrays are a very
powerful tool that allow one to avoid looping over individual elements in
arrays and thus greatly improve performance.
It is possible to use special features to effectively increase the
number of dimensions in an array through indexing so the resulting
array acquires the shape needed for use in an expression or with a
specific function.
Index arrays
============
NumPy arrays may be indexed with other arrays (or any other sequence-
like object that can be converted to an array, such as lists, with the
exception of tuples; see the end of this document for why this is). The
use of index arrays ranges from simple, straightforward cases to
complex, hard-to-understand cases. For all cases of index arrays, what
is returned is a copy of the original data, not a view as one gets for
slices.
Index arrays must be of integer type. Each value in the array indicates
which value in the array to use in place of the index. To illustrate: ::
>>> x = np.arange(10,1,-1)
>>> x
array([10, 9, 8, 7, 6, 5, 4, 3, 2])
>>> x[np.array([3, 3, 1, 8])]
array([7, 7, 9, 2])
The index array consisting of the values 3, 3, 1 and 8 correspondingly
creates an array of length 4 (same as the index array) where each index
is replaced by the value the index array has in the array being indexed.
Negative values are permitted and work as they do with single indices
or slices: ::
>>> x[np.array([3,3,-3,8])]
array([7, 7, 4, 2])
It is an error to have index values out of bounds: ::
>>> x[np.array([3, 3, 20, 8])]
<type 'exceptions.IndexError'>: index 20 out of bounds 0<=index<9
Generally speaking, what is returned when index arrays are used is
an array with the same shape as the index array, but with the type
and values of the array being indexed. As an example, we can use a
multidimensional index array instead: ::
>>> x[np.array([[1,1],[2,3]])]
array([[9, 9],
[8, 7]])
Indexing Multi-dimensional arrays
=================================
Things become more complex when multidimensional arrays are indexed,
particularly with multidimensional index arrays. These tend to be
more unusual uses, but they are permitted, and they are useful for some
problems. We'll start with the simplest multidimensional case (using
the array y from the previous examples): ::
>>> y[np.array([0,2,4]), np.array([0,1,2])]
array([ 0, 15, 30])
In this case, if the index arrays have a matching shape, and there is
an index array for each dimension of the array being indexed, the
resultant array has the same shape as the index arrays, and the values
correspond to the index set for each position in the index arrays. In
this example, the first index value is 0 for both index arrays, and
thus the first value of the resultant array is y[0,0]. The next value
is y[2,1], and the last is y[4,2].
If the index arrays do not have the same shape, there is an attempt to
broadcast them to the same shape. If they cannot be broadcast to the
same shape, an exception is raised: ::
>>> y[np.array([0,2,4]), np.array([0,1])]
<type 'exceptions.ValueError'>: shape mismatch: objects cannot be
broadcast to a single shape
The broadcasting mechanism permits index arrays to be combined with
scalars for other indices. The effect is that the scalar value is used
for all the corresponding values of the index arrays: ::
>>> y[np.array([0,2,4]), 1]
array([ 1, 15, 29])
Jumping to the next level of complexity, it is possible to only
partially index an array with index arrays. It takes a bit of thought
to understand what happens in such cases. For example if we just use
one index array with y: ::
>>> y[np.array([0,2,4])]
array([[ 0, 1, 2, 3, 4, 5, 6],
[14, 15, 16, 17, 18, 19, 20],
[28, 29, 30, 31, 32, 33, 34]])
What results is the construction of a new array where each value of
the index array selects one row from the array being indexed and the
resultant array has the resulting shape (number of index elements,
size of row).
An example of where this may be useful is for a color lookup table
where we want to map the values of an image into RGB triples for
display. The lookup table could have a shape (nlookup, 3). Indexing
such an array with an image with shape (ny, nx) with dtype=np.uint8
(or any integer type so long as values are within the bounds of the
lookup table) will result in an array of shape (ny, nx, 3) where a
triple of RGB values is associated with each pixel location.
In general, the shape of the resultant array will be the concatenation
of the shape of the index array (or the shape that all the index arrays
were broadcast to) with the shape of any unused dimensions (those not
indexed) in the array being indexed.
Boolean or "mask" index arrays
==============================
Boolean arrays used as indices are treated in a different manner
entirely than index arrays. Boolean arrays must be of the same shape
as the initial dimensions of the array being indexed. In the
most straightforward case, the boolean array has the same shape: ::
>>> b = y>20
>>> y[b]
array([21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34])
Unlike in the case of integer index arrays, in the boolean case, the
result is a 1-D array containing all the elements in the indexed array
corresponding to all the true elements in the boolean array. The
elements in the indexed array are always iterated and returned in
:term:`row-major` (C-style) order. The result is also identical to
``y[np.nonzero(b)]``. As with index arrays, what is returned is a copy
of the data, not a view as one gets with slices.
The result will be multidimensional if y has more dimensions than b.
For example: ::
>>> b[:,5] # use a 1-D boolean whose first dim agrees with the first dim of y
array([False, False, False, True, True])
>>> y[b[:,5]]
array([[21, 22, 23, 24, 25, 26, 27],
[28, 29, 30, 31, 32, 33, 34]])
Here the 4th and 5th rows are selected from the indexed array and
combined to make a 2-D array.
In general, when the boolean array has fewer dimensions than the array
being indexed, this is equivalent to y[b, ...], which means
y is indexed by b followed by as many : as are needed to fill
out the rank of y.
Thus the shape of the result is one dimension containing the number
of True elements of the boolean array, followed by the remaining
dimensions of the array being indexed.
For example, using a 2-D boolean array of shape (2,3)
with four True elements to select rows from a 3-D array of shape
(2,3,5) results in a 2-D result of shape (4,5): ::
>>> x = np.arange(30).reshape(2,3,5)
>>> x
array([[[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14]],
[[15, 16, 17, 18, 19],
[20, 21, 22, 23, 24],
[25, 26, 27, 28, 29]]])
>>> b = np.array([[True, True, False], [False, True, True]])
>>> x[b]
array([[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[20, 21, 22, 23, 24],
[25, 26, 27, 28, 29]])
For further details, consult the numpy reference documentation on array indexing.
Combining index arrays with slices
==================================
Index arrays may be combined with slices. For example: ::
>>> y[np.array([0,2,4]),1:3]
array([[ 1, 2],
[15, 16],
[29, 30]])
In effect, the slice is converted to an index array
np.array([[1,2]]) (shape (1,2)) that is broadcast with the index array
to produce a resultant array of shape (3,2).
Likewise, slicing can be combined with broadcasted boolean indices: ::
>>> b = y > 20
>>> b
array([[False, False, False, False, False, False, False],
[False, False, False, False, False, False, False],
[False, False, False, False, False, False, False],
[ True, True, True, True, True, True, True],
[ True, True, True, True, True, True, True]])
>>> y[b[:,5],1:3]
array([[22, 23],
[29, 30]])
Structural indexing tools
=========================
To facilitate easy matching of array shapes with expressions and in
assignments, the np.newaxis object can be used within array indices
to add new dimensions with a size of 1. For example: ::
>>> y.shape
(5, 7)
>>> y[:,np.newaxis,:].shape
(5, 1, 7)
Note that there are no new elements in the array, just that the
dimensionality is increased. This can be handy to combine two
arrays in a way that otherwise would require explicitly reshaping
operations. For example: ::
>>> x = np.arange(5)
>>> x[:,np.newaxis] + x[np.newaxis,:]
array([[0, 1, 2, 3, 4],
[1, 2, 3, 4, 5],
[2, 3, 4, 5, 6],
[3, 4, 5, 6, 7],
[4, 5, 6, 7, 8]])
The ellipsis syntax may be used to indicate selecting in full any
remaining unspecified dimensions. For example: ::
>>> z = np.arange(81).reshape(3,3,3,3)
>>> z[1,...,2]
array([[29, 32, 35],
[38, 41, 44],
[47, 50, 53]])
This is equivalent to: ::
>>> z[1,:,:,2]
array([[29, 32, 35],
[38, 41, 44],
[47, 50, 53]])
Assigning values to indexed arrays
==================================
As mentioned, one can select a subset of an array to assign to using
a single index, slices, and index and mask arrays. The value being
assigned to the indexed array must be shape consistent (the same shape
or broadcastable to the shape the index produces). For example, it is
permitted to assign a constant to a slice: ::
>>> x = np.arange(10)
>>> x[2:7] = 1
or an array of the right size: ::
>>> x[2:7] = np.arange(5)
Note that assignments may result in changes if assigning
higher types to lower types (like floats to ints) or even
exceptions (assigning complex to floats or ints): ::
>>> x[1] = 1.2
>>> x[1]
1
>>> x[1] = 1.2j
TypeError: can't convert complex to int
Unlike some of the references (such as array and mask indices)
assignments are always made to the original data in the array
(indeed, nothing else would make sense!). Note though, that some
actions may not work as one may naively expect. This particular
example is often surprising to people: ::
>>> x = np.arange(0, 50, 10)
>>> x
array([ 0, 10, 20, 30, 40])
>>> x[np.array([1, 1, 3, 1])] += 1
>>> x
array([ 0, 11, 20, 31, 40])
Where people expect that the 1st location will be incremented by 3.
In fact, it will only be incremented by 1. The reason is because
a new array is extracted from the original (as a temporary) containing
the values at 1, 1, 3, 1, then the value 1 is added to the temporary,
and then the temporary is assigned back to the original array. Thus
the value of the array at x[1]+1 is assigned to x[1] three times,
rather than being incremented 3 times.
Dealing with variable numbers of indices within programs
========================================================
The index syntax is very powerful but limiting when dealing with
a variable number of indices. For example, if you want to write
a function that can handle arguments with various numbers of
dimensions without having to write special case code for each
number of possible dimensions, how can that be done? If one
supplies to the index a tuple, the tuple will be interpreted
as a list of indices. For example (using the previous definition
for the array z): ::
>>> indices = (1,1,1,1)
>>> z[indices]
40
So one can use code to construct tuples of any number of indices
and then use these within an index.
Slices can be specified within programs by using the slice() function
in Python. For example: ::
>>> indices = (1,1,1,slice(0,2)) # same as [1,1,1,0:2]
>>> z[indices]
array([39, 40])
Likewise, ellipsis can be specified by code by using the Ellipsis
object: ::
>>> indices = (1, Ellipsis, 1) # same as [1,...,1]
>>> z[indices]
array([[28, 31, 34],
[37, 40, 43],
[46, 49, 52]])
For this reason it is possible to use the output from the np.nonzero()
function directly as an index since it always returns a tuple of index
arrays.
Because of the special treatment of tuples, they are not automatically
converted to an array as a list would be. As an example: ::
>>> z[[1,1,1,1]] # produces a large array
array([[[[27, 28, 29],
[30, 31, 32], ...
>>> z[(1,1,1,1)] # returns a single value
40
"""
| """
==============
Array indexing
==============
Array indexing refers to any use of the square brackets ([]) to index
array values. There are many options to indexing, which give numpy
indexing great power, but with power comes some complexity and the
potential for confusion. This section is just an overview of the
various options and issues related to indexing. Aside from single
element indexing, the details on most of these options are to be
found in related sections.
Assignment vs referencing
=========================
Most of the following examples show the use of indexing when
referencing data in an array. The examples work just as well
when assigning to an array. See the section at the end for
specific examples and explanations on how assignments work.
Single element indexing
=======================
Single element indexing for a 1-D array is what one expects. It works
exactly like that for other standard Python sequences. It is 0-based,
and accepts negative indices for indexing from the end of the array. ::
>>> x = np.arange(10)
>>> x[2]
2
>>> x[-2]
8
Unlike lists and tuples, numpy arrays support multidimensional indexing
for multidimensional arrays. That means that it is not necessary to
separate each dimension's index into its own set of square brackets. ::
>>> x.shape = (2,5) # now x is 2-dimensional
>>> x[1,3]
8
>>> x[1,-1]
9
Note that if one indexes a multidimensional array with fewer indices
than dimensions, one gets a subdimensional array. For example: ::
>>> x[0]
array([0, 1, 2, 3, 4])
That is, each index specified selects the array corresponding to the
rest of the dimensions selected. In the above example, choosing 0
means that the remaining dimension of length 5 is being left unspecified,
and that what is returned is an array of that dimensionality and size.
It must be noted that the returned array is not a copy of the original,
but points to the same values in memory as does the original array.
In this case, the 1-D array at the first position (0) is returned.
So using a single index on the returned array, results in a single
element being returned. That is: ::
>>> x[0][2]
2
So note that ``x[0,2] = x[0][2]`` though the second case is more
inefficient as a new temporary array is created after the first index
that is subsequently indexed by 2.
Note to those used to IDL or Fortran memory order as it relates to
indexing. NumPy uses C-order indexing. That means that the last
index usually represents the most rapidly changing memory location,
unlike Fortran or IDL, where the first index represents the most
rapidly changing location in memory. This difference represents a
great potential for confusion.
Other indexing options
======================
It is possible to slice and stride arrays to extract arrays of the
same number of dimensions, but of different sizes than the original.
The slicing and striding works exactly the same way it does for lists
and tuples except that they can be applied to multiple dimensions as
well. A few examples illustrate best: ::
>>> x = np.arange(10)
>>> x[2:5]
array([2, 3, 4])
>>> x[:-7]
array([0, 1, 2])
>>> x[1:7:2]
array([1, 3, 5])
>>> y = np.arange(35).reshape(5,7)
>>> y[1:5:2,::3]
array([[ 7, 10, 13],
[21, 24, 27]])
Note that slices of arrays do not copy the internal array data but
only produce new views of the original data. This is different from
list or tuple slicing and an explicit ``copy()`` is recommended if
the original data is not required anymore.
It is possible to index arrays with other arrays for the purposes of
selecting lists of values out of arrays into new arrays. There are
two different ways of accomplishing this. One uses one or more arrays
of index values. The other involves giving a boolean array of the proper
shape to indicate the values to be selected. Index arrays are a very
powerful tool that allow one to avoid looping over individual elements in
arrays and thus greatly improve performance.
It is possible to use special features to effectively increase the
number of dimensions in an array through indexing so the resulting
array acquires the shape needed for use in an expression or with a
specific function.
Index arrays
============
NumPy arrays may be indexed with other arrays (or any other sequence-
like object that can be converted to an array, such as lists, with the
exception of tuples; see the end of this document for why this is). The
use of index arrays ranges from simple, straightforward cases to
complex, hard-to-understand cases. For all cases of index arrays, what
is returned is a copy of the original data, not a view as one gets for
slices.
Index arrays must be of integer type. Each value in the array indicates
which value in the array to use in place of the index. To illustrate: ::
>>> x = np.arange(10,1,-1)
>>> x
array([10, 9, 8, 7, 6, 5, 4, 3, 2])
>>> x[np.array([3, 3, 1, 8])]
array([7, 7, 9, 2])
The index array consisting of the values 3, 3, 1 and 8 correspondingly
creates an array of length 4 (same as the index array) where each index
is replaced by the value the index array has in the array being indexed.
Negative values are permitted and work as they do with single indices
or slices: ::
>>> x[np.array([3,3,-3,8])]
array([7, 7, 4, 2])
It is an error to have index values out of bounds: ::
>>> x[np.array([3, 3, 20, 8])]
<type 'exceptions.IndexError'>: index 20 out of bounds 0<=index<9
Generally speaking, what is returned when index arrays are used is
an array with the same shape as the index array, but with the type
and values of the array being indexed. As an example, we can use a
multidimensional index array instead: ::
>>> x[np.array([[1,1],[2,3]])]
array([[9, 9],
[8, 7]])
Indexing Multi-dimensional arrays
=================================
Things become more complex when multidimensional arrays are indexed,
particularly with multidimensional index arrays. These tend to be
more unusual uses, but they are permitted, and they are useful for some
problems. We'll start with the simplest multidimensional case (using
the array y from the previous examples): ::
>>> y[np.array([0,2,4]), np.array([0,1,2])]
array([ 0, 15, 30])
In this case, if the index arrays have a matching shape, and there is
an index array for each dimension of the array being indexed, the
resultant array has the same shape as the index arrays, and the values
correspond to the index set for each position in the index arrays. In
this example, the first index value is 0 for both index arrays, and
thus the first value of the resultant array is y[0,0]. The next value
is y[2,1], and the last is y[4,2].
If the index arrays do not have the same shape, there is an attempt to
broadcast them to the same shape. If they cannot be broadcast to the
same shape, an exception is raised: ::
>>> y[np.array([0,2,4]), np.array([0,1])]
<type 'exceptions.ValueError'>: shape mismatch: objects cannot be
broadcast to a single shape
The broadcasting mechanism permits index arrays to be combined with
scalars for other indices. The effect is that the scalar value is used
for all the corresponding values of the index arrays: ::
>>> y[np.array([0,2,4]), 1]
array([ 1, 15, 29])
Jumping to the next level of complexity, it is possible to only
partially index an array with index arrays. It takes a bit of thought
to understand what happens in such cases. For example if we just use
one index array with y: ::
>>> y[np.array([0,2,4])]
array([[ 0, 1, 2, 3, 4, 5, 6],
[14, 15, 16, 17, 18, 19, 20],
[28, 29, 30, 31, 32, 33, 34]])
What results is the construction of a new array where each value of
the index array selects one row from the array being indexed and the
resultant array has the resulting shape (number of index elements,
size of row).
An example of where this may be useful is for a color lookup table
where we want to map the values of an image into RGB triples for
display. The lookup table could have a shape (nlookup, 3). Indexing
such an array with an image with shape (ny, nx) with dtype=np.uint8
(or any integer type so long as values are within the bounds of the
lookup table) will result in an array of shape (ny, nx, 3) where a
triple of RGB values is associated with each pixel location.
In general, the shape of the resultant array will be the concatenation
of the shape of the index array (or the shape that all the index arrays
were broadcast to) with the shape of any unused dimensions (those not
indexed) in the array being indexed.
Boolean or "mask" index arrays
==============================
Boolean arrays used as indices are treated in a different manner
entirely than index arrays. Boolean arrays must be of the same shape
as the initial dimensions of the array being indexed. In the
most straightforward case, the boolean array has the same shape: ::
>>> b = y>20
>>> y[b]
array([21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34])
Unlike in the case of integer index arrays, in the boolean case, the
result is a 1-D array containing all the elements in the indexed array
corresponding to all the true elements in the boolean array. The
elements in the indexed array are always iterated and returned in
:term:`row-major` (C-style) order. The result is also identical to
``y[np.nonzero(b)]``. As with index arrays, what is returned is a copy
of the data, not a view as one gets with slices.
The result will be multidimensional if y has more dimensions than b.
For example: ::
>>> b[:,5] # use a 1-D boolean whose first dim agrees with the first dim of y
array([False, False, False, True, True])
>>> y[b[:,5]]
array([[21, 22, 23, 24, 25, 26, 27],
[28, 29, 30, 31, 32, 33, 34]])
Here the 4th and 5th rows are selected from the indexed array and
combined to make a 2-D array.
In general, when the boolean array has fewer dimensions than the array
being indexed, this is equivalent to y[b, ...], which means
y is indexed by b followed by as many : as are needed to fill
out the rank of y.
Thus the shape of the result is one dimension containing the number
of True elements of the boolean array, followed by the remaining
dimensions of the array being indexed.
For example, using a 2-D boolean array of shape (2,3)
with four True elements to select rows from a 3-D array of shape
(2,3,5) results in a 2-D result of shape (4,5): ::
>>> x = np.arange(30).reshape(2,3,5)
>>> x
array([[[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14]],
[[15, 16, 17, 18, 19],
[20, 21, 22, 23, 24],
[25, 26, 27, 28, 29]]])
>>> b = np.array([[True, True, False], [False, True, True]])
>>> x[b]
array([[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[20, 21, 22, 23, 24],
[25, 26, 27, 28, 29]])
For further details, consult the numpy reference documentation on array indexing.
Combining index arrays with slices
==================================
Index arrays may be combined with slices. For example: ::
>>> y[np.array([0,2,4]),1:3]
array([[ 1, 2],
[15, 16],
[29, 30]])
In effect, the slice is converted to an index array
np.array([[1,2]]) (shape (1,2)) that is broadcast with the index array
to produce a resultant array of shape (3,2).
Likewise, slicing can be combined with broadcasted boolean indices: ::
>>> b = y > 20
>>> b
array([[False, False, False, False, False, False, False],
[False, False, False, False, False, False, False],
[False, False, False, False, False, False, False],
[ True, True, True, True, True, True, True],
[ True, True, True, True, True, True, True]])
>>> y[b[:,5],1:3]
array([[22, 23],
[29, 30]])
Structural indexing tools
=========================
To facilitate easy matching of array shapes with expressions and in
assignments, the np.newaxis object can be used within array indices
to add new dimensions with a size of 1. For example: ::
>>> y.shape
(5, 7)
>>> y[:,np.newaxis,:].shape
(5, 1, 7)
Note that there are no new elements in the array, just that the
dimensionality is increased. This can be handy to combine two
arrays in a way that otherwise would require explicitly reshaping
operations. For example: ::
>>> x = np.arange(5)
>>> x[:,np.newaxis] + x[np.newaxis,:]
array([[0, 1, 2, 3, 4],
[1, 2, 3, 4, 5],
[2, 3, 4, 5, 6],
[3, 4, 5, 6, 7],
[4, 5, 6, 7, 8]])
The ellipsis syntax maybe used to indicate selecting in full any
remaining unspecified dimensions. For example: ::
>>> z = np.arange(81).reshape(3,3,3,3)
>>> z[1,...,2]
array([[29, 32, 35],
[38, 41, 44],
[47, 50, 53]])
This is equivalent to: ::
>>> z[1,:,:,2]
array([[29, 32, 35],
[38, 41, 44],
[47, 50, 53]])
Assigning values to indexed arrays
==================================
As mentioned, one can select a subset of an array to assign to using
a single index, slices, and index and mask arrays. The value being
assigned to the indexed array must be shape consistent (the same shape
or broadcastable to the shape the index produces). For example, it is
permitted to assign a constant to a slice: ::
>>> x = np.arange(10)
>>> x[2:7] = 1
or an array of the right size: ::
>>> x[2:7] = np.arange(5)
Note that assignments may result in changes if assigning
higher types to lower types (like floats to ints) or even
exceptions (assigning complex to floats or ints): ::
>>> x[1] = 1.2
>>> x[1]
1
>>> x[1] = 1.2j
TypeError: can't convert complex to int
Unlike some of the references (such as array and mask indices)
assignments are always made to the original data in the array
(indeed, nothing else would make sense!). Note though, that some
actions may not work as one may naively expect. This particular
example is often surprising to people: ::
>>> x = np.arange(0, 50, 10)
>>> x
array([ 0, 10, 20, 30, 40])
>>> x[np.array([1, 1, 3, 1])] += 1
>>> x
array([ 0, 11, 20, 31, 40])
Where people expect that the 1st location will be incremented by 3.
In fact, it will only be incremented by 1. The reason is because
a new array is extracted from the original (as a temporary) containing
the values at 1, 1, 3, 1, then the value 1 is added to the temporary,
and then the temporary is assigned back to the original array. Thus
the value of the array at x[1]+1 is assigned to x[1] three times,
rather than being incremented 3 times.
Dealing with variable numbers of indices within programs
========================================================
The index syntax is very powerful but limiting when dealing with
a variable number of indices. For example, if you want to write
a function that can handle arguments with various numbers of
dimensions without having to write special case code for each
number of possible dimensions, how can that be done? If one
supplies to the index a tuple, the tuple will be interpreted
as a list of indices. For example (using the previous definition
for the array z): ::
>>> indices = (1,1,1,1)
>>> z[indices]
40
So one can use code to construct tuples of any number of indices
and then use these within an index.
Slices can be specified within programs by using the slice() function
in Python. For example: ::
>>> indices = (1,1,1,slice(0,2)) # same as [1,1,1,0:2]
>>> z[indices]
array([39, 40])
Likewise, ellipsis can be specified by code by using the Ellipsis
object: ::
>>> indices = (1, Ellipsis, 1) # same as [1,...,1]
>>> z[indices]
array([[28, 31, 34],
[37, 40, 43],
[46, 49, 52]])
For this reason it is possible to use the output from the np.nonzero()
function directly as an index since it always returns a tuple of index
arrays.
Because the special treatment of tuples, they are not automatically
converted to an array as a list would be. As an example: ::
>>> z[[1,1,1,1]] # produces a large array
array([[[[27, 28, 29],
[30, 31, 32], ...
>>> z[(1,1,1,1)] # returns a single value
40
""" |
class InvalidProgramException(SystemException,ISerializable,_Exception):
"""
The exception that is thrown when a program contains invalid Microsoft intermediate language (MSIL) or metadata. Generally this indicates a bug in the compiler that generated the program.
InvalidProgramException()
InvalidProgramException(message: str)
InvalidProgramException(message: str,inner: Exception)
"""
def add_SerializeObjectState(self,*args):
""" add_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def remove_SerializeObjectState(self,*args):
""" remove_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,message=None,inner=None):
"""
__new__(cls: type)
__new__(cls: type,message: str)
__new__(cls: type,message: str,inner: Exception)
"""
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
| class Invalidprogramexception(SystemException, ISerializable, _Exception):
"""
The exception that is thrown when a program contains invalid Microsoft intermediate language (MSIL) or metadata. Generally this indicates a bug in the compiler that generated the program.
InvalidProgramException()
InvalidProgramException(message: str)
InvalidProgramException(message: str,inner: Exception)
"""
def add__serialize_object_state(self, *args):
""" add_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def remove__serialize_object_state(self, *args):
""" remove_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self, message=None, inner=None):
"""
__new__(cls: type)
__new__(cls: type,message: str)
__new__(cls: type,message: str,inner: Exception)
"""
pass
def __reduce_ex__(self, *args):
pass
def __str__(self, *args):
pass |
# This sample tests error detection for certain cases that
# are explicitly disallowed by PEP 572 for assignment expressions
# when used in context of a list comprehension.
pairs = []
stuff = []
# These should generate an error because assignment
# expressions aren't allowed within an iterator expression
# in a "for" clause of a list comprehension.
[x for x, y in (pairs2 := pairs) if x % 2 == 0]
[x for x, y in ([1, 2, 3, pairs2 := pairs]) if x % 2 == 0]
{x: y for x, y in (pairs2 := pairs) if x % 2 == 0}
{x for x, y in (pairs2 := pairs) if x % 2 == 0}
foo = (x for x, y in ([1, 2, 3, pairs2 := pairs]) if x % 2 == 0)
# This should generate an error because 'j' is used as a
# "for target" and the target of an assignment expression.
[[(j := j) for i in range(5)] for j in range(5)]
[i := 0 for i, j in stuff]
[i+1 for i in (i := stuff)]
[False and (i := 0) for i, j in stuff]
[i for i, j in stuff if True or (j := 1)]
# These should generate an error because assignment
# expressions aren't allowed within an iterator expression
# in a "for" clause of a list comprehension.
[i+1 for i in (j := stuff)]
[i+1 for i in range(2) for j in (k := stuff)]
[i+1 for i in [j for j in (k := stuff)]]
[i+1 for i in (lambda: (j := stuff))()]
class Example:
# This should generate an error because the containing
# scope for the list comprehension is a class.
[(j := i) for i in range(5)]
# This should generate an error because 'j' is used as a
# "for target" and the target of an assignment expression.
[i for i in [1, 2] if True or (j := 1) for j in range(10)]
| pairs = []
stuff = []
[x for (x, y) in (pairs2 := pairs) if x % 2 == 0]
[x for (x, y) in [1, 2, 3, (pairs2 := pairs)] if x % 2 == 0]
{x: y for (x, y) in (pairs2 := pairs) if x % 2 == 0}
{x for (x, y) in (pairs2 := pairs) if x % 2 == 0}
foo = (x for (x, y) in [1, 2, 3, (pairs2 := pairs)] if x % 2 == 0)
[[(j := j) for i in range(5)] for j in range(5)]
[(i := 0) for (i, j) in stuff]
[i + 1 for i in (i := stuff)]
[False and (i := 0) for (i, j) in stuff]
[i for (i, j) in stuff if True or (j := 1)]
[i + 1 for i in (j := stuff)]
[i + 1 for i in range(2) for j in (k := stuff)]
[i + 1 for i in [j for j in (k := stuff)]]
[i + 1 for i in (lambda : (j := stuff))()]
class Example:
[(j := i) for i in range(5)]
[i for i in [1, 2] if True or (j := 1) for j in range(10)] |
class Component:
def __init__(self, id_, name_):
self.id = id_
self.name = name_
| class Component:
def __init__(self, id_, name_):
self.id = id_
self.name = name_ |
class Mods:
__slots__ = ('map_changing', 'nf', 'ez', 'hd', 'hr', 'dt', 'ht', 'nc',
'fl', 'so', 'speed_changing', 'map_changing')
def __init__(self, mods_str=''):
self.nf = False
self.ez = False
self.hd = False
self.hr = False
self.dt = False
self.ht = False
self.nc = False
self.fl = False
self.so = False
self.speed_changing = False
self.map_changing = False
if mods_str:
self.from_str(mods_str)
self.update_state()
def update_state(self):
# speed changing - dt or ht or nc is used
self.speed_changing = self.dt or self.ht or self.nc
# if hr or ez or dt or ht or nc
self.map_changing = self.hr or self.ez or self.speed_changing
def __str__(self):
string = ''
if self.nf:
string += "NF"
if self.ez:
string += "EZ"
if self.hd:
string += "HD"
if self.hr:
string += "HR"
if self.dt:
string += "DT"
if self.ht:
string += "HT"
if self.nc:
string += "NC"
if self.fl:
string += "FL"
if self.so:
string += "SO"
return string
def from_str(self, mods):
if not mods:
return
# split mods string to chunks with length of two characters
mods = [mods[i:i + 2] for i in range(0, len(mods), 2)]
if "NF" in mods:
self.nf = True
if "EZ" in mods:
self.ez = True
if "HD" in mods:
self.hd = True
if "HR" in mods:
self.hr = True
if "DT" in mods:
self.dt = True
if "HT" in mods:
self.ht = True
if "NC" in mods:
self.nc = True
if "FL" in mods:
self.fl = True
if "SO" in mods:
self.so = True
self.update_state()
class HitObject:
__slots__ = ('pos', 'time', 'h_type', 'end_time', 'slider')
def __init__(self, pos, time, h_type, end_time, slider):
self.pos = pos
self.time = time
self.h_type = h_type
self.end_time = end_time
self.slider = slider
class SliderData:
__slots__ = ('s_type', 'points', 'repeats', 'length')
def __init__(self, s_type, points, repeats, length):
self.s_type = s_type
self.points = points
self.repeats = repeats
self.length = length
class TimingPoint:
__slots__ = ('time', 'ms_per_beat', 'inherited')
def __init__(self, time, ms_per_beat, inherited):
self.time = time
self.ms_per_beat = ms_per_beat
self.inherited = inherited
| class Mods:
__slots__ = ('map_changing', 'nf', 'ez', 'hd', 'hr', 'dt', 'ht', 'nc', 'fl', 'so', 'speed_changing', 'map_changing')
def __init__(self, mods_str=''):
self.nf = False
self.ez = False
self.hd = False
self.hr = False
self.dt = False
self.ht = False
self.nc = False
self.fl = False
self.so = False
self.speed_changing = False
self.map_changing = False
if mods_str:
self.from_str(mods_str)
self.update_state()
def update_state(self):
self.speed_changing = self.dt or self.ht or self.nc
self.map_changing = self.hr or self.ez or self.speed_changing
def __str__(self):
string = ''
if self.nf:
string += 'NF'
if self.ez:
string += 'EZ'
if self.hd:
string += 'HD'
if self.hr:
string += 'HR'
if self.dt:
string += 'DT'
if self.ht:
string += 'HT'
if self.nc:
string += 'NC'
if self.fl:
string += 'FL'
if self.so:
string += 'SO'
return string
def from_str(self, mods):
if not mods:
return
mods = [mods[i:i + 2] for i in range(0, len(mods), 2)]
if 'NF' in mods:
self.nf = True
if 'EZ' in mods:
self.ez = True
if 'HD' in mods:
self.hd = True
if 'HR' in mods:
self.hr = True
if 'DT' in mods:
self.dt = True
if 'HT' in mods:
self.ht = True
if 'NC' in mods:
self.nc = True
if 'FL' in mods:
self.fl = True
if 'SO' in mods:
self.so = True
self.update_state()
class Hitobject:
__slots__ = ('pos', 'time', 'h_type', 'end_time', 'slider')
def __init__(self, pos, time, h_type, end_time, slider):
self.pos = pos
self.time = time
self.h_type = h_type
self.end_time = end_time
self.slider = slider
class Sliderdata:
__slots__ = ('s_type', 'points', 'repeats', 'length')
def __init__(self, s_type, points, repeats, length):
self.s_type = s_type
self.points = points
self.repeats = repeats
self.length = length
class Timingpoint:
__slots__ = ('time', 'ms_per_beat', 'inherited')
def __init__(self, time, ms_per_beat, inherited):
self.time = time
self.ms_per_beat = ms_per_beat
self.inherited = inherited |
class Solution:
def maxNumOfSubstrings(self, s: str) -> List[str]:
start, end = {}, {}
for i, c in enumerate(s):
if c not in start:
start[c] = i
end[c] = i
def checkSubstring(i):
curr = i
right = end[s[curr]]
while curr <= right:
if start[s[curr]] < i:
return -1
right = max(right, end[s[curr]])
curr += 1
return right
result = []
prevRight = -1
for i, c in enumerate(s):
if i == start[c]:
right = checkSubstring(i)
if right != -1:
if i > prevRight:
result.append(s[i:right + 1])
else:
result[-1] = s[i:right + 1]
prevRight = right
return result
| class Solution:
def max_num_of_substrings(self, s: str) -> List[str]:
(start, end) = ({}, {})
for (i, c) in enumerate(s):
if c not in start:
start[c] = i
end[c] = i
def check_substring(i):
curr = i
right = end[s[curr]]
while curr <= right:
if start[s[curr]] < i:
return -1
right = max(right, end[s[curr]])
curr += 1
return right
result = []
prev_right = -1
for (i, c) in enumerate(s):
if i == start[c]:
right = check_substring(i)
if right != -1:
if i > prevRight:
result.append(s[i:right + 1])
else:
result[-1] = s[i:right + 1]
prev_right = right
return result |
# -*- coding: utf-8 -*-
{
'name': "HR Attendance Holidays",
'summary': """""",
'category': 'Human Resources',
'description': """
Hides the attendance presence button when an employee is on leave.
""",
'version': '1.0',
'depends': ['hr_attendance', 'hr_holidays'],
'auto_install': True,
'data': [
'views/hr_employee_views.xml',
],
'license': 'LGPL-3',
}
| {'name': 'HR Attendance Holidays', 'summary': '', 'category': 'Human Resources', 'description': '\nHides the attendance presence button when an employee is on leave.\n ', 'version': '1.0', 'depends': ['hr_attendance', 'hr_holidays'], 'auto_install': True, 'data': ['views/hr_employee_views.xml'], 'license': 'LGPL-3'} |
# -*- coding: utf-8 -*-
"""This Module helps test private extras."""
class PrivateDict(dict):
"""A priviate dictionary."""
| """This Module helps test private extras."""
class Privatedict(dict):
"""A priviate dictionary.""" |
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 3 11:15:57 2020
@author: Tarun Jaiswal
"""
x=range(2,11,2)
print (x)
for item in x:
print(item,end=",")
| """
Created on Sat Oct 3 11:15:57 2020
@author: Tarun Jaiswal
"""
x = range(2, 11, 2)
print(x)
for item in x:
print(item, end=',') |
'''
this code is for using PySpark to read straight from S3 bucket instead of using the default data source (AWS Glue Data Catalog).
'''
#this is the default line that we will change:
datasource0 = glueContext.create_dynamic_frame.from_catalog(database = "<DATABASE_NAME>", table_name = "<TABLE_NAME>", transformation_ctx = "datasource0")
# so replace the previous code with this:
obj_list = ['s3://<OBJECT_PATH>'] # list of all relevant objects
datasource0 = glueContext.create_dynamic_frame_from_options(connection_type = "s3",connection_options={"paths": [obj_list]}, format = "csv", format_options={"withHeader": False,"separator": ","})
# for more info: https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-crawler-pyspark-extensions-glue-context.html#aws-glue-api-crawler-pyspark-extensions-glue-context-create_dynamic_frame_from_options
| """
this code is for using PySpark to read straight from S3 bucket instead of using the default data source (AWS Glue Data Catalog).
"""
datasource0 = glueContext.create_dynamic_frame.from_catalog(database='<DATABASE_NAME>', table_name='<TABLE_NAME>', transformation_ctx='datasource0')
obj_list = ['s3://<OBJECT_PATH>']
datasource0 = glueContext.create_dynamic_frame_from_options(connection_type='s3', connection_options={'paths': [obj_list]}, format='csv', format_options={'withHeader': False, 'separator': ','}) |
"""
New England
Buffalo
Miami
N.Y. Jets
Pittsburgh
Baltimore
Cincinnati
Cleveland
Jacksonville
Tennessee
Indianapolis
Houston
Kansas City
L.A. Chargers
Las Vegas
Denver
Philadelphia
Dallas
Washington
N.Y. Giants
Minnesota
Detroit
Green Bay
Chicago
New Orleans
Carolina
Atlanta
Tampa Bay
L.A. Rams
Seattle
Arizona
San Francisco
"""
cbs_team_names = {
"Washington": "Washington Football Team",
"Tennessee": "Tennessee Titans",
"Tampa Bay": "Tampa Bay Buccaneers",
"Seattle": "Seattle Seahawks",
"San Francisco": "San Francisco 49ers",
"L.A. Chargers": "Los Angeles Chargers",
"L.A. Rams": "Los Angeles Rams",
"Pittsburgh": "Pittsburgh Steelers",
"Philadelphia": "Philadelphia Eagles",
"Las Vegas": "Las Vegas Raiders",
"N.Y. Jets": "New York Jets",
"N.Y. Giants": "New York Giants",
"New Orleans": "New Orleans Saints",
"New England": "New England Patriots",
"Minnesota": "Minnesota Vikings",
"Miami": "Miami Dolphins",
"Kansas City": "Kansas City Chiefs",
"Jacksonville": "Jacksonville Jaguars",
"Indianapolis": "Indianapolis Colts",
"Houston": "Houston Texans",
"Green Bay": "Green Bay Packers",
"Detroit": "Detroit Lions",
"Denver": "Denver Broncos",
"Dallas": "Dallas Cowboys",
"Cleveland": "Cleveland Browns",
"Cincinnati": "Cincinnati Bengals",
"Chicago": "Chicago Bears",
"Carolina": "Carolina Panthers",
"Buffalo": "Buffalo Bills",
"Baltimore": "Baltimore Ravens",
"Atlanta": "Atlanta Falcons",
"Arizona": "Arizona Cardinals"
}
| """
New England
Buffalo
Miami
N.Y. Jets
Pittsburgh
Baltimore
Cincinnati
Cleveland
Jacksonville
Tennessee
Indianapolis
Houston
Kansas City
L.A. Chargers
Las Vegas
Denver
Philadelphia
Dallas
Washington
N.Y. Giants
Minnesota
Detroit
Green Bay
Chicago
New Orleans
Carolina
Atlanta
Tampa Bay
L.A. Rams
Seattle
Arizona
San Francisco
"""
cbs_team_names = {'Washington': 'Washington Football Team', 'Tennessee': 'Tennessee Titans', 'Tampa Bay': 'Tampa Bay Buccaneers', 'Seattle': 'Seattle Seahawks', 'San Francisco': 'San Francisco 49ers', 'L.A. Chargers': 'Los Angeles Chargers', 'L.A. Rams': 'Los Angeles Rams', 'Pittsburgh': 'Pittsburgh Steelers', 'Philadelphia': 'Philadelphia Eagles', 'Las Vegas': 'Las Vegas Raiders', 'N.Y. Jets': 'New York Jets', 'N.Y. Giants': 'New York Giants', 'New Orleans': 'New Orleans Saints', 'New England': 'New England Patriots', 'Minnesota': 'Minnesota Vikings', 'Miami': 'Miami Dolphins', 'Kansas City': 'Kansas City Chiefs', 'Jacksonville': 'Jacksonville Jaguars', 'Indianapolis': 'Indianapolis Colts', 'Houston': 'Houston Texans', 'Green Bay': 'Green Bay Packers', 'Detroit': 'Detroit Lions', 'Denver': 'Denver Broncos', 'Dallas': 'Dallas Cowboys', 'Cleveland': 'Cleveland Browns', 'Cincinnati': 'Cincinnati Bengals', 'Chicago': 'Chicago Bears', 'Carolina': 'Carolina Panthers', 'Buffalo': 'Buffalo Bills', 'Baltimore': 'Baltimore Ravens', 'Atlanta': 'Atlanta Falcons', 'Arizona': 'Arizona Cardinals'} |
class Solution:
def removePalindromeSub(self, s: str) -> int:
if not s or len(s) == 0:
return 0
left, right = 0, len(s) - 1
while left < right and s[left] == s[right]:
left += 1
right -= 1
if left >= right:
return 1
else:
return 2 | class Solution:
def remove_palindrome_sub(self, s: str) -> int:
if not s or len(s) == 0:
return 0
(left, right) = (0, len(s) - 1)
while left < right and s[left] == s[right]:
left += 1
right -= 1
if left >= right:
return 1
else:
return 2 |
class Solution(object):
def backspaceCompare(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
def manipulateString(string):
new_string = []
for char in string:
if char != '#':
new_string.append(char)
elif new_string:
new_string.pop()
return new_string
new_s = manipulateString(s)
new_t = manipulateString(t)
if new_s == new_t:
return True
else:
return False
sol = Solution()
# Test Case1, answer should be True
s1 = "ab#c"
t1 = "ad#c"
ans1 = sol.backspaceCompare(s1, t1)
print(ans1)
# Test Case2, answer should be True
s2 = "ab##"
t2 = "c#d#"
ans2 = sol.backspaceCompare(s2, t2)
print(ans2)
# Test Case3, answer should be True
s3 = "a##c"
t3 = "#a#c"
ans3 = sol.backspaceCompare(s3, t3)
print(ans3)
# Test Case4, answer should be False
s4 = "a#c"
t4 = "b"
ans4 = sol.backspaceCompare(s4, t4)
print(ans4)
| class Solution(object):
def backspace_compare(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
def manipulate_string(string):
new_string = []
for char in string:
if char != '#':
new_string.append(char)
elif new_string:
new_string.pop()
return new_string
new_s = manipulate_string(s)
new_t = manipulate_string(t)
if new_s == new_t:
return True
else:
return False
sol = solution()
s1 = 'ab#c'
t1 = 'ad#c'
ans1 = sol.backspaceCompare(s1, t1)
print(ans1)
s2 = 'ab##'
t2 = 'c#d#'
ans2 = sol.backspaceCompare(s2, t2)
print(ans2)
s3 = 'a##c'
t3 = '#a#c'
ans3 = sol.backspaceCompare(s3, t3)
print(ans3)
s4 = 'a#c'
t4 = 'b'
ans4 = sol.backspaceCompare(s4, t4)
print(ans4) |
radious=2.5
area=3.14*radious**2
print("area of circle",area)
circum=2*3.14*radious
print("circumof",circum)
| radious = 2.5
area = 3.14 * radious ** 2
print('area of circle', area)
circum = 2 * 3.14 * radious
print('circumof', circum) |
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 09/10/14 #3623 randerso Manually created, do not regenerate
#
class SiteActivationNotification(object):
def __init__(self):
self.type = None
self.status = None
self.primarySite = None
self.modifiedSite = None
self.runMode = None
self.serverName = None
self.pluginName = None
def getType(self):
return self.type
def setType(self, notificationType):
self.type = notificationType
def getStatus(self):
return self.status
def setStatus(self, status):
self.status = status
def getPrimarySite(self):
return self.primarySite
def setPrimarySite(self, primarysite):
self.primarySite = primarysite
def getModifiedSite(self):
return self.modifiedSite
def setModifiedSite(self, modifiedSite):
self.modifiedSite = modifiedSite
def getRunMode(self):
return self.runMode
def setRunMode(self, runMode):
self.runMode = runMode
def getServerName(self):
return self.serverName
def setServerName(self, serverName):
self.serverName = serverName
def getPluginName(self):
return self.pluginName
def setPluginName(self, pluginName):
self.pluginName = pluginName
def __str__(self):
return self.pluginName.upper() + ":" \
+ self.status + ":" \
+ self.type + " " \
+ self.modifiedSite.upper() + " on " \
+ self.serverName + ":" \
+ self.runMode
| class Siteactivationnotification(object):
def __init__(self):
self.type = None
self.status = None
self.primarySite = None
self.modifiedSite = None
self.runMode = None
self.serverName = None
self.pluginName = None
def get_type(self):
return self.type
def set_type(self, notificationType):
self.type = notificationType
def get_status(self):
return self.status
def set_status(self, status):
self.status = status
def get_primary_site(self):
return self.primarySite
def set_primary_site(self, primarysite):
self.primarySite = primarysite
def get_modified_site(self):
return self.modifiedSite
def set_modified_site(self, modifiedSite):
self.modifiedSite = modifiedSite
def get_run_mode(self):
return self.runMode
def set_run_mode(self, runMode):
self.runMode = runMode
def get_server_name(self):
return self.serverName
def set_server_name(self, serverName):
self.serverName = serverName
def get_plugin_name(self):
return self.pluginName
def set_plugin_name(self, pluginName):
self.pluginName = pluginName
def __str__(self):
return self.pluginName.upper() + ':' + self.status + ':' + self.type + ' ' + self.modifiedSite.upper() + ' on ' + self.serverName + ':' + self.runMode |
#!/usr/bin/env python3
# Change the variables and rename this file to secret.py
# add your url here (without trailing / at the end!)
url = "https://home-assistant.duckdns.org"
# get a "Long-Lived Access Token" at YOUR_URL/profile
token = "AJKSDHHASJKDHA871263291873KHGSDKAJSGD"
| url = 'https://home-assistant.duckdns.org'
token = 'AJKSDHHASJKDHA871263291873KHGSDKAJSGD' |
# Time: O(n)
# Space: O(1)
class Solution(object):
def largestSubarray(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: List[int]
"""
left, right, l = 0, 1, 0
while right+k-1 < len(nums) and right+l < len(nums):
if nums[left+l] == nums[right+l]:
l += 1
continue
if nums[left+l] > nums[right+l]:
right += l+1
else:
left = max(right, min(left+l+1, len(nums)-k))
right = left+1
l = 0
return nums[left:left+k]
| class Solution(object):
def largest_subarray(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: List[int]
"""
(left, right, l) = (0, 1, 0)
while right + k - 1 < len(nums) and right + l < len(nums):
if nums[left + l] == nums[right + l]:
l += 1
continue
if nums[left + l] > nums[right + l]:
right += l + 1
else:
left = max(right, min(left + l + 1, len(nums) - k))
right = left + 1
l = 0
return nums[left:left + k] |
# pylint: skip-file
class OadmPolicyException(Exception):
''' Registry Exception Class '''
pass
class OadmPolicyUserConfig(OpenShiftCLIConfig):
''' RegistryConfig is a DTO for the registry. '''
def __init__(self, namespace, kubeconfig, policy_options):
super(OadmPolicyUserConfig, self).__init__(policy_options['name']['value'],
namespace, kubeconfig, policy_options)
self.kind = self.get_kind()
self.namespace = namespace
def get_kind(self):
''' return the kind we are working with '''
if self.config_options['resource_kind']['value'] == 'role':
return 'rolebinding'
elif self.config_options['resource_kind']['value'] == 'cluster-role':
return 'clusterrolebinding'
elif self.config_options['resource_kind']['value'] == 'scc':
return 'scc'
return None
class OadmPolicyUser(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
def __init__(self,
policy_config,
verbose=False):
''' Constructor for OadmPolicyUser '''
super(OadmPolicyUser, self).__init__(policy_config.namespace, policy_config.kubeconfig, verbose)
self.config = policy_config
self.verbose = verbose
self._rolebinding = None
self._scc = None
@property
def role_binding(self):
''' role_binding property '''
return self._rolebinding
@role_binding.setter
def role_binding(self, binding):
''' setter for role_binding property '''
self._rolebinding = binding
@property
def security_context_constraint(self):
''' security_context_constraint property '''
return self._scc
@security_context_constraint.setter
def security_context_constraint(self, scc):
''' setter for security_context_constraint property '''
self._scc = scc
def get(self):
'''fetch the desired kind'''
resource_name = self.config.config_options['name']['value']
if resource_name == 'cluster-reader':
resource_name += 's'
return self._get(self.config.kind, resource_name)
def exists_role_binding(self):
''' return whether role_binding exists '''
results = self.get()
if results['returncode'] == 0:
self.role_binding = RoleBinding(results['results'][0])
if self.role_binding.find_user_name(self.config.config_options['user']['value']) != None:
return True
return False
elif '\"%s\" not found' % self.config.config_options['name']['value'] in results['stderr']:
return False
return results
def exists_scc(self):
''' return whether scc exists '''
results = self.get()
if results['returncode'] == 0:
self.security_context_constraint = SecurityContextConstraints(results['results'][0])
if self.security_context_constraint.find_user(self.config.config_options['user']['value']):
return True
return False
return results
def exists(self):
'''does the object exist?'''
if self.config.config_options['resource_kind']['value'] == 'cluster-role':
return self.exists_role_binding()
elif self.config.config_options['resource_kind']['value'] == 'role':
return self.exists_role_binding()
elif self.config.config_options['resource_kind']['value'] == 'scc':
return self.exists_scc()
return False
def perform(self):
'''perform action on resource'''
cmd = ['-n', self.config.namespace, 'policy',
self.config.config_options['action']['value'],
self.config.config_options['name']['value'],
self.config.config_options['user']['value']]
return self.openshift_cmd(cmd, oadm=True)
| class Oadmpolicyexception(Exception):
""" Registry Exception Class """
pass
class Oadmpolicyuserconfig(OpenShiftCLIConfig):
""" RegistryConfig is a DTO for the registry. """
def __init__(self, namespace, kubeconfig, policy_options):
super(OadmPolicyUserConfig, self).__init__(policy_options['name']['value'], namespace, kubeconfig, policy_options)
self.kind = self.get_kind()
self.namespace = namespace
def get_kind(self):
""" return the kind we are working with """
if self.config_options['resource_kind']['value'] == 'role':
return 'rolebinding'
elif self.config_options['resource_kind']['value'] == 'cluster-role':
return 'clusterrolebinding'
elif self.config_options['resource_kind']['value'] == 'scc':
return 'scc'
return None
class Oadmpolicyuser(OpenShiftCLI):
""" Class to wrap the oc command line tools """
def __init__(self, policy_config, verbose=False):
""" Constructor for OadmPolicyUser """
super(OadmPolicyUser, self).__init__(policy_config.namespace, policy_config.kubeconfig, verbose)
self.config = policy_config
self.verbose = verbose
self._rolebinding = None
self._scc = None
@property
def role_binding(self):
""" role_binding property """
return self._rolebinding
@role_binding.setter
def role_binding(self, binding):
""" setter for role_binding property """
self._rolebinding = binding
@property
def security_context_constraint(self):
""" security_context_constraint property """
return self._scc
@security_context_constraint.setter
def security_context_constraint(self, scc):
""" setter for security_context_constraint property """
self._scc = scc
def get(self):
"""fetch the desired kind"""
resource_name = self.config.config_options['name']['value']
if resource_name == 'cluster-reader':
resource_name += 's'
return self._get(self.config.kind, resource_name)
def exists_role_binding(self):
""" return whether role_binding exists """
results = self.get()
if results['returncode'] == 0:
self.role_binding = role_binding(results['results'][0])
if self.role_binding.find_user_name(self.config.config_options['user']['value']) != None:
return True
return False
elif '"%s" not found' % self.config.config_options['name']['value'] in results['stderr']:
return False
return results
def exists_scc(self):
""" return whether scc exists """
results = self.get()
if results['returncode'] == 0:
self.security_context_constraint = security_context_constraints(results['results'][0])
if self.security_context_constraint.find_user(self.config.config_options['user']['value']):
return True
return False
return results
def exists(self):
"""does the object exist?"""
if self.config.config_options['resource_kind']['value'] == 'cluster-role':
return self.exists_role_binding()
elif self.config.config_options['resource_kind']['value'] == 'role':
return self.exists_role_binding()
elif self.config.config_options['resource_kind']['value'] == 'scc':
return self.exists_scc()
return False
def perform(self):
"""perform action on resource"""
cmd = ['-n', self.config.namespace, 'policy', self.config.config_options['action']['value'], self.config.config_options['name']['value'], self.config.config_options['user']['value']]
return self.openshift_cmd(cmd, oadm=True) |
# Stage 3/6: More interaction
# Description
# We are going to make our program more complex. As you remember,
# the conicoin rate was fixed in the previous stage. But in the real world,
# things are different. It's time to write a program that takes your
# conicoins and an up-to-date conicoin exchange rate, then counts how
# many dollars you would get, and print the result.
# Objectives
# 1. Get the number of conicoins from the user input.
# 2. Get the exchange rate from the user input.
# 3. Calculate and print hte result.
# Example
# The greater-than symbol followed by a space ( > ) represents the user
# input. Note that it's not part of the input.
# Example 1:
# Please, enter the number of conicoins you have: > 13
# Please, enter the exchange rate: > 2
# The total amount of dollars: 26
# Example 2:
# Please, enter the number of conicoins you have: > 128
# Please, enter the exchange rate: > 3.21
# The total amount of dollars: 410.88
class CurrencyConverter:
def __init__(self):
self.exchange = 0
self.dollars = 0
self.coins = 0
self.conicoin_question = "Please, enter the number of conicoins you have: "
self.exchange_question = "Please, enter the exchange rate: "
self.amount_message = "The total amount of dollars:"
def start(self):
self.converter()
def user(self, question):
return input(question)
def converter(self):
self.coins = int(self.user(self.conicoin_question))
self.exchange = float(self.user(self.exchange_question))
self.dollars = self.coins * self.exchange
print(self.amount_message, round(self.dollars) if self.dollars % 1 == 0 else round(self.dollars, 2))
def main():
cur = CurrencyConverter()
cur.start()
if __name__ == '__main__':
main() | class Currencyconverter:
def __init__(self):
self.exchange = 0
self.dollars = 0
self.coins = 0
self.conicoin_question = 'Please, enter the number of conicoins you have: '
self.exchange_question = 'Please, enter the exchange rate: '
self.amount_message = 'The total amount of dollars:'
def start(self):
self.converter()
def user(self, question):
return input(question)
def converter(self):
self.coins = int(self.user(self.conicoin_question))
self.exchange = float(self.user(self.exchange_question))
self.dollars = self.coins * self.exchange
print(self.amount_message, round(self.dollars) if self.dollars % 1 == 0 else round(self.dollars, 2))
def main():
cur = currency_converter()
cur.start()
if __name__ == '__main__':
main() |
def test_convert_from_bool(get_contract_with_gas_estimation):
code = """
@external
def foo() -> bool:
val: bool = True and True and False
return val
@external
def bar() -> bool:
val: bool = True or True or False
return val
@external
def foobar() -> bool:
val: bool = False and True or False
return val
@external
def oof() -> bool:
val: bool = False or False or False or False or False or True
return val
@external
def rab() -> bool:
val: bool = True and True and True and True and True and False
return val
@external
def oofrab() -> bool:
val: bool = False and True or False and True or False and False or True
return val
"""
c = get_contract_with_gas_estimation(code)
assert c.foo() is False
assert c.bar() is True
assert c.foobar() is False
assert c.oof() is True
assert c.rab() is False
assert c.oofrab() is True
| def test_convert_from_bool(get_contract_with_gas_estimation):
code = '\n@external\ndef foo() -> bool:\n val: bool = True and True and False\n return val\n\n@external\ndef bar() -> bool:\n val: bool = True or True or False\n return val\n\n@external\ndef foobar() -> bool:\n val: bool = False and True or False\n return val\n\n@external\ndef oof() -> bool:\n val: bool = False or False or False or False or False or True\n return val\n\n@external\ndef rab() -> bool:\n val: bool = True and True and True and True and True and False\n return val\n\n@external\ndef oofrab() -> bool:\n val: bool = False and True or False and True or False and False or True\n return val\n '
c = get_contract_with_gas_estimation(code)
assert c.foo() is False
assert c.bar() is True
assert c.foobar() is False
assert c.oof() is True
assert c.rab() is False
assert c.oofrab() is True |
class ForkName:
Frontier = 'Frontier'
Homestead = 'Homestead'
EIP150 = 'EIP150'
EIP158 = 'EIP158'
Byzantium = 'Byzantium'
Constantinople = 'Constantinople'
Metropolis = 'Metropolis'
ConstantinopleFix = 'ConstantinopleFix'
Istanbul = 'Istanbul'
Berlin = 'Berlin'
London = 'London'
ArrowGlacier = 'ArrowGlacier'
| class Forkname:
frontier = 'Frontier'
homestead = 'Homestead'
eip150 = 'EIP150'
eip158 = 'EIP158'
byzantium = 'Byzantium'
constantinople = 'Constantinople'
metropolis = 'Metropolis'
constantinople_fix = 'ConstantinopleFix'
istanbul = 'Istanbul'
berlin = 'Berlin'
london = 'London'
arrow_glacier = 'ArrowGlacier' |
class fruta:
def __init__ (self,nombre, calorias, vitamina_c, porcentaje_fibra, porcentaje_potasio):
self.nombre = nombre
self.calorias = calorias
self.vitamina_c = vitamina_c
self.porcentaje_fibra = porcentaje_fibra
self.porcentaje_potasio = porcentaje_potasio
def get_calorias(self):
return self.calorias
def set_calorias(self):
return self.calorias
def __repr__ (self):
return "Nombre: " +self.nombre+" Calorias: "+str(self.calorias)+"K " +" Vitamina C: "+str(self.vitamina_c)+ "mm/kg ""Porcentaje de fibra: "+str(self.porcentaje_fibra)+"% "+" Porcentaje de Potasio: " +str(self.porcentaje_potasio)+"% "
def engorda (self):
if self.calorias>100:
return "Verdadero"
else:
return "Falso"
def nogripe (self):
if self.vitamina_c > 0:
return "Verdadero"
else:
return "Falso"
fruta1=fruta("banana",110,28,34,60)
fruta2=fruta("manzana",80,0,40,5)
fruta3=fruta("pera",90,8,37,8)
print(fruta1)
print(fruta2)
print(fruta3)
print("")
print(fruta1.engorda())
print(fruta2.engorda())
print(fruta3.engorda())
print("")
print(fruta1.nogripe())
print(fruta2.nogripe())
print(fruta3.nogripe())
| class Fruta:
def __init__(self, nombre, calorias, vitamina_c, porcentaje_fibra, porcentaje_potasio):
self.nombre = nombre
self.calorias = calorias
self.vitamina_c = vitamina_c
self.porcentaje_fibra = porcentaje_fibra
self.porcentaje_potasio = porcentaje_potasio
def get_calorias(self):
return self.calorias
def set_calorias(self):
return self.calorias
def __repr__(self):
return 'Nombre: ' + self.nombre + ' Calorias: ' + str(self.calorias) + 'K ' + ' Vitamina C: ' + str(self.vitamina_c) + 'mm/kg Porcentaje de fibra: ' + str(self.porcentaje_fibra) + '% ' + ' Porcentaje de Potasio: ' + str(self.porcentaje_potasio) + '% '
def engorda(self):
if self.calorias > 100:
return 'Verdadero'
else:
return 'Falso'
def nogripe(self):
if self.vitamina_c > 0:
return 'Verdadero'
else:
return 'Falso'
fruta1 = fruta('banana', 110, 28, 34, 60)
fruta2 = fruta('manzana', 80, 0, 40, 5)
fruta3 = fruta('pera', 90, 8, 37, 8)
print(fruta1)
print(fruta2)
print(fruta3)
print('')
print(fruta1.engorda())
print(fruta2.engorda())
print(fruta3.engorda())
print('')
print(fruta1.nogripe())
print(fruta2.nogripe())
print(fruta3.nogripe()) |
load("//:bouncycastle.bzl", "bouncycastle_repos")
load("//:gerrit_api_version.bzl", "gerrit_api_version")
load("//:rules_python.bzl", "rules_python_repos")
load("//tools:maven_jar.bzl", "MAVEN_LOCAL", "MAVEN_CENTRAL", "maven_jar")
"""Bazel rule for building [Gerrit Code Review](https://www.gerritcodereview.com/)
gerrit_api is rule for fetching Gerrit plugin API using Bazel.
"""
def gerrit_api(version = "3.3.0",
plugin_api_sha1 = "cef5b27d3beb894b366002657dda6e0e2ce47223",
acceptance_framework_sha1 = "e7d2c52919da966032a70d998899dad47b85ed76"):
gerrit_api_version(
name = "gerrit_api_version",
version = version,
)
bouncycastle_repos()
rules_python_repos()
local_repository = version.endswith("-SNAPSHOT")
maven_jar(
name = "gerrit_plugin_api",
artifact = "com.google.gerrit:gerrit-plugin-api:" + version,
sha1 = "" if local_repository else plugin_api_sha1,
repository = MAVEN_LOCAL if local_repository else MAVEN_CENTRAL,
)
maven_jar(
name = "gerrit_acceptance_framework",
artifact = "com.google.gerrit:gerrit-acceptance-framework:" + version,
sha1 = "" if local_repository else acceptance_framework_sha1,
repository = MAVEN_LOCAL if local_repository else MAVEN_CENTRAL,
)
native.bind(
name = "gerrit-plugin-api",
actual = "@gerrit_plugin_api//jar",
)
native.bind(
name = "gerrit-acceptance-framework",
actual = "@gerrit_acceptance_framework//jar",
)
native.bind(
name = "gerrit-plugin-api-neverlink",
actual = "@gerrit_plugin_api//jar:neverlink",
)
native.bind(
name = "gerrit-acceptance-framework-neverlink",
actual = "@gerrit_acceptance_framework//jar:neverlink",
)
| load('//:bouncycastle.bzl', 'bouncycastle_repos')
load('//:gerrit_api_version.bzl', 'gerrit_api_version')
load('//:rules_python.bzl', 'rules_python_repos')
load('//tools:maven_jar.bzl', 'MAVEN_LOCAL', 'MAVEN_CENTRAL', 'maven_jar')
'Bazel rule for building [Gerrit Code Review](https://www.gerritcodereview.com/)\ngerrit_api is rule for fetching Gerrit plugin API using Bazel.\n'
def gerrit_api(version='3.3.0', plugin_api_sha1='cef5b27d3beb894b366002657dda6e0e2ce47223', acceptance_framework_sha1='e7d2c52919da966032a70d998899dad47b85ed76'):
gerrit_api_version(name='gerrit_api_version', version=version)
bouncycastle_repos()
rules_python_repos()
local_repository = version.endswith('-SNAPSHOT')
maven_jar(name='gerrit_plugin_api', artifact='com.google.gerrit:gerrit-plugin-api:' + version, sha1='' if local_repository else plugin_api_sha1, repository=MAVEN_LOCAL if local_repository else MAVEN_CENTRAL)
maven_jar(name='gerrit_acceptance_framework', artifact='com.google.gerrit:gerrit-acceptance-framework:' + version, sha1='' if local_repository else acceptance_framework_sha1, repository=MAVEN_LOCAL if local_repository else MAVEN_CENTRAL)
native.bind(name='gerrit-plugin-api', actual='@gerrit_plugin_api//jar')
native.bind(name='gerrit-acceptance-framework', actual='@gerrit_acceptance_framework//jar')
native.bind(name='gerrit-plugin-api-neverlink', actual='@gerrit_plugin_api//jar:neverlink')
native.bind(name='gerrit-acceptance-framework-neverlink', actual='@gerrit_acceptance_framework//jar:neverlink') |
# this is an embedded Python script it's really on GitHub
# and this is only a reference - so when it changes people
# will see the change on the webpage .. GOODTIMES !
pid = Runtime.start("pid","PID") | pid = Runtime.start('pid', 'PID') |
def fram_write8(addr: number, val: number):
pins.digital_write_pin(DigitalPin.P16, 0)
pins.spi_write(OPCODE_WRITE)
pins.spi_write(addr >> 8)
pins.spi_write(addr & 0xff)
pins.spi_write(val)
pins.digital_write_pin(DigitalPin.P16, 1)
def on_button_pressed_a():
fram_write8(0, 10)
basic.pause(100)
serial.write_line("FRAM at 0xFF: " + ("" + str(fram_read8(0))))
input.on_button_pressed(Button.A, on_button_pressed_a)
def fram_getDeviceID():
global whoami, wh0, wh1, wh2, wh3
pins.digital_write_pin(DigitalPin.P16, 0)
whoami = pins.spi_write(OPCODE_RDID)
wh0 = pins.spi_write(255)
wh1 = pins.spi_write(255)
wh2 = pins.spi_write(255)
wh3 = pins.spi_write(255)
pins.digital_write_pin(DigitalPin.P16, 1)
serial.write_line("WHOAMI: " + ("" + str(whoami)) + " wh0:" + ("" + str(wh0)) + " wh1:" + ("" + str(wh1)) + " wh2:" + ("" + str(wh2)) + " wh3:" + ("" + str(wh3)))
if wh1 == 127:
serial.write_line("FRAM Connected")
def fram_write_enable():
global wh3
pins.digital_write_pin(DigitalPin.P16, 0)
wh3 = pins.spi_write(OPCODE_WREN)
pins.digital_write_pin(DigitalPin.P16, 1)
serial.write_line("FRAM Writes Enabled")
def fram_read8(addr: number):
global wh3
pins.digital_write_pin(DigitalPin.P16, 0)
pins.spi_write(OPCODE_READ)
pins.spi_write(addr >> 8)
pins.spi_write(addr & 0xff)
wh3 = pins.spi_write(255)
pins.digital_write_pin(DigitalPin.P16, 1)
return wh3
wh3 = 0
wh2 = 0
wh1 = 0
wh0 = 0
whoami = 0
OPCODE_WREN = 0
OPCODE_RDID = 0
OPCODE_READ = 3
OPCODE_WRITE = 2
OPCODE_RDID = 159
OPCODE_WREN = 6
pins.digital_write_pin(DigitalPin.P16, 1)
pins.spi_pins(DigitalPin.P15, DigitalPin.P14, DigitalPin.P13)
pins.spi_format(8, 0)
pins.spi_frequency(1000000)
fram_getDeviceID()
fram_write_enable() | def fram_write8(addr: number, val: number):
pins.digital_write_pin(DigitalPin.P16, 0)
pins.spi_write(OPCODE_WRITE)
pins.spi_write(addr >> 8)
pins.spi_write(addr & 255)
pins.spi_write(val)
pins.digital_write_pin(DigitalPin.P16, 1)
def on_button_pressed_a():
fram_write8(0, 10)
basic.pause(100)
serial.write_line('FRAM at 0xFF: ' + ('' + str(fram_read8(0))))
input.on_button_pressed(Button.A, on_button_pressed_a)
def fram_get_device_id():
global whoami, wh0, wh1, wh2, wh3
pins.digital_write_pin(DigitalPin.P16, 0)
whoami = pins.spi_write(OPCODE_RDID)
wh0 = pins.spi_write(255)
wh1 = pins.spi_write(255)
wh2 = pins.spi_write(255)
wh3 = pins.spi_write(255)
pins.digital_write_pin(DigitalPin.P16, 1)
serial.write_line('WHOAMI: ' + ('' + str(whoami)) + ' wh0:' + ('' + str(wh0)) + ' wh1:' + ('' + str(wh1)) + ' wh2:' + ('' + str(wh2)) + ' wh3:' + ('' + str(wh3)))
if wh1 == 127:
serial.write_line('FRAM Connected')
def fram_write_enable():
global wh3
pins.digital_write_pin(DigitalPin.P16, 0)
wh3 = pins.spi_write(OPCODE_WREN)
pins.digital_write_pin(DigitalPin.P16, 1)
serial.write_line('FRAM Writes Enabled')
def fram_read8(addr: number):
global wh3
pins.digital_write_pin(DigitalPin.P16, 0)
pins.spi_write(OPCODE_READ)
pins.spi_write(addr >> 8)
pins.spi_write(addr & 255)
wh3 = pins.spi_write(255)
pins.digital_write_pin(DigitalPin.P16, 1)
return wh3
wh3 = 0
wh2 = 0
wh1 = 0
wh0 = 0
whoami = 0
opcode_wren = 0
opcode_rdid = 0
opcode_read = 3
opcode_write = 2
opcode_rdid = 159
opcode_wren = 6
pins.digital_write_pin(DigitalPin.P16, 1)
pins.spi_pins(DigitalPin.P15, DigitalPin.P14, DigitalPin.P13)
pins.spi_format(8, 0)
pins.spi_frequency(1000000)
fram_get_device_id()
fram_write_enable() |
class HtmlDocument(object):
""" Provides top-level programmatic access to an HTML document hosted by the System.Windows.Forms.WebBrowser control. """
def AttachEventHandler(self,eventName,eventHandler):
"""
AttachEventHandler(self: HtmlDocument,eventName: str,eventHandler: EventHandler)
Adds an event handler for the named HTML DOM event.
eventName: The name of the event you want to handle.
eventHandler: The managed code that handles the event.
"""
pass
def CreateElement(self,elementTag):
"""
CreateElement(self: HtmlDocument,elementTag: str) -> HtmlElement
Creates a new HtmlElement of the specified HTML tag type.
elementTag: The name of the HTML element to create.
Returns: A new element of the specified tag type.
"""
pass
def DetachEventHandler(self,eventName,eventHandler):
"""
DetachEventHandler(self: HtmlDocument,eventName: str,eventHandler: EventHandler)
Removes an event handler from a named event on the HTML DOM.
eventName: The name of the event you want to cease handling.
eventHandler: The managed code that handles the event.
"""
pass
def Equals(self,obj):
"""
Equals(self: HtmlDocument,obj: object) -> bool
obj: The object to compare with the current object.
Returns: true if the specified System.Object is equal to the current System.Object; otherwise,false.
"""
pass
def ExecCommand(self,command,showUI,value):
"""
ExecCommand(self: HtmlDocument,command: str,showUI: bool,value: object)
Executes the specified command against the document.
command: The name of the command to execute.
showUI: Whether or not to show command-specific dialog boxes or message boxes to the user.
value: The value to assign using the command. Not applicable for all commands.
"""
pass
def Focus(self):
"""
Focus(self: HtmlDocument)
Sets user input focus on the current document.
"""
pass
def GetElementById(self,id):
"""
GetElementById(self: HtmlDocument,id: str) -> HtmlElement
Retrieves a single System.Windows.Forms.HtmlElement using the element's ID attribute as a search
key.
id: The ID attribute of the element to retrieve.
Returns: Returns the first object with the same ID attribute as the specified value,or null if the id
cannot be found.
"""
pass
def GetElementFromPoint(self,point):
"""
GetElementFromPoint(self: HtmlDocument,point: Point) -> HtmlElement
Retrieves the HTML element located at the specified client coordinates.
point: The x,y position of the element on the screen,relative to the top-left corner of the document.
Returns: The System.Windows.Forms.HtmlElement at the specified screen location in the document.
"""
pass
def GetElementsByTagName(self,tagName):
"""
GetElementsByTagName(self: HtmlDocument,tagName: str) -> HtmlElementCollection
Retrieve a collection of elements with the specified HTML tag.
tagName: The name of the HTML tag for the System.Windows.Forms.HtmlElement objects you want to retrieve.
Returns: The collection of elements who tag name is equal to the tagName argument.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: HtmlDocument) -> int
Returns: A hash code for the current System.Object.
"""
pass
def InvokeScript(self,scriptName,args=None):
"""
InvokeScript(self: HtmlDocument,scriptName: str) -> object
Executes an Active Scripting function defined in an HTML page.
scriptName: The name of the script method to invoke.
Returns: The object returned by the Active Scripting call.
InvokeScript(self: HtmlDocument,scriptName: str,args: Array[object]) -> object
Executes an Active Scripting function defined in an HTML page.
scriptName: The name of the script method to invoke.
args: The arguments to pass to the script method.
Returns: The object returned by the Active Scripting call.
"""
pass
def OpenNew(self,replaceInHistory):
"""
OpenNew(self: HtmlDocument,replaceInHistory: bool) -> HtmlDocument
Gets a new System.Windows.Forms.HtmlDocument to use with the
System.Windows.Forms.HtmlDocument.Write(System.String) method.
replaceInHistory: Whether the new window's navigation should replace the previous element in the navigation
history of the DOM.
Returns: A new document for writing.
"""
pass
def Write(self,text):
"""
Write(self: HtmlDocument,text: str)
Writes a new HTML page.
text: The HTML text to write into the document.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __ne__(self,*args):
pass
ActiveElement=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Provides the System.Windows.Forms.HtmlElement which currently has user input focus.
Get: ActiveElement(self: HtmlDocument) -> HtmlElement
"""
ActiveLinkColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Drawing.Color of a hyperlink when clicked by a user.
Get: ActiveLinkColor(self: HtmlDocument) -> Color
Set: ActiveLinkColor(self: HtmlDocument)=value
"""
All=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets an instance of System.Windows.Forms.HtmlElementCollection,which stores all System.Windows.Forms.HtmlElement objects for the document.
Get: All(self: HtmlDocument) -> HtmlElementCollection
"""
BackColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the background color of the HTML document.
Get: BackColor(self: HtmlDocument) -> Color
Set: BackColor(self: HtmlDocument)=value
"""
Body=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the System.Windows.Forms.HtmlElement for the BODY tag.
Get: Body(self: HtmlDocument) -> HtmlElement
"""
Cookie=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the HTTP cookies associated with this document.
Get: Cookie(self: HtmlDocument) -> str
Set: Cookie(self: HtmlDocument)=value
"""
DefaultEncoding=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the encoding used by default for the current document.
Get: DefaultEncoding(self: HtmlDocument) -> str
"""
Domain=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the string describing the domain of this document for security purposes.
Get: Domain(self: HtmlDocument) -> str
Set: Domain(self: HtmlDocument)=value
"""
DomDocument=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the unmanaged interface pointer for this System.Windows.Forms.HtmlDocument.
Get: DomDocument(self: HtmlDocument) -> object
"""
Encoding=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the character encoding for this document.
Get: Encoding(self: HtmlDocument) -> str
Set: Encoding(self: HtmlDocument)=value
"""
Focused=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the document has user input focus.
Get: Focused(self: HtmlDocument) -> bool
"""
ForeColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the text color for the document.
Get: ForeColor(self: HtmlDocument) -> Color
Set: ForeColor(self: HtmlDocument)=value
"""
Forms=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection of all of the <FORM> elements in the document.
Get: Forms(self: HtmlDocument) -> HtmlElementCollection
"""
Images=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection of all image tags in the document.
Get: Images(self: HtmlDocument) -> HtmlElementCollection
"""
LinkColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the color of hyperlinks.
Get: LinkColor(self: HtmlDocument) -> Color
Set: LinkColor(self: HtmlDocument)=value
"""
Links=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a list of all the hyperlinks within this HTML document.
Get: Links(self: HtmlDocument) -> HtmlElementCollection
"""
RightToLeft=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the direction of text in the current document.
Get: RightToLeft(self: HtmlDocument) -> bool
Set: RightToLeft(self: HtmlDocument)=value
"""
Title=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the text value of the <TITLE> tag in the current HTML document.
Get: Title(self: HtmlDocument) -> str
Set: Title(self: HtmlDocument)=value
"""
Url=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the URL describing the location of this document.
Get: Url(self: HtmlDocument) -> Uri
"""
VisitedLinkColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the Color of links to HTML pages that the user has already visited.
Get: VisitedLinkColor(self: HtmlDocument) -> Color
Set: VisitedLinkColor(self: HtmlDocument)=value
"""
Window=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the System.Windows.Forms.HtmlWindow associated with this document.
Get: Window(self: HtmlDocument) -> HtmlWindow
"""
Click=None
ContextMenuShowing=None
Focusing=None
LosingFocus=None
MouseDown=None
MouseLeave=None
MouseMove=None
MouseOver=None
MouseUp=None
Stop=None
| class Htmldocument(object):
""" Provides top-level programmatic access to an HTML document hosted by the System.Windows.Forms.WebBrowser control. """
def attach_event_handler(self, eventName, eventHandler):
"""
AttachEventHandler(self: HtmlDocument,eventName: str,eventHandler: EventHandler)
Adds an event handler for the named HTML DOM event.
eventName: The name of the event you want to handle.
eventHandler: The managed code that handles the event.
"""
pass
def create_element(self, elementTag):
"""
CreateElement(self: HtmlDocument,elementTag: str) -> HtmlElement
Creates a new HtmlElement of the specified HTML tag type.
elementTag: The name of the HTML element to create.
Returns: A new element of the specified tag type.
"""
pass
def detach_event_handler(self, eventName, eventHandler):
"""
DetachEventHandler(self: HtmlDocument,eventName: str,eventHandler: EventHandler)
Removes an event handler from a named event on the HTML DOM.
eventName: The name of the event you want to cease handling.
eventHandler: The managed code that handles the event.
"""
pass
def equals(self, obj):
"""
Equals(self: HtmlDocument,obj: object) -> bool
obj: The object to compare with the current object.
Returns: true if the specified System.Object is equal to the current System.Object; otherwise,false.
"""
pass
def exec_command(self, command, showUI, value):
"""
ExecCommand(self: HtmlDocument,command: str,showUI: bool,value: object)
Executes the specified command against the document.
command: The name of the command to execute.
showUI: Whether or not to show command-specific dialog boxes or message boxes to the user.
value: The value to assign using the command. Not applicable for all commands.
"""
pass
def focus(self):
"""
Focus(self: HtmlDocument)
Sets user input focus on the current document.
"""
pass
def get_element_by_id(self, id):
"""
GetElementById(self: HtmlDocument,id: str) -> HtmlElement
Retrieves a single System.Windows.Forms.HtmlElement using the element's ID attribute as a search
key.
id: The ID attribute of the element to retrieve.
Returns: Returns the first object with the same ID attribute as the specified value,or null if the id
cannot be found.
"""
pass
def get_element_from_point(self, point):
"""
GetElementFromPoint(self: HtmlDocument,point: Point) -> HtmlElement
Retrieves the HTML element located at the specified client coordinates.
point: The x,y position of the element on the screen,relative to the top-left corner of the document.
Returns: The System.Windows.Forms.HtmlElement at the specified screen location in the document.
"""
pass
def get_elements_by_tag_name(self, tagName):
"""
GetElementsByTagName(self: HtmlDocument,tagName: str) -> HtmlElementCollection
Retrieve a collection of elements with the specified HTML tag.
tagName: The name of the HTML tag for the System.Windows.Forms.HtmlElement objects you want to retrieve.
Returns: The collection of elements who tag name is equal to the tagName argument.
"""
pass
def get_hash_code(self):
"""
GetHashCode(self: HtmlDocument) -> int
Returns: A hash code for the current System.Object.
"""
pass
def invoke_script(self, scriptName, args=None):
"""
InvokeScript(self: HtmlDocument,scriptName: str) -> object
Executes an Active Scripting function defined in an HTML page.
scriptName: The name of the script method to invoke.
Returns: The object returned by the Active Scripting call.
InvokeScript(self: HtmlDocument,scriptName: str,args: Array[object]) -> object
Executes an Active Scripting function defined in an HTML page.
scriptName: The name of the script method to invoke.
args: The arguments to pass to the script method.
Returns: The object returned by the Active Scripting call.
"""
pass
def open_new(self, replaceInHistory):
"""
OpenNew(self: HtmlDocument,replaceInHistory: bool) -> HtmlDocument
Gets a new System.Windows.Forms.HtmlDocument to use with the
System.Windows.Forms.HtmlDocument.Write(System.String) method.
replaceInHistory: Whether the new window's navigation should replace the previous element in the navigation
history of the DOM.
Returns: A new document for writing.
"""
pass
def write(self, text):
"""
Write(self: HtmlDocument,text: str)
Writes a new HTML page.
text: The HTML text to write into the document.
"""
pass
def __eq__(self, *args):
""" x.__eq__(y) <==> x==y """
pass
def __ne__(self, *args):
pass
active_element = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Provides the System.Windows.Forms.HtmlElement which currently has user input focus.\n\n\n\nGet: ActiveElement(self: HtmlDocument) -> HtmlElement\n\n\n\n'
active_link_color = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the System.Drawing.Color of a hyperlink when clicked by a user.\n\n\n\nGet: ActiveLinkColor(self: HtmlDocument) -> Color\n\n\n\nSet: ActiveLinkColor(self: HtmlDocument)=value\n\n'
all = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets an instance of System.Windows.Forms.HtmlElementCollection,which stores all System.Windows.Forms.HtmlElement objects for the document.\n\n\n\nGet: All(self: HtmlDocument) -> HtmlElementCollection\n\n\n\n'
back_color = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the background color of the HTML document.\n\n\n\nGet: BackColor(self: HtmlDocument) -> Color\n\n\n\nSet: BackColor(self: HtmlDocument)=value\n\n'
body = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets the System.Windows.Forms.HtmlElement for the BODY tag.\n\n\n\nGet: Body(self: HtmlDocument) -> HtmlElement\n\n\n\n'
cookie = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the HTTP cookies associated with this document.\n\n\n\nGet: Cookie(self: HtmlDocument) -> str\n\n\n\nSet: Cookie(self: HtmlDocument)=value\n\n'
default_encoding = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets the encoding used by default for the current document.\n\n\n\nGet: DefaultEncoding(self: HtmlDocument) -> str\n\n\n\n'
domain = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the string describing the domain of this document for security purposes.\n\n\n\nGet: Domain(self: HtmlDocument) -> str\n\n\n\nSet: Domain(self: HtmlDocument)=value\n\n'
dom_document = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets the unmanaged interface pointer for this System.Windows.Forms.HtmlDocument.\n\n\n\nGet: DomDocument(self: HtmlDocument) -> object\n\n\n\n'
encoding = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the character encoding for this document.\n\n\n\nGet: Encoding(self: HtmlDocument) -> str\n\n\n\nSet: Encoding(self: HtmlDocument)=value\n\n'
focused = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets a value indicating whether the document has user input focus.\n\n\n\nGet: Focused(self: HtmlDocument) -> bool\n\n\n\n'
fore_color = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the text color for the document.\n\n\n\nGet: ForeColor(self: HtmlDocument) -> Color\n\n\n\nSet: ForeColor(self: HtmlDocument)=value\n\n'
forms = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets a collection of all of the <FORM> elements in the document.\n\n\n\nGet: Forms(self: HtmlDocument) -> HtmlElementCollection\n\n\n\n'
images = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets a collection of all image tags in the document.\n\n\n\nGet: Images(self: HtmlDocument) -> HtmlElementCollection\n\n\n\n'
link_color = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the color of hyperlinks.\n\n\n\nGet: LinkColor(self: HtmlDocument) -> Color\n\n\n\nSet: LinkColor(self: HtmlDocument)=value\n\n'
links = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets a list of all the hyperlinks within this HTML document.\n\n\n\nGet: Links(self: HtmlDocument) -> HtmlElementCollection\n\n\n\n'
right_to_left = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the direction of text in the current document.\n\n\n\nGet: RightToLeft(self: HtmlDocument) -> bool\n\n\n\nSet: RightToLeft(self: HtmlDocument)=value\n\n'
title = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the text value of the <TITLE> tag in the current HTML document.\n\n\n\nGet: Title(self: HtmlDocument) -> str\n\n\n\nSet: Title(self: HtmlDocument)=value\n\n'
url = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets the URL describing the location of this document.\n\n\n\nGet: Url(self: HtmlDocument) -> Uri\n\n\n\n'
visited_link_color = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets the Color of links to HTML pages that the user has already visited.\n\n\n\nGet: VisitedLinkColor(self: HtmlDocument) -> Color\n\n\n\nSet: VisitedLinkColor(self: HtmlDocument)=value\n\n'
window = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets the System.Windows.Forms.HtmlWindow associated with this document.\n\n\n\nGet: Window(self: HtmlDocument) -> HtmlWindow\n\n\n\n'
click = None
context_menu_showing = None
focusing = None
losing_focus = None
mouse_down = None
mouse_leave = None
mouse_move = None
mouse_over = None
mouse_up = None
stop = None |
class GumoBaseError(RuntimeError):
    """Root of the Gumo error hierarchy; catch this for any package error."""
    pass
class ConfigurationError(GumoBaseError):
    """Raised when package configuration is missing or invalid."""
    pass
class ServiceAccountConfigurationError(ConfigurationError):
    """Raised for configuration problems specific to service-account setup."""
    pass
class ObjectNotoFoundError(GumoBaseError):
    """Raised when a requested object does not exist.

    NOTE(review): the name looks like a typo for ``ObjectNotFoundError``;
    kept as-is because renaming would break existing callers.
    """
    pass
| class Gumobaseerror(RuntimeError):
pass
class Configurationerror(GumoBaseError):
pass
class Serviceaccountconfigurationerror(ConfigurationError):
pass
class Objectnotofounderror(GumoBaseError):
pass |
class CrudBackend(object):
    """Abstract interface for CRUD storage backends.

    Subclasses must override the five operations below.  Each base
    implementation raises ``NotImplementedError``.

    Bug fix: the original *returned* ``NotImplementedError()`` instead of
    raising it, silently handing callers an exception instance and hiding
    missing overrides.
    """

    def __init__(self):
        pass

    def create(self, key, data=None):
        """Create an entry under *key* with optional *data*."""
        raise NotImplementedError()

    def read(self, key):
        """Return the data stored under *key*."""
        raise NotImplementedError()

    def update(self, key, data):
        """Replace the data stored under *key* with *data*."""
        raise NotImplementedError()

    def delete(self, key):
        """Remove the entry stored under *key*."""
        raise NotImplementedError()

    def has_node(self, key):
        """Return whether an entry exists under *key*."""
        raise NotImplementedError()
| class Crudbackend(object):
def __init__(self):
pass
def create(self, key, data=None):
return not_implemented_error()
def read(self, key):
return not_implemented_error()
def update(self, key, data):
return not_implemented_error()
def delete(self, key):
return not_implemented_error()
def has_node(self, key):
return not_implemented_error() |
class CommonInfoAdminMixin:
    """Django ModelAdmin mixin that manages audit columns automatically.

    Marks the created_*/lastmodified_* audit fields read-only in the admin
    and stamps them with the requesting user on save.
    """

    def get_readonly_fields(self, request, obj=None):
        # Coerce the parent result to a tuple: Django permits subclasses to
        # return a list here, and ``list + tuple`` raises TypeError.
        return tuple(super().get_readonly_fields(request, obj)) + (
            'created_by', 'lastmodified_by', 'created_at', 'lastmodified_at')

    def save_form(self, request, form, change):
        """Stamp audit users on the instance, then delegate to the normal save."""
        if form.instance and request.user:
            if not form.instance.id:
                # No primary key yet: this is a brand-new object, record creator.
                form.instance.created_by = request.user
            form.instance.lastmodified_by = request.user
        return super().save_form(request, form, change)
| class Commoninfoadminmixin:
def get_readonly_fields(self, request, obj=None):
return super().get_readonly_fields(request, obj) + ('created_by', 'lastmodified_by', 'created_at', 'lastmodified_at')
def save_form(self, request, form, change):
if form.instance and request.user:
if not form.instance.id:
form.instance.created_by = request.user
form.instance.lastmodified_by = request.user
return super().save_form(request, form, change) |
'''
Problem Statement
Given a linked list with integer data, arrange the elements in such a manner that all nodes with even numbers are placed after odd numbers.
Do not create any new nodes and avoid using any other data structure. The relative order of even and odd elements must not change.
Example:
linked list = 1 2 3 4 5 6
output = 1 3 5 2 4 6
'''
class Node:
    """A node of a singly linked list."""
    def __init__(self, data):
        self.data = data  # payload value carried by this node
        self.next = None  # reference to the next Node, or None at the tail
"""
parameter: - head of the given linked list
return: - head of the updated list with all even elements placed after odd elements
"""
#--------------------------------------------------#
'''
The Idea: Traverse the given LinkedList, and build two sub-lists: EVEN and ODD.
For this purpose, we will use four helper references, that denotes starting and
current ending of EVEN and ODD sub-list respectively.
1. For each Node in the LinkedList, check if its data is even/odd.
Change the "next" reference (pointer) of each Node, based on the following rules:
- First even valued Node will be referenced by head of EVEN sub-list
- Subsequent even valued Node will be appended to the tail of EVEN sub-list
- First odd valued Node will be referenced by head of ODD sub-list
- Subsequent odd valued Node will be appended to the tail of ODD sub-list
2. After the loop, append the EVEN sub-list to the tail of ODD sub-list.
'''
#--------------------------------------------------#
def even_after_odd(head):
    """Rearrange a linked list in place so odd values precede even values.

    Relative order inside each group is preserved and no new nodes are
    created; nodes are detached one by one and appended to either an ODD
    or an EVEN sub-list, which are finally stitched together.

    parameter: head of the given linked list
    return: head of the updated list (None for an empty list)
    """
    if head is None:
        return None

    odd_first = odd_last = None    # bounds of the ODD sub-list
    even_first = even_last = None  # bounds of the EVEN sub-list

    node = head
    while node is not None:
        follower = node.next
        node.next = None  # detach so each appended node is a clean tail
        if node.data % 2 == 0:
            if even_first is None:
                even_first = even_last = node
            else:
                even_last.next = node
                even_last = node
        else:
            if odd_first is None:
                odd_first = odd_last = node
            else:
                odd_last.next = node
                odd_last = node
        node = follower

    # No odd nodes at all: the even chain is the whole result.
    if odd_first is None:
        return even_first

    # Hang the even chain (possibly empty) behind the odd chain.
    odd_last.next = even_first
    return odd_first
# helper functions for testing purpose
def create_linked_list(arr):
    """Build a linked list of Node objects from *arr*; return its head or None."""
    head = None
    tail = None
    for value in arr:
        fresh = Node(value)
        if head is None:
            head = fresh  # first node becomes the head
        else:
            tail.next = fresh
        tail = fresh
    return head
def print_linked_list(head):
    """Print the list's values separated (and followed) by a space, then a newline."""
    parts = []
    node = head
    while node is not None:
        parts.append('%s ' % (node.data,))
        node = node.next
    print(''.join(parts))
def test_function(test_case):
    """Run even_after_odd on test_case[0] and print Pass/Fail vs test_case[1].

    "Pass" requires both that the rearranged values equal the expected
    ``solution`` list AND that every node in the result is one of the
    original nodes (the exercise forbids creating new nodes).
    """
    head = test_case[0]
    solution = test_case[1]
    # Remember every original node so foreign (newly created) nodes are caught.
    original_nodes = []
    temp = head
    while temp:
        original_nodes.append(temp)
        temp = temp.next
    head = even_after_odd(head)
    # Collect the rearranged values, rejecting any node we did not start with.
    result = []
    temp = head
    while temp:
        if temp not in original_nodes:
            print("Fail")
            return
        result.append(temp.data)
        temp = temp.next
    # Fix: compare the full sequences.  The original printed "Pass" when the
    # result list ended early (index was never checked against len(solution))
    # and relied on a blanket except/IndexError for the opposite case.
    if result == solution:
        print("Pass")
    else:
        print("Fail")
# --- Smoke tests --------------------------------------------------------
# Mixed values: odds keep their order, evens are moved behind them.
arr = [1, 2, 3, 4, 5, 6]
solution = [1, 3, 5, 2, 4, 6]
head = create_linked_list(arr)
test_case = [head, solution]
test_function(test_case)
# All-odd input: the list must come back unchanged.
arr = [1, 3, 5, 7]
solution = [1, 3, 5, 7]
head = create_linked_list(arr)
test_case = [head, solution]
test_function(test_case)
# All-even input: the list must also come back unchanged.
arr = [2, 4, 6, 8]
solution = [2, 4, 6, 8]
head = create_linked_list(arr)
test_case = [head, solution]
test_function(test_case)
| """
Problem Statement
Given a linked list with integer data, arrange the elements in such a manner that all nodes with even numbers are placed after odd numbers.
Do not create any new nodes and avoid using any other data structure. The relative order of even and odd elements must not change.
Example:
linked list = 1 2 3 4 5 6
output = 1 3 5 2 4 6
"""
class Node:
def __init__(self, data):
self.data = data
self.next = None
'\nparameter: - head of the given linked list\nreturn: - head of the updated list with all even elements placed after odd elements\n'
'\nThe Idea: Traverse the given LinkedList, and build two sub-lists: EVEN and ODD. \nFor this purpose, we will use four helper references, that denotes starting and \ncurrent ending of EVEN and ODD sub-list respectively. \n\n1. For each Node in the LinkedList, check if its data is even/odd. \nChange the "next" reference (pointer) of each Node, based on the following rules:\n - First even valued Node will be referenced by head of EVEN sub-list\n - Subsequent even valued Node will be appended to the tail of EVEN sub-list\n\n - First odd valued Node will be referenced by head of ODD sub-list\n - Subsequent odd valued Node will be appended to the tail of ODD sub-list\n \n2. After the loop, append the EVEN sub-list to the tail of ODD sub-list.\n'
def even_after_odd(head):
if head is None:
return head
' `even_head` and `even_tail` represents the starting and current ending of the "EVEN" sub-list '
even_head = None
even_tail = None
' `odd_head` and `odd_tail` represents the starting and current ending of the "ODD" sub-list '
odd_head = None
odd_tail = None
current = head
while current:
next_node = current.next
if current.data % 2 == 0:
if even_head is None:
even_head = current
even_tail = even_head
else:
even_tail.next = current
even_tail = even_tail.next
elif odd_head is None:
odd_head = current
odd_tail = odd_head
else:
odd_tail.next = current
odd_tail = odd_tail.next
current.next = None
current = next_node
if odd_head is None:
return even_head
odd_tail.next = even_head
return odd_head
def create_linked_list(arr):
if len(arr) == 0:
return None
head = node(arr[0])
tail = head
for data in arr[1:]:
tail.next = node(data)
tail = tail.next
return head
def print_linked_list(head):
while head:
print(head.data, end=' ')
head = head.next
print()
def test_function(test_case):
head = test_case[0]
solution = test_case[1]
node_tracker = dict({})
node_tracker['nodes'] = list()
temp = head
while temp:
node_tracker['nodes'].append(temp)
temp = temp.next
head = even_after_odd(head)
temp = head
index = 0
try:
while temp:
if temp.data != solution[index] or temp not in node_tracker['nodes']:
print('Fail')
return
temp = temp.next
index += 1
print('Pass')
except Exception as e:
print('Fail')
arr = [1, 2, 3, 4, 5, 6]
solution = [1, 3, 5, 2, 4, 6]
head = create_linked_list(arr)
test_case = [head, solution]
test_function(test_case)
arr = [1, 3, 5, 7]
solution = [1, 3, 5, 7]
head = create_linked_list(arr)
test_case = [head, solution]
test_function(test_case)
arr = [2, 4, 6, 8]
solution = [2, 4, 6, 8]
head = create_linked_list(arr)
test_case = [head, solution]
test_function(test_case) |
# Tuples (Polish: "krotki") -- construction, indexing and slicing demo.
k = ('a', 1, 'qqq', {1: 'x', 2: 'y'})
print(k)
print(k[0])     # first element
print(k[-1])    # last element (the dict)
print(k[1:-1])  # slice without first and last elements
print('------operacje ----------')
# Tuples are immutable, so these calls would raise AttributeError:
# k.append('www')
# k.remove('qq')
print(k.index(1))
# print k.index('b')  # would raise ValueError: 'b' is not in the tuple
print(k.count('b'))
print(len(k))
# The tuple itself is immutable, but a mutable element (the dict) can change.
k[-1][1] = 'zzz'
print(k)
print('a' in k, 'z' in k)
# Tuple converted to a list (mutable)...
l = list(k)
print(l)
l[0] = 'x'
# ...and back to a tuple again.
k = tuple(l)
print(k)
print(dir(tuple))
# A list may contain itself; freezing it into a tuple preserves the cycle.
x = []
x.append(x)
l = tuple(x)
print(l)
| k = ('a', 1, 'qqq', {1: 'x', 2: 'y'})
print(k)
print(k[0])
print(k[-1])
print(k[1:-1])
print('------operacje ----------')
print(k.index(1))
print(k.count('b'))
print(len(k))
k[-1][1] = 'zzz'
print(k)
print('a' in k, 'z' in k)
l = list(k)
print(l)
l[0] = 'x'
k = tuple(l)
print(k)
print(dir(tuple))
x = []
x.append(x)
l = tuple(x)
print(l) |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
def _assemble_versioned_impl(ctx):
    """Rule implementation: stamps a version onto a set of archives.

    If no `version_file` attribute is supplied, a throwaway file holding
    the `--define version=...` value (default "0.0.0") is generated first.
    The assemble-versioned tool then combines the target archives into a
    single versioned output zip.
    """
    if not ctx.attr.version_file:
        # Synthesize a version file; its name warns users not to depend on it.
        version_file = ctx.actions.declare_file(ctx.attr.name + "__do_not_reference.version")
        version = ctx.var.get('version', '0.0.0')
        ctx.actions.run_shell(
            inputs = [],
            outputs = [version_file],
            command = "echo {} > {}".format(version, version_file.path)
        )
    else:
        version_file = ctx.file.version_file
    # assemble-version.py $output $version $targets
    ctx.actions.run(
        inputs = ctx.files.targets + [version_file],
        outputs = [ctx.outputs.archive],
        executable = ctx.executable._assemble_versioned_py,
        arguments = [ctx.outputs.archive.path, version_file.path] + [target.path for target in ctx.files.targets],
        progress_message = "Versioning assembled distributions to {}".format(version_file.short_path)
    )
    # Expose the archive in data runfiles so dependents can ship it.
    return DefaultInfo(data_runfiles = ctx.runfiles(files=[ctx.outputs.archive]))
# Rule: bundle several .zip/.tar.gz archives into one versioned %{name}.zip,
# using either an explicit version_file or `--define version=...`.
assemble_versioned = rule(
    attrs = {
        "targets": attr.label_list(
            allow_files = [".zip", ".tar.gz"],
            doc = "Archives to version and put into output archive"
        ),
        "version_file": attr.label(
            allow_single_file = True,
            doc = "File containing version string"
        ),
        # Helper binary that performs the actual repackaging.
        "_assemble_versioned_py": attr.label(
            default = "//common/assemble_versioned:assemble-versioned",
            executable = True,
            cfg = "host"
        )
    },
    implementation = _assemble_versioned_impl,
    outputs = {
        "archive": "%{name}.zip"
    },
    output_to_genfiles = True,
    doc = "Version multiple archives for subsequent simultaneous deployment"
)
| def _assemble_versioned_impl(ctx):
if not ctx.attr.version_file:
version_file = ctx.actions.declare_file(ctx.attr.name + '__do_not_reference.version')
version = ctx.var.get('version', '0.0.0')
ctx.actions.run_shell(inputs=[], outputs=[version_file], command='echo {} > {}'.format(version, version_file.path))
else:
version_file = ctx.file.version_file
ctx.actions.run(inputs=ctx.files.targets + [version_file], outputs=[ctx.outputs.archive], executable=ctx.executable._assemble_versioned_py, arguments=[ctx.outputs.archive.path, version_file.path] + [target.path for target in ctx.files.targets], progress_message='Versioning assembled distributions to {}'.format(version_file.short_path))
return default_info(data_runfiles=ctx.runfiles(files=[ctx.outputs.archive]))
assemble_versioned = rule(attrs={'targets': attr.label_list(allow_files=['.zip', '.tar.gz'], doc='Archives to version and put into output archive'), 'version_file': attr.label(allow_single_file=True, doc='File containing version string'), '_assemble_versioned_py': attr.label(default='//common/assemble_versioned:assemble-versioned', executable=True, cfg='host')}, implementation=_assemble_versioned_impl, outputs={'archive': '%{name}.zip'}, output_to_genfiles=True, doc='Version multiple archives for subsequent simultaneous deployment') |
"""--------------------------------------------------------------------
* $Id: GUI_definition.py $
*
* This file is part of libRadtran.
* Copyright (c) 1997-2012 by Arve Kylling, Bernhard Mayer,
* Claudia Emde, Robert Buras
*
* ######### Contact info: http://www.libradtran.org #########
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*--------------------------------------------------------------------"""
__all__ = ["FileInput", "FloatInput", "TextInput", "IntegerInput", "ListInput", "IntegerListInput", "BooleanInput", "VariableNumberOfLinesInput"]
class Input():
    """Base class for all GUI input widgets.

    Parameters
    ----------
    name : str
        Displayed above the input in the GUI for options with multiple
        inputs; required.
    optional : bool
        Whether this input may be left empty.
    """
    def __init__(self, name=None, optional=False):
        """
        name = displayed above input in GUI for options with multiple inputs
        """
        # Idiom fix: `x is not None` instead of `not x is None`.
        assert name is not None
        self.name = name
        self.optional = optional
class NumberInput(Input):
    """Input holding a numeric value restricted to ``valid_range``."""

    def __init__(self, default=None, valid_range=(-1e99, 1e99), **kwargs):
        Input.__init__(self, **kwargs)
        # Legacy sentinel strings from the old option files mean "no default".
        # This should be removed when/if the option files are cleaned up.
        if default == "NOT_DEFINED_INTEGER" or default == "NOT_DEFINED_FLOAT":
            default = None
        self.default = default          # default numeric value, or None
        self.valid_range = valid_range  # (min, max) pair of accepted values
class FileInput(Input):
    """Input whose value is a path to a file."""
    def __init__(self, **kwargs):
        Input.__init__(self, **kwargs)
class FloatInput(NumberInput):
    """Numeric input holding a floating-point value."""
    pass
class TextInput(Input):
    """Free-form text input with an optional default string."""
    def __init__(self, default=None, **kwargs):
        Input.__init__(self, **kwargs)
        self.default = default  # default text, or None when unset
class IntegerInput(NumberInput):
    """Numeric input restricted to integer values."""
    def __init__(self, default=None, **kwargs):
        # Legacy sentinel strings mean "no default".
        # This should be removed when/if the option files are cleaned up
        if default in ("NOT_DEFINED_INTEGER", "NOT_DEFINED_FLOAT"):
            default = None
        if not default is None:
            # NOTE(review): `type(...) == int` rejects bool (a subclass of
            # int); presumably intentional -- confirm before switching to
            # isinstance().
            assert type(default) == int, \
                "Default of integer input must be an integer!"
        NumberInput.__init__(self, default=default, **kwargs)
class ListInput(Input):
    """ Valid inputs are one among a list of strings """

    def __init__(self, default=None, valid_range=None, optional=False,
                 logical_file=False, **kwargs):
        Input.__init__(self, optional=optional, **kwargs)
        assert valid_range is not None, "You must provide a range of choices!"
        # String choices are matched case-insensitively, so store lowercase.
        self.valid_range = [
            choice.lower() if isinstance(choice, str) else choice
            for choice in valid_range
        ]
        if optional:
            # An optional list input offers the empty string as first choice.
            if self.valid_range.count(""):
                self.valid_range.remove("")
            self.valid_range.insert(0, "")
        if isinstance(default, str):
            default = default.lower()
        if default is None:
            # No explicit default: fall back to the first choice.
            default = self.valid_range[0]
        assert default in self.valid_range, "Default not among valid options!"
        self.default = default
        self.logical_file = logical_file
class IntegerListInput(ListInput):
    """ListInput whose choices are integers, presented as their string forms."""
    def __init__(self, **kwargs):
        ListInput.__init__(self, **kwargs)
        # ListInput validated the raw values; convert to strings for the GUI.
        self.default = str(self.default)
        self.valid_range = tuple([str(i) for i in self.valid_range])
class BooleanInput(Input):
    """Flag-style input: either present or absent, carries no value."""
    pass
class VariableNumberOfLinesInput(Input):
    """Input consisting of a variable number of lines drawn from valid_range."""
    def __init__(self, valid_range=None, **kwargs):
        Input.__init__(self, **kwargs)
        assert not valid_range is None
        self.valid_range = valid_range  # allowed choices per line; required
| """--------------------------------------------------------------------
* $Id: GUI_definition.py $
*
* This file is part of libRadtran.
* Copyright (c) 1997-2012 by Arve Kylling, Bernhard Mayer,
* Claudia Emde, Robert Buras
*
* ######### Contact info: http://www.libradtran.org #########
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*--------------------------------------------------------------------"""
__all__ = ['FileInput', 'FloatInput', 'TextInput', 'IntegerInput', 'ListInput', 'IntegerListInput', 'BooleanInput', 'VariableNumberOfLinesInput']
class Input:
def __init__(self, name=None, optional=False):
"""
name = displayed above input in GUI for options with multiple inputs
"""
assert not name is None
self.name = name
self.optional = optional
class Numberinput(Input):
def __init__(self, default=None, valid_range=(-1e+99, 1e+99), **kwargs):
Input.__init__(self, **kwargs)
if default in ('NOT_DEFINED_INTEGER', 'NOT_DEFINED_FLOAT'):
default = None
self.default = default
self.valid_range = valid_range
class Fileinput(Input):
def __init__(self, **kwargs):
Input.__init__(self, **kwargs)
class Floatinput(NumberInput):
pass
class Textinput(Input):
def __init__(self, default=None, **kwargs):
Input.__init__(self, **kwargs)
self.default = default
class Integerinput(NumberInput):
def __init__(self, default=None, **kwargs):
if default in ('NOT_DEFINED_INTEGER', 'NOT_DEFINED_FLOAT'):
default = None
if not default is None:
assert type(default) == int, 'Default of integer input must be an integer!'
NumberInput.__init__(self, default=default, **kwargs)
class Listinput(Input):
""" Valid inputs are one among a list of strings """
def __init__(self, default=None, valid_range=None, optional=False, logical_file=False, **kwargs):
Input.__init__(self, optional=optional, **kwargs)
assert not valid_range is None, 'You must provide a range of choices!'
self.valid_range = []
for val in valid_range:
if isinstance(val, str):
self.valid_range.append(val.lower())
else:
self.valid_range.append(val)
if optional:
if self.valid_range.count(''):
self.valid_range.remove('')
self.valid_range.insert(0, '')
if isinstance(default, str):
default = default.lower()
if default is None:
default = self.valid_range[0]
assert default in self.valid_range, 'Default not among valid options!'
self.default = default
self.logical_file = logical_file
class Integerlistinput(ListInput):
def __init__(self, **kwargs):
ListInput.__init__(self, **kwargs)
self.default = str(self.default)
self.valid_range = tuple([str(i) for i in self.valid_range])
class Booleaninput(Input):
pass
class Variablenumberoflinesinput(Input):
def __init__(self, valid_range=None, **kwargs):
Input.__init__(self, **kwargs)
assert not valid_range is None
self.valid_range = valid_range |
test = {
'name': 'What Would Scheme Display?',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
scm> (- 10 4)
6
scm> (* 7 6)
42
scm> (+ 1 2 3 4)
10
scm> (/ 8 2 2)
2
scm> (quotient 29 5)
5
scm> (modulo 29 5)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
scm> (= 1 3) ; Scheme uses '=' instead of '==' for comparison
#f
scm> (< 1 3)
#t
scm> (or #t #f) ; or special form short circuits
#t
scm> (and #t #f (/ 1 0))
#f
scm> (not #t)
#f
""",
'hidden': False,
'locked': False
},
{
'code': r"""
scm> (define x 3)
x
scm> x
3
scm> (define y (+ x 4))
y
scm> y
7
scm> (define x (lambda (y) (* y 2)))
x
scm> (x y)
14
""",
'hidden': False,
'locked': False
},
{
'code': r"""
scm> (if (print 1) (print 2) (print 3))
1
2
scm> (* (if (> 3 2) 1 2) (+ 4 5))
9
scm> (define foo (lambda (x y z) (if x y z)))
foo
scm> (foo 1 2 (print 'hi))
hi
2
scm> ((lambda (a) (print 'a)) 100)
a
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
""",
'teardown': '',
'type': 'scheme'
}
]
}
| test = {'name': 'What Would Scheme Display?', 'points': 1, 'suites': [{'cases': [{'code': '\n scm> (- 10 4)\n 6\n scm> (* 7 6)\n 42\n scm> (+ 1 2 3 4)\n 10\n scm> (/ 8 2 2)\n 2\n scm> (quotient 29 5)\n 5\n scm> (modulo 29 5)\n 4\n ', 'hidden': False, 'locked': False}, {'code': "\n scm> (= 1 3) ; Scheme uses '=' instead of '==' for comparison\n #f\n scm> (< 1 3)\n #t\n scm> (or #t #f) ; or special form short circuits\n #t\n scm> (and #t #f (/ 1 0))\n #f\n scm> (not #t)\n #f\n ", 'hidden': False, 'locked': False}, {'code': '\n scm> (define x 3)\n x\n scm> x\n 3\n scm> (define y (+ x 4))\n y\n scm> y\n 7\n scm> (define x (lambda (y) (* y 2)))\n x\n scm> (x y)\n 14\n ', 'hidden': False, 'locked': False}, {'code': "\n scm> (if (print 1) (print 2) (print 3))\n 1\n 2\n scm> (* (if (> 3 2) 1 2) (+ 4 5))\n 9\n scm> (define foo (lambda (x y z) (if x y z)))\n foo\n scm> (foo 1 2 (print 'hi))\n hi\n 2\n scm> ((lambda (a) (print 'a)) 100)\n a\n ", 'hidden': False, 'locked': False}], 'scored': True, 'setup': '\n \n ', 'teardown': '', 'type': 'scheme'}]} |
MODAL_REQUEST = {
"callback_id": "change_request_review",
"type": "modal",
"title": {
"type": "plain_text",
"text": "Switcher Change Request"
},
"submit": {
"type": "plain_text",
"text": "Submit"
},
"close": {
"type": "plain_text",
"text": "Cancel"
},
"blocks": [
{
"type": "context",
"elements": [
{
"type": "plain_text",
"text": "Select the options below to request a Switcher status change."
}
]
},
{
"type": "divider"
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Environment"
},
"accessory": {
"type": "static_select",
"placeholder": {
"type": "plain_text",
"text": "Select an item"
},
"options": [],
"action_id": "selection_environment"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Group"
},
"accessory": {
"type": "static_select",
"placeholder": {
"type": "plain_text",
"text": "Select an item"
},
"options": [
{
"text": {
"type": "plain_text",
"text": "-"
},
"value": "-"
}
],
"action_id": "selection_group"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Switcher"
},
"accessory": {
"type": "static_select",
"placeholder": {
"type": "plain_text",
"text": "Select an item"
},
"options": [
{
"text": {
"type": "plain_text",
"text": "-"
},
"value": "-"
}
],
"action_id": "selection_switcher"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Status"
},
"accessory": {
"type": "static_select",
"placeholder": {
"type": "plain_text",
"text": "Select an item"
},
"options": [
{
"text": {
"type": "plain_text",
"text": "-"
},
"value": "-"
}
],
"action_id": "selection_status"
}
}
]
}
APP_HOME = {
"type": "home",
"blocks": [
{
"type": "image",
"image_url": "https://raw.githubusercontent.com/switcherapi/switcherapi-assets/master/samples/slack/logo.png",
"alt_text": "Switcher Slack App"
},
{
"type": "context",
"elements": [
{
"type": "plain_text",
"text": "What are you up today?"
}
]
},
{
"type": "divider"
},
{
"type": "actions",
"elements": [
{
"type": "button",
"text": {
"type": "plain_text",
"text": "Open Change Request"
},
"action_id": "change_request"
}
]
}
]
} | modal_request = {'callback_id': 'change_request_review', 'type': 'modal', 'title': {'type': 'plain_text', 'text': 'Switcher Change Request'}, 'submit': {'type': 'plain_text', 'text': 'Submit'}, 'close': {'type': 'plain_text', 'text': 'Cancel'}, 'blocks': [{'type': 'context', 'elements': [{'type': 'plain_text', 'text': 'Select the options below to request a Switcher status change.'}]}, {'type': 'divider'}, {'type': 'section', 'text': {'type': 'mrkdwn', 'text': 'Environment'}, 'accessory': {'type': 'static_select', 'placeholder': {'type': 'plain_text', 'text': 'Select an item'}, 'options': [], 'action_id': 'selection_environment'}}, {'type': 'section', 'text': {'type': 'mrkdwn', 'text': 'Group'}, 'accessory': {'type': 'static_select', 'placeholder': {'type': 'plain_text', 'text': 'Select an item'}, 'options': [{'text': {'type': 'plain_text', 'text': '-'}, 'value': '-'}], 'action_id': 'selection_group'}}, {'type': 'section', 'text': {'type': 'mrkdwn', 'text': 'Switcher'}, 'accessory': {'type': 'static_select', 'placeholder': {'type': 'plain_text', 'text': 'Select an item'}, 'options': [{'text': {'type': 'plain_text', 'text': '-'}, 'value': '-'}], 'action_id': 'selection_switcher'}}, {'type': 'section', 'text': {'type': 'mrkdwn', 'text': 'Status'}, 'accessory': {'type': 'static_select', 'placeholder': {'type': 'plain_text', 'text': 'Select an item'}, 'options': [{'text': {'type': 'plain_text', 'text': '-'}, 'value': '-'}], 'action_id': 'selection_status'}}]}
app_home = {'type': 'home', 'blocks': [{'type': 'image', 'image_url': 'https://raw.githubusercontent.com/switcherapi/switcherapi-assets/master/samples/slack/logo.png', 'alt_text': 'Switcher Slack App'}, {'type': 'context', 'elements': [{'type': 'plain_text', 'text': 'What are you up today?'}]}, {'type': 'divider'}, {'type': 'actions', 'elements': [{'type': 'button', 'text': {'type': 'plain_text', 'text': 'Open Change Request'}, 'action_id': 'change_request'}]}]} |
# File: wildfire_consts.py
#
# Copyright (c) 2016-2022 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
# --- JSON keys used for app configuration / action parameters -------------
WILDFIRE_JSON_BASE_URL = "base_url"
WILDFIRE_JSON_TASK_ID = "task_id"
WILDFIRE_JSON_API_KEY = "api_key"  # pragma: allowlist secret
WILDFIRE_JSON_MALWARE = "malware"
# NOTE(review): redefines WILDFIRE_JSON_TASK_ID from "task_id" above -- the
# earlier value is dead code; confirm which key the actions actually expect.
WILDFIRE_JSON_TASK_ID = "id"
WILDFIRE_JSON_URL = "url"
WILDFIRE_JSON_HASH = "hash"
WILDFIRE_JSON_PLATFORM = "platform"
WILDFIRE_JSON_POLL_TIMEOUT_MINS = "timeout"
# --- User-facing status / error message templates --------------------------
WILDFIRE_ERR_UNABLE_TO_PARSE_REPLY = "Unable to parse reply from device"
WILDFIRE_ERR_REPLY_FORMAT_KEY_MISSING = "None '{key}' missing in reply from device"
WILDFIRE_ERR_REPLY_NOT_SUCCESS = "REST call returned '{status}'"
WILDFIRE_SUCC_REST_CALL_SUCCEEDED = "REST Api call succeeded"
WILDFIRE_ERR_REST_API = "REST Api Call returned error, status_code: {status_code}, detail: {detail}"
WILDFIRE_ERR_FILE_NOT_FOUND_IN_VAULT = "File not found in vault"
WILDFIRE_INVALID_INT = "Please provide a valid integer value in the {param}"
WILDFIRE_ERR_INVALID_PARAM = "Please provide a non-zero positive integer in the {param}"
WILDFIRE_ERR_NEGATIVE_INT_PARAM = "Please provide a valid non-negative integer value in the {param}"
WILDFIRE_TEST_PDF_FILE = "wildfire_test_connectivity.pdf"
# Seconds to wait between report polls.
WILDFIRE_SLEEP_SECS = 10
WILDFIRE_MSG_REPORT_PENDING = "Report Pending"
WILDFIRE_MSG_MAX_POLLS_REACHED = ("Reached max polling attempts. "
                "Please use the MD5 or Sha256 of the file as a parameter to <b>get report</b> to query the report status.")
WILDFIRE_TIMEOUT = "'timeout' action parameter"
# Default polling timeout, in minutes.
WILDFIRE_MAX_TIMEOUT_DEF = 10
| wildfire_json_base_url = 'base_url'
wildfire_json_task_id = 'task_id'
wildfire_json_api_key = 'api_key'
wildfire_json_malware = 'malware'
wildfire_json_task_id = 'id'
wildfire_json_url = 'url'
wildfire_json_hash = 'hash'
wildfire_json_platform = 'platform'
wildfire_json_poll_timeout_mins = 'timeout'
wildfire_err_unable_to_parse_reply = 'Unable to parse reply from device'
wildfire_err_reply_format_key_missing = "None '{key}' missing in reply from device"
wildfire_err_reply_not_success = "REST call returned '{status}'"
wildfire_succ_rest_call_succeeded = 'REST Api call succeeded'
wildfire_err_rest_api = 'REST Api Call returned error, status_code: {status_code}, detail: {detail}'
wildfire_err_file_not_found_in_vault = 'File not found in vault'
wildfire_invalid_int = 'Please provide a valid integer value in the {param}'
wildfire_err_invalid_param = 'Please provide a non-zero positive integer in the {param}'
wildfire_err_negative_int_param = 'Please provide a valid non-negative integer value in the {param}'
wildfire_test_pdf_file = 'wildfire_test_connectivity.pdf'
wildfire_sleep_secs = 10
wildfire_msg_report_pending = 'Report Pending'
wildfire_msg_max_polls_reached = 'Reached max polling attempts. Please use the MD5 or Sha256 of the file as a parameter to <b>get report</b> to query the report status.'
wildfire_timeout = "'timeout' action parameter"
wildfire_max_timeout_def = 10 |
# Each record: (latin name, common name, mean body mass -- presumably grams,
# TODO confirm units against the source data).
birds = ( ('Passerculus sandwichensis','Savannah sparrow',18.7),
          ('Delichon urbica','House martin',19),
          ('Junco phaeonotus','Yellow-eyed junco',19.5),
          ('Junco hyemalis','Dark-eyed junco',19.6),
          ('Tachycineata bicolor','Tree swallow',20.2),
        )
#(1) Write three separate list comprehensions that create three different
# lists containing the latin names, common names and mean body masses for
# each species in birds, respectively.
# (2) Now do the same using conventional loops (you can shoose to do this
# before 1 !).
# ANNOTATE WHAT EVERY BLOCK OR IF NECESSARY, LINE IS DOING!
# ALSO, PLEASE INCLUDE A DOCSTRING AT THE BEGINNING OF THIS FILE THAT
# SAYS WHAT THE SCRIPT DOES AND WHO THE AUTHOR IS.
| birds = (('Passerculus sandwichensis', 'Savannah sparrow', 18.7), ('Delichon urbica', 'House martin', 19), ('Junco phaeonotus', 'Yellow-eyed junco', 19.5), ('Junco hyemalis', 'Dark-eyed junco', 19.6), ('Tachycineata bicolor', 'Tree swallow', 20.2)) |
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 21 16:19:46 2018
@author: Sherry
Done
"""
def CorsairStats():
    """Roll up a random Corsair character sheet.

    Rolls twelve base attributes (each a fixed offset plus one d6),
    derives age and physical descriptors, then picks a family
    background from a single lookup table instead of the original's
    eleven copy-pasted elif branches (which all repeated the same
    add-specialty-or-grant-free-pick loop).

    NOTE(review): relies on a module-level d6() helper defined
    elsewhere in the file.  The visible original computes locals only
    and returns nothing; that behavior is preserved.
    """
    # Base stats: each is a fixed offset plus one six-sided roll.
    Agility = 7 + d6()
    Alertness = 3 + d6()
    Charm = 2 + d6()
    Cunning = 12 + d6()
    Dexterity = 13 + d6()
    Fate = 4 + d6()
    Intelligence = 10 + d6()
    Knowledge = 9 + d6()
    Mechanical = 11 + d6()
    Nature = 8 + d6()
    Stamina = 5 + d6()
    Strength = 6 + d6()

    # Every corsair starts with the same thief-flavored specialties.
    Specialties = ['Conceal', 'Filch', 'Forgery', 'Unlock']

    # Age, night vision and racial benefits.
    Age = Intelligence + Knowledge - 5
    Night_Vision = "No"
    Racial_Ability = "Growth"
    Uses_Per_Day = 2

    def _label_from_scale(roll, scale):
        # First threshold the roll does not exceed wins (same <=
        # semantics as the original elif ladders); a roll past every
        # threshold keeps its numeric value, exactly as before.
        for limit, label in scale:
            if roll <= limit:
                return label
        return roll

    # Physical build: map each roll to a descriptive label.
    Height = _label_from_scale(
        Strength + d6(),
        [(9, "Tiny"), (13, "Very Short"), (17, "Short"), (18, "Average")],
    )
    Weight = _label_from_scale(
        Stamina + d6(),
        [(8, "Very Thin"), (10, "Thin"), (13, "Average"),
         (15, "Heavy"), (17, "Very Heavy")],
    )

    # Family background: roll -> (name, bronze, free picks, specialties).
    # Fate is 5..10 and d6 is 1..6, so the roll is always 6..16.
    _BACKGROUNDS = {
        6: ("Derelict", 10, 8, ["Lie", "Search"]),
        7: ("Serf", 10, 8, ["Plants", "Forage"]),
        8: ("Herder", 10, 8, ["Tame", "Direction"]),
        9: ("Gatherer", 110, 7, ["Plants", "Forage"]),
        10: ("Hunter", 110, 7, ["Forage", "Track"]),
        11: ("Robber", 110, 7, ["Sword", "Bully"]),
        12: ("Counterfeiter", 210, 6, ["Contacts", "Literacy"]),
        13: ("Burglar", 210, 6, ["Unlock", "Stealth"]),
        14: ("Story-Teller", 210, 6, ["Legends", "Entertain"]),
        15: ("Toolmaker", 310, 5, ["Build", "Bargain"]),
        16: ("Healer", 310, 5, ["Plants", "Medical"]),
    }
    Background, Bronze, Free, new_specs = _BACKGROUNDS[Fate + d6()]
    # A background specialty the corsair already has grants an extra
    # free pick instead of a duplicate entry.
    for spec in new_specs:
        if spec not in Specialties:
            Specialties.append(spec)
        else:
            Free += 1
Created on Sun Jan 21 16:19:46 2018
@author: Sherry
Done
"""
def corsair_stats():
agility = 7 + d6()
alertness = 3 + d6()
charm = 2 + d6()
cunning = 12 + d6()
dexterity = 13 + d6()
fate = 4 + d6()
intelligence = 10 + d6()
knowledge = 9 + d6()
mechanical = 11 + d6()
nature = 8 + d6()
stamina = 5 + d6()
strength = 6 + d6()
specialties = ['Conceal', 'Filch', 'Forgery', 'Unlock']
age = Intelligence + Knowledge - 5
night__vision = 'No'
racial__ability = 'Growth'
uses__per__day = 2
height = Strength + d6()
if Height <= 9:
height = 'Tiny'
elif Height <= 13:
height = 'Very Short'
elif Height <= 17:
height = 'Short'
elif Height <= 18:
height = 'Average'
weight = Stamina + d6()
if Weight <= 8:
weight = 'Very Thin'
elif Weight <= 10:
weight = 'Thin'
elif Weight <= 13:
weight = 'Average'
elif Weight <= 15:
weight = 'Heavy'
elif Weight <= 17:
weight = 'Very Heavy'
background = Fate + d6()
if Background == 6:
background = 'Derelict'
bronze = 10
free = 8
new_specs = ['Lie', 'Search']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 7:
background = 'Serf'
bronze = 10
free = 8
new_specs = ['Plants', 'Forage']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 8:
background = 'Herder'
bronze = 10
free = 8
new_specs = ['Tame', 'Direction']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 9:
background = 'Gatherer'
bronze = 110
free = 7
new_specs = ['Plants', 'Forage']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 10:
background = 'Hunter'
bronze = 110
free = 7
new_specs = ['Forage', 'Track']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 11:
background = 'Robber'
bronze = 110
free = 7
new_specs = ['Sword', 'Bully']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 12:
background = 'Counterfeiter'
bronze = 210
free = 6
new_specs = ['Contacts', 'Literacy']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 13:
background = 'Burglar'
bronze = 210
free = 6
new_specs = ['Unlock', 'Stealth']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 14:
background = 'Story-Teller'
bronze = 210
free = 6
new_specs = ['Legends', 'Entertain']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 15:
background = 'Toolmaker'
bronze = 310
free = 5
new_specs = ['Build', 'Bargain']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1
elif Background == 16:
background = 'Healer'
bronze = 310
free = 5
new_specs = ['Plants', 'Medical']
for ea in new_specs:
if ea not in Specialties:
Specialties.append(ea)
else:
free += 1 |
# Display color for each monitoring notification/state keyword.
# dict(...) keyword form: same keys, same order, same values.
COLORS = dict(
    PROBLEM='red',
    RECOVERY='green',
    UP='green',
    ACKNOWLEDGEMENT='purple',
    FLAPPINGSTART='yellow',
    WARNING='yellow',
    UNKNOWN='gray',
    CRITICAL='red',
    FLAPPINGEND='green',
    FLAPPINGSTOP='green',
    FLAPPINGDISABLED='purple',
    DOWNTIMESTART='red',
    DOWNTIMESTOP='green',
    DOWNTIMEEND='green',
)
| colors = {'PROBLEM': 'red', 'RECOVERY': 'green', 'UP': 'green', 'ACKNOWLEDGEMENT': 'purple', 'FLAPPINGSTART': 'yellow', 'WARNING': 'yellow', 'UNKNOWN': 'gray', 'CRITICAL': 'red', 'FLAPPINGEND': 'green', 'FLAPPINGSTOP': 'green', 'FLAPPINGDISABLED': 'purple', 'DOWNTIMESTART': 'red', 'DOWNTIMESTOP': 'green', 'DOWNTIMEEND': 'green'} |
def load(h):
    """Return the aerosol/atmospheric-chemistry parameter code table.

    Each regular entry is expanded from a compact (code, title, units)
    row into the {'abbr', 'code', 'title', 'units'} dict shape the
    caller expects; 'abbr' always equals 'code' for these entries.
    The trailing code-255 'Missing' entry has abbr None and no units,
    so it is appended separately.  The *h* argument is accepted for
    interface compatibility and ignored, as in the original.
    """
    rows = (
        (0, 'Mass density (concentration)', 'kg m-3'),
        (1, 'Column-integrated mass density', 'kg m-2'),
        (2, 'Mass mixing ratio (mass fraction in air)', 'kg/kg'),
        (3, 'Atmosphere emission mass flux', 'kg m-2 s-1'),
        (4, 'Atmosphere net production mass flux', 'kg m-2 s-1'),
        (5, 'Atmosphere net production and emission mass flux', 'kg m-2 s-1'),
        (6, 'Surface dry deposition mass flux', 'kg m-2 s-1'),
        (7, 'Surface wet deposition mass flux', 'kg m-2 s-1'),
        (8, 'Atmosphere re-emission mass flux', 'kg m-2 s-1'),
        (9, 'Wet deposition by large-scale precipitation mass flux', 'kg m-2 s-1'),
        (10, 'Wet deposition by convective precipitation mass flux', 'kg m-2 s-1'),
        (11, 'Sedimentation mass flux', 'kg m-2 s-1'),
        (12, 'Dry deposition mass flux', 'kg m-2 s-1'),
        (13, 'Transfer from hydrophobic to hydrophilic', 'kg kg-1 s-1'),
        (14, 'Transfer from SO2 (sulphur dioxide) to SO4 (sulphate)', 'kg kg-1 s-1'),
        (50, 'Amount in atmosphere', 'mol'),
        (51, 'Concentration in air', 'mol m-3'),
        (52, 'Volume mixing ratio (fraction in air)', 'mol/mol'),
        (53, 'Chemical gross production rate of concentration', 'mol m-3 s-1'),
        (54, 'Chemical gross destruction rate of concentration', 'mol m-3 s-1'),
        (55, 'Surface flux', 'mol m-2 s-1'),
        (56, 'Changes of amount in atmosphere', 'mol/s'),
        (57, 'Total yearly average burden of the atmosphere', 'mol'),
        (58, 'Total yearly averaged atmospheric loss', 'mol/s'),
        (59, 'Aerosol number concentration', 'm-3'),
        (60, 'Aerosol specific number concentration', 'kg-1'),
        (61, 'Maximum of mass density in layer', 'kg m-3'),
        (62, 'Height of maximum mass density', 'm'),
        (63, 'Column-averaged mass density in layer', 'kg m-3'),
        (100, 'Surface area density (aerosol)', 'm-1'),
        (101, 'Vertical visual range', 'm'),
        (102, 'Aerosol optical thickness', 'Numeric'),
        (103, 'Single scattering albedo', 'Numeric'),
        (104, 'Asymmetry factor', 'Numeric'),
        (105, 'Aerosol extinction coefficient', 'm-1'),
        (106, 'Aerosol absorption coefficient', 'm-1'),
        (107, 'Aerosol lidar backscatter from satellite', 'm-1 sr-1'),
        (108, 'Aerosol lidar backscatter from the ground', 'm-1 sr-1'),
        (109, 'Aerosol lidar extinction from satellite', 'm-1'),
        (110, 'Aerosol lidar extinction from the ground', 'm-1'),
        (111, 'Angstrom exponent', 'Numeric'),
    )
    entries = [
        {'abbr': code, 'code': code, 'title': title, 'units': units}
        for code, title, units in rows
    ]
    entries.append({'abbr': None, 'code': 255, 'title': 'Missing'})
    return tuple(entries)
| def load(h):
return ({'abbr': 0, 'code': 0, 'title': 'Mass density (concentration)', 'units': 'kg m-3'}, {'abbr': 1, 'code': 1, 'title': 'Column-integrated mass density', 'units': 'kg m-2'}, {'abbr': 2, 'code': 2, 'title': 'Mass mixing ratio (mass fraction in air)', 'units': 'kg/kg'}, {'abbr': 3, 'code': 3, 'title': 'Atmosphere emission mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 4, 'code': 4, 'title': 'Atmosphere net production mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 5, 'code': 5, 'title': 'Atmosphere net production and emission mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 6, 'code': 6, 'title': 'Surface dry deposition mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 7, 'code': 7, 'title': 'Surface wet deposition mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 8, 'code': 8, 'title': 'Atmosphere re-emission mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 9, 'code': 9, 'title': 'Wet deposition by large-scale precipitation mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 10, 'code': 10, 'title': 'Wet deposition by convective precipitation mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 11, 'code': 11, 'title': 'Sedimentation mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 12, 'code': 12, 'title': 'Dry deposition mass flux', 'units': 'kg m-2 s-1'}, {'abbr': 13, 'code': 13, 'title': 'Transfer from hydrophobic to hydrophilic', 'units': 'kg kg-1 s-1'}, {'abbr': 14, 'code': 14, 'title': 'Transfer from SO2 (sulphur dioxide) to SO4 (sulphate)', 'units': 'kg kg-1 s-1'}, {'abbr': 50, 'code': 50, 'title': 'Amount in atmosphere', 'units': 'mol'}, {'abbr': 51, 'code': 51, 'title': 'Concentration in air', 'units': 'mol m-3'}, {'abbr': 52, 'code': 52, 'title': 'Volume mixing ratio (fraction in air)', 'units': 'mol/mol'}, {'abbr': 53, 'code': 53, 'title': 'Chemical gross production rate of concentration', 'units': 'mol m-3 s-1'}, {'abbr': 54, 'code': 54, 'title': 'Chemical gross destruction rate of concentration', 'units': 'mol m-3 s-1'}, {'abbr': 55, 'code': 55, 'title': 'Surface flux', 'units': 'mol m-2 
s-1'}, {'abbr': 56, 'code': 56, 'title': 'Changes of amount in atmosphere', 'units': 'mol/s'}, {'abbr': 57, 'code': 57, 'title': 'Total yearly average burden of the atmosphere', 'units': 'mol'}, {'abbr': 58, 'code': 58, 'title': 'Total yearly averaged atmospheric loss', 'units': 'mol/s'}, {'abbr': 59, 'code': 59, 'title': 'Aerosol number concentration', 'units': 'm-3'}, {'abbr': 60, 'code': 60, 'title': 'Aerosol specific number concentration', 'units': 'kg-1'}, {'abbr': 61, 'code': 61, 'title': 'Maximum of mass density in layer', 'units': 'kg m-3'}, {'abbr': 62, 'code': 62, 'title': 'Height of maximum mass density', 'units': 'm'}, {'abbr': 63, 'code': 63, 'title': 'Column-averaged mass density in layer', 'units': 'kg m-3'}, {'abbr': 100, 'code': 100, 'title': 'Surface area density (aerosol)', 'units': 'm-1'}, {'abbr': 101, 'code': 101, 'title': 'Vertical visual range', 'units': 'm'}, {'abbr': 102, 'code': 102, 'title': 'Aerosol optical thickness', 'units': 'Numeric'}, {'abbr': 103, 'code': 103, 'title': 'Single scattering albedo', 'units': 'Numeric'}, {'abbr': 104, 'code': 104, 'title': 'Asymmetry factor', 'units': 'Numeric'}, {'abbr': 105, 'code': 105, 'title': 'Aerosol extinction coefficient', 'units': 'm-1'}, {'abbr': 106, 'code': 106, 'title': 'Aerosol absorption coefficient', 'units': 'm-1'}, {'abbr': 107, 'code': 107, 'title': 'Aerosol lidar backscatter from satellite', 'units': 'm-1 sr-1'}, {'abbr': 108, 'code': 108, 'title': 'Aerosol lidar backscatter from the ground', 'units': 'm-1 sr-1'}, {'abbr': 109, 'code': 109, 'title': 'Aerosol lidar extinction from satellite', 'units': 'm-1'}, {'abbr': 110, 'code': 110, 'title': 'Aerosol lidar extinction from the ground', 'units': 'm-1'}, {'abbr': 111, 'code': 111, 'title': 'Angstrom exponent', 'units': 'Numeric'}, {'abbr': None, 'code': 255, 'title': 'Missing'}) |
#===========================================================================
#
# Port to use for the web server.  Configure the Eagle to use this
# port as its 'cloud provider' using http://host:PORT
#
#===========================================================================
httpPort = 22042
#===========================================================================
#
# MQTT topic names
#
#===========================================================================
# Meter reading topic (reports the current cumulative meter reading in kWh)
mqttEnergy = 'power/elec/Home/energy'
# Instantaneous power usage topic (reports power usage in W)
mqttPower = 'power/elec/Home/power'
# Current price topic (reports the current electricity price from the meter)
mqttPrice = 'power/elec/Home/price'
# Current rate label topic (reports the rate label from the meter)
mqttRateLabel = 'power/elec/Home/ratelabel'
#===========================================================================
#
# Logging configuration.  Environment variables are allowed in the file
# name.
#
#===========================================================================
logFile = '/var/log/tHome/eagle.log'
# Numeric logging level; 20 corresponds to logging.INFO.
logLevel = 20
| http_port = 22042
mqtt_energy = 'power/elec/Home/energy'
mqtt_power = 'power/elec/Home/power'
mqtt_price = 'power/elec/Home/price'
mqtt_rate_label = 'power/elec/Home/ratelabel'
log_file = '/var/log/tHome/eagle.log'
log_level = 20 |
def runUserScript(func, params, paramTypes):
    """Validate the argument count, parse each raw parameter to its
    declared type, then invoke *func* with the parsed values."""
    if len(params) != len(paramTypes):
        onParameterError()
    parsed = [parseParameter(i, paramTypes[i], raw)
              for i, raw in enumerate(params)]
    func(*parsed)
class Node:
    """Binary-tree node; `next` is an extra auxiliary pointer."""
    def __init__(self, val, left, right, next):
        self.val = val
        self.left = left
        self.right = right
        self.next = next
def parseNode(param):
    """Build a binary tree from a level-order list (None = absent child).

    Returns the root Node, or None for an empty list (the original
    raised IndexError).  Fixes the original's `val is 0` / `i is 0`
    identity comparisons — correct only by the accident of CPython's
    small-int caching and a SyntaxWarning on modern interpreters — and
    replaces the O(n) list.pop(0) queue with an index walk.
    """
    if not param:
        return None
    root = Node(param[0], None, None, None)
    # Each pending entry is [node, next-child-slot]: slot 0 = left, 1 = right.
    pending = [[root, 0]]
    head = 0  # index of the parent currently receiving children
    for value in param[1:]:
        parent = pending[head]
        child = None if value is None else Node(value, None, None, None)
        if parent[1] == 0:
            parent[0].left = child
            parent[1] = 1
        else:
            parent[0].right = child
            head += 1  # both slots filled; advance to the next parent
        if child is not None:
            pending.append([child, 0])
    return root
def parseSpecialParameter(index, paramType, param):
    """Parse parameters of non-primitive types.

    Only the "Node" type (level-order binary-tree list) is supported;
    any other type yields None.
    """
    if paramType != "Node":
        return None
    return parseNode(param)
| def run_user_script(func, params, paramTypes):
if len(params) != len(paramTypes):
on_parameter_error()
new_params = []
for (i, val) in enumerate(params):
newParams.append(parse_parameter(i, paramTypes[i], val))
func(*newParams)
class Node:
def __init__(self, val, left, right, next):
self.val = val
self.left = left
self.right = right
self.next = next
def parse_node(param):
first = node(param[0], None, None, None)
arr = []
arr.append([first, 0])
for (i, val) in enumerate(param):
if i is 0:
continue
top = arr[0]
val = None if param[i] is None else node(param[i], None, None, None)
if top[1] is 0:
top[0].left = val
top[1] = 1
else:
top[0].right = val
arr.pop(0)
if val is not None:
arr.append([val, 0])
return first
def parse_special_parameter(index, paramType, param):
if paramType == 'Node':
return parse_node(param)
return None |
def readlines(fname):
    """Return the list of lines in *fname*, or [] if it cannot be read.

    Catches OSError (missing file, permissions, ...) instead of the
    original bare `except`, which also swallowed KeyboardInterrupt,
    SystemExit and unrelated programming errors.
    """
    try:
        with open(fname, 'r') as fpt:
            return fpt.readlines()
    except OSError:
        return []
def convert(data):
    """In place, replace every element of *data* that parses as a
    float with its float value; leave non-numeric elements untouched."""
    for idx, raw in enumerate(data):
        try:
            data[idx] = float(raw)
        except ValueError:
            pass
def csv_lst(fname):
    """Parse a CSV file into a list of rows with numeric fields as floats.

    The header (first line) is skipped.  Raises FileNotFoundError — a
    subclass of the generic Exception the original raised, so existing
    callers still catch it — when the file yields no lines.

    NOTE(review): fields are split on ',' only; quoted fields and the
    trailing newline on each row's last column are not handled, exactly
    as in the original.
    """
    lines = readlines(fname)
    if not lines:
        raise FileNotFoundError('Missing file')
    output = []
    for line in lines[1:]:
        row = line.split(',')
        convert(row)  # numeric-looking fields become floats, in place
        output.append(row)
    return output
# Demo: load the Titanic data set and total the first column of every
# row (the survival flag after float conversion), i.e. count survivors.
dd = csv_lst('titanic.csv')
sur = 0
for x in dd:
    sur += x[0]
print(sur)
| def readlines(fname):
try:
with open(fname, 'r') as fpt:
return fpt.readlines()
except:
return []
def convert(data):
for i in range(len(data)):
try:
data[i] = float(data[i])
except ValueError:
continue
def csv_lst(fname):
l = readlines(fname)
if len(l) == 0:
raise exception('Missing file')
output = []
for i in l[1:]:
data = i.split(',')
convert(data)
output.append(data)
return output
dd = csv_lst('titanic.csv')
sur = 0
for x in dd:
sur += x[0]
print(sur) |
def functionA(A: torch.Tensor, B: torch.Tensor) -> torch.Tensor:
    """Return the column-wise sums of A scaled by the total sum of B."""
    return A.sum(dim=0) * B.sum()
def functionB(C: torch.Tensor) -> torch.Tensor:
    """Return a single-row tensor: [0..n-1 indices, flattened values of C]."""
    flat = C.flatten()
    # One index per flattened element of C.
    indices = torch.arange(0, len(flat))
    # Concatenate the two 1xN rows side by side into a 1x2N row.
    return torch.cat([indices.unsqueeze(0), flat.unsqueeze(0)], dim=1)
def functionC(D: torch.Tensor, E: torch.Tensor) -> torch.Tensor:
    """If E has as many elements as D, reshape E to D's shape and add;
    otherwise flatten both to single rows and concatenate them."""
    if D.numel() == E.numel():
        return D + E.reshape(D.shape)
    # Element counts differ: fall back to a 1x(nD+nE) concatenation.
    return torch.cat([D.reshape(1, -1), E.reshape(1, -1)], dim=1)
print(functionA(torch.tensor([[1,1], [1,1]]), torch.tensor([ [1,2,3],[1,2,3] ]) ))
print(functionB(torch.tensor([ [2,3],[-1,10] ])))
print(functionC(torch.tensor([[1, -1],[-1,3]]), torch.tensor([[2,3,0,2]])))
print(functionC(torch.tensor([[1, -1],[-1,3]]), torch.tensor([[2,3,0]]))) | def function_a(A: torch.Tensor, B: torch.Tensor) -> torch.Tensor:
output = A.sum(axis=0) * B.sum()
return output
def function_b(C: torch.Tensor) -> torch.Tensor:
c = C.flatten()
idx_tensor = torch.arange(0, len(C))
output = torch.cat([idx_tensor.unsqueeze(0), C.unsqueeze(0)], axis=1)
return output
def function_c(D: torch.Tensor, E: torch.Tensor) -> torch.Tensor:
if torch.numel(D) == torch.numel(E):
e = E.reshape(D.shape)
output = D + E
else:
d = D.reshape(1, -1)
e = E.reshape(1, -1)
output = torch.cat([D, E], axis=1)
return output
print(function_a(torch.tensor([[1, 1], [1, 1]]), torch.tensor([[1, 2, 3], [1, 2, 3]])))
print(function_b(torch.tensor([[2, 3], [-1, 10]])))
print(function_c(torch.tensor([[1, -1], [-1, 3]]), torch.tensor([[2, 3, 0, 2]])))
print(function_c(torch.tensor([[1, -1], [-1, 3]]), torch.tensor([[2, 3, 0]]))) |
# Report whether enough data points have been collected.
minimum_points = 100
data_points = 150
if data_points >= minimum_points:
    print("There are enough data points!")
else:
    # Exact complement of the condition above, so `else` replaces the
    # original's redundant second if-test; the printed output is identical.
    print("Keep collecting data.")
data_points = 150
if data_points >= minimum_points:
print('There are enough data points!')
if data_points < minimum_points:
print('Keep collecting data.') |
class Hand:
    """A blackjack hand: holds card objects and scores them.

    Cards are any objects exposing a `.value` rank string ('2'..'10',
    'J', 'Q', 'K', 'A').
    """
    def __init__(self):
        self.cards = []  # cards added so far
        self.value = 0   # total from the last calculate_value() call
    def add_card(self, card):
        """Append a card (object with a .value rank string) to the hand."""
        self.cards.append(card)
    def calculate_value(self):
        """Recompute self.value with the best ace treatment.

        Each ace first counts 11; aces are then demoted to 1 one at a
        time while the hand would bust.  Fixes the original's single
        has_ace flag, which demoted at most one ace (A+A+K scored 22
        instead of 12).
        """
        self.value = 0
        aces = 0
        for card in self.cards:
            if card.value.isnumeric():
                self.value += int(card.value)
            elif card.value == "A":
                aces += 1
                self.value += 11
            else:
                # Face cards (any other non-numeric rank) count 10.
                self.value += 10
        while aces > 0 and self.value > 21:
            self.value -= 10
            aces -= 1
    def get_value(self):
        """Return the freshly recomputed hand value."""
        self.calculate_value()
        return self.value
def __init__(self):
self.cards = []
self.value = 0
def add_card(self, card):
self.cards.append(card)
def calculate_value(self):
self.value = 0
has_ace = False
for card in self.cards:
if card.value.isnumeric():
self.value += int(card.value)
elif card.value == 'A':
has_ace = True
self.value += 11
else:
self.value += 10
if has_ace and self.value > 21:
self.value -= 10
def get_value(self):
self.calculate_value()
return self.value |
# This module is used in `test_doctest`.
# It must not have a docstring.
def func_with_docstring():
"""Some unrelated info."""
def func_without_docstring():
pass
def func_with_doctest():
"""
This function really contains a test case.
>>> func_with_doctest.__name__
'func_with_doctest'
"""
return 3
class ClassWithDocstring:
"""Some unrelated class information."""
class ClassWithoutDocstring:
pass
class ClassWithDoctest:
"""This class really has a test case in it.
>>> ClassWithDoctest.__name__
'ClassWithDoctest'
"""
class MethodWrapper:
def method_with_docstring(self):
"""Method with a docstring."""
def method_without_docstring(self):
pass
def method_with_doctest(self):
"""
This has a doctest!
>>> MethodWrapper.method_with_doctest.__name__
'method_with_doctest'
"""
| def func_with_docstring():
"""Some unrelated info."""
def func_without_docstring():
pass
def func_with_doctest():
"""
This function really contains a test case.
>>> func_with_doctest.__name__
'func_with_doctest'
"""
return 3
class Classwithdocstring:
"""Some unrelated class information."""
class Classwithoutdocstring:
pass
class Classwithdoctest:
"""This class really has a test case in it.
>>> ClassWithDoctest.__name__
'ClassWithDoctest'
"""
class Methodwrapper:
def method_with_docstring(self):
"""Method with a docstring."""
def method_without_docstring(self):
pass
def method_with_doctest(self):
"""
This has a doctest!
>>> MethodWrapper.method_with_doctest.__name__
'method_with_doctest'
""" |
# -*- coding: utf-8 -*-
"""
pyalgs
~~~~~
pyalgs provides the python implementation of the Robert Sedgwick's Coursera course on Algorithms (Part I and Part II).
:copyright: (c) 2017 by Xianshun Chen.
:license: BSD, see LICENSE for more details.
"""
__version__ = '0.01-dev' | """
pyalgs
~~~~~
pyalgs provides the python implementation of the Robert Sedgwick's Coursera course on Algorithms (Part I and Part II).
:copyright: (c) 2017 by Xianshun Chen.
:license: BSD, see LICENSE for more details.
"""
__version__ = '0.01-dev' |
#
# PySNMP MIB module GSM7312-QOS-ACL-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/GSM7312-QOS-ACL-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:20:03 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint")
gsm7312QOS, = mibBuilder.importSymbols("GSM7312-QOS-MIB", "gsm7312QOS")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
TimeTicks, Unsigned32, Counter32, ModuleIdentity, Bits, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, IpAddress, NotificationType, MibIdentifier, Integer32, iso, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "Unsigned32", "Counter32", "ModuleIdentity", "Bits", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "IpAddress", "NotificationType", "MibIdentifier", "Integer32", "iso", "Counter64")
DisplayString, TextualConvention, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "RowStatus")
gsm7312QOSACL = ModuleIdentity((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2))
gsm7312QOSACL.setRevisions(('2003-05-06 12:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: gsm7312QOSACL.setRevisionsDescriptions(('Initial revision.',))
if mibBuilder.loadTexts: gsm7312QOSACL.setLastUpdated('200305061200Z')
if mibBuilder.loadTexts: gsm7312QOSACL.setOrganization('Netgear')
if mibBuilder.loadTexts: gsm7312QOSACL.setContactInfo('')
if mibBuilder.loadTexts: gsm7312QOSACL.setDescription('')
aclTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 1), )
if mibBuilder.loadTexts: aclTable.setStatus('current')
if mibBuilder.loadTexts: aclTable.setDescription('A table of ACL instances.')
aclEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 1, 1), ).setIndexNames((0, "GSM7312-QOS-ACL-MIB", "aclIndex"))
if mibBuilder.loadTexts: aclEntry.setStatus('current')
if mibBuilder.loadTexts: aclEntry.setDescription('')
aclStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 1, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclStatus.setStatus('current')
if mibBuilder.loadTexts: aclStatus.setDescription('Status of this instance. active(1) - this ACL instance is active createAndGo(4) - set to this value to create an instance destroy(6) - set to this value to delete an instance')
aclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: aclIndex.setStatus('current')
if mibBuilder.loadTexts: aclIndex.setDescription('The ACL index this instance is associated with.')
aclIfTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2), )
if mibBuilder.loadTexts: aclIfTable.setStatus('current')
if mibBuilder.loadTexts: aclIfTable.setDescription('A table of ACL interface instances.')
aclIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2, 1), ).setIndexNames((0, "GSM7312-QOS-ACL-MIB", "aclIndex"), (0, "GSM7312-QOS-ACL-MIB", "aclIfIndex"), (0, "GSM7312-QOS-ACL-MIB", "aclIfDirection"))
if mibBuilder.loadTexts: aclIfEntry.setStatus('current')
if mibBuilder.loadTexts: aclIfEntry.setDescription('')
aclIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: aclIfIndex.setStatus('current')
if mibBuilder.loadTexts: aclIfIndex.setDescription('The interface this ACL instance is associated with.')
aclIfDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2))))
if mibBuilder.loadTexts: aclIfDirection.setStatus('current')
if mibBuilder.loadTexts: aclIfDirection.setDescription('The direction this ACL instance applies.')
aclIfStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclIfStatus.setStatus('current')
if mibBuilder.loadTexts: aclIfStatus.setDescription('Status of this instance. active(1) - this ACL index instance is active createAndGo(4) - set to this value to assign an interface to an ACL destroy(6) - set to this value to remove an interface to an ACL')
aclRuleTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3), )
if mibBuilder.loadTexts: aclRuleTable.setStatus('current')
if mibBuilder.loadTexts: aclRuleTable.setDescription('A table of ACL Rules instances.')
aclRuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1), ).setIndexNames((0, "GSM7312-QOS-ACL-MIB", "aclIndex"), (0, "GSM7312-QOS-ACL-MIB", "aclRuleIndex"))
if mibBuilder.loadTexts: aclRuleEntry.setStatus('current')
if mibBuilder.loadTexts: aclRuleEntry.setDescription('A table of ACL Classification Rules')
aclRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 1), Integer32())
if mibBuilder.loadTexts: aclRuleIndex.setStatus('current')
if mibBuilder.loadTexts: aclRuleIndex.setDescription('The index of this instance.')
aclRuleAction = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("permit", 1), ("deny", 2))).clone('deny')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleAction.setStatus('current')
if mibBuilder.loadTexts: aclRuleAction.setDescription('The type of action this rule should perform.')
aclRuleProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleProtocol.setStatus('current')
if mibBuilder.loadTexts: aclRuleProtocol.setDescription('icmp - 1 igmp - 2 ip - 4 tcp - 6 udp - 17 All values from 1 to 255 are valid.')
aclRuleSrcIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleSrcIpAddress.setStatus('current')
if mibBuilder.loadTexts: aclRuleSrcIpAddress.setDescription('The Source IP Address used in the ACL Classification.')
aclRuleSrcIpMask = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 5), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleSrcIpMask.setStatus('current')
if mibBuilder.loadTexts: aclRuleSrcIpMask.setDescription('The Source IP Mask used in the ACL Classification.')
aclRuleSrcL4Port = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 6), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleSrcL4Port.setStatus('current')
if mibBuilder.loadTexts: aclRuleSrcL4Port.setDescription('The Source Port Number (Layer 4) used in the ACL Classification.')
aclRuleSrcL4PortRangeStart = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 7), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleSrcL4PortRangeStart.setStatus('current')
if mibBuilder.loadTexts: aclRuleSrcL4PortRangeStart.setDescription('The Source Port Number(Layer 4) range start.')
aclRuleSrcL4PortRangeEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 8), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleSrcL4PortRangeEnd.setStatus('current')
if mibBuilder.loadTexts: aclRuleSrcL4PortRangeEnd.setDescription('The Source Port Number(Layer 4) range end.')
aclRuleDestIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 9), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleDestIpAddress.setStatus('current')
if mibBuilder.loadTexts: aclRuleDestIpAddress.setDescription('The Destination IP Address used in the ACL Classification.')
aclRuleDestIpMask = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 10), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleDestIpMask.setStatus('current')
if mibBuilder.loadTexts: aclRuleDestIpMask.setDescription('The Destination IP Mask used in the ACL Classification.')
aclRuleDestL4Port = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 11), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleDestL4Port.setStatus('current')
if mibBuilder.loadTexts: aclRuleDestL4Port.setDescription('The Destination Port (Layer 4) used in ACl classification.')
aclRuleDestL4PortRangeStart = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 12), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleDestL4PortRangeStart.setStatus('current')
if mibBuilder.loadTexts: aclRuleDestL4PortRangeStart.setDescription('The Destination Port (Layer 4) starting range used in ACL classification.')
aclRuleDestL4PortRangeEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 13), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleDestL4PortRangeEnd.setStatus('current')
if mibBuilder.loadTexts: aclRuleDestL4PortRangeEnd.setDescription('The Destination Port (Layer 4) ending range used in ACL classification.')
aclRuleIPDSCP = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 14), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleIPDSCP.setStatus('current')
if mibBuilder.loadTexts: aclRuleIPDSCP.setDescription('The Differentiated Services Code Point value.')
aclRuleIpPrecedence = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 15), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleIpPrecedence.setStatus('current')
if mibBuilder.loadTexts: aclRuleIpPrecedence.setDescription('The Type of Service (TOS) IP Precedence value.')
aclRuleIpTosBits = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 16), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleIpTosBits.setStatus('current')
if mibBuilder.loadTexts: aclRuleIpTosBits.setDescription('The Type of Service (TOS) Bits value.')
aclRuleIpTosMask = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 17), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleIpTosMask.setStatus('current')
if mibBuilder.loadTexts: aclRuleIpTosMask.setDescription('The Type of Service (TOS) Mask value.')
aclRuleStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 18), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: aclRuleStatus.setStatus('current')
if mibBuilder.loadTexts: aclRuleStatus.setDescription('Status of this instance. active(1) - this ACL Rule is active createAndGo(4) - set to this value to create an instance destroy(6) - set to this value to delete an instance')
mibBuilder.exportSymbols("GSM7312-QOS-ACL-MIB", gsm7312QOSACL=gsm7312QOSACL, aclIfEntry=aclIfEntry, aclRuleIPDSCP=aclRuleIPDSCP, aclRuleDestIpAddress=aclRuleDestIpAddress, aclRuleDestL4PortRangeStart=aclRuleDestL4PortRangeStart, aclRuleIpPrecedence=aclRuleIpPrecedence, aclIfTable=aclIfTable, aclIndex=aclIndex, aclRuleDestL4PortRangeEnd=aclRuleDestL4PortRangeEnd, aclRuleSrcL4Port=aclRuleSrcL4Port, aclStatus=aclStatus, aclIfIndex=aclIfIndex, aclRuleIpTosMask=aclRuleIpTosMask, aclRuleAction=aclRuleAction, aclRuleSrcL4PortRangeEnd=aclRuleSrcL4PortRangeEnd, aclTable=aclTable, aclIfDirection=aclIfDirection, aclEntry=aclEntry, aclRuleDestL4Port=aclRuleDestL4Port, aclRuleSrcL4PortRangeStart=aclRuleSrcL4PortRangeStart, PYSNMP_MODULE_ID=gsm7312QOSACL, aclIfStatus=aclIfStatus, aclRuleEntry=aclRuleEntry, aclRuleDestIpMask=aclRuleDestIpMask, aclRuleTable=aclRuleTable, aclRuleIndex=aclRuleIndex, aclRuleIpTosBits=aclRuleIpTosBits, aclRuleSrcIpAddress=aclRuleSrcIpAddress, aclRuleStatus=aclRuleStatus, aclRuleProtocol=aclRuleProtocol, aclRuleSrcIpMask=aclRuleSrcIpMask)
| (integer, octet_string, object_identifier) = mibBuilder.importSymbols('ASN1', 'Integer', 'OctetString', 'ObjectIdentifier')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_range_constraint, value_size_constraint, constraints_union, constraints_intersection, single_value_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueRangeConstraint', 'ValueSizeConstraint', 'ConstraintsUnion', 'ConstraintsIntersection', 'SingleValueConstraint')
(gsm7312_qos,) = mibBuilder.importSymbols('GSM7312-QOS-MIB', 'gsm7312QOS')
(module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup')
(time_ticks, unsigned32, counter32, module_identity, bits, gauge32, mib_scalar, mib_table, mib_table_row, mib_table_column, object_identity, ip_address, notification_type, mib_identifier, integer32, iso, counter64) = mibBuilder.importSymbols('SNMPv2-SMI', 'TimeTicks', 'Unsigned32', 'Counter32', 'ModuleIdentity', 'Bits', 'Gauge32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'ObjectIdentity', 'IpAddress', 'NotificationType', 'MibIdentifier', 'Integer32', 'iso', 'Counter64')
(display_string, textual_convention, row_status) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention', 'RowStatus')
gsm7312_qosacl = module_identity((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2))
gsm7312QOSACL.setRevisions(('2003-05-06 12:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts:
gsm7312QOSACL.setRevisionsDescriptions(('Initial revision.',))
if mibBuilder.loadTexts:
gsm7312QOSACL.setLastUpdated('200305061200Z')
if mibBuilder.loadTexts:
gsm7312QOSACL.setOrganization('Netgear')
if mibBuilder.loadTexts:
gsm7312QOSACL.setContactInfo('')
if mibBuilder.loadTexts:
gsm7312QOSACL.setDescription('')
acl_table = mib_table((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 1))
if mibBuilder.loadTexts:
aclTable.setStatus('current')
if mibBuilder.loadTexts:
aclTable.setDescription('A table of ACL instances.')
acl_entry = mib_table_row((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 1, 1)).setIndexNames((0, 'GSM7312-QOS-ACL-MIB', 'aclIndex'))
if mibBuilder.loadTexts:
aclEntry.setStatus('current')
if mibBuilder.loadTexts:
aclEntry.setDescription('')
acl_status = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 1, 1, 3), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclStatus.setStatus('current')
if mibBuilder.loadTexts:
aclStatus.setDescription('Status of this instance. active(1) - this ACL instance is active createAndGo(4) - set to this value to create an instance destroy(6) - set to this value to delete an instance')
acl_index = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 1, 1, 1), integer32())
if mibBuilder.loadTexts:
aclIndex.setStatus('current')
if mibBuilder.loadTexts:
aclIndex.setDescription('The ACL index this instance is associated with.')
acl_if_table = mib_table((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2))
if mibBuilder.loadTexts:
aclIfTable.setStatus('current')
if mibBuilder.loadTexts:
aclIfTable.setDescription('A table of ACL interface instances.')
acl_if_entry = mib_table_row((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2, 1)).setIndexNames((0, 'GSM7312-QOS-ACL-MIB', 'aclIndex'), (0, 'GSM7312-QOS-ACL-MIB', 'aclIfIndex'), (0, 'GSM7312-QOS-ACL-MIB', 'aclIfDirection'))
if mibBuilder.loadTexts:
aclIfEntry.setStatus('current')
if mibBuilder.loadTexts:
aclIfEntry.setDescription('')
acl_if_index = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2, 1, 1), integer32())
if mibBuilder.loadTexts:
aclIfIndex.setStatus('current')
if mibBuilder.loadTexts:
aclIfIndex.setDescription('The interface this ACL instance is associated with.')
acl_if_direction = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('inbound', 1), ('outbound', 2))))
if mibBuilder.loadTexts:
aclIfDirection.setStatus('current')
if mibBuilder.loadTexts:
aclIfDirection.setDescription('The direction this ACL instance applies.')
acl_if_status = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 2, 1, 3), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclIfStatus.setStatus('current')
if mibBuilder.loadTexts:
aclIfStatus.setDescription('Status of this instance. active(1) - this ACL index instance is active createAndGo(4) - set to this value to assign an interface to an ACL destroy(6) - set to this value to remove an interface to an ACL')
acl_rule_table = mib_table((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3))
if mibBuilder.loadTexts:
aclRuleTable.setStatus('current')
if mibBuilder.loadTexts:
aclRuleTable.setDescription('A table of ACL Rules instances.')
acl_rule_entry = mib_table_row((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1)).setIndexNames((0, 'GSM7312-QOS-ACL-MIB', 'aclIndex'), (0, 'GSM7312-QOS-ACL-MIB', 'aclRuleIndex'))
if mibBuilder.loadTexts:
aclRuleEntry.setStatus('current')
if mibBuilder.loadTexts:
aclRuleEntry.setDescription('A table of ACL Classification Rules')
acl_rule_index = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 1), integer32())
if mibBuilder.loadTexts:
aclRuleIndex.setStatus('current')
if mibBuilder.loadTexts:
aclRuleIndex.setDescription('The index of this instance.')
acl_rule_action = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('permit', 1), ('deny', 2))).clone('deny')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleAction.setStatus('current')
if mibBuilder.loadTexts:
aclRuleAction.setDescription('The type of action this rule should perform.')
acl_rule_protocol = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 3), integer32().subtype(subtypeSpec=value_range_constraint(1, 255))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleProtocol.setStatus('current')
if mibBuilder.loadTexts:
aclRuleProtocol.setDescription('icmp - 1 igmp - 2 ip - 4 tcp - 6 udp - 17 All values from 1 to 255 are valid.')
acl_rule_src_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 4), ip_address()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleSrcIpAddress.setStatus('current')
if mibBuilder.loadTexts:
aclRuleSrcIpAddress.setDescription('The Source IP Address used in the ACL Classification.')
acl_rule_src_ip_mask = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 5), ip_address()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleSrcIpMask.setStatus('current')
if mibBuilder.loadTexts:
aclRuleSrcIpMask.setDescription('The Source IP Mask used in the ACL Classification.')
acl_rule_src_l4_port = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 6), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleSrcL4Port.setStatus('current')
if mibBuilder.loadTexts:
aclRuleSrcL4Port.setDescription('The Source Port Number (Layer 4) used in the ACL Classification.')
acl_rule_src_l4_port_range_start = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 7), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleSrcL4PortRangeStart.setStatus('current')
if mibBuilder.loadTexts:
aclRuleSrcL4PortRangeStart.setDescription('The Source Port Number(Layer 4) range start.')
acl_rule_src_l4_port_range_end = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 8), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleSrcL4PortRangeEnd.setStatus('current')
if mibBuilder.loadTexts:
aclRuleSrcL4PortRangeEnd.setDescription('The Source Port Number(Layer 4) range end.')
acl_rule_dest_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 9), ip_address()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleDestIpAddress.setStatus('current')
if mibBuilder.loadTexts:
aclRuleDestIpAddress.setDescription('The Destination IP Address used in the ACL Classification.')
acl_rule_dest_ip_mask = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 10), ip_address()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleDestIpMask.setStatus('current')
if mibBuilder.loadTexts:
aclRuleDestIpMask.setDescription('The Destination IP Mask used in the ACL Classification.')
acl_rule_dest_l4_port = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 11), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleDestL4Port.setStatus('current')
if mibBuilder.loadTexts:
aclRuleDestL4Port.setDescription('The Destination Port (Layer 4) used in ACl classification.')
acl_rule_dest_l4_port_range_start = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 12), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleDestL4PortRangeStart.setStatus('current')
if mibBuilder.loadTexts:
aclRuleDestL4PortRangeStart.setDescription('The Destination Port (Layer 4) starting range used in ACL classification.')
acl_rule_dest_l4_port_range_end = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 13), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleDestL4PortRangeEnd.setStatus('current')
if mibBuilder.loadTexts:
aclRuleDestL4PortRangeEnd.setDescription('The Destination Port (Layer 4) ending range used in ACL classification.')
acl_rule_ipdscp = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 14), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleIPDSCP.setStatus('current')
if mibBuilder.loadTexts:
aclRuleIPDSCP.setDescription('The Differentiated Services Code Point value.')
acl_rule_ip_precedence = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 15), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleIpPrecedence.setStatus('current')
if mibBuilder.loadTexts:
aclRuleIpPrecedence.setDescription('The Type of Service (TOS) IP Precedence value.')
acl_rule_ip_tos_bits = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 16), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleIpTosBits.setStatus('current')
if mibBuilder.loadTexts:
aclRuleIpTosBits.setDescription('The Type of Service (TOS) Bits value.')
acl_rule_ip_tos_mask = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 17), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleIpTosMask.setStatus('current')
if mibBuilder.loadTexts:
aclRuleIpTosMask.setDescription('The Type of Service (TOS) Mask value.')
acl_rule_status = mib_table_column((1, 3, 6, 1, 4, 1, 4526, 1, 6, 3, 2, 3, 1, 18), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
aclRuleStatus.setStatus('current')
if mibBuilder.loadTexts:
aclRuleStatus.setDescription('Status of this instance. active(1) - this ACL Rule is active createAndGo(4) - set to this value to create an instance destroy(6) - set to this value to delete an instance')
mibBuilder.exportSymbols('GSM7312-QOS-ACL-MIB', gsm7312QOSACL=gsm7312QOSACL, aclIfEntry=aclIfEntry, aclRuleIPDSCP=aclRuleIPDSCP, aclRuleDestIpAddress=aclRuleDestIpAddress, aclRuleDestL4PortRangeStart=aclRuleDestL4PortRangeStart, aclRuleIpPrecedence=aclRuleIpPrecedence, aclIfTable=aclIfTable, aclIndex=aclIndex, aclRuleDestL4PortRangeEnd=aclRuleDestL4PortRangeEnd, aclRuleSrcL4Port=aclRuleSrcL4Port, aclStatus=aclStatus, aclIfIndex=aclIfIndex, aclRuleIpTosMask=aclRuleIpTosMask, aclRuleAction=aclRuleAction, aclRuleSrcL4PortRangeEnd=aclRuleSrcL4PortRangeEnd, aclTable=aclTable, aclIfDirection=aclIfDirection, aclEntry=aclEntry, aclRuleDestL4Port=aclRuleDestL4Port, aclRuleSrcL4PortRangeStart=aclRuleSrcL4PortRangeStart, PYSNMP_MODULE_ID=gsm7312QOSACL, aclIfStatus=aclIfStatus, aclRuleEntry=aclRuleEntry, aclRuleDestIpMask=aclRuleDestIpMask, aclRuleTable=aclRuleTable, aclRuleIndex=aclRuleIndex, aclRuleIpTosBits=aclRuleIpTosBits, aclRuleSrcIpAddress=aclRuleSrcIpAddress, aclRuleStatus=aclRuleStatus, aclRuleProtocol=aclRuleProtocol, aclRuleSrcIpMask=aclRuleSrcIpMask) |
class AttributeUsageAttribute(Attribute,_Attribute):
"""
Specifies the usage of another attribute class. This class cannot be inherited.
AttributeUsageAttribute(validOn: AttributeTargets)
"""
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,validOn):
""" __new__(cls: type,validOn: AttributeTargets) """
pass
def __reduce_ex__(self,*args):
pass
AllowMultiple=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a Boolean value indicating whether more than one instance of the indicated attribute can be specified for a single program element.
Get: AllowMultiple(self: AttributeUsageAttribute) -> bool
Set: AllowMultiple(self: AttributeUsageAttribute)=value
"""
Inherited=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a Boolean value indicating whether the indicated attribute can be inherited by derived classes and overriding members.
Get: Inherited(self: AttributeUsageAttribute) -> bool
Set: Inherited(self: AttributeUsageAttribute)=value
"""
ValidOn=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a set of values identifying which program elements that the indicated attribute can be applied to.
Get: ValidOn(self: AttributeUsageAttribute) -> AttributeTargets
"""
| class Attributeusageattribute(Attribute, _Attribute):
"""
Specifies the usage of another attribute class. This class cannot be inherited.
AttributeUsageAttribute(validOn: AttributeTargets)
"""
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self, validOn):
""" __new__(cls: type,validOn: AttributeTargets) """
pass
def __reduce_ex__(self, *args):
pass
allow_multiple = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets a Boolean value indicating whether more than one instance of the indicated attribute can be specified for a single program element.\n\n\n\nGet: AllowMultiple(self: AttributeUsageAttribute) -> bool\n\n\n\nSet: AllowMultiple(self: AttributeUsageAttribute)=value\n\n'
inherited = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets or sets a Boolean value indicating whether the indicated attribute can be inherited by derived classes and overriding members.\n\n\n\nGet: Inherited(self: AttributeUsageAttribute) -> bool\n\n\n\nSet: Inherited(self: AttributeUsageAttribute)=value\n\n'
valid_on = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Gets a set of values identifying which program elements that the indicated attribute can be applied to.\n\n\n\nGet: ValidOn(self: AttributeUsageAttribute) -> AttributeTargets\n\n\n\n' |
"""
Backports of library components from newer python versions.
For internal use only.
"""
| """
Backports of library components from newer python versions.
For internal use only.
""" |
settings = {
"ARCHIVE" : True,
"MAX_POSTS" : 5000
}
| settings = {'ARCHIVE': True, 'MAX_POSTS': 5000} |
def minim(lst):
min = 100000
minI = 99999
for i in range(len(lst)):
if lst[i]<min:
min = lst[i]
minI=i
return min,minI
lst = list(map(int, input().split()))
lst2 = len(lst)*[0]
min = lst[1]
for i in range(len(lst)):
x,y = minim(lst)
lst2.append(x)
# print(minim(lst))
| def minim(lst):
min = 100000
min_i = 99999
for i in range(len(lst)):
if lst[i] < min:
min = lst[i]
min_i = i
return (min, minI)
lst = list(map(int, input().split()))
lst2 = len(lst) * [0]
min = lst[1]
for i in range(len(lst)):
(x, y) = minim(lst)
lst2.append(x) |
class CapacityMixin:
@staticmethod
def get_capacity(capacity, amount):
if amount > capacity:
return "Capacity reached!"
return capacity - amount
| class Capacitymixin:
@staticmethod
def get_capacity(capacity, amount):
if amount > capacity:
return 'Capacity reached!'
return capacity - amount |
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'program',
'type': 'executable',
'msvs_cygwin_shell': 0,
'sources': [
'program.c',
],
'actions': [
{
'action_name': 'make-prog1',
'inputs': [
'make-prog1.py',
],
'outputs': [
'<(INTERMEDIATE_DIR)/prog1.c',
],
'action': [
'python', '<(_inputs)', '<@(_outputs)',
],
'process_outputs_as_sources': 1,
},
{
'action_name': 'make-prog2',
'inputs': [
'make-prog2.py',
],
'outputs': [
'actions-out/prog2.c',
],
'action': [
'python', '<(_inputs)', '<@(_outputs)',
],
'process_outputs_as_sources': 1,
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
],
},
{
'target_name': 'counter',
'type': 'none',
'actions': [
{
# This action should always run, regardless of whether or not it's
# inputs or the command-line change. We do this by creating a dummy
# first output, which is always missing, thus causing the build to
# always try to recreate it. Actual output files should be listed
# after the dummy one, and dependent targets should list the real
# output(s) in their inputs
# (see '../actions.gyp:depend_on_always_run_action').
'action_name': 'action_counter',
'inputs': [
'counter.py',
],
'outputs': [
'actions-out/action-counter.txt.always',
'actions-out/action-counter.txt',
],
'action': [
'python', '<(_inputs)', 'actions-out/action-counter.txt', '2',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
],
},
],
}
| {'targets': [{'target_name': 'program', 'type': 'executable', 'msvs_cygwin_shell': 0, 'sources': ['program.c'], 'actions': [{'action_name': 'make-prog1', 'inputs': ['make-prog1.py'], 'outputs': ['<(INTERMEDIATE_DIR)/prog1.c'], 'action': ['python', '<(_inputs)', '<@(_outputs)'], 'process_outputs_as_sources': 1}, {'action_name': 'make-prog2', 'inputs': ['make-prog2.py'], 'outputs': ['actions-out/prog2.c'], 'action': ['python', '<(_inputs)', '<@(_outputs)'], 'process_outputs_as_sources': 1, 'msvs_cygwin_shell': 0}]}, {'target_name': 'counter', 'type': 'none', 'actions': [{'action_name': 'action_counter', 'inputs': ['counter.py'], 'outputs': ['actions-out/action-counter.txt.always', 'actions-out/action-counter.txt'], 'action': ['python', '<(_inputs)', 'actions-out/action-counter.txt', '2'], 'msvs_cygwin_shell': 0}]}]} |
print("Enter Num 1 : ")
num1 = int(input())
print("Enter Num 2 : ")
num2 = int(input())
print("Sum = ", num1+num2)
print("This is important") | print('Enter Num 1 : ')
num1 = int(input())
print('Enter Num 2 : ')
num2 = int(input())
print('Sum = ', num1 + num2)
print('This is important') |
def likelihood(theta_hat, x, y):
"""The likelihood function for a linear model with noise sampled from a
Gaussian distribution with zero mean and unit variance.
Args:
theta_hat (float): An estimate of the slope parameter.
x (ndarray): An array of shape (samples,) that contains the input values.
y (ndarray): An array of shape (samples,) that contains the corresponding
measurement values to the inputs.
Returns:
float: the likelihood value for the theta_hat estimate
"""
sigma = 1
pdf = 1 / np.sqrt(2*np.pi*sigma**2) * np.exp(-(y - theta_hat*x)**2 / (2*sigma**2))
return pdf | def likelihood(theta_hat, x, y):
"""The likelihood function for a linear model with noise sampled from a
Gaussian distribution with zero mean and unit variance.
Args:
theta_hat (float): An estimate of the slope parameter.
x (ndarray): An array of shape (samples,) that contains the input values.
y (ndarray): An array of shape (samples,) that contains the corresponding
measurement values to the inputs.
Returns:
float: the likelihood value for the theta_hat estimate
"""
sigma = 1
pdf = 1 / np.sqrt(2 * np.pi * sigma ** 2) * np.exp(-(y - theta_hat * x) ** 2 / (2 * sigma ** 2))
return pdf |
UI={
'new_goal':{
't':'Send me the name of your goal'
}
}
| ui = {'new_goal': {'t': 'Send me the name of your goal'}} |
"""
Write a Python program to count occurrences of a substring in a string.
"""
str1 = "This pandemic is something serious, in the sense that a virus can spread while lockdown is on. Let's re_examine this virus please."
print("The Text is:", str1)
print()
print("The Number of occurence of the word virus is: ",str1.count("virus")) | """
Write a Python program to count occurrences of a substring in a string.
"""
str1 = "This pandemic is something serious, in the sense that a virus can spread while lockdown is on. Let's re_examine this virus please."
print('The Text is:', str1)
print()
print('The Number of occurence of the word virus is: ', str1.count('virus')) |
#if customizations are required when doing the update of the code of the jpackage
def main(j,jp,force=False):
recipe=jp.getCodeMgmtRecipe()
recipe.update(force=force)
| def main(j, jp, force=False):
recipe = jp.getCodeMgmtRecipe()
recipe.update(force=force) |
"""Targets for generating TensorFlow Python API __init__.py files."""
# keep sorted
TENSORFLOW_API_INIT_FILES = [
# BEGIN GENERATED FILES
"__init__.py",
"app/__init__.py",
"bitwise/__init__.py",
"compat/__init__.py",
"data/__init__.py",
"debugging/__init__.py",
"distributions/__init__.py",
"distributions/bijectors/__init__.py",
"dtypes/__init__.py",
"errors/__init__.py",
"feature_column/__init__.py",
"gfile/__init__.py",
"graph_util/__init__.py",
"image/__init__.py",
"io/__init__.py",
"initializers/__init__.py",
"keras/__init__.py",
"keras/activations/__init__.py",
"keras/applications/__init__.py",
"keras/applications/densenet/__init__.py",
"keras/applications/inception_resnet_v2/__init__.py",
"keras/applications/inception_v3/__init__.py",
"keras/applications/mobilenet/__init__.py",
"keras/applications/nasnet/__init__.py",
"keras/applications/resnet50/__init__.py",
"keras/applications/vgg16/__init__.py",
"keras/applications/vgg19/__init__.py",
"keras/applications/xception/__init__.py",
"keras/backend/__init__.py",
"keras/callbacks/__init__.py",
"keras/constraints/__init__.py",
"keras/datasets/__init__.py",
"keras/datasets/boston_housing/__init__.py",
"keras/datasets/cifar10/__init__.py",
"keras/datasets/cifar100/__init__.py",
"keras/datasets/fashion_mnist/__init__.py",
"keras/datasets/imdb/__init__.py",
"keras/datasets/mnist/__init__.py",
"keras/datasets/reuters/__init__.py",
"keras/estimator/__init__.py",
"keras/initializers/__init__.py",
"keras/layers/__init__.py",
"keras/losses/__init__.py",
"keras/metrics/__init__.py",
"keras/models/__init__.py",
"keras/optimizers/__init__.py",
"keras/preprocessing/__init__.py",
"keras/preprocessing/image/__init__.py",
"keras/preprocessing/sequence/__init__.py",
"keras/preprocessing/text/__init__.py",
"keras/regularizers/__init__.py",
"keras/utils/__init__.py",
"keras/wrappers/__init__.py",
"keras/wrappers/scikit_learn/__init__.py",
"layers/__init__.py",
"linalg/__init__.py",
"logging/__init__.py",
"losses/__init__.py",
"manip/__init__.py",
"math/__init__.py",
"metrics/__init__.py",
"nn/__init__.py",
"nn/rnn_cell/__init__.py",
"profiler/__init__.py",
"python_io/__init__.py",
"quantization/__init__.py",
"resource_loader/__init__.py",
"strings/__init__.py",
"saved_model/__init__.py",
"saved_model/builder/__init__.py",
"saved_model/constants/__init__.py",
"saved_model/loader/__init__.py",
"saved_model/main_op/__init__.py",
"saved_model/signature_constants/__init__.py",
"saved_model/signature_def_utils/__init__.py",
"saved_model/tag_constants/__init__.py",
"saved_model/utils/__init__.py",
"sets/__init__.py",
"sparse/__init__.py",
"spectral/__init__.py",
"summary/__init__.py",
"sysconfig/__init__.py",
"test/__init__.py",
"train/__init__.py",
"train/queue_runner/__init__.py",
"user_ops/__init__.py",
# END GENERATED FILES
]
# keep sorted
ESTIMATOR_API_INIT_FILES = [
# BEGIN GENERATED ESTIMATOR FILES
"__init__.py",
"estimator/__init__.py",
"estimator/export/__init__.py",
"estimator/inputs/__init__.py",
# END GENERATED ESTIMATOR FILES
]
# Creates a genrule that generates a directory structure with __init__.py
# files that import all exported modules (i.e. modules with tf_export
# decorators).
#
# Args:
# name: name of genrule to create.
# output_files: List of __init__.py files that should be generated.
# This list should include file name for every module exported using
# tf_export. For e.g. if an op is decorated with
# @tf_export('module1.module2', 'module3'). Then, output_files should
# include module1/module2/__init__.py and module3/__init__.py.
# root_init_template: Python init file that should be used as template for
# root __init__.py file. "# API IMPORTS PLACEHOLDER" comment inside this
# template will be replaced with root imports collected by this genrule.
# srcs: genrule sources. If passing root_init_template, the template file
# must be included in sources.
# api_name: Name of the project that you want to generate API files for
# (e.g. "tensorflow" or "estimator").
# package: Python package containing the @tf_export decorators you want to
# process
# package_dep: Python library target containing your package.
def gen_api_init_files(
name,
output_files = TENSORFLOW_API_INIT_FILES,
root_init_template = None,
srcs = [],
api_name = "tensorflow",
package = "tensorflow.python",
package_dep = "//tensorflow/python:no_contrib",
output_package = "tensorflow"):
root_init_template_flag = ""
if root_init_template:
root_init_template_flag = "--root_init_template=$(location " + root_init_template + ")"
api_gen_binary_target = "create_" + package + "_api"
native.py_binary(
name = "create_" + package + "_api",
srcs = ["//tensorflow/tools/api/generator:create_python_api.py"],
main = "//tensorflow/tools/api/generator:create_python_api.py",
srcs_version = "PY2AND3",
visibility = ["//visibility:public"],
deps = [
package_dep,
"//tensorflow/tools/api/generator:doc_srcs",
],
)
native.genrule(
name = name,
outs = output_files,
cmd = (
"$(location :" + api_gen_binary_target + ") " +
root_init_template_flag + " --apidir=$(@D) --apiname=" +
api_name + " --package=" + package + " --output_package=" +
output_package + " $(OUTS)"),
srcs = srcs,
tools = [":" + api_gen_binary_target ],
visibility = ["//tensorflow:__pkg__"],
)
| """Targets for generating TensorFlow Python API __init__.py files."""
tensorflow_api_init_files = ['__init__.py', 'app/__init__.py', 'bitwise/__init__.py', 'compat/__init__.py', 'data/__init__.py', 'debugging/__init__.py', 'distributions/__init__.py', 'distributions/bijectors/__init__.py', 'dtypes/__init__.py', 'errors/__init__.py', 'feature_column/__init__.py', 'gfile/__init__.py', 'graph_util/__init__.py', 'image/__init__.py', 'io/__init__.py', 'initializers/__init__.py', 'keras/__init__.py', 'keras/activations/__init__.py', 'keras/applications/__init__.py', 'keras/applications/densenet/__init__.py', 'keras/applications/inception_resnet_v2/__init__.py', 'keras/applications/inception_v3/__init__.py', 'keras/applications/mobilenet/__init__.py', 'keras/applications/nasnet/__init__.py', 'keras/applications/resnet50/__init__.py', 'keras/applications/vgg16/__init__.py', 'keras/applications/vgg19/__init__.py', 'keras/applications/xception/__init__.py', 'keras/backend/__init__.py', 'keras/callbacks/__init__.py', 'keras/constraints/__init__.py', 'keras/datasets/__init__.py', 'keras/datasets/boston_housing/__init__.py', 'keras/datasets/cifar10/__init__.py', 'keras/datasets/cifar100/__init__.py', 'keras/datasets/fashion_mnist/__init__.py', 'keras/datasets/imdb/__init__.py', 'keras/datasets/mnist/__init__.py', 'keras/datasets/reuters/__init__.py', 'keras/estimator/__init__.py', 'keras/initializers/__init__.py', 'keras/layers/__init__.py', 'keras/losses/__init__.py', 'keras/metrics/__init__.py', 'keras/models/__init__.py', 'keras/optimizers/__init__.py', 'keras/preprocessing/__init__.py', 'keras/preprocessing/image/__init__.py', 'keras/preprocessing/sequence/__init__.py', 'keras/preprocessing/text/__init__.py', 'keras/regularizers/__init__.py', 'keras/utils/__init__.py', 'keras/wrappers/__init__.py', 'keras/wrappers/scikit_learn/__init__.py', 'layers/__init__.py', 'linalg/__init__.py', 'logging/__init__.py', 'losses/__init__.py', 'manip/__init__.py', 'math/__init__.py', 'metrics/__init__.py', 'nn/__init__.py', 'nn/rnn_cell/__init__.py', 
'profiler/__init__.py', 'python_io/__init__.py', 'quantization/__init__.py', 'resource_loader/__init__.py', 'strings/__init__.py', 'saved_model/__init__.py', 'saved_model/builder/__init__.py', 'saved_model/constants/__init__.py', 'saved_model/loader/__init__.py', 'saved_model/main_op/__init__.py', 'saved_model/signature_constants/__init__.py', 'saved_model/signature_def_utils/__init__.py', 'saved_model/tag_constants/__init__.py', 'saved_model/utils/__init__.py', 'sets/__init__.py', 'sparse/__init__.py', 'spectral/__init__.py', 'summary/__init__.py', 'sysconfig/__init__.py', 'test/__init__.py', 'train/__init__.py', 'train/queue_runner/__init__.py', 'user_ops/__init__.py']
estimator_api_init_files = ['__init__.py', 'estimator/__init__.py', 'estimator/export/__init__.py', 'estimator/inputs/__init__.py']
def gen_api_init_files(name, output_files=TENSORFLOW_API_INIT_FILES, root_init_template=None, srcs=[], api_name='tensorflow', package='tensorflow.python', package_dep='//tensorflow/python:no_contrib', output_package='tensorflow'):
root_init_template_flag = ''
if root_init_template:
root_init_template_flag = '--root_init_template=$(location ' + root_init_template + ')'
api_gen_binary_target = 'create_' + package + '_api'
native.py_binary(name='create_' + package + '_api', srcs=['//tensorflow/tools/api/generator:create_python_api.py'], main='//tensorflow/tools/api/generator:create_python_api.py', srcs_version='PY2AND3', visibility=['//visibility:public'], deps=[package_dep, '//tensorflow/tools/api/generator:doc_srcs'])
native.genrule(name=name, outs=output_files, cmd='$(location :' + api_gen_binary_target + ') ' + root_init_template_flag + ' --apidir=$(@D) --apiname=' + api_name + ' --package=' + package + ' --output_package=' + output_package + ' $(OUTS)', srcs=srcs, tools=[':' + api_gen_binary_target], visibility=['//tensorflow:__pkg__']) |
# Copyright 1999-2021 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
_flex_doc_FRAME = """
Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`).
Equivalent to ``{equiv}``, but with support to substitute a fill_value
for missing data in one of the inputs. With reverse version, `{reverse}`.
Among flexible wrappers (`add`, `sub`, `mul`, `div`, `mod`, `pow`) to
arithmetic operators: `+`, `-`, `*`, `/`, `//`, `%`, `**`.
Parameters
----------
other : scalar, sequence, Series, or DataFrame
Any single or multiple element data structure, or list-like object.
axis : {{0 or 'index', 1 or 'columns'}}
Whether to compare by the index (0 or 'index') or columns
(1 or 'columns'). For Series input, axis to match Series index on.
level : int or label
Broadcast across a level, matching Index values on the
passed MultiIndex level.
fill_value : float or None, default None
Fill existing missing (NaN) values, and any new element needed for
successful DataFrame alignment, with this value before computation.
If data in both corresponding DataFrame locations is missing
the result will be missing.
Returns
-------
DataFrame
Result of the arithmetic operation.
See Also
--------
DataFrame.add : Add DataFrames.
DataFrame.sub : Subtract DataFrames.
DataFrame.mul : Multiply DataFrames.
DataFrame.div : Divide DataFrames (float division).
DataFrame.truediv : Divide DataFrames (float division).
DataFrame.floordiv : Divide DataFrames (integer division).
DataFrame.mod : Calculate modulo (remainder after division).
DataFrame.pow : Calculate exponential power.
Notes
-----
Mismatched indices will be unioned together.
Examples
--------
>>> import mars.dataframe as md
>>> df = md.DataFrame({{'angles': [0, 3, 4],
... 'degrees': [360, 180, 360]}},
... index=['circle', 'triangle', 'rectangle'])
>>> df.execute()
angles degrees
circle 0 360
triangle 3 180
rectangle 4 360
Add a scalar with operator version which return the same
results.
>>> (df + 1).execute()
angles degrees
circle 1 361
triangle 4 181
rectangle 5 361
>>> df.add(1).execute()
angles degrees
circle 1 361
triangle 4 181
rectangle 5 361
Divide by constant with reverse version.
>>> df.div(10).execute()
angles degrees
circle 0.0 36.0
triangle 0.3 18.0
rectangle 0.4 36.0
>>> df.rdiv(10).execute()
angles degrees
circle inf 0.027778
triangle 3.333333 0.055556
rectangle 2.500000 0.027778
Subtract a list and Series by axis with operator version.
>>> (df - [1, 2]).execute()
angles degrees
circle -1 358
triangle 2 178
rectangle 3 358
>>> df.sub([1, 2], axis='columns').execute()
angles degrees
circle -1 358
triangle 2 178
rectangle 3 358
>>> df.sub(md.Series([1, 1, 1], index=['circle', 'triangle', 'rectangle']),
... axis='index').execute()
angles degrees
circle -1 359
triangle 2 179
rectangle 3 359
Multiply a DataFrame of different shape with operator version.
>>> other = md.DataFrame({{'angles': [0, 3, 4]}},
... index=['circle', 'triangle', 'rectangle'])
>>> other.execute()
angles
circle 0
triangle 3
rectangle 4
>>> (df * other).execute()
angles degrees
circle 0 NaN
triangle 9 NaN
rectangle 16 NaN
>>> df.mul(other, fill_value=0).execute()
angles degrees
circle 0 0.0
triangle 9 0.0
rectangle 16 0.0
Divide by a MultiIndex by level.
>>> df_multindex = md.DataFrame({{'angles': [0, 3, 4, 4, 5, 6],
... 'degrees': [360, 180, 360, 360, 540, 720]}},
... index=[['A', 'A', 'A', 'B', 'B', 'B'],
... ['circle', 'triangle', 'rectangle',
... 'square', 'pentagon', 'hexagon']])
>>> df_multindex.execute()
angles degrees
A circle 0 360
triangle 3 180
rectangle 4 360
B square 4 360
pentagon 5 540
hexagon 6 720
>>> df.div(df_multindex, level=1, fill_value=0).execute()
angles degrees
A circle NaN 1.0
triangle 1.0 1.0
rectangle 1.0 1.0
B square 0.0 0.0
pentagon 0.0 0.0
hexagon 0.0 0.0
"""
_flex_doc_SERIES = """
Return {desc} of series and other, element-wise (binary operator `{op_name}`).
Equivalent to ``series {equiv} other``, but with support to substitute a fill_value for
missing data in one of the inputs.
Parameters
----------
other : Series or scalar value
fill_value : None or float value, default None (NaN)
Fill existing missing (NaN) values, and any new element needed for
successful Series alignment, with this value before computation.
If data in both corresponding Series locations is missing
the result will be missing.
level : int or name
Broadcast across a level, matching Index values on the
passed MultiIndex level.
Returns
-------
Series
The result of the operation.
See Also
--------
Series.{reverse}
Examples
--------
>>> import numpy as np
>>> import mars.dataframe as md
>>> a = md.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd'])
>>> a.execute()
a 1.0
b 1.0
c 1.0
d NaN
dtype: float64
>>> b = md.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e'])
>>> b.execute()
a 1.0
b NaN
d 1.0
e NaN
dtype: float64
"""
_flex_comp_doc_FRAME = """
Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`).
Among flexible wrappers (`eq`, `ne`, `le`, `lt`, `ge`, `gt`) to comparison
operators.
Equivalent to `dataframe {equiv} other` with support to choose axis (rows or columns)
and level for comparison.
Parameters
----------
other : scalar, sequence, Series, or DataFrame
Any single or multiple element data structure, or list-like object.
axis : {{0 or 'index', 1 or 'columns'}}, default 'columns'
Whether to compare by the index (0 or 'index') or columns
(1 or 'columns').
level : int or label
Broadcast across a level, matching Index values on the passed
MultiIndex level.
Returns
-------
DataFrame of bool
Result of the comparison.
See Also
--------
DataFrame.eq : Compare DataFrames for equality elementwise.
DataFrame.ne : Compare DataFrames for inequality elementwise.
DataFrame.le : Compare DataFrames for less than inequality
or equality elementwise.
DataFrame.lt : Compare DataFrames for strictly less than
inequality elementwise.
DataFrame.ge : Compare DataFrames for greater than inequality
or equality elementwise.
DataFrame.gt : Compare DataFrames for strictly greater than
inequality elementwise.
Notes
-----
Mismatched indices will be unioned together.
`NaN` values are considered different (i.e. `NaN` != `NaN`).
Examples
--------
>>> df = pd.DataFrame({{'cost': [250, 150, 100],
... 'revenue': [100, 250, 300]}},
... index=['A', 'B', 'C'])
>>> df.execute()
cost revenue
A 250 100
B 150 250
C 100 300
Comparison with a scalar, using either the operator or method:
>>> (df == 100).execute()
cost revenue
A False True
B False False
C True False
>>> df.eq(100).execute()
cost revenue
A False True
B False False
C True False
When `other` is a :class:`Series`, the columns of a DataFrame are aligned
with the index of `other` and broadcast:
>>> (df != pd.Series([100, 250], index=["cost", "revenue"])).execute()
cost revenue
A True True
B True False
C False True
Use the method to control the broadcast axis:
>>> df.ne(pd.Series([100, 300], index=["A", "D"]), axis='index').execute()
cost revenue
A True False
B True True
C True True
D True True
When comparing to an arbitrary sequence, the number of columns must
match the number elements in `other`:
>>> (df == [250, 100]).execute()
cost revenue
A True True
B False False
C False False
Use the method to control the axis:
>>> df.eq([250, 250, 100], axis='index').execute()
cost revenue
A True False
B False True
C True False
Compare to a DataFrame of different shape.
>>> other = pd.DataFrame({{'revenue': [300, 250, 100, 150]}},
... index=['A', 'B', 'C', 'D'])
>>> other.execute()
revenue
A 300
B 250
C 100
D 150
>>> df.gt(other).execute()
cost revenue
A False False
B False False
C False True
D False False
Compare to a MultiIndex by level.
>>> df_multindex = pd.DataFrame({{'cost': [250, 150, 100, 150, 300, 220],
... 'revenue': [100, 250, 300, 200, 175, 225]}},
... index=[['Q1', 'Q1', 'Q1', 'Q2', 'Q2', 'Q2'],
... ['A', 'B', 'C', 'A', 'B', 'C']])
>>> df_multindex.execute()
cost revenue
Q1 A 250 100
B 150 250
C 100 300
Q2 A 150 200
B 300 175
C 220 225
>>> df.le(df_multindex, level=1).execute()
cost revenue
Q1 A True True
B True True
C True True
Q2 A False True
B True False
C True False
"""
_flex_comp_doc_SERIES = """
Return {desc} of series and other, element-wise (binary operator `{op_name}`).
Equivalent to ``series {equiv} other``, but with support to substitute a fill_value for
missing data in one of the inputs.
Parameters
----------
other : Series or scalar value
fill_value : None or float value, default None (NaN)
Fill existing missing (NaN) values, and any new element needed for
successful Series alignment, with this value before computation.
If data in both corresponding Series locations is missing
the result will be missing.
level : int or name
Broadcast across a level, matching Index values on the
passed MultiIndex level.
Returns
-------
Series
The result of the operation.
Examples
--------
>>> import numpy as np
>>> import mars.dataframe as md
>>> a = md.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd'])
>>> a.execute()
a 1.0
b 1.0
c 1.0
d NaN
dtype: float64
>>> b = md.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e'])
>>> b.execute()
a 1.0
b NaN
d 1.0
e NaN
dtype: float64
"""
def bin_arithmetic_doc(
desc, op_name=None, equiv=None, reverse=None, series_example=None
):
def wrapper(fun):
nonlocal op_name, reverse
op_name = op_name or fun.__name__
if reverse is None:
reverse = op_name[1:] if op_name.startswith("r") else "r" + op_name
fun.__frame_doc__ = _flex_doc_FRAME.format(
desc=desc, op_name=op_name, equiv=equiv, reverse=reverse
)
fun.__series_doc__ = _flex_doc_SERIES.format(
desc=desc, op_name=op_name, equiv=equiv, reverse=reverse
)
if series_example is not None: # pragma: no branch
fun.__series_doc__ += "\n" + series_example.strip()
return fun
return wrapper
def bin_compare_doc(desc, op_name=None, equiv=None, series_example=None):
def wrapper(fun):
nonlocal op_name
op_name = op_name or fun.__name__
fun.__frame_doc__ = _flex_comp_doc_FRAME.format(
desc=desc, op_name=op_name, equiv=equiv
)
fun.__series_doc__ = _flex_comp_doc_SERIES.format(
desc=desc, op_name=op_name, equiv=equiv
)
if series_example is not None: # pragma: no branch
fun.__series_doc__ += "\n" + series_example.strip()
return fun
return wrapper
| _flex_doc_frame = "\nGet {desc} of dataframe and other, element-wise (binary operator `{op_name}`).\nEquivalent to ``{equiv}``, but with support to substitute a fill_value\nfor missing data in one of the inputs. With reverse version, `{reverse}`.\nAmong flexible wrappers (`add`, `sub`, `mul`, `div`, `mod`, `pow`) to\narithmetic operators: `+`, `-`, `*`, `/`, `//`, `%`, `**`.\n\nParameters\n----------\nother : scalar, sequence, Series, or DataFrame\n Any single or multiple element data structure, or list-like object.\naxis : {{0 or 'index', 1 or 'columns'}}\n Whether to compare by the index (0 or 'index') or columns\n (1 or 'columns'). For Series input, axis to match Series index on.\nlevel : int or label\n Broadcast across a level, matching Index values on the\n passed MultiIndex level.\nfill_value : float or None, default None\n Fill existing missing (NaN) values, and any new element needed for\n successful DataFrame alignment, with this value before computation.\n If data in both corresponding DataFrame locations is missing\n the result will be missing.\n\nReturns\n-------\nDataFrame\n Result of the arithmetic operation.\n\nSee Also\n--------\nDataFrame.add : Add DataFrames.\nDataFrame.sub : Subtract DataFrames.\nDataFrame.mul : Multiply DataFrames.\nDataFrame.div : Divide DataFrames (float division).\nDataFrame.truediv : Divide DataFrames (float division).\nDataFrame.floordiv : Divide DataFrames (integer division).\nDataFrame.mod : Calculate modulo (remainder after division).\nDataFrame.pow : Calculate exponential power.\n\nNotes\n-----\nMismatched indices will be unioned together.\n\nExamples\n--------\n>>> import mars.dataframe as md\n>>> df = md.DataFrame({{'angles': [0, 3, 4],\n... 'degrees': [360, 180, 360]}},\n... 
index=['circle', 'triangle', 'rectangle'])\n>>> df.execute()\n angles degrees\ncircle 0 360\ntriangle 3 180\nrectangle 4 360\n\nAdd a scalar with operator version which return the same\nresults.\n\n>>> (df + 1).execute()\n angles degrees\ncircle 1 361\ntriangle 4 181\nrectangle 5 361\n\n>>> df.add(1).execute()\n angles degrees\ncircle 1 361\ntriangle 4 181\nrectangle 5 361\n\nDivide by constant with reverse version.\n\n>>> df.div(10).execute()\n angles degrees\ncircle 0.0 36.0\ntriangle 0.3 18.0\nrectangle 0.4 36.0\n\n>>> df.rdiv(10).execute()\n angles degrees\ncircle inf 0.027778\ntriangle 3.333333 0.055556\nrectangle 2.500000 0.027778\n\nSubtract a list and Series by axis with operator version.\n\n>>> (df - [1, 2]).execute()\n angles degrees\ncircle -1 358\ntriangle 2 178\nrectangle 3 358\n\n>>> df.sub([1, 2], axis='columns').execute()\n angles degrees\ncircle -1 358\ntriangle 2 178\nrectangle 3 358\n\n>>> df.sub(md.Series([1, 1, 1], index=['circle', 'triangle', 'rectangle']),\n... axis='index').execute()\n angles degrees\ncircle -1 359\ntriangle 2 179\nrectangle 3 359\n\nMultiply a DataFrame of different shape with operator version.\n\n>>> other = md.DataFrame({{'angles': [0, 3, 4]}},\n... index=['circle', 'triangle', 'rectangle'])\n>>> other.execute()\n angles\ncircle 0\ntriangle 3\nrectangle 4\n\n>>> (df * other).execute()\n angles degrees\ncircle 0 NaN\ntriangle 9 NaN\nrectangle 16 NaN\n\n>>> df.mul(other, fill_value=0).execute()\n angles degrees\ncircle 0 0.0\ntriangle 9 0.0\nrectangle 16 0.0\n\nDivide by a MultiIndex by level.\n\n>>> df_multindex = md.DataFrame({{'angles': [0, 3, 4, 4, 5, 6],\n... 'degrees': [360, 180, 360, 360, 540, 720]}},\n... index=[['A', 'A', 'A', 'B', 'B', 'B'],\n... ['circle', 'triangle', 'rectangle',\n... 
'square', 'pentagon', 'hexagon']])\n>>> df_multindex.execute()\n angles degrees\nA circle 0 360\n triangle 3 180\n rectangle 4 360\nB square 4 360\n pentagon 5 540\n hexagon 6 720\n\n>>> df.div(df_multindex, level=1, fill_value=0).execute()\n angles degrees\nA circle NaN 1.0\n triangle 1.0 1.0\n rectangle 1.0 1.0\nB square 0.0 0.0\n pentagon 0.0 0.0\n hexagon 0.0 0.0\n"
_flex_doc_series = "\nReturn {desc} of series and other, element-wise (binary operator `{op_name}`).\n\nEquivalent to ``series {equiv} other``, but with support to substitute a fill_value for\nmissing data in one of the inputs.\n\nParameters\n----------\nother : Series or scalar value\nfill_value : None or float value, default None (NaN)\n Fill existing missing (NaN) values, and any new element needed for\n successful Series alignment, with this value before computation.\n If data in both corresponding Series locations is missing\n the result will be missing.\nlevel : int or name\n Broadcast across a level, matching Index values on the\n passed MultiIndex level.\n\nReturns\n-------\nSeries\n The result of the operation.\n\nSee Also\n--------\nSeries.{reverse}\n\nExamples\n--------\n>>> import numpy as np\n>>> import mars.dataframe as md\n>>> a = md.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd'])\n>>> a.execute()\na 1.0\nb 1.0\nc 1.0\nd NaN\ndtype: float64\n\n>>> b = md.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e'])\n>>> b.execute()\na 1.0\nb NaN\nd 1.0\ne NaN\ndtype: float64\n"
_flex_comp_doc_frame = '\nGet {desc} of dataframe and other, element-wise (binary operator `{op_name}`).\nAmong flexible wrappers (`eq`, `ne`, `le`, `lt`, `ge`, `gt`) to comparison\noperators.\n\nEquivalent to `dataframe {equiv} other` with support to choose axis (rows or columns)\nand level for comparison.\n\nParameters\n----------\nother : scalar, sequence, Series, or DataFrame\n Any single or multiple element data structure, or list-like object.\naxis : {{0 or \'index\', 1 or \'columns\'}}, default \'columns\'\n Whether to compare by the index (0 or \'index\') or columns\n (1 or \'columns\').\nlevel : int or label\n Broadcast across a level, matching Index values on the passed\n MultiIndex level.\n\nReturns\n-------\nDataFrame of bool\n Result of the comparison.\n\nSee Also\n--------\nDataFrame.eq : Compare DataFrames for equality elementwise.\nDataFrame.ne : Compare DataFrames for inequality elementwise.\nDataFrame.le : Compare DataFrames for less than inequality\n or equality elementwise.\nDataFrame.lt : Compare DataFrames for strictly less than\n inequality elementwise.\nDataFrame.ge : Compare DataFrames for greater than inequality\n or equality elementwise.\nDataFrame.gt : Compare DataFrames for strictly greater than\n inequality elementwise.\n\nNotes\n-----\nMismatched indices will be unioned together.\n`NaN` values are considered different (i.e. `NaN` != `NaN`).\n\nExamples\n--------\n>>> df = pd.DataFrame({{\'cost\': [250, 150, 100],\n... \'revenue\': [100, 250, 300]}},\n... 
index=[\'A\', \'B\', \'C\'])\n>>> df.execute()\n cost revenue\nA 250 100\nB 150 250\nC 100 300\n\nComparison with a scalar, using either the operator or method:\n\n>>> (df == 100).execute()\n cost revenue\nA False True\nB False False\nC True False\n\n>>> df.eq(100).execute()\n cost revenue\nA False True\nB False False\nC True False\n\nWhen `other` is a :class:`Series`, the columns of a DataFrame are aligned\nwith the index of `other` and broadcast:\n\n>>> (df != pd.Series([100, 250], index=["cost", "revenue"])).execute()\n cost revenue\nA True True\nB True False\nC False True\n\nUse the method to control the broadcast axis:\n\n>>> df.ne(pd.Series([100, 300], index=["A", "D"]), axis=\'index\').execute()\n cost revenue\nA True False\nB True True\nC True True\nD True True\n\nWhen comparing to an arbitrary sequence, the number of columns must\nmatch the number elements in `other`:\n\n>>> (df == [250, 100]).execute()\n cost revenue\nA True True\nB False False\nC False False\n\nUse the method to control the axis:\n\n>>> df.eq([250, 250, 100], axis=\'index\').execute()\n cost revenue\nA True False\nB False True\nC True False\n\nCompare to a DataFrame of different shape.\n\n>>> other = pd.DataFrame({{\'revenue\': [300, 250, 100, 150]}},\n... index=[\'A\', \'B\', \'C\', \'D\'])\n>>> other.execute()\n revenue\nA 300\nB 250\nC 100\nD 150\n\n>>> df.gt(other).execute()\n cost revenue\nA False False\nB False False\nC False True\nD False False\n\nCompare to a MultiIndex by level.\n\n>>> df_multindex = pd.DataFrame({{\'cost\': [250, 150, 100, 150, 300, 220],\n... \'revenue\': [100, 250, 300, 200, 175, 225]}},\n... index=[[\'Q1\', \'Q1\', \'Q1\', \'Q2\', \'Q2\', \'Q2\'],\n... 
[\'A\', \'B\', \'C\', \'A\', \'B\', \'C\']])\n>>> df_multindex.execute()\n cost revenue\nQ1 A 250 100\n B 150 250\n C 100 300\nQ2 A 150 200\n B 300 175\n C 220 225\n\n>>> df.le(df_multindex, level=1).execute()\n cost revenue\nQ1 A True True\n B True True\n C True True\nQ2 A False True\n B True False\n C True False\n'
_flex_comp_doc_series = "\nReturn {desc} of series and other, element-wise (binary operator `{op_name}`).\n\nEquivalent to ``series {equiv} other``, but with support to substitute a fill_value for\nmissing data in one of the inputs.\n\nParameters\n----------\nother : Series or scalar value\nfill_value : None or float value, default None (NaN)\n Fill existing missing (NaN) values, and any new element needed for\n successful Series alignment, with this value before computation.\n If data in both corresponding Series locations is missing\n the result will be missing.\nlevel : int or name\n Broadcast across a level, matching Index values on the\n passed MultiIndex level.\n\nReturns\n-------\nSeries\n The result of the operation.\n\nExamples\n--------\n>>> import numpy as np\n>>> import mars.dataframe as md\n>>> a = md.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd'])\n>>> a.execute()\na 1.0\nb 1.0\nc 1.0\nd NaN\ndtype: float64\n\n>>> b = md.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e'])\n>>> b.execute()\na 1.0\nb NaN\nd 1.0\ne NaN\ndtype: float64\n"
def bin_arithmetic_doc(desc, op_name=None, equiv=None, reverse=None, series_example=None):
def wrapper(fun):
nonlocal op_name, reverse
op_name = op_name or fun.__name__
if reverse is None:
reverse = op_name[1:] if op_name.startswith('r') else 'r' + op_name
fun.__frame_doc__ = _flex_doc_FRAME.format(desc=desc, op_name=op_name, equiv=equiv, reverse=reverse)
fun.__series_doc__ = _flex_doc_SERIES.format(desc=desc, op_name=op_name, equiv=equiv, reverse=reverse)
if series_example is not None:
fun.__series_doc__ += '\n' + series_example.strip()
return fun
return wrapper
def bin_compare_doc(desc, op_name=None, equiv=None, series_example=None):
    """Decorator factory attaching flexible-comparison docstrings to an operator.

    Fills the module-level ``_flex_comp_doc_FRAME`` / ``_flex_comp_doc_SERIES``
    templates and stores the results on the decorated function as
    ``__frame_doc__`` and ``__series_doc__``.  ``op_name`` defaults to the
    decorated function's name.
    """
    def decorator(fun):
        nonlocal op_name
        if not op_name:
            op_name = fun.__name__
        fun.__frame_doc__ = _flex_comp_doc_FRAME.format(desc=desc, op_name=op_name, equiv=equiv)
        fun.__series_doc__ = _flex_comp_doc_SERIES.format(desc=desc, op_name=op_name, equiv=equiv)
        if series_example is not None:
            # Append the extra example section to the series docstring.
            fun.__series_doc__ = fun.__series_doc__ + '\n' + series_example.strip()
        return fun
    return decorator
def maximo(x, y):
    """Return the larger of *x* and *y*.

    Delegates to the built-in ``max`` instead of re-implementing the
    comparison by hand.  For equal (``==``) values the returned value
    compares equal to both arguments, as before.
    """
    return max(x, y)
| def maximo(x, y):
if x > y:
return x
else:
return y |
########
# PART 1
def get_layers(data, width=25, height=6):
    """Split a flat list of pixels into layers of ``width * height`` pixels.

    :param data: flat sequence of pixel digits
    :param width: layer width in pixels (default 25, the puzzle image size)
    :param height: layer height in pixels (default 6)
    :return: list of layers, each a list of ``width * height`` pixels

    Fixes over the original: slicing instead of repeated ``pop(0)`` makes
    this O(n) rather than O(n^2), leaves the caller's ``data`` untouched,
    and no longer raises IndexError when the input length is not an exact
    multiple of the layer size (the final short layer is returned as-is).
    """
    layer_size = width * height
    return [list(data[i:i + layer_size]) for i in range(0, len(data), layer_size)]
def count_digit_on_layer(layer, digit):
    """Count how many pixels in *layer* equal *digit*."""
    total = 0
    for pixel in layer:
        if pixel == digit:
            total += 1
    return total
def get_layer_with_less_digit(layers, digit):
    """Return the layer containing the fewest occurrences of *digit*.

    On a tie the earliest such layer wins (same as the original
    ``totals.index(min(totals))`` lookup).
    """
    return min(layers, key=lambda layer: count_digit_on_layer(layer, digit))
def get_check_digit(layer):
    """Return the layer checksum: (number of 1 pixels) * (number of 2 pixels)."""
    ones = count_digit_on_layer(layer, 1)
    twos = count_digit_on_layer(layer, 2)
    return ones * twos
# Sanity checks from the puzzle examples (3x2 image, 12 digits).
layers = get_layers([int(ch) for ch in "123456789012"], 3, 2)
assert get_check_digit(get_layer_with_less_digit(layers, 0)) == 1
layers = get_layers([int(ch) for ch in "123256789012"], 3, 2)
assert get_check_digit(get_layer_with_less_digit(layers, 0)) == 2
# Read the real puzzle input: lines of digits (line[:-1] drops the newline).
# `infile` avoids shadowing the builtin `input`.
with open("event2019/day08/input.txt", "r") as infile:
    data = [int(ch) for line in infile for ch in line[:-1]]
layers = get_layers(data)
picked_layer = get_layer_with_less_digit(layers, 0)
# Reuse the already-selected layer instead of running the search twice.
answer = get_check_digit(picked_layer)
print("Part 1 =", answer)
assert answer == 1548  # check with accepted answer
########
# PART 2
def decode_image(layers, width=25, height=6):
    """Composite *layers* front-to-back and print the resulting image.

    Pixel values: 0 = black (printed as ' '), 1 = white (printed as '#'),
    2 = transparent (revealed by the first opaque pixel in a later layer).

    :param layers: list of layers, each a flat list of ``width * height`` pixels;
        ``layers[0]`` is the front layer
    :param width: image width in pixels (default 25)
    :param height: image height in pixels (default 6)

    Fix over the original: the composite is built on a copy and read by
    index, so neither *layers* nor its first layer is mutated or emptied.
    """
    image = list(layers[0])
    for layer in layers[1:]:
        for i in range(width * height):
            # Only transparent pixels let the layer underneath show through.
            if image[i] == 2:
                image[i] = layer[i]
    for row in range(height):
        for col in range(width):
            pixel = image[row * width + col]
            print(' ' if pixel == 0 else '#', end="")
        print()
# Example image from the puzzle statement (2x2); decoding it is left disabled.
layers = get_layers([int(ch) for ch in "0222112222120000"], 2, 2)
#decode_image(layers, 2, 2)
# Decode and render the real puzzle image from the input file
# (line[:-1] drops each trailing newline before converting digits).
with open("event2019/day08/input.txt", "r") as input:
    data = [int(ch) for line in input for ch in line[:-1]]
layers = get_layers(data)
print("Part 2 =")
decode_image(layers)
| def get_layers(data, width=25, height=6):
layers = []
while data:
layer = []
for _ in range(width * height):
layer.append(data.pop(0))
layers.append(layer)
return layers
def count_digit_on_layer(layer, digit):
return sum([1 for val in layer if val == digit])
def get_layer_with_less_digit(layers, digit):
totals = [count_digit_on_layer(layer, digit) for layer in layers]
return layers[totals.index(min(totals))]
def get_check_digit(layer):
return count_digit_on_layer(layer, 1) * count_digit_on_layer(layer, 2)
layers = get_layers([int(ch) for ch in '123456789012'], 3, 2)
assert get_check_digit(get_layer_with_less_digit(layers, 0)) == 1
layers = get_layers([int(ch) for ch in '123256789012'], 3, 2)
assert get_check_digit(get_layer_with_less_digit(layers, 0)) == 2
with open('event2019/day08/input.txt', 'r') as input:
data = [int(ch) for line in input for ch in line[:-1]]
layers = get_layers(data)
picked_layer = get_layer_with_less_digit(layers, 0)
answer = get_check_digit(get_layer_with_less_digit(layers, 0))
print('Part 1 =', answer)
assert answer == 1548
def decode_image(layers, width=25, height=6):
image = layers[0]
for layer in layers[1:]:
for i in range(width * height):
image[i] = layer[i] if image[i] == 2 else image[i]
for _ in range(height):
for _ in range(width):
ch = image.pop(0)
print(' ' if ch == 0 else '#', end='')
print()
layers = get_layers([int(ch) for ch in '0222112222120000'], 2, 2)
with open('event2019/day08/input.txt', 'r') as input:
data = [int(ch) for line in input for ch in line[:-1]]
layers = get_layers(data)
print('Part 2 =')
decode_image(layers) |
# English message templates for the bot's system services.
# System phrases
started: str = "Bot {} started"  # format arg: bot name/identifier
closed: str = "Bot disabled"
loaded_cog: str = "Load cog - {}"  # format arg: cog name
loading_failed: str = "Failed to load cog - {}\n{}"  # format args: cog name, error details (presumably a traceback — confirm at call site)
kill: str = "Bot disabled"
# System errors
not_owner: str = "You have to be bot's owner to use this command"
# LanguageService
lang_changed: str = "Language has been changed"
| started: str = 'Bot {} started'
closed: str = 'Bot disabled'
loaded_cog: str = 'Load cog - {}'
loading_failed: str = 'Failed to load cog - {}\n{}'
kill: str = 'Bot disabled'
not_owner: str = "You have to be bot's owner to use this command"
lang_changed: str = 'Language has been changed' |
wordlist = [
"a's",
"able",
"about",
"above",
"according",
"accordingly",
"across",
"actually",
"after",
"afterwards",
"again",
"against",
"ain't",
"all",
"allow",
"allows",
"almost",
"alone",
"along",
"already",
"also",
"although",
"always",
"am",
"among",
"amongst",
"an",
"and",
"another",
"any",
"anybody",
"anyhow",
"anyone",
"anything",
"anyway",
"anyways",
"anywhere",
"apart",
"appear",
"appreciate",
"appropriate",
"are",
"aren't",
"around",
"as",
"aside",
"ask",
"asking",
"associated",
"at",
"available",
"away",
"awfully",
"be",
"became",
"because",
"become",
"becomes",
"becoming",
"been",
"before",
"beforehand",
"behind",
"being",
"believe",
"below",
"beside",
"besides",
"best",
"better",
"between",
"beyond",
"both",
"brief",
"but",
"by",
"c'mon",
"c's",
"came",
"can",
"can't",
"cannot",
"cant",
"cause",
"causes",
"certain",
"certainly",
"changes",
"clearly",
"co",
"com",
"come",
"comes",
"concerning",
"consequently",
"consider",
"considering",
"contain",
"containing",
"contains",
"corresponding",
"could",
"couldn't",
"course",
"currently",
"definitely",
"described",
"despite",
"did",
"didn't",
"different",
"do",
"does",
"doesn't",
"doing",
"don't",
"done",
"down",
"downwards",
"during",
"each",
"edu",
"eg",
"eight",
"either",
"else",
"elsewhere",
"enough",
"entirely",
"especially",
"et",
"etc",
"even",
"ever",
"every",
"everybody",
"everyone",
"everything",
"everywhere",
"ex",
"exactly",
"example",
"except",
"far",
"few",
"fifth",
"first",
"five",
"followed",
"following",
"follows",
"for",
"former",
"formerly",
"forth",
"four",
"from",
"further",
"furthermore",
"get",
"gets",
"getting",
"given",
"gives",
"go",
"goes",
"going",
"gone",
"got",
"gotten",
"greetings",
"had",
"hadn't",
"happens",
"hardly",
"has",
"hasn't",
"have",
"haven't",
"having",
"he",
"he's",
"hello",
"help",
"hence",
"her",
"here",
"here's",
"hereafter",
"hereby",
"herein",
"hereupon",
"hers",
"herself",
"hi",
"him",
"himself",
"his",
"hither",
"hopefully",
"how",
"howbeit",
"however",
"i'd",
"i'll",
"i'm",
"i've",
"ie",
"if",
"ignored",
"immediate",
"in",
"inasmuch",
"inc",
"indeed",
"indicate",
"indicated",
"indicates",
"inner",
"insofar",
"instead",
"into",
"inward",
"is",
"isn't",
"it",
"it'd",
"it'll",
"it's",
"its",
"itself",
"just",
"keep",
"keeps",
"kept",
"know",
"known",
"knows",
"last",
"lately",
"later",
"latter",
"latterly",
"least",
"less",
"lest",
"let",
"let's",
"like",
"liked",
"likely",
"little",
"look",
"looking",
"looks",
"ltd",
"mainly",
"many",
"may",
"maybe",
"me",
"mean",
"meanwhile",
"merely",
"might",
"more",
"moreover",
"most",
"mostly",
"much",
"must",
"my",
"myself",
"name",
"namely",
"nd",
"near",
"nearly",
"necessary",
"need",
"needs",
"neither",
"never",
"nevertheless",
"new",
"next",
"nine",
"no",
"nobody",
"non",
"none",
"noone",
"nor",
"normally",
"not",
"nothing",
"novel",
"now",
"nowhere",
"obviously",
"of",
"off",
"often",
"oh",
"ok",
"okay",
"old",
"on",
"once",
"one",
"ones",
"only",
"onto",
"or",
"other",
"others",
"otherwise",
"ought",
"our",
"ours",
"ourselves",
"out",
"outside",
"over",
"overall",
"own",
"particular",
"particularly",
"per",
"perhaps",
"placed",
"please",
"plus",
"possible",
"presumably",
"probably",
"provides",
"que",
"quite",
"qv",
"rather",
"rd",
"re",
"really",
"reasonably",
"regarding",
"regardless",
"regards relatively",
"respectively",
"right",
"said",
"same",
"saw",
"say",
"saying",
"says",
"second",
"secondly",
"see",
"seeing",
"seem",
"seemed",
"seeming",
"seems",
"seen",
"self",
"selves",
"sensible",
"sent",
"serious",
"seriously",
"seven",
"several",
"shall",
"she",
"should",
"shouldn't",
"since",
"six",
"so",
"some",
"somebody",
"somehow",
"someone",
"something",
"sometime",
"sometimes",
"somewhat",
"somewhere",
"soon",
"sorry",
"specified",
"specify",
"specifying",
"still",
"sub",
"such",
"sup",
"sure",
"t's",
"take",
"taken",
"tell",
"tends",
"th",
"than",
"thank",
"thanks",
"thanx",
"that",
"that's",
"thats",
"the",
"their",
"theirs",
"them",
"themselves",
"then",
"thence",
"there",
"there's",
"thereafter",
"thereby",
"therefore",
"therein",
"theres",
"thereupon",
"these",
"they",
"they'd",
"they'll",
"they're",
"they've",
"think",
"third",
"this",
"thorough",
"thoroughly",
"those",
"though",
"three",
"through",
"throughout",
"thru",
"thus",
"to",
"together",
"too",
"took",
"toward",
"towards",
"tried",
"tries",
"truly",
"try",
"trying",
"twice",
"two",
"un",
"under",
"unfortunately",
"unless",
"unlikely",
"until",
"unto",
"up",
"upon",
"us",
"use",
"used",
"useful",
"uses",
"using",
"usually",
"value",
"various",
"very",
"via",
"viz",
"vs",
"want",
"wants",
"was",
"wasn't",
"way",
"we",
"we'd",
"we'll",
"we're",
"we've",
"welcome",
"well",
"went",
"were",
"weren't",
"what",
"what's",
"whatever",
"when",
"whence",
"whenever",
"where",
"where's",
"whereafter",
"whereas",
"whereby",
"wherein",
"whereupon",
"wherever",
"whether",
"which",
"while",
"whither",
"who",
"who's",
"whoever",
"whole",
"whom",
"whose",
"why",
"will",
"willing",
"wish",
"with",
"within",
"without",
"won't",
"wonder",
"would",
"wouldn't",
"yes",
"yet",
"you",
"you'd",
"you'll",
"you're",
"you've",
"your",
"yours",
"yourself",
"yourselves",
"zero"
]
def words():
    """Return the module-level stop-word list (the shared list itself, not a copy)."""
    return wordlist
| wordlist = ["a's", 'able', 'about', 'above', 'according', 'accordingly', 'across', 'actually', 'after', 'afterwards', 'again', 'against', "ain't", 'all', 'allow', 'allows', 'almost', 'alone', 'along', 'already', 'also', 'although', 'always', 'am', 'among', 'amongst', 'an', 'and', 'another', 'any', 'anybody', 'anyhow', 'anyone', 'anything', 'anyway', 'anyways', 'anywhere', 'apart', 'appear', 'appreciate', 'appropriate', 'are', "aren't", 'around', 'as', 'aside', 'ask', 'asking', 'associated', 'at', 'available', 'away', 'awfully', 'be', 'became', 'because', 'become', 'becomes', 'becoming', 'been', 'before', 'beforehand', 'behind', 'being', 'believe', 'below', 'beside', 'besides', 'best', 'better', 'between', 'beyond', 'both', 'brief', 'but', 'by', "c'mon", "c's", 'came', 'can', "can't", 'cannot', 'cant', 'cause', 'causes', 'certain', 'certainly', 'changes', 'clearly', 'co', 'com', 'come', 'comes', 'concerning', 'consequently', 'consider', 'considering', 'contain', 'containing', 'contains', 'corresponding', 'could', "couldn't", 'course', 'currently', 'definitely', 'described', 'despite', 'did', "didn't", 'different', 'do', 'does', "doesn't", 'doing', "don't", 'done', 'down', 'downwards', 'during', 'each', 'edu', 'eg', 'eight', 'either', 'else', 'elsewhere', 'enough', 'entirely', 'especially', 'et', 'etc', 'even', 'ever', 'every', 'everybody', 'everyone', 'everything', 'everywhere', 'ex', 'exactly', 'example', 'except', 'far', 'few', 'fifth', 'first', 'five', 'followed', 'following', 'follows', 'for', 'former', 'formerly', 'forth', 'four', 'from', 'further', 'furthermore', 'get', 'gets', 'getting', 'given', 'gives', 'go', 'goes', 'going', 'gone', 'got', 'gotten', 'greetings', 'had', "hadn't", 'happens', 'hardly', 'has', "hasn't", 'have', "haven't", 'having', 'he', "he's", 'hello', 'help', 'hence', 'her', 'here', "here's", 'hereafter', 'hereby', 'herein', 'hereupon', 'hers', 'herself', 'hi', 'him', 'himself', 'his', 'hither', 'hopefully', 'how', 'howbeit', 'however', 
"i'd", "i'll", "i'm", "i've", 'ie', 'if', 'ignored', 'immediate', 'in', 'inasmuch', 'inc', 'indeed', 'indicate', 'indicated', 'indicates', 'inner', 'insofar', 'instead', 'into', 'inward', 'is', "isn't", 'it', "it'd", "it'll", "it's", 'its', 'itself', 'just', 'keep', 'keeps', 'kept', 'know', 'known', 'knows', 'last', 'lately', 'later', 'latter', 'latterly', 'least', 'less', 'lest', 'let', "let's", 'like', 'liked', 'likely', 'little', 'look', 'looking', 'looks', 'ltd', 'mainly', 'many', 'may', 'maybe', 'me', 'mean', 'meanwhile', 'merely', 'might', 'more', 'moreover', 'most', 'mostly', 'much', 'must', 'my', 'myself', 'name', 'namely', 'nd', 'near', 'nearly', 'necessary', 'need', 'needs', 'neither', 'never', 'nevertheless', 'new', 'next', 'nine', 'no', 'nobody', 'non', 'none', 'noone', 'nor', 'normally', 'not', 'nothing', 'novel', 'now', 'nowhere', 'obviously', 'of', 'off', 'often', 'oh', 'ok', 'okay', 'old', 'on', 'once', 'one', 'ones', 'only', 'onto', 'or', 'other', 'others', 'otherwise', 'ought', 'our', 'ours', 'ourselves', 'out', 'outside', 'over', 'overall', 'own', 'particular', 'particularly', 'per', 'perhaps', 'placed', 'please', 'plus', 'possible', 'presumably', 'probably', 'provides', 'que', 'quite', 'qv', 'rather', 'rd', 're', 'really', 'reasonably', 'regarding', 'regardless', 'regards\trelatively', 'respectively', 'right', 'said', 'same', 'saw', 'say', 'saying', 'says', 'second', 'secondly', 'see', 'seeing', 'seem', 'seemed', 'seeming', 'seems', 'seen', 'self', 'selves', 'sensible', 'sent', 'serious', 'seriously', 'seven', 'several', 'shall', 'she', 'should', "shouldn't", 'since', 'six', 'so', 'some', 'somebody', 'somehow', 'someone', 'something', 'sometime', 'sometimes', 'somewhat', 'somewhere', 'soon', 'sorry', 'specified', 'specify', 'specifying', 'still', 'sub', 'such', 'sup', 'sure', "t's", 'take', 'taken', 'tell', 'tends', 'th', 'than', 'thank', 'thanks', 'thanx', 'that', "that's", 'thats', 'the', 'their', 'theirs', 'them', 'themselves', 'then', 
'thence', 'there', "there's", 'thereafter', 'thereby', 'therefore', 'therein', 'theres', 'thereupon', 'these', 'they', "they'd", "they'll", "they're", "they've", 'think', 'third', 'this', 'thorough', 'thoroughly', 'those', 'though', 'three', 'through', 'throughout', 'thru', 'thus', 'to', 'together', 'too', 'took', 'toward', 'towards', 'tried', 'tries', 'truly', 'try', 'trying', 'twice', 'two', 'un', 'under', 'unfortunately', 'unless', 'unlikely', 'until', 'unto', 'up', 'upon', 'us', 'use', 'used', 'useful', 'uses', 'using', 'usually', 'value', 'various', 'very', 'via', 'viz', 'vs', 'want', 'wants', 'was', "wasn't", 'way', 'we', "we'd", "we'll", "we're", "we've", 'welcome', 'well', 'went', 'were', "weren't", 'what', "what's", 'whatever', 'when', 'whence', 'whenever', 'where', "where's", 'whereafter', 'whereas', 'whereby', 'wherein', 'whereupon', 'wherever', 'whether', 'which', 'while', 'whither', 'who', "who's", 'whoever', 'whole', 'whom', 'whose', 'why', 'will', 'willing', 'wish', 'with', 'within', 'without', "won't", 'wonder', 'would', "wouldn't", 'yes', 'yet', 'you', "you'd", "you'll", "you're", "you've", 'your', 'yours', 'yourself', 'yourselves', 'zero']
def words():
return wordlist |
sample_trajectory = [[[8.29394929e-01, 2.94382693e-05, 1.24370992e+00], [0.8300607, 0.00321705, 1.24627523],
[0.83197002, 0.01345206, 1.25535293], [0.83280536, 0.02711211, 1.26502481],
[0.83431212, 0.04126721, 1.27488879], [0.83557291, 0.05575593, 1.28517274],
[0.83835516, 0.07094685, 1.29766037], [0.84018236, 0.0848757, 1.30992404],
[0.84367176, 0.09899109, 1.32165939], [0.84780989, 0.11275561, 1.332425],
[0.85343812, 0.12216536, 1.34048073], [0.85929401, 0.12485794, 1.34398368],
[0.86139773, 0.12570567, 1.34577036], [0.86072455, 0.12971565, 1.34570028],
[0.86142171, 0.13086022, 1.34607923], [0.86252171, 0.13226137, 1.34576342],
[0.86131819, 0.13343436, 1.34602691], [0.86237162, 0.13476304, 1.34625571],
[0.86233475, 0.13646685, 1.34643762], [0.86257895, 0.13770382, 1.34626134],
[0.86327492, 0.13898366, 1.34679944], [0.86351096, 0.14051317, 1.34688228],
[0.86460062, 0.14136772, 1.34739374], [0.86432451, 0.14269744, 1.34746041],
[0.86519599, 0.14313221, 1.34789781], [0.86501197, 0.1444806, 1.34814055],
[0.86577445, 0.14467521, 1.34821045], [0.86546423, 0.14557526, 1.34840742],
[0.86614776, 0.14591165, 1.34892248], [0.86594768, 0.14689357, 1.34916195],
[0.86666253, 0.14705808, 1.3493557], [0.86599142, 0.14764966, 1.34944308],
[0.86593019, 0.14761498, 1.34965421], [0.86554094, 0.14839651, 1.35002927],
[0.86609249, 0.14847811, 1.35004771], [0.86562717, 0.14882716, 1.35021255],
[0.86560691, 0.14878882, 1.35066697], [0.86523452, 0.14943953, 1.35077779],
[0.86584348, 0.14940555, 1.35102009], [0.86522901, 0.14976554, 1.35107163],
[0.86517114, 0.14941758, 1.35133674], [0.86440309, 0.15010623, 1.35173215],
[0.86470934, 0.14982907, 1.35153207], [0.86417685, 0.15034566, 1.35204203],
[0.86475626, 0.15039388, 1.35211787], [0.86454982, 0.15058402, 1.35209656],
[0.86462931, 0.15062848, 1.35254998], [0.86445455, 0.15109283, 1.35246659],
[0.86493616, 0.15096502, 1.35264486], [0.86440645, 0.15125336, 1.35257592]]
, [[8.29394372e-01, 5.02989856e-06, 1.24369044e+00], [8.28448860e-01, 5.21108705e-04, 1.24412433e+00],
[0.82821679, -0.00266306, 1.24340615], [0.82712893, -0.01278852, 1.24238184],
[0.8242383, -0.02746302, 1.24138853], [0.82092432, -0.04040762, 1.24021633],
[0.81676534, -0.05493966, 1.23709498], [0.81203041, -0.06978358, 1.23090388],
[0.81040154, -0.08357767, 1.22607369], [0.80996861, -0.09747925, 1.22250611],
[0.80839556, -0.11099092, 1.21861742], [0.80764377, -0.1220292, 1.21606453],
[0.80659937, -0.12782397, 1.21509924], [0.80421561, -0.12897047, 1.21389321],
[0.80543509, -0.13041502, 1.21218545], [0.80719798, -0.13077158, 1.20977956],
[0.80802402, -0.13106103, 1.20923984], [0.80989696, -0.13111801, 1.20648537],
[0.81088521, -0.13150426, 1.20611638], [0.81328081, -0.13140379, 1.20439828],
[0.81347524, -0.13236595, 1.20291265], [0.81442975, -0.13240708, 1.20165529],
[0.81439796, -0.13283523, 1.20113632], [0.8144709, -0.1324097, 1.20073386],
[0.81472718, -0.13277184, 1.20014908], [0.81455051, -0.13271349, 1.19998435],
[0.81461205, -0.13252529, 1.19982223], [0.81469605, -0.13255599, 1.19956114],
[0.8143656, -0.13258219, 1.19952868], [0.81439902, -0.1324634, 1.1993633], [0.81443352, -0.13243586, 1.1992618],
[0.81437157, -0.13234246, 1.19916844], [0.81439756, -0.13226621, 1.19910854],
[0.81441603, -0.13223594, 1.19907973], [0.81433559, -0.13216323, 1.19904629],
[0.81428657, -0.1320852, 1.19900435], [0.8142586, -0.13203865, 1.19895876], [0.8142406, -0.13199832, 1.19892711],
[0.81423518, -0.13195133, 1.19891042], [0.81423471, -0.13192349, 1.19890408],
[0.81422569, -0.1319086, 1.1988938], [0.81420996, -0.13187683, 1.19887342],
[0.81420364, -0.13185729, 1.19886479], [0.81419859, -0.13183793, 1.19885612],
[0.81419636, -0.13183512, 1.1988541], [0.81419609, -0.13182733, 1.19885183],
[0.81419618, -0.13182156, 1.19885077], [0.81419647, -0.13181636, 1.1988502],
[0.81419603, -0.13181478, 1.1988494], [0.81419583, -0.13181186, 1.19884878]]
, [[8.29396978e-01, 1.19098267e-06, 1.24374612e+00], [8.29385665e-01, -1.86096731e-05, 1.24374961e+00],
[0.83181454, -0.00296723, 1.24262025], [0.83897802, -0.01204637, 1.23870155],
[0.84804322, -0.02637806, 1.23229918], [0.85821909, -0.04112286, 1.22443261],
[0.86857714, -0.0544144, 1.21659045], [0.88077871, -0.0684697, 1.20824494],
[0.89227427, -0.08258709, 1.20086125], [0.902352, -0.09639873, 1.1955734], [0.91087973, -0.11036591, 1.19195383],
[0.91774859, -0.12421509, 1.18906245], [0.9213203, -0.133212, 1.18760226], [0.92225532, -0.13533401, 1.18683914],
[0.92271873, -0.13441819, 1.18610231], [0.92629903, -0.13659682, 1.18384976],
[0.9275226, -0.13732566, 1.18130875], [0.92780813, -0.13798298, 1.18097915],
[0.93048265, -0.13851559, 1.17977442], [0.93180621, -0.13885864, 1.17854726],
[0.93353268, -0.13944112, 1.17652771], [0.93455178, -0.13987549, 1.17493602],
[0.93528591, -0.14017046, 1.17416708], [0.9360892, -0.14048572, 1.17327655],
[0.93697368, -0.14082654, 1.17235835], [0.93770095, -0.14103281, 1.17180925],
[0.93822995, -0.14154182, 1.17133442], [0.93854101, -0.14166165, 1.17120383],
[0.93851431, -0.14218339, 1.170878], [0.93819314, -0.1424812, 1.17085096], [0.93810252, -0.14294232, 1.17071469],
[0.93798694, -0.14300315, 1.17063908], [0.93824079, -0.14332302, 1.17040369],
[0.93873968, -0.14327306, 1.17039663], [0.93894714, -0.14349533, 1.17027991],
[0.93895046, -0.14351914, 1.17031255], [0.93880131, -0.1439269, 1.1702054],
[0.93847315, -0.14393894, 1.17033768], [0.93858538, -0.1441697, 1.16997514],
[0.93843099, -0.14412295, 1.16999137], [0.93868804, -0.14428903, 1.16989184],
[0.93872778, -0.14416847, 1.16993376], [0.93880286, -0.14437759, 1.16981702],
[0.93860238, -0.14431987, 1.16976048], [0.93861807, -0.14447591, 1.16969077],
[0.93846924, -0.14445969, 1.16964596], [0.93854263, -0.14459339, 1.16963862],
[0.93855682, -0.14458076, 1.16964518], [0.93869078, -0.14460289, 1.16967438],
[0.93878573, -0.14462207, 1.16959673]]
, [[8.29407186e-01, -9.05759077e-06, 1.24368865e+00], [8.29452381e-01, -1.63704649e-04, 1.24357758e+00],
[8.25965077e-01, 2.61926546e-04, 1.24626125e+00], [0.81733529, 0.00174526, 1.25507639],
[0.80426213, 0.00362642, 1.26770935], [0.79167872, 0.0042421, 1.2788831], [0.77884486, 0.00425323, 1.29066411],
[0.76562033, 0.00397443, 1.30224383], [0.75246375, 0.00297646, 1.31192952], [0.73925773, 0.00222876, 1.31881364],
[0.72935091, 0.0015718, 1.32334408], [7.24226016e-01, 1.14350341e-03, 1.32602297e+00],
[7.22519410e-01, -1.47608797e-04, 1.32901491e+00], [7.23386890e-01, 9.03067432e-04, 1.32840265e+00],
[0.72517122, 0.0015991, 1.32902918], [7.27716689e-01, 1.23742022e-03, 1.33002939e+00],
[0.72845183, 0.00218048, 1.330628], [0.73016814, 0.00230491, 1.33130849], [0.73068608, 0.00276825, 1.33147273],
[0.73058817, 0.0026997, 1.33247778], [0.73087955, 0.00327157, 1.33278831], [0.7311847, 0.00344362, 1.33340541],
[0.73196251, 0.00363454, 1.33359357], [0.73037666, 0.00355182, 1.33388385], [0.73017486, 0.00311582, 1.33464425],
[0.729715, 0.00375855, 1.33492001], [0.73145033, 0.00389886, 1.33500413], [0.73134099, 0.00342494, 1.33517212],
[0.73106361, 0.00342293, 1.33576168], [0.73126924, 0.00411912, 1.33598262], [0.73231772, 0.00415066, 1.33587838],
[0.73068793, 0.00380189, 1.33574672], [0.73025543, 0.00317184, 1.33640307], [0.73089559, 0.004183, 1.33681776],
[0.73151253, 0.0044662, 1.3363534], [0.73176347, 0.00401998, 1.33673888], [0.7313257, 0.00468158, 1.3370429],
[0.73209111, 0.00487611, 1.33683827], [0.73184313, 0.00434979, 1.33684753], [0.73124175, 0.00449397, 1.33768667],
[0.73225456, 0.00543768, 1.3373894], [0.73236785, 0.00432575, 1.3374303], [0.73103295, 0.00467082, 1.3380206],
[0.73162836, 0.00503268, 1.33785837], [0.73221498, 0.0047949, 1.33773502], [0.73159943, 0.00474566, 1.33783584],
[0.73163975, 0.00478752, 1.33809036], [0.73227203, 0.00533049, 1.33800575], [0.73268811, 0.00471999, 1.33770073],
[0.731356, 0.00470975, 1.33810571]]
, [[8.29462825e-01, -3.02466188e-06, 1.24368449e+00], [8.30306652e-01, 5.36592938e-04, 1.24351743e+00],
[0.82627275, 0.00263688, 1.24290417], [0.81688637, 0.01068543, 1.24264759], [0.80534614, 0.02380721, 1.24175146],
[0.79290312, 0.03726687, 1.23912056], [0.77948659, 0.05097065, 1.2350255], [0.76523324, 0.06504495, 1.22959431],
[0.75226822, 0.07925581, 1.22512592], [0.74237365, 0.09353404, 1.2197175], [0.73913879, 0.10222336, 1.21704451],
[0.7394468, 0.10385065, 1.21247085], [0.7333889, 0.10470436, 1.20406516], [0.7259056, 0.10553633, 1.1994055],
[0.72469589, 0.10628135, 1.19902167], [0.7263446, 0.10798226, 1.19681979], [0.72525853, 0.10857415, 1.1928742],
[0.72563646, 0.10940517, 1.18991156], [0.72578947, 0.10959028, 1.19065591], [0.72531063, 0.11210447, 1.1887599],
[0.72605692, 0.11379858, 1.187013], [0.72639765, 0.1122605, 1.18610391], [0.72399292, 0.11456986, 1.18554155],
[0.72316012, 0.11516881, 1.18459081], [0.72574085, 0.11555135, 1.18364884], [0.72428049, 0.11549468, 1.18184939],
[0.72499676, 0.11608238, 1.18136709], [0.72520263, 0.1170543, 1.1805268], [0.72600627, 0.11705583, 1.17992443],
[0.72495808, 0.1178273, 1.17975925], [0.726027, 0.1182533, 1.17909275], [0.7251307, 0.11844033, 1.17929497],
[0.72508602, 0.11910898, 1.17888568], [0.72576297, 0.11895758, 1.17872822], [0.72457023, 0.11952085, 1.17893918],
[0.72523126, 0.11972244, 1.17814958], [0.72461038, 0.11958309, 1.17865173], [0.72451839, 0.12012874, 1.17848082],
[0.7254761, 0.12019711, 1.1782561], [0.72443222, 0.12010784, 1.17859375], [0.72464154, 0.12076384, 1.17830658],
[0.72562088, 0.12027699, 1.17823457], [0.72430732, 0.12081787, 1.17862396], [0.72516592, 0.12078771, 1.17773548],
[0.72409672, 0.12049726, 1.178552], [0.72442099, 0.12119749, 1.17828582], [0.72558587, 0.12082985, 1.17814712],
[0.7243532, 0.12087931, 1.1785126], [0.72489004, 0.12125501, 1.17807636], [0.72473186, 0.12086408, 1.17852156]]]
| sample_trajectory = [[[0.829394929, 2.94382693e-05, 1.24370992], [0.8300607, 0.00321705, 1.24627523], [0.83197002, 0.01345206, 1.25535293], [0.83280536, 0.02711211, 1.26502481], [0.83431212, 0.04126721, 1.27488879], [0.83557291, 0.05575593, 1.28517274], [0.83835516, 0.07094685, 1.29766037], [0.84018236, 0.0848757, 1.30992404], [0.84367176, 0.09899109, 1.32165939], [0.84780989, 0.11275561, 1.332425], [0.85343812, 0.12216536, 1.34048073], [0.85929401, 0.12485794, 1.34398368], [0.86139773, 0.12570567, 1.34577036], [0.86072455, 0.12971565, 1.34570028], [0.86142171, 0.13086022, 1.34607923], [0.86252171, 0.13226137, 1.34576342], [0.86131819, 0.13343436, 1.34602691], [0.86237162, 0.13476304, 1.34625571], [0.86233475, 0.13646685, 1.34643762], [0.86257895, 0.13770382, 1.34626134], [0.86327492, 0.13898366, 1.34679944], [0.86351096, 0.14051317, 1.34688228], [0.86460062, 0.14136772, 1.34739374], [0.86432451, 0.14269744, 1.34746041], [0.86519599, 0.14313221, 1.34789781], [0.86501197, 0.1444806, 1.34814055], [0.86577445, 0.14467521, 1.34821045], [0.86546423, 0.14557526, 1.34840742], [0.86614776, 0.14591165, 1.34892248], [0.86594768, 0.14689357, 1.34916195], [0.86666253, 0.14705808, 1.3493557], [0.86599142, 0.14764966, 1.34944308], [0.86593019, 0.14761498, 1.34965421], [0.86554094, 0.14839651, 1.35002927], [0.86609249, 0.14847811, 1.35004771], [0.86562717, 0.14882716, 1.35021255], [0.86560691, 0.14878882, 1.35066697], [0.86523452, 0.14943953, 1.35077779], [0.86584348, 0.14940555, 1.35102009], [0.86522901, 0.14976554, 1.35107163], [0.86517114, 0.14941758, 1.35133674], [0.86440309, 0.15010623, 1.35173215], [0.86470934, 0.14982907, 1.35153207], [0.86417685, 0.15034566, 1.35204203], [0.86475626, 0.15039388, 1.35211787], [0.86454982, 0.15058402, 1.35209656], [0.86462931, 0.15062848, 1.35254998], [0.86445455, 0.15109283, 1.35246659], [0.86493616, 0.15096502, 1.35264486], [0.86440645, 0.15125336, 1.35257592]], [[0.829394372, 5.02989856e-06, 1.24369044], [0.82844886, 0.000521108705, 
1.24412433], [0.82821679, -0.00266306, 1.24340615], [0.82712893, -0.01278852, 1.24238184], [0.8242383, -0.02746302, 1.24138853], [0.82092432, -0.04040762, 1.24021633], [0.81676534, -0.05493966, 1.23709498], [0.81203041, -0.06978358, 1.23090388], [0.81040154, -0.08357767, 1.22607369], [0.80996861, -0.09747925, 1.22250611], [0.80839556, -0.11099092, 1.21861742], [0.80764377, -0.1220292, 1.21606453], [0.80659937, -0.12782397, 1.21509924], [0.80421561, -0.12897047, 1.21389321], [0.80543509, -0.13041502, 1.21218545], [0.80719798, -0.13077158, 1.20977956], [0.80802402, -0.13106103, 1.20923984], [0.80989696, -0.13111801, 1.20648537], [0.81088521, -0.13150426, 1.20611638], [0.81328081, -0.13140379, 1.20439828], [0.81347524, -0.13236595, 1.20291265], [0.81442975, -0.13240708, 1.20165529], [0.81439796, -0.13283523, 1.20113632], [0.8144709, -0.1324097, 1.20073386], [0.81472718, -0.13277184, 1.20014908], [0.81455051, -0.13271349, 1.19998435], [0.81461205, -0.13252529, 1.19982223], [0.81469605, -0.13255599, 1.19956114], [0.8143656, -0.13258219, 1.19952868], [0.81439902, -0.1324634, 1.1993633], [0.81443352, -0.13243586, 1.1992618], [0.81437157, -0.13234246, 1.19916844], [0.81439756, -0.13226621, 1.19910854], [0.81441603, -0.13223594, 1.19907973], [0.81433559, -0.13216323, 1.19904629], [0.81428657, -0.1320852, 1.19900435], [0.8142586, -0.13203865, 1.19895876], [0.8142406, -0.13199832, 1.19892711], [0.81423518, -0.13195133, 1.19891042], [0.81423471, -0.13192349, 1.19890408], [0.81422569, -0.1319086, 1.1988938], [0.81420996, -0.13187683, 1.19887342], [0.81420364, -0.13185729, 1.19886479], [0.81419859, -0.13183793, 1.19885612], [0.81419636, -0.13183512, 1.1988541], [0.81419609, -0.13182733, 1.19885183], [0.81419618, -0.13182156, 1.19885077], [0.81419647, -0.13181636, 1.1988502], [0.81419603, -0.13181478, 1.1988494], [0.81419583, -0.13181186, 1.19884878]], [[0.829396978, 1.19098267e-06, 1.24374612], [0.829385665, -1.86096731e-05, 1.24374961], [0.83181454, -0.00296723, 1.24262025], 
[0.83897802, -0.01204637, 1.23870155], [0.84804322, -0.02637806, 1.23229918], [0.85821909, -0.04112286, 1.22443261], [0.86857714, -0.0544144, 1.21659045], [0.88077871, -0.0684697, 1.20824494], [0.89227427, -0.08258709, 1.20086125], [0.902352, -0.09639873, 1.1955734], [0.91087973, -0.11036591, 1.19195383], [0.91774859, -0.12421509, 1.18906245], [0.9213203, -0.133212, 1.18760226], [0.92225532, -0.13533401, 1.18683914], [0.92271873, -0.13441819, 1.18610231], [0.92629903, -0.13659682, 1.18384976], [0.9275226, -0.13732566, 1.18130875], [0.92780813, -0.13798298, 1.18097915], [0.93048265, -0.13851559, 1.17977442], [0.93180621, -0.13885864, 1.17854726], [0.93353268, -0.13944112, 1.17652771], [0.93455178, -0.13987549, 1.17493602], [0.93528591, -0.14017046, 1.17416708], [0.9360892, -0.14048572, 1.17327655], [0.93697368, -0.14082654, 1.17235835], [0.93770095, -0.14103281, 1.17180925], [0.93822995, -0.14154182, 1.17133442], [0.93854101, -0.14166165, 1.17120383], [0.93851431, -0.14218339, 1.170878], [0.93819314, -0.1424812, 1.17085096], [0.93810252, -0.14294232, 1.17071469], [0.93798694, -0.14300315, 1.17063908], [0.93824079, -0.14332302, 1.17040369], [0.93873968, -0.14327306, 1.17039663], [0.93894714, -0.14349533, 1.17027991], [0.93895046, -0.14351914, 1.17031255], [0.93880131, -0.1439269, 1.1702054], [0.93847315, -0.14393894, 1.17033768], [0.93858538, -0.1441697, 1.16997514], [0.93843099, -0.14412295, 1.16999137], [0.93868804, -0.14428903, 1.16989184], [0.93872778, -0.14416847, 1.16993376], [0.93880286, -0.14437759, 1.16981702], [0.93860238, -0.14431987, 1.16976048], [0.93861807, -0.14447591, 1.16969077], [0.93846924, -0.14445969, 1.16964596], [0.93854263, -0.14459339, 1.16963862], [0.93855682, -0.14458076, 1.16964518], [0.93869078, -0.14460289, 1.16967438], [0.93878573, -0.14462207, 1.16959673]], [[0.829407186, -9.05759077e-06, 1.24368865], [0.829452381, -0.000163704649, 1.24357758], [0.825965077, 0.000261926546, 1.24626125], [0.81733529, 0.00174526, 1.25507639], 
[0.80426213, 0.00362642, 1.26770935], [0.79167872, 0.0042421, 1.2788831], [0.77884486, 0.00425323, 1.29066411], [0.76562033, 0.00397443, 1.30224383], [0.75246375, 0.00297646, 1.31192952], [0.73925773, 0.00222876, 1.31881364], [0.72935091, 0.0015718, 1.32334408], [0.724226016, 0.00114350341, 1.32602297], [0.72251941, -0.000147608797, 1.32901491], [0.72338689, 0.000903067432, 1.32840265], [0.72517122, 0.0015991, 1.32902918], [0.727716689, 0.00123742022, 1.33002939], [0.72845183, 0.00218048, 1.330628], [0.73016814, 0.00230491, 1.33130849], [0.73068608, 0.00276825, 1.33147273], [0.73058817, 0.0026997, 1.33247778], [0.73087955, 0.00327157, 1.33278831], [0.7311847, 0.00344362, 1.33340541], [0.73196251, 0.00363454, 1.33359357], [0.73037666, 0.00355182, 1.33388385], [0.73017486, 0.00311582, 1.33464425], [0.729715, 0.00375855, 1.33492001], [0.73145033, 0.00389886, 1.33500413], [0.73134099, 0.00342494, 1.33517212], [0.73106361, 0.00342293, 1.33576168], [0.73126924, 0.00411912, 1.33598262], [0.73231772, 0.00415066, 1.33587838], [0.73068793, 0.00380189, 1.33574672], [0.73025543, 0.00317184, 1.33640307], [0.73089559, 0.004183, 1.33681776], [0.73151253, 0.0044662, 1.3363534], [0.73176347, 0.00401998, 1.33673888], [0.7313257, 0.00468158, 1.3370429], [0.73209111, 0.00487611, 1.33683827], [0.73184313, 0.00434979, 1.33684753], [0.73124175, 0.00449397, 1.33768667], [0.73225456, 0.00543768, 1.3373894], [0.73236785, 0.00432575, 1.3374303], [0.73103295, 0.00467082, 1.3380206], [0.73162836, 0.00503268, 1.33785837], [0.73221498, 0.0047949, 1.33773502], [0.73159943, 0.00474566, 1.33783584], [0.73163975, 0.00478752, 1.33809036], [0.73227203, 0.00533049, 1.33800575], [0.73268811, 0.00471999, 1.33770073], [0.731356, 0.00470975, 1.33810571]], [[0.829462825, -3.02466188e-06, 1.24368449], [0.830306652, 0.000536592938, 1.24351743], [0.82627275, 0.00263688, 1.24290417], [0.81688637, 0.01068543, 1.24264759], [0.80534614, 0.02380721, 1.24175146], [0.79290312, 0.03726687, 1.23912056], [0.77948659, 
0.05097065, 1.2350255], [0.76523324, 0.06504495, 1.22959431], [0.75226822, 0.07925581, 1.22512592], [0.74237365, 0.09353404, 1.2197175], [0.73913879, 0.10222336, 1.21704451], [0.7394468, 0.10385065, 1.21247085], [0.7333889, 0.10470436, 1.20406516], [0.7259056, 0.10553633, 1.1994055], [0.72469589, 0.10628135, 1.19902167], [0.7263446, 0.10798226, 1.19681979], [0.72525853, 0.10857415, 1.1928742], [0.72563646, 0.10940517, 1.18991156], [0.72578947, 0.10959028, 1.19065591], [0.72531063, 0.11210447, 1.1887599], [0.72605692, 0.11379858, 1.187013], [0.72639765, 0.1122605, 1.18610391], [0.72399292, 0.11456986, 1.18554155], [0.72316012, 0.11516881, 1.18459081], [0.72574085, 0.11555135, 1.18364884], [0.72428049, 0.11549468, 1.18184939], [0.72499676, 0.11608238, 1.18136709], [0.72520263, 0.1170543, 1.1805268], [0.72600627, 0.11705583, 1.17992443], [0.72495808, 0.1178273, 1.17975925], [0.726027, 0.1182533, 1.17909275], [0.7251307, 0.11844033, 1.17929497], [0.72508602, 0.11910898, 1.17888568], [0.72576297, 0.11895758, 1.17872822], [0.72457023, 0.11952085, 1.17893918], [0.72523126, 0.11972244, 1.17814958], [0.72461038, 0.11958309, 1.17865173], [0.72451839, 0.12012874, 1.17848082], [0.7254761, 0.12019711, 1.1782561], [0.72443222, 0.12010784, 1.17859375], [0.72464154, 0.12076384, 1.17830658], [0.72562088, 0.12027699, 1.17823457], [0.72430732, 0.12081787, 1.17862396], [0.72516592, 0.12078771, 1.17773548], [0.72409672, 0.12049726, 1.178552], [0.72442099, 0.12119749, 1.17828582], [0.72558587, 0.12082985, 1.17814712], [0.7243532, 0.12087931, 1.1785126], [0.72489004, 0.12125501, 1.17807636], [0.72473186, 0.12086408, 1.17852156]]] |
###
# Copyright 2019 Hewlett Packard Enterprise, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
# -*- coding: utf-8 -*-
"""This module contains common helper functions used by several pmem commands"""
class PmemHelpers(object):
    """
    Class containing common helper functions used by several pmem commands
    """

    @staticmethod
    def py3_round(number, precision):
        """
        Rounds numbers in accordance with the Python 3 round specification.

        :param number: number to be rounded
        :type number: floating point number
        :param precision: number of decimal places the number should be rounded off to
        :type precision: integer
        :return: rounded off number
        """
        # Exact .5 ties: halve, round, then double so the result matches
        # Python 3's round-half-to-even behaviour.
        if abs(round(number) - number) == 0.5:
            return 2.0 * round(number / 2.0, precision)
        return round(number, precision)

    @staticmethod
    def parse_dimm_id(dimm_id_list):
        """
        Converts DIMM IDs from the 'X@Y' format
        to the 'PROC X DIMM Y' format.

        :param dimm_id_list: DIMM IDs in the 'X@Y' format
        :type dimm_id_list: list
        :return: list of DIMM IDs in the 'PROC X DIMM Y' format
        """
        converted = []
        for entry in dimm_id_list:
            parts = entry.split("@")
            converted.append("PROC " + parts[0] + " DIMM " + parts[1])
        return converted

    @staticmethod
    def get_pmem_members(memory_members):
        """
        Filters persistent memory members from memory resources.

        :param memory_members: members of memory collection resource
        :type memory_members: list of members
        :returns: list of persistent memory members if found else empty list,
                  and the set of their device locators
        """
        matches = list()
        locators = set()
        for entry in memory_members:
            # "PMM" identifies a persistent memory module in the OEM data.
            if entry.get("Oem").get("Hpe").get("BaseModuleType") == "PMM":
                matches.append(entry)
                locators.add(entry.get("DeviceLocator"))
        return matches, locators

    @staticmethod
    def get_non_aep_members(memory_members):
        """
        Filters dram memory members from memory resources.

        :param memory_members: members of memory collection resource
        :type memory_members: list of members
        :returns: list of dram memory members if found else empty list,
                  and the set of their device locators
        """
        matches = list()
        locators = set()
        for entry in memory_members:
            # Anything that is not a "PMM" module is treated as DRAM here.
            if entry.get("Oem").get("Hpe").get("BaseModuleType") != "PMM":
                matches.append(entry)
                locators.add(entry.get("DeviceLocator"))
        return matches, locators

    @staticmethod
    def json_to_text(dictionary):
        """
        Converts json to string format.

        :param dictionary: json to be converted
        :return: list containing the string
        """
        # One "\nkey:value" fragment per entry, concatenated into one string.
        fragments = ["\n" + key + ":" + str(value) for key, value in dictionary.items()]
        return ["".join(fragments)]

    @staticmethod
    def location_format_converter(location_list):
        """
        Converts location format from 'PROC X DIMM Y' to 'X@Y'.

        :param location_list: list of locations of the format 'PROC X DIMM Y'
        :type location_list: list of strings
        :returns: string of locations in the 'X@Y' format (comma separated)
                  and a 'PROC <n>' string built from the first character
        """
        result = ""
        for location in location_list:
            tokens = location.split(" ")
            result += tokens[1] + "@" + tokens[3]
            # Identity comparison kept from the original: only the final list
            # element (by identity) skips the trailing separator.
            if location is not location_list[-1]:
                result += ", "
        # NOTE(review): the second value uses only the first character of the
        # converted string, so it assumes a single-digit processor number --
        # confirm against the callers before changing.
        return result, "PROC " + result[0]

    @staticmethod
    def compare_id(id1, id2):
        """
        Compares two ids, ignoring one trailing '/' on either side.

        :param id1: first id to be compared
        :param id2: second id to be compared
        :return: True if ids are same else False
        """
        first = id1[:-1] if id1[-1] == "/" else id1
        second = id2[:-1] if id2[-1] == "/" else id2
        return first == second
| """This module contains common helper functions used by several pmem commands"""
class Pmemhelpers(object):
"""
Class containing common helper functions used by several pmem commands
"""
@staticmethod
def py3_round(number, precision):
"""
Rounds numbers in accordance with the Python 3 round specification
:param number: number to be rounded
:type number: floating point number
:param precision: Number of decimal places the number should be rounded off to
:type precision: integer
:return: rounded off number
"""
if abs(round(number) - number) == 0.5:
return 2.0 * round(number / 2.0, precision)
return round(number, precision)
@staticmethod
def parse_dimm_id(dimm_id_list):
"""
Converts DIMM IDs from the 'X@Y' format
to the 'PROC X DIMM Y' format
:param dimm_id_list: DIMM IDs in the 'X@Y' format
:type dimm_id_list: list
:return: list of DIMM IDs in the 'PROC X DIMM Y' format
"""
parsed_list = list()
for dimm_id in dimm_id_list:
temp = dimm_id.split('@')
parsed_list.append('PROC ' + temp[0] + ' DIMM ' + temp[1])
return parsed_list
@staticmethod
def get_pmem_members(memory_members):
"""
Filters persistent memory members from memory resources
:param memory_members: members of memory collection resource
:type memory_members: list of members
:returns: list of persistent memory members if found else empty list
"""
base_module_type = 'PMM'
pmem_members = list()
pmem_dimm_id = set()
for member in memory_members:
memory_type = member.get('Oem').get('Hpe').get('BaseModuleType')
if memory_type == base_module_type:
pmem_members.append(member)
pmem_dimm_id.add(member.get('DeviceLocator'))
return (pmem_members, pmem_dimm_id)
@staticmethod
def get_non_aep_members(memory_members):
"""
Filters dram memory members from memory resources
:param memory_members: members of memory collection resource
:type memory_members: list of members
:returns: list of dram memory members if found else empty list
"""
base_module_type = 'PMM'
dram_members = list()
dram_dimm_id = set()
for member in memory_members:
memory_type = member.get('Oem').get('Hpe').get('BaseModuleType')
if memory_type != base_module_type:
dram_members.append(member)
dram_dimm_id.add(member.get('DeviceLocator'))
return (dram_members, dram_dimm_id)
@staticmethod
def json_to_text(dictionary):
"""
Converts json to string format
:param dictionary: json to be converted
:return: list containing the string
"""
output = ''
for (key, value) in dictionary.items():
item = '\n' + key + ':' + str(value)
output += item
return [output]
@staticmethod
def location_format_converter(location_list):
"""
Converts location format from 'PROC X DIMM Y' to 'X@Y'
:param location_list: list of locations of the format 'PROC X DIMM Y'
:type location_list: list of strings
:returns: string of locations in the 'X@Y' format (comma separated)
"""
converted_str = ''
for location in location_list:
temp = location.split(' ')
converted_str += temp[1] + '@' + temp[3]
if location is not location_list[-1]:
converted_str += ', '
return (converted_str, 'PROC ' + converted_str[0])
@staticmethod
def compare_id(id1, id2):
"""
Compares two ids
:param id1: first id to be compared
:param id2: second id to be compared
:return: True if ids are same else False
"""
if id1[-1] == '/':
id1 = id1[:-1]
if id2[-1] == '/':
id2 = id2[:-1]
return id1 == id2 |
# https://binarysearch.com/
# GGA 2020.12.04
"""
User Problem
You Have:
You Need:
You Must:
Input/Output Example:
Solution (Feature/Product)
(Edge cases)
Reflect On, Improvements, Comparisons with other Solutions:
I learned:
"""
# function counting 'only-children' in tree
class Solution:
    def solve(self, root):
        """Return the number of 'only children' in the tree: nodes that
        have exactly one child present."""
        # Empty subtree contributes nothing.
        if not root:
            return 0
        # A node counts when exactly one of its two child slots is empty.
        missing = (root.left is None) + (root.right is None)
        here = 1 if missing == 1 else 0
        # Recurse into both subtrees and accumulate.
        return here + self.solve(root.left) + self.solve(root.right)
############################
# Functions to Print Output
############################
# Sample Print Solution
class Tree:
    """Binary-tree node: a value plus optional left/right children."""

    def __init__(self, val, left=None, right=None):
        # Store the payload and both child links (None means "no child").
        self.val = val
        self.left = left
        self.right = right
"""
Rewrite Diagram:
1. [ -> Tree(
2. ] -> )
"""
# Nested-list sketch of the sample tree ([value, left, right]); kept only as
# a visual aid for the Tree(...) construction on the next line.
tree_diagram_list = [0, [4, None, None], [2, [1, [3, None, None], None], None]]
# Sample input: 0 has children 4 and 2; 2 has only child 1; 1 has only child 3.
root = Tree(0, Tree(4, None, None), Tree(2, Tree(1, Tree(3, None, None), None), None))
# print whole tree
def print_tree(root):
    """Print the tree in pre-order: node value, then left subtree, then right."""
    print(root.val)
    # Visit children in left-then-right order, skipping missing ones.
    for child in (root.left, root.right):
        if child:
            print_tree(child)
    return None
# Dump the sample tree (pre-order) so its structure can be inspected.
print_tree(root)
# The solver takes the root node as its only input.
test_input = root
run_test = Solution()
print("\nOutput =", run_test.solve(test_input))
| """
User Problem
You Have:
You Need:
You Must:
Input/Output Example:
Solution (Feature/Product)
(Edge cases)
Reflect On, Improvements, Comparisons with other Solutions:
I learned:
"""
class Solution:
def solve(self, root):
local_counter = 0
cumulative_total_counter = 0
if not root:
return 0
if root.right == None and root.left != None or (root.left == None and root.right != None):
local_counter += 1
cumulative_total_counter += local_counter + self.solve(root.right) + self.solve(root.left)
return cumulative_total_counter
class Tree:
def __init__(self, val, left=None, right=None):
self.val = val
self.left = left
self.right = right
'\nRewrite Diagram:\n1. [ -> Tree( \n2. ] -> )\n'
tree_diagram_list = [0, [4, None, None], [2, [1, [3, None, None], None], None]]
root = tree(0, tree(4, None, None), tree(2, tree(1, tree(3, None, None), None), None))
def print_tree(root):
print(root.val)
if root.left:
print_tree(root.left)
if root.right:
print_tree(root.right)
return None
print_tree(root)
test_input = root
run_test = solution()
print('\nOutput =', run_test.solve(test_input)) |
#-*- coding: utf-8 -*-
# https://github.com/Kodi-vStream/venom-xbmc-addons
class iHoster:
    """
    Abstract hoster interface: every method must be overridden by a
    concrete hoster plugin and raises NotImplementedError here.
    """

    # -- identification ---------------------------------------------------
    def getDisplayName(self):
        raise NotImplementedError

    def setDisplayName(self, sDisplayName):
        raise NotImplementedError

    def setFileName(self, sFileName):
        raise NotImplementedError

    def getFileName(self):
        raise NotImplementedError

    def getPluginIdentifier(self):
        raise NotImplementedError

    # -- capability flags -------------------------------------------------
    def isDownloadable(self):
        raise NotImplementedError

    def isJDownloaderable(self):
        raise NotImplementedError

    def getPattern(self):
        raise NotImplementedError

    # -- URL handling -----------------------------------------------------
    def setUrl(self, sUrl):
        raise NotImplementedError

    def checkUrl(self, sUrl):
        raise NotImplementedError

    def getUrl(self):
        raise NotImplementedError

    def getMediaLink(self):
        raise NotImplementedError
| class Ihoster:
def get_display_name(self):
raise not_implemented_error()
def set_display_name(self, sDisplayName):
raise not_implemented_error()
def set_file_name(self, sFileName):
raise not_implemented_error()
def get_file_name(self):
raise not_implemented_error()
def get_plugin_identifier(self):
raise not_implemented_error()
def is_downloadable(self):
raise not_implemented_error()
def is_j_downloaderable(self):
raise not_implemented_error()
def get_pattern(self):
raise not_implemented_error()
def set_url(self, sUrl):
raise not_implemented_error()
def check_url(self, sUrl):
raise not_implemented_error()
def get_url(self):
raise not_implemented_error()
def get_media_link(self):
raise not_implemented_error() |
"""
LC887 -- super egg drop
You are given K eggs, and you have access to a building with N floors from 1 to N.
Each egg is identical in function, and if an egg breaks, you cannot drop it again.
You know that there exists a floor F with 0 <= F <= N such that any egg dropped at a floor higher than F will break, and any egg dropped at or below floor F will not break.
Each move, you may take an egg (if you have an unbroken one) and drop it from any floor X (with 1 <= X <= N).
Your goal is to know with certainty what the value of F is.
What is the minimum number of moves that you need to know with certainty what F is, regardless of the initial value of F?
Example 1:
Input: K = 1, N = 2
Output: 2
Explanation:
Drop the egg from floor 1. If it breaks, we know with certainty that F = 0.
Otherwise, drop the egg from floor 2. If it breaks, we know with certainty that F = 1.
If it didn't break, then we know with certainty F = 2.
Hence, we needed 2 moves in the worst case to know what F is with certainty.
Example 2:
Input: K = 2, N = 6
Output: 3
Example 3:
Input: K = 3, N = 14
Output: 4
"""
# initial method -- dp
# TLE however
class Solution:
    def superEggDrop(self, K: int, N: int) -> int:
        """
        Minimum worst-case drops to pinpoint the critical floor F.

        Bottom-up DP over (floors, eggs): dp[f][e] is the number of drops
        needed to certify f consecutive floors with e eggs.
        O(K * N^2) time -- correct for the tested cases but too slow
        (TLE) for very large N.
        """
        dp = [[0] * (K + 1) for _ in range(N + 1)]
        # Base cases: one egg forces a linear scan; one floor needs one drop.
        for floors in range(1, N + 1):
            dp[floors][1] = floors
        for eggs in range(1, K + 1):
            dp[1][eggs] = 1
        for eggs in range(2, K + 1):
            for floors in range(2, N + 1):
                # For each candidate drop floor x (original scans 2..floors//2+1)
                # the egg either breaks (x-1 floors, one egg fewer) or survives
                # (floors-x floors, same eggs); take the worst, then the best x.
                best = min(
                    max(dp[x - 1][eggs - 1], dp[floors - x][eggs])
                    for x in range(2, floors // 2 + 2)
                )
                dp[floors][eggs] = 1 + min(N, best)
        return dp[N][K]
# reference: https://leetcode.com/articles/super-egg-drop/
# check this link
# feel the power of math
# every time I should write down the state transfer function to
class Solution:
    def superEggDrop(self, K: int, N: int) -> int:
        """
        Minimum number of moves to determine the critical floor F with
        certainty, given K (>= 1) eggs and N floors.

        Inverts the problem: dp[m][k] is the maximum number of floors that
        can be distinguished with m moves and k eggs, via
            dp[m][k] = dp[m-1][k-1]  (egg breaks: floors below)
                     + dp[m-1][k]    (egg survives: floors above)
                     + 1             (the floor we dropped from)
        The answer is the smallest m with dp[m][k] >= N.
        Runs in O(K * answer) time, far faster than the O(K*N^2) DP.

        :param K: number of eggs available (must be >= 1 for N >= 1)
        :param N: number of floors
        :return: minimum worst-case number of drops
        """
        # Fix: the original fell off the end and returned None for N <= 0;
        # with no floors to test, F is already certain and 0 moves suffice.
        if N <= 0:
            return 0
        dp = [[0] * (K + 1) for _ in range(N + 1)]
        for m in range(1, N + 1):
            for k in range(1, K + 1):
                dp[m][k] = dp[m - 1][k - 1] + dp[m - 1][k] + 1
                if dp[m][k] >= N:
                    return m
if __name__ == '__main__':
    # Smoke test with a large input (4 eggs, 5000 floors); feasible only for
    # the O(K * answer) m-moves formulation defined above.
    sol = Solution()
    k = 4
    n = 5000
    print(sol.superEggDrop(k, n))
| """
LC887 -- super egg drop
You are given K eggs, and you have access to a building with N floors from 1 to N.
Each egg is identical in function, and if an egg breaks, you cannot drop it again.
You know that there exists a floor F with 0 <= F <= N such that any egg dropped at a floor higher than F will break, and any egg dropped at or below floor F will not break.
Each move, you may take an egg (if you have an unbroken one) and drop it from any floor X (with 1 <= X <= N).
Your goal is to know with certainty what the value of F is.
What is the minimum number of moves that you need to know with certainty what F is, regardless of the initial value of F?
Example 1:
Input: K = 1, N = 2
Output: 2
Explanation:
Drop the egg from floor 1. If it breaks, we know with certainty that F = 0.
Otherwise, drop the egg from floor 2. If it breaks, we know with certainty that F = 1.
If it didn't break, then we know with certainty F = 2.
Hence, we needed 2 moves in the worst case to know what F is with certainty.
Example 2:
Input: K = 2, N = 6
Output: 3
Example 3:
Input: K = 3, N = 14
Output: 4
"""
class Solution:
def super_egg_drop(self, K: int, N: int) -> int:
dp = [[0 for _ in range(K + 1)] for _ in range(N + 1)]
for floor in range(1, N + 1):
dp[floor][1] = floor
for egg in range(1, K + 1):
dp[1][egg] = 1
for i in range(2, K + 1):
for j in range(2, N + 1):
max_move = N
for drop_floor in range(2, j // 2 + 2):
max_move = min(max_move, max(dp[drop_floor - 1][i - 1], dp[j - drop_floor][i]))
dp[j][i] = 1 + max_move
return dp[N][K]
class Solution:
def super_egg_drop(self, K: int, N: int) -> int:
dp = [[0] * (K + 1) for n in range(N + 1)]
for m in range(1, N + 1):
for k in range(1, K + 1):
dp[m][k] = dp[m - 1][k - 1] + dp[m - 1][k] + 1
if dp[m][k] >= N:
return m
if __name__ == '__main__':
sol = solution()
k = 4
n = 5000
print(sol.superEggDrop(k, n)) |
# Test fixture: expected parsed output mapping each drop-reason counter name
# to its numeric drop_type code and packet count.
# NOTE(review): names such as SA/SPI/IKE suggest IPsec drop statistics --
# confirm against the show-command parser this fixture belongs to.
expected_output={
    "drops":{
        "IN_US_CL_V4_PKT_FAILED_POLICY":{
            "drop_type":8,
            "packets":11019,
        },
        "IN_US_V4_PKT_SA_NOT_FOUND_SPI":{
            "drop_type":4,
            "packets":67643,
        },
        "OCT_GEN_NOTIFY_SOFT_EXPIRY":{
            "drop_type":66,
            "packets":159949980,
        },
        "OCT_PKT_HIT_INVALID_SA":{
            "drop_type":68,
            "packets":2797,
        },
        "OUT_OCT_HARD_EXPIRY":{
            "drop_type":44,
            "packets":3223664,
        },
        "OUT_V4_PKT_HIT_IKE_START_SP":{
            "drop_type":33,
            "packets":1763263723,
        },
        "OUT_V4_PKT_HIT_INVALID_SA":{
            "drop_type":32,
            "packets":28583,
        },
    }
} | expected_output = {'drops': {'IN_US_CL_V4_PKT_FAILED_POLICY': {'drop_type': 8, 'packets': 11019}, 'IN_US_V4_PKT_SA_NOT_FOUND_SPI': {'drop_type': 4, 'packets': 67643}, 'OCT_GEN_NOTIFY_SOFT_EXPIRY': {'drop_type': 66, 'packets': 159949980}, 'OCT_PKT_HIT_INVALID_SA': {'drop_type': 68, 'packets': 2797}, 'OUT_OCT_HARD_EXPIRY': {'drop_type': 44, 'packets': 3223664}, 'OUT_V4_PKT_HIT_IKE_START_SP': {'drop_type': 33, 'packets': 1763263723}, 'OUT_V4_PKT_HIT_INVALID_SA': {'drop_type': 32, 'packets': 28583}}} |
#
# PySNMP MIB module FASTPATH-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/FASTPATH-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:12:15 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, NotificationType, enterprises, iso, Counter32, Integer32, Unsigned32, ObjectIdentity, TimeTicks, Bits, Gauge32, MibIdentifier, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "NotificationType", "enterprises", "iso", "Counter32", "Integer32", "Unsigned32", "ObjectIdentity", "TimeTicks", "Bits", "Gauge32", "MibIdentifier", "ModuleIdentity")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
excelan = MibIdentifier((1, 3, 6, 1, 4, 1, 23))
genericGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2))
fastpathMib = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11))
scc = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 1))
alap = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 2))
ethernet = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 3))
aarp = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 4))
atif = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 5))
ddp = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 6))
rtmp = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 7))
kip = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 8))
zip = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 9))
nbp = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 10))
echo = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 11))
buffer = MibIdentifier((1, 3, 6, 1, 4, 1, 23, 2, 11, 12))
sccInterruptCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sccInterruptCount.setStatus('mandatory')
sccAbortCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sccAbortCount.setStatus('mandatory')
sccSpuriousCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sccSpuriousCount.setStatus('mandatory')
sccCRCCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sccCRCCount.setStatus('mandatory')
sccOverrunCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sccOverrunCount.setStatus('mandatory')
sccUnderrunCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sccUnderrunCount.setStatus('mandatory')
alapReceiveCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alapReceiveCount.setStatus('mandatory')
alapTransmitCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alapTransmitCount.setStatus('mandatory')
alapNoHandlerCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alapNoHandlerCount.setStatus('mandatory')
alapLengthErrorCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alapLengthErrorCount.setStatus('mandatory')
alapBadCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alapBadCount.setStatus('mandatory')
alapCollisionCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alapCollisionCount.setStatus('mandatory')
alapDeferCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alapDeferCount.setStatus('mandatory')
alapNoDataCount = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alapNoDataCount.setStatus('mandatory')
alapRandomCTS = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alapRandomCTS.setStatus('mandatory')
etherCRCErrors = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherCRCErrors.setStatus('mandatory')
etherAlignErrors = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherAlignErrors.setStatus('mandatory')
etherResourceErrors = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherResourceErrors.setStatus('mandatory')
etherOverrunErrors = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherOverrunErrors.setStatus('mandatory')
etherInPackets = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherInPackets.setStatus('mandatory')
etherOutPackets = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherOutPackets.setStatus('mandatory')
etherBadTransmits = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherBadTransmits.setStatus('mandatory')
etherOversizeFrames = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherOversizeFrames.setStatus('mandatory')
etherSpurRUReadys = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherSpurRUReadys.setStatus('mandatory')
etherSpurCUActives = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherSpurCUActives.setStatus('mandatory')
etherSpurUnknown = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherSpurUnknown.setStatus('mandatory')
etherBcastDrops = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherBcastDrops.setStatus('mandatory')
etherReceiverRestarts = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherReceiverRestarts.setStatus('mandatory')
etherReinterrupts = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherReinterrupts.setStatus('mandatory')
etherBufferReroutes = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherBufferReroutes.setStatus('mandatory')
etherBufferDrops = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherBufferDrops.setStatus('mandatory')
etherCollisions = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherCollisions.setStatus('mandatory')
etherDefers = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherDefers.setStatus('mandatory')
etherDMAUnderruns = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherDMAUnderruns.setStatus('mandatory')
etherMaxCollisions = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherMaxCollisions.setStatus('mandatory')
etherNoCarriers = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherNoCarriers.setStatus('mandatory')
etherNoCTS = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherNoCTS.setStatus('mandatory')
etherNoSQEs = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherNoSQEs.setStatus('mandatory')
aarpTable = MibTable((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: aarpTable.setStatus('mandatory')
aarpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: aarpEntry.setStatus('mandatory')
aarpIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: aarpIfIndex.setStatus('mandatory')
aarpPhysAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: aarpPhysAddress.setStatus('mandatory')
aarpNetAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: aarpNetAddress.setStatus('mandatory')
atifTable = MibTable((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atifTable.setStatus('mandatory')
atifEntry = MibTableRow((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atifEntry.setStatus('mandatory')
atifIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atifIndex.setStatus('mandatory')
atifDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atifDescr.setStatus('mandatory')
atifType = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("other", 1), ("localtalk", 2), ("ethertalk1", 3), ("ethertalk2", 4), ("tokentalk", 5), ("iptalk", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atifType.setStatus('mandatory')
atifNetStart = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atifNetStart.setStatus('mandatory')
atifNetEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 5), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atifNetEnd.setStatus('mandatory')
atifNetAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 6), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atifNetAddress.setStatus('mandatory')
atifStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: atifStatus.setStatus('mandatory')
atifNetConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("configured", 1), ("garnered", 2), ("guessed", 3), ("unconfigured", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atifNetConfig.setStatus('mandatory')
atifZoneConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("configured", 1), ("garnered", 2), ("guessed", 3), ("unconfigured", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: atifZoneConfig.setStatus('mandatory')
atifZone = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 10), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atifZone.setStatus('mandatory')
atifIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: atifIfIndex.setStatus('mandatory')
ddpOutRequests = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpOutRequests.setStatus('mandatory')
ddpOutShort = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpOutShort.setStatus('mandatory')
ddpOutLong = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpOutLong.setStatus('mandatory')
ddpReceived = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpReceived.setStatus('mandatory')
ddpToForward = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpToForward.setStatus('mandatory')
ddpForwards = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpForwards.setStatus('mandatory')
ddpForMe = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpForMe.setStatus('mandatory')
ddpOutNoRoutes = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpOutNoRoutes.setStatus('mandatory')
ddpTooShortDrops = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpTooShortDrops.setStatus('mandatory')
ddpTooLongDrops = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpTooLongDrops.setStatus('mandatory')
ddpBroadcastDrops = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpBroadcastDrops.setStatus('mandatory')
# Tail of the pysnmp-generated FASTPATH-MIB module.  `mibBuilder`, the Mib*
# constructors and the earlier camelCase objects are all defined above this
# chunk.  Generated code: left byte-identical; only comments added.
# -- remaining DDP (Datagram Delivery Protocol) drop counters, ...2.11.6.x --
ddpShortDDPDrops = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpShortDDPDrops.setStatus('mandatory')
ddpHopCountDrops = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ddpHopCountDrops.setStatus('mandatory')
# -- RTMP routing table, ...2.11.7.1 (one row per known route range) --
rtmpTable = MibTable((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtmpTable.setStatus('mandatory')
rtmpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtmpEntry.setStatus('mandatory')
rtmpRangeStart = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 1), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtmpRangeStart.setStatus('mandatory')
rtmpRangeEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 2), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtmpRangeEnd.setStatus('mandatory')
rtmpNextHop = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtmpNextHop.setStatus('mandatory')
rtmpInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtmpInterface.setStatus('mandatory')
rtmpHops = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtmpHops.setStatus('mandatory')
# Route health is a three-valued enum; the named values come from the MIB.
rtmpState = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("good", 1), ("suspect", 2), ("bad", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtmpState.setStatus('mandatory')
# -- KIP (AppleTalk-over-IP) routing table, ...2.11.8.1 --
kipTable = MibTable((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: kipTable.setStatus('mandatory')
kipEntry = MibTableRow((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: kipEntry.setStatus('mandatory')
kipNet = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 1), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: kipNet.setStatus('mandatory')
kipNextHop = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: kipNextHop.setStatus('mandatory')
kipHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: kipHopCount.setStatus('mandatory')
kipBCastAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: kipBCastAddr.setStatus('mandatory')
kipCore = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("core", 1), ("notcore", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: kipCore.setStatus('mandatory')
kipKfps = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: kipKfps.setStatus('mandatory')
# -- ZIP zone table, ...2.11.9.1 --
zipTable = MibTable((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zipTable.setStatus('mandatory')
zipEntry = MibTableRow((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zipEntry.setStatus('mandatory')
zipZoneName = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1, 1), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zipZoneName.setStatus('mandatory')
zipZoneIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zipZoneIndex.setStatus('mandatory')
# NOTE(review): these two use MibScalar although they sit at column OIDs;
# quirk of the generated source, preserved as-is.
zipZoneNetStart = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zipZoneNetStart.setStatus('mandatory')
zipZoneNetEnd = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zipZoneNetEnd.setStatus('mandatory')
# -- NBP name table, ...2.11.10.1 --
nbpTable = MibTable((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nbpTable.setStatus('mandatory')
nbpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1), ).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nbpEntry.setStatus('mandatory')
nbpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nbpIndex.setStatus('mandatory')
nbpObject = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1, 2), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nbpObject.setStatus('mandatory')
nbpType = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nbpType.setStatus('mandatory')
nbpZone = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nbpZone.setStatus('mandatory')
# -- AppleTalk Echo counters, ...2.11.11.x --
echoRequests = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 11, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: echoRequests.setStatus('mandatory')
echoReplies = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 11, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: echoReplies.setStatus('mandatory')
# -- buffer pool statistics, ...2.11.12.x --
bufferSize = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bufferSize.setStatus('mandatory')
bufferAvail = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bufferAvail.setStatus('mandatory')
bufferDrops = MibScalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bufferDrops.setStatus('mandatory')
bufferTypeTable = MibTable((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4), ).setMaxAccess("readonly")
if mibBuilder.loadTexts: bufferTypeTable.setStatus('mandatory')
bufferTypeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1), ).setMaxAccess("readonly")
if mibBuilder.loadTexts: bufferTypeEntry.setStatus('mandatory')
bufferTypeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bufferTypeIndex.setStatus('mandatory')
bufferType = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("other", 1), ("free", 2), ("localtalk", 3), ("ethernet", 4), ("arp", 5), ("data", 6), ("erbf", 7), ("etbf", 8), ("malloc", 9), ("tkbf", 10), ("token", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bufferType.setStatus('mandatory')
bufferTypeDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bufferTypeDescr.setStatus('mandatory')
bufferTypeCount = MibTableColumn((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bufferTypeCount.setStatus('mandatory')
# Register every object under its MIB name so other modules can import them.
mibBuilder.exportSymbols("FASTPATH-MIB", etherAlignErrors=etherAlignErrors, ddpOutNoRoutes=ddpOutNoRoutes, alapCollisionCount=alapCollisionCount, bufferAvail=bufferAvail, kipKfps=kipKfps, etherReceiverRestarts=etherReceiverRestarts, bufferTypeTable=bufferTypeTable, kipCore=kipCore, atifIndex=atifIndex, alapDeferCount=alapDeferCount, rtmpRangeStart=rtmpRangeStart, etherResourceErrors=etherResourceErrors, etherOversizeFrames=etherOversizeFrames, bufferTypeEntry=bufferTypeEntry, ddpOutRequests=ddpOutRequests, ddp=ddp, zipTable=zipTable, etherBadTransmits=etherBadTransmits, nbpTable=nbpTable, alap=alap, bufferDrops=bufferDrops, bufferType=bufferType, sccCRCCount=sccCRCCount, alapReceiveCount=alapReceiveCount, rtmpState=rtmpState, atifEntry=atifEntry, sccAbortCount=sccAbortCount, ddpToForward=ddpToForward, echo=echo, etherOverrunErrors=etherOverrunErrors, atifZoneConfig=atifZoneConfig, atifTable=atifTable, ddpForwards=ddpForwards, bufferTypeIndex=bufferTypeIndex, rtmpNextHop=rtmpNextHop, aarpNetAddress=aarpNetAddress, atif=atif, alapTransmitCount=alapTransmitCount, alapNoHandlerCount=alapNoHandlerCount, etherDMAUnderruns=etherDMAUnderruns, alapBadCount=alapBadCount, etherReinterrupts=etherReinterrupts, ddpTooShortDrops=ddpTooShortDrops, aarpPhysAddress=aarpPhysAddress, aarpIfIndex=aarpIfIndex, rtmpTable=rtmpTable, zipZoneIndex=zipZoneIndex, etherMaxCollisions=etherMaxCollisions, atifStatus=atifStatus, aarpEntry=aarpEntry, etherSpurUnknown=etherSpurUnknown, zipZoneNetStart=zipZoneNetStart, kipEntry=kipEntry, sccOverrunCount=sccOverrunCount, aarpTable=aarpTable, nbpObject=nbpObject, atifZone=atifZone, kipTable=kipTable, ddpForMe=ddpForMe, etherBufferDrops=etherBufferDrops, atifDescr=atifDescr, etherOutPackets=etherOutPackets, zipEntry=zipEntry, bufferSize=bufferSize, nbpEntry=nbpEntry, echoRequests=echoRequests, etherDefers=etherDefers, atifType=atifType, rtmpHops=rtmpHops, atifNetStart=atifNetStart, kipBCastAddr=kipBCastAddr, ethernet=ethernet, fastpathMib=fastpathMib, 
aarp=aarp, sccUnderrunCount=sccUnderrunCount, ddpBroadcastDrops=ddpBroadcastDrops, rtmpEntry=rtmpEntry, etherInPackets=etherInPackets, etherBcastDrops=etherBcastDrops, etherNoCTS=etherNoCTS, kipNextHop=kipNextHop, ddpOutShort=ddpOutShort, echoReplies=echoReplies, nbp=nbp, etherCollisions=etherCollisions, nbpIndex=nbpIndex, rtmp=rtmp, scc=scc, atifNetEnd=atifNetEnd, alapLengthErrorCount=alapLengthErrorCount, etherBufferReroutes=etherBufferReroutes, zipZoneNetEnd=zipZoneNetEnd, bufferTypeCount=bufferTypeCount, alapRandomCTS=alapRandomCTS, sccInterruptCount=sccInterruptCount, zipZoneName=zipZoneName, etherSpurRUReadys=etherSpurRUReadys, nbpZone=nbpZone, ddpReceived=ddpReceived, ddpShortDDPDrops=ddpShortDDPDrops, buffer=buffer, rtmpRangeEnd=rtmpRangeEnd, alapNoDataCount=alapNoDataCount, zip=zip, nbpType=nbpType, sccSpuriousCount=sccSpuriousCount, etherNoCarriers=etherNoCarriers, ddpTooLongDrops=ddpTooLongDrops, ddpHopCountDrops=ddpHopCountDrops, etherNoSQEs=etherNoSQEs, etherCRCErrors=etherCRCErrors, kipNet=kipNet, rtmpInterface=rtmpInterface, kipHopCount=kipHopCount, ddpOutLong=ddpOutLong, atifIfIndex=atifIfIndex, kip=kip, excelan=excelan, atifNetAddress=atifNetAddress, etherSpurCUActives=etherSpurCUActives, bufferTypeDescr=bufferTypeDescr, genericGroup=genericGroup, atifNetConfig=atifNetConfig)
# ASN.1 / SMI primitives are pulled from the pysnmp MIB builder that executes
# this module; `mibBuilder` is injected into the module namespace by the
# loader.  (Fixed: a stray "| " table-cell delimiter had been fused onto the
# first import line, which made it a SyntaxError.)
(object_identifier, integer, octet_string) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'Integer', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, constraints_intersection, constraints_union, value_size_constraint, value_range_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ConstraintsIntersection', 'ConstraintsUnion', 'ValueSizeConstraint', 'ValueRangeConstraint')
(notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance')
(ip_address, mib_scalar, mib_table, mib_table_row, mib_table_column, counter64, notification_type, enterprises, iso, counter32, integer32, unsigned32, object_identity, time_ticks, bits, gauge32, mib_identifier, module_identity) = mibBuilder.importSymbols('SNMPv2-SMI', 'IpAddress', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter64', 'NotificationType', 'enterprises', 'iso', 'Counter32', 'Integer32', 'Unsigned32', 'ObjectIdentity', 'TimeTicks', 'Bits', 'Gauge32', 'MibIdentifier', 'ModuleIdentity')
(textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString')
# OID anchors for the FASTPATH enterprise MIB:
# excelan(1.3.6.1.4.1.23) -> genericGroup(2) -> fastpathMib(11), with one
# sub-identifier per protocol group.  Building every OID from a shared base
# tuple keeps the tree relationship explicit; the resulting tuples are
# identical to the fully spelled-out originals.
_EXCELAN_OID = (1, 3, 6, 1, 4, 1, 23)
_FASTPATH_OID = _EXCELAN_OID + (2, 11)
excelan = mib_identifier(_EXCELAN_OID)
generic_group = mib_identifier(_EXCELAN_OID + (2,))
fastpath_mib = mib_identifier(_FASTPATH_OID)
scc = mib_identifier(_FASTPATH_OID + (1,))
alap = mib_identifier(_FASTPATH_OID + (2,))
ethernet = mib_identifier(_FASTPATH_OID + (3,))
aarp = mib_identifier(_FASTPATH_OID + (4,))
atif = mib_identifier(_FASTPATH_OID + (5,))
ddp = mib_identifier(_FASTPATH_OID + (6,))
rtmp = mib_identifier(_FASTPATH_OID + (7,))
kip = mib_identifier(_FASTPATH_OID + (8,))
# NOTE: `zip` and `buffer` intentionally mirror the MIB names and therefore
# shadow the builtins, exactly as in the original module.
zip = mib_identifier(_FASTPATH_OID + (9,))
nbp = mib_identifier(_FASTPATH_OID + (10,))
echo = mib_identifier(_FASTPATH_OID + (11,))
buffer = mib_identifier(_FASTPATH_OID + (12,))
# FASTPATH-MIB object definitions (snake_case build).
# Fixed: the snake_case conversion had renamed every assignment target but
# left every *use* in camelCase (e.g. `scc_interrupt_count = ...` followed by
# `sccInterruptCount.setStatus(...)`), so each `loadTexts` branch raised
# NameError.  All setStatus() calls now reference the snake_case bindings.
# -- SCC (serial communications controller) counters, ...2.11.1.x --
scc_interrupt_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    scc_interrupt_count.setStatus('mandatory')
scc_abort_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    scc_abort_count.setStatus('mandatory')
scc_spurious_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    scc_spurious_count.setStatus('mandatory')
scc_crc_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    scc_crc_count.setStatus('mandatory')
scc_overrun_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    scc_overrun_count.setStatus('mandatory')
scc_underrun_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 1, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    scc_underrun_count.setStatus('mandatory')
# -- ALAP (LocalTalk link access) counters, ...2.11.2.x --
alap_receive_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    alap_receive_count.setStatus('mandatory')
alap_transmit_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    alap_transmit_count.setStatus('mandatory')
alap_no_handler_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    alap_no_handler_count.setStatus('mandatory')
alap_length_error_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    alap_length_error_count.setStatus('mandatory')
alap_bad_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    alap_bad_count.setStatus('mandatory')
alap_collision_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    alap_collision_count.setStatus('mandatory')
alap_defer_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 7), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    alap_defer_count.setStatus('mandatory')
alap_no_data_count = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 8), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    alap_no_data_count.setStatus('mandatory')
alap_random_cts = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 2, 9), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    alap_random_cts.setStatus('mandatory')
# -- Ethernet interface counters, ...2.11.3.x --
ether_crc_errors = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_crc_errors.setStatus('mandatory')
ether_align_errors = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_align_errors.setStatus('mandatory')
ether_resource_errors = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_resource_errors.setStatus('mandatory')
ether_overrun_errors = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_overrun_errors.setStatus('mandatory')
ether_in_packets = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_in_packets.setStatus('mandatory')
ether_out_packets = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_out_packets.setStatus('mandatory')
ether_bad_transmits = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 7), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_bad_transmits.setStatus('mandatory')
ether_oversize_frames = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 8), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_oversize_frames.setStatus('mandatory')
ether_spur_ru_readys = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 9), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_spur_ru_readys.setStatus('mandatory')
ether_spur_cu_actives = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 10), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_spur_cu_actives.setStatus('mandatory')
ether_spur_unknown = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 11), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_spur_unknown.setStatus('mandatory')
ether_bcast_drops = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 12), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_bcast_drops.setStatus('mandatory')
ether_receiver_restarts = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 13), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_receiver_restarts.setStatus('mandatory')
ether_reinterrupts = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 14), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_reinterrupts.setStatus('mandatory')
ether_buffer_reroutes = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 15), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_buffer_reroutes.setStatus('mandatory')
ether_buffer_drops = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 16), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_buffer_drops.setStatus('mandatory')
ether_collisions = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 17), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_collisions.setStatus('mandatory')
ether_defers = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 18), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_defers.setStatus('mandatory')
ether_dma_underruns = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 19), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_dma_underruns.setStatus('mandatory')
ether_max_collisions = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 20), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_max_collisions.setStatus('mandatory')
ether_no_carriers = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 21), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_no_carriers.setStatus('mandatory')
ether_no_cts = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 22), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_no_cts.setStatus('mandatory')
ether_no_sq_es = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 3, 23), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ether_no_sq_es.setStatus('mandatory')
# -- AARP address mapping table, ...2.11.4.1 --
aarp_table = mib_table((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    aarp_table.setStatus('mandatory')
aarp_entry = mib_table_row((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    aarp_entry.setStatus('mandatory')
aarp_if_index = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    aarp_if_index.setStatus('mandatory')
aarp_phys_address = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1, 1, 2), octet_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    aarp_phys_address.setStatus('mandatory')
aarp_net_address = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 4, 1, 1, 3), octet_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    aarp_net_address.setStatus('mandatory')
# -- AppleTalk interface table, ...2.11.5.1 --
atif_table = mib_table((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    atif_table.setStatus('mandatory')
atif_entry = mib_table_row((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    atif_entry.setStatus('mandatory')
atif_index = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    atif_index.setStatus('mandatory')
atif_descr = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 2), octet_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    atif_descr.setStatus('mandatory')
atif_type = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6))).clone(namedValues=named_values(('other', 1), ('localtalk', 2), ('ethertalk1', 3), ('ethertalk2', 4), ('tokentalk', 5), ('iptalk', 6)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    atif_type.setStatus('mandatory')
atif_net_start = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 4), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    atif_net_start.setStatus('mandatory')
atif_net_end = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 5), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    atif_net_end.setStatus('mandatory')
atif_net_address = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 6), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    atif_net_address.setStatus('mandatory')
atif_status = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 7), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    atif_status.setStatus('mandatory')
atif_net_config = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 8), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('configured', 1), ('garnered', 2), ('guessed', 3), ('unconfigured', 4)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    atif_net_config.setStatus('mandatory')
atif_zone_config = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 9), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('configured', 1), ('garnered', 2), ('guessed', 3), ('unconfigured', 4)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    atif_zone_config.setStatus('mandatory')
atif_zone = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 10), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    atif_zone.setStatus('mandatory')
atif_if_index = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 5, 1, 1, 11), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    atif_if_index.setStatus('mandatory')
# -- DDP (Datagram Delivery Protocol) counters, ...2.11.6.x --
ddp_out_requests = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_out_requests.setStatus('mandatory')
ddp_out_short = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_out_short.setStatus('mandatory')
ddp_out_long = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_out_long.setStatus('mandatory')
ddp_received = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_received.setStatus('mandatory')
ddp_to_forward = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_to_forward.setStatus('mandatory')
ddp_forwards = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_forwards.setStatus('mandatory')
ddp_for_me = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 7), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_for_me.setStatus('mandatory')
# No object is registered at sub-id ...6.8 (gap preserved from the source).
ddp_out_no_routes = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 9), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_out_no_routes.setStatus('mandatory')
ddp_too_short_drops = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 10), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_too_short_drops.setStatus('mandatory')
ddp_too_long_drops = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 11), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_too_long_drops.setStatus('mandatory')
ddp_broadcast_drops = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 12), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_broadcast_drops.setStatus('mandatory')
ddp_short_ddp_drops = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 13), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_short_ddp_drops.setStatus('mandatory')
ddp_hop_count_drops = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 6, 14), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ddp_hop_count_drops.setStatus('mandatory')
# -- RTMP routing table, ...2.11.7.1 --
rtmp_table = mib_table((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    rtmp_table.setStatus('mandatory')
rtmp_entry = mib_table_row((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    rtmp_entry.setStatus('mandatory')
rtmp_range_start = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 1), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    rtmp_range_start.setStatus('mandatory')
rtmp_range_end = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 2), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    rtmp_range_end.setStatus('mandatory')
rtmp_next_hop = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 3), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    rtmp_next_hop.setStatus('mandatory')
rtmp_interface = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 4), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    rtmp_interface.setStatus('mandatory')
rtmp_hops = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 5), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    rtmp_hops.setStatus('mandatory')
rtmp_state = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 7, 1, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('good', 1), ('suspect', 2), ('bad', 3)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    rtmp_state.setStatus('mandatory')
# -- KIP (AppleTalk-over-IP) routing table, ...2.11.8.1 --
kip_table = mib_table((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    kip_table.setStatus('mandatory')
kip_entry = mib_table_row((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    kip_entry.setStatus('mandatory')
kip_net = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 1), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    kip_net.setStatus('mandatory')
kip_next_hop = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 2), ip_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    kip_next_hop.setStatus('mandatory')
kip_hop_count = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 3), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    kip_hop_count.setStatus('mandatory')
kip_b_cast_addr = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 4), ip_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    kip_b_cast_addr.setStatus('mandatory')
kip_core = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('core', 1), ('notcore', 2)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    kip_core.setStatus('mandatory')
kip_kfps = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 8, 1, 1, 6), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    kip_kfps.setStatus('mandatory')
# -- ZIP zone table, ...2.11.9.1 --
zip_table = mib_table((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    zip_table.setStatus('mandatory')
zip_entry = mib_table_row((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    zip_entry.setStatus('mandatory')
zip_zone_name = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1, 1), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    zip_zone_name.setStatus('mandatory')
zip_zone_index = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1, 2), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    zip_zone_index.setStatus('mandatory')
# NOTE(review): mib_scalar at column OIDs mirrors the original generated code.
zip_zone_net_start = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1, 3), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    zip_zone_net_start.setStatus('mandatory')
zip_zone_net_end = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 9, 1, 1, 4), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    zip_zone_net_end.setStatus('mandatory')
# -- NBP name table, ...2.11.10.1 --
nbp_table = mib_table((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    nbp_table.setStatus('mandatory')
nbp_entry = mib_table_row((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    nbp_entry.setStatus('mandatory')
nbp_index = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    nbp_index.setStatus('mandatory')
nbp_object = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1, 2), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    nbp_object.setStatus('mandatory')
nbp_type = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1, 3), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    nbp_type.setStatus('mandatory')
nbp_zone = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 10, 1, 1, 4), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    nbp_zone.setStatus('mandatory')
# -- AppleTalk Echo counters, ...2.11.11.x --
echo_requests = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 11, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    echo_requests.setStatus('mandatory')
echo_replies = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 11, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    echo_replies.setStatus('mandatory')
# -- buffer pool statistics, ...2.11.12.x --
buffer_size = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    buffer_size.setStatus('mandatory')
buffer_avail = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 2), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    buffer_avail.setStatus('mandatory')
buffer_drops = mib_scalar((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    buffer_drops.setStatus('mandatory')
buffer_type_table = mib_table((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4)).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    buffer_type_table.setStatus('mandatory')
buffer_type_entry = mib_table_row((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1)).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    buffer_type_entry.setStatus('mandatory')
buffer_type_index = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    buffer_type_index.setStatus('mandatory')
buffer_type = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=named_values(('other', 1), ('free', 2), ('localtalk', 3), ('ethernet', 4), ('arp', 5), ('data', 6), ('erbf', 7), ('etbf', 8), ('malloc', 9), ('tkbf', 10), ('token', 11)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    buffer_type.setStatus('mandatory')
buffer_type_descr = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1, 3), octet_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    buffer_type_descr.setStatus('mandatory')
buffer_type_count = mib_table_column((1, 3, 6, 1, 4, 1, 23, 2, 11, 12, 4, 1, 4), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    buffer_type_count.setStatus('mandatory')
# Export the module's objects under their canonical (camelCase) MIB names so
# importers of 'FASTPATH-MIB' see the standard identifiers.  Fixed: the
# keyword *values* previously referenced undefined camelCase names (e.g.
# `etherAlignErrors=etherAlignErrors` -> NameError); they now reference the
# snake_case bindings defined above.  A stray trailing "|" delimiter was
# also removed.
mibBuilder.exportSymbols(
    'FASTPATH-MIB',
    excelan=excelan, genericGroup=generic_group, fastpathMib=fastpath_mib,
    scc=scc, alap=alap, ethernet=ethernet, aarp=aarp, atif=atif, ddp=ddp,
    rtmp=rtmp, kip=kip, zip=zip, nbp=nbp, echo=echo, buffer=buffer,
    sccInterruptCount=scc_interrupt_count, sccAbortCount=scc_abort_count,
    sccSpuriousCount=scc_spurious_count, sccCRCCount=scc_crc_count,
    sccOverrunCount=scc_overrun_count, sccUnderrunCount=scc_underrun_count,
    alapReceiveCount=alap_receive_count, alapTransmitCount=alap_transmit_count,
    alapNoHandlerCount=alap_no_handler_count,
    alapLengthErrorCount=alap_length_error_count, alapBadCount=alap_bad_count,
    alapCollisionCount=alap_collision_count, alapDeferCount=alap_defer_count,
    alapNoDataCount=alap_no_data_count, alapRandomCTS=alap_random_cts,
    etherCRCErrors=ether_crc_errors, etherAlignErrors=ether_align_errors,
    etherResourceErrors=ether_resource_errors,
    etherOverrunErrors=ether_overrun_errors, etherInPackets=ether_in_packets,
    etherOutPackets=ether_out_packets, etherBadTransmits=ether_bad_transmits,
    etherOversizeFrames=ether_oversize_frames,
    etherSpurRUReadys=ether_spur_ru_readys,
    etherSpurCUActives=ether_spur_cu_actives,
    etherSpurUnknown=ether_spur_unknown, etherBcastDrops=ether_bcast_drops,
    etherReceiverRestarts=ether_receiver_restarts,
    etherReinterrupts=ether_reinterrupts,
    etherBufferReroutes=ether_buffer_reroutes,
    etherBufferDrops=ether_buffer_drops, etherCollisions=ether_collisions,
    etherDefers=ether_defers, etherDMAUnderruns=ether_dma_underruns,
    etherMaxCollisions=ether_max_collisions, etherNoCarriers=ether_no_carriers,
    etherNoCTS=ether_no_cts, etherNoSQEs=ether_no_sq_es,
    aarpTable=aarp_table, aarpEntry=aarp_entry, aarpIfIndex=aarp_if_index,
    aarpPhysAddress=aarp_phys_address, aarpNetAddress=aarp_net_address,
    atifTable=atif_table, atifEntry=atif_entry, atifIndex=atif_index,
    atifDescr=atif_descr, atifType=atif_type, atifNetStart=atif_net_start,
    atifNetEnd=atif_net_end, atifNetAddress=atif_net_address,
    atifStatus=atif_status, atifNetConfig=atif_net_config,
    atifZoneConfig=atif_zone_config, atifZone=atif_zone,
    atifIfIndex=atif_if_index,
    ddpOutRequests=ddp_out_requests, ddpOutShort=ddp_out_short,
    ddpOutLong=ddp_out_long, ddpReceived=ddp_received,
    ddpToForward=ddp_to_forward, ddpForwards=ddp_forwards,
    ddpForMe=ddp_for_me, ddpOutNoRoutes=ddp_out_no_routes,
    ddpTooShortDrops=ddp_too_short_drops, ddpTooLongDrops=ddp_too_long_drops,
    ddpBroadcastDrops=ddp_broadcast_drops,
    ddpShortDDPDrops=ddp_short_ddp_drops,
    ddpHopCountDrops=ddp_hop_count_drops,
    rtmpTable=rtmp_table, rtmpEntry=rtmp_entry,
    rtmpRangeStart=rtmp_range_start, rtmpRangeEnd=rtmp_range_end,
    rtmpNextHop=rtmp_next_hop, rtmpInterface=rtmp_interface,
    rtmpHops=rtmp_hops, rtmpState=rtmp_state,
    kipTable=kip_table, kipEntry=kip_entry, kipNet=kip_net,
    kipNextHop=kip_next_hop, kipHopCount=kip_hop_count,
    kipBCastAddr=kip_b_cast_addr, kipCore=kip_core, kipKfps=kip_kfps,
    zipTable=zip_table, zipEntry=zip_entry, zipZoneName=zip_zone_name,
    zipZoneIndex=zip_zone_index, zipZoneNetStart=zip_zone_net_start,
    zipZoneNetEnd=zip_zone_net_end,
    nbpTable=nbp_table, nbpEntry=nbp_entry, nbpIndex=nbp_index,
    nbpObject=nbp_object, nbpType=nbp_type, nbpZone=nbp_zone,
    echoRequests=echo_requests, echoReplies=echo_replies,
    bufferSize=buffer_size, bufferAvail=buffer_avail,
    bufferDrops=buffer_drops, bufferTypeTable=buffer_type_table,
    bufferTypeEntry=buffer_type_entry, bufferTypeIndex=buffer_type_index,
    bufferType=buffer_type, bufferTypeDescr=buffer_type_descr,
    bufferTypeCount=buffer_type_count)
# python3.7
"""Configuration for StyleGAN training demo.
All settings are particularly used for one replica (GPU), such as `batch_size`
and `num_workers`.
"""
runner_type = 'StyleGANRunner'
gan_type = 'stylegan'
resolution = 64
batch_size = 4
val_batch_size = 32
total_img = 100_000
# Training dataset is repeated at the beginning to avoid loading dataset
# repeatedly at the end of each epoch. This can save some I/O time.
data = dict(
num_workers=4,
repeat=500,
train=dict(root_dir='/data3/lyz/dataset/sgan_demo/data/demo.zip', data_format='zip',
resolution=resolution, mirror=0.5),
val=dict(root_dir='/data3/lyz/dataset/sgan_demo/data/demo.zip', data_format='zip',
resolution=resolution),
)
controllers = dict(
RunningLogger=dict(every_n_iters=10),
ProgressScheduler=dict(
every_n_iters=1, init_res=8, minibatch_repeats=4,
lod_training_img=5_000, lod_transition_img=5_000,
batch_size_schedule=dict(res4=64, res8=32, res16=16, res32=8),
),
Snapshoter=dict(every_n_iters=500, first_iter=True, num=200),
FIDEvaluator=dict(every_n_iters=5000, first_iter=True, num=50000),
Checkpointer=dict(every_n_iters=5000, first_iter=True),
)
modules = dict(
discriminator=dict(
model=dict(gan_type=gan_type, resolution=resolution),
lr=dict(lr_type='FIXED'),
opt=dict(opt_type='Adam', base_lr=1e-3, betas=(0.0, 0.99)),
kwargs_train=dict(),
kwargs_val=dict(),
),
generator=dict(
model=dict(gan_type=gan_type, resolution=resolution),
lr=dict(lr_type='FIXED'),
opt=dict(opt_type='Adam', base_lr=1e-3, betas=(0.0, 0.99)),
kwargs_train=dict(w_moving_decay=0.995, style_mixing_prob=0.9,
trunc_psi=1.0, trunc_layers=0, randomize_noise=True),
kwargs_val=dict(trunc_psi=1.0, trunc_layers=0, randomize_noise=False),
g_smooth_img=10000,
)
)
loss = dict(
type='LogisticGANLoss',
d_loss_kwargs=dict(r1_gamma=10.0),
g_loss_kwargs=dict(),
)
| """Configuration for StyleGAN training demo.
All settings are particularly used for one replica (GPU), such as `batch_size`
and `num_workers`.
"""
runner_type = 'StyleGANRunner'
gan_type = 'stylegan'
resolution = 64
batch_size = 4
val_batch_size = 32
total_img = 100000
data = dict(num_workers=4, repeat=500, train=dict(root_dir='/data3/lyz/dataset/sgan_demo/data/demo.zip', data_format='zip', resolution=resolution, mirror=0.5), val=dict(root_dir='/data3/lyz/dataset/sgan_demo/data/demo.zip', data_format='zip', resolution=resolution))
controllers = dict(RunningLogger=dict(every_n_iters=10), ProgressScheduler=dict(every_n_iters=1, init_res=8, minibatch_repeats=4, lod_training_img=5000, lod_transition_img=5000, batch_size_schedule=dict(res4=64, res8=32, res16=16, res32=8)), Snapshoter=dict(every_n_iters=500, first_iter=True, num=200), FIDEvaluator=dict(every_n_iters=5000, first_iter=True, num=50000), Checkpointer=dict(every_n_iters=5000, first_iter=True))
modules = dict(discriminator=dict(model=dict(gan_type=gan_type, resolution=resolution), lr=dict(lr_type='FIXED'), opt=dict(opt_type='Adam', base_lr=0.001, betas=(0.0, 0.99)), kwargs_train=dict(), kwargs_val=dict()), generator=dict(model=dict(gan_type=gan_type, resolution=resolution), lr=dict(lr_type='FIXED'), opt=dict(opt_type='Adam', base_lr=0.001, betas=(0.0, 0.99)), kwargs_train=dict(w_moving_decay=0.995, style_mixing_prob=0.9, trunc_psi=1.0, trunc_layers=0, randomize_noise=True), kwargs_val=dict(trunc_psi=1.0, trunc_layers=0, randomize_noise=False), g_smooth_img=10000))
loss = dict(type='LogisticGANLoss', d_loss_kwargs=dict(r1_gamma=10.0), g_loss_kwargs=dict()) |
"""Planets"""
LIGHT_GREY = (220, 220, 220)
ORANGE = (255, 128, 0)
BLUE = (0, 0, 255)
RED = (255, 0, 0)
YELLOW = (255, 255, 0)
LIGHT_BLUE = (0, 255, 255)
class Planet:
"""Planet"""
def __init__(self, name, mass, diameter, density, gravity, esc_velocity, rotation_period, day_length, from_sun, perihelion, apheleon, orbit_period, orbit_velocity, orbit_inclination, orbit_eccentricity, obliquity_to_orbit, temp, surface_pressure, moons, ring_sys, gmf, img, colour, atmosphere_comp):
self.name = name
self.mass = mass
self.diameter = diameter
self.density = density
self.gravity = gravity
self.esc_velocity = esc_velocity
self.rotation_period = rotation_period
self.day_length = day_length
self.from_sun = from_sun
self.perihelion = perihelion
self.apheleon = apheleon
self.orbit_period = orbit_period
self.orbit_velocity = orbit_velocity
self.orbit_inclination = orbit_inclination
self.orbit_eccentricity = orbit_eccentricity
self.obliquity_to_orbit = obliquity_to_orbit
self.temp = temp
self.surface_pressure = surface_pressure
self.moons = moons
self.ring_sys = ring_sys
self.gmf = gmf
self.img = img
self.colour = colour
self.atmosphere_comp = atmosphere_comp
mercury = Planet("Mercury", 0.33*10**24, 4879, 5429, 3.7, 4.3, 1407.6, 4222.6, 57.9*10**6, 46*10**6, 69.8*10**6, 88, 47.4, 7, 0.206, 0.034, 167, 0, 0, False, True,
"https://nssdc.gsfc.nasa.gov/planetary/banner/mercury.gif", LIGHT_GREY, [["Oxygen", 0.42], ["Sodium", 0.22], ["Hydrogen", 0.22], ["Helium", 0.06], ["Other", 0.08]])
venus = Planet("Venus", 4.87*10**24, 12104, 5243, 8.9, 10.4, -5832.5, 2802, 108.2*10**6, 107.5*10**6, 108.9*10**6, 224.7, 35, 3.4, 0.007, 177.4,
464, 92, 0, False, False, "https://nssdc.gsfc.nasa.gov/planetary/image/venus.jpg", ORANGE, [["Carbon Dioxide", 0.965], ["Nitrogen", 0.035]])
earth = Planet("Earth", 5.97*10**24, 12756, 5514, 9.8, 11.2, 23.9, 24, 149.6*10**6, 147.1*10**6, 152.1*10**6, 365.2, 29.8, 0, 0.017, 23.4, 15, 1,
1, False, True, "https://nssdc.gsfc.nasa.gov/planetary/banner/earth.gif", BLUE, [["Nitrogen", 0.7808], ["Oxygen", 0.2095], ["Other", 0.0097]])
moon = Planet("Moon", 0.073*10**24, 3475, 3340, 1.6, 2.4, 655.7, 708.7, 0.384*10**6, 0.363*10**6, 0.406*10**6, 27.3, 1, 5.1, 0.055, 6.7, -20,
0, 0, False, False, "https://nssdc.gsfc.nasa.gov/planetary/banner/moon.gif", LIGHT_GREY, [["Argon", 0.7], ["Helium", 0.29], ["Sodium", 0.01]])
mars = Planet("Mars", 0.642*10**24, 6792, 3934, 3.7, 5, 24.6, 24.7, 228*10**6, 206.7*10**6, 249.3*10**6, 687, 24.1, 1.8, 0.094, 25.2, -65, 0.01, 2, False, False,
"https://nssdc.gsfc.nasa.gov/planetary/banner/mars.gif", RED, [["Carbon Dioxide", 0.951], ["Nitrogen", 0.0259], ["Argon", 0.0194], ["Oxygen", 0.0016], ["Carbon Monoxide", 0.0006], ["Other", 0.0015]])
jupiter = Planet("Jupiter", 1898*10**24, 142984, 1326, 23.1, 59.5, 9.9, 9.9, 778.5*10**6, 740.6*10**6, 816.4*10**6, 4331, 13.1, 1.3, 0.049, 3.1, -
110, None, 79, True, True, "https://nssdc.gsfc.nasa.gov/planetary/banner/jupiter.gif", ORANGE, [["Molecular Hydrogen", 0.898], ["Helium", 0.102]])
saturn = Planet("Saturn", 568*10**24, 120536, 687, 9, 35.5, 10.7, 10.7, 1432*10**6, 1357.6*10**6, 1506.5*10**6, 10747, 9.7, 2.5, 0.052, 26.7, -140, None,
82, True, True, "https://nssdc.gsfc.nasa.gov/planetary/banner/saturn.gif", YELLOW, [["Molecular Hydrogen", 0.963], ["Helium", 0.0325], ["Other", 0.0045]])
uranus = Planet("Uranus", 86.8*10**24, 51118, 1270, 8.7, 21.3, -17.2, 17.2, 2867*10**6, 2732.7*10**6, 3001.4*10**6, 30589, 6.8, 0.8, 0.047, 97.8, -195, None,
27, True, True, "https://nssdc.gsfc.nasa.gov/planetary/banner/uranus.gif", BLUE, [["Molecular Hydrogen", 0.825], ["Helium", 0.152], ["Other", 0.023]])
neptune = Planet("Neptune", 102*10**24, 49528, 1638, 11, 23.5, 16.1, 16.1, 4515*10**6, 4471.1*10**6, 4558.9*10**6, 59800, 5.4, 1.8, 0.01, 28.3, -200, None,
14, True, True, "https://nssdc.gsfc.nasa.gov/planetary/banner/neptune.gif", BLUE, [["Molecular Hydrogen", 0.8], ["Helium", 0.19], ["Methane", 0.01]])
pluto = Planet("Pluto", 0.013*10**24, 2376, 1850, 0.7, 1.3, -153.3, 153.3, 5906.4*10**6, 4436.8*10**6, 7375.9*10**6, 90560, 4.7, 17.2, 0.244, 122.5, -225, 0.00001, 5, False,
None, "https://nssdc.gsfc.nasa.gov/planetary/banner/plutofact.gif", LIGHT_BLUE, [["Nitrogen", 0.99], ["Methane", 0.005], ["Carbon Monoxide", 0.0005], ["Other", 0.0045]])
| """Planets"""
light_grey = (220, 220, 220)
orange = (255, 128, 0)
blue = (0, 0, 255)
red = (255, 0, 0)
yellow = (255, 255, 0)
light_blue = (0, 255, 255)
class Planet:
"""Planet"""
def __init__(self, name, mass, diameter, density, gravity, esc_velocity, rotation_period, day_length, from_sun, perihelion, apheleon, orbit_period, orbit_velocity, orbit_inclination, orbit_eccentricity, obliquity_to_orbit, temp, surface_pressure, moons, ring_sys, gmf, img, colour, atmosphere_comp):
self.name = name
self.mass = mass
self.diameter = diameter
self.density = density
self.gravity = gravity
self.esc_velocity = esc_velocity
self.rotation_period = rotation_period
self.day_length = day_length
self.from_sun = from_sun
self.perihelion = perihelion
self.apheleon = apheleon
self.orbit_period = orbit_period
self.orbit_velocity = orbit_velocity
self.orbit_inclination = orbit_inclination
self.orbit_eccentricity = orbit_eccentricity
self.obliquity_to_orbit = obliquity_to_orbit
self.temp = temp
self.surface_pressure = surface_pressure
self.moons = moons
self.ring_sys = ring_sys
self.gmf = gmf
self.img = img
self.colour = colour
self.atmosphere_comp = atmosphere_comp
mercury = planet('Mercury', 0.33 * 10 ** 24, 4879, 5429, 3.7, 4.3, 1407.6, 4222.6, 57.9 * 10 ** 6, 46 * 10 ** 6, 69.8 * 10 ** 6, 88, 47.4, 7, 0.206, 0.034, 167, 0, 0, False, True, 'https://nssdc.gsfc.nasa.gov/planetary/banner/mercury.gif', LIGHT_GREY, [['Oxygen', 0.42], ['Sodium', 0.22], ['Hydrogen', 0.22], ['Helium', 0.06], ['Other', 0.08]])
venus = planet('Venus', 4.87 * 10 ** 24, 12104, 5243, 8.9, 10.4, -5832.5, 2802, 108.2 * 10 ** 6, 107.5 * 10 ** 6, 108.9 * 10 ** 6, 224.7, 35, 3.4, 0.007, 177.4, 464, 92, 0, False, False, 'https://nssdc.gsfc.nasa.gov/planetary/image/venus.jpg', ORANGE, [['Carbon Dioxide', 0.965], ['Nitrogen', 0.035]])
earth = planet('Earth', 5.97 * 10 ** 24, 12756, 5514, 9.8, 11.2, 23.9, 24, 149.6 * 10 ** 6, 147.1 * 10 ** 6, 152.1 * 10 ** 6, 365.2, 29.8, 0, 0.017, 23.4, 15, 1, 1, False, True, 'https://nssdc.gsfc.nasa.gov/planetary/banner/earth.gif', BLUE, [['Nitrogen', 0.7808], ['Oxygen', 0.2095], ['Other', 0.0097]])
moon = planet('Moon', 0.073 * 10 ** 24, 3475, 3340, 1.6, 2.4, 655.7, 708.7, 0.384 * 10 ** 6, 0.363 * 10 ** 6, 0.406 * 10 ** 6, 27.3, 1, 5.1, 0.055, 6.7, -20, 0, 0, False, False, 'https://nssdc.gsfc.nasa.gov/planetary/banner/moon.gif', LIGHT_GREY, [['Argon', 0.7], ['Helium', 0.29], ['Sodium', 0.01]])
mars = planet('Mars', 0.642 * 10 ** 24, 6792, 3934, 3.7, 5, 24.6, 24.7, 228 * 10 ** 6, 206.7 * 10 ** 6, 249.3 * 10 ** 6, 687, 24.1, 1.8, 0.094, 25.2, -65, 0.01, 2, False, False, 'https://nssdc.gsfc.nasa.gov/planetary/banner/mars.gif', RED, [['Carbon Dioxide', 0.951], ['Nitrogen', 0.0259], ['Argon', 0.0194], ['Oxygen', 0.0016], ['Carbon Monoxide', 0.0006], ['Other', 0.0015]])
jupiter = planet('Jupiter', 1898 * 10 ** 24, 142984, 1326, 23.1, 59.5, 9.9, 9.9, 778.5 * 10 ** 6, 740.6 * 10 ** 6, 816.4 * 10 ** 6, 4331, 13.1, 1.3, 0.049, 3.1, -110, None, 79, True, True, 'https://nssdc.gsfc.nasa.gov/planetary/banner/jupiter.gif', ORANGE, [['Molecular Hydrogen', 0.898], ['Helium', 0.102]])
saturn = planet('Saturn', 568 * 10 ** 24, 120536, 687, 9, 35.5, 10.7, 10.7, 1432 * 10 ** 6, 1357.6 * 10 ** 6, 1506.5 * 10 ** 6, 10747, 9.7, 2.5, 0.052, 26.7, -140, None, 82, True, True, 'https://nssdc.gsfc.nasa.gov/planetary/banner/saturn.gif', YELLOW, [['Molecular Hydrogen', 0.963], ['Helium', 0.0325], ['Other', 0.0045]])
uranus = planet('Uranus', 86.8 * 10 ** 24, 51118, 1270, 8.7, 21.3, -17.2, 17.2, 2867 * 10 ** 6, 2732.7 * 10 ** 6, 3001.4 * 10 ** 6, 30589, 6.8, 0.8, 0.047, 97.8, -195, None, 27, True, True, 'https://nssdc.gsfc.nasa.gov/planetary/banner/uranus.gif', BLUE, [['Molecular Hydrogen', 0.825], ['Helium', 0.152], ['Other', 0.023]])
neptune = planet('Neptune', 102 * 10 ** 24, 49528, 1638, 11, 23.5, 16.1, 16.1, 4515 * 10 ** 6, 4471.1 * 10 ** 6, 4558.9 * 10 ** 6, 59800, 5.4, 1.8, 0.01, 28.3, -200, None, 14, True, True, 'https://nssdc.gsfc.nasa.gov/planetary/banner/neptune.gif', BLUE, [['Molecular Hydrogen', 0.8], ['Helium', 0.19], ['Methane', 0.01]])
pluto = planet('Pluto', 0.013 * 10 ** 24, 2376, 1850, 0.7, 1.3, -153.3, 153.3, 5906.4 * 10 ** 6, 4436.8 * 10 ** 6, 7375.9 * 10 ** 6, 90560, 4.7, 17.2, 0.244, 122.5, -225, 1e-05, 5, False, None, 'https://nssdc.gsfc.nasa.gov/planetary/banner/plutofact.gif', LIGHT_BLUE, [['Nitrogen', 0.99], ['Methane', 0.005], ['Carbon Monoxide', 0.0005], ['Other', 0.0045]]) |
# Time: O(1)
# Space: O(1)
#
# Write a function to delete a node (except the tail) in a singly linked list,
# given only access to that node.
#
# Supposed the linked list is 1 -> 2 -> 3 -> 4 and you are given the third node
# with value 3, the linked list should become 1 -> 2 -> 4 after calling your function.
#
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
# @param {ListNode} node
# @return {void} Do not return anything, modify node in-place instead.
def deleteNode(self, node):
if node and node.next:
node_to_delete = node.next
node.val = node_to_delete.val
node.next = node_to_delete.next
del node_to_delete
| class Solution:
def delete_node(self, node):
if node and node.next:
node_to_delete = node.next
node.val = node_to_delete.val
node.next = node_to_delete.next
del node_to_delete |
#sum in python
'''
weight = float(input("Weight:"))
height = float(input("height"))
total = weight + height
print(total)
'''
#cal monthly salary
'''
import math
age= 17
pi= 3.14
print(type(age),age)
print(type(pi),pi)
print(math.pi)
salary = float(input('Salary: ')) #float convert text(string) to number with decimal place
bonus = float(input('Bouns: '))
Income = salary * 12 + bonus
print('Your monthly is $' + str(Income)) #'' or "" is use to define string
print('Your monthly is $', Income) #we can use + or , to more than one value but , will auto add a space and auto convert indentifier/var
'''
# # is comment
''' is also comment ''' | """
weight = float(input("Weight:"))
height = float(input("height"))
total = weight + height
print(total)
"""
'\nimport math\n\nage= 17\npi= 3.14\nprint(type(age),age)\nprint(type(pi),pi)\n\nprint(math.pi)\n\nsalary = float(input(\'Salary: \')) #float convert text(string) to number with decimal place\nbonus = float(input(\'Bouns: \'))\nIncome = salary * 12 + bonus\nprint(\'Your monthly is $\' + str(Income)) #\'\' or "" is use to define string\nprint(\'Your monthly is $\', Income) #we can use + or , to more than one value but , will auto add a space and auto convert indentifier/var\n'
' is also comment ' |
nonverified_users = ['calvin klein','ralph lauren','christian dior','donna karran']
verified_users = []
#verifying if there are new users and moving them to a verified list
while nonverified_users:
current_user = nonverified_users.pop()
print(f"\nVerifying user: {current_user}")
verified_users.append(current_user)
#Printing out the verified users
for user in verified_users:
print(f"\n\t{user.title()} is now verfied!") | nonverified_users = ['calvin klein', 'ralph lauren', 'christian dior', 'donna karran']
verified_users = []
while nonverified_users:
current_user = nonverified_users.pop()
print(f'\nVerifying user: {current_user}')
verified_users.append(current_user)
for user in verified_users:
print(f'\n\t{user.title()} is now verfied!') |
# from ..interpreter import model
# Commented out to make static node recovery be used
# @model('map_server', 'map_server')
def map_server(c):
c.read('~frame_id', 'map')
c.read('~negate', 0)
c.read('~occupied_thresh', 0.65)
c.read('~free_thresh', 0.196)
c.provide('static_map', 'nav_msgs/GetMap')
c.pub('map_metadata', 'nav_msgs/MapMetaData')
c.pub('map', 'nav_msgs/OccupancyGrid')
| def map_server(c):
c.read('~frame_id', 'map')
c.read('~negate', 0)
c.read('~occupied_thresh', 0.65)
c.read('~free_thresh', 0.196)
c.provide('static_map', 'nav_msgs/GetMap')
c.pub('map_metadata', 'nav_msgs/MapMetaData')
c.pub('map', 'nav_msgs/OccupancyGrid') |
def func1():
def func2():
return True
return func2()
if(func1()):
print("Acabou")
| def func1():
def func2():
return True
return func2()
if func1():
print('Acabou') |
fname = input('Enter File: ')
if len(fname) < 1:
fname = 'clown.txt'
hand = open(fname)
di = dict()
for lin in hand:
lin = lin.rstrip()
wds = lin.split()
#print(wds)
for w in wds:
# if the key is not there the count is zero
#print(w)
#print('**',w,di.get(w,-99))
#oldcount = di.get(w,0)
#print(w,'old',oldcount)
#newcount = oldcount + 1
#di[w] = newcount
#print(w,'new',newcount)
# idiom: retrieve/create/update counter
di[w] = di.get(w,0) + 1
# print(w,'new',di[w])
#if w in di:
# di[w] = di[w] + 1
#print('**EXISTING**')
#else:
# di[w] = 1
#print('**NEW**')
#print(di[w])
#print(di)
#now we want to find the most common bigword
largest = -1
theword = None
for k,v in di.items():
#print (k, v)
if v > largest:
largest = v
theword = k #capture/ remember the word is largest
print('Done', theword, largest)
| fname = input('Enter File: ')
if len(fname) < 1:
fname = 'clown.txt'
hand = open(fname)
di = dict()
for lin in hand:
lin = lin.rstrip()
wds = lin.split()
for w in wds:
di[w] = di.get(w, 0) + 1
largest = -1
theword = None
for (k, v) in di.items():
if v > largest:
largest = v
theword = k
print('Done', theword, largest) |
'''
Square Root of Integer
Asked in:
Facebook
Amazon
Microsoft
Given an integar A.
Compute and return the square root of A.
If A is not a perfect square, return floor(sqrt(A)).
DO NOT USE SQRT FUNCTION FROM STANDARD LIBRARY
Input Format
The first and only argument given is the integer A.
Output Format
Return floor(sqrt(A))
Constraints
1 <= A <= 10^9
For Example
Input 1:
A = 11
Output 1:
3
Input 2:
A = 9
Output 2:
3
'''
class Solution:
# @param A : integer
# @return an integer
def sqrt(self, n):
lo,hi=1,n
ans=n
while lo<=hi:
mid=(lo+hi)//2
x=mid*mid
if x>n:
hi=mid-1
elif x<=n:
lo=mid+1
ans=mid
return ans
| """
Square Root of Integer
Asked in:
Facebook
Amazon
Microsoft
Given an integar A.
Compute and return the square root of A.
If A is not a perfect square, return floor(sqrt(A)).
DO NOT USE SQRT FUNCTION FROM STANDARD LIBRARY
Input Format
The first and only argument given is the integer A.
Output Format
Return floor(sqrt(A))
Constraints
1 <= A <= 10^9
For Example
Input 1:
A = 11
Output 1:
3
Input 2:
A = 9
Output 2:
3
"""
class Solution:
def sqrt(self, n):
(lo, hi) = (1, n)
ans = n
while lo <= hi:
mid = (lo + hi) // 2
x = mid * mid
if x > n:
hi = mid - 1
elif x <= n:
lo = mid + 1
ans = mid
return ans |
# buildifier: disable=module-docstring
load("//bazel/platform:transitions.bzl", "risc0_transition")
# https://github.com/bazelbuild/bazel/blob/master/src/main/starlark/builtins_bzl/common/cc/cc_library.bzl
CC_TOOLCHAIN_TYPE = "@bazel_tools//tools/cpp:toolchain_type"
def _get_compilation_contexts_from_deps(deps):
compilation_contexts = []
for dep in deps:
if CcInfo in dep:
compilation_contexts.append(dep[CcInfo].compilation_context)
return compilation_contexts
def _get_linking_contexts_from_deps(deps):
linking_contexts = []
for dep in deps:
if CcInfo in dep:
linking_contexts.append(dep[CcInfo].linking_context)
return linking_contexts
def _compile(ctx, cc_toolchain, feature_configuration):
compilation_contexts = _get_compilation_contexts_from_deps(ctx.attr.deps)
return cc_common.compile(
name = ctx.label.name,
actions = ctx.actions,
cc_toolchain = cc_toolchain,
feature_configuration = feature_configuration,
srcs = ctx.files.srcs,
user_compile_flags = ctx.attr.copts,
defines = ctx.attr.defines,
local_defines = ctx.attr.local_defines,
compilation_contexts = compilation_contexts,
public_hdrs = ctx.files.hdrs,
additional_inputs = ctx.files.aux_srcs,
includes = ctx.attr.includes,
include_prefix = ctx.attr.include_prefix,
strip_include_prefix = ctx.attr.strip_include_prefix,
)
def _risc0_cc_library_impl(ctx):
cc_toolchain = ctx.toolchains[CC_TOOLCHAIN_TYPE].cc
feature_configuration = cc_common.configure_features(
ctx = ctx,
cc_toolchain = cc_toolchain,
requested_features = ctx.features,
unsupported_features = ctx.disabled_features,
)
(compile_context, compilation_outputs) = _compile(ctx, cc_toolchain, feature_configuration)
linking_contexts = _get_linking_contexts_from_deps(ctx.attr.deps)
(linking_context, linking_outputs) = cc_common.create_linking_context_from_compilation_outputs(
actions = ctx.actions,
name = ctx.label.name,
compilation_outputs = compilation_outputs,
cc_toolchain = cc_toolchain,
feature_configuration = feature_configuration,
linking_contexts = linking_contexts,
user_link_flags = ctx.attr.linkopts,
alwayslink = ctx.attr.alwayslink,
disallow_dynamic_library = True,
)
files_builder = []
if linking_outputs.library_to_link != None:
artifacts_to_build = linking_outputs.library_to_link
if artifacts_to_build.static_library != None:
files_builder.append(artifacts_to_build.static_library)
if artifacts_to_build.pic_static_library != None:
files_builder.append(artifacts_to_build.pic_static_library)
return [
DefaultInfo(files = depset(files_builder)),
CcInfo(
compilation_context = compile_context,
linking_context = linking_context,
),
]
def _risc0_cc_binary_impl(ctx):
cc_toolchain = ctx.toolchains[CC_TOOLCHAIN_TYPE].cc
feature_configuration = cc_common.configure_features(
ctx = ctx,
cc_toolchain = cc_toolchain,
requested_features = ctx.features,
unsupported_features = ctx.disabled_features,
)
(compile_context, compilation_outputs) = _compile(ctx, cc_toolchain, feature_configuration)
linking_contexts = _get_linking_contexts_from_deps(ctx.attr.deps)
linking_outputs = cc_common.link(
name = ctx.label.name,
actions = ctx.actions,
feature_configuration = feature_configuration,
cc_toolchain = cc_toolchain,
compilation_outputs = compilation_outputs,
linking_contexts = linking_contexts,
user_link_flags = ["-T", ctx.file._linker_script.path] + ctx.attr.linkopts,
output_type = "executable",
)
runfiles = ctx.runfiles(files = [linking_outputs.executable])
for data_dep in ctx.attr.data:
runfiles = runfiles.merge(ctx.runfiles(transitive_files = data_dep[DefaultInfo].files))
runfiles = runfiles.merge(data_dep[DefaultInfo].data_runfiles)
for src in ctx.attr.srcs:
runfiles = runfiles.merge(src[DefaultInfo].default_runfiles)
for dep in ctx.attr.deps:
runfiles = runfiles.merge(dep[DefaultInfo].default_runfiles)
return [DefaultInfo(
files = depset([linking_outputs.executable]),
runfiles = runfiles,
)]
attrs = {
"srcs": attr.label_list(allow_files = True),
"hdrs": attr.label_list(allow_files = True),
"aux_srcs": attr.label_list(allow_files = True),
"includes": attr.string_list(),
"defines": attr.string_list(),
"copts": attr.string_list(),
"linkopts": attr.string_list(),
"local_defines": attr.string_list(),
"alwayslink": attr.bool(default = False),
"strip_include_prefix": attr.string(),
"include_prefix": attr.string(),
"deps": attr.label_list(providers = [CcInfo]),
"data": attr.label_list(allow_files = True),
"_linker_script": attr.label(
allow_single_file = True,
default = Label("//risc0/zkvm/platform:risc0.ld"),
),
"_allowlist_function_transition": attr.label(
default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
),
}
risc0_cc_library = rule(
implementation = _risc0_cc_library_impl,
attrs = attrs,
toolchains = [CC_TOOLCHAIN_TYPE],
fragments = ["cpp"],
incompatible_use_toolchain_transition = True,
cfg = risc0_transition,
)
risc0_cc_binary = rule(
implementation = _risc0_cc_binary_impl,
attrs = attrs,
toolchains = [CC_TOOLCHAIN_TYPE],
fragments = ["cpp"],
incompatible_use_toolchain_transition = True,
cfg = risc0_transition,
)
| load('//bazel/platform:transitions.bzl', 'risc0_transition')
cc_toolchain_type = '@bazel_tools//tools/cpp:toolchain_type'
def _get_compilation_contexts_from_deps(deps):
compilation_contexts = []
for dep in deps:
if CcInfo in dep:
compilation_contexts.append(dep[CcInfo].compilation_context)
return compilation_contexts
def _get_linking_contexts_from_deps(deps):
linking_contexts = []
for dep in deps:
if CcInfo in dep:
linking_contexts.append(dep[CcInfo].linking_context)
return linking_contexts
def _compile(ctx, cc_toolchain, feature_configuration):
compilation_contexts = _get_compilation_contexts_from_deps(ctx.attr.deps)
return cc_common.compile(name=ctx.label.name, actions=ctx.actions, cc_toolchain=cc_toolchain, feature_configuration=feature_configuration, srcs=ctx.files.srcs, user_compile_flags=ctx.attr.copts, defines=ctx.attr.defines, local_defines=ctx.attr.local_defines, compilation_contexts=compilation_contexts, public_hdrs=ctx.files.hdrs, additional_inputs=ctx.files.aux_srcs, includes=ctx.attr.includes, include_prefix=ctx.attr.include_prefix, strip_include_prefix=ctx.attr.strip_include_prefix)
def _risc0_cc_library_impl(ctx):
cc_toolchain = ctx.toolchains[CC_TOOLCHAIN_TYPE].cc
feature_configuration = cc_common.configure_features(ctx=ctx, cc_toolchain=cc_toolchain, requested_features=ctx.features, unsupported_features=ctx.disabled_features)
(compile_context, compilation_outputs) = _compile(ctx, cc_toolchain, feature_configuration)
linking_contexts = _get_linking_contexts_from_deps(ctx.attr.deps)
(linking_context, linking_outputs) = cc_common.create_linking_context_from_compilation_outputs(actions=ctx.actions, name=ctx.label.name, compilation_outputs=compilation_outputs, cc_toolchain=cc_toolchain, feature_configuration=feature_configuration, linking_contexts=linking_contexts, user_link_flags=ctx.attr.linkopts, alwayslink=ctx.attr.alwayslink, disallow_dynamic_library=True)
files_builder = []
if linking_outputs.library_to_link != None:
artifacts_to_build = linking_outputs.library_to_link
if artifacts_to_build.static_library != None:
files_builder.append(artifacts_to_build.static_library)
if artifacts_to_build.pic_static_library != None:
files_builder.append(artifacts_to_build.pic_static_library)
return [default_info(files=depset(files_builder)), cc_info(compilation_context=compile_context, linking_context=linking_context)]
def _risc0_cc_binary_impl(ctx):
cc_toolchain = ctx.toolchains[CC_TOOLCHAIN_TYPE].cc
feature_configuration = cc_common.configure_features(ctx=ctx, cc_toolchain=cc_toolchain, requested_features=ctx.features, unsupported_features=ctx.disabled_features)
(compile_context, compilation_outputs) = _compile(ctx, cc_toolchain, feature_configuration)
linking_contexts = _get_linking_contexts_from_deps(ctx.attr.deps)
linking_outputs = cc_common.link(name=ctx.label.name, actions=ctx.actions, feature_configuration=feature_configuration, cc_toolchain=cc_toolchain, compilation_outputs=compilation_outputs, linking_contexts=linking_contexts, user_link_flags=['-T', ctx.file._linker_script.path] + ctx.attr.linkopts, output_type='executable')
runfiles = ctx.runfiles(files=[linking_outputs.executable])
for data_dep in ctx.attr.data:
runfiles = runfiles.merge(ctx.runfiles(transitive_files=data_dep[DefaultInfo].files))
runfiles = runfiles.merge(data_dep[DefaultInfo].data_runfiles)
for src in ctx.attr.srcs:
runfiles = runfiles.merge(src[DefaultInfo].default_runfiles)
for dep in ctx.attr.deps:
runfiles = runfiles.merge(dep[DefaultInfo].default_runfiles)
return [default_info(files=depset([linking_outputs.executable]), runfiles=runfiles)]
attrs = {'srcs': attr.label_list(allow_files=True), 'hdrs': attr.label_list(allow_files=True), 'aux_srcs': attr.label_list(allow_files=True), 'includes': attr.string_list(), 'defines': attr.string_list(), 'copts': attr.string_list(), 'linkopts': attr.string_list(), 'local_defines': attr.string_list(), 'alwayslink': attr.bool(default=False), 'strip_include_prefix': attr.string(), 'include_prefix': attr.string(), 'deps': attr.label_list(providers=[CcInfo]), 'data': attr.label_list(allow_files=True), '_linker_script': attr.label(allow_single_file=True, default=label('//risc0/zkvm/platform:risc0.ld')), '_allowlist_function_transition': attr.label(default='@bazel_tools//tools/allowlists/function_transition_allowlist')}
risc0_cc_library = rule(implementation=_risc0_cc_library_impl, attrs=attrs, toolchains=[CC_TOOLCHAIN_TYPE], fragments=['cpp'], incompatible_use_toolchain_transition=True, cfg=risc0_transition)
risc0_cc_binary = rule(implementation=_risc0_cc_binary_impl, attrs=attrs, toolchains=[CC_TOOLCHAIN_TYPE], fragments=['cpp'], incompatible_use_toolchain_transition=True, cfg=risc0_transition) |
"""Cornershop Models.
"""
class Aisle:
"""Model for an aisle.
"""
def __init__(self, data:dict):
for key in data:
setattr(self, key, data[key])
self.products = [Product(p) for p in self.products]
def __repr__(self) -> str:
return f'<cornershop.models.Aisle: {self.aisle_id} - {self.aisle_name}>'
def __str__(self) -> str:
return self.aisle_id
class Branch:
"""Model for a branch.
"""
def __init__(self, data:dict):
self.ad_campaign = data['ad_campaign']
self.aisles = [Aisle(a) for a in data['aisles']]
def __str__(self) -> str:
return 'Branch'
def __repr__(self) -> str:
return f'<cornershop.models.Branch>'
class Country:
"""Model for a country.
"""
def __init__(self, data:dict):
for key in data:
setattr(self, key, data[key])
def __repr__(self) -> str:
return f'<cornershop.models.Country: {self.name}>'
def __str__(self) -> str:
return self.name
class Group:
"""Model for a group.
"""
def __init__(self, data:dict):
for key in data:
setattr(self, key, data[key])
self.items = [GroupItem(item) for item in data['items']]
def __repr__(self) -> str:
return f'<cornershop.model.Group: {self.name}>'
def __str__(self) -> str:
return self.name
class GroupItem:
"""Model for a group item.
"""
def __init__(self, data:dict):
self.type = data['type']
self.badges = data['badges']
for key in data['content']:
setattr(self, key, data['content'][key])
def __repr__(self) -> str:
return f'<cornershop.model.GroupItem: {self.name}>'
def __str__(self) -> str:
return self.name
class Product:
"""Model for a product.
"""
def __init__(self, data:dict):
for key in data:
setattr(self, key, data[key])
def __repr__(self) -> str:
return f'<cornershop.models.Product: {self.id} - {self.name}>'
def __str__(self) -> str:
return self.name
class Result:
"""Model for branch search results.
"""
def __init__(self, data:dict):
self.store = Store(data['store'])
self.search_result = SearchResult(data['search_result'])
def __str__(self) -> str:
return self.store.name
def __repr__(self) -> str:
return f'<cornershop.models.Result: "{self.search_result.search_term}" on {self.store.name}>'
class SearchResult:
"""Model for the search result query.
"""
def __init__(self, data:dict):
self.search_term = data['search_term']
self.aisles = [Aisle(aisle_data) for aisle_data in data['aisles']]
def __repr__(self) -> str:
return f'<cornershop.models.SearchResult: {self.search_term}>'
class Store:
"""Model for a store.
"""
def __init__(self, data:dict):
for key in data:
setattr(self, key, data[key])
def __str__(self) -> str:
return self.name
def __repr__(self) -> str:
return f'<cornershop.models.Store: {self.name}>'
| """Cornershop Models.
"""
class Aisle:
"""Model for an aisle.
"""
def __init__(self, data: dict):
for key in data:
setattr(self, key, data[key])
self.products = [product(p) for p in self.products]
def __repr__(self) -> str:
return f'<cornershop.models.Aisle: {self.aisle_id} - {self.aisle_name}>'
def __str__(self) -> str:
return self.aisle_id
class Branch:
"""Model for a branch.
"""
def __init__(self, data: dict):
self.ad_campaign = data['ad_campaign']
self.aisles = [aisle(a) for a in data['aisles']]
def __str__(self) -> str:
return 'Branch'
def __repr__(self) -> str:
return f'<cornershop.models.Branch>'
class Country:
"""Model for a country.
"""
def __init__(self, data: dict):
for key in data:
setattr(self, key, data[key])
def __repr__(self) -> str:
return f'<cornershop.models.Country: {self.name}>'
def __str__(self) -> str:
return self.name
class Group:
"""Model for a group.
"""
def __init__(self, data: dict):
for key in data:
setattr(self, key, data[key])
self.items = [group_item(item) for item in data['items']]
def __repr__(self) -> str:
return f'<cornershop.model.Group: {self.name}>'
def __str__(self) -> str:
return self.name
class Groupitem:
"""Model for a group item.
"""
def __init__(self, data: dict):
self.type = data['type']
self.badges = data['badges']
for key in data['content']:
setattr(self, key, data['content'][key])
def __repr__(self) -> str:
return f'<cornershop.model.GroupItem: {self.name}>'
def __str__(self) -> str:
return self.name
class Product:
    """A product; every key of the raw payload becomes an instance attribute."""

    def __init__(self, data: dict):
        for key, value in data.items():
            setattr(self, key, value)

    def __repr__(self) -> str:
        return '<cornershop.models.Product: {} - {}>'.format(self.id, self.name)

    def __str__(self) -> str:
        return self.name
class Result:
    """A branch search result: the store plus the search-result payload."""

    def __init__(self, data: dict):
        # BUG FIX: were `store(...)` and `search_result(...)` — undefined
        # lowercase names (NameError); wrap in the Store and Searchresult
        # models defined in this module.
        self.store = Store(data['store'])
        self.search_result = Searchresult(data['search_result'])

    def __str__(self) -> str:
        return self.store.name

    def __repr__(self) -> str:
        return f'<cornershop.models.Result: "{self.search_result.search_term}" on {self.store.name}>'
class Searchresult:
    """Model for the search result query."""

    def __init__(self, data: dict):
        self.search_term = data['search_term']
        # BUG FIX: was `aisle(aisle_data)` — an undefined lowercase name that
        # raised NameError for any non-empty aisle list; wrap in Aisle.
        self.aisles = [Aisle(aisle_data) for aisle_data in data['aisles']]

    def __repr__(self) -> str:
        # NOTE(review): repr says 'SearchResult' while this class is named
        # 'Searchresult'; string kept byte-identical for compatibility.
        return f'<cornershop.models.SearchResult: {self.search_term}>'
class Store:
    """Model for a store built from a raw payload dict."""

    def __init__(self, data: dict):
        for attribute, value in data.items():
            setattr(self, attribute, value)

    def __str__(self) -> str:
        return self.name

    def __repr__(self) -> str:
        return f'<cornershop.models.Store: {self.name}>'
# Copyright 2014 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load(
"@io_bazel_rules_go//go/private:context.bzl",
"go_context",
)
load(
"@io_bazel_rules_go//go/private:providers.bzl",
"GoPath",
)
load(
"@io_bazel_rules_go//go/private:rules/rule.bzl",
"go_rule",
)
def _go_vet_generate_impl(ctx):
    """Rule implementation: writes a bash script that runs `go tool vet`.

    The script exports a GOPATH assembled from every GoPath provider in
    `data` and vets all packages found there. Returns the script as the
    rule's output plus runfiles so an sh_test can execute it.
    """
    print("""
EXPERIMENTAL: the go_vet_test rule is still very experimental
Please do not rely on it for production use, but feel free to use it and file issues
""")
    go = go_context(ctx)
    script_file = go.declare_file(go, ext=".bash")
    files = ctx.files.data + go.stdlib.files
    # BUG FIX: `gopath = []` was initialized twice; one assignment removed.
    gopath = []
    packages = []
    for data in ctx.attr.data:
        entry = data[GoPath]
        gopath += [entry.gopath]
        packages += [package.dir for package in entry.packages]
    ctx.actions.write(output=script_file, is_executable=True, content="""
export GOPATH="{gopath}"
{go} tool vet {packages}
""".format(
        go=go.go.short_path,
        # BUG FIX: the template was '$(pwd)/{})' — the stray ')' produced
        # malformed GOPATH entries like '$(pwd)/path)'.
        gopath=":".join(['$(pwd)/{}'.format(entry) for entry in gopath]),
        packages=" ".join(packages),
    ))
    return struct(
        files = depset([script_file]),
        runfiles = ctx.runfiles(files, collect_data = True),
    )
# Private rule wrapping _go_vet_generate_impl: produces the vet script that
# the go_vet_test macro wires into an sh_test.
_go_vet_generate = go_rule(
    _go_vet_generate_impl,
    attrs = {
        # Targets carrying a GoPath provider; their packages are vetted.
        "data": attr.label_list(
            providers = [GoPath],
            cfg = "data",
        ),
    },
)
def go_vet_test(name, data, **kwargs):
    """Macro: defines an sh_test named `name` that runs `go tool vet`.

    A private _go_vet_generate target (tagged "manual" so it is not built by
    default) emits the script, and the sh_test executes it with `data` as
    runfiles. Extra keyword args are forwarded to the sh_test.
    """
    script_name = "generate_"+name
    _go_vet_generate(
        name=script_name,
        data=data,
        tags = ["manual"],
    )
    native.sh_test(
        name=name,
        srcs=[script_name],
        data=data,
        **kwargs
    ) | load('@io_bazel_rules_go//go/private:context.bzl', 'go_context')
load('@io_bazel_rules_go//go/private:providers.bzl', 'GoPath')
load('@io_bazel_rules_go//go/private:rules/rule.bzl', 'go_rule')
def _go_vet_generate_impl(ctx):
print('\nEXPERIMENTAL: the go_vet_test rule is still very experimental\nPlease do not rely on it for production use, but feel free to use it and file issues\n')
go = go_context(ctx)
script_file = go.declare_file(go, ext='.bash')
gopath = []
files = ctx.files.data + go.stdlib.files
gopath = []
packages = []
for data in ctx.attr.data:
entry = data[GoPath]
gopath += [entry.gopath]
packages += [package.dir for package in entry.packages]
ctx.actions.write(output=script_file, is_executable=True, content='\nexport GOPATH="{gopath}"\n{go} tool vet {packages}\n'.format(go=go.go.short_path, gopath=':'.join(['$(pwd)/{})'.format(entry) for entry in gopath]), packages=' '.join(packages)))
return struct(files=depset([script_file]), runfiles=ctx.runfiles(files, collect_data=True))
_go_vet_generate = go_rule(_go_vet_generate_impl, attrs={'data': attr.label_list(providers=[GoPath], cfg='data')})
def go_vet_test(name, data, **kwargs):
script_name = 'generate_' + name
_go_vet_generate(name=script_name, data=data, tags=['manual'])
native.sh_test(name=name, srcs=[script_name], data=data, **kwargs) |
"""
==========================
kikola.contrib.basicsearch
==========================
Application for lightweight search over the models that exist in your project.
Installation
============
1. Add ``kikola.contrib.basicsearch`` to your project's ``settings``
``INSTALLED_APPS`` var.
2. Set up ``SEARCH_MODELS`` var in your project's ``settings`` module. (see
default config for ``SEARCH_MODELS`` below_)
3. Include ``kikola.contrib.basicsearch.urls`` in your project's
``ROOT_URLCONF`` module::
from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'^search/', include('kikola.contrib.basicsearch.urls')),
)
4. Go to search url and enjoy :)
.. _below: `SEARCH_MODELS`_
Configuration
=============
You can customize the ``basicsearch`` application with the following settings variables:
SEARCH_FORM
-----------
Full path to default ``SearchForm`` class.
By default uses ``kikola.contrib.basicsearch.forms.SearchForm`` class.
SEARCH_MODELS
-------------
**Required.** Sets up models for searching. For example to search over
Django's FlatPages use next config::
SEARCH_MODELS = {
# Use same format as ``app_label`` in serialized data
'flatpages.FlatPage': {
# Object description in search results
'description': '{{ obj.content|truncatewords_html:20 }}',
# Object fields to search
'fields': ('title', 'content'),
# Use fulltext search (use this only when
# ``settings.DATABASE_ENGINE == 'mysql'``)
'fulltext': False,
# Object link in search results (by default
# ``{{ obj.get_absolute_url }}`` used)
'link': '{% url flatpage obj.url %}',
# Priority. Useful when search not over one model. Objects with
# higher priority rendering first in search results.
'priority': 0,
# Object title in search results (by default ``{{ obj }}`` used)
'title': '{{ obj.title }}',
# Trigger. Custom filter to found search results. For example,
# current trigger enables search only over flatpages with
# ``enable_comments``.
#
# To disable trigger, set ``'trigger': None``
'trigger': lambda obj: obj.enable_comments,
}
}
SEARCH_NOT_FOUND_MESSAGE
------------------------
Default search "not found" message. By default: ``Any objects was found by
your query.``
SEARCH_QUERY_MIN_LENGTH
-----------------------
Minimal length of search query. By default: 3.
SEARCH_QUERY_MAX_LENGTH
-----------------------
Maximal length of search query. By default: 64.
SEARCH_RESULTS_PER_PAGE
-----------------------
Number of search results, rendering at search page. By default: 10.
SEARCH_TEMPLATE_NAME
--------------------
Template used for rendering search results. By default:
``basicsearch/search.html``.
"""
| """
==========================
kikola.contrib.basicsearch
==========================
Application to lightweight search over models, existed in your project.
Installation
============
1. Add ``kikola.contrib.basicsearch`` to your project's ``settings``
``INSTALLED_APPS`` var.
2. Set up ``SEARCH_MODELS`` var in your project's ``settings`` module. (see
default config for ``SEARCH_MODELS`` below_)
3. Include ``kikola.contrib.basicsearch.urls`` in your project's
``ROOT_URLCONF`` module::
from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'^search/', include('kikola.contrib.basicsearch.urls')),
)
4. Go to search url and enjoy :)
.. _below: `SEARCH_MODELS`_
Configuration
=============
You can customize ``basicsearch`` application by next setting vars
SEARCH_FORM
-----------
Full path to default ``SearchForm`` class.
By default uses ``kikola.contrib.basicsearch.forms.SearchForm`` class.
SEARCH_MODELS
-------------
**Required.** Sets up models for searching. For example to search over
Django's FlatPages use next config::
SEARCH_MODELS = {
# Use same format as ``app_label`` in serialized data
'flatpages.FlatPage': {
# Object description in search results
'description': '{{ obj.content|truncatewords_html:20 }}',
# Object fields to search
'fields': ('title', 'content'),
# Use fulltext search (use this only when
# ``settings.DATABASE_ENGINE == 'mysql'``)
'fulltext': False,
# Object link in search results (by default
# ``{{ obj.get_absolute_url }}`` used)
'link': '{% url flatpage obj.url %}',
# Priority. Useful when search not over one model. Objects with
# higher priority rendering first in search results.
'priority': 0,
# Object title in search results (by default ``{{ obj }}`` used)
'title': '{{ obj.title }}',
# Trigger. Custom filter to found search results. For example,
# current trigger enables search only over flatpages with
# ``enable_comments``.
#
# To disable trigger, set ``'trigger': None``
'trigger': lambda obj: obj.enable_comments,
}
}
SEARCH_NOT_FOUND_MESSAGE
------------------------
Default search "not found" message. By default: ``Any objects was found by
your query.``
SEARCH_QUERY_MIN_LENGTH
-----------------------
Minimal length of search query. By default: 3.
SEARCH_QUERY_MAX_LENGTH
-----------------------
Maximal length of search query. By default: 64.
SEARCH_RESULTS_PER_PAGE
-----------------------
Number of search results, rendering at search page. By default: 10.
SEARCH_TEMPLATE_NAME
--------------------
Template used for rendering search results. By default:
``basicsearch/search.html``.
""" |
"""
##################################################################################################
# Copyright Info : Copyright (c) Davar Lab @ Hikvision Research Institute. All rights reserved.
# Filename : lgpma_pub.py
# Abstract : Model settings for LGPMA detector on PubTabNet
# Current Version: 1.0.0
# Date : 2021-09-18
##################################################################################################
"""
# Inherit everything from the base LGPMA config; only overrides appear below.
_base_ = "./lgpma_base.py"
# Dataset settings — the 'path/to/...' strings are placeholders the user must
# replace with real annotation-file and image-prefix paths.
data = dict(
    samples_per_gpu=3,
    workers_per_gpu=1,
    train=dict(
        ann_file='path/to/PubTabNet_datalist_train_detection.json',
        img_prefix='path/to/PubTabNet'),
    val=dict(
        ann_file='path/to/PubTabNet_2.0.0_val.jsonl',
        img_prefix='path/to/PubTabNet'),
    test=dict(
        samples_per_gpu=1,
        ann_file='path/to/PubTabNet_2.0.0_val.jsonl',
        img_prefix='path/to/PubTabNet/Images/val/')
)
# yapf:enable
# runtime settings
# Checkpoint every interval (epochs); filename template fills in the number.
checkpoint_config = dict(interval=1, filename_tmpl='checkpoint/maskrcnn-lgpma-pub-e{}.pth')
work_dir = 'path/to/workdir'
| """
##################################################################################################
# Copyright Info : Copyright (c) Davar Lab @ Hikvision Research Institute. All rights reserved.
# Filename : lgpma_pub.py
# Abstract : Model settings for LGPMA detector on PubTabNet
# Current Version: 1.0.0
# Date : 2021-09-18
##################################################################################################
"""
_base_ = './lgpma_base.py'
data = dict(samples_per_gpu=3, workers_per_gpu=1, train=dict(ann_file='path/to/PubTabNet_datalist_train_detection.json', img_prefix='path/to/PubTabNet'), val=dict(ann_file='path/to/PubTabNet_2.0.0_val.jsonl', img_prefix='path/to/PubTabNet'), test=dict(samples_per_gpu=1, ann_file='path/to/PubTabNet_2.0.0_val.jsonl', img_prefix='path/to/PubTabNet/Images/val/'))
checkpoint_config = dict(interval=1, filename_tmpl='checkpoint/maskrcnn-lgpma-pub-e{}.pth')
work_dir = 'path/to/workdir' |
class Solution:
    def findLHS(self, nums: List[int]) -> int:
        """Return the length of the longest harmonious subsequence of nums.

        A harmonious subsequence is one whose maximum and minimum differ by
        exactly 1, so the answer is the best counts[x] + counts[x + 1] over
        all values x whose successor x + 1 also occurs; 0 if none does.
        """
        # Counter replaces the manual frequency loop, and `x + 1 in counts`
        # replaces the fragile string-sentinel test d.get(i + 1, "E") != "E".
        counts = collections.Counter(nums)
        return max(
            (counts[x] + counts[x + 1] for x in counts if x + 1 in counts),
            default=0,
        )
| class Solution:
def find_lhs(self, nums: List[int]) -> int:
d = collections.defaultdict(lambda : 0)
for i in range(0, len(nums)):
d[nums[i]] += 1
maxi = 0
for i in d.keys():
if d.get(i + 1, 'E') != 'E':
maxi = max(maxi, d[i] + d[i + 1])
return maxi |
# Per-scale training hyper-parameters; the parallel lists suggest a
# progressively-grown GAN schedule — TODO confirm against the trainer.
model_config = {
    # alpha (fade-in) schedule
    'alpha_jump_mode': "linear",
    'iter_alpha_jump': [],
    'alpha_jump_vals': [],
    'alpha_n_jumps': [0, 600, 600, 600, 600, 600, 600, 600, 600],
    'alpha_size_jumps': [0, 32, 32, 32, 32, 32, 32, 32, 32, 32],
    # base configuration
    'max_iter_at_scale': [48000, 96000, 96000, 96000, 96000, 192000, 192000, 192000, 200000],
    'scaling_layer_channels': [512, 512, 512, 512, 256, 128, 64, 32, 16],
    'mini_batch_size': [16, 16, 16, 16, 16, 8, 8, 8, 8],
    'dim_latent_vector': 512,
    'lambda_gp': 10,
    'epsilon_d': 0.001,
}
model_config["learning_rate"] = 0.001 | model_config = {}
model_config['alpha_jump_mode'] = 'linear'
model_config['iter_alpha_jump'] = []
model_config['alpha_jump_vals'] = []
model_config['alpha_n_jumps'] = [0, 600, 600, 600, 600, 600, 600, 600, 600]
model_config['alpha_size_jumps'] = [0, 32, 32, 32, 32, 32, 32, 32, 32, 32]
model_config['max_iter_at_scale'] = [48000, 96000, 96000, 96000, 96000, 192000, 192000, 192000, 200000]
model_config['scaling_layer_channels'] = [512, 512, 512, 512, 256, 128, 64, 32, 16]
model_config['mini_batch_size'] = [16, 16, 16, 16, 16, 8, 8, 8, 8]
model_config['dim_latent_vector'] = 512
model_config['lambda_gp'] = 10
model_config['epsilon_d'] = 0.001
model_config['learning_rate'] = 0.001 |
"""
If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below 1000.
"""
def multiples_sum():
    """Return the sum of all natural numbers below 1000 divisible by 3 or 5."""
    total = 0
    for n in range(1000):
        if n % 3 == 0 or n % 5 == 0:
            total += n
    return total
| """
If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below 1000.
"""
def multiples_sum():
return sum((i for i in range(1000) if i % 3 == 0 or i % 5 == 0)) |
class LineItem(object):
    """An order line item: a described product sold by weight at a unit price."""

    def __init__(self, description, weight, price):
        self.description = description
        self.set_weight(weight)  # route through the setter so weight is validated
        self.price = price

    def subtotal(self):
        """Return weight * unit price for this line."""
        return self.get_weight() * self.price

    def get_weight(self):
        return self.__weight

    def set_weight(self, value):
        """Set the weight; raises ValueError unless value > 0."""
        if value > 0:
            self.__weight = value
        else:
            raise ValueError('value must be > 0')

    # Pythonic attribute access (item.weight) while keeping the legacy
    # get_weight/set_weight accessors for existing callers.
    weight = property(get_weight, set_weight)
class Lineitem(object):
    """An order line item: a described product sold by weight at a unit price."""

    def __init__(self, description, weight, price):
        self.description = description
        self.set_weight(weight)  # route through the setter so weight is validated
        self.price = price

    def subtotal(self):
        """Return weight * unit price for this line."""
        return self.get_weight() * self.price

    def get_weight(self):
        return self.__weight

    def set_weight(self, value):
        """Set the weight; raises ValueError unless value > 0."""
        if value > 0:
            self.__weight = value
        else:
            # BUG FIX: was `raise value_error(...)` — an undefined name, so
            # invalid input raised NameError instead of the intended error.
            raise ValueError('value must be > 0')
# Module-level placeholders, initialised to empty strings.
# NOTE(review): names suggest stdout/stdin/stderr redirection targets —
# confirm against the code that assigns them.
_out_ = ""
_in_ = ""
_err_ = ""
_root_ = "" | _out_ = ''
_in_ = ''
_err_ = ''
_root_ = '' |
class FrontMiddleBackQueue:
    """Queue supporting push/pop at front, middle, and back.

    Pushes insert the middle at index len // 2; pops take the middle from
    index (len - 1) // 2. Every pop returns -1 when the queue is empty.
    """

    def __init__(self):
        self.queue = []

    def pushFront(self, val: int):
        self.queue.insert(0, val)

    def pushMiddle(self, val: int):
        middle = len(self.queue) // 2
        self.queue.insert(middle, val)

    def pushBack(self, val: int):
        self.queue.append(val)

    def popFront(self):
        if not self.queue:
            return -1
        return self.queue.pop(0)

    def popMiddle(self):
        if not self.queue:
            return -1
        return self.queue.pop((len(self.queue) - 1) // 2)

    def popBack(self):
        if not self.queue:
            return -1
        return self.queue.pop()
# Your FrontMiddleBackQueue object will be instantiated and called as such:
# obj = FrontMiddleBackQueue()
# obj.pushFront(val)
# obj.pushMiddle(val)
# obj.pushBack(val)
# param_4 = obj.popFront()
# param_5 = obj.popMiddle()
# param_6 = obj.popBack() | class Frontmiddlebackqueue:
def __init__(self):
self.queue = []
def push_front(self, val: int):
self.queue.insert(0, val)
def push_middle(self, val: int):
self.queue.insert(len(self.queue) // 2, val)
def push_back(self, val: int):
self.queue.append(val)
def pop_front(self):
return (self.queue or [-1]).pop(0)
def pop_middle(self):
return (self.queue or [-1]).pop((len(self.queue) - 1) // 2)
def pop_back(self):
return (self.queue or [-1]).pop() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.