hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f58e6c43e9e45e8a653e2d4760122c3d30af0762 | 8,098 | py | Python | shapeTesting/v2.py | Ravenshard/CS412T1C5 | ac22d27f7d73f14a1b67e48746cfe3058350e4ca | [
"MIT"
] | null | null | null | shapeTesting/v2.py | Ravenshard/CS412T1C5 | ac22d27f7d73f14a1b67e48746cfe3058350e4ca | [
"MIT"
] | null | null | null | shapeTesting/v2.py | Ravenshard/CS412T1C5 | ac22d27f7d73f14a1b67e48746cfe3058350e4ca | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import signal
import rospy
import smach
import smach_ros
import cv2
import cv_bridge
import numpy
import time
import math
from sensor_msgs.msg import Image
from enum import Enum
# Shared CvBridge instance for converting ROS Image messages to OpenCV arrays.
bridge = cv_bridge.CvBridge()
# Set by the SIGINT handler (request_shutdown) to ask loops to exit cleanly.
shutdown_requested = False

# Reference shape images, loaded as grayscale (imread flag 0).
# c1* images are the models for camera 1 (3D sensor), c2* for camera 2
# (logitech).  Paths are relative to the package root.
c1Triange = cv2.imread('./shapeTesting/c1Triangle.png', 0)
c2Triange = cv2.imread('./shapeTesting/c2Triangle.png', 0)
c1Square = cv2.imread('./shapeTesting/c1Square.png', 0)
c2Square = cv2.imread('./shapeTesting/c2Square.png', 0)
c1Circle = cv2.imread('./shapeTesting/c1Circle.png', 0)
c2Circle = cv2.imread('./shapeTesting/c2Circle.png', 0)

# Alternate relative paths, kept for running from inside shapeTesting/:
#c1Triange = cv2.imread('c1Triangle.png', 0)
#c2Triange = cv2.imread('./shapeTesting/c2Triangle.png', 0)
#c1Square = cv2.imread('c1Square.png', 0)
#c2Square = cv2.imread('./shapeTesting/c2Square.png', 0)
#c1Circle = cv2.imread('c1Circle.png', 0)
#c2Circle = cv2.imread('./shapeTesting/c2Circle.png', 0)

# Most-recent contours published by the camera callbacks below:
# cntG  - green contour from camera 1
# cntR1 - red contour from camera 1
# cntR2 - red contour from camera 2 (logitech)
cntG = None
cntR1 = None
cntR2 = None
def request_shutdown(sig, frame):
    """Signal handler: flag that the program should shut down cleanly."""
    global shutdown_requested
    shutdown_requested = True
def initC1Images():
    """Build the reference contours for camera 1 (3D sensor) shape models.

    Returns:
        tuple: (triangle, square, circle) contours, each extracted from the
        thresholded model image for camera 1.
    """
    reference_contours = []
    for model in (c1Triange, c1Square, c1Circle):
        mask = cv2.threshold(model, 250, 255, 0)[1]
        _, found, _ = cv2.findContours(mask, 2, 1)
        reference_contours.append(found[0])
    return tuple(reference_contours)
def initC2Images():
    ''' Initialize the model images for triangle, square and circle.
        Use the shapes for c2 (camera 2, logitech)

    Returns:
        tuple: (triangle, square, circle) contours extracted from the
        thresholded camera-2 model images.
    '''
    # BUG FIX: this function previously thresholded c1Triange (the
    # camera-1 triangle model), so camera-2 triangle matching compared
    # against the wrong reference image.  Use the c2 model throughout.
    threshT = cv2.threshold(c2Triange, 250, 255, 0)[1]
    threshS = cv2.threshold(c2Square, 250, 255, 0)[1]
    threshC = cv2.threshold(c2Circle, 250, 255, 0)[1]
    _, contoursT, hierarchyT = cv2.findContours(threshT, 2, 1)
    _, contoursS, hierarchyS = cv2.findContours(threshS, 2, 1)
    _, contoursC, hierarchyC = cv2.findContours(threshC, 2, 1)
    cntT = contoursT[0]
    cntS = contoursS[0]
    cntC = contoursC[0]
    return (cntT, cntS, cntC)
def compareImages(cntT, cntS, cntC, cntI):
    ''' Compare cntI with the other shapes and return the one which cntI is
        the closest to

    Parameters:
        cntT (contour): triangle contour as found with cv2.findContours()
        cntS (contour): square contour as found with cv2.findContours()
        cntC (contour): circle contour as found with cv2.findContours()
        cntI (contour): the input contour to classify

    Returns:
        string: name of the best-matching shape ("circle", "square"
        or "triangle"); lower matchShapes score means closer match
    '''
    scores = {
        "triangle": cv2.matchShapes(cntI, cntT, 1, 0.0),
        "square": cv2.matchShapes(cntI, cntS, 1, 0.0),
        "circle": cv2.matchShapes(cntI, cntC, 1, 0.0),
    }
    best = min(scores.values())
    # Preserve the original tie-break order: circle, then square, then
    # triangle.  The old trailing "return 'triangle'" after these three
    # exhaustive checks was unreachable and has been removed.
    for shape in ("circle", "square", "triangle"):
        if scores[shape] == best:
            return shape
def shapeDetection(colour, camera, p=False):
    ''' detect a shape given the colour and which camera we are using

    Parameters:
        colour (string): either "red" or "green"
        camera (int): either 1 or 2
        p (bool): if True, print the vote tally for each sampling pass

    Returns:
        string: the shape we detected, or "No Shape found" if no shape
        reached the confidence threshold within maxCount passes

    Raises:
        ValueError: if camera is not 1/2 or colour is not "green"/"red"
        (previously these cases crashed with NameError on an unbound local)

    NOTE: cntG, cntR1 and cntR2 are global variables updated by the camera
        callbacks; they are re-read on every vote so the latest frame's
        contour is used.
    '''
    global cntG, cntR1, cntR2
    maxCount = 10
    loop = 1000

    # The model images are static module globals, so their contours are
    # loop-invariant; previously they were recomputed on every one of the
    # 1000 iterations of every pass, which was pure overhead.
    if camera == 1:
        (cntT, cntS, cntC) = initC1Images()
    elif camera == 2:
        (cntT, cntS, cntC) = initC2Images()
    else:
        raise ValueError("camera must be 1 or 2, got {}".format(camera))

    for _ in range(maxCount):
        results = {"triangle": 0, "square": 0, "circle": 0}
        for _ in range(loop):
            # Re-read the contour globals each vote: the ROS callbacks may
            # update them concurrently with newer frames.
            if colour == "green":
                cntI = cntG
            elif colour == "red":
                cntI = cntR1 if camera == 1 else cntR2
            else:
                raise ValueError(
                    "colour must be 'green' or 'red', got {}".format(colour)
                )
            results[compareImages(cntT, cntS, cntC, cntI)] += 1
        if p:
            print("results:\n{}".format(results))
        # Require a >90% majority (out of 1000 votes) before committing.
        confidence = max(results.values())
        if confidence > 900:
            if confidence == results["circle"]:
                return "circle"
            if confidence == results["square"]:
                return "square"
            if confidence == results["triangle"]:
                return "triangle"
    return "No Shape found"
def logitechRed_callback(msg):
    """ROS image callback for camera 2 (logitech): isolate red regions and
    store the first detected contour in the global cntR2.

    Parameters:
        msg (sensor_msgs.msg.Image): raw camera frame from the subscriber.
    """
    global bridge, cntR2
    image = bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8')
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    h, w, d = image.shape
    # Red wraps around the hue axis, so mask both ends and OR them together.
    upper_red_a = numpy.array([20, 255, 255])
    lower_red_a = numpy.array([0, 150, 50])
    red_mask_a = cv2.inRange(hsv, lower_red_a, upper_red_a)
    upper_red_b = numpy.array([255, 255, 255])
    lower_red_b = numpy.array([150, 150, 50])
    red_mask_b = cv2.inRange(hsv, lower_red_b, upper_red_b)
    red_mask = cv2.bitwise_or(red_mask_a, red_mask_b)
    blur = cv2.medianBlur(red_mask, 7)
    # Zero out the outer 10% on each side of the frame to suppress edge
    # noise.
    # BUG FIX: the right-hand slice was "9*w/10*w : w", whose start index is
    # far beyond the image width, so the right edge was never masked; it
    # should start at 9/10 of the width.  Floor division keeps the slice
    # indices integral under Python 3.
    blur[0:h, 0:w // 10] = 0
    blur[0:h, 9 * w // 10:w] = 0
    thresh = cv2.threshold(blur, 250, 255, 0)[1]
    image2, contours, hierarchy = cv2.findContours(thresh, 2, 1)
    if len(contours) > 0:
        cntR2 = contours[0]
    #cv2.imshow("log red window", blur)
    #cv2.waitKey(3)
    return
def cam1red_callback(msg):
    """ROS image callback for camera 1: isolate red regions (ignoring the
    top ~35% of the frame) and store the first detected contour in the
    global cntR1.

    Parameters:
        msg (sensor_msgs.msg.Image): raw camera frame from the subscriber.
    """
    global bridge, cntR1
    image = bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8')
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    # FIX: the shape was unpacked twice on consecutive lines; once is enough.
    h, w, d = image.shape
    # Red wraps around the hue axis, so mask both ends and OR them together.
    upper_red_a = numpy.array([20, 255, 255])
    lower_red_a = numpy.array([0, 150, 50])
    red_mask_a = cv2.inRange(hsv, lower_red_a, upper_red_a)
    upper_red_b = numpy.array([255, 255, 255])
    lower_red_b = numpy.array([150, 150, 50])
    red_mask_b = cv2.inRange(hsv, lower_red_b, upper_red_b)
    red_mask = cv2.bitwise_or(red_mask_a, red_mask_b)
    blur = cv2.medianBlur(red_mask, 7)
    # Mask out the top 35% of the frame (background above the work area).
    blur[0:int((3.5 / 10.0) * h), 0:w] = 0
    thresh = cv2.threshold(blur, 250, 255, 0)[1]
    image2, contours, hierarchy = cv2.findContours(thresh, 2, 1)
    if len(contours) > 0:
        cntR1 = contours[0]
    #cv2.imshow("red window", blur)
    #cv2.waitKey(3)
    return
def cam1green_callback(msg):
    """ROS image callback for camera 1: isolate green regions (ignoring the
    top 30% of the frame) and store the first detected contour in the
    global cntG.
    """
    global bridge, cntG
    image = bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8')
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    h, w, d = image.shape
    lower_green = numpy.array([50, 80, 0])
    upper_green = numpy.array([150, 255, 255])
    green_mask = cv2.inRange(hsv, lower_green, upper_green)
    smoothed = cv2.medianBlur(green_mask, 7)
    # Mask out the top 30% of the frame before thresholding.
    smoothed[0:int((3.0 / 10.0) * h), 0:w] = 0
    binary = cv2.threshold(smoothed, 250, 255, 0)[1]
    _, contours, _ = cv2.findContours(binary, 2, 1)
    if len(contours) > 0:
        cntG = contours[0]
    #cv2.imshow("green window", green_mask) # TODO: remove
    #cv2.waitKey(3)
    return
def count_objects(mask, threshold=1000, canvas=None):
    """Count the number of distinct objects in the boolean image.

    Objects whose contour area (zeroth moment) does not exceed `threshold`
    are ignored.  If `canvas` is given, a filled red circle is drawn on it
    at each counted object's centroid.
    """
    _, contours, _ = cv2.findContours(mask, 1, 2)
    big_moments = [
        m
        for m in (cv2.moments(cont) for cont in contours)
        if m["m00"] > threshold
    ]
    if canvas is not None:
        for moment in big_moments:
            centre = (
                int(moment["m10"] / moment["m00"]),
                int(moment["m01"] / moment["m00"]),
            )
            cv2.circle(canvas, centre, 20, (0, 0, 255), -1)
    return len(big_moments)
def main():
    """Initialise the ROS node, subscribe to the camera-1 RGB topic and run
    one round of green-shape detection, printing the result.
    """
    rospy.init_node('attempt')
    # Alternative camera topics, kept for reference:
    # rospy.Subscriber('cv_camera/image_raw', Image, logitechRed_callback)
    # rospy.Subscriber('usb_cam/image_raw', Image, logitechRed_callback)
    image_sub = rospy.Subscriber(
        'camera/rgb/image_raw', Image, cam1green_callback
    )
    colour = "green"
    camera = 1
    detected = shapeDetection(colour, camera, p=True)
    print(detected)


if __name__ == '__main__':
    main()
| 33.882845 | 80 | 0.637194 |
7d7b23ff25464d0ffd9bf7372feddce37053fe56 | 4,413 | py | Python | ocs_ci/framework/main.py | Gangadhar-ibm/ocs-ci | cb8ff7abcdb0d29617300c914892aff2289a98e7 | [
"MIT"
] | null | null | null | ocs_ci/framework/main.py | Gangadhar-ibm/ocs-ci | cb8ff7abcdb0d29617300c914892aff2289a98e7 | [
"MIT"
] | null | null | null | ocs_ci/framework/main.py | Gangadhar-ibm/ocs-ci | cb8ff7abcdb0d29617300c914892aff2289a98e7 | [
"MIT"
] | null | null | null | import argparse
import os
import sys
import time
import pytest
import yaml
from ocs_ci import framework
from getpass import getuser
from ocs_ci.utility import utils
from ocs_ci.ocs.constants import OCP_VERSION_CONF_DIR, OCS_VERSION_CONF_DIR
from ocs_ci.ocs.exceptions import MissingRequiredConfigKeyError
def check_config_requirements():
    """
    Checking if all required parameters were passed

    Raises:
        MissingRequiredConfigKeyError: In case of some required parameter is
            not defined.

    """
    # Only vSphere deployments have extra mandatory ENV_DATA keys.
    required_vsphere_keys = (
        "vsphere_user",
        "vsphere_password",
        "vsphere_datacenter",
        "vsphere_cluster",
        "vsphere_datastore",
    )
    try:
        if hasattr(framework.config, "ENV_DATA") and (
            framework.config.ENV_DATA.get("platform", "").lower() == "vsphere"
        ):
            for key in required_vsphere_keys:
                # Indexing raises KeyError for any missing required key.
                framework.config.ENV_DATA[key]
    except KeyError as ex:
        raise MissingRequiredConfigKeyError(ex)
def load_config(config_files):
    """
    Load each file into the framework config, in the order given
    (later files override keys set by earlier ones).

    Args:
        config_files (list): config file paths

    """
    for path in config_files:
        expanded = os.path.abspath(os.path.expanduser(path))
        with open(expanded) as stream:
            framework.config.update(yaml.safe_load(stream))
def init_ocsci_conf(arguments=None):
    """
    Update the config object with any files passed via the CLI

    Args:
        arguments (list): Arguments for pytest execution

    """
    # Parse only the ocs-ci specific options; everything else is left for
    # pytest (hence parse_known_args and add_help=False).
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument("--ocsci-conf", action="append", default=[])
    parser.add_argument(
        "--ocs-version",
        action="store",
        choices=["4.2", "4.3", "4.4", "4.5", "4.6", "4.7", "4.8"],
    )
    parser.add_argument("--ocs-registry-image")
    parser.add_argument("--flexy-env-file", default="", help="Path to flexy env file")
    args, unknown = parser.parse_known_args(args=arguments)
    ocs_version = args.ocs_version
    # User-supplied config files are loaded first; version-specific configs
    # below are layered on top of them.
    load_config(args.ocsci_conf)
    ocs_registry_image = framework.config.DEPLOYMENT.get("ocs_registry_image")
    # The CLI flag takes precedence over the config-file value.
    if args.ocs_registry_image:
        ocs_registry_image = args.ocs_registry_image
    if ocs_registry_image:
        ocs_version_from_image = utils.get_ocs_version_from_image(ocs_registry_image)
        # An explicit --ocs-version that disagrees with the image's version
        # is tolerated, but CSV version checks are disabled.
        if ocs_version and ocs_version != ocs_version_from_image:
            framework.config.DEPLOYMENT["ignore_csv_mismatch"] = True
        if not ocs_version:
            ocs_version = ocs_version_from_image
    # Layer the OCS-version-specific config on top of the user configs.
    if ocs_version:
        version_config_file = os.path.join(
            OCS_VERSION_CONF_DIR, f"ocs-{ocs_version}.yaml"
        )
        load_config([version_config_file])
    # OCP version: config-file override wins over the deployment default.
    ocp_version = framework.config.DEPLOYMENT["default_ocp_version"]
    if "ocp_version" in framework.config.DEPLOYMENT:
        ocp_version = framework.config.DEPLOYMENT["ocp_version"]
    ocp_version_config = os.path.join(
        OCP_VERSION_CONF_DIR, f"ocp-{ocp_version}-config.yaml"
    )
    load_config([ocp_version_config])
    if args.flexy_env_file:
        framework.config.ENV_DATA["flexy_env_file"] = args.flexy_env_file
    # Unique id for this run, used in log paths / test-run naming.
    framework.config.RUN["run_id"] = int(time.time())
    # Normalize bin_dir to an absolute path and expose it on PATH.
    bin_dir = framework.config.RUN.get("bin_dir")
    if bin_dir:
        framework.config.RUN["bin_dir"] = os.path.abspath(
            os.path.expanduser(framework.config.RUN["bin_dir"])
        )
        utils.add_path_to_env_path(framework.config.RUN["bin_dir"])
    check_config_requirements()
def main(argv=None):
    """Entry point: initialise ocs-ci configuration and hand off to pytest.

    Args:
        argv (list): CLI arguments; defaults to ``sys.argv[1:]``.

    Returns:
        int: the pytest exit code.
    """
    arguments = argv or sys.argv[1:]
    init_ocsci_conf(arguments)
    pytest_logs_dir = utils.ocsci_log_path()
    utils.create_directory_path(framework.config.RUN["log_dir"])
    launch_name = utils.get_testrun_name() + getuser()
    # Inject the ocs-ci pytest plugins and reporting options before
    # delegating the (mutated) argument list to pytest.
    extra_args = [
        "-p",
        "ocs_ci.framework.pytest_customization.ocscilib",
        "-p",
        "ocs_ci.framework.pytest_customization.marks",
        "-p",
        "ocs_ci.framework.pytest_customization.reports",
        "--logger-logsdir",
        pytest_logs_dir,
        "--rp-launch",
        launch_name,
    ]
    arguments.extend(extra_args)
    return pytest.main(arguments)
| 34.476563 | 86 | 0.670746 |
213c20294025d1118b5894871d56c75ca2ff422e | 66,620 | py | Python | pandas/core/resample.py | shalarewicz/pandas | 070341cf4958652343f798c74c04a8c15de2fd04 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2021-05-05T08:34:47.000Z | 2021-05-05T08:34:47.000Z | pandas/core/resample.py | shalarewicz/pandas | 070341cf4958652343f798c74c04a8c15de2fd04 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | pandas/core/resample.py | shalarewicz/pandas | 070341cf4958652343f798c74c04a8c15de2fd04 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | from __future__ import annotations
import copy
from datetime import timedelta
from textwrap import dedent
from typing import (
TYPE_CHECKING,
Callable,
Hashable,
no_type_check,
)
import numpy as np
from pandas._libs import lib
from pandas._libs.tslibs import (
BaseOffset,
IncompatibleFrequency,
NaT,
Period,
Timedelta,
Timestamp,
to_offset,
)
from pandas._typing import (
FrameOrSeries,
T,
TimedeltaConvertibleTypes,
TimestampConvertibleTypes,
final,
)
from pandas.compat.numpy import function as nv
from pandas.errors import AbstractMethodError
from pandas.util._decorators import (
Appender,
Substitution,
doc,
)
from pandas.core.dtypes.generic import (
ABCDataFrame,
ABCSeries,
)
import pandas.core.algorithms as algos
from pandas.core.apply import ResamplerWindowApply
from pandas.core.base import (
DataError,
PandasObject,
)
import pandas.core.common as com
from pandas.core.generic import (
NDFrame,
_shared_docs,
)
from pandas.core.groupby.generic import SeriesGroupBy
from pandas.core.groupby.groupby import (
BaseGroupBy,
GroupBy,
_pipe_template,
get_groupby,
)
from pandas.core.groupby.grouper import Grouper
from pandas.core.groupby.ops import BinGrouper
from pandas.core.indexes.api import Index
from pandas.core.indexes.datetimes import (
DatetimeIndex,
date_range,
)
from pandas.core.indexes.period import (
PeriodIndex,
period_range,
)
from pandas.core.indexes.timedeltas import (
TimedeltaIndex,
timedelta_range,
)
from pandas.tseries.frequencies import (
is_subperiod,
is_superperiod,
)
from pandas.tseries.offsets import (
DateOffset,
Day,
Nano,
Tick,
)
if TYPE_CHECKING:
from typing import Literal
_shared_docs_kwargs: dict[str, str] = {}
class Resampler(BaseGroupBy, PandasObject):
    """
    Class for resampling datetimelike data, a groupby-like operation.
    See aggregate, transform, and apply functions on this object.

    It's easiest to use obj.resample(...) to use Resampler.

    Parameters
    ----------
    obj : Series or DataFrame
    groupby : TimeGrouper
    axis : int, default 0
    kind : str or None
        'period', 'timestamp' to override default index treatment

    Returns
    -------
    a Resampler of the appropriate type

    Notes
    -----
    After resampling, see aggregate, apply, and transform functions.
    """

    # set by _get_binner in __init__
    grouper: BinGrouper
    exclusions: frozenset[Hashable] = frozenset()  # for SelectionMixin compat

    # to the groupby descriptor
    # attributes forwarded to self.groupby by __getattr__ and copied by
    # _shallow_copy
    _attributes = [
        "freq",
        "axis",
        "closed",
        "label",
        "convention",
        "loffset",
        "kind",
        "origin",
        "offset",
    ]
    def __init__(
        self,
        obj: FrameOrSeries,
        groupby: TimeGrouper,
        axis: int = 0,
        kind=None,
        **kwargs,
    ):
        # NOTE: **kwargs is accepted for call-compatibility but is not used
        # in this initializer.
        self.groupby = groupby
        self.keys = None
        self.sort = True
        self.axis = axis
        self.kind = kind
        self.squeeze = False
        self.group_keys = True
        self.as_index = True

        # Attach the (consolidated) object to the TimeGrouper first; the
        # binner/grouper computation below relies on the grouper being set.
        self.groupby._set_grouper(self._convert_obj(obj), sort=True)
        self.binner, self.grouper = self._get_binner()
@final
def _shallow_copy(self, obj, **kwargs):
"""
return a new object with the replacement attributes
"""
if isinstance(obj, self._constructor):
obj = obj.obj
for attr in self._attributes:
if attr not in kwargs:
kwargs[attr] = getattr(self, attr)
return self._constructor(obj, **kwargs)
def __str__(self) -> str:
"""
Provide a nice str repr of our rolling object.
"""
attrs = (
f"{k}={getattr(self.groupby, k)}"
for k in self._attributes
if getattr(self.groupby, k, None) is not None
)
return f"{type(self).__name__} [{', '.join(attrs)}]"
    def __getattr__(self, attr: str):
        # Resolution order: internal names -> groupby-forwarded attributes
        # -> columns of the resampled object (via __getitem__) -> normal
        # attribute lookup (which raises AttributeError if absent).
        if attr in self._internal_names_set:
            return object.__getattribute__(self, attr)
        if attr in self._attributes:
            return getattr(self.groupby, attr)
        if attr in self.obj:
            return self[attr]

        return object.__getattribute__(self, attr)
    # error: Signature of "obj" incompatible with supertype "BaseGroupBy"
    @property
    def obj(self) -> FrameOrSeries:  # type: ignore[override]
        # The object being resampled, as held by the TimeGrouper.
        # error: Incompatible return value type (got "Optional[Any]",
        # expected "FrameOrSeries")
        return self.groupby.obj  # type: ignore[return-value]
    @property
    def ax(self):
        # The axis (index) being resampled on, as held by the TimeGrouper.
        return self.groupby.ax
    @property
    def _from_selection(self) -> bool:
        """
        Is the resampling from a DataFrame column or MultiIndex level.
        """
        # upsampling and PeriodIndex resampling do not work
        # with selection, this state used to catch and raise an error
        # (True when the TimeGrouper was given an explicit key or level)
        return self.groupby is not None and (
            self.groupby.key is not None or self.groupby.level is not None
        )
    def _convert_obj(self, obj: FrameOrSeries) -> FrameOrSeries:
        """
        Provide any conversions for the object in order to correctly handle.

        Parameters
        ----------
        obj : Series or DataFrame

        Returns
        -------
        Series or DataFrame
        """
        # Base implementation only consolidates internal blocks; subclasses
        # may add further conversions.
        return obj._consolidate()
    def _get_binner_for_time(self):
        # Abstract hook: subclasses return (binner, bins, binlabels) for
        # their index type; consumed by _get_binner below.
        raise AbstractMethodError(self)
    @final
    def _get_binner(self):
        """
        Create the BinGrouper, assume that self.set_grouper(obj)
        has already been called.
        """
        binner, bins, binlabels = self._get_binner_for_time()
        # every bin must have exactly one label
        assert len(bins) == len(binlabels)
        bin_grouper = BinGrouper(bins, binlabels, indexer=self.groupby.indexer)
        return binner, bin_grouper
    @Substitution(
        klass="Resampler",
        examples="""
    >>> df = pd.DataFrame({'A': [1, 2, 3, 4]},
    ...                   index=pd.date_range('2012-08-02', periods=4))
    >>> df
                A
    2012-08-02  1
    2012-08-03  2
    2012-08-04  3
    2012-08-05  4

    To get the difference between each 2-day period's maximum and minimum
    value in one pass, you can do

    >>> df.resample('2D').pipe(lambda x: x.max() - x.min())
                A
    2012-08-02  1
    2012-08-04  1""",
    )
    @Appender(_pipe_template)
    def pipe(
        self,
        func: Callable[..., T] | tuple[Callable[..., T], str],
        *args,
        **kwargs,
    ) -> T:
        # Docstring comes from _pipe_template (via @Appender); behavior is
        # entirely the shared BaseGroupBy.pipe implementation.
        return super().pipe(func, *args, **kwargs)
_agg_see_also_doc = dedent(
"""
See Also
--------
DataFrame.groupby.aggregate : Aggregate using callable, string, dict,
or list of string/callables.
DataFrame.resample.transform : Transforms the Series on each group
based on the given function.
DataFrame.aggregate: Aggregate using one or more
operations over the specified axis.
"""
)
_agg_examples_doc = dedent(
"""
Examples
--------
>>> s = pd.Series([1,2,3,4,5],
index=pd.date_range('20130101', periods=5,freq='s'))
2013-01-01 00:00:00 1
2013-01-01 00:00:01 2
2013-01-01 00:00:02 3
2013-01-01 00:00:03 4
2013-01-01 00:00:04 5
Freq: S, dtype: int64
>>> r = s.resample('2s')
DatetimeIndexResampler [freq=<2 * Seconds>, axis=0, closed=left,
label=left, convention=start]
>>> r.agg(np.sum)
2013-01-01 00:00:00 3
2013-01-01 00:00:02 7
2013-01-01 00:00:04 5
Freq: 2S, dtype: int64
>>> r.agg(['sum','mean','max'])
sum mean max
2013-01-01 00:00:00 3 1.5 2
2013-01-01 00:00:02 7 3.5 4
2013-01-01 00:00:04 5 5.0 5
>>> r.agg({'result' : lambda x: x.mean() / x.std(),
'total' : np.sum})
total result
2013-01-01 00:00:00 3 2.121320
2013-01-01 00:00:02 7 4.949747
2013-01-01 00:00:04 5 NaN
"""
)
    @doc(
        _shared_docs["aggregate"],
        see_also=_agg_see_also_doc,
        examples=_agg_examples_doc,
        klass="DataFrame",
        axis="",
    )
    def aggregate(self, func, *args, **kwargs):
        # First try the shared aggregation machinery (handles lists/dicts
        # of funcs, strings, etc.); it returns None when `func` needs to be
        # evaluated through a groupby over the resample bins instead.
        result = ResamplerWindowApply(self, func, args=args, kwargs=kwargs).agg()
        if result is None:
            how = func
            grouper = None
            result = self._groupby_and_aggregate(how, grouper, *args, **kwargs)

        # Apply the (deprecated) loffset shift exactly once, if configured.
        result = self._apply_loffset(result)
        return result

    agg = aggregate
    apply = aggregate
    def transform(self, arg, *args, **kwargs):
        """
        Call function producing a like-indexed Series on each group and return
        a Series with the transformed values.

        Parameters
        ----------
        arg : function
            To apply to each group. Should return a Series with the same index.

        Returns
        -------
        transformed : Series

        Examples
        --------
        >>> resampled.transform(lambda x: (x - x.mean()) / x.std())
        """
        # Delegates to a regular groupby().transform over the selected
        # object, grouped by the resampling TimeGrouper.
        return self._selected_obj.groupby(self.groupby).transform(arg, *args, **kwargs)
    def _downsample(self, f):
        # Abstract hook: subclasses apply aggregation `f` over the bins.
        raise AbstractMethodError(self)
    def _upsample(self, f, limit=None, fill_value=None):
        # Abstract hook: subclasses reindex to the finer frequency using
        # fill method `f`, with optional fill limit / fill value.
        raise AbstractMethodError(self)
    def _gotitem(self, key, ndim: int, subset=None):
        """
        Sub-classes to define. Return a sliced object.

        Parameters
        ----------
        key : string / list of selections
        ndim : {1, 2}
            requested ndim of result
        subset : object, default None
            subset to act on
        """
        grouper = self.grouper
        if subset is None:
            subset = self.obj
        grouped = get_groupby(subset, by=None, grouper=grouper, axis=self.axis)

        # try the key selection; fall back to the full groupby when the key
        # is not selectable (KeyError)
        try:
            return grouped[key]
        except KeyError:
            return grouped
    def _groupby_and_aggregate(self, how, grouper=None, *args, **kwargs):
        """
        Re-evaluate the obj with a groupby aggregation.

        Parameters
        ----------
        how : aggregation function (callable or name)
        grouper : optional Grouper; defaults to the resample BinGrouper.

        Falls back from aggregation to ``apply`` when `how` turns out to be
        non-reducing or column-dependent (see the except clauses).
        """
        if grouper is None:
            grouper = self.grouper

        obj = self._selected_obj

        grouped = get_groupby(obj, by=None, grouper=grouper, axis=self.axis)

        try:
            if isinstance(obj, ABCDataFrame) and callable(how):
                # Check if the function is reducing or not.
                result = grouped._aggregate_item_by_item(how, *args, **kwargs)
            else:
                result = grouped.aggregate(how, *args, **kwargs)
        except DataError:
            # got TypeErrors on aggregation
            result = grouped.apply(how, *args, **kwargs)
        except (AttributeError, KeyError):
            # we have a non-reducing function; try to evaluate
            # alternatively we want to evaluate only a column of the input

            # test_apply_to_one_column_of_df the function being applied references
            # a DataFrame column, but aggregate_item_by_item operates column-wise
            # on Series, raising AttributeError or KeyError
            # (depending on whether the column lookup uses getattr/__getitem__)
            result = grouped.apply(how, *args, **kwargs)

        except ValueError as err:
            if "Must produce aggregated value" in str(err):
                # raised in _aggregate_named
                # see test_apply_without_aggregation, test_apply_with_mutated_index
                pass
            else:
                raise

            # we have a non-reducing function
            # try to evaluate
            result = grouped.apply(how, *args, **kwargs)

        result = self._apply_loffset(result)
        return self._wrap_result(result)
    def _apply_loffset(self, result):
        """
        If loffset is set, offset the result index.

        This is NOT an idempotent routine, it will be applied
        exactly once to the result.

        Parameters
        ----------
        result : Series or DataFrame
            the result of resample
        """
        # Only shift when loffset is an offset-like and the result actually
        # has a non-empty DatetimeIndex.
        # error: Cannot determine type of 'loffset'
        needs_offset = (
            isinstance(
                self.loffset,  # type: ignore[has-type]
                (DateOffset, timedelta, np.timedelta64),
            )
            and isinstance(result.index, DatetimeIndex)
            and len(result.index) > 0
        )

        if needs_offset:
            # error: Cannot determine type of 'loffset'
            result.index = result.index + self.loffset  # type: ignore[has-type]

        # Clear loffset so a second call is a no-op (see docstring).
        self.loffset = None
        return result
    def _get_resampler_for_grouping(self, groupby):
        """
        Return the correct class for resampling with groupby.
        """
        # _resampler_for_grouping is provided by the concrete subclass.
        return self._resampler_for_grouping(self, groupby=groupby)
    def _wrap_result(self, result):
        """
        Potentially wrap any results.
        """
        # Propagate a column selection as the Series name.
        if isinstance(result, ABCSeries) and self._selection is not None:
            result.name = self._selection

        if isinstance(result, ABCSeries) and result.empty:
            obj = self.obj
            # When index is all NaT, result is empty but index is not
            result.index = _asfreq_compat(obj.index[:0], freq=self.freq)
            result.name = getattr(obj, "name", None)

        return result
def pad(self, limit=None):
"""
Forward fill the values.
Parameters
----------
limit : int, optional
Limit of how many values to fill.
Returns
-------
An upsampled Series.
See Also
--------
Series.fillna: Fill NA/NaN values using the specified method.
DataFrame.fillna: Fill NA/NaN values using the specified method.
"""
return self._upsample("pad", limit=limit)
ffill = pad
def nearest(self, limit=None):
"""
Resample by using the nearest value.
When resampling data, missing values may appear (e.g., when the
resampling frequency is higher than the original frequency).
The `nearest` method will replace ``NaN`` values that appeared in
the resampled data with the value from the nearest member of the
sequence, based on the index value.
Missing values that existed in the original data will not be modified.
If `limit` is given, fill only this many values in each direction for
each of the original values.
Parameters
----------
limit : int, optional
Limit of how many values to fill.
Returns
-------
Series or DataFrame
An upsampled Series or DataFrame with ``NaN`` values filled with
their nearest value.
See Also
--------
backfill : Backward fill the new missing values in the resampled data.
pad : Forward fill ``NaN`` values.
Examples
--------
>>> s = pd.Series([1, 2],
... index=pd.date_range('20180101',
... periods=2,
... freq='1h'))
>>> s
2018-01-01 00:00:00 1
2018-01-01 01:00:00 2
Freq: H, dtype: int64
>>> s.resample('15min').nearest()
2018-01-01 00:00:00 1
2018-01-01 00:15:00 1
2018-01-01 00:30:00 2
2018-01-01 00:45:00 2
2018-01-01 01:00:00 2
Freq: 15T, dtype: int64
Limit the number of upsampled values imputed by the nearest:
>>> s.resample('15min').nearest(limit=1)
2018-01-01 00:00:00 1.0
2018-01-01 00:15:00 1.0
2018-01-01 00:30:00 NaN
2018-01-01 00:45:00 2.0
2018-01-01 01:00:00 2.0
Freq: 15T, dtype: float64
"""
return self._upsample("nearest", limit=limit)
def backfill(self, limit=None):
"""
Backward fill the new missing values in the resampled data.
In statistics, imputation is the process of replacing missing data with
substituted values [1]_. When resampling data, missing values may
appear (e.g., when the resampling frequency is higher than the original
frequency). The backward fill will replace NaN values that appeared in
the resampled data with the next value in the original sequence.
Missing values that existed in the original data will not be modified.
Parameters
----------
limit : int, optional
Limit of how many values to fill.
Returns
-------
Series, DataFrame
An upsampled Series or DataFrame with backward filled NaN values.
See Also
--------
bfill : Alias of backfill.
fillna : Fill NaN values using the specified method, which can be
'backfill'.
nearest : Fill NaN values with nearest neighbor starting from center.
pad : Forward fill NaN values.
Series.fillna : Fill NaN values in the Series using the
specified method, which can be 'backfill'.
DataFrame.fillna : Fill NaN values in the DataFrame using the
specified method, which can be 'backfill'.
References
----------
.. [1] https://en.wikipedia.org/wiki/Imputation_(statistics)
Examples
--------
Resampling a Series:
>>> s = pd.Series([1, 2, 3],
... index=pd.date_range('20180101', periods=3, freq='h'))
>>> s
2018-01-01 00:00:00 1
2018-01-01 01:00:00 2
2018-01-01 02:00:00 3
Freq: H, dtype: int64
>>> s.resample('30min').backfill()
2018-01-01 00:00:00 1
2018-01-01 00:30:00 2
2018-01-01 01:00:00 2
2018-01-01 01:30:00 3
2018-01-01 02:00:00 3
Freq: 30T, dtype: int64
>>> s.resample('15min').backfill(limit=2)
2018-01-01 00:00:00 1.0
2018-01-01 00:15:00 NaN
2018-01-01 00:30:00 2.0
2018-01-01 00:45:00 2.0
2018-01-01 01:00:00 2.0
2018-01-01 01:15:00 NaN
2018-01-01 01:30:00 3.0
2018-01-01 01:45:00 3.0
2018-01-01 02:00:00 3.0
Freq: 15T, dtype: float64
Resampling a DataFrame that has missing values:
>>> df = pd.DataFrame({'a': [2, np.nan, 6], 'b': [1, 3, 5]},
... index=pd.date_range('20180101', periods=3,
... freq='h'))
>>> df
a b
2018-01-01 00:00:00 2.0 1
2018-01-01 01:00:00 NaN 3
2018-01-01 02:00:00 6.0 5
>>> df.resample('30min').backfill()
a b
2018-01-01 00:00:00 2.0 1
2018-01-01 00:30:00 NaN 3
2018-01-01 01:00:00 NaN 3
2018-01-01 01:30:00 6.0 5
2018-01-01 02:00:00 6.0 5
>>> df.resample('15min').backfill(limit=2)
a b
2018-01-01 00:00:00 2.0 1.0
2018-01-01 00:15:00 NaN NaN
2018-01-01 00:30:00 NaN 3.0
2018-01-01 00:45:00 NaN 3.0
2018-01-01 01:00:00 NaN 3.0
2018-01-01 01:15:00 NaN NaN
2018-01-01 01:30:00 6.0 5.0
2018-01-01 01:45:00 6.0 5.0
2018-01-01 02:00:00 6.0 5.0
"""
return self._upsample("backfill", limit=limit)
bfill = backfill
def fillna(self, method, limit=None):
"""
Fill missing values introduced by upsampling.
In statistics, imputation is the process of replacing missing data with
substituted values [1]_. When resampling data, missing values may
appear (e.g., when the resampling frequency is higher than the original
frequency).
Missing values that existed in the original data will
not be modified.
Parameters
----------
method : {'pad', 'backfill', 'ffill', 'bfill', 'nearest'}
Method to use for filling holes in resampled data
* 'pad' or 'ffill': use previous valid observation to fill gap
(forward fill).
* 'backfill' or 'bfill': use next valid observation to fill gap.
* 'nearest': use nearest valid observation to fill gap.
limit : int, optional
Limit of how many consecutive missing values to fill.
Returns
-------
Series or DataFrame
An upsampled Series or DataFrame with missing values filled.
See Also
--------
backfill : Backward fill NaN values in the resampled data.
pad : Forward fill NaN values in the resampled data.
nearest : Fill NaN values in the resampled data
with nearest neighbor starting from center.
interpolate : Fill NaN values using interpolation.
Series.fillna : Fill NaN values in the Series using the
specified method, which can be 'bfill' and 'ffill'.
DataFrame.fillna : Fill NaN values in the DataFrame using the
specified method, which can be 'bfill' and 'ffill'.
References
----------
.. [1] https://en.wikipedia.org/wiki/Imputation_(statistics)
Examples
--------
Resampling a Series:
>>> s = pd.Series([1, 2, 3],
... index=pd.date_range('20180101', periods=3, freq='h'))
>>> s
2018-01-01 00:00:00 1
2018-01-01 01:00:00 2
2018-01-01 02:00:00 3
Freq: H, dtype: int64
Without filling the missing values you get:
>>> s.resample("30min").asfreq()
2018-01-01 00:00:00 1.0
2018-01-01 00:30:00 NaN
2018-01-01 01:00:00 2.0
2018-01-01 01:30:00 NaN
2018-01-01 02:00:00 3.0
Freq: 30T, dtype: float64
>>> s.resample('30min').fillna("backfill")
2018-01-01 00:00:00 1
2018-01-01 00:30:00 2
2018-01-01 01:00:00 2
2018-01-01 01:30:00 3
2018-01-01 02:00:00 3
Freq: 30T, dtype: int64
>>> s.resample('15min').fillna("backfill", limit=2)
2018-01-01 00:00:00 1.0
2018-01-01 00:15:00 NaN
2018-01-01 00:30:00 2.0
2018-01-01 00:45:00 2.0
2018-01-01 01:00:00 2.0
2018-01-01 01:15:00 NaN
2018-01-01 01:30:00 3.0
2018-01-01 01:45:00 3.0
2018-01-01 02:00:00 3.0
Freq: 15T, dtype: float64
>>> s.resample('30min').fillna("pad")
2018-01-01 00:00:00 1
2018-01-01 00:30:00 1
2018-01-01 01:00:00 2
2018-01-01 01:30:00 2
2018-01-01 02:00:00 3
Freq: 30T, dtype: int64
>>> s.resample('30min').fillna("nearest")
2018-01-01 00:00:00 1
2018-01-01 00:30:00 2
2018-01-01 01:00:00 2
2018-01-01 01:30:00 3
2018-01-01 02:00:00 3
Freq: 30T, dtype: int64
Missing values present before the upsampling are not affected.
>>> sm = pd.Series([1, None, 3],
... index=pd.date_range('20180101', periods=3, freq='h'))
>>> sm
2018-01-01 00:00:00 1.0
2018-01-01 01:00:00 NaN
2018-01-01 02:00:00 3.0
Freq: H, dtype: float64
>>> sm.resample('30min').fillna('backfill')
2018-01-01 00:00:00 1.0
2018-01-01 00:30:00 NaN
2018-01-01 01:00:00 NaN
2018-01-01 01:30:00 3.0
2018-01-01 02:00:00 3.0
Freq: 30T, dtype: float64
>>> sm.resample('30min').fillna('pad')
2018-01-01 00:00:00 1.0
2018-01-01 00:30:00 1.0
2018-01-01 01:00:00 NaN
2018-01-01 01:30:00 NaN
2018-01-01 02:00:00 3.0
Freq: 30T, dtype: float64
>>> sm.resample('30min').fillna('nearest')
2018-01-01 00:00:00 1.0
2018-01-01 00:30:00 NaN
2018-01-01 01:00:00 NaN
2018-01-01 01:30:00 3.0
2018-01-01 02:00:00 3.0
Freq: 30T, dtype: float64
DataFrame resampling is done column-wise. All the same options are
available.
>>> df = pd.DataFrame({'a': [2, np.nan, 6], 'b': [1, 3, 5]},
... index=pd.date_range('20180101', periods=3,
... freq='h'))
>>> df
a b
2018-01-01 00:00:00 2.0 1
2018-01-01 01:00:00 NaN 3
2018-01-01 02:00:00 6.0 5
>>> df.resample('30min').fillna("bfill")
a b
2018-01-01 00:00:00 2.0 1
2018-01-01 00:30:00 NaN 3
2018-01-01 01:00:00 NaN 3
2018-01-01 01:30:00 6.0 5
2018-01-01 02:00:00 6.0 5
"""
return self._upsample(method, limit=limit)
@doc(NDFrame.interpolate, **_shared_docs_kwargs)
def interpolate(
    self,
    method="linear",
    axis=0,
    limit=None,
    inplace=False,
    limit_direction="forward",
    limit_area=None,
    downcast=None,
    **kwargs,
):
    """
    Interpolate values according to different methods.
    """
    # Upsample to the target frequency without filling anything, then hand
    # the actual interpolation off to the NDFrame implementation.
    upsampled = self._upsample("asfreq")
    return upsampled.interpolate(
        method=method,
        axis=axis,
        limit=limit,
        inplace=inplace,
        limit_direction=limit_direction,
        limit_area=limit_area,
        downcast=downcast,
        **kwargs,
    )
def asfreq(self, fill_value=None):
    """
    Return the values at the new freq, essentially a reindex.

    Parameters
    ----------
    fill_value : scalar, optional
        Value to use for missing values, applied during upsampling (note
        this does not fill NaNs that already were present).

    Returns
    -------
    DataFrame or Series
        Values at the specified freq.

    See Also
    --------
    Series.asfreq: Convert TimeSeries to specified frequency.
    DataFrame.asfreq: Convert TimeSeries to specified frequency.
    """
    # "asfreq" upsampling is a pure reindex onto the new bins.
    return self._upsample("asfreq", fill_value=fill_value)
def std(self, ddof=1, *args, **kwargs):
    """
    Compute standard deviation of groups, excluding missing values.

    Parameters
    ----------
    ddof : int, default 1
        Degrees of freedom.

    Returns
    -------
    DataFrame or Series
        Standard deviation of values within each group.
    """
    # Reject numpy-compat positional/keyword arguments that would otherwise
    # be silently ignored.
    nv.validate_resampler_func("std", args, kwargs)
    # error: Unexpected keyword argument "ddof" for "_downsample"
    return self._downsample("std", ddof=ddof)  # type: ignore[call-arg]
def var(self, ddof=1, *args, **kwargs):
    """
    Compute variance of groups, excluding missing values.

    Parameters
    ----------
    ddof : int, default 1
        Degrees of freedom.

    Returns
    -------
    DataFrame or Series
        Variance of values within each group.
    """
    # Reject numpy-compat positional/keyword arguments that would otherwise
    # be silently ignored.
    nv.validate_resampler_func("var", args, kwargs)
    # error: Unexpected keyword argument "ddof" for "_downsample"
    return self._downsample("var", ddof=ddof)  # type: ignore[call-arg]
@doc(GroupBy.size)
def size(self):
    result = self._downsample("size")
    if len(self.ax):
        return result

    # Empty axis: _downsample may not produce the right shape/dtype, so
    # build an empty int64 Series explicitly, keeping the name only for
    # one-dimensional selections.
    from pandas import Series

    name = self._selected_obj.name if self._selected_obj.ndim == 1 else None
    return Series([], index=result.index, dtype="int64", name=name)
@doc(GroupBy.count)
def count(self):
    result = self._downsample("count")
    if len(self.ax):
        return result

    # Empty axis: construct an explicitly empty int64 result of the same
    # dimensionality as the selected object.
    if self._selected_obj.ndim == 1:
        return type(self._selected_obj)(
            [], index=result.index, dtype="int64", name=self._selected_obj.name
        )
    from pandas import DataFrame

    return DataFrame(
        [], index=result.index, columns=result.columns, dtype="int64"
    )
def quantile(self, q=0.5, **kwargs):
    """
    Return value at the given quantile.

    .. versionadded:: 0.24.0

    Parameters
    ----------
    q : float or array-like, default 0.5 (50% quantile)

    Returns
    -------
    DataFrame or Series
        Quantile of values within each group.

    See Also
    --------
    Series.quantile
        Return a series, where the index is q and the values are the quantiles.
    DataFrame.quantile
        Return a DataFrame, where the columns are the columns of self,
        and the values are the quantiles.
    DataFrameGroupBy.quantile
        Return a DataFrame, where the columns are groupby columns,
        and the values are its quantiles.
    """
    # error: Unexpected keyword argument "q" for "_downsample"
    # error: Too many arguments for "_downsample"
    return self._downsample("quantile", q=q, **kwargs)  # type: ignore[call-arg]
# downsample methods
# NOTE: these loops run at import time and attach thin aggregation wrappers
# to Resampler.  ``_method=method`` deliberately binds the loop variable as a
# default argument at function-definition time, avoiding the late-binding
# closure pitfall; each wrapper then borrows GroupBy's docstring.
for method in ["sum", "prod", "min", "max", "first", "last"]:

    def f(self, _method=method, min_count=0, *args, **kwargs):
        nv.validate_resampler_func(_method, args, kwargs)
        return self._downsample(_method, min_count=min_count)

    f.__doc__ = getattr(GroupBy, method).__doc__
    setattr(Resampler, method, f)


# downsample methods
for method in ["mean", "sem", "median", "ohlc"]:

    def g(self, _method=method, *args, **kwargs):
        nv.validate_resampler_func(_method, args, kwargs)
        return self._downsample(_method)

    g.__doc__ = getattr(GroupBy, method).__doc__
    setattr(Resampler, method, g)


# series only methods
for method in ["nunique"]:

    def h(self, _method=method):
        return self._downsample(_method)

    h.__doc__ = getattr(SeriesGroupBy, method).__doc__
    setattr(Resampler, method, h)
class _GroupByMixin(PandasObject):
    """
    Provide the groupby facilities.

    Mixed into the concrete ``*ResamplerGroupby`` classes so that every
    up/downsample operation is dispatched through the underlying groupby
    via ``_apply``.
    """

    _attributes: list[str]  # in practice the same as Resampler._attributes

    def __init__(self, obj, parent=None, groupby=None, **kwargs):
        # reached via ._gotitem and _get_resampler_for_grouping

        if parent is None:
            parent = obj

        # initialize our GroupByMixin object with
        # the resampler attributes
        for attr in self._attributes:
            setattr(self, attr, kwargs.get(attr, getattr(parent, attr)))
        self.binner = parent.binner

        self._groupby = groupby
        self._groupby.mutated = True
        self._groupby.grouper.mutated = True
        self.groupby = copy.copy(parent.groupby)

    @no_type_check
    def _apply(self, f, grouper=None, *args, **kwargs):
        """
        Dispatch to _upsample; we are stripping all of the _upsample kwargs and
        performing the original function call on the grouped object.
        """

        def func(x):
            # Re-wrap each group chunk in a resampler before applying ``f``.
            x = self._shallow_copy(x, groupby=self.groupby)

            if isinstance(f, str):
                return getattr(x, f)(**kwargs)

            return x.apply(f, *args, **kwargs)

        result = self._groupby.apply(func)
        return self._wrap_result(result)

    # All three dispatch paths funnel through the same groupby-aware _apply.
    _upsample = _apply
    _downsample = _apply
    _groupby_and_aggregate = _apply

    @final
    def _gotitem(self, key, ndim, subset=None):
        """
        Sub-classes to define. Return a sliced object.

        Parameters
        ----------
        key : string / list of selections
        ndim : {1, 2}
            requested ndim of result
        subset : object, default None
            subset to act on
        """
        # create a new object to prevent aliasing
        if subset is None:
            # error: "GotItemMixin" has no attribute "obj"
            subset = self.obj  # type: ignore[attr-defined]

        # we need to make a shallow copy of ourselves
        # with the same groupby
        kwargs = {attr: getattr(self, attr) for attr in self._attributes}

        # Try to select from a DataFrame, falling back to a Series
        try:
            groupby = self._groupby[key]
        except IndexError:
            groupby = self._groupby

        self = type(self)(subset, groupby=groupby, parent=self, **kwargs)
        self._reset_cache()
        if subset.ndim == 2 and (
            lib.is_scalar(key) and key in subset or lib.is_list_like(key)
        ):
            self._selection = key
        return self
class DatetimeIndexResampler(Resampler):
    """
    Resampler for objects indexed by a DatetimeIndex.
    """

    @property
    def _resampler_for_grouping(self):
        return DatetimeIndexResamplerGroupby

    def _get_binner_for_time(self):

        # this is how we are actually creating the bins
        if self.kind == "period":
            return self.groupby._get_time_period_bins(self.ax)
        return self.groupby._get_time_bins(self.ax)

    def _downsample(self, how, **kwargs):
        """
        Downsample the cython defined function.

        Parameters
        ----------
        how : string / cython mapped function
        **kwargs : kw args passed to how function
        """
        how = com.get_cython_func(how) or how
        ax = self.ax
        obj = self._selected_obj

        if not len(ax):
            # reset to the new freq
            obj = obj.copy()
            obj.index = obj.index._with_freq(self.freq)
            assert obj.index.freq == self.freq, (obj.index.freq, self.freq)
            return obj

        # do we have a regular frequency

        # error: Item "None" of "Optional[Any]" has no attribute "binlabels"
        if (
            (ax.freq is not None or ax.inferred_freq is not None)
            and len(self.grouper.binlabels) > len(ax)
            and how is None
        ):

            # let's do an asfreq
            return self.asfreq()

        # we are downsampling
        # we want to call the actual grouper method here
        result = obj.groupby(self.grouper, axis=self.axis).aggregate(how, **kwargs)

        result = self._apply_loffset(result)
        return self._wrap_result(result)

    def _adjust_binner_for_upsample(self, binner):
        """
        Adjust our binner when upsampling.

        The range of a new index should not be outside specified range
        """
        # Drop the open edge so the upsampled index stays within range.
        if self.closed == "right":
            binner = binner[1:]
        else:
            binner = binner[:-1]
        return binner

    def _upsample(self, method, limit=None, fill_value=None):
        """
        Parameters
        ----------
        method : string {'backfill', 'bfill', 'pad',
            'ffill', 'asfreq'} method for upsampling
        limit : int, default None
            Maximum size gap to fill when reindexing
        fill_value : scalar, default None
            Value to use for missing values

        See Also
        --------
        .fillna: Fill NA/NaN values using the specified method.
        """
        if self.axis:
            raise AssertionError("axis must be 0")
        if self._from_selection:
            raise ValueError(
                "Upsampling from level= or on= selection "
                "is not supported, use .set_index(...) "
                "to explicitly set index to datetime-like"
            )

        ax = self.ax
        obj = self._selected_obj
        binner = self.binner
        res_index = self._adjust_binner_for_upsample(binner)

        # if we have the same frequency as our axis, then we are equal sampling
        if (
            limit is None
            and to_offset(ax.inferred_freq) == self.freq
            and len(obj) == len(res_index)
        ):
            result = obj.copy()
            result.index = res_index
        else:
            result = obj.reindex(
                res_index, method=method, limit=limit, fill_value=fill_value
            )

        result = self._apply_loffset(result)
        return self._wrap_result(result)

    def _wrap_result(self, result):
        result = super()._wrap_result(result)

        # we may have a different kind that we were asked originally
        # convert if needed
        if self.kind == "period" and not isinstance(result.index, PeriodIndex):
            result.index = result.index.to_period(self.freq)
        return result
class DatetimeIndexResamplerGroupby(_GroupByMixin, DatetimeIndexResampler):
    """
    Resample a DatetimeIndex-ed object inside a groupby operation.
    """

    @property
    def _constructor(self):
        # Collapse back to the plain (non-groupby) resampler class.
        return DatetimeIndexResampler
class PeriodIndexResampler(DatetimeIndexResampler):
    """
    Resampler for objects indexed by a PeriodIndex.

    Unless ``kind == "timestamp"`` forces timestamp semantics, period
    sub/super-period relationships drive the down/upsampling.
    """

    @property
    def _resampler_for_grouping(self):
        return PeriodIndexResamplerGroupby

    def _get_binner_for_time(self):
        if self.kind == "timestamp":
            return super()._get_binner_for_time()
        return self.groupby._get_period_bins(self.ax)

    def _convert_obj(self, obj: FrameOrSeries) -> FrameOrSeries:
        obj = super()._convert_obj(obj)

        if self._from_selection:
            # see GH 14008, GH 12871
            msg = (
                "Resampling from level= or on= selection "
                "with a PeriodIndex is not currently supported, "
                "use .set_index(...) to explicitly set index"
            )
            raise NotImplementedError(msg)

        if self.loffset is not None:
            # Cannot apply loffset/timedelta to PeriodIndex -> convert to
            # timestamps
            self.kind = "timestamp"

        # convert to timestamp
        if self.kind == "timestamp":
            obj = obj.to_timestamp(how=self.convention)

        return obj

    def _downsample(self, how, **kwargs):
        """
        Downsample the cython defined function.

        Parameters
        ----------
        how : string / cython mapped function
        **kwargs : kw args passed to how function
        """
        # we may need to actually resample as if we are timestamps
        if self.kind == "timestamp":
            return super()._downsample(how, **kwargs)

        how = com.get_cython_func(how) or how
        ax = self.ax

        if is_subperiod(ax.freq, self.freq):
            # Downsampling
            return self._groupby_and_aggregate(how, grouper=self.grouper, **kwargs)
        elif is_superperiod(ax.freq, self.freq):
            if how == "ohlc":
                # GH #13083
                # upsampling to subperiods is handled as an asfreq, which works
                # for pure aggregating/reducing methods
                # OHLC reduces along the time dimension, but creates multiple
                # values for each period -> handle by _groupby_and_aggregate()
                return self._groupby_and_aggregate(how, grouper=self.grouper)
            return self.asfreq()
        elif ax.freq == self.freq:
            return self.asfreq()

        raise IncompatibleFrequency(
            f"Frequency {ax.freq} cannot be resampled to {self.freq}, "
            "as they are not sub or super periods"
        )

    def _upsample(self, method, limit=None, fill_value=None):
        """
        Parameters
        ----------
        method : {'backfill', 'bfill', 'pad', 'ffill'}
            Method for upsampling.
        limit : int, default None
            Maximum size gap to fill when reindexing.
        fill_value : scalar, default None
            Value to use for missing values.

        See Also
        --------
        .fillna: Fill NA/NaN values using the specified method.
        """
        # we may need to actually resample as if we are timestamps
        if self.kind == "timestamp":
            return super()._upsample(method, limit=limit, fill_value=fill_value)

        ax = self.ax
        obj = self.obj
        new_index = self.binner

        # Start vs. end of period
        memb = ax.asfreq(self.freq, how=self.convention)

        # Get the fill indexer
        indexer = memb.get_indexer(new_index, method=method, limit=limit)
        new_obj = _take_new_index(
            obj,
            indexer,
            new_index,
            axis=self.axis,
        )
        return self._wrap_result(new_obj)
class PeriodIndexResamplerGroupby(_GroupByMixin, PeriodIndexResampler):
    """
    Resample a PeriodIndex-ed object inside a groupby operation.
    """

    @property
    def _constructor(self):
        # Collapse back to the plain (non-groupby) resampler class.
        return PeriodIndexResampler
class TimedeltaIndexResampler(DatetimeIndexResampler):
    """
    Resampler for objects indexed by a TimedeltaIndex.
    """

    @property
    def _resampler_for_grouping(self):
        return TimedeltaIndexResamplerGroupby

    def _get_binner_for_time(self):
        return self.groupby._get_time_delta_bins(self.ax)

    def _adjust_binner_for_upsample(self, binner):
        """
        Adjust our binner when upsampling.

        The range of a new index is allowed to be greater than original range
        so we don't need to change the length of a binner, GH 13022
        """
        return binner
class TimedeltaIndexResamplerGroupby(_GroupByMixin, TimedeltaIndexResampler):
    """
    Resample a TimedeltaIndex-ed object inside a groupby operation.
    """

    @property
    def _constructor(self):
        # Collapse back to the plain (non-groupby) resampler class.
        return TimedeltaIndexResampler
def get_resampler(obj, kind=None, **kwds):
    """
    Create a TimeGrouper and return our resampler.
    """
    tg = TimeGrouper(**kwds)
    return tg._get_resampler(obj, kind=kind)


# Expose the full Resampler docstring on this factory function.
get_resampler.__doc__ = Resampler.__doc__
def get_resampler_for_grouping(
    groupby, rule, how=None, fill_method=None, limit=None, kind=None, on=None, **kwargs
):
    """
    Build the resampler appropriate for an already-grouped object.

    ``how``, ``fill_method`` and ``limit`` are accepted for signature
    compatibility but are not consumed here.
    """
    # .resample uses 'on' the same way .groupby uses 'key'.
    grouper = TimeGrouper(freq=rule, key=on, **kwargs)
    base_resampler = grouper._get_resampler(groupby.obj, kind=kind)
    return base_resampler._get_resampler_for_grouping(groupby=groupby)
class TimeGrouper(Grouper):
    """
    Custom groupby class for time-interval grouping.

    Parameters
    ----------
    freq : pandas date offset or offset alias for identifying bin edges
    closed : closed end of interval; 'left' or 'right'
    label : interval boundary to use for labeling; 'left' or 'right'
    convention : {'start', 'end', 'e', 's'}
        If axis is PeriodIndex
    """

    _attributes = Grouper._attributes + (
        "closed",
        "label",
        "how",
        "loffset",
        "kind",
        "convention",
        "origin",
        "offset",
    )

    def __init__(
        self,
        freq="Min",
        closed: Literal["left", "right"] | None = None,
        label: str | None = None,
        how="mean",
        axis=0,
        fill_method=None,
        limit=None,
        loffset=None,
        kind: str | None = None,
        convention: str | None = None,
        base: int | None = None,
        origin: str | TimestampConvertibleTypes = "start_day",
        offset: TimedeltaConvertibleTypes | None = None,
        **kwargs,
    ):
        # Check for correctness of the keyword arguments which would
        # otherwise silently use the default if misspelled
        if label not in {None, "left", "right"}:
            raise ValueError(f"Unsupported value {label} for `label`")
        if closed not in {None, "left", "right"}:
            raise ValueError(f"Unsupported value {closed} for `closed`")
        if convention not in {None, "start", "end", "e", "s"}:
            raise ValueError(f"Unsupported value {convention} for `convention`")

        freq = to_offset(freq)

        # Period-end style frequencies default to right-closed/right-labeled
        # bins; everything else depends on the chosen origin.
        end_types = {"M", "A", "Q", "BM", "BA", "BQ", "W"}
        rule = freq.rule_code
        if rule in end_types or ("-" in rule and rule[: rule.find("-")] in end_types):
            if closed is None:
                closed = "right"
            if label is None:
                label = "right"
        else:
            # The backward resample sets ``closed`` to ``'right'`` by default
            # since the last value should be considered as the edge point for
            # the last bin. When origin in "end" or "end_day", the value for a
            # specific ``Timestamp`` index stands for the resample result from
            # the current ``Timestamp`` minus ``freq`` to the current
            # ``Timestamp`` with a right close.
            if origin in ["end", "end_day"]:
                if closed is None:
                    closed = "right"
                if label is None:
                    label = "right"
            else:
                if closed is None:
                    closed = "left"
                if label is None:
                    label = "left"

        self.closed = closed
        self.label = label
        self.kind = kind

        self.convention = convention or "E"
        self.convention = self.convention.lower()

        self.how = how
        self.fill_method = fill_method
        self.limit = limit

        if origin in ("epoch", "start", "start_day", "end", "end_day"):
            self.origin = origin
        else:
            try:
                self.origin = Timestamp(origin)
            except (ValueError, TypeError) as err:
                raise ValueError(
                    "'origin' should be equal to 'epoch', 'start', 'start_day', "
                    "'end', 'end_day' or "
                    f"should be a Timestamp convertible type. Got '{origin}' instead."
                ) from err

        try:
            self.offset = Timedelta(offset) if offset is not None else None
        except (ValueError, TypeError) as err:
            raise ValueError(
                "'offset' should be a Timedelta convertible type. "
                f"Got '{offset}' instead."
            ) from err

        # always sort time groupers
        kwargs["sort"] = True

        # Handle deprecated arguments since v1.1.0 of `base` and `loffset` (GH #31809)
        if base is not None and offset is not None:
            raise ValueError("'offset' and 'base' cannot be present at the same time")

        if base and isinstance(freq, Tick):
            # this conversion handle the default behavior of base and the
            # special case of GH #10530. Indeed in case when dealing with
            # a TimedeltaIndex base was treated as a 'pure' offset even though
            # the default behavior of base was equivalent of a modulo on
            # freq_nanos.
            self.offset = Timedelta(base * freq.nanos // freq.n)

        if isinstance(loffset, str):
            loffset = to_offset(loffset)
        self.loffset = loffset

        super().__init__(freq=freq, axis=axis, **kwargs)

    def _get_resampler(self, obj, kind=None):
        """
        Return my resampler or raise if we have an invalid axis.

        Parameters
        ----------
        obj : input object
        kind : string, optional
            'period','timestamp','timedelta' are valid

        Returns
        -------
        a Resampler

        Raises
        ------
        TypeError if incompatible axis
        """
        self._set_grouper(obj)
        ax = self.ax
        if isinstance(ax, DatetimeIndex):
            return DatetimeIndexResampler(obj, groupby=self, kind=kind, axis=self.axis)
        elif isinstance(ax, PeriodIndex) or kind == "period":
            return PeriodIndexResampler(obj, groupby=self, kind=kind, axis=self.axis)
        elif isinstance(ax, TimedeltaIndex):
            return TimedeltaIndexResampler(obj, groupby=self, axis=self.axis)

        raise TypeError(
            "Only valid with DatetimeIndex, "
            "TimedeltaIndex or PeriodIndex, "
            f"but got an instance of '{type(ax).__name__}'"
        )

    def _get_grouper(self, obj, validate: bool = True):
        # create the resampler and return our binner
        r = self._get_resampler(obj)
        return r.binner, r.grouper, r.obj

    def _get_time_bins(self, ax: DatetimeIndex):
        if not isinstance(ax, DatetimeIndex):
            raise TypeError(
                "axis must be a DatetimeIndex, but got "
                f"an instance of {type(ax).__name__}"
            )

        if len(ax) == 0:
            binner = labels = DatetimeIndex(data=[], freq=self.freq, name=ax.name)
            return binner, [], labels

        first, last = _get_timestamp_range_edges(
            ax.min(),
            ax.max(),
            self.freq,
            closed=self.closed,
            origin=self.origin,
            offset=self.offset,
        )
        # GH #12037
        # use first/last directly instead of call replace() on them
        # because replace() will swallow the nanosecond part
        # thus last bin maybe slightly before the end if the end contains
        # nanosecond part and lead to `Values falls after last bin` error

        # GH 25758: If DST lands at midnight (e.g. 'America/Havana'), user feedback
        # has noted that ambiguous=True provides the most sensible result
        binner = labels = date_range(
            freq=self.freq,
            start=first,
            end=last,
            tz=ax.tz,
            name=ax.name,
            ambiguous=True,
            nonexistent="shift_forward",
        )

        ax_values = ax.asi8
        binner, bin_edges = self._adjust_bin_edges(binner, ax_values)

        # general version, knowing nothing about relative frequencies
        bins = lib.generate_bins_dt64(
            ax_values, bin_edges, self.closed, hasnans=ax.hasnans
        )

        if self.closed == "right":
            labels = binner
            if self.label == "right":
                labels = labels[1:]
        elif self.label == "right":
            labels = labels[1:]

        if ax.hasnans:
            binner = binner.insert(0, NaT)
            labels = labels.insert(0, NaT)

        # if we end up with more labels than bins
        # adjust the labels
        # GH4076
        if len(bins) < len(labels):
            labels = labels[: len(bins)]

        return binner, bins, labels

    def _adjust_bin_edges(self, binner, ax_values):
        # Some hacks for > daily data, see #1471, #1458, #1483

        if self.freq != "D" and is_superperiod(self.freq, "D"):
            if self.closed == "right":
                # GH 21459, GH 9119: Adjust the bins relative to the wall time
                bin_edges = binner.tz_localize(None)
                bin_edges = bin_edges + timedelta(1) - Nano(1)
                bin_edges = bin_edges.tz_localize(binner.tz).asi8
            else:
                bin_edges = binner.asi8

            # intraday values on last day
            if bin_edges[-2] > ax_values.max():
                bin_edges = bin_edges[:-1]
                binner = binner[:-1]
        else:
            bin_edges = binner.asi8
        return binner, bin_edges

    def _get_time_delta_bins(self, ax: TimedeltaIndex):
        if not isinstance(ax, TimedeltaIndex):
            raise TypeError(
                "axis must be a TimedeltaIndex, but got "
                f"an instance of {type(ax).__name__}"
            )

        if not len(ax):
            binner = labels = TimedeltaIndex(data=[], freq=self.freq, name=ax.name)
            return binner, [], labels

        start, end = ax.min(), ax.max()
        labels = binner = timedelta_range(
            start=start, end=end, freq=self.freq, name=ax.name
        )

        end_stamps = labels + self.freq
        bins = ax.searchsorted(end_stamps, side="left")

        if self.offset:
            # GH 10530 & 31809
            labels += self.offset
        if self.loffset:
            # GH 33498
            labels += self.loffset

        return binner, bins, labels

    def _get_time_period_bins(self, ax: DatetimeIndex):
        if not isinstance(ax, DatetimeIndex):
            raise TypeError(
                "axis must be a DatetimeIndex, but got "
                f"an instance of {type(ax).__name__}"
            )

        freq = self.freq

        if not len(ax):
            binner = labels = PeriodIndex(data=[], freq=freq, name=ax.name)
            return binner, [], labels

        labels = binner = period_range(start=ax[0], end=ax[-1], freq=freq, name=ax.name)

        end_stamps = (labels + freq).asfreq(freq, "s").to_timestamp()
        if ax.tz:
            end_stamps = end_stamps.tz_localize(ax.tz)
        bins = ax.searchsorted(end_stamps, side="left")

        return binner, bins, labels

    def _get_period_bins(self, ax: PeriodIndex):
        if not isinstance(ax, PeriodIndex):
            raise TypeError(
                "axis must be a PeriodIndex, but got "
                f"an instance of {type(ax).__name__}"
            )

        memb = ax.asfreq(self.freq, how=self.convention)

        # NaT handling as in pandas._lib.lib.generate_bins_dt64()
        nat_count = 0
        if memb.hasnans:
            nat_count = np.sum(memb._isnan)
            memb = memb[~memb._isnan]

        if not len(memb):
            # index contains no valid (non-NaT) values
            bins = np.array([], dtype=np.int64)
            binner = labels = PeriodIndex(data=[], freq=self.freq, name=ax.name)
            if len(ax) > 0:
                # index is all NaT
                binner, bins, labels = _insert_nat_bin(binner, bins, labels, len(ax))
            return binner, bins, labels

        freq_mult = self.freq.n

        start = ax.min().asfreq(self.freq, how=self.convention)
        end = ax.max().asfreq(self.freq, how="end")
        bin_shift = 0

        if isinstance(self.freq, Tick):
            # GH 23882 & 31809: get adjusted bin edge labels with 'origin'
            # and 'origin' support. This call only makes sense if the freq is a
            # Tick since offset and origin are only used in those cases.
            # Not doing this check could create an extra empty bin.
            p_start, end = _get_period_range_edges(
                start,
                end,
                self.freq,
                closed=self.closed,
                origin=self.origin,
                offset=self.offset,
            )

            # Get offset for bin edge (not label edge) adjustment
            start_offset = Period(start, self.freq) - Period(p_start, self.freq)
            bin_shift = start_offset.n % freq_mult
            start = p_start

        labels = binner = period_range(
            start=start, end=end, freq=self.freq, name=ax.name
        )

        i8 = memb.asi8

        # when upsampling to subperiods, we need to generate enough bins
        expected_bins_count = len(binner) * freq_mult
        i8_extend = expected_bins_count - (i8[-1] - i8[0])
        rng = np.arange(i8[0], i8[-1] + i8_extend, freq_mult)
        rng += freq_mult
        # adjust bin edge indexes to account for base
        rng -= bin_shift

        # Wrap in PeriodArray for PeriodArray.searchsorted
        prng = type(memb._data)(rng, dtype=memb.dtype)
        bins = memb.searchsorted(prng, side="left")

        if nat_count > 0:
            binner, bins, labels = _insert_nat_bin(binner, bins, labels, nat_count)

        return binner, bins, labels
def _take_new_index(
    obj: FrameOrSeries, indexer: np.ndarray, new_index: Index, axis: int = 0
) -> FrameOrSeries:
    """
    Take ``obj`` along ``indexer`` and attach ``new_index`` as the result's index.

    ``indexer`` holds np.intp positions (-1 marks a missing slot).
    """
    if isinstance(obj, ABCSeries):
        taken = algos.take_nd(obj._values, indexer)
        # error: Incompatible return value type (got "Series", expected "FrameOrSeries")
        return obj._constructor(  # type: ignore[return-value]
            taken, index=new_index, name=obj.name
        )
    if isinstance(obj, ABCDataFrame):
        if axis == 1:
            raise NotImplementedError("axis 1 is not supported")
        mgr = obj._mgr.reindex_indexer(new_axis=new_index, indexer=indexer, axis=1)
        # error: Incompatible return value type
        # (got "DataFrame", expected "FrameOrSeries")
        return obj._constructor(mgr)  # type: ignore[return-value]
    raise ValueError("'obj' should be either a Series or a DataFrame")
def _get_timestamp_range_edges(
    first: Timestamp,
    last: Timestamp,
    freq: BaseOffset,
    closed: Literal["right", "left"] = "left",
    origin="start_day",
    offset: Timedelta | None = None,
) -> tuple[Timestamp, Timestamp]:
    """
    Adjust the `first` Timestamp to the preceding Timestamp that resides on
    the provided offset. Adjust the `last` Timestamp to the following
    Timestamp that resides on the provided offset. Input Timestamps that
    already reside on the offset will be adjusted depending on the type of
    offset and the `closed` parameter.

    Parameters
    ----------
    first : pd.Timestamp
        The beginning Timestamp of the range to be adjusted.
    last : pd.Timestamp
        The ending Timestamp of the range to be adjusted.
    freq : pd.DateOffset
        The dateoffset to which the Timestamps will be adjusted.
    closed : {'right', 'left'}, default "left"
        Which side of bin interval is closed.
    origin : {'epoch', 'start', 'start_day'} or Timestamp, default 'start_day'
        The timestamp on which to adjust the grouping. The timezone of origin must
        match the timezone of the index.
        If a timestamp is not used, these values are also supported:

        - 'epoch': `origin` is 1970-01-01
        - 'start': `origin` is the first value of the timeseries
        - 'start_day': `origin` is the first day at midnight of the timeseries
    offset : pd.Timedelta, default is None
        An offset timedelta added to the origin.

    Returns
    -------
    A tuple of length 2, containing the adjusted pd.Timestamp objects.
    """
    if isinstance(freq, Tick):
        index_tz = first.tz
        if isinstance(origin, Timestamp) and (origin.tz is None) != (index_tz is None):
            raise ValueError("The origin must have the same timezone as the index.")
        elif origin == "epoch":
            # set the epoch based on the timezone to have similar bins results when
            # resampling on the same kind of indexes on different timezones
            origin = Timestamp("1970-01-01", tz=index_tz)

        if isinstance(freq, Day):
            # _adjust_dates_anchored assumes 'D' means 24H, but first/last
            # might contain a DST transition (23H, 24H, or 25H).
            # So "pretend" the dates are naive when adjusting the endpoints
            first = first.tz_localize(None)
            last = last.tz_localize(None)
            if isinstance(origin, Timestamp):
                origin = origin.tz_localize(None)

        first, last = _adjust_dates_anchored(
            first, last, freq, closed=closed, origin=origin, offset=offset
        )
        if isinstance(freq, Day):
            # Restore the timezone stripped above.
            first = first.tz_localize(index_tz)
            last = last.tz_localize(index_tz)
    else:
        # Non-Tick offsets: snap to day boundaries and widen by one freq on
        # the open side(s).
        first = first.normalize()
        last = last.normalize()

        if closed == "left":
            first = Timestamp(freq.rollback(first))
        else:
            first = Timestamp(first - freq)

        last = Timestamp(last + freq)

    return first, last
def _get_period_range_edges(
    first: Period,
    last: Period,
    freq: BaseOffset,
    closed: Literal["right", "left"] = "left",
    origin="start_day",
    offset: Timedelta | None = None,
) -> tuple[Period, Period]:
    """
    Adjust the provided `first` and `last` Periods to the respective Period of
    the given offset that encompasses them.

    Parameters
    ----------
    first : pd.Period
        The beginning Period of the range to be adjusted.
    last : pd.Period
        The ending Period of the range to be adjusted.
    freq : pd.DateOffset
        The freq to which the Periods will be adjusted.
    closed : {'right', 'left'}, default "left"
        Which side of bin interval is closed.
    origin : {'epoch', 'start', 'start_day'}, Timestamp, default 'start_day'
        The timestamp on which to adjust the grouping. The timezone of origin must
        match the timezone of the index.

        If a timestamp is not used, these values are also supported:

        - 'epoch': `origin` is 1970-01-01
        - 'start': `origin` is the first value of the timeseries
        - 'start_day': `origin` is the first day at midnight of the timeseries
    offset : pd.Timedelta, default is None
        An offset timedelta added to the origin.

    Returns
    -------
    A tuple of length 2, containing the adjusted pd.Period objects.
    """
    if not all(isinstance(obj, Period) for obj in [first, last]):
        raise TypeError("'first' and 'last' must be instances of type Period")

    # GH 23882
    first = first.to_timestamp()
    last = last.to_timestamp()
    # Record whether the endpoints sit on the offset so the timestamp-based
    # widening below can be compensated when converting back to Periods.
    adjust_first = not freq.is_on_offset(first)
    adjust_last = freq.is_on_offset(last)

    first, last = _get_timestamp_range_edges(
        first, last, freq, closed=closed, origin=origin, offset=offset
    )

    first = (first + int(adjust_first) * freq).to_period(freq)
    last = (last - int(adjust_last) * freq).to_period(freq)
    return first, last
def _insert_nat_bin(
binner: PeriodIndex, bins: np.ndarray, labels: PeriodIndex, nat_count: int
) -> tuple[PeriodIndex, np.ndarray, PeriodIndex]:
# NaT handling as in pandas._lib.lib.generate_bins_dt64()
# shift bins by the number of NaT
assert nat_count > 0
bins += nat_count
bins = np.insert(bins, 0, nat_count)
binner = binner.insert(0, NaT)
labels = labels.insert(0, NaT)
return binner, bins, labels
def _adjust_dates_anchored(
    first: Timestamp,
    last: Timestamp,
    freq: Tick,
    closed: Literal["right", "left"] = "right",
    origin="start_day",
    offset: Timedelta | None = None,
) -> tuple[Timestamp, Timestamp]:
    """
    Snap ``first``/``last`` onto the freq grid anchored at ``origin`` (+ ``offset``).

    Returns the adjusted (first, last) pair so that both ends land on a
    multiple of ``freq`` measured from the anchor, widened as required by
    ``closed``.
    """
    # First and last offsets should be calculated from the start day to fix an
    # error cause by resampling across multiple days when a one day period is
    # not a multiple of the frequency. See GH 8683
    # To handle frequencies that are not multiple or divisible by a day we let
    # the possibility to define a fixed origin timestamp. See GH 31809
    origin_nanos = 0  # origin == "epoch"
    if origin == "start_day":
        origin_nanos = first.normalize().value
    elif origin == "start":
        origin_nanos = first.value
    elif isinstance(origin, Timestamp):
        origin_nanos = origin.value
    elif origin in ["end", "end_day"]:
        # Walk backwards from the (possibly day-ceiled) end to find the
        # anchored first edge.
        origin = last if origin == "end" else last.ceil("D")
        sub_freq_times = (origin.value - first.value) // freq.nanos
        if closed == "left":
            sub_freq_times += 1
        first = origin - sub_freq_times * freq
        origin_nanos = first.value
    origin_nanos += offset.value if offset else 0

    # GH 10117 & GH 19375. If first and last contain timezone information,
    # Perform the calculation in UTC in order to avoid localizing on an
    # Ambiguous or Nonexistent time.
    first_tzinfo = first.tzinfo
    last_tzinfo = last.tzinfo
    if first_tzinfo is not None:
        first = first.tz_convert("UTC")
    if last_tzinfo is not None:
        last = last.tz_convert("UTC")

    # Distance of each endpoint from the anchored grid, in nanoseconds.
    foffset = (first.value - origin_nanos) % freq.nanos
    loffset = (last.value - origin_nanos) % freq.nanos

    if closed == "right":
        if foffset > 0:
            # roll back
            fresult = first.value - foffset
        else:
            fresult = first.value - freq.nanos

        if loffset > 0:
            # roll forward
            lresult = last.value + (freq.nanos - loffset)
        else:
            # already the end of the road
            lresult = last.value
    else:  # closed == 'left'
        if foffset > 0:
            fresult = first.value - foffset
        else:
            # start of the road
            fresult = first.value

        if loffset > 0:
            # roll forward
            lresult = last.value + (freq.nanos - loffset)
        else:
            lresult = last.value + freq.nanos
    fresult = Timestamp(fresult)
    lresult = Timestamp(lresult)
    if first_tzinfo is not None:
        fresult = fresult.tz_localize("UTC").tz_convert(first_tzinfo)
    if last_tzinfo is not None:
        lresult = lresult.tz_localize("UTC").tz_convert(last_tzinfo)
    return fresult, lresult
def asfreq(
    obj: FrameOrSeries,
    freq,
    method=None,
    how=None,
    normalize: bool = False,
    fill_value=None,
) -> FrameOrSeries:
    """
    Utility frequency conversion method for Series/DataFrame.

    See :meth:`pandas.NDFrame.asfreq` for full documentation.
    """
    if isinstance(obj.index, PeriodIndex):
        # PeriodIndex has its own asfreq; fill methods do not apply.
        if method is not None:
            raise NotImplementedError("'method' argument is not supported")

        if how is None:
            how = "E"

        new_obj = obj.copy()
        new_obj.index = obj.index.asfreq(freq, how=how)

    elif len(obj.index) == 0:
        # Nothing to reindex; just stamp the requested freq onto the
        # (empty) index.
        new_obj = obj.copy()

        new_obj.index = _asfreq_compat(obj.index, freq)
    else:
        dti = date_range(obj.index.min(), obj.index.max(), freq=freq)
        dti.name = obj.index.name
        new_obj = obj.reindex(dti, method=method, fill_value=fill_value)
        if normalize:
            new_obj.index = new_obj.index.normalize()

    return new_obj
def _asfreq_compat(index: DatetimeIndex | PeriodIndex | TimedeltaIndex, freq):
"""
Helper to mimic asfreq on (empty) DatetimeIndex and TimedeltaIndex.
Parameters
----------
index : PeriodIndex, DatetimeIndex, or TimedeltaIndex
freq : DateOffset
Returns
-------
same type as index
"""
if len(index) != 0:
# This should never be reached, always checked by the caller
raise ValueError(
"Can only set arbitrary freq for empty DatetimeIndex or TimedeltaIndex"
)
new_index: Index
if isinstance(index, PeriodIndex):
new_index = index.asfreq(freq=freq)
elif isinstance(index, DatetimeIndex):
new_index = DatetimeIndex([], dtype=index.dtype, freq=freq, name=index.name)
elif isinstance(index, TimedeltaIndex):
new_index = TimedeltaIndex([], dtype=index.dtype, freq=freq, name=index.name)
else: # pragma: no cover
raise TypeError(type(index))
return new_index
| 32.121504 | 88 | 0.5781 |
98e61b9bdb1046ca889ae3294916c3b73bfe6842 | 13,091 | py | Python | papers/ReTraCk/parser/sparql_executor.py | microsoft/KC | 928c74073246ef932f6b80f6fe353117a6cacb55 | [
"MIT"
] | 29 | 2021-07-27T05:48:53.000Z | 2022-03-30T00:05:41.000Z | papers/ReTraCk/parser/sparql_executor.py | microsoft/KC | 928c74073246ef932f6b80f6fe353117a6cacb55 | [
"MIT"
] | 5 | 2021-07-29T08:00:26.000Z | 2022-03-24T02:35:15.000Z | papers/ReTraCk/parser/sparql_executor.py | microsoft/KC | 928c74073246ef932f6b80f6fe353117a6cacb55 | [
"MIT"
] | 7 | 2021-07-29T07:53:52.000Z | 2022-02-21T08:10:26.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import re
import time
from SPARQLWrapper import SPARQLWrapper, JSON, SPARQLExceptions
# Shared endpoint handle used by every exec_* helper below;
# defaults to a local endpoint on port 8890.
sparql = SPARQLWrapper("http://localhost:8890/sparql/")
# Freebase namespace prefix stripped from returned URIs.
prefix = "http://rdf.freebase.com/ns/"
def set_sparql_wrapper(uri):
    """Repoint the module-wide SPARQL endpoint at *uri*."""
    global sparql
    sparql = SPARQLWrapper(uri)
# Maps the documented SPARQLWrapper exception classes to HTTP-like status codes.
_SPARQL_ERROR_CODES = {
    SPARQLExceptions.EndPointInternalError: 500,
    SPARQLExceptions.QueryBadFormed: 400,
    SPARQLExceptions.EndPointNotFound: 404,
    SPARQLExceptions.Unauthorized: 401,
    SPARQLExceptions.URITooLong: 414,
}
def exec_sparql(query):
    """Execute *query* against the shared endpoint, retrying once on failure.

    Parameters
    ----------
    query : str
        Full SPARQL query text.

    Returns
    -------
    (list, int)
        The answer values (Freebase ns-prefix stripped, trailing "-08:00"
        removed) and a status code: 200 on success, an HTTP-like error code
        for known endpoint errors, or -1 for anything unexpected.
    """
    status_code = 200
    results = None
    try:
        sparql.setQuery(query)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
    except SPARQLExceptions.SPARQLWrapperException as e:
        status_code = _SPARQL_ERROR_CODES.get(type(e), -1)
    except Exception as e:
        print(e)
        status_code = -1
    pred_answer = []
    if status_code != 200:
        # transient failures (endpoint restart, load spikes) often clear quickly
        time.sleep(5)
        try:
            sparql.setQuery(query)
            sparql.setReturnFormat(JSON)
            results = sparql.query().convert()
        except Exception:
            return pred_answer, status_code
        status_code = 200
    for result in results["results"]["bindings"]:
        if 'x' in result:
            value = result["x"]["value"]
        elif 'callret-0' in result:
            value = result['callret-0']['value']
        elif 'value' in result:
            value = result['value']['value']
        else:
            raise Exception("UNKNOWN {}".format(result))
        if value.startswith(prefix):
            value = value[len(prefix):]
        # strip a trailing "-08:00" timezone offset from datetime answers
        value = value.replace("-08:00", '')
        pred_answer.append(value)
    return pred_answer, status_code
def exec_demo_sparql(query):
    """Run a demo query and map entity ids to their name/description/type.

    Expects the query to bind ?ent_id and optionally ?ent_desc/?ent_name/?ent_type.
    Retries once after 5 s on failure. Returns (meta_info, status_code) where
    meta_info is {ent_id: {"ent_desc", "ent_name", "ent_type"}} with the
    Freebase ns-prefix stripped from ids and types.
    """
    status_code = 200
    try:
        sparql.setQuery(query)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
    except SPARQLExceptions.EndPointInternalError:
        status_code = 500
    except SPARQLExceptions.QueryBadFormed:
        status_code = 400
    except SPARQLExceptions.EndPointNotFound:
        status_code = 404
    except SPARQLExceptions.Unauthorized:
        status_code = 401
    except SPARQLExceptions.URITooLong:
        status_code = 414
    except:
        status_code = -1
    meta_info = {}
    if status_code != 200:
        # single retry after a pause; on a second failure return the first error code
        time.sleep(5)
        try:
            sparql.setQuery(query)
            sparql.setReturnFormat(JSON)
            results = sparql.query().convert()
        except:
            return meta_info, status_code
        status_code = 200
    for result in results["results"]["bindings"]:
        # optional bindings default to "" when the endpoint returned no value
        meta_info[result["ent_id"]["value"][len(prefix):]] = {
            "ent_desc": result["ent_desc"]["value"] if "ent_desc" in result else "",
            "ent_name": result["ent_name"]["value"] if "ent_name" in result else "",
            "ent_type": result["ent_type"]["value"][len(prefix):] if "ent_type" in result else ""
        }
    return meta_info, status_code
def exec_schema_demo_sparql(query):
    """Run a schema lookup query keyed by ?schema_id instead of ?ent_id.

    The "/a/b" schema key is normalized to dotted "a.b" form. Retries once
    after 5 s. Returns (meta_info, status_code) where meta_info is
    {schema_key: {"ent_id", "ent_desc", "ent_name", "ent_type"}}.
    """
    status_code = 200
    try:
        sparql.setQuery(query)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
    except SPARQLExceptions.EndPointInternalError:
        status_code = 500
    except SPARQLExceptions.QueryBadFormed:
        status_code = 400
    except SPARQLExceptions.EndPointNotFound:
        status_code = 404
    except SPARQLExceptions.Unauthorized:
        status_code = 401
    except SPARQLExceptions.URITooLong:
        status_code = 414
    except:
        status_code = -1
    meta_info = {}
    if status_code != 200:
        time.sleep(5)
        try:
            sparql.setQuery(query)
            sparql.setReturnFormat(JSON)
            results = sparql.query().convert()
        except:
            return meta_info, status_code
        status_code = 200
    for result in results["results"]["bindings"]:
        # "/people/person" -> "people.person" (leading slash dropped)
        meta_info[result["schema_id"]["value"][1:].replace("/", ".")] = {
            "ent_id": result["ent_id"]["value"][len(prefix):],
            "ent_desc": result["ent_desc"]["value"] if "ent_desc" in result else "",
            "ent_name": result["ent_name"]["value"] if "ent_name" in result else "",
            "ent_type": result["ent_type"]["value"][len(prefix):] if "ent_type" in result else ""
        }
    return meta_info, status_code
def exec_anchor_relation_sparql(query, rel_name="in_rel"):
    """Run *query* and bucket the ``rel_name`` bindings by their ``?ent_id``.

    Returns ({ent_id: [relation, ...]}, status_code); retries once after 5 s.
    """
    status_code = 200
    results = None
    try:
        sparql.setQuery(query)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
    except SPARQLExceptions.EndPointInternalError:
        status_code = 500
    except SPARQLExceptions.QueryBadFormed:
        status_code = 400
    except SPARQLExceptions.EndPointNotFound:
        status_code = 404
    except SPARQLExceptions.Unauthorized:
        status_code = 401
    except SPARQLExceptions.URITooLong:
        status_code = 414
    except:
        status_code = -1
    meta_info = {}
    if status_code != 200:
        time.sleep(5)
        try:
            sparql.setQuery(query)
            sparql.setReturnFormat(JSON)
            results = sparql.query().convert()
        except:
            return meta_info, status_code
        status_code = 200
    for binding in results["results"]["bindings"]:
        ent_id = binding["ent_id"]["value"][len(prefix):]
        meta_info.setdefault(ent_id, []).append(binding[rel_name]["value"][len(prefix):])
    return meta_info, status_code
def exec_verify_sparql(query):
    """Execute *query*, returning answers together with the ?xsl* slot bindings.

    Retries once after a 5 second pause on failure.

    Returns
    -------
    (pred_answer, slot_values, status_code)
        pred_answer : list of answer values (Freebase prefix / "-08:00" stripped).
        slot_values : per-row values of every projected variable whose name
            contains "xsl", aligned index-for-index with pred_answer.
        status_code : 200 on success, 100 when 10000+ rows came back
            (results presumably truncated by the endpoint — confirm cap),
            an HTTP-like error code or -1 otherwise.
    """
    status_code = 200
    results = None
    try:
        sparql.setQuery(query)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
    except SPARQLExceptions.EndPointInternalError:
        status_code = 500
    except SPARQLExceptions.QueryBadFormed:
        status_code = 400
    except SPARQLExceptions.EndPointNotFound:
        status_code = 404
    except SPARQLExceptions.Unauthorized:
        status_code = 401
    except SPARQLExceptions.URITooLong:
        status_code = 414
    except Exception:
        status_code = -1
    pred_answer = []
    slot_values = []
    if status_code != 200:
        time.sleep(5)
        try:
            sparql.setQuery(query)
            sparql.setReturnFormat(JSON)
            results = sparql.query().convert()
        except Exception:
            return pred_answer, slot_values, status_code
        status_code = 200
    # variables whose name contains "xsl" are the slots to report per row
    slot_idxes = [k for k in results["head"]["vars"] if "xsl" in k]
    for result in results["results"]["bindings"]:
        if 'x' in result:
            value = result["x"]["value"]
        elif 'callret-0' in result:
            value = result['callret-0']['value']
        elif 'value' in result:
            value = result['value']['value']
        else:
            raise Exception("UNKNOWN {}".format(result))
        if value.startswith(prefix):
            value = value[len(prefix):]
        value = value.replace("-08:00", '')
        values = [result[xsl]["value"] for xsl in slot_idxes]
        for i in range(len(values)):
            if values[i].startswith(prefix):
                values[i] = values[i][len(prefix):]
            values[i] = values[i].replace("-08:00", '')
        slot_values.append(values)
        pred_answer.append(value)
    if len(slot_values) >= 10000:
        status_code = 100
    return pred_answer, slot_values, status_code
def exec_verify_sparql_xsl_only(query):
    """Like exec_verify_sparql but returns only the ?xsl* slot bindings.

    Retries once after 5 s. Returns (slot_values, status_code); status 100
    flags a result set of 10000+ rows (likely truncated — confirm endpoint cap).
    """
    status_code = 200
    try:
        sparql.setQuery(query)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
    except SPARQLExceptions.EndPointInternalError:
        status_code = 500
    except SPARQLExceptions.QueryBadFormed:
        status_code = 400
    except SPARQLExceptions.EndPointNotFound:
        status_code = 404
    except SPARQLExceptions.Unauthorized:
        status_code = 401
    except SPARQLExceptions.URITooLong:
        status_code = 414
    except:
        status_code = -1
    slot_values = []
    if status_code != 200:
        time.sleep(5)
        try:
            sparql.setQuery(query)
            sparql.setReturnFormat(JSON)
            results = sparql.query().convert()
        except:
            return slot_values, status_code
        status_code = 200
    # only projected variables whose name contains "xsl" are reported
    slot_idxes = results["head"]["vars"]
    slot_idxes = [k for k in slot_idxes if "xsl" in k]
    # print(slot_idxes)
    # print(query)
    for result in results["results"]["bindings"]:
        values = [result[xsl]["value"] for xsl in slot_idxes]
        for i in range(len(values)):
            if values[i].startswith(prefix):
                values[i] = values[i][len(prefix):]
            values[i] = values[i].replace("-08:00", '')
        slot_values.append(values)
    # print(pred_answer)
    if len(slot_values) >= 10000:
        status_code = 100
    return slot_values, status_code
def exec_verify_sparql_xsl_only_fix_literal(query):
    """Run exec_verify_sparql_xsl_only, retrying untyped on empty typed results.

    If the query contains typed literals (^^xsd:integer/float/dateTime) and the
    typed form matches no rows, strip ALL of those datatype suffixes and retry
    once. (Previously this re-queried after stripping each suffix in turn,
    issuing up to two extra queries whose results were discarded.)
    """
    if "^^<http://www.w3.org/2001/XMLSchema#" not in query:
        return exec_verify_sparql_xsl_only(query)
    slot_values, status_code = exec_verify_sparql_xsl_only(query)
    if status_code == 200 and not slot_values:
        for data_type in ('integer', 'float', 'dateTime'):
            query = query.replace("^^<http://www.w3.org/2001/XMLSchema#{}>".format(data_type), "")
        slot_values, status_code = exec_verify_sparql_xsl_only(query)
    return slot_values, status_code
def exec_sparql_fix_literal(query):
    """Run exec_sparql; if typed literals yield no rows, retry untyped once."""
    marker = "^^<http://www.w3.org/2001/XMLSchema#"
    if marker not in query:
        return exec_sparql(query)
    answers, status_code = exec_sparql(query)
    if status_code == 200 and not answers:
        # drop every ^^<...XMLSchema#type> datatype annotation and retry
        stripped = re.sub(r"XMLSchema#\w+>", "", query.replace("^^<http://www.w3.org/2001/", ""))
        answers, status_code = exec_sparql(stripped)
    return answers, status_code
def retrieve_ent_meta_info(entities):
    """Fetch name, description, and prominent type for the given Freebase MIDs.

    Returns ({ent_id: {"ent_desc", "ent_name", "ent_type"}}, status_code).
    (A leftover debug ``print(query)`` was removed.)
    """
    query = """PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX : <http://rdf.freebase.com/ns/>
    SELECT DISTINCT ?ent_id ?ent_desc ?ent_type ?ent_name
    {
    VALUES ?ent_id {%s}
    OPTIONAL {?ent_id :type.object.name ?ent_name . FILTER (lang(?ent_name) = "en")}
    OPTIONAL {?ent_id :common.topic.description ?ent_desc . FILTER (lang(?ent_desc) = "en")}
    OPTIONAL {?ent_id :kg.object_profile.prominent_type ?ent_type .}
    }
    """ % (" ".join([":{}".format(e) for e in entities]),)
    entity_meta_info, status_code = exec_demo_sparql(query)
    return entity_meta_info, status_code
def retrieve_schema_meta_info(schema_items):
    """Resolve dotted schema keys (e.g. "type.object.name") to entity meta info."""
    # dotted keys become Freebase key literals: "a.b" -> "/a/b"
    key_block = " ".join('"/{}"'.format(item.replace(".", "/")) for item in schema_items)
    query = """PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX : <http://rdf.freebase.com/ns/>
    SELECT DISTINCT ?ent_id ?schema_id ?ent_desc ?ent_type ?ent_name
    {
    VALUES ?schema_id {%s}
    ?ent_id :type.object.key ?schema_id .
    OPTIONAL {?ent_id :type.object.name ?ent_name . FILTER (lang(?ent_name) = "en")}
    OPTIONAL {?ent_id :common.topic.description ?ent_desc . FILTER (lang(?ent_desc) = "en")}
    OPTIONAL {?ent_id :kg.object_profile.prominent_type ?ent_type .}
    }
    """ % (key_block,)
    # print(query)
    meta_info, status_code = exec_schema_demo_sparql(query)
    return meta_info, status_code
def retrieve_anchor_relations(entities):
    """Collect incoming and outgoing Freebase relations for each entity.

    Returns {ent: {"in_relation": [...], "out_relation": [...]}}.
    """
    value_block = " ".join(":{}".format(e) for e in entities)
    in_query = """PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX : <http://rdf.freebase.com/ns/>
    SELECT DISTINCT ?ent_id ?in_rel
    {
    VALUES ?ent_id {%s}
    OPTIONAL {?s ?in_rel ?ent_id . FILTER (regex(?in_rel, "^http://rdf.freebase.com/ns/"))}
    }
    """ % (value_block,)
    ent_in_relation, _ = exec_anchor_relation_sparql(in_query, "in_rel")
    out_query = """PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX : <http://rdf.freebase.com/ns/>
    SELECT DISTINCT ?ent_id ?out_rel
    {
    VALUES ?ent_id {%s}
    OPTIONAL {?ent_id ?out_rel ?v . FILTER (regex(?out_rel, "^http://rdf.freebase.com/ns/"))}
    }
    """ % (value_block,)
    ent_out_relation, _ = exec_anchor_relation_sparql(out_query, "out_rel")
    return {ent: {"in_relation": ent_in_relation.get(ent, []),
                  "out_relation": ent_out_relation.get(ent, [])}
            for ent in entities}
| 35.865753 | 165 | 0.614239 |
30f76087ed10dccfad5a80832cce62f97f3be7aa | 2,575 | py | Python | src/datafiles/ini.py | N-z0/commonz | 275c48ef6aac32f0d809a96e56b0b0c254686747 | [
"Unlicense"
] | null | null | null | src/datafiles/ini.py | N-z0/commonz | 275c48ef6aac32f0d809a96e56b0b0c254686747 | [
"Unlicense"
] | null | null | null | src/datafiles/ini.py | N-z0/commonz | 275c48ef6aac32f0d809a96e56b0b0c254686747 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
#coding: utf-8
### 1st line allows to execute this script by typing only its name in terminal, with no need to precede it with the python command
### 2nd line declaring source code charset should be not necessary but for exemple pydoc request it
__doc__ = "INI File Reading and Writing."#information describing the purpose of this module
__status__ = "Development"#should be one of 'Prototype' 'Development' 'Production' 'Deprecated' 'Release'
__version__ = "1.0.0"# version number,date or about last modification made compared to the previous version
__license__ = "public domain"# ref to an official existing License
__date__ = "2021"#started creation date / year month day
__author__ = "N-zo syslog@laposte.net"#the creator origin of this prog,
__maintainer__ = "Nzo"#person who curently makes improvements, replacing the author
__credits__ = []#passed mainteners and any other helpers
__contact__ = "syslog@laposte.net"# current contact adress for more info about this file
class Parser:
	"""Minimal INI-style configuration reader/writer.

	Values are stored as strings in a two-level dict: section -> key -> value.
	Pairs that appear before any [section] header live in the '' section.
	'#' and ';' start comments; surrounding whitespace is stripped.
	"""
	def __init__(self, pathname):
		# section '' holds key/value pairs preceding any [section] header
		self.parse = {'': {}}
		self.pathname = pathname
	def read(self):
		"""Parse the file at self.pathname; raises EOFError on a malformed line.

		Fixes over the previous version: ``line.find("=")`` was used as a
		boolean, but find() returns -1 (truthy) when '=' is absent, so
		malformed lines crashed with IndexError instead of raising EOFError,
		and values containing '=' were silently truncated.
		"""
		section = ''
		with open(self.pathname, 'r') as f:
			### A file is already an iterable full of lines.
			### It reads lines lazily, unlike .readlines() which slurps everything.
			for line in f:
				line = line.split("#")[0]
				line = line.split(";")[0]
				line = line.strip()
				if line == '':
					pass
				elif line.startswith('[') and line.endswith(']'):
					section = line.strip("[]")
					self.parse.update({section: {}})
				elif "=" in line:
					# split on the first '=' only, so values may contain '='
					key, _, value = line.partition("=")
					self.parse[section].update({key.strip(): value.strip()})
				else:
					raise EOFError
	def get_sections(self):
		return self.parse.keys()
	def get_keys(self, section):
		return self.parse[section].keys()
	def get_valu(self, section, key):
		return self.parse[section][key]
	def set_valu(self, section, key, valu):
		# values are always stored as strings, mirroring what read() produces
		self.parse[section][key] = str(valu)
	def add_section(self, section):
		self.parse[section] = {}
	def add_key(self, section, key):
		self.parse[section][key] = ''
	def write_file(self, pathname=None):
		"""Serialize back to INI text; defaults to the path given at init."""
		if not pathname:
			pathname = self.pathname
		with open(pathname, 'w') as f:
			for section in self.parse.keys():
				f.write('[{}]\n'.format(section))
				for key in self.parse[section].keys():
					valu = self.parse[section][key]
					f.write('{} = {}\n'.format(key, valu))
				f.write('\n')
0b7ebb92c0b83015a1347c55f719d924aeb3dcc3 | 9,448 | py | Python | src/pybullet_planning/interfaces/debug_utils/debug_utils.py | logan-dunbar/pybullet_planning | 3b25fc7a0f350f4b46048be5c42f9cbf3ab2d6fb | [
"MIT"
] | null | null | null | src/pybullet_planning/interfaces/debug_utils/debug_utils.py | logan-dunbar/pybullet_planning | 3b25fc7a0f350f4b46048be5c42f9cbf3ab2d6fb | [
"MIT"
] | null | null | null | src/pybullet_planning/interfaces/debug_utils/debug_utils.py | logan-dunbar/pybullet_planning | 3b25fc7a0f350f4b46048be5c42f9cbf3ab2d6fb | [
"MIT"
] | null | null | null | import math
import numpy as np
import pybullet as p
from itertools import product, combinations
from pybullet_planning.utils import get_client, BASE_LINK, GREEN, RED, BLUE, BLACK, WHITE, NULL_ID, YELLOW
from pybullet_planning.interfaces.env_manager.pose_transformation import unit_pose, tform_point, unit_from_theta, get_distance
from pybullet_planning.interfaces.geometry.bounding_box import get_aabb
from pybullet_planning.interfaces.geometry.camera import apply_alpha, set_camera_pose
def get_lifetime(lifetime):
    """Map ``None`` to 0, the value pybullet uses for a permanent debug item."""
    return 0 if lifetime is None else lifetime
def add_debug_parameter():
    """Placeholder for a GUI debug slider helper; not implemented yet."""
    # TODO: make a slider that controls the step in the trajectory
    # TODO: could store a list of savers
    #targetVelocitySlider = p.addUserDebugParameter("wheelVelocity", -10, 10, 0)
    #maxForce = p.readUserDebugParameter(maxForceSlider)
    raise NotImplementedError()
def add_text(text, position=(0, 0, 0), color=BLACK, lifetime=None, parent=NULL_ID, parent_link=BASE_LINK):
    """Add a debug text label and return the pybullet user-debug item id.

    *lifetime* of None means permanent; attach to *parent*/*parent_link*
    (NULL_ID/BASE_LINK draws in the world frame).
    """
    return p.addUserDebugText(str(text), textPosition=position, textColorRGB=color[:3], # textSize=1,
                              lifeTime=get_lifetime(lifetime), parentObjectUniqueId=parent, parentLinkIndex=parent_link,
                              physicsClientId=get_client())
def add_line(start, end, color=BLACK, width=1, lifetime=None, parent=NULL_ID, parent_link=BASE_LINK):
    """Draw a debug line segment from *start* to *end*.

    Parameters
    ----------
    start, end : sequence of 3 floats
        Segment endpoints; expressed in the parent link frame when *parent*
        is given, otherwise in the world frame.
    color : RGB(A) tuple, optional
        Only the RGB components are passed to pybullet, by default BLACK.
    width : int, optional
        Line width, by default 1.
    lifetime : float or None, optional
        Seconds before auto-removal; None means permanent, by default None.
    parent : int, optional
        Body unique id to attach the line to, by default NULL_ID (world frame).
    parent_link : int, optional
        Link index on *parent*, by default BASE_LINK.

    Returns
    -------
    int
        User-debug item id; pass it to ``remove_debug`` to delete the line.
    """
    return p.addUserDebugLine(start, end, lineColorRGB=color[:3], lineWidth=width,
                              lifeTime=get_lifetime(lifetime), parentObjectUniqueId=parent, parentLinkIndex=parent_link,
                              physicsClientId=get_client())
def remove_debug(debug):
    """Delete a single user-debug item by id."""
    p.removeUserDebugItem(debug, physicsClientId=get_client())
remove_handle = remove_debug  # alias kept for callers using the older name
def remove_handles(handles):
    """Delete every debug item id in *handles*."""
    for handle in handles:
        remove_debug(handle)
def remove_all_debug():
    """Delete all user-debug items in the current client."""
    p.removeAllUserDebugItems(physicsClientId=get_client())
def add_body_name(body, name=None, **kwargs):
    """Attach a floating text label (the body's name by default) to *body*."""
    from pybullet_planning.interfaces.env_manager.pose_transformation import set_pose
    from pybullet_planning.interfaces.env_manager.savers import PoseSaver
    from pybullet_planning.interfaces.robots.body import get_name
    if name is None:
        name = get_name(body)
    with PoseSaver(body):
        # measure the AABB at the identity pose so the label offset is pose-independent
        set_pose(body, unit_pose())
        lower, upper = get_aabb(body)
        #position = (0, 0, upper[2])
        position = upper
    return add_text(name, position=position, parent=body, **kwargs) # removeUserDebugItem
def add_segments(points, closed=False, **kwargs):
    """Draw a polyline through *points*; close it back to the start if *closed*.

    Returns the list of debug-line handles, in drawing order.
    """
    handles = [add_line(a, b, **kwargs) for a, b in zip(points, points[1:])]
    if closed:
        handles.append(add_line(points[-1], points[0], **kwargs))
    return handles
def draw_link_name(body, link=BASE_LINK):
    """Label *link* with its name, offset 0.2 along the link frame's y axis."""
    from pybullet_planning.interfaces.robots.link import get_link_name
    return add_text(get_link_name(body, link), position=(0, 0.2, 0),
                    parent=body, parent_link=link)
def draw_pose(pose, length=0.1, **kwargs):
    """Draw the three axes of *pose* as lines of *length*, colored by axis.

    Each axis line's color equals its unit vector, so x/y/z render as
    red/green/blue. Returns the three debug-line handles.
    """
    origin = tform_point(pose, np.zeros(3))
    handles = []
    for axis in np.eye(3):
        tip = tform_point(pose, length * axis)
        handles.append(add_line(origin, tip, color=axis, **kwargs))
    return handles
def draw_base_limits(limits, z=1e-2, **kwargs):
    """Draw the rectangular (lower, upper) XY base limits at height *z*."""
    lower, upper = limits
    vertices = [(lower[0], lower[1], z), (lower[0], upper[1], z),
                (upper[0], upper[1], z), (upper[0], lower[1], z)]
    return add_segments(vertices, closed=True, **kwargs)
def draw_circle(center, radius, n=24, **kwargs):
    """Approximate a circle in the XY plane with a closed *n*-gon of debug lines."""
    vertices = []
    for i in range(n):
        # unit direction in the plane, padded with z=0
        direction = np.append(unit_from_theta(i * 2 * math.pi / n), [0])
        vertices.append(center + radius * direction)
    return add_segments(vertices, closed=True, **kwargs)
def draw_aabb(aabb, **kwargs):
    """Draw every edge of an axis-aligned bounding box (any dimension)."""
    d = len(aabb[0])
    # each corner is an index tuple choosing lower(0)/upper(1) per coordinate
    vertices = list(product(range(len(aabb)), repeat=d))
    lines = []
    for i1, i2 in combinations(vertices, 2):
        # corners that differ in exactly one choice form an edge of the box
        if sum(i1[k] != i2[k] for k in range(d)) == 1:
            p1 = [aabb[i1[k]][k] for k in range(d)]
            p2 = [aabb[i2[k]][k] for k in range(d)]
            lines.append(add_line(p1, p2, **kwargs))
    return lines
def draw_point(point, size=0.01, **kwargs):
    """Mark *point* with an axis-aligned cross whose arms span *size* in total."""
    lines = []
    for i in range(len(point)):
        axis = np.zeros(len(point))
        axis[i] = 1.0
        p1 = np.array(point) - size/2 * axis
        p2 = np.array(point) + size/2 * axis
        lines.append(add_line(p1, p2, **kwargs))
    return lines
    #extent = size * np.ones(len(point)) / 2
    #aabb = np.array(point) - extent, np.array(point) + extent
    #return draw_aabb(aabb, **kwargs)
def get_face_edges(face):
    """Return consecutive vertex pairs of *face*, wrapping last back to first."""
    count = len(face)
    return [(face[i], face[(i + 1) % count]) for i in range(count)]
def draw_mesh(mesh, **kwargs):
    """Draw every edge of a (vertices, faces) mesh as debug lines."""
    verts, faces = mesh
    return [add_line(verts[i1], verts[i2], **kwargs)
            for face in faces
            for i1, i2 in get_face_edges(face)]
def draw_ray(ray, ray_result=None, visible_color=GREEN, occluded_color=RED, **kwargs):
    """Draw *ray*, split at the hit point into visible and occluded segments.

    Without a *ray_result* the whole ray is drawn in *visible_color*.
    Returns the list of created debug-line handles.
    """
    if ray_result is None:
        return [add_line(ray.start, ray.end, color=visible_color, **kwargs)]
    if ray_result.objectUniqueId == NULL_ID:
        # nothing was hit: the whole ray is visible
        hit_position = ray.end
    else:
        hit_position = ray_result.hit_position
    return [
        add_line(ray.start, hit_position, color=visible_color, **kwargs),
        add_line(hit_position, ray.end, color=occluded_color, **kwargs),
    ]
def get_body_from_pb_id(i):
    """Map pybullet body index *i* to its unique body id in the current client."""
    return p.getBodyUniqueId(i, physicsClientId=get_client())
def draw_collision_diagnosis(pb_closest_pt_output, viz_last_duration=-1, line_color=YELLOW, \
    focus_camera=True, camera_ray=np.array([0.1, 0, 0.05])):
    """Visualize pairwise link collisions from a pybullet closest-point query.

    For every contact record the two involved links are cloned (tinted red and
    green), labeled, and connected by a line through the closest points; the
    camera can be aimed at the contact, and execution pauses so the user can
    inspect each collision before everything is cleaned up again.

    Parameters
    ----------
    pb_closest_pt_output : list
        Contact records where indices 1/2 are body ids, 3/4 link indices and
        5/6 the closest points on each body (pybullet getClosestPoints layout
        — confirm against the caller).
    viz_last_duration : float, optional
        Seconds to display each contact; a negative value waits for user
        input instead, by default -1.
    line_color : tuple, optional
        Color of the closest-point line, by default YELLOW.
    focus_camera : bool, optional
        Aim the camera at each contact point, by default True.
    camera_ray : np.ndarray, optional
        Camera offset from the contact point; NOTE(review): a mutable ndarray
        default is shared across calls — harmless here since it is only read.
    """
    from pybullet_planning.interfaces.env_manager.simulation import has_gui
    from pybullet_planning.interfaces.env_manager.user_io import HideOutput
    from pybullet_planning.interfaces.env_manager.user_io import wait_for_user, wait_for_duration
    from pybullet_planning.interfaces.robots.link import get_link_name, get_links
    from pybullet_planning.interfaces.robots.body import set_color, remove_body, clone_body, get_name
    # NOTE(review): headless guard — the 'and pb_closest_pt_output' clause only
    # matters for empty input; confirm the intended condition.
    if not has_gui() and pb_closest_pt_output:
        return
    # if paint_all_others:
    #     set_all_bodies_color()
    #     for b in obstacles:
    #         set_color(b, (0,0,1,0.3))
    for u_cr in pb_closest_pt_output:
        handles = []
        b1 = get_body_from_pb_id(u_cr[1])
        b2 = get_body_from_pb_id(u_cr[2])
        l1 = u_cr[3]
        l2 = u_cr[4]
        b1_name = get_name(b1)
        b2_name = get_name(b2)
        l1_name = get_link_name(b1, l1)
        l2_name = get_link_name(b2, l2)
        print('*'*10)
        print('pairwise link collision: (Body #{0}, Link #{1}) - (Body #{2} Link #{3})'.format(
            b1_name, l1_name, b2_name, l2_name))
        # fall back to highlighting the original bodies if cloning fails
        clone1_fail = False
        clone2_fail = False
        try:
            with HideOutput():
                cloned_body1 = clone_body(b1, links=[l1] if get_links(b1) else None, collision=True, visual=False)
        except:
            print('cloning (body #{}, link #{}) fails.'.format(b1_name, l1_name))
            clone1_fail = True
            cloned_body1 = b1
        try:
            with HideOutput():
                cloned_body2 = clone_body(b2, links=[l2] if get_links(b2) else None, collision=True, visual=False)
        except:
            print('cloning (body #{}, link #{}) fails.'.format(b2_name, l2_name))
            clone2_fail = True
            cloned_body2 = b2
        set_color(cloned_body1, apply_alpha(RED, 0.2))
        set_color(cloned_body2, apply_alpha(GREEN, 0.2))
        handles.append(add_body_name(b1))
        handles.append(add_body_name(b2))
        handles.append(draw_link_name(b1, l1))
        handles.append(draw_link_name(b2, l2))
        handles.append(add_line(u_cr[5], u_cr[6], color=line_color, width=5))
        print('Penetration depth: {:.2E}'.format(get_distance(u_cr[5], u_cr[6])))
        if focus_camera:
            camera_base_pt = u_cr[5]
            camera_pt = np.array(camera_base_pt) + camera_ray
            set_camera_pose(tuple(camera_pt), camera_base_pt)
        if viz_last_duration < 0:
            wait_for_user()
        else:
            wait_for_duration(viz_last_duration)
        # restore lines and colors
        for h in handles: remove_debug(h)
        if not clone1_fail :
            remove_body(cloned_body1)
        else:
            set_color(b1, apply_alpha(WHITE, 0.5))
        if not clone2_fail :
            remove_body(cloned_body2)
        else:
            set_color(b2, apply_alpha(WHITE, 0.5))
735c4284a7be16da9d7c4fb2b0ad062c2cd0131a | 11,862 | py | Python | selfdrive/monitoring/driver_monitor.py | oht-volt/cj_volt | a11d12fbe909b93aa610df9a1c3c02601b726649 | [
"MIT"
] | 2 | 2020-10-22T09:20:14.000Z | 2020-10-22T09:20:20.000Z | selfdrive/monitoring/driver_monitor.py | oht-volt/cj_volt | a11d12fbe909b93aa610df9a1c3c02601b726649 | [
"MIT"
] | null | null | null | selfdrive/monitoring/driver_monitor.py | oht-volt/cj_volt | a11d12fbe909b93aa610df9a1c3c02601b726649 | [
"MIT"
] | null | null | null | from math import atan2, sqrt
from cereal import car
from common.numpy_fast import interp
from common.realtime import DT_DMON
from common.filter_simple import FirstOrderFilter
from common.stat_live import RunningStatFilter
EventName = car.CarEvent.EventName
# ******************************************************************************************
# NOTE: To fork maintainers.
# Disabling or nerfing safety features may get you and your users banned from our servers.
# We recommend that you do not change these numbers from the defaults.
# ******************************************************************************************
# _AWARENESS_TIME = 35. # passive wheel touch total timeout
# NOTE(review): this fork lengthens the stock timeouts (35 s passive, 11 s
# distracted — the original values are kept commented out nearby).
_AWARENESS_TIME = 1800.
_AWARENESS_PRE_TIME_TILL_TERMINAL = 12.
_AWARENESS_PROMPT_TIME_TILL_TERMINAL = 6.
# _DISTRACTED_TIME = 11.
_DISTRACTED_TIME = 60.
_DISTRACTED_PRE_TIME_TILL_TERMINAL = 8.
_DISTRACTED_PROMPT_TIME_TILL_TERMINAL = 6.
# probability thresholds applied to the driver-monitoring model outputs
_FACE_THRESHOLD = 0.5
_PARTIAL_FACE_THRESHOLD = 0.5
_EYE_THRESHOLD = 0.5
_SG_THRESHOLD = 0.5
_BLINK_THRESHOLD = 0.5
_BLINK_THRESHOLD_SLACK = 0.65
_BLINK_THRESHOLD_STRICT = 0.5
_PITCH_WEIGHT = 1.35 # pitch matters a lot more
_POSESTD_THRESHOLD = 0.14
_METRIC_THRESHOLD = 0.4
_METRIC_THRESHOLD_SLACK = 0.55
_METRIC_THRESHOLD_STRICT = 0.4
_PITCH_POS_ALLOWANCE = 0.12 # rad, to not be too sensitive on positive pitch
_PITCH_NATURAL_OFFSET = 0.02 # people don't seem to look straight when they drive relaxed, rather a bit up
_YAW_NATURAL_OFFSET = 0.08 # people don't seem to look straight when they drive relaxed, rather a bit to the right (center of car)
_HI_STD_TIMEOUT = 5
_HI_STD_FALLBACK_TIME = 10 # fall back to wheel touch if model is uncertain for a long time
_DISTRACTED_FILTER_TS = 0.25 # 0.6Hz
_POSE_CALIB_MIN_SPEED = 13 # 30 mph
_POSE_OFFSET_MIN_COUNT = 600 # valid data counts before calibration completes, 1 seg is 600 counts
_POSE_OFFSET_MAX_COUNT = 3600 # stop deweighting new data after 6 min, aka "short term memory"
_RECOVERY_FACTOR_MAX = 5. # relative to minus step change
_RECOVERY_FACTOR_MIN = 1.25 # relative to minus step change
MAX_TERMINAL_ALERTS = 3 # not allowed to engage after 3 terminal alerts
MAX_TERMINAL_DURATION = 300 # 30s
# model output refers to center of cropped image, so need to apply the x displacement offset
RESIZED_FOCAL = 320.0
H, W, FULL_W = 320, 160, 426
class DistractedType:
  # Enum-like constants describing why the driver counts as distracted.
  NOT_DISTRACTED = 0
  BAD_POSE = 1   # head pose too far from the (calibrated) neutral pose
  BAD_BLINK = 2  # average blink probability over the threshold
def face_orientation_from_net(angles_desc, pos_desc, rpy_calib, is_rhd):
  """Convert the model's face angles to calibrated device-frame (roll, pitch, yaw).

  The network predicts angles relative to the center of the cropped input
  image, so the face's pixel offset from the full-image center is folded in
  via the focal length before the camera calibration is subtracted. From the
  driver's perspective, pitch is up and yaw is right; roll is uncalibrated.
  """
  pitch_net, yaw_net, roll_net = angles_desc
  # face position in full-image pixel coordinates
  px = (pos_desc[0] + .5) * W - W + FULL_W
  py = (pos_desc[1] + .5) * H
  yaw_focal_angle = atan2(px - FULL_W // 2, RESIZED_FOCAL)
  pitch_focal_angle = atan2(py - H // 2, RESIZED_FOCAL)
  # remove calibration; yaw calibration flips sign for right-hand-drive regions
  pitch = pitch_net + pitch_focal_angle - rpy_calib[1]
  yaw = -yaw_net + yaw_focal_angle - rpy_calib[2] * (1 - 2 * int(is_rhd))
  return roll_net, pitch, yaw
class DriverPose():
  """Mutable container for the latest head-pose estimate and its calibration state."""
  def __init__(self):
    self.yaw = 0.
    self.pitch = 0.
    self.roll = 0.
    # per-angle model output standard deviations
    self.yaw_std = 0.
    self.pitch_std = 0.
    self.roll_std = 0.
    # running offsets of the driver's neutral pose (see get_pose)
    self.pitch_offseter = RunningStatFilter(max_trackable=_POSE_OFFSET_MAX_COUNT)
    self.yaw_offseter = RunningStatFilter(max_trackable=_POSE_OFFSET_MAX_COUNT)
    self.low_std = True  # True while the model is confident about the pose
    self.cfactor = 1.    # threshold scaling set by DriverStatus.set_policy
class DriverBlink():
  """Latest per-eye blink probabilities plus the policy scaling factor."""
  def __init__(self):
    self.left_blink = 0.
    self.right_blink = 0.
    self.cfactor = 1.  # threshold scaling set by DriverStatus.set_policy
class DriverStatus():
  def __init__(self, rhd=False):
    """Initialize monitoring state; *rhd* marks a right-hand-drive region."""
    self.is_rhd_region = rhd
    self.pose = DriverPose()
    # calibrated once both offseters have accumulated enough samples
    self.pose_calibrated = self.pose.pitch_offseter.filtered_stat.n > _POSE_OFFSET_MIN_COUNT and \
                           self.pose.yaw_offseter.filtered_stat.n > _POSE_OFFSET_MIN_COUNT
    self.blink = DriverBlink()
    # awareness counts down from 1. toward 0. (terminal alert)
    self.awareness = 1.
    self.awareness_active = 1.
    self.awareness_passive = 1.
    self.driver_distracted = False
    self.driver_distraction_filter = FirstOrderFilter(0., _DISTRACTED_FILTER_TS, DT_DMON)
    self.face_detected = False
    self.face_partial = False
    self.terminal_alert_cnt = 0
    self.terminal_time = 0
    self.step_change = 0.
    self.active_monitoring_mode = True
    self.is_model_uncertain = False
    self.hi_stds = 0  # consecutive frames with a high model std
    self.hi_std_alert_enabled = True
    self.threshold_prompt = _DISTRACTED_PROMPT_TIME_TILL_TERMINAL / _DISTRACTED_TIME
    self._set_timers(active_monitoring=True)
  def _set_timers(self, active_monitoring):
    """Switch between active (face-based) and passive timing of awareness decay.

    When switching modes, the current awareness is stashed for the mode being
    left and restored for the mode being entered, so progress in one mode is
    not lost when the other takes over.
    """
    if self.active_monitoring_mode and self.awareness <= self.threshold_prompt:
      # already at/below the prompt level: freeze or keep the active decay rate
      if active_monitoring:
        self.step_change = DT_DMON / _DISTRACTED_TIME
      else:
        self.step_change = 0.
      return # no exploit after orange alert
    elif self.awareness <= 0.:
      # terminal alert reached; nothing to retime
      return
    if active_monitoring:
      # when falling back from passive mode to active mode, reset awareness to avoid false alert
      if not self.active_monitoring_mode:
        self.awareness_passive = self.awareness
        self.awareness = self.awareness_active
      self.threshold_pre = _DISTRACTED_PRE_TIME_TILL_TERMINAL / _DISTRACTED_TIME
      self.threshold_prompt = _DISTRACTED_PROMPT_TIME_TILL_TERMINAL / _DISTRACTED_TIME
      self.step_change = DT_DMON / _DISTRACTED_TIME
      self.active_monitoring_mode = True
    else:
      if self.active_monitoring_mode:
        self.awareness_active = self.awareness
        self.awareness = self.awareness_passive
      self.threshold_pre = _AWARENESS_PRE_TIME_TILL_TERMINAL / _AWARENESS_TIME
      self.threshold_prompt = _AWARENESS_PROMPT_TIME_TILL_TERMINAL / _AWARENESS_TIME
      self.step_change = DT_DMON / _AWARENESS_TIME
      self.active_monitoring_mode = False
def _is_driver_distracted(self, pose, blink):
if not self.pose_calibrated:
pitch_error = pose.pitch - _PITCH_NATURAL_OFFSET
yaw_error = pose.yaw - _YAW_NATURAL_OFFSET
else:
pitch_error = pose.pitch - self.pose.pitch_offseter.filtered_stat.mean()
yaw_error = pose.yaw - self.pose.yaw_offseter.filtered_stat.mean()
# positive pitch allowance
if pitch_error > 0.:
pitch_error = max(pitch_error - _PITCH_POS_ALLOWANCE, 0.)
pitch_error *= _PITCH_WEIGHT
pose_metric = sqrt(yaw_error**2 + pitch_error**2)
if pose_metric > _METRIC_THRESHOLD*pose.cfactor:
return DistractedType.BAD_POSE
elif (blink.left_blink + blink.right_blink)*0.5 > _BLINK_THRESHOLD*blink.cfactor:
return DistractedType.BAD_BLINK
else:
return DistractedType.NOT_DISTRACTED
  def set_policy(self, model_data):
    """Scale distraction thresholds by the model's engaged probability.

    Higher engaged probability relaxes (slackens) the pose and blink
    thresholds; lower probability tightens them toward the strict values.
    """
    ep = min(model_data.meta.engagedProb, 0.8) / 0.8
    self.pose.cfactor = interp(ep, [0, 0.5, 1], [_METRIC_THRESHOLD_STRICT, _METRIC_THRESHOLD, _METRIC_THRESHOLD_SLACK])/_METRIC_THRESHOLD
    self.blink.cfactor = interp(ep, [0, 0.5, 1], [_BLINK_THRESHOLD_STRICT, _BLINK_THRESHOLD, _BLINK_THRESHOLD_SLACK])/_BLINK_THRESHOLD
  def get_pose(self, driver_state, cal_rpy, car_speed, op_engaged):
    """Ingest one driver-monitoring model frame and update distraction state.

    Updates face detection, head pose (calibrated via *cal_rpy*), blink
    probabilities, the filtered distraction flag, the neutral-pose offseters,
    model-uncertainty tracking and the awareness timers.
    """
    if not all(len(x) > 0 for x in [driver_state.faceOrientation, driver_state.facePosition,
                                    driver_state.faceOrientationStd, driver_state.facePositionStd]):
      # model produced no face outputs this frame; keep previous state
      return
    self.face_partial = driver_state.partialFace > _PARTIAL_FACE_THRESHOLD
    self.face_detected = driver_state.faceProb > _FACE_THRESHOLD or self.face_partial
    self.pose.roll, self.pose.pitch, self.pose.yaw = face_orientation_from_net(driver_state.faceOrientation, driver_state.facePosition, cal_rpy, self.is_rhd_region)
    self.pose.pitch_std = driver_state.faceOrientationStd[0]
    self.pose.yaw_std = driver_state.faceOrientationStd[1]
    # self.pose.roll_std = driver_state.faceOrientationStd[2]
    model_std_max = max(self.pose.pitch_std, self.pose.yaw_std)
    # a partial face never counts as a confident pose
    self.pose.low_std = model_std_max < _POSESTD_THRESHOLD and not self.face_partial
    # blink probability is gated on the eye being visible and no sunglasses
    self.blink.left_blink = driver_state.leftBlinkProb * (driver_state.leftEyeProb > _EYE_THRESHOLD) * (driver_state.sunglassesProb < _SG_THRESHOLD)
    self.blink.right_blink = driver_state.rightBlinkProb * (driver_state.rightEyeProb > _EYE_THRESHOLD) * (driver_state.sunglassesProb < _SG_THRESHOLD)
    self.driver_distracted = self._is_driver_distracted(self.pose, self.blink) > 0 and \
                             driver_state.faceProb > _FACE_THRESHOLD and self.pose.low_std
    self.driver_distraction_filter.update(self.driver_distracted)
    # update offseter
    # only update when driver is actively driving the car above a certain speed
    if self.face_detected and car_speed > _POSE_CALIB_MIN_SPEED and self.pose.low_std and (not op_engaged or not self.driver_distracted):
      self.pose.pitch_offseter.push_and_update(self.pose.pitch)
      self.pose.yaw_offseter.push_and_update(self.pose.yaw)
    self.pose_calibrated = self.pose.pitch_offseter.filtered_stat.n > _POSE_OFFSET_MIN_COUNT and \
                           self.pose.yaw_offseter.filtered_stat.n > _POSE_OFFSET_MIN_COUNT
    self.is_model_uncertain = self.hi_stds * DT_DMON > _HI_STD_FALLBACK_TIME
    self._set_timers(self.face_detected and not self.is_model_uncertain)
    # track consecutive high-std frames while a face is visible
    if self.face_detected and not self.pose.low_std:
      self.hi_stds += 1
    elif self.face_detected and self.pose.low_std:
      self.hi_stds = 0
def update(self, events, driver_engaged, ctrl_active, standstill):
    """Advance the driver-awareness state machine one step and emit alerts.

    ``awareness`` decays toward -0.1 while the driver is distracted or not
    detected, and recovers toward 1.0 while attentive; its level is mapped to
    pre (green), prompt (orange) and terminal (red) events which are appended
    to *events*.  Module constants (_RECOVERY_FACTOR_*, DT_DMON,
    _HI_STD_FALLBACK_TIME, EventName, ...) are defined elsewhere in this file.
    """
    if (driver_engaged and self.awareness > 0) or not ctrl_active:
        # reset only when on disengagement if red reached
        self.awareness = 1.
        self.awareness_active = 1.
        self.awareness_passive = 1.
        return
    driver_attentive = self.driver_distraction_filter.x < 0.37
    awareness_prev = self.awareness
    # Disabling the most annoying alert imaginable on a non Comma2
    #if self.face_detected and self.hi_stds * DT_DMON > _HI_STD_TIMEOUT and self.hi_std_alert_enabled:
    #  events.add(EventName.driverMonitorLowAcc)
    #  self.hi_std_alert_enabled = False  # only showed once until orange prompt resets it
    if (driver_attentive and self.face_detected and self.pose.low_std and self.awareness > 0):
        # only restore awareness when paying attention and alert is not red
        self.awareness = min(self.awareness + ((_RECOVERY_FACTOR_MAX-_RECOVERY_FACTOR_MIN)*(1.-self.awareness)+_RECOVERY_FACTOR_MIN)*self.step_change, 1.)
        if self.awareness == 1.:
            self.awareness_passive = min(self.awareness_passive + self.step_change, 1.)
        # don't display alert banner when awareness is recovering and has cleared orange
        if self.awareness > self.threshold_prompt:
            return
    # should always be counting if distracted unless at standstill and reaching orange
    if (not (self.face_detected and self.hi_stds * DT_DMON <= _HI_STD_FALLBACK_TIME) or (self.driver_distraction_filter.x > 0.63 and self.driver_distracted and self.face_detected)) and \
       not (standstill and self.awareness - self.step_change <= self.threshold_prompt):
        # decay; floor of -0.1 keeps a short grace period past the red threshold
        self.awareness = max(self.awareness - self.step_change, -0.1)
    alert = None
    if self.awareness <= 0.:
        # terminal red alert: disengagement required
        alert = EventName.driverDistracted if self.active_monitoring_mode else EventName.driverUnresponsive
        self.hi_std_alert_enabled = True
        self.terminal_time += 1
        if awareness_prev > 0.:
            # count only the transition into red, not every step spent there
            self.terminal_alert_cnt += 1
    elif self.awareness <= self.threshold_prompt:
        # prompt orange alert
        alert = EventName.promptDriverDistracted if self.active_monitoring_mode else EventName.promptDriverUnresponsive
    elif self.awareness <= self.threshold_pre:
        # pre green alert
        alert = EventName.preDriverDistracted if self.active_monitoring_mode else EventName.preDriverUnresponsive
    if alert is not None:
        events.add(alert)
| 44.593985 | 186 | 0.733687 |
279cc02ac3251372e9ec1b4b6a2a5891995f7489 | 3,816 | py | Python | jupyterlab_git/tests/test_diff.py | btel/jupyterlab-git | 385916c1d0b1778a18eb7ab683d2c5d4f481a993 | [
"BSD-3-Clause"
] | null | null | null | jupyterlab_git/tests/test_diff.py | btel/jupyterlab-git | 385916c1d0b1778a18eb7ab683d2c5d4f481a993 | [
"BSD-3-Clause"
] | null | null | null | jupyterlab_git/tests/test_diff.py | btel/jupyterlab-git | 385916c1d0b1778a18eb7ab683d2c5d4f481a993 | [
"BSD-3-Clause"
] | null | null | null | import os
from subprocess import CalledProcessError
from unittest.mock import Mock, call, patch
import pytest
import tornado
from jupyterlab_git.git import Git
from .testutils import FakeContentManager
@pytest.mark.asyncio
async def test_changed_files_invalid_input():
    # ``base`` alone (no ``remote``, no ``single_commit``) is an invalid request.
    git = Git(FakeContentManager("/bin"))
    with pytest.raises(tornado.web.HTTPError):
        await git.changed_files(base="64950a634cd11d1a01ddfedaeffed67b531cb11e")
@pytest.mark.asyncio
async def test_changed_files_single_commit():
    commit = "64950a634cd11d1a01ddfedaeffed67b531cb11e"
    with patch("jupyterlab_git.git.execute") as mock_execute:
        # Given: git prints two NUL-separated names (with trailing NUL).
        mock_execute.return_value = tornado.gen.maybe_future(
            (0, "file1.ipynb\x00file2.py\x00", "")
        )
        # When
        actual_response = await Git(FakeContentManager("/bin")).changed_files(
            single_commit=commit
        )
        # Then: the "<sha>^!" revision syntax diffs that single commit.
        expected_cmd = ["git", "diff", commit + "^!", "--name-only", "-z"]
        mock_execute.assert_called_once_with(expected_cmd, cwd="/bin")
        assert actual_response == {"code": 0, "files": ["file1.ipynb", "file2.py"]}
@pytest.mark.asyncio
async def test_changed_files_working_tree():
    with patch("jupyterlab_git.git.execute") as mock_execute:
        # Given: a successful diff yielding two NUL-separated file names.
        mock_execute.return_value = tornado.gen.maybe_future(
            (0, "file1.ipynb\x00file2.py", "")
        )
        # When comparing the working tree against HEAD
        actual_response = await Git(FakeContentManager("/bin")).changed_files(
            base="WORKING", remote="HEAD"
        )
        # Then: only the remote ref appears in the invocation (no --staged).
        expected_cmd = ["git", "diff", "HEAD", "--name-only", "-z"]
        mock_execute.assert_called_once_with(expected_cmd, cwd="/bin")
        assert actual_response == {"code": 0, "files": ["file1.ipynb", "file2.py"]}
@pytest.mark.asyncio
async def test_changed_files_index():
    with patch("jupyterlab_git.git.execute") as mock_execute:
        # Given
        mock_execute.return_value = tornado.gen.maybe_future(
            (0, "file1.ipynb\x00file2.py", "")
        )
        # When comparing the staged index against HEAD
        actual_response = await Git(FakeContentManager("/bin")).changed_files(
            base="INDEX", remote="HEAD"
        )
        # Then: the INDEX base translates into the --staged flag.
        expected_cmd = ["git", "diff", "--staged", "HEAD", "--name-only", "-z"]
        mock_execute.assert_called_once_with(expected_cmd, cwd="/bin")
        assert actual_response == {"code": 0, "files": ["file1.ipynb", "file2.py"]}
@pytest.mark.asyncio
async def test_changed_files_two_commits():
    with patch("jupyterlab_git.git.execute") as mock_execute:
        # Given
        mock_execute.return_value = tornado.gen.maybe_future(
            (0, "file1.ipynb\x00file2.py", "")
        )
        # When diffing two arbitrary refs
        actual_response = await Git(FakeContentManager("/bin")).changed_files(
            base="HEAD", remote="origin/HEAD"
        )
        # Then: both refs are passed straight through to git diff.
        expected_cmd = ["git", "diff", "HEAD", "origin/HEAD", "--name-only", "-z"]
        mock_execute.assert_called_once_with(expected_cmd, cwd="/bin")
        assert actual_response == {"code": 0, "files": ["file1.ipynb", "file2.py"]}
@pytest.mark.asyncio
async def test_changed_files_git_diff_error():
    with patch("jupyterlab_git.git.execute") as mock_execute:
        # Given: git exits non-zero with a message captured as output.
        mock_execute.side_effect = CalledProcessError(128, b"cmd", b"error message")
        # When
        actual_response = await Git(FakeContentManager("/bin")).changed_files(
            base="HEAD", remote="origin/HEAD"
        )
        # Then: the error code/message are surfaced instead of a file list.
        expected_cmd = ["git", "diff", "HEAD", "origin/HEAD", "--name-only", "-z"]
        mock_execute.assert_called_once_with(expected_cmd, cwd="/bin")
        assert actual_response == {"code": 128, "message": "error message"}
| 30.528 | 84 | 0.600105 |
3a93e7b1d0ca1a2d4fbee516c03d94d37d2d38d2 | 1,563 | py | Python | 2020/Day12/Day12_Prob2_Moving_Waypoint.py | guilhermebaos/Advent-of-Code-Solutions | 232facf72a21284478134b2c56357352b4aaaf74 | [
"MIT"
] | null | null | null | 2020/Day12/Day12_Prob2_Moving_Waypoint.py | guilhermebaos/Advent-of-Code-Solutions | 232facf72a21284478134b2c56357352b4aaaf74 | [
"MIT"
] | null | null | null | 2020/Day12/Day12_Prob2_Moving_Waypoint.py | guilhermebaos/Advent-of-Code-Solutions | 232facf72a21284478134b2c56357352b4aaaf74 | [
"MIT"
] | null | null | null | # Puzzle Input
def navigate(movement_list):
    """Apply AoC 2020 day 12 (part 2) moves and return the boat's final position.

    The waypoint is stored *relative to the boat* as ``[north, east]`` and
    starts 1 unit north, 10 units east.  Actions:
      N/S/E/W shift the waypoint; L/R rotate it about the boat in 90-degree
      steps; F moves the boat toward the waypoint ``value`` times.

    Empty/blank entries (e.g. a trailing newline in the input file) are
    skipped, and the boat's ``[north, east]`` coordinates are returned.
    """
    boat = [0, 0]
    waypoint = [1, 10]
    for movement in movement_list:
        if not movement:  # tolerate blank lines from file splitting
            continue
        action = movement[0]       # Get the action
        value = int(movement[1:])  # Get its value
        if action == 'N':
            waypoint[0] += value
        elif action == 'S':
            waypoint[0] -= value
        elif action == 'E':
            waypoint[1] += value
        elif action == 'W':
            waypoint[1] -= value
        elif action == 'L':
            # Rotate left 90 deg: (north, east) -> (east, -north); repeat per 90.
            for _ in range(value // 90):
                waypoint = [waypoint[1], -waypoint[0]]
        elif action == 'R':
            # Rotate right 90 deg: (north, east) -> (-east, north).
            for _ in range(value // 90):
                waypoint = [-waypoint[1], waypoint[0]]
        elif action == 'F':
            # Move the boat toward the waypoint `value` times.
            boat[0] += waypoint[0] * value
            boat[1] += waypoint[1] * value
    return boat


if __name__ == '__main__':
    # Puzzle Input
    with open('Day12_Input.txt') as puzzle_input:
        movement_list = puzzle_input.read().split('\n')
    # The answer is the Manhattan distance from the start (0, 0).
    print(sum(map(abs, navigate(movement_list))))
| 50.419355 | 119 | 0.56366 |
62597411098547078368eaadedbf797b14aec6ce | 1,072 | py | Python | backend/backend/wsgi.py | kevromster/GoEat | c68f405f870d3eeeab8bcadbf6906ad2a09a2ad0 | [
"Apache-2.0"
] | null | null | null | backend/backend/wsgi.py | kevromster/GoEat | c68f405f870d3eeeab8bcadbf6906ad2a09a2ad0 | [
"Apache-2.0"
] | 4 | 2020-05-11T12:01:51.000Z | 2020-09-14T09:11:22.000Z | backend/backend/wsgi.py | kevromster/GoEat | c68f405f870d3eeeab8bcadbf6906ad2a09a2ad0 | [
"Apache-2.0"
] | null | null | null | # Copyright © 2020 Roman Kuskov. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
WSGI config for backend project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module unless the environment
# (e.g. the hosting platform) already specifies one.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
# Module-level WSGI callable that application servers (gunicorn, uWSGI, ...)
# discover by convention.
application = get_wsgi_application()
| 34.580645 | 80 | 0.712687 |
bd39087cac73c212e1e65372cdd81f2454ab7b54 | 3,168 | py | Python | Whatsapp_Automation/Whatsapp_Automation/autowat.py | Harshavardhan808/whatsApp-automation | 2b068db8bc9686339b7837096dd70a24440b90a3 | [
"MIT"
] | null | null | null | Whatsapp_Automation/Whatsapp_Automation/autowat.py | Harshavardhan808/whatsApp-automation | 2b068db8bc9686339b7837096dd70a24440b90a3 | [
"MIT"
] | null | null | null | Whatsapp_Automation/Whatsapp_Automation/autowat.py | Harshavardhan808/whatsApp-automation | 2b068db8bc9686339b7837096dd70a24440b90a3 | [
"MIT"
] | null | null | null | import pandas as pd
import time
from selenium import webdriver
from progress.bar import Bar
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
def upload_csv():
    """Prompt for a CSV path and load it into the module-global DataFrame ``df``.

    The file is read without a header row; the column names below are assigned
    and rows containing any missing value are dropped.  On any read failure the
    program prints a hint and exits.
    """
    try:
        # Renamed from ``csv`` so the local doesn't shadow the stdlib module name.
        csv_path = input("Enter the path of the csv file: ")
        header = ["Name", "Affiliate_ID", "Phone", "Parent_ID"]
        global df
        df = pd.read_csv(csv_path, names=header)
        df.dropna(axis=0, inplace=True)
        # Purely cosmetic ~2 s progress bar so the user gets visual feedback.
        bar = Bar('Processing .csv file', max=100)
        for _ in range(100):
            time.sleep(0.02)
            bar.next()
        print("\nCSV added successfully!!!")
    except Exception:
        # Was a bare ``except:``, which also swallowed KeyboardInterrupt and
        # SystemExit; catching Exception keeps Ctrl-C (and exit()) working.
        print("Fail to load the csv file. Kindly check the file path.")
        exit()
def send_msg():
    """Send the affiliate promo message to every contact in the global ``df``.

    Relies on module globals set in ``__main__``: ``df`` (contacts), ``inp``
    (base product link) and ``driver`` (a logged-in Chrome/WhatsApp session).
    NOTE(review): the XPaths and the ``_3uMse``/``_1U1xa`` class names are
    tied to a specific WhatsApp Web build and will break when it changes.
    """
    count=0
    for i,j in zip(df["Phone"], df["Affiliate_ID"]):
        # ``i`` is the phone string, ``j`` the affiliate id.
        if i.isalnum()==True:
            try:
                # assumes Indian numbers — "91" country code is hard-coded.
                number = "91{}".format(i)
                extension = "?ref={}&t=l".format(j)
                link = inp + extension
                message = 'Thank you for being a part of eMoment.in family!! Check out our new product {} Have a great day!'.format(link)
                url = 'https://wa.me/{}'.format(number)
                driver.get(url)
                # Click through the wa.me interstitial, then into WhatsApp Web.
                continue_to_chat = driver.find_element_by_xpath("/html/body/div[1]/div[1]/div[2]/div/div[1]/div[1]/div/a").click()
                time.sleep(1)
                whatsapp_web = driver.find_element_by_xpath("/html/body/div[1]/div[1]/div[2]/div/div[2]/div/div/a").click()
                time.sleep(2)
                # Wait for the message box, type the message and press Enter.
                click_btn = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, '_3uMse')))
                time.sleep(10)
                toext_box = driver.find_element_by_class_name("_3uMse").send_keys(message)
                send_button = driver.find_element_by_class_name("_1U1xa").send_keys(Keys.ENTER)
                time.sleep(2)
                count += 1
                # Throttle: longer pause every 10 messages (crude anti-ban delay).
                if count % 10 == 0:
                    time.sleep(15)
                else:
                    pass
            except:
                # NOTE(review): bare except — any selenium error is reported as
                # "number not on WhatsApp", which may mask other failures.
                print('Message cannot be send to the number {} as this number is not available on Whatsapp.'.format(i))
        else:
            print("The number {} with affiliate ID {} is invalid".format(i,j))
if __name__ == '__main__':
    # Interactive entry point: load the contacts CSV, open a pre-logged-in
    # Chrome profile, then push the promo message to every contact.
    print("\n")
    print("****************************** Welcome to the Whatsapp Automation Software ******************************")
    print("\n")
    upload_csv()
    # Base product link; send_msg() appends the per-contact affiliate ref.
    inp = input("Enter the link: ")
    options = webdriver.ChromeOptions()
    # NOTE(review): hard-coded, user-specific Chrome profile path and a
    # relative chromedriver path — this only runs on the author's machine.
    CHROME_PROFILE_PATH = "user-data-dir=C:\\Users\\harshavardhan\\PycharmProjects\\Whatsapp_Automation\\Everything"
    options.add_argument(CHROME_PROFILE_PATH)
    driver = webdriver.Chrome("chromedriver.exe", options=options)
    driver.maximize_window()
    send_msg()
    print("\n")
    print("************************ Thank you for using the Whatsapp Automation Software ************************")
    driver.close()
    exit()
b93d4308f5ed6f5a477a820b217910c76fa0da45 | 10,856 | py | Python | auto/python/sfm_ros.py | ACLeighner/AirSim | a4852eb01772c6bef45e481754e8b84fd32caef6 | [
"MIT"
] | null | null | null | auto/python/sfm_ros.py | ACLeighner/AirSim | a4852eb01772c6bef45e481754e8b84fd32caef6 | [
"MIT"
] | null | null | null | auto/python/sfm_ros.py | ACLeighner/AirSim | a4852eb01772c6bef45e481754e8b84fd32caef6 | [
"MIT"
] | null | null | null | import rospy
import tf2_ros
import tf2_geometry_msgs
import cv2
import numpy as np
import math
import struct
from cv_bridge import CvBridge
from sensor_msgs.msg import Image, CameraInfo, PointCloud2, PointField
from sensor_msgs import point_cloud2
from tf2_msgs.msg import TFMessage
from geometry_msgs.msg import PoseStamped, Quaternion, TransformStamped
from nav_msgs.msg import Odometry
from sfm import getColors
from sfm import triangulate
from sfm import triangulate_int
from sfm import drawTracks
from sfm import getTrackLength
from sfm import getObjectPointsEssential
from sfm import eliminateDuplicateObjects
from sfm import baFun
from sfm import bundle_adjustment_sparsity
from scipy.sparse import lil_matrix
from scipy.optimize import least_squares
from tf.transformations import quaternion_matrix, euler_from_quaternion, quaternion_multiply, quaternion_from_matrix
class mapping():
    """ROS node that incrementally builds a sparse SfM point cloud from a
    monocular camera plus an external pose source (ORB-SLAM2 / mavros).

    NOTE(review): this chunk's indentation was lost upstream; structure below
    is reconstructed.  ``camInfoCallback`` is referenced but not defined in
    the visible code — presumably it fills ``self.K``/``self.d``; verify.
    """
    def __init__(self):
        rospy.init_node('mapping', anonymous=True)
        self.bridge = CvBridge()
        self.tfBuffer = tf2_ros.Buffer()
        # NOTE(review): the listener is bound to a local, not self — it may be
        # garbage-collected, silently stopping TF updates; confirm intent.
        listener = tf2_ros.TransformListener(self.tfBuffer)
        self.image = []
        self.pose = PoseStamped()
        self.K = []            # camera intrinsics — presumably set by camInfoCallback
        self.d = []            # distortion coefficients — presumably set by camInfoCallback
        self.cam_width = []
        self.cam_height = []
        self.rotation = []     # 3x3 rotation derived from the latest pose
        self.translation = []  # 3x1 translation derived from the latest pose
        self.tracking = False
        self.img_curr = []
        self.img_prev = []
        self.features_orig = []
        self.features_prev = []
        self.obj_mask = []
        self.reset = True
        #INITIALIZE FEATURE MATCHING PARAMETERS#
        self.maxCorners = 500 #Maximum number of corners to return. If there are more corners than are found, the strongest of them is returned
        self.qualityLevel = 0.01 #For example, if best corner has measure = 1500, and qualityLevel=0.01 , then corners with quality<15 are rejected.
        self.minDistance = 10 #Minimum possible Euclidean distance between the returned corners.
        self.lk_params = dict(winSize = (15,15), maxLevel = 2, criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03))
        #rospy.Subscriber('/mavros/local_position/pose', PoseStamped, self.poseCallback)
        rospy.Subscriber('orb_slam2_mono/pose', PoseStamped, self.poseCallback)
        rospy.Subscriber('/airsim/base_link/camera/image_raw', Image, self.imageCallback)
        rospy.Subscriber('/airsim/base_link/camera', CameraInfo, self.camInfoCallback)
        self.cloud_pub = rospy.Publisher("cloud", PointCloud2, queue_size=10)
        self.test_pose = rospy.Publisher("test_pose", PoseStamped, queue_size=10)
        print('waiting on topics...')
        rospy.wait_for_message('/airsim/base_link/camera/image_raw', Image)
        #self.cam_width = self.img_curr.shape[0]
        #self.cam_height = self.img_curr.shape[1]
        print('K: ', self.K)
        # NOTE(review): ``self.D`` is never assigned in the visible code (only
        # ``self.d``) — likely an AttributeError unless set in camInfoCallback.
        print('D: ', self.D)
        rospy.wait_for_message('/mavros/local_position/pose', PoseStamped)
        print('connected')
        # NOTE(review): ``self.header`` is only set by imageCallback; if no image
        # callback has fired yet this raises AttributeError — confirm ordering.
        print(float(self.pose.header.stamp.secs)+float(self.pose.header.stamp.nsecs)/1000000000.0,float(self.header.stamp.secs)+float(self.header.stamp.nsecs)/1000000000.0)
        # Constructor never returns in normal operation: it hands off to run().
        self.run()
def poseCallback(self, data):
    """Cache the latest pose and derive its rotation matrix / translation.

    Runs on the subscriber thread; writes are not synchronized with run().
    """
    self.pose = data
    #self.test_pose.publish(self.pose)
    q = (self.pose.pose.orientation.x,self.pose.pose.orientation.y,self.pose.pose.orientation.z,self.pose.pose.orientation.w)
    # quaternion_matrix returns a 4x4 homogeneous transform; keep the 3x3 part.
    r = quaternion_matrix(q)
    #print(r)
    self.rotation = r[0:3,0:3]
    self.translation = np.array(([self.pose.pose.position.x],[self.pose.pose.position.y],[self.pose.pose.position.z]))
    #print(self.translation)
def imageCallback(self, data):
    """Cache the newest camera frame (header + OpenCV image) for run()."""
    self.header = data.header
    self.img_curr = self.bridge.imgmsg_to_cv2(data, desired_encoding='passthrough')
def run(self):
    """Optical-flow tracking + incremental SfM loop body.

    NOTE(review): this method is not runnable as committed — ``gray``,
    ``prev``, ``orig``, ``img``, ``orig_count``, ``img_idx``, ``obj_idx``,
    ``all_objs``, ``all_P_mats``, ``all_pts``, ``all_colors``, ``K``, ``d``,
    ``lk_params``, ``maxCorners``, ``o3d``, ``plt`` and ``sys`` are all
    undefined here, and the trailing ``break`` is orphaned.  It was clearly
    the body of a ``while`` loop in a standalone script, pasted into a method
    without adaptation; indentation below is a best-effort reconstruction.
    """
    #TAKE NEW IMAGE#
    rawImage2 = self.img_curr
    if (rawImage2 == None):
        print("Camera is not returning image, please check airsim for error messages")
        sys.exit(0)
    else:
        img2 = self.img_curr
        gray2 = cv2.cvtColor(img2, cv2.COLOR_RGB2GRAY)
    #GET NEW FEATURE LOCATIONS#
    # Track last frame's features into the new frame (Lucas-Kanade).
    next, status, error = cv2.calcOpticalFlowPyrLK(gray, gray2, prev, None, **lk_params)
    error = error[status[:,0] == 1]
    #First, filter out the points with high error
    new = next[status[:,0] == 1]
    new = new[error[:,0] < 10]
    #Update the original list of features to reflect pixels that have been lost in the flow
    orig = orig[status[:,0] == 1]
    orig = orig[error[:,0] < 10]
    #update the object mask
    obj_mask = obj_mask[status[:,0] == 1]
    obj_mask = obj_mask[error[:,0] < 10]
    # Updates previous good feature points
    prev = new.reshape(-1, 1, 2)
    #Optional visualization
    output = drawTracks(orig.astype(int), new, img2, (0, 255, 0))
    cv2.imshow('Tracks',output)
    gray = gray2
    #IF SIGNIFICANT CHANGE FROM PREVIOUS IMAGE#
    avg_track_len = getTrackLength(orig,new)
    # Trigger a new SfM keyframe on long average tracks or >20% feature loss.
    if (avg_track_len > (img.shape[0]/5) or (len(orig)/orig_count < 0.8)):
        print('update, image #:',img_idx)
        #Time to calculate SFM
        prev_pts = orig.reshape(len(orig),2)
        new_pts = new.reshape(len(new),2)
        mask_inf = np.zeros(len(new_pts))
        '''Lets check for infiniti points'''
        for i in range(len(new_pts)):
            x1, y1 = prev_pts[i].ravel()
            x2, y2 = new_pts[i].ravel()
            #get distance between new and original points
            distance = math.sqrt((x1-x2)*(x1-x2)+(y1-y2)*(y1-y2))
            if (distance >= 5):
                mask_inf[i] = 1
        # Points with <5 px of parallax triangulate near infinity; drop them.
        new_pts = new_pts[mask_inf==1]
        obj_mask = obj_mask[mask_inf==1]
        print(len(prev_pts)-len(new_pts),'"infinite" eliminated')
        prev_pts = prev_pts[mask_inf==1]
        #Is this the first calculation?
        if (img_idx == 0):
            #initial P0
            P0 = np.hstack((np.eye(3, 3), np.zeros((3, 1))))
            #get first set of objects from essential matrix
            obj_pts, prev_pts, new_pts, P = getObjectPointsEssential(prev_pts,new_pts,P0,K,d)
            #Check quality of pts
            if (len(obj_pts) > 15):
                # Record (object-id, XYZ, pixel) rows for both views.
                cam_objs = []
                for i in range(len(obj_pts)):
                    cam_objs.append(np.hstack((i,obj_pts[i].ravel(),prev_pts[i].ravel())))
                all_objs.append(cam_objs)
                cam_objs = []
                for i in range(len(obj_pts)):
                    cam_objs.append(np.hstack((i,obj_pts[i].ravel(),new_pts[i].ravel())))
                    obj_idx += 1
                all_objs.append(cam_objs)
                img_idx += 1
                all_P_mats.append(P0)
                all_P_mats.append(P)
                P0 = P
                #get a new set of features to track
                new_features = cv2.goodFeaturesToTrack(gray, maxCorners, qualityLevel, minDistance)
                orig, obj_mask = eliminateDuplicateObjects(all_objs[img_idx],new_features)
                t = np.reshape(P0[:,3],(1,3))
                t = t * np.array([1,-1,1])
                #Get colors from detected pixels for coloring pointcloud
                colors = getColors(new_pts,img2.copy())
                all_pts = obj_pts
                all_colors = colors
                # Append the camera position as a green marker point.
                all_pts = np.vstack((all_pts,np.reshape(t,(1,3))))
                all_colors = np.vstack((all_colors,np.array([0.,1.,0.])))
            else:
                orig = cv2.goodFeaturesToTrack(gray, maxCorners, qualityLevel, minDistance)
        else:#get camera matrices from PNP
            #SolvePNP
            # Use only features with a known 3D object id (mask column 0 != -1).
            masked_objs = obj_mask[obj_mask[:,0]!=-1]
            pixel_pts = new_pts[obj_mask[:,0]!=-1]
            obj_pts = masked_objs[:,1:4]
            _, rvec, tvec, inliers = cv2.solvePnPRansac(obj_pts, pixel_pts, K, d, flags=cv2.SOLVEPNP_EPNP)
            print('SolvePnP used',len(inliers),'points of total', len(pixel_pts),'=',int(len(inliers)/len(pixel_pts)*100),'%')
            #convert pnp rotation to (3x3) with rodrigues method
            P1 = np.hstack((cv2.Rodrigues(rvec)[0], tvec))
            #Now we need to get the full set of matches to apply this projection matrix to
            #Get 3d world points and associated pixel values for the second image
            new_objs, mask_tri, error = triangulate(prev_pts, P0, new_pts, P1, K, d)
            obj_mask = obj_mask[mask_tri[:,0]==1]
            prev_pts = prev_pts[mask_tri[:,0]==1]
            new_pts = new_pts[mask_tri[:,0]==1]
            cam_objs = []
            for i in range(len(new_objs)):
                if (obj_mask[i,0] != -1):
                    # Known object: reuse its id.
                    cam_objs.append(np.hstack((obj_mask[i,0],new_objs[i].ravel(),new_pts[i].ravel())))
                else:
                    # Newly triangulated object: assign the next id.
                    cam_objs.append(np.hstack((obj_idx,new_objs[i].ravel(),new_pts[i].ravel())))
                    obj_idx += 1
            all_objs.append(cam_objs)
            img_idx += 1
            all_P_mats.append(P1)
            P0 = P1
            #get a new set of features to track
            new_features = cv2.goodFeaturesToTrack(gray, maxCorners, qualityLevel, minDistance)
            orig, obj_mask = eliminateDuplicateObjects(all_objs[img_idx],new_features)
            t = np.reshape(P0[:,3],(1,3))
            t = t * np.array([1,-1,1])
            #Get colors from detected pixels for coloring pointcloud
            colors = getColors(new_pts,img2.copy())
            all_pts = np.vstack((all_pts,new_objs))
            all_colors = np.vstack((all_colors,colors))
            # Append the camera position as a red marker point.
            all_pts = np.vstack((all_pts,np.reshape(t,(1,3))))
            all_colors = np.vstack((all_colors,np.array([1.,0.,0.])))
        print('colro\points',len(all_pts),len(all_colors))
        # Persist the accumulated cloud with Open3D (``o3d`` not imported here).
        pcd = o3d.geometry.PointCloud()
        pcd.points = o3d.utility.Vector3dVector(all_pts)
        pcd.colors = o3d.utility.Vector3dVector(all_colors.astype(float))
        o3d.io.write_point_cloud("pointcloud.ply", pcd)
        frames = 3
        # Disabled sliding-window bundle adjustment over the last ``frames`` views.
        if (False):
        #if ((img_idx > frames) & ((img_idx-1) % frames == 0)):
            #Lets do bundle adjustment
            n_cameras = frames
            ba_objs = []
            camera_params = []
            for i in range(frames):
                ba_objs.extend(np.reshape(all_objs[(img_idx-frames-1+i)],(-1,6)))
                P = all_P_mats[(img_idx-frames-1+i)]
                R, _ = cv2.Rodrigues(P[:,0:3])
                t = np.reshape(P[:,3],(1,3))
                camera_params.extend(np.hstack((R.ravel(),t.ravel())))
            ba_objs = np.reshape(ba_objs,(-1,6))
            camera_params = np.reshape(camera_params,(-1,6))
            print(camera_params)
            camera_indices = np.empty_like(ba_objs[:,0]).astype(int)
            next = 0
            prev = 0
            for i in range(frames):
                next += len(all_objs[(img_idx-frames-1+i)])
                camera_indices[prev:next] = i
                prev = next
            points_3d = np.reshape(ba_objs[:,1:4],(-1,3))
            points_2d = np.reshape(ba_objs[:,4:6],(-1,2))
            point_indices = np.reshape(ba_objs[:,0],(-1,1))
            n_points = len(points_3d)
            n = 6 * n_cameras + 3 * n_points
            m = 2 * n_points
            print("n_cameras: {}".format(n_cameras))
            print("n_points: {}".format(n_points))
            print("Total number of parameters: {}".format(n))
            print("Total number of residuals: {}".format(m))
            #ba_thread = myThread(1,"thread1",n_cameras,n_points, camera_indices, point_indices)
            x0 = np.hstack((camera_params.ravel(), points_3d.ravel()))
            f0 = baFun(x0, n_cameras, n_points, camera_indices, points_2d)
            plt.plot(f0)
            #plt.show()
            A = bundle_adjustment_sparsity(n_cameras, n_points, camera_indices, point_indices)
            res = least_squares(baFun, x0, jac_sparsity=A, verbose=2, x_scale='jac', ftol=1e-4, method='trf',
                                args=(n_cameras, n_points, camera_indices, points_2d))
            plt.plot(res.fun)
            plt.show()
        prev = orig
        orig_count = len(orig)
    key = cv2.waitKey(1) & 0xFF;
    if (key == 27 or key == ord('q') or key == ord('x')):
        # NOTE(review): orphaned ``break`` — there is no enclosing loop here.
        break;
if __name__ == '__main__':
    # Constructing the node is the whole program: mapping.__init__ subscribes,
    # waits for topics and then calls self.run() itself.
    mapping()
| 35.593443 | 166 | 0.680453 |
31387a1b1f396304dc51e3af5b59b30c4e8ef8e4 | 11,109 | py | Python | tests/services/test_connection.py | PixelogicDev/py42 | ccb100b03025fff1a060a39635bee3e76a251a85 | [
"MIT"
] | null | null | null | tests/services/test_connection.py | PixelogicDev/py42 | ccb100b03025fff1a060a39635bee3e76a251a85 | [
"MIT"
] | null | null | null | tests/services/test_connection.py | PixelogicDev/py42 | ccb100b03025fff1a060a39635bee3e76a251a85 | [
"MIT"
] | null | null | null | import json
import pytest
from requests import Response
from py42.exceptions import Py42Error
from py42.exceptions import Py42FeatureUnavailableError
from py42.exceptions import Py42InternalServerError
from py42.exceptions import Py42UnauthorizedError
from py42.response import Py42Response
from py42.services._auth import C42RenewableAuth
from py42.services._connection import Connection
from py42.services._connection import HostResolver
from py42.services._connection import KnownUrlHostResolver
from py42.services._connection import MicroserviceKeyHostResolver
from py42.services._connection import MicroservicePrefixHostResolver
from py42.services._keyvaluestore import KeyValueStoreService
# NOTE(review): ``default_kwargs`` is unused in this chunk — presumably the
# keyword arguments the Connection forwards to ``Session.send``; verify usage.
default_kwargs = {
    "timeout": 60,
    "proxies": None,
    "stream": False,
    "verify": True,
    "cert": None,
}
# Shared fixture values referenced throughout the test classes below.
HOST_ADDRESS = "http://example.com"
URL = "/api/resource"
DATA_VALUE = "value"
JSON_VALUE = {"key": "value"}
KWARGS_INDEX = 1
DATA_KEY = "data"
TEST_RESPONSE_CONTENT = '{"key": "test_response_content"}'
@pytest.fixture
def mock_host_resolver(mocker):
    # Resolver double that always reports the canned test host address.
    resolver = mocker.MagicMock(spec=HostResolver)
    resolver.get_host_address.return_value = HOST_ADDRESS
    return resolver
@pytest.fixture
def mock_auth(mocker):
    # Bare auth double; individual tests configure behavior as needed.
    auth_double = mocker.MagicMock(spec=C42RenewableAuth)
    return auth_double
@pytest.fixture
def mock_key_value_service(mocker):
    # Key-value store double used by the microservice-key resolver tests.
    service_double = mocker.MagicMock(spec=KeyValueStoreService)
    return service_double
@pytest.fixture
def mock_server_env_conn(mocker):
    # Connection double whose GET yields a ServerEnv payload containing the
    # STS base URL that the prefix resolver derives hosts from.
    response = mocker.MagicMock(spec=Response)
    response.text = '{"stsBaseUrl": "sts-testsuffix"}'
    connection = mocker.MagicMock(spec=Connection)
    connection.get.return_value = Py42Response(response)
    return connection
@pytest.fixture
def mock_server_env_conn_missing_sts_base_url(mocker):
    # Same as mock_server_env_conn, but the ServerEnv payload has no
    # ``stsBaseUrl`` key — the feature-unavailable path.
    response = mocker.MagicMock(spec=Response)
    response.text = "{}"
    connection = mocker.MagicMock(spec=Connection)
    connection.get.return_value = Py42Response(response)
    return connection
class MockPreparedRequest(object):
    """Value object used to assert on ``Session.prepare_request`` arguments.

    Equality deliberately compares against the *public* attributes of a
    ``requests.Request`` (``method``/``url``/``data``), not against another
    ``MockPreparedRequest``.
    """

    def __init__(self, method, url, data=None):
        self._method = method
        self._url = url
        # Any falsy payload (None, "", {}) normalizes to an empty list, which
        # is what ``requests.Request`` stores when no body is given.
        self._data = data if data else []

    def __eq__(self, other):
        if self._method != other.method:
            return False
        if self._url != other.url:
            return False
        return self._data == other.data
class TestKnownUrlHostResolver(object):
    def test_get_host_address_returns_expected_value(self):
        # The known-URL resolver simply echoes the address it was built with.
        resolver = KnownUrlHostResolver(HOST_ADDRESS)
        address = resolver.get_host_address()
        assert address == HOST_ADDRESS
class TestMicroserviceKeyHostResolver(object):
    def test_get_host_address_returns_expected_value(self, mock_key_value_service):
        # The resolver returns the text of the value stored under its key.
        mock_key_value_service.get_stored_value.return_value.text = HOST_ADDRESS
        resolver = MicroserviceKeyHostResolver(mock_key_value_service, "TEST_KEY")
        address = resolver.get_host_address()
        assert address == HOST_ADDRESS

    def test_get_host_address_passes_expected_key(self, mock_key_value_service):
        # The configured key is forwarded verbatim to the key-value store.
        resolver = MicroserviceKeyHostResolver(mock_key_value_service, "TEST_KEY")
        resolver.get_host_address()
        mock_key_value_service.get_stored_value.assert_called_once_with("TEST_KEY")
class TestMicroservicePrefixHostResolver(object):
    def test_get_host_address_returns_expected_value(self, mock_server_env_conn):
        # The "sts" prefix of the STS base URL is swapped for the service prefix.
        resolver = MicroservicePrefixHostResolver(mock_server_env_conn, "TESTPREFIX")
        address = resolver.get_host_address()
        assert address == "TESTPREFIX-testsuffix"

    def test_get_host_address_when_sts_base_url_not_found_raises_feature_unavailable_error(
        self, mock_server_env_conn_missing_sts_base_url
    ):
        # Without an stsBaseUrl in ServerEnv the microservice feature is off.
        resolver = MicroservicePrefixHostResolver(
            mock_server_env_conn_missing_sts_base_url, "TESTPREFIX"
        )
        with pytest.raises(Py42FeatureUnavailableError):
            resolver.get_host_address()

    def test_get_host_address_calls_correct_server_env_url(self, mock_server_env_conn):
        # Resolution is driven by a GET against the ServerEnv endpoint.
        resolver = MicroservicePrefixHostResolver(mock_server_env_conn, "TESTPREFIX")
        resolver.get_host_address()
        mock_server_env_conn.get.assert_called_once_with("/api/ServerEnv")
class TestConnection(object):
def test_connection_get_calls_requests_with_get(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.get(URL)
expected = MockPreparedRequest("GET", HOST_ADDRESS + URL, None)
success_requests_session.prepare_request.assert_called_once_with(expected)
def test_connection_put_calls_requests_with_put(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.put(URL, data="testdata")
expected = MockPreparedRequest("PUT", HOST_ADDRESS + URL, "testdata")
success_requests_session.prepare_request.assert_called_once_with(expected)
def test_connection_post_calls_requests_with_post(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.post(URL, data="testdata")
expected = MockPreparedRequest("POST", HOST_ADDRESS + URL, "testdata")
success_requests_session.prepare_request.assert_called_once_with(expected)
def test_connection_patch_calls_requests_with_patch(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.patch(URL, data="testdata")
expected = MockPreparedRequest("PATCH", HOST_ADDRESS + URL, "testdata")
success_requests_session.prepare_request.assert_called_once_with(expected)
def test_connection_delete_calls_requests_with_delete(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.delete(URL)
expected = MockPreparedRequest("DELETE", HOST_ADDRESS + URL)
success_requests_session.prepare_request.assert_called_once_with(expected)
def test_connection_options_calls_requests_with_options(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.options(URL)
expected = MockPreparedRequest("OPTIONS", HOST_ADDRESS + URL)
success_requests_session.prepare_request.assert_called_once_with(expected)
def test_connection_head_calls_requests_with_head(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.head(URL)
expected = MockPreparedRequest("HEAD", HOST_ADDRESS + URL)
success_requests_session.prepare_request.assert_called_once_with(expected)
def test_connection_post_with_json_prepares_request_with_string_encoded_json_body(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.post(URL, json=JSON_VALUE)
expected = MockPreparedRequest(
"POST", HOST_ADDRESS + URL, json.dumps(JSON_VALUE)
)
success_requests_session.prepare_request.assert_called_once_with(expected)
def test_connection_post_with_data_and_json_params_overwrites_data_with_json(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.post(URL, data=DATA_VALUE, json=JSON_VALUE)
expected = MockPreparedRequest(
"POST", HOST_ADDRESS + URL, json.dumps(JSON_VALUE)
)
success_requests_session.prepare_request.assert_called_once_with(expected)
def test_connection_request_returns_utf8_response(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
response = connection.request("GET", URL, data=DATA_VALUE, json=JSON_VALUE)
assert response.encoding == "utf-8"
def test_connection_request_when_streamed_doesnt_not_set_encoding_on_response(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
response = connection.request("GET", URL, data=DATA_VALUE, stream=True)
assert response.encoding is None
def test_connection_request_returns_response_when_good_status_code(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
response = connection.get(URL)
assert response.text == TEST_RESPONSE_CONTENT
def test_connection_request_with_error_status_code_raises_http_error(
self, mock_host_resolver, mock_auth, error_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, error_requests_session)
with pytest.raises(Py42InternalServerError):
connection.get(URL)
def test_connection_request_calls_auth_handler_when_making_first_request(
self, mock_host_resolver, mock_auth, success_requests_session
):
connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
connection.get(URL)
assert success_requests_session.prepare_request.call_count == 1
    def test_connection_request_calls_auth_handler_clears_renews_credentials_when_response_unauthorized(
        self, mock_host_resolver, mock_auth, renewed_requests_session
    ):
        """On a 401, credentials are cleared once and the request is retried (2 sends)."""
        connection = Connection(mock_host_resolver, mock_auth, renewed_requests_session)
        connection.get(URL)
        assert renewed_requests_session.send.call_count == 2
        assert renewed_requests_session.prepare_request.call_count == 2
        assert mock_auth.clear_credentials.call_count == 1
    def test_connection_request_raises_unauthorized_error_when_renewal_results_in_401(
        self, mock_host_resolver, mock_auth, unauthorized_requests_session
    ):
        """If the retry after credential renewal is still 401, Py42UnauthorizedError is raised."""
        connection = Connection(
            mock_host_resolver, mock_auth, unauthorized_requests_session
        )
        with pytest.raises(Py42UnauthorizedError):
            connection.get(URL)
        assert unauthorized_requests_session.send.call_count == 2
    def test_connection_request_when_session_returns_none_raises_py42_error(
        self, mock_host_resolver, mock_auth, success_requests_session
    ):
        """A None response from the underlying session surfaces as a Py42Error."""
        success_requests_session.send.return_value = None
        connection = Connection(mock_host_resolver, mock_auth, success_requests_session)
        with pytest.raises(Py42Error):
            connection.get(URL)
| 42.400763 | 104 | 0.763975 |
e73e7503420256e0f5a208b51ecc5247858bb738 | 1,769 | py | Python | c2cgeoportal/lib/bashcolor.py | craxxkid/c2cgeoportal | 60ca7d5d014d69b0a938f858271c911a30da77c3 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | c2cgeoportal/lib/bashcolor.py | craxxkid/c2cgeoportal | 60ca7d5d014d69b0a938f858271c911a30da77c3 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | c2cgeoportal/lib/bashcolor.py | craxxkid/c2cgeoportal | 60ca7d5d014d69b0a938f858271c911a30da77c3 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2014-2016, Camptocamp SA
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.
# ANSI foreground color codes accepted by colorize().
BLACK = 0
RED = 1
GREEN = 2
YELLOW = 3
BLUE = 4
MAGENTA = 5
CYAN = 6
WHITE = 7


def colorize(text, color):  # pragma: no cover
    """Return *text* wrapped in a bold ANSI escape for the given *color* code."""
    return "\x1b[01;3{0}m{1}\x1b[0m".format(color, text)
| 41.139535 | 81 | 0.765969 |
e324fccf2a15d3b0212c49339e2c57973a75f131 | 6,849 | py | Python | donkeycar/parts/simulation.py | JediLuke/rufus | 177c6012ecdaeaab42f45f76e478b14b5610c6b3 | [
"MIT"
] | 1 | 2019-03-01T03:29:01.000Z | 2019-03-01T03:29:01.000Z | donkeycar/parts/simulation.py | JediLuke/rufus | 177c6012ecdaeaab42f45f76e478b14b5610c6b3 | [
"MIT"
] | null | null | null | donkeycar/parts/simulation.py | JediLuke/rufus | 177c6012ecdaeaab42f45f76e478b14b5610c6b3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 25 17:30:28 2017
@author: wroscoe
"""
import base64
import random
import numpy as np
import socketio
import eventlet
import eventlet.wsgi
from PIL import Image
from flask import Flask
from io import BytesIO
import time
import os
"""
Method to test if you can import correct donkeycar package - see working dir of package that gets
used when you import donkeycar, change the string that prints to be extra extra sure you're using correct library
(Make sure you `source activate donkey`)
$>python
>>> from donkeycar.parts.simulation import ImportTest
>>> IT = ImportTest()
"""
class ImportTest():
    """Import sanity check: constructing it prints the active working directory
    plus a marker key, so you can verify which copy of the library is imported."""

    def __init__(self):
        cwd = os.getcwd()
        print("Using library: " + cwd + " verification key (change me): " + "RUFUS72")
class FPSTimer(object):
    """Print the measured frames-per-second to stdout once every 100 frames."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Restart the timing window."""
        self.t = time.time()
        self.iter = 0

    def on_frame(self):
        """Count one frame; every 100th frame, report FPS and restart the window."""
        self.iter += 1
        if self.iter != 100:
            return
        now = time.time()
        print('fps', 100.0 / (now - self.t))
        self.reset()
class SteeringServer(object):
    '''
    A SocketIO based Websocket server designed to integrate with
    the Donkey Sim Unity project. Check the donkey branch of
    https://github.com/tawnkramer/sdsandbox for source of simulator.
    Prebuilt simulators available:
    Windows: https://drive.google.com/file/d/0BxSsaxmEV-5YRC1ZWHZ4Y1dZTkE/view?usp=sharing
    '''
    def __init__(self, _sio, kpart, top_speed=4.0, image_part=None, steering_scale=1.0):
        """Wire up the SocketIO server.

        :param _sio: socketio.Server instance used to talk to the simulator
        :param kpart: inference part; its run(image_array) must return (steering, throttle)
        :param top_speed: speed above which throttle_control() cuts the throttle
        :param image_part: optional preprocessing part applied to frames before inference
        :param steering_scale: extra multiplier applied to the predicted steering
        """
        self.model = None
        self.timer = FPSTimer()
        self.sio = _sio
        # TODO: convert this flask app to a tornado app to minimize dependencies.
        self.app = Flask(__name__)
        self.kpart = kpart
        self.image_part = image_part
        self.steering_scale = steering_scale
        self.top_speed = top_speed
    def throttle_control(self, last_steering, last_throttle, speed, nn_throttle):
        '''
        super basic throttle control, derive from this Server and override as needed
        '''
        # Bang-bang control: fixed throttle below top_speed, coast above it.
        # The NN's throttle prediction (nn_throttle) is intentionally ignored here.
        if speed < self.top_speed:
            return 0.3
        return 0.0
    def telemetry(self, sid, data):
        '''
        Callback when we get new data from Unity simulator.
        We use it to process the image, do a forward inference,
        then send controls back to client.
        Takes sid (?) and data, a dictionary of json elements.
        '''
        if data:
            # The current steering angle of the car
            last_steering = float(data["steering_angle"])
            # The current throttle of the car
            last_throttle = float(data["throttle"])
            # The current speed of the car
            speed = float(data["speed"])
            # The current image from the center camera of the car
            imgString = data["image"]
            # decode string based data into bytes, then to Image
            image = Image.open(BytesIO(base64.b64decode(imgString)))
            # then as numpy array
            image_array = np.asarray(image)
            # optional change to pre-preocess image before NN sees it
            if self.image_part is not None:
                image_array = self.image_part.run(image_array)
            # forward pass - inference
            steering, throttle = self.kpart.run(image_array)
            # filter throttle here, as our NN doesn't always do a greate job
            throttle = self.throttle_control(last_steering, last_throttle, speed, throttle)
            # simulator will scale our steering based on it's angle based input.
            # but we have an opportunity for more adjustment here.
            steering *= self.steering_scale
            # send command back to Unity simulator
            self.send_control(steering, throttle)
        else:
            # NOTE: DON'T EDIT THIS.
            self.sio.emit('manual', data={}, skip_sid=True)
        self.timer.on_frame()
    def connect(self, sid, environ):
        """Callback on new client connection: restart FPS timing and send a neutral command."""
        print("connect ", sid)
        self.timer.reset()
        self.send_control(0, 0)
    def send_control(self, steering_angle, throttle):
        """Emit a 'steer' event with stringified control values to all clients."""
        self.sio.emit(
            "steer",
            data={
                'steering_angle': steering_angle.__str__(),
                'throttle': throttle.__str__()
            },
            skip_sid=True)
    def go(self, address):
        """Serve forever on (host, port) *address*; blocks until Ctrl+C."""
        # wrap Flask application with engineio's middleware
        self.app = socketio.Middleware(self.sio, self.app)
        # deploy as an eventlet WSGI server
        try:
            eventlet.wsgi.server(eventlet.listen(address), self.app)
        except KeyboardInterrupt:
            # unless some hits Ctrl+C and then we get this interrupt
            print('stopping')
class MovingSquareTelemetry:
"""
Generator of cordinates of a bouncing moving square for simulations.
"""
def __init__(self, max_velocity=29,
x_min = 10, x_max=150,
y_min = 10, y_max=110):
self.velocity = random.random() * max_velocity
self.x_min, self.x_max = x_min, x_max
self.y_min, self.y_max = y_min, y_max
self.x_direction = random.random() * 2 - 1
self.y_direction = random.random() * 2 - 1
self.x = random.random() * x_max
self.y = random.random() * y_max
self.tel = self.x, self.y
def run(self):
#move
self.x += self.x_direction * self.velocity
self.y += self.y_direction * self.velocity
#make square bounce off walls
if self.y < self.y_min or self.y > self.y_max:
self.y_direction *= -1
if self.x < self.x_min or self.x > self.x_max:
self.x_direction *= -1
return int(self.x), int(self.y)
def update(self):
self.tel = self.run()
def run_threaded(self):
return self.tel
class SquareBoxCamera:
    """Fake camera that renders a single colored square on a black frame.

    Handy as a trivially learnable image source when testing a learning
    algorithm end to end.
    """

    def __init__(self, resolution=(120, 160), box_size=4, color=(255, 0, 0)):
        self.resolution = resolution
        self.box_size = box_size
        self.color = color

    def run(self, x, y, box_size=None, color=None):
        """Return an (H, W, 3) frame with a square centered at (x, y).

        Per-call box_size/color override the instance defaults when truthy.
        """
        half = int((box_size or self.box_size) / 2)
        fill = color or self.color
        frame = np.zeros(shape=self.resolution + (3,))
        rows = slice(y - half, y + half)
        cols = slice(x - half, x + half)
        frame[rows, cols, :] = fill
        return frame
| 29.908297 | 117 | 0.592933 |
25174869c54453cae13c94d005d2e09c0585a098 | 5,420 | py | Python | mime/envs/table_envs/tower_scene.py | rjgpinel/mime-release | 26a850c4ba5b702b86d068995614163338fb01df | [
"MIT"
] | null | null | null | mime/envs/table_envs/tower_scene.py | rjgpinel/mime-release | 26a850c4ba5b702b86d068995614163338fb01df | [
"MIT"
] | null | null | null | mime/envs/table_envs/tower_scene.py | rjgpinel/mime-release | 26a850c4ba5b702b86d068995614163338fb01df | [
"MIT"
] | null | null | null | import numpy as np
from .table_scene import TableScene
from .table_modder import TableModder
from ..script import Script
from .utils import sample_without_overlap
class TowerScene(TableScene):
    """Tabletop scene where a robot arm must stack cubes into a tower.

    Cube sizes are either drawn from a range (when self._rand_obj is set by the
    base scene) or fixed to 0.05/0.06 m increments for two cubes.
    """
    def __init__(self, **kwargs):
        super(TowerScene, self).__init__(**kwargs)
        self._modder = TableModder(self)
        self._count_success = 0
        self._num_cubes = 2
        self._cubes = []
        self._cubes_size = []
        if self._rand_obj:
            # sizes sampled per-cube at reset time -- presumably meters; see load_mesh
            self._cube_size_range = {"low": 0.04, "high": 0.06}
        else:
            # fixed, strictly increasing sizes: 0.05, 0.06, ...
            self._cube_sizes = [0.05 + i * 0.01 for i in range(self._num_cubes)]
        # speed up the linear tool velocity by 50%, keep angular unchanged
        v, w = self._max_tool_velocity
        self._max_tool_velocity = (1.5 * v, w)
    def load(self, np_random):
        """Delegate asset loading to the base table scene."""
        super(TowerScene, self).load(np_random)
    def reset(self, np_random):
        """Reset the episode: randomize cage/arm pose, respawn and recolor cubes."""
        super(TowerScene, self).reset(np_random)
        modder = self._modder
        # load and randomize cage
        modder.load_cage(np_random)
        if self._domain_rand:
            modder.randomize_cage_visual(np_random)
        self._count_success = 0
        low, high = self._object_workspace
        low_arm, high_arm = low.copy(), high.copy()
        low_arm[2] = 0.1
        # shrink the cube spawn area slightly so cubes stay clear of the edges
        low_cubes, high_cubes = np.array(low.copy()), np.array(high.copy())
        low_cubes[:2] += 0.02
        high_cubes[:2] -= 0.02
        # drop cubes from the previous episode
        for cube in self._cubes:
            cube.remove()
        self._cubes = []
        self._cubes_size = []
        # move the arm to a random gripper pose via inverse kinematics
        gripper_pos, gripper_orn = self.random_gripper_pose(np_random)
        q0 = self.robot.arm.controller.joints_target
        q = self.robot.arm.kinematics.inverse(gripper_pos, gripper_orn, q0)
        self.robot.arm.reset(q)
        # load cubes
        num_cubes = self._num_cubes
        cubes = []
        cubes_size = []
        for i in range(num_cubes):
            if self._rand_obj:
                cube, cube_size = modder.load_mesh(
                    "cube", self._cube_size_range, np_random
                )
            else:
                cube, cube_size = modder.load_mesh(
                    "cube", self._cube_sizes[i], np_random
                )
            # cube.dynamics.lateral_friction = 10
            cubes.append(cube)
            cubes_size.append(cube_size)
        # sort cubes per decreasing size
        # biggest cube first
        idxs_sort = np.argsort(-np.array(cubes_size))
        for idx in idxs_sort:
            self._cubes.append(cubes[idx])
            self._cubes_size.append(cubes_size[idx])
        self._cubes_size = np.array(self._cubes_size)
        # move cubes to a random position and change color
        cubes = []
        aabbs = []
        for cube in self._cubes:
            aabbs, _ = sample_without_overlap(
                cube, aabbs, np_random, low_cubes, high_cubes, 0, 0, min_dist=0.05
            )
            if self._rand_obj:
                color = np_random.uniform(0, 1, 4)
            else:
                color = np.array([11.0 / 255.0, 124.0 / 255.0, 96.0 / 255.0, 1])
            # force full opacity regardless of how the RGBA was drawn
            color[3] = 1
            cube.color = color
            if self._domain_rand:
                modder.randomize_object_color(np_random, cube, color)
    def script(self):
        """Return the scripted pick-and-place moves that build the tower.

        The biggest cube (index 0) is the tower base; each remaining cube is
        picked and released on top of the current stack height.
        """
        arm = self.robot.arm
        grip = self.robot.gripper
        cubes_pos = self.cubes_position
        cubes_size = self._cubes_size
        tower_pos = cubes_pos[0]
        height = 0
        sc = Script(self)
        moves = []
        z_offset = 0.01
        # height accumulates the sizes of the cubes already in the stack
        for pick_pos, cube_size in zip(cubes_pos[1:], cubes_size[:-1]):
            height += cube_size
            moves += [
                sc.tool_move(arm, pick_pos + [0, 0, 0.1]),
                sc.tool_move(arm, pick_pos + [0, 0, z_offset]),
                sc.grip_close(grip),
                sc.tool_move(arm, pick_pos + [0, 0, height + z_offset * 2]),
                sc.tool_move(arm, tower_pos + [0, 0, height + z_offset * 2]),
                sc.grip_open(grip),
                sc.tool_move(arm, tower_pos + [0, 0, height + cube_size]),
            ]
        return moves
    @property
    def cubes_position(self):
        """(N, 3) array of current cube positions, ordered biggest first."""
        return np.array([cube.position[0] for cube in self._cubes])
    def distance_to_cubes(self, idx):
        """(N, 3) array of vectors from the tool to each cube.

        NOTE(review): the idx argument is unused here -- confirm intent.
        """
        tool_pos, _ = self.robot.arm.tool.state.position
        return np.array(
            [np.subtract(cube.position[0], tool_pos) for cube in self._cubes]
        )
    def get_reward(self, action):
        """Dense reward is not implemented for this scene; always 0."""
        return 0
    def is_task_success(self):
        """True once the tower has been correctly stacked for more than 5 checks.

        The stack counts as correct when all upper cubes sit within 4 cm
        (horizontally) of the base cube and each cube top matches the expected
        cumulative height to within 1 mm.
        """
        cubes_pos = self.cubes_position
        cubes_size = self._cubes_size
        tower_pos = cubes_pos[0]
        heights = np.cumsum(cubes_size)[1:]
        bary_cubes = np.mean(cubes_pos[1:], axis=0)
        cubes_on_tower = (
            np.linalg.norm(np.subtract(tower_pos[:2], bary_cubes[:2])) < 0.04
        )
        heights_ok = True
        for cube_pos, cube_size, height in zip(cubes_pos[1:], cubes_size[1:], heights):
            heights_ok = (
                heights_ok and np.abs(cube_pos[2] + cube_size / 2 - height) < 0.001
            )
        if heights_ok and cubes_on_tower:
            self._count_success += 1
        success = self._count_success > 5
        return success
def test_scene():
    """Manual smoke test: render the tower scene and reset it forever."""
    from time import sleep

    demo = TowerScene()
    demo.renders(True)
    rng = np.random.RandomState(1)
    while True:
        demo.reset(rng)
        sleep(1)


if __name__ == "__main__":
    test_scene()
| 31.511628 | 87 | 0.567528 |
3ad8f18c322f87dbecaf9bac1c2ddc988aa2e28b | 3,333 | py | Python | launch/nav2_7_recovery_launch.py | skylerpan/nav2_step | 54f25ff640549234ab5eb33736c6267cfdddf3b4 | [
"Apache-2.0"
] | 1 | 2021-07-03T17:47:04.000Z | 2021-07-03T17:47:04.000Z | launch/nav2_7_recovery_launch.py | skylerpan/nav2_step | 54f25ff640549234ab5eb33736c6267cfdddf3b4 | [
"Apache-2.0"
] | null | null | null | launch/nav2_7_recovery_launch.py | skylerpan/nav2_step | 54f25ff640549234ab5eb33736c6267cfdddf3b4 | [
"Apache-2.0"
] | 1 | 2020-04-15T10:13:53.000Z | 2020-04-15T10:13:53.000Z | # Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" This is all-in-one launch script intended for use by nav2 developers. """
import os
from ament_index_python.packages import get_package_prefix
from ament_index_python.packages import get_package_share_directory
from launch.conditions import IfCondition
from nav2_common.launch import RewrittenYaml
import launch.actions
import launch_ros.actions
def generate_launch_description():
    """Build the ROS2 LaunchDescription that runs the nav2 recoveries node.

    Declares 'autostart', 'bt' and 'params' launch arguments, rewrites the
    params YAML with their values, and starts the recoveries_node executable.
    """
    # Get the launch directory
    launch_dir = os.path.join(get_package_share_directory('nav2_bringup'), 'launch')
    # Create the launch configuration variables
    autostart = launch.substitutions.LaunchConfiguration('autostart')
    bt_xml_file = launch.substitutions.LaunchConfiguration('bt')
    params_file = launch.substitutions.LaunchConfiguration('params')
    # Create our own temporary YAML files that include the following parameter substitutions
    param_substitutions = {
        'autostart': autostart,
        'bt_xml_filename': bt_xml_file,
    }
    configured_params = RewrittenYaml(
        source_file=params_file, rewrites=param_substitutions, convert_types=True)
    # Declare the launch arguments
    declare_autostart_cmd = launch.actions.DeclareLaunchArgument(
        'autostart',
        default_value='true',
        description='Automatically startup the nav2 stack')
    declare_bt_xml_cmd = launch.actions.DeclareLaunchArgument(
        'bt',
        default_value=os.path.join(
            get_package_prefix('nav2_bt_navigator'),
            'behavior_trees/navigate_w_replanning_and_recovery.xml'),
        description='Full path to the Behavior Tree XML file to use for the BT navigator')
    declare_params_file_cmd = launch.actions.DeclareLaunchArgument(
        'params',
        default_value=[launch.substitutions.ThisLaunchFileDir(), '/../params/nav2_params.yaml'],
        description='Full path to the ROS2 parameters file to use for all launched nodes')
    # Line-buffer stdout so log lines appear promptly in the launch console
    stdout_linebuf_envvar = launch.actions.SetEnvironmentVariable(
        'RCUTILS_CONSOLE_STDOUT_LINE_BUFFERED', '1')
    # Run the recoveries node binary directly, passing the rewritten params file
    start_recovery_cmd = launch.actions.ExecuteProcess(
        cmd=[
            os.path.join(
                get_package_prefix('nav2_recoveries'),
                'lib/nav2_recoveries/recoveries_node'),
            ['__params:=', configured_params]],
        cwd=[launch_dir], output='screen')
    # Create the launch description and populate
    ld = launch.LaunchDescription()
    # Declare the launch options
    ld.add_action(declare_autostart_cmd)
    ld.add_action(declare_bt_xml_cmd)
    ld.add_action(declare_params_file_cmd)
    # Set environment variables
    ld.add_action(stdout_linebuf_envvar)
    # Add the actions to launch all of the navigation nodes
    ld.add_action(start_recovery_cmd)
    return ld
| 37.449438 | 96 | 0.737774 |
2e205034cfb3944e11f6c9949e2f5af3b156d08c | 4,189 | py | Python | src/model_processors/track_utils/matching.py | patrick-ubc/Huawei_HiFly_Drone | 5dae1b56f49c2b86c3b852bbc5e3a63e84ccd490 | [
"Apache-2.0"
] | 1 | 2021-09-21T23:23:59.000Z | 2021-09-21T23:23:59.000Z | src/model_processors/track_utils/matching.py | patrick-ubc/Huawei_HiFly_Drone | 5dae1b56f49c2b86c3b852bbc5e3a63e84ccd490 | [
"Apache-2.0"
] | 8 | 2021-07-05T21:41:53.000Z | 2022-02-15T19:46:13.000Z | src/model_processors/track_utils/matching.py | patrick-ubc/Huawei_HiFly_Drone | 5dae1b56f49c2b86c3b852bbc5e3a63e84ccd490 | [
"Apache-2.0"
] | 4 | 2021-06-11T22:47:04.000Z | 2021-07-29T19:57:13.000Z | """
Copyright 2021 Huawei Technologies Co., Ltd.
Copyright (c) 2020 YifuZhang
Licensed under the Apache License, Version 2.0 (the "License");
You may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# matching.py
import cv2
import numpy as np
import scipy
import lap
from scipy.spatial.distance import cdist
from cython_bbox import bbox_overlaps as bbox_ious
from track_utils import kalman_filter
import time
def linear_assignment(cost_matrix, thresh):
    """Solve the linear assignment problem between rows and columns of cost_matrix.

    Uses the LAPJV solver (Jonker-Volgenant) with a cost limit of ``thresh``.

    Returns:
        matches: np.ndarray of [row_idx, col_idx] pairs
        unmatched_rows: indices of rows (tracks) left unpaired
        unmatched_cols: indices of columns (detections) left unpaired
    """
    if cost_matrix.size == 0:
        empty_matches = np.empty((0, 2), dtype=int)
        all_rows = tuple(range(cost_matrix.shape[0]))
        all_cols = tuple(range(cost_matrix.shape[1]))
        return empty_matches, all_rows, all_cols
    # row_to_col[i] is the column matched to row i (or a negative value if none);
    # col_to_row is the inverse mapping.
    _, row_to_col, col_to_row = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh)
    paired = [[row, col] for row, col in enumerate(row_to_col) if col >= 0]
    unmatched_rows = np.where(row_to_col < 0)[0]
    unmatched_cols = np.where(col_to_row < 0)[0]
    return np.asarray(paired), unmatched_rows, unmatched_cols
def ious(atlbrs, btlbrs):
    """
    Compute pairwise IoU between two sets of tlbr boxes.
    :type atlbrs: list[tlbr] | np.ndarray
    :type btlbrs: list[tlbr] | np.ndarray
    :rtype: np.ndarray of shape (len(atlbrs), len(btlbrs))
    """
    # BUGFIX: ``np.float`` (a deprecated alias of builtin float) was removed in
    # NumPy 1.24, making this crash on modern NumPy; np.float64 is equivalent.
    _ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float64)
    if _ious.size == 0:
        # nothing to compare; skip the C extension call entirely
        return _ious
    _ious = bbox_ious(
        np.ascontiguousarray(atlbrs, dtype=np.float64),
        np.ascontiguousarray(btlbrs, dtype=np.float64)
    )
    return _ious
def iou_distance(atracks, btracks):
    """
    Cost matrix ``1 - IoU`` between two collections of tracks or raw boxes.
    :type atracks: list[STrack] | list[np.ndarray]
    :type btracks: list[STrack] | list[np.ndarray]
    :rtype: np.ndarray
    """
    a_raw = len(atracks) > 0 and isinstance(atracks[0], np.ndarray)
    b_raw = len(btracks) > 0 and isinstance(btracks[0], np.ndarray)
    if a_raw or b_raw:
        # inputs are already raw tlbr boxes
        atlbrs = atracks
        btlbrs = btracks
    else:
        # extract tlbr boxes from track objects
        atlbrs = [track.tlbr for track in atracks]
        btlbrs = [track.tlbr for track in btracks]
    return 1 - ious(atlbrs, btlbrs)
def embedding_distance(tracks, detections, metric='cosine'):
    """
    Appearance-feature cost matrix between tracks and detections.
    :param tracks: list[STrack]; each must expose a ``smooth_feat`` vector
    :param detections: list[BaseTrack]; each must expose a ``curr_feat`` vector
    :param metric: distance metric name forwarded to scipy's ``cdist``
    :return: cost_matrix np.ndarray of shape (len(tracks), len(detections))
    """
    # BUGFIX: ``np.float`` was removed in NumPy 1.24; np.float64 is the
    # equivalent dtype and behaves identically on older NumPy versions.
    cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float64)
    if cost_matrix.size == 0:
        return cost_matrix
    det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float64)
    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float64)
    # Clamp at 0 to guard against tiny negative distances from floating-point
    # round-off (features are normalized).
    cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric))
    return cost_matrix
def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98):
    """
    Fuse appearance cost with Kalman-filter motion (Mahalanobis) distance.

    Rows whose gating distance exceeds the chi-square 95% threshold are set to
    +inf (gated out); remaining costs become a lambda-weighted blend of the
    appearance cost and the gating distance. NOTE: cost_matrix is modified
    in place and also returned.

    :param kf: Kalman filter providing gating_distance()
    :param cost_matrix: appearance cost matrix (tracks x detections)
    :param tracks: tracks with ``mean`` and ``covariance`` state
    :param detections: detections convertible via ``to_xyah()``
    :param only_position: gate on (x, y) only instead of full (x, y, a, h)
    :param lambda_: weight of the appearance cost in the blend
    """
    if cost_matrix.size == 0:
        return cost_matrix
    gating_dim = 2 if only_position else 4
    gating_threshold = kalman_filter.chi2inv95[gating_dim]
    measurements = np.asarray([det.to_xyah() for det in detections])
    for row, track in enumerate(tracks):
        gating_distance = kf.gating_distance(
            track.mean, track.covariance, measurements, only_position, metric='maha')
        # gate out implausible pairs, then blend motion into the remaining costs
        cost_matrix[row, gating_distance > gating_threshold] = np.inf
        cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * gating_distance
    return cost_matrix
6b6c96f66ce50f7d612f68756b74f8b72fce690d | 88 | py | Python | libcity/config/__init__.py | moghadas76/test_bigcity | 607b9602c5b1113b23e1830455e174b0901d7558 | [
"Apache-2.0"
] | 221 | 2021-09-06T03:33:31.000Z | 2022-03-28T05:36:49.000Z | libcity/config/__init__.py | moghadas76/test_bigcity | 607b9602c5b1113b23e1830455e174b0901d7558 | [
"Apache-2.0"
] | 43 | 2021-09-19T16:12:28.000Z | 2022-03-31T16:29:03.000Z | libcity/config/__init__.py | moghadas76/test_bigcity | 607b9602c5b1113b23e1830455e174b0901d7558 | [
"Apache-2.0"
] | 64 | 2021-09-06T07:56:10.000Z | 2022-03-25T08:48:35.000Z | from libcity.config.config_parser import ConfigParser
__all__ = [
'ConfigParser'
]
| 14.666667 | 53 | 0.761364 |
4ae044ebe2081757dbb40964900718236ad21463 | 20,771 | py | Python | python_modules/dagster/dagster/core/definitions/composition.py | bitdotioinc/dagster | 4fe395a37b206b1a48b956fa5dd72bf698104cca | [
"Apache-2.0"
] | 1 | 2021-04-27T19:49:59.000Z | 2021-04-27T19:49:59.000Z | python_modules/dagster/dagster/core/definitions/composition.py | bitdotioinc/dagster | 4fe395a37b206b1a48b956fa5dd72bf698104cca | [
"Apache-2.0"
] | 7 | 2022-03-16T06:55:04.000Z | 2022-03-18T07:03:25.000Z | python_modules/dagster/dagster/core/definitions/composition.py | bitdotioinc/dagster | 4fe395a37b206b1a48b956fa5dd72bf698104cca | [
"Apache-2.0"
] | null | null | null | import warnings
from collections import namedtuple
from dagster import check
from dagster.core.errors import DagsterInvalidDefinitionError, DagsterInvariantViolationError
from dagster.utils import frozentags
from .dependency import DependencyDefinition, MultiDependencyDefinition, SolidInvocation
from .hook import HookDefinition
from .output import OutputDefinition
from .solid import ISolidDefinition
from .utils import validate_tags
_composition_stack = []
def _not_invoked_warning(solid, context_source, context_name):
    """Warn that *solid* was referenced but never called inside the composition.

    Includes any alias, tags, or hook definitions that were attached to the
    uninvoked solid, to help the user locate the mistake.
    """
    check.inst_param(solid, "solid", CallableSolidNode)
    warning_message = (
        "While in {context} context '{name}', received an uninvoked solid '{solid_name}'.\n"
    )
    if solid.given_alias:
        warning_message += "'{solid_name}' was aliased as '{given_alias}'.\n"
    if solid.tags:
        warning_message += "Provided tags: {tags}.\n"
    if solid.hook_defs:
        warning_message += "Provided hook definitions: {hooks}.\n"
    warning_message = warning_message.format(
        context=context_source,
        name=context_name,
        solid_name=solid.solid_def.name,
        given_alias=solid.given_alias,
        tags=solid.tags,
        hooks=[hook.name for hook in solid.hook_defs],
    )
    warnings.warn(warning_message.strip())
def enter_composition(name, source):
    """Push a new in-progress composition context (e.g. a @pipeline body) onto the stack."""
    _composition_stack.append(InProgressCompositionContext(name, source))
def exit_composition(output=None):
    """Pop the current composition context and return its completed form."""
    return _composition_stack.pop().complete(output)
def current_context():
    """Return the innermost in-progress composition context (stack top)."""
    return _composition_stack[-1]
def assert_in_composition(solid_name):
    """Raise if a solid is invoked outside of a composition function."""
    if len(_composition_stack) < 1:
        raise DagsterInvariantViolationError(
            'Attempted to call solid "{solid_name}" outside of a composition function. '
            "Calling solids is only valid in a function decorated with "
            "@pipeline or @composite_solid.".format(solid_name=solid_name)
        )
def _is_in_composition():
    """Truthy when at least one composition context is active (returns the stack itself)."""
    return _composition_stack
class InProgressCompositionContext(object):
    """This context captures invocations of solids within a
    composition function such as @composite_solid or @pipeline
    """
    def __init__(self, name, source):
        # name: name of the composition; source: e.g. '@pipeline' / '@composite_solid'
        self.name = check.str_param(name, "name")
        self.source = check.str_param(source, "source")
        self._invocations = {}          # solid_name -> InvokedSolidNode
        self._collisions = {}           # solid def name -> times auto-aliased
        self._pending_invocations = {}  # solids referenced but not yet called
    def observe_invocation(
        self, given_alias, solid_def, input_bindings, input_mappings, tags=None, hook_defs=None
    ):
        """Record one solid call and return the (possibly auto-suffixed) name used.

        Without an explicit alias, repeated calls of the same solid def get
        auto-numbered names ('foo', 'foo_2', ...). Explicitly aliased names
        must be unique; a duplicate raises DagsterInvalidDefinitionError.
        """
        if given_alias is None:
            solid_name = solid_def.name
            self._pending_invocations.pop(solid_name, None)
            if self._collisions.get(solid_name):
                # second (or later) unaliased call: suffix with the collision count
                self._collisions[solid_name] += 1
                solid_name = "{solid_name}_{n}".format(
                    solid_name=solid_name, n=self._collisions[solid_name]
                )
            else:
                self._collisions[solid_name] = 1
        else:
            solid_name = given_alias
            self._pending_invocations.pop(solid_name, None)
        if self._invocations.get(solid_name):
            raise DagsterInvalidDefinitionError(
                "{source} {name} invoked the same solid ({solid_name}) twice without aliasing.".format(
                    source=self.source, name=self.name, solid_name=solid_name
                )
            )
        self._invocations[solid_name] = InvokedSolidNode(
            solid_name, solid_def, input_bindings, input_mappings, tags, hook_defs
        )
        return solid_name
    def add_pending_invocation(self, solid):
        """Track a solid that was referenced (aliased/tagged) but not yet invoked."""
        solid = check.opt_inst_param(solid, "solid", CallableSolidNode)
        solid_name = solid.given_alias if solid.given_alias else solid.solid_def.name
        self._pending_invocations[solid_name] = solid
    def complete(self, output):
        """Freeze this context into a CompleteCompositionContext."""
        return CompleteCompositionContext(
            self.name,
            self.source,
            self._invocations,
            check.opt_dict_param(output, "output"),
            self._pending_invocations,
        )
class CompleteCompositionContext(
    namedtuple(
        "_CompositionContext", "name solid_defs dependencies input_mappings output_mapping_dict"
    )
):
    """The processed information from capturing solid invocations during a composition function.
    """
    def __new__(cls, name, source, invocations, output_mapping_dict, pending_invocations):
        """Translate recorded invocations into solid defs, dependency dict and input mappings.

        Warns about any solid that was referenced but never invoked, and raises
        DagsterInvalidDefinitionError when two different solid definitions share
        one name.
        """
        dep_dict = {}
        solid_def_dict = {}
        input_mappings = []
        # surface solids that were aliased/tagged but never actually called
        for solid in pending_invocations.values():
            _not_invoked_warning(solid, source, name)
        for invocation in invocations.values():
            def_name = invocation.solid_def.name
            # identity check: the same name must always refer to the same def object
            if def_name in solid_def_dict and solid_def_dict[def_name] is not invocation.solid_def:
                raise DagsterInvalidDefinitionError(
                    'Detected conflicting solid definitions with the same name "{name}"'.format(
                        name=def_name
                    )
                )
            solid_def_dict[def_name] = invocation.solid_def
            deps = {}
            # translate each input binding into a (multi) dependency definition
            for input_name, node in invocation.input_bindings.items():
                if isinstance(node, InvokedSolidOutputHandle):
                    deps[input_name] = DependencyDefinition(node.solid_name, node.output_name)
                elif isinstance(node, list) and all(
                    map(lambda item: isinstance(item, InvokedSolidOutputHandle), node)
                ):
                    deps[input_name] = MultiDependencyDefinition(
                        [DependencyDefinition(call.solid_name, call.output_name) for call in node]
                    )
                else:
                    check.failed("Unexpected input binding - got {node}".format(node=node))
            dep_dict[
                SolidInvocation(
                    invocation.solid_def.name,
                    invocation.solid_name,
                    tags=invocation.tags,
                    hook_defs=invocation.hook_defs,
                )
            ] = deps
            # inputs fed from the enclosing composition become input mappings
            for input_name, node in invocation.input_mappings.items():
                input_mappings.append(node.input_def.mapping_to(invocation.solid_name, input_name))
        return super(cls, CompleteCompositionContext).__new__(
            cls, name, list(solid_def_dict.values()), dep_dict, input_mappings, output_mapping_dict
        )
class CallableSolidNode(object):
"""An intermediate object in solid composition to allow for binding information such as
an alias before invoking.
"""
def __init__(self, solid_def, given_alias=None, tags=None, hook_defs=None):
self.solid_def = solid_def
self.given_alias = check.opt_str_param(given_alias, "given_alias")
self.tags = check.opt_inst_param(tags, "tags", frozentags)
self.hook_defs = check.opt_set_param(hook_defs, "hook_defs", HookDefinition)
if _is_in_composition():
current_context().add_pending_invocation(self)
def __call__(self, *args, **kwargs):
solid_name = self.given_alias if self.given_alias else self.solid_def.name
assert_in_composition(solid_name)
input_bindings = {}
input_mappings = {}
# handle *args
for idx, output_node in enumerate(args):
if idx >= len(self.solid_def.input_defs):
raise DagsterInvalidDefinitionError(
"In {source} {name}, received too many inputs for solid "
"invocation {solid_name}. Only {def_num} defined, received {arg_num}".format(
source=current_context().source,
name=current_context().name,
solid_name=solid_name,
def_num=len(self.solid_def.input_defs),
arg_num=len(args),
)
)
input_name = self.solid_def.resolve_input_name_at_position(idx)
if input_name is None:
raise DagsterInvalidDefinitionError(
"In {source} {name}, could not resolve input based on position at "
"index {idx} for solid invocation {solid_name}. Use keyword args instead, "
"available inputs are: {inputs}".format(
idx=idx,
source=current_context().source,
name=current_context().name,
solid_name=solid_name,
inputs=list(map(lambda inp: inp.name, self.solid_def.input_defs)),
)
)
self._process_argument_node(
solid_name,
output_node,
input_name,
input_mappings,
input_bindings,
"(at position {idx})".format(idx=idx),
)
# then **kwargs
for input_name, output_node in kwargs.items():
self._process_argument_node(
solid_name,
output_node,
input_name,
input_mappings,
input_bindings,
"(passed by keyword)",
)
solid_name = current_context().observe_invocation(
self.given_alias,
self.solid_def,
input_bindings,
input_mappings,
self.tags,
self.hook_defs,
)
if len(self.solid_def.output_defs) == 0:
return None
if len(self.solid_def.output_defs) == 1:
output_name = self.solid_def.output_defs[0].name
return InvokedSolidOutputHandle(solid_name, output_name)
outputs = [output_def.name for output_def in self.solid_def.output_defs]
return namedtuple("_{solid_def}_outputs".format(solid_def=self.solid_def.name), outputs)(
**{output: InvokedSolidOutputHandle(solid_name, output) for output in outputs}
)
    def _process_argument_node(
        self, solid_name, output_node, input_name, input_mappings, input_bindings, arg_desc
    ):
        """Classify one argument passed to an invocation and record it.

        Valid arguments are a single invoked-solid output, a composite input
        mapping node, or a list of invoked-solid outputs (fan-in). Everything
        else raises DagsterInvalidDefinitionError with a targeted message.
        Mutates input_mappings / input_bindings in place.
        """
        # Output of a previously invoked solid -> dependency edge.
        if isinstance(output_node, InvokedSolidOutputHandle):
            input_bindings[input_name] = output_node
        # Input of the enclosing composite -> input mapping.
        elif isinstance(output_node, InputMappingNode):
            input_mappings[input_name] = output_node
        # List of outputs -> multi-dependency (fan-in) edge; must be homogeneous.
        elif isinstance(output_node, list):
            if all(map(lambda item: isinstance(item, InvokedSolidOutputHandle), output_node)):
                input_bindings[input_name] = output_node
            else:
                raise DagsterInvalidDefinitionError(
                    "In {source} {name}, received a list containing invalid types for input "
                    '"{input_name}" {arg_desc} in solid invocation {solid_name}. '
                    "Lists can only contain the output from previous solid invocations.".format(
                        source=current_context().source,
                        name=current_context().name,
                        arg_desc=arg_desc,
                        input_name=input_name,
                        solid_name=solid_name,
                    )
                )
        # A whole multi-output namedtuple was passed instead of one member of it.
        elif isinstance(output_node, tuple) and all(
            map(lambda item: isinstance(item, InvokedSolidOutputHandle), output_node)
        ):
            raise DagsterInvalidDefinitionError(
                "In {source} {name}, received a tuple of multiple outputs for "
                'input "{input_name}" {arg_desc} in solid invocation {solid_name}. '
                "Must pass individual output, available from tuple: {options}".format(
                    source=current_context().source,
                    name=current_context().name,
                    arg_desc=arg_desc,
                    input_name=input_name,
                    solid_name=solid_name,
                    options=output_node._fields,
                )
            )
        # A solid (or callable node) that was never actually invoked.
        elif isinstance(output_node, CallableSolidNode) or isinstance(
            output_node, ISolidDefinition
        ):
            raise DagsterInvalidDefinitionError(
                "In {source} {name}, received an un-invoked solid for input "
                '"{input_name}" {arg_desc} in solid invocation "{solid_name}". '
                "Did you forget parentheses?".format(
                    source=current_context().source,
                    name=current_context().name,
                    arg_desc=arg_desc,
                    input_name=input_name,
                    solid_name=solid_name,
                )
            )
        # Anything else (plain values, etc.) is invalid during composition.
        else:
            raise DagsterInvalidDefinitionError(
                "In {source} {name}, received invalid type {type} for input "
                '"{input_name}" {arg_desc} in solid invocation "{solid_name}". '
                "Must pass the output from previous solid invocations or inputs to the "
                "composition function as inputs when invoking solids during composition.".format(
                    source=current_context().source,
                    name=current_context().name,
                    type=type(output_node),
                    arg_desc=arg_desc,
                    input_name=input_name,
                    solid_name=solid_name,
                )
            )
def alias(self, name):
return CallableSolidNode(self.solid_def, name, self.tags)
def tag(self, tags):
tags = validate_tags(tags)
return CallableSolidNode(
self.solid_def,
self.given_alias,
frozentags(tags) if self.tags is None else self.tags.updated_with(tags),
)
def with_hooks(self, hook_defs):
hook_defs = check.set_param(hook_defs, "hook_defs", of_type=HookDefinition)
return CallableSolidNode(
self.solid_def, self.given_alias, self.tags, hook_defs.union(self.hook_defs)
)
class InvokedSolidNode(
    namedtuple(
        "_InvokedSolidNode", "solid_name solid_def input_bindings input_mappings tags hook_defs"
    )
):
    """The metadata about a solid invocation saved by the current composition context."""
    def __new__(
        cls, solid_name, solid_def, input_bindings, input_mappings, tags=None, hook_defs=None
    ):
        # Bug fix: the original wrote ``super(cls, InvokedSolidNode)`` with the
        # arguments reversed. That happens to work when ``cls`` is exactly
        # InvokedSolidNode, but raises TypeError for any subclass; the
        # conventional ``super(InvokedSolidNode, cls)`` is correct in both cases.
        return super(InvokedSolidNode, cls).__new__(
            cls,
            check.str_param(solid_name, "solid_name"),
            check.inst_param(solid_def, "solid_def", ISolidDefinition),
            check.dict_param(input_bindings, "input_bindings", key_type=str),
            check.dict_param(
                input_mappings, "input_mappings", key_type=str, value_type=InputMappingNode
            ),
            check.opt_inst_param(tags, "tags", frozentags),
            check.opt_set_param(hook_defs, "hook_defs", HookDefinition),
        )
class InvokedSolidOutputHandle(object):
    """The return value for an output when invoking a solid in a composition function.
    """
    def __init__(self, solid_name, output_name):
        # Identifies exactly one (solid invocation, output) pair in the composition.
        self.solid_name = check.str_param(solid_name, "solid_name")
        self.output_name = check.str_param(output_name, "output_name")
    def __iter__(self):
        # Guard: a single output handle is not iterable; fail with guidance
        # instead of letting Python produce a confusing TypeError.
        raise DagsterInvariantViolationError(
            'Attempted to iterate over an {cls}. This object represents the output "{out}" '
            'from the solid "{solid}". Consider yielding multiple Outputs if you seek to pass '
            "different parts of this output to different solids.".format(
                cls=self.__class__.__name__, out=self.output_name, solid=self.solid_name
            )
        )
    def __getitem__(self, idx):
        # Guard: indexing a single output handle is always a user mistake.
        raise DagsterInvariantViolationError(
            'Attempted to index in to an {cls}. This object represents the output "{out}" '
            'from the solid "{solid}". Consider yielding multiple Outputs if you seek to pass '
            "different parts of this output to different solids.".format(
                cls=self.__class__.__name__, out=self.output_name, solid=self.solid_name
            )
        )
    def alias(self, _):
        # Guard: .alias() belongs on the un-invoked callable node, not its output.
        raise DagsterInvariantViolationError(
            "In {source} {name}, attempted to call alias method for {cls}. This object "
            'represents the output "{out}" from the already invoked solid "{solid}". Consider '
            "checking the location of parentheses.".format(
                source=current_context().source,
                name=current_context().name,
                cls=self.__class__.__name__,
                solid=self.solid_name,
                out=self.output_name,
            )
        )
    def with_hooks(self, _):
        # Guard: .with_hooks() belongs on the un-invoked callable node as well.
        raise DagsterInvariantViolationError(
            "In {source} {name}, attempted to call hook method for {cls}. This object "
            'represents the output "{out}" from the already invoked solid "{solid}". Consider '
            "checking the location of parentheses.".format(
                source=current_context().source,
                name=current_context().name,
                cls=self.__class__.__name__,
                solid=self.solid_name,
                out=self.output_name,
            )
        )
class InputMappingNode(object):
    """Marker wrapping a composite's input definition.

    Passing one of these into a solid invocation records an input mapping
    (composite input -> inner solid input) rather than a dependency edge.
    """
    def __init__(self, input_def):
        # Retain the wrapped input definition for later mapping resolution.
        self.input_def = input_def
def composite_mapping_from_output(output, output_defs, solid_name):
    """Build the output-mapping dict for a @composite_solid's return value.

    *output* may be a single InvokedSolidOutputHandle, a tuple of handles,
    a dict mapping output-definition names to handles, or None (no mapping).
    Returns {output_def_name: mapping} or falls through to None when
    *output* is None; raises DagsterInvalidDefinitionError otherwise.
    """
    # output can be different types
    check.list_param(output_defs, "output_defs", OutputDefinition)
    check.str_param(solid_name, "solid_name")
    # single output: only valid when exactly one output is defined
    if isinstance(output, InvokedSolidOutputHandle):
        if len(output_defs) == 1:
            defn = output_defs[0]
            return {defn.name: defn.mapping_from(output.solid_name, output.output_name)}
        else:
            raise DagsterInvalidDefinitionError(
                "Returned a single output ({solid_name}.{output_name}) in "
                "@composite_solid {name} but {num} outputs are defined. "
                "Return a dict to map defined outputs.".format(
                    solid_name=output.solid_name,
                    output_name=output.output_name,
                    name=solid_name,
                    num=len(output_defs),
                )
            )
    output_mapping_dict = {}
    output_def_dict = {output_def.name: output_def for output_def in output_defs}
    # tuple returned directly: match each handle to a definition by output name
    if isinstance(output, tuple) and all(
        map(lambda item: isinstance(item, InvokedSolidOutputHandle), output)
    ):
        for handle in output:
            if handle.output_name not in output_def_dict:
                raise DagsterInvalidDefinitionError(
                    "Output name mismatch returning output tuple in @composite_solid {name}. "
                    "No matching OutputDefinition named {output_name} for {solid_name}.{output_name}."
                    "Return a dict to map to the desired OutputDefinition".format(
                        name=solid_name,
                        output_name=handle.output_name,
                        solid_name=handle.solid_name,
                    )
                )
            output_mapping_dict[handle.output_name] = output_def_dict[
                handle.output_name
            ].mapping_from(handle.solid_name, handle.output_name)
        return output_mapping_dict
    # mapping dict: explicit {output_def_name: handle} form
    if isinstance(output, dict):
        for name, handle in output.items():
            if name not in output_def_dict:
                raise DagsterInvalidDefinitionError(
                    "@composite_solid {name} referenced key {key} which does not match any "
                    "OutputDefinitions. Valid options are: {options}".format(
                        name=solid_name, key=name, options=list(output_def_dict.keys())
                    )
                )
            if not isinstance(handle, InvokedSolidOutputHandle):
                raise DagsterInvalidDefinitionError(
                    "@composite_solid {name} returned problematic dict entry under "
                    "key {key} of type {type}. Dict values must be outputs of "
                    "invoked solids".format(name=solid_name, key=name, type=type(handle))
                )
            output_mapping_dict[name] = output_def_dict[name].mapping_from(
                handle.solid_name, handle.output_name
            )
        return output_mapping_dict
    # error: any other non-None value cannot be mapped (None means "no outputs")
    if output is not None:
        raise DagsterInvalidDefinitionError(
            "@composite_solid {name} returned problematic value "
            "of type {type}. Expected return value from invoked solid or dict mapping "
            "output name to return values from invoked solids".format(
                name=solid_name, type=type(output)
            )
        )
| 40.176015 | 103 | 0.604545 |
dfc920c76909526b364aa90d857f462d594a3a55 | 826 | py | Python | HungrYTWeb/urls.py | andriov/footStore-django-webServer | 5741cc78b857e2f73ea12b6f387c22c4f2788d30 | [
"MIT"
] | null | null | null | HungrYTWeb/urls.py | andriov/footStore-django-webServer | 5741cc78b857e2f73ea12b6f387c22c4f2788d30 | [
"MIT"
] | 10 | 2020-06-05T17:43:35.000Z | 2022-02-11T03:38:45.000Z | HungrYTWeb/urls.py | andriov/footStore-django-webServer | 5741cc78b857e2f73ea12b6f387c22c4f2788d30 | [
"MIT"
] | null | null | null | """HungrYTWeb URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
# URL routes for the project: the products app handles /products/*,
# and the built-in Django admin is mounted at /admin/.
urlpatterns = [
    url(r'^products/', include('products.urls')),
    url(r'^admin/', admin.site.urls),
]
| 35.913043 | 79 | 0.700969 |
92c1d5b85ead74032daa909b1c9cef10ee31f926 | 1,761 | py | Python | src/calculate.py | Bonifase/b-covid-19-estimator-py | bc424cbed7db6641bbd6cba62d963dbcfa9d3702 | [
"MIT"
] | null | null | null | src/calculate.py | Bonifase/b-covid-19-estimator-py | bc424cbed7db6641bbd6cba62d963dbcfa9d3702 | [
"MIT"
] | null | null | null | src/calculate.py | Bonifase/b-covid-19-estimator-py | bc424cbed7db6641bbd6cba62d963dbcfa9d3702 | [
"MIT"
] | null | null | null | def result_calculator(data, multiplier):
impact = {}
# number of infected people in days from now
reported_cases = data.get('reportedCases')
impact['currentlyInfected'] = reported_cases * multiplier
total_number_of_days = data['timeToElapse']
periods_multplier = {'weeks': 7, 'months': 30}
if data['periodType'] in periods_multplier.keys():
total_number_of_days = data['timeToElapse'] * periods_multplier[
data['periodType']]
number_of_doubles = total_number_of_days // 3
impact['infectionsByRequestedTime'] = impact[
'currentlyInfected'] * (2**number_of_doubles)
# estimated severe positive cases that require hospitalization to recover.
impact['severeCasesByRequestedTime'] = int(
impact['infectionsByRequestedTime'] * 0.15
)
# available hospital beds for severe positive patients
available_beds = (
data['totalHospitalBeds'] * 0.35
)
impact['hospitalBedsByRequestedTime'] = int(
available_beds - impact['severeCasesByRequestedTime']
)
# number of severe positive cases that will require ICU care.
impact['casesForICUByRequestedTime'] = int(
impact['infectionsByRequestedTime'] * 0.05
)
# number of severe positive cases that will require ventilators.
impact['casesForVentilatorsByRequestedTime'] = int(
impact['infectionsByRequestedTime'] * 0.02
)
# money the economy is likely to lose daily
pop_income = data[
'region']['avgDailyIncomePopulation'] * data[
'region']['avgDailyIncomeInUSD']
dollars_in_flight = (impact[
'infectionsByRequestedTime'] * pop_income) / total_number_of_days
impact['dollarsInFlight'] = int(dollars_in_flight)
return impact
| 38.282609 | 79 | 0.694492 |
70a9acedb8f515742fc11ba5c300951bcca6b348 | 4,106 | py | Python | jopy/styles/plots.py | jowr/jopy | 5f7e6581e51c385d16154d95b5473e9f6b6e76e7 | [
"MIT"
] | 2 | 2017-11-11T17:59:42.000Z | 2020-04-04T17:53:59.000Z | jopy/styles/plots.py | jowr/jopy | 5f7e6581e51c385d16154d95b5473e9f6b6e76e7 | [
"MIT"
] | null | null | null | jopy/styles/plots.py | jowr/jopy | 5f7e6581e51c385d16154d95b5473e9f6b6e76e7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import print_function, division
import matplotlib
import matplotlib.pyplot as plt
import copy
class Figure(matplotlib.figure.Figure):
    """matplotlib Figure subclass with legend and export convenience helpers."""
    def _get_axis(self,**kwargs):
        # Return a single axis: the 'ax' kwarg if given, else the first known axis.
        ax = kwargs.pop('ax', self._get_axes()[0])
        return ax
    def _get_axes(self,**kwargs):
        # Return a list of axes from 'ax'/'axs' kwargs; falls back to the
        # figure's own axes and finally to plt.gca() if nothing is available.
        ax = kwargs.pop('ax', [])
        ax = kwargs.pop('axs', ax)
        if ax is None or len(ax)<1:
            # NOTE(review): matplotlib.figure.Figure has no _get_axes; the bare
            # except silently falls back to the current pyplot axis — confirm intended.
            try: ax = super(Figure, self)._get_axes()
            except: ax = [plt.gca()]; pass
        return ax
    def get_legend_handles_labels_axis(self,ax=None,axs=None):
        """Extracts the handles and labels from an axis or from a list of axes.
        Useful for manual legend processing and customisation.
        """
        ax = self._get_axes(ax=ax,axs=axs)
        handles = []; labels = []
        # Collect legend entries across all axes into flat lists.
        for a in ax:
            handlestmp, labelstmp = a.get_legend_handles_labels()
            handles.extend(handlestmp)
            labels.extend(labelstmp)
        # The first axis is returned as the one to draw the legend on.
        return handles, labels, ax[0]
    def draw_legend(self, **kwargs):
        """Puts a legend on the provided axis.
        Can be used with kwargs like ncol=2 and alike, which are passed
        on to the corrresponding pyplot routines.
        """
        # Accept both spellings for the legend text colour option.
        tc = kwargs.pop('textcolour', matplotlib.rcParams["text.color"])
        tc = kwargs.pop('textcolor', tc)
        #kwargs.setdefault('loc', 0)
        #kwargs.setdefault('frameon', True)
        h, l, a = self.get_legend_handles_labels_axis(ax=kwargs.pop('ax', None),axs=kwargs.pop('axs', None))
        #handles = copy.copy(kwargs.pop('handles', handles))
        # Copy the handles so forcing alpha=1 does not alter the plotted artists.
        handles = []
        for h in kwargs.pop('handles', h):
            handles.append(copy.copy(h))
            handles[-1].set_alpha(1.0)
        labels = []
        for l in kwargs.pop('labels', l):
            labels.append(copy.copy(l))
        legend = a.legend(handles,labels,**kwargs)
        try:
            # Style the legend frame to match the grid colour, no border line.
            rect = legend.get_frame()
            rect.set_facecolor(matplotlib.rcParams["grid.color"])
            rect.set_linewidth(0)
            rect.set_edgecolor(tc)
            # Change the alpha value, make sure it is visible
            def set_alpha(objList):
                for o in objList:
                    try: o.set_alpha(1.0)
                    except: matplotlib.artist.setp(o, alpha=1.0); pass
                    #mpl.artist.setp(o, markersize=6)
                    #mpl.artist.setp(o, alpha=np.max([1.0,o.get_alpha()]))
                    # h.set_alpha(np.max([1.0,h.get_alpha()]))
                    # #mpl.artist.setp(h, alpha=np.max([1.0,h.get_alpha()]))
                    # mpl.artist.setp(h, markersize=6)
            set_alpha(legend.legendHandles)
            set_alpha(legend.get_lines())
            set_alpha(legend.get_patches())
            #
            #for h in legend.legendHandles:
            # h.set_alpha(np.max([1.0,h.get_alpha()]))
            # #mpl.artist.setp(h, alpha=np.max([1.0,h.get_alpha()]))
            # mpl.artist.setp(h, markersize=6)
            # Change the legend label colors to almost black, too
            for t in legend.texts:
                t.set_color(tc)
        except AttributeError:
            # There are no labled objects
            pass
        return legend
    def to_file(self, name, **kwargs):
        # Save the figure with tight bounding box unless overridden via kwargs.
        dic = dict(bbox_inches='tight')
        dic.update(**kwargs)
        self.savefig(name, **dic)
    def to_raster(self, name, **kwargs):
        # Save as a raster image (png/jpg only) with a default of 300 dpi.
        dic = dict(dpi=300)
        dic.update(**kwargs)
        if name.endswith(".png") or name.endswith(".jpg"):
            self.to_file(name, **dic)
        else:
            raise ValueError("You can only save jpg and png images as raster images.")
def to_power_point(self, name, **kwargs):
dic = dict(dpi=600, transparent=True)
dic.update(**kwargs)
if name.endswith(".png"):
self.to_raster(name, **dic)
else:
raise ValueError("You should use png images with MS PowerPoint.") | 36.660714 | 108 | 0.549196 |
3d5592e4126384f71931680421321c58332bc023 | 1,751 | py | Python | gpytorch/mlls/exact_marginal_log_likelihood.py | gully/gpytorch | 10695c05eec760b0c3c2893d809f1b0bd72c48d2 | [
"MIT"
] | null | null | null | gpytorch/mlls/exact_marginal_log_likelihood.py | gully/gpytorch | 10695c05eec760b0c3c2893d809f1b0bd72c48d2 | [
"MIT"
] | null | null | null | gpytorch/mlls/exact_marginal_log_likelihood.py | gully/gpytorch | 10695c05eec760b0c3c2893d809f1b0bd72c48d2 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from .marginal_log_likelihood import MarginalLogLikelihood
from ..likelihoods import _GaussianLikelihoodBase
from ..distributions import MultivariateNormal
class ExactMarginalLogLikelihood(MarginalLogLikelihood):
    """Exact marginal log likelihood for GP regression with Gaussian noise."""
    def __init__(self, likelihood, model):
        """
        A special MLL designed for exact inference
        Args:
        - likelihood: (Likelihood) - the likelihood for the model
        - model: (Module) - the exact GP model
        """
        # Exact inference is only tractable with a Gaussian observation model.
        if not isinstance(likelihood, _GaussianLikelihoodBase):
            raise RuntimeError("Likelihood must be Gaussian for exact inference")
        super(ExactMarginalLogLikelihood, self).__init__(likelihood, model)
    def forward(self, output, target, *params):
        """Compute the MLL of `target` under the model output, scaled by data size."""
        if not isinstance(output, MultivariateNormal):
            raise RuntimeError("ExactMarginalLogLikelihood can only operate on Gaussian random variables")
        # Get the log prob of the marginal distribution (prior pushed through the likelihood)
        output = self.likelihood(output, *params)
        res = output.log_prob(target)
        # Add additional terms (SGPR / learned inducing points, heteroskedastic likelihood models)
        for added_loss_term in self.model.added_loss_terms():
            res = res.add(added_loss_term.loss(*params))
        # Add log probs of priors on the (functions of) parameters
        # NOTE: add_/div_ mutate `res` in place; the accumulation order matters.
        for _, prior, closure, _ in self.named_priors():
            res.add_(prior.log_prob(closure()).sum())
        # Scale by the amount of data we have
        num_data = target.size(-1)
        return res.div_(num_data)
    def pyro_factor(self, output, target, *params):
        # Register the MLL as a pyro factor so it contributes to a pyro model's density.
        import pyro
        loss = self(output, target, *params)
        pyro.factor("gp_mll", loss)
        return loss
| 38.065217 | 106 | 0.679612 |
759a12d3167969d71a6661e6b61c964b4d71d806 | 9,142 | py | Python | jobs/payment-jobs/tasks/cfs_create_account_task.py | thorwolpert/sbc-pay | ea355dfb13e783ed1e86ed92efaa45293463c348 | [
"Apache-2.0"
] | null | null | null | jobs/payment-jobs/tasks/cfs_create_account_task.py | thorwolpert/sbc-pay | ea355dfb13e783ed1e86ed92efaa45293463c348 | [
"Apache-2.0"
] | null | null | null | jobs/payment-jobs/tasks/cfs_create_account_task.py | thorwolpert/sbc-pay | ea355dfb13e783ed1e86ed92efaa45293463c348 | [
"Apache-2.0"
] | null | null | null | # Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Task to create CFS account offline."""
import re
from datetime import datetime
from typing import Dict, List
from flask import current_app
from pay_api.models import CfsAccount as CfsAccountModel
from pay_api.models import PaymentAccount as PaymentAccountModel
from pay_api.services.cfs_service import CFSService
from pay_api.services.oauth_service import OAuthService
from pay_api.utils.constants import RECEIPT_METHOD_PAD_DAILY
from pay_api.utils.enums import AuthHeaderType, CfsAccountStatus, ContentType, PaymentMethod
from sentry_sdk import capture_message
from services import routing_slip
from utils import mailer
from utils.auth import get_token
class CreateAccountTask: # pylint: disable=too-few-public-methods
    """Create CFS Account."""
    @classmethod
    def create_accounts(cls): # pylint: disable=too-many-locals
        """Find all pending accounts to be created in CFS.
        Steps:
        1. Find all pending CFS accounts.
        2. Create CFS accounts.
        3. Publish a message to the queue if successful.
        """
        # Pass a payment method here if offline account creation needs to be
        # restricted by payment method.
        pending_accounts: List[CfsAccountModel] = CfsAccountModel.find_all_pending_accounts()
        current_app.logger.info(f'Found {len(pending_accounts)} CFS Accounts to be created.')
        if len(pending_accounts) == 0:
            return
        auth_token = get_token()
        for pending_account in pending_accounts:
            # Find the payment account and create the pay system instance.
            pay_account: PaymentAccountModel = PaymentAccountModel.find_by_id(pending_account.account_id)
            # Routing-slip (cash/cheque) accounts follow a separate creation path.
            if pay_account.payment_method in (PaymentMethod.CASH.value, PaymentMethod.CHEQUE.value):
                routing_slip.create_cfs_account(pending_account, pay_account)
            else:
                cls._create_cfs_account(pending_account, pay_account, auth_token)
    @classmethod
    def _get_account_contact(cls, auth_token: str, auth_account_id: str):
        """Return account contact by calling auth API."""
        get_contact_endpoint = current_app.config.get('AUTH_API_ENDPOINT') + f'orgs/{auth_account_id}/contacts'
        contact_response = OAuthService.get(get_contact_endpoint, auth_token, AuthHeaderType.BEARER, ContentType.JSON)
        # Only the first contact is used for the CFS site address.
        return contact_response.json().get('contacts')[0]
    @classmethod
    def _create_cfs_account(cls, pending_account: CfsAccountModel, pay_account: PaymentAccountModel, auth_token: str):
        """Create or update a CFS account/site for one pending account record.

        On failure the pending record is rolled back; user-correctable PAD
        errors (invalid bank details) additionally mark the account INACTIVE
        and notify the mailer queue.
        """
        # If PAD Account creation in CFS is paused, then just continue
        # TODO Remove once PAD account bugs are fixed and stable on CAS side.
        if current_app.config.get('CFS_STOP_PAD_ACCOUNT_CREATION') and \
                pay_account.payment_method == PaymentMethod.PAD.value:
            current_app.logger.info('Continuing to next record as CFS PAD account creation is stopped.')
            return
        current_app.logger.info(
            f'Creating pay system instance for {pay_account.payment_method} for account {pay_account.id}.')
        account_contact = cls._get_account_contact(auth_token, pay_account.auth_account_id)
        contact_info: Dict[str, str] = {
            'city': account_contact.get('city'),
            'postalCode': account_contact.get('postalCode'),
            'province': account_contact.get('region'),
            'addressLine1': account_contact.get('street'),
            'country': account_contact.get('country')
        }
        payment_info: Dict[str, any] = {
            'bankInstitutionNumber': pending_account.bank_number,
            'bankTransitNumber': pending_account.bank_branch_number,
            'bankAccountNumber': pending_account.bank_account_number,
        }
        # For an existing CFS Account, call update.. This is to handle PAD update when CFS is offline
        try:
            if pending_account.cfs_account and pending_account.cfs_party and pending_account.cfs_site:
                # This means, PAD account details have changed. So update banking details for this CFS account
                bank_details = CFSService.update_bank_details(name=pay_account.auth_account_id,
                                                              party_number=pending_account.cfs_party,
                                                              account_number=pending_account.cfs_account,
                                                              site_number=pending_account.cfs_site,
                                                              payment_info=payment_info)
                pending_account.payment_instrument_number = bank_details.get('payment_instrument_number', None)
            else: # It's a new account, now create
                # If the account have banking information, then create a PAD account else a regular account.
                if pending_account.bank_number and pending_account.bank_branch_number \
                        and pending_account.bank_account_number:
                    cfs_account_details = CFSService.create_cfs_account(identifier=pay_account.auth_account_id,
                                                                        contact_info=contact_info,
                                                                        payment_info=payment_info,
                                                                        receipt_method=RECEIPT_METHOD_PAD_DAILY)
                else:
                    cfs_account_details = CFSService.create_cfs_account(identifier=pay_account.auth_account_id,
                                                                        contact_info=contact_info,
                                                                        receipt_method=None)
                pending_account.payment_instrument_number = cfs_account_details.get('payment_instrument_number',
                                                                                   None)
                pending_account.cfs_account = cfs_account_details.get('account_number')
                pending_account.cfs_site = cfs_account_details.get('site_number')
                pending_account.cfs_party = cfs_account_details.get('party_number')
        except Exception as e: # NOQA # pylint: disable=broad-except
            # publish to mailer queue.
            is_user_error = False
            if pay_account.payment_method == PaymentMethod.PAD.value:
                is_user_error = CreateAccountTask._check_user_error(e.response) # pylint: disable=no-member
            capture_message(f'Error on creating CFS Account: account id={pay_account.id}, '
                            f'auth account : {pay_account.auth_account_id}, ERROR : {str(e)}', level='error')
            current_app.logger.error(e)
            pending_account.rollback()
            if is_user_error:
                capture_message(f'User Input needed for creating CFS Account: account id={pay_account.id}, '
                                f'auth account : {pay_account.auth_account_id}, ERROR : Invalid Bank Details',
                                level='error')
                mailer.publish_mailer_events('PadSetupFailed', pay_account)
                pending_account.status = CfsAccountStatus.INACTIVE.value
                pending_account.save()
            return
        # If the account has an activation time set,
        # before that it should be set to the PENDING_PAD_ACTIVATION status.
        is_account_in_pad_confirmation_period = pay_account.pad_activation_date is not None and \
            pay_account.pad_activation_date > datetime.today()
        pending_account.status = CfsAccountStatus.PENDING_PAD_ACTIVATION.value if \
            is_account_in_pad_confirmation_period else CfsAccountStatus.ACTIVE.value
        pending_account.save()
    @staticmethod
    def _check_user_error(response) -> bool:
        """Check and see if it's an error to be fixed by the user (bad bank details)."""
        headers = response.headers
        # CAS errors are in the below format
        # [Errors = [34] Bank Account Number is Invalid]
        # [Errors = [32] Branch Number is Invalid]
        # [Errors = [31] Bank Number is Invalid]
        error_strings = ['Bank Account Number', 'Branch Number', 'Bank Number']
        if cas_error := headers.get('CAS-Returned-Messages', None):
            # searches for error message and invalid word
            if any(re.match(f'.+{word}.+invalid.+', cas_error, re.IGNORECASE) for word in error_strings):
                return True
        return False
| 55.406061 | 118 | 0.645701 |
429c84dbf78c6998b1c40fc91331df8c2db5560a | 4,012 | py | Python | main.py | mpkg-project/mpkg-autobuild | bff4682b06399a72fbaca2ae0927481a4480cc15 | [
"Apache-2.0"
] | null | null | null | main.py | mpkg-project/mpkg-autobuild | bff4682b06399a72fbaca2ae0927481a4480cc15 | [
"Apache-2.0"
] | null | null | null | main.py | mpkg-project/mpkg-autobuild | bff4682b06399a72fbaca2ae0927481a4480cc15 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# coding: utf-8
import json
import os
import re
from multiprocessing.dummy import Pool
from sys import argv
from mpkg.config import HOME, GetConfig, SetConfig
from mpkg.load import HasConflict, Load, Prepare, Sorted
from mpkg.utils import GetPage, PreInstall
jobs = 10
s = GetPage(
'https://github.com/mpkg-project/mpkg-autobuild/releases/download/AutoBuild/warning.txt')
merged = re.findall('^merging (.*)', s, re.M)
failed = re.findall('^failed: (.*)', s, re.M)
failed = failed[0].split('|') if failed else []
for string in re.findall('^ dependency: (.*)', s, re.M):
failed += string.split('|')
merged = [x for x in merged if not x in failed]
def readlist(file) -> list:
    """Return the stripped lines of *file*, or an empty list if it is missing."""
    if not os.path.exists(file):
        return []
    with open(file, 'r', encoding='utf8') as handle:
        content = handle.read()
    return [entry.strip() for entry in content.splitlines()]
def loadsources(sources):
    """Load every source in parallel and split the results into (softs, pkgs)."""
    with Pool(jobs) as pool:
        loaded = pool.map(Load, sources)
    valid_items = [item for item in loaded if item]
    return Sorted(valid_items)
def write(value):
    """Echo a warning to stdout and append it to release/warning.txt."""
    print('warning: '+value)
    with open('release/warning.txt', 'a') as warn_file:
        warn_file.write(value+'\n')
def getsofts(file, lock=[]):
    """Load package definitions from the sources listed in *file*.

    *lock* restricts which sources/ids are processed. Returns a flat list of
    soft dicts, including the package data of successfully prepared pkgs.
    NOTE: the mutable default ``lock=[]`` is never mutated here, so it is
    benign, but callers should still prefer passing an explicit list.
    """
    sources = readlist(file)
    if not sources:
        return []
    softs, pkgs = [], []
    if lock:
        # NOTE(review): this loads ALL sources once per locked source, and the
        # branch below filters by membership in an empty lock — the lock
        # handling looks inverted/duplicated. TODO confirm against upstream.
        for source in sources:
            if source in lock:
                softs_, pkgs_ = loadsources(sources)
                softs += softs_
                pkgs += pkgs_
    else:
        softs_, pkgs_ = loadsources(sources)
        for soft in softs_:
            if soft['id'] in lock:
                softs.append(soft)
            else:
                print(f'pass {soft["id"]}')
        for pkg in pkgs_:
            if pkg.ID in lock:
                pkgs.append(pkg)
            else:
                print(f'pass {pkg.ID}')
    # Abort-level warning if two packages share an id.
    score = HasConflict(softs, pkgs)
    if score:
        write(f'id conflict: {set(score)}')
    # Packages requiring interactive configuration cannot be auto-built.
    pkgs = [pkg for pkg in pkgs if not pkg.needConfig]
    with Pool(jobs) as p:
        err = [result for result in p.map(Prepare, pkgs) if result]
    if err:
        write('failed: ' + '|'.join([pkg.ID for pkg in err]))
    # Collect the package data of every pkg that prepared successfully.
    for soft in [pkg.json_data['packages'] for pkg in pkgs if pkg not in err]:
        softs += soft
    return softs
def merge_softs(old, new, output=False):
    """Merge two package lists by id; entries in *new* win.

    Old entries absent from *new* are carried over unless they appear in the
    module-level ``merged`` list, in which case they are deprecated (dropped).
    With *output* set, carried-over entries are reported via ``write``.
    """
    old_by_id = {soft['id']: soft for soft in old}
    new_by_id = {soft['id']: soft for soft in new}
    for soft_id, soft in old_by_id.items():
        if soft_id in new_by_id:
            continue
        if soft_id in merged:
            write(f'deprecate {soft_id}')
            continue
        if output:
            write(f'merging {soft_id}')
            if 'depends' in soft:
                write(' dependency: {0}'.format('|'.join(soft['depends'])))
        new_by_id[soft_id] = soft
    return list(new_by_id.values())
# --- main script: fetch previous release files, rebuild each package list,
# --- and write the merged results into release/ for publishing.
PreInstall()
repo = argv[1]  # GitHub "owner/name" of the autobuild repository
if not os.path.exists('release'):
    os.mkdir('release')
os.system('mpkg set unsafe yes')
for type in ['main', 'extras', 'scoop']:
    # Download the previously published package list for this channel.
    os.system(
        f'wget -q https://github.com/{repo}/releases/download/AutoBuild/{type}.json')
    if not os.path.exists(f'{type}.json'):
        write(f'no history file: {type}.json')
        history = []
    else:
        with open(f'{type}.json', 'r', encoding='utf8') as f:
            history = json.load(f)['packages']
    # Optional lock list restricts which ids get rebuilt this run.
    lock = readlist(type+'.lock.list')
    patch = getsofts(type+'.patch.list', lock)
    softs = getsofts(type+'.list', lock)
    # Patch entries override the freshly built list; history fills any gaps.
    softs = merge_softs(softs, patch)
    softs = merge_softs(history, softs, output=True)
    data = {}
    data['packages'] = softs
    # The scoop channel is published at the repo root, not under release/.
    filename = 'release/'+type+'.json' if not type == 'scoop' else 'scoop.json'
    with open(filename, 'w', encoding='utf8') as f:
        f.write(json.dumps(data))
# Ensure warning.txt always exists so the release asset upload never fails.
if not os.path.exists('release/warning.txt'):
    os.system('echo pass > release/warning.txt')
| 29.284672 | 93 | 0.554337 |
c8313952d9a2dba609144ad44bd470e1b78acd1f | 1,774 | py | Python | testproj/todo/serializer.py | NguyenDuyToan/drf-yasg | 20786e53c3949d14c00d70d112f4452ec850ba5f | [
"BSD-3-Clause"
] | 1 | 2020-11-23T02:11:00.000Z | 2020-11-23T02:11:00.000Z | testproj/todo/serializer.py | NguyenDuyToan/drf-yasg | 20786e53c3949d14c00d70d112f4452ec850ba5f | [
"BSD-3-Clause"
] | null | null | null | testproj/todo/serializer.py | NguyenDuyToan/drf-yasg | 20786e53c3949d14c00d70d112f4452ec850ba5f | [
"BSD-3-Clause"
] | 1 | 2020-11-23T02:11:01.000Z | 2020-11-23T02:11:01.000Z | from collections import OrderedDict
from django.utils import timezone
from rest_framework import serializers
from rest_framework_recursive.fields import RecursiveField
from .models import Todo, TodoAnother, TodoTree, TodoYetAnother
class TodoSerializer(serializers.ModelSerializer):
class Meta:
model = Todo
fields = ('title', 'a_hidden_field',)
a_hidden_field = serializers.HiddenField(default=timezone.now)
class TodoAnotherSerializer(serializers.ModelSerializer):
todo = TodoSerializer()
class Meta:
model = TodoAnother
fields = ('title', 'todo')
class TodoYetAnotherSerializer(serializers.ModelSerializer):
class Meta:
model = TodoYetAnother
fields = ('title', 'todo')
depth = 2
swagger_schema_fields = {
'example': OrderedDict([
('title', 'parent'),
('todo', OrderedDict([
('title', 'child'),
('todo', None),
])),
])
}
class TodoTreeSerializer(serializers.ModelSerializer):
children = serializers.ListField(child=RecursiveField(), source='children.all')
class Meta:
model = TodoTree
fields = ('id', 'title', 'children')
class TodoRecursiveSerializer(serializers.ModelSerializer):
parent = RecursiveField(read_only=True)
parent_id = serializers.PrimaryKeyRelatedField(queryset=TodoTree.objects.all(), pk_field=serializers.IntegerField(),
write_only=True, allow_null=True, required=False, default=None,
source='parent')
class Meta:
model = TodoTree
fields = ('id', 'title', 'parent', 'parent_id')
| 30.067797 | 120 | 0.61894 |
be3d1eeefcccc6d39792dce1e1c806985343a140 | 467 | py | Python | raspeedi/migrations/0002_raspeedi_corvets.py | Nels885/csd_dashboard | aa5a3b970c50a2a93af722f962bd87c3728f233c | [
"MIT"
] | null | null | null | raspeedi/migrations/0002_raspeedi_corvets.py | Nels885/csd_dashboard | aa5a3b970c50a2a93af722f962bd87c3728f233c | [
"MIT"
] | null | null | null | raspeedi/migrations/0002_raspeedi_corvets.py | Nels885/csd_dashboard | aa5a3b970c50a2a93af722f962bd87c3728f233c | [
"MIT"
] | null | null | null | # Generated by Django 2.2.2 on 2019-06-11 15:41
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('squalaetp', '0001_initial'),
('raspeedi', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='raspeedi',
name='corvets',
field=models.ManyToManyField(blank=True, related_name='raspeedi', to='squalaetp.Corvet'),
),
]
| 23.35 | 101 | 0.605996 |
bdea4baa154473ede6fbb06a09711b389bd16abf | 8,652 | py | Python | perfrunner/workloads/revAB/fittingCode/fitModel_noPickling.py | tonyduydao/perfrunner | 5e0dab1af6083dc6408efcaa9f8d61262a43e6f2 | [
"Apache-2.0"
] | 1 | 2021-03-03T23:04:26.000Z | 2021-03-03T23:04:26.000Z | perfrunner/workloads/revAB/fittingCode/fitModel_noPickling.py | tonyduydao/perfrunner | 5e0dab1af6083dc6408efcaa9f8d61262a43e6f2 | [
"Apache-2.0"
] | null | null | null | perfrunner/workloads/revAB/fittingCode/fitModel_noPickling.py | tonyduydao/perfrunner | 5e0dab1af6083dc6408efcaa9f8d61262a43e6f2 | [
"Apache-2.0"
] | null | null | null | """
Script for "fitting" various graph models to approximate OSN graphs
The Barabasi-Albert model does not have varied parameters. n = # of nodes, and m is set as |E|/n.
References
-----------
[1] Sala A., Cao L., Wilson, C., Zablit, R., Zheng, H., Zhao, B. Measurement-calibrated graph models for social network experiments. In Proc. of WWW (2010).
"""
# Copyright (C) 2011 by
# Alessandra Sala <alessandra@cs.ucsb.edu>
# Lili Cao <lilicao@cs.ucsb.edu>
# Christo Wilson <bowlin@cs.ucsb.edu>
# Robert Zablit <rzablit@cs.ucsb.edu>
# Haitao Zheng <htzheng@cs.ucsb.edu>
# Ben Y. Zhao <ravenben@cs.ucsb.edu>
# All rights reserved.
# BSD license.
__author__ = "Adelbert Chang (adelbert_chang@cs.ucsb.edu), Alessandra Sala (alessandra@cs.ucsb.edu)"
__all__ = ['writeEdgeList',
'getdk2',
'fit_forestFire_mod',
'fit_randomWalk_mod',
'fit_nearestNeighbor_mod']
import os
import sys
import networkx as nx
import socialModels as sm
import twoKDistance as tk
def writeEdgeList(G, myFile):
    """Write the edge list of ``G`` to ``myFile``, one 'u<TAB>v' pair per line.

    ``G`` only needs an ``edges_iter()`` method yielding 2-tuples (the
    networkx 1.x graph API used throughout this module).
    """
    # 'with' guarantees the file is closed even if iterating the graph raises
    # (the original open/close pair leaked the handle on error).
    with open(myFile, 'w') as outfile:
        for edge in G.edges_iter():
            outfile.write(str(edge[0]) + '\t' + str(edge[1]) + '\n')
def getdk2(G, graphID, dkPath, resultPath):
    """Extract the dK-2 distribution of ``G``.

    Dumps G's edge list to '<resultPath><graphID>_edgeList.txt' and runs the
    external dK tool (``dkPath``) on it, redirecting its output into
    '<resultPath><graphID>_target.2k'.
    """
    # Dump the edge list first; the external tool reads it from disk.
    edgeFile = '%s%s_edgeList.txt' % (resultPath, graphID)
    writeEdgeList(G, edgeFile)
    command = '%s -k 2 -i %s > %s%s_target.2k' % (dkPath, edgeFile, resultPath, graphID)
    os.system(command)
def fit_forestFire_mod(graphSize, graphID, dkPath, original2k, resultPath):
    """Sweep the modified Forest Fire model's burn rate 'p' over (0, 1).

    For each p a synthetic graph of ``graphSize`` nodes is generated, its
    dK-2 distribution extracted, and the distance to ``original2k`` appended
    to '<graphID>_ff_dkDistances.txt' under ``resultPath``.
    """
    # 'with' closes the result file even if a model run fails; the prints
    # were Python-2-only statements and are now cross-version calls.
    with open(resultPath + graphID + '_ff_dkDistances.txt', 'w') as outfile:
        p = 0.01
        while p < 1.0:
            print('Running modified Forest Fire with parameters: n = %s p = %s'
                  % (graphSize, p))
            # The output name embeds str(p); the float accumulation below is
            # kept as-is so file names stay compatible with earlier runs.
            newFile = graphID + '_ff_' + str(p)
            # Create synthetic graph
            syntheticGraph = sm.forestFire_mod(graphSize, p)
            # Write edge list and 2k distro to file
            print('Calculating dK-2...\n')
            getdk2(syntheticGraph, newFile, dkPath, resultPath)
            # Find distance between the dK-2 distributions
            dkDistance = tk.get_2k_distance(original2k, resultPath + newFile + '_target.2k')
            outfile.write(str(dkDistance) + '\tp = ' + str(p) + '\n')
            outfile.flush()
            p += 0.01
def fit_randomWalk_mod(graphSize, graphID, dkPath, original2k, resultPath, interval):
    """Sweep the modified Random Walk model over 'qe' and 'qv'.

    Coarse mode (``interval`` falsy): both probabilities run 0.1 .. 0.9 in
    steps of 0.1. Fine mode: ``interval`` = [qe_start, qe_end, qv_start,
    qv_end] with closed bounds (the end values are sampled too); values are
    scaled to integer percent internally so stepping by 1 is exact. For
    each (qe, qv) pair a synthetic graph is generated and its dK-2 distance
    to ``original2k`` is appended to the result file.
    """
    if not interval:
        outfile = open(resultPath + graphID + '_rwCoarse_dkDistances.txt', 'w')
        qe = 0.1
        qe_end = 0.9
        step = 0.1
    else:
        # Work in integer percent so stepping by 1 avoids float drift.
        outfile = open(resultPath + graphID + '_rwFine_dkDistances.txt', 'w')
        qe = interval[0] * 100
        qe_end = interval[1] * 100
        step = 1
    outfile.write('dk-2 Distance\tqe\tqv\n')
    while qe <= qe_end:
        if not interval:
            qv = 0.1
            qv_end = 0.9
        else:
            qv = interval[2] * 100
            qv_end = interval[3] * 100
        while qv <= qv_end:
            # BUGFIX: fine-mode values were scaled *100 above, so it is those
            # (not the coarse ones) that must be divided back down to
            # probabilities; the original conditions were inverted.
            qeFloat = float(qe) / 100 if interval else qe
            qvFloat = float(qv) / 100 if interval else qv
            print('Running modified Random Walk with parameters: n = %s qe = %s qv = %s'
                  % (graphSize, qeFloat, qvFloat))
            # NOTE(review): the file prefix says 'rwCoarse' even in fine
            # mode; kept for output-name compatibility.
            newFile = graphID + '_rwCoarse_' + str(qeFloat) + '_' + str(qvFloat)
            # Create synthetic graph
            syntheticGraph = sm.randomWalk_mod(graphSize, qeFloat, qvFloat)
            # Write edge list and 2k distro to file
            print('Calculating dK-2...\n')
            getdk2(syntheticGraph, newFile, dkPath, resultPath)
            # Find distance between the dK-2 distributions
            dkDistance = tk.get_2k_distance(original2k, resultPath + newFile + '_target.2k')
            outfile.write(str(dkDistance) + '\tqe = ' + str(qeFloat) + '\tqv = ' + str(qvFloat) + '\n')
            outfile.flush()
            qv += step
        qe += step
        outfile.write('\n')
    outfile.close()
def fit_nearestNeighbor_mod(graphSize, graphID, dkPath, original2k, resultPath, k):
    """Sweep the modified Nearest Neighbor model over 'k' and 'u'.

    If ``k`` is truthy (fine mode) only that k is used, with u = 0.01 .. 0.99
    in steps of 0.01; otherwise (coarse mode) a fixed grid of k and u values
    is sampled. Each (k, u) pair's dK-2 distance to ``original2k`` is
    appended to the result file under ``resultPath``.
    """
    if k:
        outfile = open(resultPath + graphID + '_nnFine_dkDistances.txt', 'w')
        kList = [k]
        uList = []
        myU = 0.01
        while myU < 1:
            uList.append(myU)
            myU += 0.01
    else:
        outfile = open(resultPath + graphID + '_nnCoarse_dkDistances.txt', 'w')
        kList = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        uList = [.01, .05, .10, .20, .25,.30, .35, .40, .45, .50,
                 .55, .60, .65, .70, .75,.80, .85, .95]
    outfile.write('dk-2 Distance\tk\tu\n')
    for k in kList:
        for u in uList:
            # Python-2 print statements converted to cross-version calls.
            print('Running modified Nearest Neighbor with parameters: n = %s k = %s u = %s'
                  % (graphSize, k, u))
            newFile = graphID + '_nn_' + str(k) + '_' + str(u)
            # Create synthetic graph
            syntheticGraph = sm.nearestNeighbor_mod(graphSize, u, k)
            # Write edge list and 2k distro to file
            print('Calculating dK-2...\n')
            getdk2(syntheticGraph, newFile, dkPath, resultPath)
            # Find distance between the dK-2 distributions
            dkDistance = tk.get_2k_distance(original2k, resultPath + newFile + '_target.2k')
            outfile.write(str(dkDistance) + '\tk = ' + str(k) + '\tu = ' + str(u) + '\n')
            outfile.flush()
        outfile.write('\n')
    outfile.close()
if __name__ == "__main__":
if len(sys.argv) < 6:
sys.exit('Usage: python fitModel[_noPickling].py -ff/-rw/-nn <graph name> <pickle path> <path of dkDist code> <path of output folder> [additional parameters for modified Random Walk/Nearest Neighbor]')
modelName = sys.argv[1]
graphID = sys.argv[2]
picklePath = sys.argv[3]
dkPath = sys.argv[4]
resultPath = sys.argv[5]
if not os.path.exists('./' + resultPath):
os.makedirs('./' + resultPath)
print 'Extracting graph from pickle...'
G = nx.read_gpickle(picklePath)
# Extract dK-2
print 'Extracting dK-2...'
getdk2(G, graphID, dkPath, resultPath)
# Begin fitting
graphSize = len(G)
if modelName == '-ff':
fit_forestFire_mod(graphSize, graphID, dkPath, resultPath + graphID + '_target.2k', resultPath)
elif modelName == '-rw':
if len(sys.argv) < 7:
print 'Modified random walk testing requires -coarse/-fine parameter as last argument'
if sys.argv[6] == '-fine':
if len(sys.argv) != 11:
sys.exit('-fine flag requires additional qe_start qe_end qv_start qv_end parameters at the end.')
# If coarse sampling, no intervals are needed
if sys.argv[6] == '-coarse':
interval = []
# If fine sampling, make list of intervals in the order qe_start, qe_end, qv_start, qv_end
elif sys.argv[6] == '-fine':
qe_start = float(sys.argv[7])
qe_end = float(sys.argv[8])
qv_start = float(sys.argv[9])
qv_end = float(sys.argv[10])
interval = [qe_start, qe_end, qv_start, qv_end]
else:
sys.exit('Invalid -coarse/-fine value.')
fit_randomWalk_mod(graphSize, graphID, dkPath, resultPath + graphID + '_target.2k', resultPath, interval)
elif modelName == '-nn':
if len(sys.argv) < 7:
print 'Modified nearest neighbor testing requires -coarse/-fine parameter as last argument.'
if sys.argv[6] == '-fine':
if len(sys.argv) != 8:
sys.exit('-fine flag requires additional k parameter at the end.')
if sys.argv[6] == '-coarse':
k = 0
elif sys.argv[6] == '-fine':
k = int(sys.argv[7])
else:
sys.exit('Invalid -coarse/-fine value.')
fit_nearestNeighbor_mod(graphSize, graphID, dkPath, resultPath + graphID + '_target.2k', resultPath, k)
else:
sys.exit('Invalid parameters for -ff/-rw/-nn')
| 33.66537 | 209 | 0.587147 |
81c45c1238963f27af978a713a93fd4bdb07d892 | 2,006 | py | Python | tools/diff.py | remynaps/amanda | 1fecae03d65a89647c6ce0c6333d09a4d1632078 | [
"Unlicense"
] | 5 | 2017-10-10T10:25:20.000Z | 2022-02-04T14:37:14.000Z | tools/diff.py | remynaps/amanda | 1fecae03d65a89647c6ce0c6333d09a4d1632078 | [
"Unlicense"
] | null | null | null | tools/diff.py | remynaps/amanda | 1fecae03d65a89647c6ce0c6333d09a4d1632078 | [
"Unlicense"
] | 4 | 2017-10-10T10:25:40.000Z | 2021-01-24T10:23:47.000Z | #!/usr/bin/env python3
# Place this file in the folder that holds this repository.
# Use `git clone https://github.com/Rubykuby/amanda-resources` to clone all
# other Amanda projects.
# Edit the res_dir and caps variables accordingly.
import os
ama_dir = os.path.join("amanda", "src")
res_dir = os.path.join("amanda-resources", "Amanda205", "Amalib")
caps = True
def main():
    """Diff every file in ama_dir against its counterpart in res_dir and
    report files that exist on only one side.

    When the module-level ``caps`` flag is set, res_dir file names are
    assumed to start with an upper-case letter while ama_dir names start
    lower-case.
    """
    print("ama_dir: {}".format(ama_dir))
    print("res_dir: {}".format(res_dir))
    not_in_ama_dir = []
    not_in_res_dir = []
    # Walk all files under ama_dir.
    for root, _, files in os.walk(ama_dir):
        for f in files:
            # Capitalise the first letter to get the expected res_dir name.
            if caps:
                target_f = "{}{}".format(f[0].upper(), f[1:])
            else:
                target_f = f
            # NOTE(review): res_dir is treated as flat — only its top level
            # is checked for the counterpart.
            if not os.path.isfile(os.path.join(res_dir, target_f)):
                not_in_res_dir.append(target_f)
            else:
                # BUGFIX: join with 'root' (the directory the file was found
                # in), not ama_dir, so files in subdirectories are diffed
                # against the correct path.
                cmd = "diff {} {}".format(os.path.join(root, f),
                                          os.path.join(res_dir, target_f))
                print("$ {}".format(cmd))
                os.system(cmd)
    # Reverse direction: find res_dir files missing from ama_dir.
    for root, _, files in os.walk(res_dir):
        for f in files:
            if caps:
                target_f = "{}{}".format(f[0].lower(), f[1:])
            else:
                target_f = f
            if not os.path.isfile(os.path.join(ama_dir, target_f)):
                not_in_ama_dir.append(target_f)
    print()
    print("The following files from {} do not exist in {}:".format(res_dir,
                                                                   ama_dir))
    for f in not_in_ama_dir:
        print(f)
    print()
    print("The following files from {} do not exist in {}:".format(ama_dir,
                                                                   res_dir))
    for f in not_in_res_dir:
        print(f)
if __name__ == "__main__":
main()
| 32.885246 | 76 | 0.540877 |
a535ef20fc59f8219d277879176e8a17842cf2b0 | 361 | py | Python | prepnet/executor/executor_base.py | elda27/prepnet | 0f05018969496321aaa770b7e22bda858dab0ad6 | [
"MIT"
] | null | null | null | prepnet/executor/executor_base.py | elda27/prepnet | 0f05018969496321aaa770b7e22bda858dab0ad6 | [
"MIT"
] | 6 | 2020-08-03T15:44:18.000Z | 2020-08-15T17:39:45.000Z | prepnet/executor/executor_base.py | elda27/prepnet | 0f05018969496321aaa770b7e22bda858dab0ad6 | [
"MIT"
] | null | null | null | from abc import ABCMeta, abstractmethod
from typing import Union, List
import pandas as pd
class ExecutorBase(metaclass=ABCMeta):
    """Abstract interface for preprocessing executors.

    Concrete subclasses transform a pandas Series/DataFrame forward
    (``encode``) and reverse that transformation (``decode``).
    """
    @abstractmethod
    def encode(self, df: Union[pd.Series, pd.DataFrame]):
        """Apply the forward transformation to ``df``."""
        raise NotImplementedError()
    @abstractmethod
    def decode(self, df: Union[pd.Series, pd.DataFrame]):
        """Reverse the transformation previously applied by ``encode``."""
        raise NotImplementedError()
| 25.785714 | 57 | 0.725762 |
b4d4c9a8062b87ea62a483ec9e5406dc63e3e3bc | 13,491 | py | Python | NewsSentiment/models/singletarget/lcfst.py | jamie-iai/NewsMTSC | 912d2a84b2c81962c06729a31d974bdf95d33dc9 | [
"MIT"
] | null | null | null | NewsSentiment/models/singletarget/lcfst.py | jamie-iai/NewsMTSC | 912d2a84b2c81962c06729a31d974bdf95d33dc9 | [
"MIT"
] | null | null | null | NewsSentiment/models/singletarget/lcfst.py | jamie-iai/NewsMTSC | 912d2a84b2c81962c06729a31d974bdf95d33dc9 | [
"MIT"
] | null | null | null | # adapted from https://github.com/yangheng95/LCF-ABSA and
# https://github.com/StevePhan101/LCFS-BERT/
from argparse import Namespace
from typing import Dict
import numpy as np
import torch
import torch.nn as nn
# from transformers.modeling_bert import BertPooler, BertSelfAttention
from NewsSentiment.consts import *
from NewsSentiment.dataset import FXDataset
from NewsSentiment.models.FXBaseModel import FXBaseModel
class PointwiseFeedForward(nn.Module):
    """Position-wise two-layer feed-forward block.

    Implemented with kernel-size-1 Conv1d layers, which apply the same
    linear map at every sequence position: (batch, seq, d_hid) ->
    (batch, seq, d_out), with a ReLU between the layers and dropout on
    the result.
    """

    def __init__(self, d_hid, d_inner_hid=None, d_out=None, dropout=0):
        super(PointwiseFeedForward, self).__init__()
        # Missing sizes cascade: inner defaults to the input width and the
        # output defaults to the inner width.
        d_inner_hid = d_hid if d_inner_hid is None else d_inner_hid
        d_out = d_inner_hid if d_out is None else d_out
        self.w_1 = nn.Conv1d(d_hid, d_inner_hid, 1)  # position-wise
        self.w_2 = nn.Conv1d(d_inner_hid, d_out, 1)  # position-wise
        self.dropout = nn.Dropout(dropout)
        self.relu = nn.ReLU()

    def forward(self, x):
        # Conv1d expects (batch, channels, seq), so swap the last two axes
        # on the way in and back again on the way out.
        channels_first = x.transpose(1, 2)
        hidden = self.relu(self.w_1(channels_first))
        projected = self.w_2(hidden).transpose(2, 1)
        return self.dropout(projected)
class SelfAttention(nn.Module):
    """Self-attention layer with a tanh on its output.

    NOTE(review): ``self.SA`` is assigned None because the
    ``BertSelfAttention`` import is commented out at the top of this file;
    as checked in, forward() raises TypeError when ``self.SA`` is called.
    Restore the transformers import to use this module.
    """
    def __init__(self, config, opt):
        super(SelfAttention, self).__init__()
        self.opt = opt
        self.config = config
        self.SA = None # BertSelfAttention(config) -- import currently disabled (see module top)
        self.tanh = torch.nn.Tanh()
    def forward(self, inputs):
        # All-zero additive attention mask of shape
        # (batch, 1, 1, max_seq_len): no position is masked out.
        zero_tensor = torch.tensor(
            np.zeros((inputs.size(0), 1, 1, self.opt.max_seq_len), dtype=np.float32),
            dtype=torch.float32,
        ).to(self.opt.device)
        SA_out = self.SA(inputs, zero_tensor)
        # Element 0 of the returned sequence is the attended hidden states.
        return self.tanh(SA_out[0])
class LCFST_BERT(FXBaseModel):
    """Single-target LCF/LCFS-style sentiment classifier on top of BERT.

    Combines three views of the input before pooling to a polarity:
    (a) the global context ("SPC": text and target fed jointly),
    (b) the local context weighted by syntactic distance to the target
        (CDM/CDW schemes from LCF/LCFS-BERT), and
    (c) the local context weighted by a learned attention vector over the
        syntactic dependency matrix.
    """

    @staticmethod
    def get_language_models():
        # Only the default BERT weights are required by this model.
        return (BERT_BASE_UNCASED,)

    @staticmethod
    def get_input_field_ids():
        # Dataset fields consumed by forward() (see FXDataset).
        return [
            (BERT_BASE_UNCASED, FIELD_TEXT_THEN_TARGET_IDS_WITH_SPECIAL_TOKENS),
            (
                BERT_BASE_UNCASED,
                FIELD_TEXT_THEN_TARGET_IDS_WITH_SPECIAL_TOKENS_SEGMENT_IDS,
            ),
            (BERT_BASE_UNCASED, FIELD_TEXT_IDS_WITH_SPECIAL_TOKENS),
            (BERT_BASE_UNCASED, FIELD_TARGET_IDS_WITH_SPECIAL_TOKENS),
            (BERT_BASE_UNCASED, FIELD_TEXT_IDS_WITH_SPECIAL_TOKENS_TARGET_MASK),
            (BERT_BASE_UNCASED, FIELD_SYNTAX_HOP_DISTANCE_TO_TARGET),
            (BERT_BASE_UNCASED, FIELD_SYNTAX_DEPENDENCY_MATRIX),
        ]

    def __init__(self, transformer_models: Dict, opt: Namespace):
        super(LCFST_BERT, self).__init__()
        bert = transformer_models[BERT_BASE_UNCASED]
        self.bert_spc = bert
        self.opt = opt
        # self.bert_local = copy.deepcopy(bert)  # Uncomment the line to use dual Bert
        self.bert_local = (
            bert  # Default to use single Bert and reduce memory requirements
        )
        self.dropout = nn.Dropout(self.opt.dropout)
        self.bert_SA = SelfAttention(bert.config, self.opt)
        # Maps each token's row of the dependency matrix to a scalar weight.
        self.dependency_tree_to_attention_vector = nn.Linear(opt.max_seq_len, 1)
        # NOTE(review): dim=0 normalizes across the *batch* dimension; the
        # forward pass compensates by rescaling with seqlen — confirm intent.
        self.softmax = nn.Softmax(dim=0)
        self.mean_pooling_double = PointwiseFeedForward(
            bert.config.hidden_size * 3,
            bert.config.hidden_size,
            bert.config.hidden_size,
        )
        self.linear_single = nn.Linear(bert.config.hidden_size, bert.config.hidden_size)
        # NOTE(review): BertPooler import is commented out at the top of this
        # file, so this stays None and forward() will fail when it is called.
        self.bert_pooler = None  # BertPooler(bert.config)
        self.dense = nn.Linear(bert.config.hidden_size, self.opt.polarities_dim)

    def feature_dynamic_mask(self, text_local_indices, aspect_indices, distances_input):
        """Build a CDM (context dynamic mask) over the local context.

        Returns a (batch, max_seq_len, hidden) tensor of 1-vectors for
        tokens within ``opt.SRD`` of the target and 0-vectors otherwise.
        When ``distances_input`` is given it holds precomputed syntax-hop
        distances; otherwise token-position distance is used as fallback.
        """
        texts = text_local_indices.cpu().numpy()
        asps = aspect_indices.cpu().numpy()
        if distances_input is not None:
            distances_input = distances_input.cpu().numpy()
        mask_len = self.opt.SRD
        masked_text_raw_indices = np.ones(
            (
                text_local_indices.size(0),
                self.opt.max_seq_len,
                self.bert_local.config.hidden_size,
            ),
            dtype=np.float32,
        )
        for text_i, asp_i in zip(range(len(texts)), range(len(asps))):
            if distances_input is None:
                # this should never be reached (dataset provides distances)
                asp_len = np.count_nonzero(asps[asp_i]) - 2
                try:
                    asp_begin = np.argwhere(texts[text_i] == asps[asp_i][1])[0][0]
                except IndexError:
                    # target's first token not found in the text -> keep row unmasked
                    continue
                if asp_begin >= mask_len:
                    mask_begin = asp_begin - mask_len
                else:
                    mask_begin = 0
                # BUGFIX: dtype=np.float (removed in NumPy 1.20) -> np.float32
                for i in range(mask_begin):
                    masked_text_raw_indices[text_i][i] = np.zeros(
                        (self.bert_local.config.hidden_size), dtype=np.float32
                    )
                for j in range(asp_begin + asp_len + mask_len, self.opt.max_seq_len):
                    masked_text_raw_indices[text_i][j] = np.zeros(
                        (self.bert_local.config.hidden_size), dtype=np.float32
                    )
            else:
                distances_i = distances_input[text_i]
                for i, dist in enumerate(distances_i):
                    # zero out every token farther than SRD hops from the target
                    if dist > mask_len:
                        masked_text_raw_indices[text_i][i] = np.zeros(
                            (self.bert_local.config.hidden_size), dtype=np.float32
                        )
        masked_text_raw_indices = torch.from_numpy(masked_text_raw_indices)
        return masked_text_raw_indices.to(self.opt.device)

    def feature_dynamic_weighted(
        self, text_local_indices, aspect_indices, distances_input
    ):
        """Build a CDW (context dynamic weighting) over the local context.

        Like :meth:`feature_dynamic_mask`, but tokens beyond ``opt.SRD``
        are linearly down-weighted by their excess distance instead of
        being zeroed out.
        """
        texts = text_local_indices.cpu().numpy()
        asps = aspect_indices.cpu().numpy()
        if distances_input is not None:
            distances_input = distances_input.cpu().numpy()
        masked_text_raw_indices = np.ones(
            (
                text_local_indices.size(0),
                self.opt.max_seq_len,
                self.bert_local.config.hidden_size,
            ),
            dtype=np.float32,
        )
        mask_len = self.opt.SRD
        for text_i, asp_i in zip(range(len(texts)), range(len(asps))):
            if distances_input is None:
                asp_len = np.count_nonzero(asps[asp_i]) - 2
                try:
                    asp_begin = np.argwhere(texts[text_i] == asps[asp_i][1])[0][0]
                    asp_avg_index = (asp_begin * 2 + asp_len) / 2
                except IndexError:
                    # target's first token not found in the text -> skip row
                    continue
                distances = np.zeros(np.count_nonzero(texts[text_i]), dtype=np.float32)
                for i in range(1, np.count_nonzero(texts[text_i]) - 1):
                    if abs(i - asp_avg_index) + asp_len / 2 > self.opt.SRD:
                        distances[i] = 1 - (
                            abs(i - asp_avg_index) + asp_len / 2 - self.opt.SRD
                        ) / np.count_nonzero(texts[text_i])
                    else:
                        distances[i] = 1
                for i in range(len(distances)):
                    masked_text_raw_indices[text_i][i] = (
                        masked_text_raw_indices[text_i][i] * distances[i]
                    )
            else:
                distances_i = distances_input[text_i]  # distances of batch i-th
                for i, dist in enumerate(distances_i):
                    if dist > mask_len:
                        distances_i[i] = 1 - (dist - mask_len) / np.count_nonzero(
                            texts[text_i]
                        )
                    else:
                        distances_i[i] = 1
                for i in range(len(distances_i)):
                    masked_text_raw_indices[text_i][i] = (
                        masked_text_raw_indices[text_i][i] * distances_i[i]
                    )
        masked_text_raw_indices = torch.from_numpy(masked_text_raw_indices)
        return masked_text_raw_indices.to(self.opt.device)

    def forward(self, inputs):
        """Compute polarity logits of shape (batch, polarities_dim)."""
        text_target_bert_indices = FXDataset.get_input_by_params(
            inputs, BERT_BASE_UNCASED, FIELD_TEXT_THEN_TARGET_IDS_WITH_SPECIAL_TOKENS,
        )
        text_target_bert_segments_ids = FXDataset.get_input_by_params(
            inputs,
            BERT_BASE_UNCASED,
            FIELD_TEXT_THEN_TARGET_IDS_WITH_SPECIAL_TOKENS_SEGMENT_IDS,
        )
        text_local_indices = FXDataset.get_input_by_params(
            inputs, BERT_BASE_UNCASED, FIELD_TEXT_IDS_WITH_SPECIAL_TOKENS
        )
        aspect_indices = FXDataset.get_input_by_params(
            inputs, BERT_BASE_UNCASED, FIELD_TARGET_IDS_WITH_SPECIAL_TOKENS
        )
        syntax_hop_distance_to_target = FXDataset.get_input_by_params(
            inputs, BERT_BASE_UNCASED, FIELD_SYNTAX_HOP_DISTANCE_TO_TARGET
        )
        syntax_dependency_matrix = FXDataset.get_input_by_params(
            inputs, BERT_BASE_UNCASED, FIELD_SYNTAX_DEPENDENCY_MATRIX
        )
        # apply bert: global (text+target) and local (text only) encodings
        bert_spc_out, _, _ = self.bert_spc(
            text_target_bert_indices, text_target_bert_segments_ids
        )
        bert_local_out, _, _ = self.bert_local(text_local_indices)
        # weight the local encoding by syntactic distance (LCFS masking,
        # better than position-based LCF masking)
        if self.opt.local_context_focus == "cdm":
            masked_local_text_vec = self.feature_dynamic_mask(
                text_local_indices, aspect_indices, syntax_hop_distance_to_target
            )
            bert_local_out_weighted_syntax_distance = torch.mul(
                bert_local_out, masked_local_text_vec
            )
        elif self.opt.local_context_focus == "cdw":
            weighted_text_local_features = self.feature_dynamic_weighted(
                text_local_indices, aspect_indices, syntax_hop_distance_to_target
            )
            bert_local_out_weighted_syntax_distance = torch.mul(
                bert_local_out, weighted_text_local_features
            )
        else:
            raise NotImplementedError
        # second weighting: learned attention vector over the dependency matrix
        dependency_weight_vector = self.dependency_tree_to_attention_vector(
            syntax_dependency_matrix
        )
        # Normalize so this weighting has comparable magnitude to the other
        # stacked components (cf https://stats.stackexchange.com/questions/481798).
        dependency_weight_vector_normalized = self.softmax(dependency_weight_vector)
        # softmax sums to 1 per item, while the other masks use per-token
        # weights in [0, 1]; rescale by seqlen to match their "power".
        seqlen = text_target_bert_indices.shape[1]
        dependency_weight_vector_normalized = (
            dependency_weight_vector_normalized * seqlen
        )
        # broadcast the per-token weight scalar over the hidden dimension
        dependency_weight_vector_normalized = dependency_weight_vector_normalized.repeat(
            1, 1, bert_local_out.shape[2]
        )
        bert_local_out_weighted_dependency_tree = torch.mul(
            bert_local_out, dependency_weight_vector_normalized
        )
        # concatenate the three views along the hidden dimension
        out_cat = torch.cat(
            (
                bert_local_out_weighted_syntax_distance,
                bert_local_out_weighted_dependency_tree,
                bert_spc_out,
            ),
            dim=-1,
        )
        mean_pool = self.mean_pooling_double(out_cat)
        self_attention_out = self.bert_SA(mean_pool)
        pooled_out = self.bert_pooler(self_attention_out)
        dense_out = self.dense(pooled_out)
        return dense_out
| 43.801948 | 117 | 0.622193 |
944d065a74361bbcc40941f34a2a8a5e7c779a6c | 20,094 | py | Python | tools/maya/quat.py | highfestiva/life | b05b592502d72980ab55e13e84330b74a966f377 | [
"BSD-3-Clause"
] | 9 | 2019-09-03T18:33:31.000Z | 2022-02-04T04:00:02.000Z | tools/maya/quat.py | highfestiva/life | b05b592502d72980ab55e13e84330b74a966f377 | [
"BSD-3-Clause"
] | null | null | null | tools/maya/quat.py | highfestiva/life | b05b592502d72980ab55e13e84330b74a966f377 | [
"BSD-3-Clause"
] | null | null | null | # ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the Python Computer Graphics Kit.
#
# The Initial Developer of the Original Code is Matthias Baas.
# Portions created by the Initial Developer are Copyright (C) 2004
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
# $Id: quat.py,v 1.1 2005/08/15 15:39:48 mbaas Exp $
import types, math
from vec3 import vec3 as _vec3
from vec4 import vec4 as _vec4
from mat3 import mat3 as _mat3
from mat4 import mat4 as _mat4
# Comparison threshold
_epsilon = 1E-12
# quat
class quat:
"""Quaternion class.
Quaternions are an extension to complex numbers and can be used
to store rotations. They are composed of four floats which can be
seen as an angle and an axis of rotation.
"""
def __init__(self, *args):
"""Constructor.
0 arguments: zeroes
1 float argument: w component, x,y,z = (0,0,0)
1 quat argument: Make a copy
1 mat3 argument: Initialize by rotation matrix
1 mat4 argument: Initialize by rotation matrix
2 arguments: angle & axis (doesn't have to be of unit length)
4 arguments: components w,x,y,z
"""
# 0 arguments
if len(args)==0:
self.w, self.x, self.y, self.z = (0.0, 0.0, 0.0, 0.0)
# 1 arguments
elif len(args)==1:
T = type(args[0])
# Scalar
if T==float or T==int or T==int:
self.w = float(args[0])
self.x, self.y, self.z = (0.0, 0.0, 0.0)
# quat
elif isinstance(args[0], quat):
q=args[0]
self.w = q.w
self.x = q.x
self.y = q.y
self.z = q.z
# mat3 or mat4
elif isinstance(args[0], _mat3) or isinstance(args[0], _mat4):
self.fromMat(args[0])
# List or Tuple
elif T==list or T==tuple:
dummy = quat(*args[0])
self.w = dummy.w
self.x = dummy.x
self.y = dummy.y
self.z = dummy.z
# String
elif T==bytes:
s=args[0].replace(","," ").replace(" "," ").strip().split(" ")
if s==[""]:
s=[]
f=[float(x) for x in s]
dummy = quat(f)
self.w = dummy.w
self.x = dummy.x
self.y = dummy.y
self.z = dummy.z
else:
raise TypeError("quat() arg can't be converted to quat")
# 2 arguments (angle & axis)
elif len(args)==2:
angle, axis = args
self.fromAngleAxis(angle,axis)
# 4 arguments
elif len(args)==4:
w,x,y,z = args
self.w = float(w)
self.x = float(x)
self.y = float(y)
self.z = float(z)
else:
raise TypeError("quat() arg can't be converted to quat")
def __repr__(self):
return 'quat('+repr(self.w)+', '+repr(self.x)+', '+repr(self.y)+', '+repr(self.z)+')'
def __str__(self):
fmt="%1.4f"
return '('+fmt%self.w+', '+fmt%self.x+', '+fmt%self.y+', '+fmt%self.z+')'
def __eq__(self, other):
"""== operator
>>> a=quat(1,2,3,4)
>>> b=quat(6,7,8,9)
>>> c=quat(6,7,8,9)
>>> print a==b
0
>>> print b==c
1
>>> print a==None
0
"""
global _epsilon
if isinstance(other, quat):
return (abs(self.x-other.x)<=_epsilon and
abs(self.y-other.y)<=_epsilon and
abs(self.z-other.z)<=_epsilon and
abs(self.w-other.w)<=_epsilon)
else:
return False
def __ne__(self, other):
"""!= operator
>>> a=quat(1,2,3,4)
>>> b=quat(6,7,8,9)
>>> c=quat(6,7,8,9)
>>> print a!=b
1
>>> print b!=c
0
>>> print a!=None
1
"""
return not (self==other)
def __add__(self, other):
"""Addition.
>>> q=quat(0.9689, 0.2160, 0.1080, 0.0540)
>>> print q+q
(1.9378, 0.4320, 0.2160, 0.1080)
"""
if isinstance(other, quat):
return quat(self.w+other.w, self.x+other.x,
self.y+other.y, self.z+other.z)
else:
raise TypeError("unsupported operand type for +")
def __sub__(self, other):
"""Subtraction.
>>> q=quat(0.9689, 0.2160, 0.1080, 0.0540)
>>> print q-q
(0.0000, 0.0000, 0.0000, 0.0000)
"""
if isinstance(other, quat):
return quat(self.w-other.w, self.x-other.x,
self.y-other.y, self.z-other.z)
else:
raise TypeError("unsupported operand type for +")
def __mul__(self, other):
"""Multiplication.
>>> q=quat(0.9689, 0.2160, 0.1080, 0.0540)
>>> print q*2.0
(1.9378, 0.4320, 0.2160, 0.1080)
>>> print 2.0*q
(1.9378, 0.4320, 0.2160, 0.1080)
>>> print q*q
(0.8775, 0.4186, 0.2093, 0.1046)
"""
T = type(other)
# quat*scalar
if T==float or T==int or T==int:
return quat(self.w*other, self.x*other, self.y*other, self.z*other)
# quat*quat
if isinstance(other, quat):
w1,x1,y1,z1 = self.w,self.x,self.y,self.z
w2,x2,y2,z2 = other.w,other.x,other.y,other.z
return quat(w1*w2-x1*x2-y1*y2-z1*z2,
w1*x2+x1*w2+y1*z2-z1*y2,
w1*y2+y1*w2-x1*z2+z1*x2,
w1*z2+z1*w2+x1*y2-y1*x2)
# quat*vec3
if isinstance(other, _vec3) or isinstance(other, _vec4):
return self.rotateVec(other)
# unsupported
else:
# Try to delegate the operation to the other operand
if getattr(other,"__rmul__",None)!=None:
return other.__rmul__(self)
else:
raise TypeError("unsupported operand type for *")
__rmul__ = __mul__
def __div__(self, other):
"""Division.
>>> q=quat(0.9689, 0.2160, 0.1080, 0.0540)
>>> print q/2.0
(0.4844, 0.1080, 0.0540, 0.0270)
"""
T = type(other)
# quat/scalar
if T==float or T==int or T==int:
return quat(self.w/other, self.x/other, self.y/other, self.z/other)
# unsupported
else:
raise TypeError("unsupported operand type for /")
__truediv__ = __div__
def __pow__(self, other):
"""Return self**q."""
# if modulo!=None:
# raise TypeError, "unsupported operation"
q = quat(other)
return (q*self.log()).exp()
def __neg__(self):
"""Negation.
>>> q=quat(0.9689, 0.2160, 0.1080, 0.0540)
>>> print -q
(-0.9689, -0.2160, -0.1080, -0.0540)
"""
return quat(-self.w, -self.x, -self.y, -self.z)
def __pos__(self):
"""
>>> q=quat(0.9689, 0.2160, 0.1080, 0.0540)
>>> print +q
(0.9689, 0.2160, 0.1080, 0.0540)
"""
return quat(+self.w, +self.x, +self.y, +self.z)
def __abs__(self):
"""Return magnitude.
>>> q=quat(0.9689, 0.2160, 0.1080, 0.0540)
>>> print round(abs(q),5)
1.0
"""
return math.sqrt(self.w*self.w + self.x*self.x +
self.y*self.y + self.z*self.z)
def __getitem__(self, key):
T=type(key)
if T==slice:
start, stop, step = key.start, key.stop, key.step
start = 0 if start==None else start
stop = 4 if stop ==None else stop
step = 1 if step ==None else step
return list(map(lambda x: self.__getitem__(x), range(start, stop, step)))
if T!=int and T!=int:
raise TypeError("index must be integer or slice")
if key==0: return self.w
elif key==1: return self.x
elif key==2: return self.y
elif key==3: return self.z
else:
raise IndexError("index out of range")
def totuple(self):
return (self.w, self.x, self.y, self.z)
def conjugate(self):
"""Return conjugate.
>>> q=quat(0.9689, 0.2160, 0.1080, 0.0540)
>>> print q.conjugate()
(0.9689, -0.2160, -0.1080, -0.0540)
"""
return quat(self.w, -self.x, -self.y, -self.z)
def normalize(self):
"""Return normalized quaternion.
>>> q=quat(0.9, 0.5, 0.2, 0.3)
>>> q=q.normalize()
>>> print q
(0.8250, 0.4583, 0.1833, 0.2750)
>>> print abs(q)
1.0
"""
nlen = 1.0/abs(self)
return quat(self.w*nlen, self.x*nlen, self.y*nlen, self.z*nlen)
def inverse(self):
"""Return inverse.
>>> q=quat(0.9, 0.5, 0.2, 0.3)
>>> print q.inverse()
(0.7563, -0.4202, -0.1681, -0.2521)
"""
len_2 = self.w*self.w + self.x*self.x + self.y*self.y + self.z*self.z
return self.conjugate()/len_2
def toAngleAxis(self):
"""Return angle (in radians) and rotation axis.
>>> q=quat(0.9, 0.5, 0.2, 0.3)
>>> angle, axis = q.toAngleAxis()
>>> print round(angle,4)
1.2011
>>> print axis
(0.8111, 0.3244, 0.4867)
"""
nself = self.normalize()
# Clamp nself.w (since the quat has to be normalized it should
# be between -1 and 1 anyway, but it might be slightly off due
# to numerical inaccuracies)
w = max(min(nself.w,1.0),-1.0)
w = math.acos(w)
s = math.sin(w)
if s<1E-12:
return (0.0, _vec3(0.0,0.0,0.0))
return (2.0*w, _vec3(nself.x/s, nself.y/s, nself.z/s))
def fromAngleAxis(self, angle, axis):
"""Initialize self from an angle (in radians) and an axis and returns self."""
if axis==_vec3(0):
self.w = 1.0
self.x = 0.0
self.y = 0.0
self.z = 0.0
else:
angle/=2.0
self.w = math.cos(angle)
x, y, z = axis
s = math.sin(angle)/math.sqrt(x*x+y*y+z*z)
self.x = x*s
self.y = y*s
self.z = z*s
dummy = self.normalize()
self.w = dummy.w
self.x = dummy.x
self.y = dummy.y
self.z = dummy.z
return self
def toMat3(self):
"""Return rotation matrix as mat3."""
x,y,z,w = self.x, self.y, self.z, self.w
xx = 2.0*x*x
yy = 2.0*y*y
zz = 2.0*z*z
xy = 2.0*x*y
zw = 2.0*z*w
xz = 2.0*x*z
yw = 2.0*y*w
yz = 2.0*y*z
xw = 2.0*x*w
return _mat3(1.0-yy-zz, xy-zw, xz+yw,
xy+zw, 1.0-xx-zz, yz-xw,
xz-yw, yz+xw, 1.0-xx-yy)
def toMat4(self):
"""Return rotation matrix as mat4."""
x,y,z,w = self.x, self.y, self.z, self.w
xx = 2.0*x*x
yy = 2.0*y*y
zz = 2.0*z*z
xy = 2.0*x*y
zw = 2.0*z*w
xz = 2.0*x*z
yw = 2.0*y*w
yz = 2.0*y*z
xw = 2.0*x*w
return _mat4(1.0-yy-zz, xy-zw, xz+yw, 0.0,
xy+zw, 1.0-xx-zz, yz-xw, 0.0,
xz-yw, yz+xw, 1.0-xx-yy, 0.0,
0.0, 0.0, 0.0, 1.0)
def fromMat(self, m):
try:
return self._fromMat(m)
except:
pass
bestqlist = []
bestcnt = 0
for exponent in range(2, 7):
qlist = []
dist = 10**-exponent
for axis in [(-1,0,0),(+1,0,0),(0,-1,0),(0,1,0),(0,0,-1),(0,0,1)]:
rot = m * _mat4.rotation(dist, axis)
try:
qlist += [self._fromMat(rot)]
except:
pass
if len(qlist) >= bestcnt:
bestcnt = len(qlist)
bestqlist += qlist
else:
break
qlist = bestqlist
r = quat(0,0,0,0)
for q in qlist:
r += q
r = r.normalize()
#print("Got a matrix-2-quaternion lerp of", r, "using", len(qlist), "checks and dist", dist)
self.w, self.x, self.y, self.z = r[:]
return self
def _fromMat(self, m):
"""Initialize self from either a mat3 or mat4 and returns self."""
global _epsilon
# Jonte: start out by fetching the rotation matrix' rotation vector.
angle = 0
cosa = (m[0,0] + m[1,1] + m[2,2] - 1.0) * 0.5
try:
angle = math.acos(cosa)
except ValueError as e:
#print("Got an matrix-to-quaternion error:", e)
#print(m)
raise
#print("Angle is", angle)
v = _vec3(m[2,1] - m[1,2],
m[0,2] - m[2,0],
m[1,0] - m[0,1])
#print("Vector is", v)
if v.length() < _epsilon:
lEpsilonOne = 1.0 - _epsilon
if m[0,0] >= lEpsilonOne:
v.x = 1.0
v.y = 0.0
v.z = 0.0
elif m[1,1] >= lEpsilonOne:
v.x = 0.0
v.y = 1.0
v.z = 0.0
elif m[2,2] >= lEpsilonOne:
v.x = 0.0
v.y = 0.0
v.z = 1.0
else:
raise Exception("Uh-uh! Bad matrix!")
# Now set the vector.
self.fromAngleAxis(angle, v)
## d1,d2,d3 = m[0,0],m[1,1],m[2,2]
## t = d1+d2+d3+1.0
## if t>_epsilon:
## #print("Probable OK1!")
## s = 0.5/math.sqrt(t)
## self.w = 0.25/s
## self.x = (m[2,1]-m[1,2])*s
## self.y = (m[0,2]-m[2,0])*s
## self.z = (m[1,0]-m[0,1])*s
## else:
## ad1 = d1
## ad2 = d2
## ad3 = d3
## if ad1>=ad2 and ad1>=ad3:
## print("Probable OK2!")
## s = math.sqrt(1.0+d1-d2-d3)*2.0
## self.x = 0.5/s
## self.y = (m[0,1]+m[1,0])/s
## self.z = (m[0,2]+m[2,0])/s
## self.w = (m[1,2]+m[2,1])/s
## elif ad2>=ad1 and ad2>=ad3:
## s = math.sqrt(1.0+d2-d1-d3)*2.0
## print("Probable failure!!! s is", s)
## self.x = (m[0,1]+m[1,0])/s
## self.y = 0.5/s
## self.z = (m[1,2]+m[2,1])/s
## self.w = (m[0,2]+m[2,0])/s
## else:
## print("Probable OK3!")
## s = math.sqrt(1.0+d3-d1-d2)*2.0
## self.x = (m[0,2]+m[2,0])/s
## self.y = (m[1,2]+m[2,1])/s
## self.z = 0.5/s
## self.w = (m[0,1]+m[1,0])/s
return self
def dot(self, b):
"""Return the dot product of self and b."""
return self.w*b.w + self.x*b.x + self.y*b.y + self.z*b.z
    def log(self):
        """Return the natural logarithm of self.

        Raises ValueError("math domain error") whenever the logarithm is
        undefined for this quaternion (see the guarded branches below).
        """
        global _epsilon
        # Norm of the imaginary (vector) part.
        b = math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z)
        res = quat()
        if abs(b)<=_epsilon:
            # Purely real quaternion: log reduces to the scalar log of w,
            # defined only for positive w.
            res.x = 0.0
            res.y = 0.0
            res.z = 0.0
            if self.w<=_epsilon:
                raise ValueError("math domain error")
            res.w = math.log(self.w)
        else:
            # t is the angle of the point (w, |v|); the vector part is
            # rescaled by t/|v|.
            t = math.atan2(b, self.w)
            f = t/b
            res.x = f*self.x
            res.y = f*self.y
            res.z = f*self.z
            ct = math.cos(t)
            if abs(ct)<=_epsilon:
                raise ValueError("math domain error")
            # r is the magnitude recovered from w = |q|*cos(t).
            r = self.w/ct
            if r<=_epsilon:
                raise ValueError("math domain error")
            res.w = math.log(r)
        return res
def exp(self):
"""Return the exponential of self."""
global _epsilon
b = math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z)
res = quat()
if abs(b)<=_epsilon:
res.x = 0.0
res.y = 0.0
res.z = 0.0
res.w = math.exp(self.w)
else:
f = math.sin(b)/b
res.x = f*self.x
res.y = f*self.y
res.z = f*self.z
res.w = math.exp(self.w)*math.cos(b)
return res
    def rotateVec(self, v):
        """Return the rotated vector v.

        The quaternion must be a unit quaternion.
        This operation is equivalent to turning v into a quat, computing
        self*v*self.conjugate() and turning the result back into a vec3.
        """
        u = _vec3(v[:3])
        # Precompute all pairwise component products once.
        ww = self.w*self.w
        xx = self.x*self.x
        yy = self.y*self.y
        zz = self.z*self.z
        wx = self.w*self.x
        wy = self.w*self.y
        wz = self.w*self.z
        xy = self.x*self.y
        xz = self.x*self.z
        yz = self.y*self.z
        # Expanded form of the sandwich product q*v*conj(q); avoids
        # constructing intermediate quaternions.
        u = (ww*u.x + xx*u.x - yy*u.x - zz*u.x + 2*((xy-wz)*u.y + (xz+wy)*u.z),
             ww*u.y - xx*u.y + yy*u.y - zz*u.y + 2*((xy+wz)*u.x + (yz-wx)*u.z),
             ww*u.z - xx*u.z - yy*u.z + zz*u.z + 2*((xz-wy)*u.x + (yz+wx)*u.y))
        # Preserve the caller's vector type (vec4 in -> vec4 out).
        if isinstance(v, _vec4):
            return _vec4(u)
        return _vec3(u)
def slerp(t, q0, q1, shortest=True):
    """Spherical linear interpolation between two quaternions.

    The return value is an interpolation between q0 and q1. For t=0.0
    the return value equals q0, for t=1.0 it equals q1.
    q0 and q1 must be unit quaternions.
    If shortest is True the interpolation is always done along the
    shortest path.
    """
    global _epsilon
    ca = q0.dot(q1)
    if shortest and ca<0:
        ca = -ca
        neg_q1 = True
    else:
        neg_q1 = False
    # Clamp into the valid acos domain: rounding in dot() can push |ca|
    # slightly past 1.0 for (nearly) identical unit quaternions, which
    # would otherwise raise "math domain error".
    ca = max(-1.0, min(1.0, ca))
    o = math.acos(ca)
    so = math.sin(o)
    if (abs(so)<=_epsilon):
        # Quaternions (anti)parallel: interpolation degenerates to q0.
        return quat(q0)
    a = math.sin(o*(1.0-t)) / so
    b = math.sin(o*t) / so
    if neg_q1:
        return q0*a - q1*b
    else:
        return q0*a + q1*b
def squad(t, a, b, c, d):
    """Spherical cubic interpolation.

    Blends between the outer quaternions a and d, using b and c as inner
    control points, via nested slerps with the 2*t*(1-t) blend factor.
    """
    return slerp(2*t*(1.0-t), slerp(t,a,d), slerp(t,b,c))
######################################################################
def _test():
    """Run this module's doctests and report the failure count."""
    import doctest, quat
    num_failed, num_run = doctest.testmod(quat)
    print("%d/%d failed" % (num_failed, num_run))
if __name__=="__main__":
    # Run the doctest suite when executed as a script.
    _test()

# Manual smoke-test snippets kept for reference (Python 2 era):
#    q = quat(1.5,_vec3(1,0,0))
#    print q
#    m=q.toMat4().getMat3()
#    print m
#    w=quat(m)
#    print w
| 30.399395 | 100 | 0.476461 |
c966de4edb25ae94084128faaf54026c236217ed | 3,295 | py | Python | tools/ivwpy/colorprint.py | ImagiaViz/inviwo | a00bb6b0551bc1cf26dc0366c827c1a557a9603d | [
"BSD-2-Clause"
] | 1 | 2021-06-21T11:56:55.000Z | 2021-06-21T11:56:55.000Z | tools/ivwpy/colorprint.py | ImagiaViz/inviwo | a00bb6b0551bc1cf26dc0366c827c1a557a9603d | [
"BSD-2-Clause"
] | null | null | null | tools/ivwpy/colorprint.py | ImagiaViz/inviwo | a00bb6b0551bc1cf26dc0366c827c1a557a9603d | [
"BSD-2-Clause"
] | null | null | null | #*********************************************************************************
#
# Inviwo - Interactive Visualization Workshop
#
# Copyright (c) 2013-2020 Inviwo Foundation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#*********************************************************************************
from enum import Enum, unique
@unique
class Color(Enum):
    """Terminal colors understood by cprint and the print_* helpers."""

    # Standard foreground colors.
    black = 0
    blue = 1
    cyan = 2
    green = 3
    magenta = 4
    red = 5
    white = 6
    yellow = 7
    # Bright ("light") variants.
    light_black = 8
    light_blue = 9
    light_cyan = 10
    light_green = 11
    light_magenta = 12
    light_red = 13
    light_white = 14
    light_yellow = 15
try:
    # colorama gives cross-platform ANSI color support; optional dependency.
    import colorama
    colorama.init()

    def cprint(color, mess, **kwargs):
        """Print ``mess`` in bright ``color`` (a Color member); kwargs go to print()."""
        # Map our Color enum onto colorama foreground codes.
        colors = {
            Color.black : colorama.Fore.BLACK,
            Color.blue : colorama.Fore.BLUE,
            Color.cyan : colorama.Fore.CYAN,
            Color.green : colorama.Fore.GREEN,
            Color.magenta : colorama.Fore.MAGENTA,
            Color.red : colorama.Fore.RED,
            Color.white : colorama.Fore.WHITE,
            Color.yellow : colorama.Fore.YELLOW,
            Color.light_black : colorama.Fore.LIGHTBLACK_EX,
            Color.light_blue : colorama.Fore.LIGHTBLUE_EX,
            Color.light_cyan : colorama.Fore.LIGHTCYAN_EX,
            Color.light_green : colorama.Fore.LIGHTGREEN_EX,
            Color.light_magenta : colorama.Fore.LIGHTMAGENTA_EX,
            Color.light_red : colorama.Fore.LIGHTRED_EX,
            Color.light_white : colorama.Fore.LIGHTWHITE_EX,
            Color.light_yellow : colorama.Fore.LIGHTYELLOW_EX
        }
        print(colors[color] + colorama.Style.BRIGHT + str(mess) + colorama.Style.RESET_ALL, **kwargs)
except ImportError:
    # colorama missing: fall back to plain, uncolored output.
    def cprint(color, mess, **kwargs):
        print(str(mess), **kwargs)
def print_text(mess, **kwargs):
    """Plain, uncolored print; kwargs are forwarded to print()."""
    print(mess, **kwargs)

def print_error(mess, **kwargs):
    """Print in red (errors)."""
    cprint(Color.red, mess, **kwargs)

def print_warn(mess, **kwargs):
    """Print in yellow (warnings)."""
    cprint(Color.yellow, mess, **kwargs)

def print_good(mess, **kwargs):
    """Print in green (success)."""
    cprint(Color.green, mess, **kwargs)

def print_info(mess, **kwargs):
    """Print in cyan (informational)."""
    cprint(Color.cyan, mess, **kwargs)

def print_pair(a,b, width=15):
    """Print "a : b" with ``a`` right-aligned in ``width`` columns, label in cyan."""
    print_info("{:>{width}} : ".format(a, width=width), end="")
    print("{:<}".format(b))
e11e9243855df943aa148fa8df8394e45b59c3f0 | 2,514 | py | Python | src/quanguru/QuantumToolbox/IPR.py | Qfabiolous/QuanGuru | 285ca44ae857cc61337f73ea2eb600f485a09e32 | [
"BSD-3-Clause"
] | null | null | null | src/quanguru/QuantumToolbox/IPR.py | Qfabiolous/QuanGuru | 285ca44ae857cc61337f73ea2eb600f485a09e32 | [
"BSD-3-Clause"
] | null | null | null | src/quanguru/QuantumToolbox/IPR.py | Qfabiolous/QuanGuru | 285ca44ae857cc61337f73ea2eb600f485a09e32 | [
"BSD-3-Clause"
] | null | null | null | r"""
Contains functions to calculate delocalisation measure (Inverse participation ratio, shortly IPR) in various cases.
.. currentmodule:: quanguru.QuantumToolbox.IPR
Functions
---------
.. autosummary::
iprKet
iprKetNB
"""
import numpy as np # type: ignore
from scipy.sparse import spmatrix # type: ignore
from .functions import fidelityPure
from .customTypes import Matrix, matrixList
def iprKet(basis: matrixList, ket: Matrix) -> float:
    r"""
    Calculates inverse participation ratio :math:`1/(\sum_{i}|c_{i,k}|^{4})` of a `ket`
    :math:`|k\rangle = \sum_{i}c_{i,k}|i\rangle` in a given basis :math:`\{|i\rangle\}`. The complex probability
    amplitudes satisfy :math:`\sum_{i}|c_{i,k}|^{2} = 1`, therefore IPR = 1 is perfectly localised, and
    IPR = :math:`1/\mathcal{D}` is uniformly localised in :math:`\mathcal{D}` dimensional space.

    Parameters
    ----------
    basis : matrixList
        a complete basis
    ket : Matrix
        a ket state

    Returns
    -------
    float
        inverse participation ratio

    Examples
    --------
    >>> completeBasis = completeBasis(dimension=2)
    >>> state0 = normalise(0.2*basis(2, 0) + 0.8*basis(2,1))
    >>> iprKet(completeBasis, state0)
    1.1245136186770428
    >>> state1 = normalise(0.5*basis(2, 0) + 0.5*basis(2,1))
    >>> iprKet(completeBasis, state1)
    2.000000000000001
    >>> state2 = basis(2,1)
    >>> iprKet(completeBasis, state2)
    1.0
    """
    # Generator expression avoids materialising the per-basis fidelity list.
    return 1/sum(fidelityPure(basKet, ket)**2 for basKet in basis)  # type: ignore
def iprKetNB(ket: Matrix) -> float:
    r"""
    Calculates the IPR :math:`1/\sum_{i}|c_{i,k}|^{4}` of a ket :math:`|k\rangle := \begin{bmatrix} c_{1,k} \\ \vdots \\
    c_{i,k}
    \\ \vdots \\c_{\mathcal{D},k}
    \end{bmatrix}_{\mathcal{D}\times 1}` by using each entry :math:`c_{i,k}` as a complex amplitude.

    Parameters
    ----------
    ket : Matrix
        a ket state

    Returns
    -------
    float
        inverse participation ratio

    Examples
    --------
    >>> state0 = normalise(0.2*basis(2, 0) + 0.8*basis(2,1))
    >>> iprKetNB(state0)
    1.1245136186770428
    >>> state1 = normalise(0.5*basis(2, 0) + 0.5*basis(2,1))
    >>> iprKetNB(state1)
    2.000000000000001
    >>> state2 = basis(2,1)
    >>> iprKetNB(state2)
    1.0
    >>> state3 = basis(2,0)
    >>> iprKetNB(state3)
    1.0
    """
    if isinstance(ket, spmatrix):
        # .toarray() replaces the deprecated/removed spmatrix.A alias.
        ket = ket.toarray()
    return 1/np.sum(np.power((np.abs(ket.flatten())), 4))
| 26.1875 | 120 | 0.59467 |
fa3f0aa08345d60440fe5e381bc0ed24f1b51860 | 917 | py | Python | manage.py | ninetor/gateis_tapin | 09c6a46c036166fd9e865dad324361ef8696700f | [
"MIT"
] | 1 | 2018-12-03T12:08:31.000Z | 2018-12-03T12:08:31.000Z | manage.py | myneworder/alldex_tapin | cbce81e72b6c917d397ea494ffdf2e299d45a2f3 | [
"MIT"
] | null | null | null | manage.py | myneworder/alldex_tapin | cbce81e72b6c917d397ea494ffdf2e299d45a2f3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import sys
from flask import Flask
from flask_script import Manager, Command
from app import app, db
from app import config
import threading
import os
from os import path
manager = Manager(app)
@manager.command
def install():
    """Create the database directory (if any is configured) and all tables."""
    database_dir = path.dirname(config.database)
    if database_dir:
        # exist_ok avoids the check-then-create race of exists()+makedirs().
        os.makedirs(database_dir, exist_ok=True)
    db.create_all()
@manager.command
def run():
    """Serve the Flask app with default settings."""
    app.run()

@manager.command
def start():
    """Serve the Flask app with debug mode (auto-reload, tracebacks)."""
    app.run(debug=True)

@manager.command
def donations(start=None, end=None):
    """Run the donations worker over an optional range.

    start/end semantics are defined by worker_donations.run — presumably
    a date or block range; confirm against that module.
    """
    import worker_donations
    worker_donations.run(start, end)
@manager.command
def testmail():
    """Send a "Hello" test message to the configured admin addresses."""
    from flask_mail import Message
    from app import mail
    msg = Message("Hello",
                  sender=config.mail_from,
                  recipients=config.admins)
    mail.send(msg)

if __name__ == '__main__':
    # Dispatch to the flask-script command given on the command line.
    manager.run()
| 17.634615 | 63 | 0.692475 |
5a8e47fefbc5208cc0fb78d76048a729488fea63 | 450 | py | Python | users_app/migrations/0002_auto_20200607_1319.py | habibaudu/Elite | 3f48a7cd2f9058c20aea6d3a4d626f7ccac84072 | [
"MIT"
] | null | null | null | users_app/migrations/0002_auto_20200607_1319.py | habibaudu/Elite | 3f48a7cd2f9058c20aea6d3a4d626f7ccac84072 | [
"MIT"
] | 1 | 2021-03-19T05:13:22.000Z | 2021-03-19T05:13:22.000Z | users_app/migrations/0002_auto_20200607_1319.py | habibaudu/Elite | 3f48a7cd2f9058c20aea6d3a4d626f7ccac84072 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.6 on 2020-06-07 13:19
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: restricts Invent.state to the active/deleted choice set.

    dependencies = [
        ('users_app', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='invent',
            name='state',
            # choices constrain forms/admin; the column stays a plain CharField.
            field=models.CharField(choices=[('active', 'active'), ('deleted', 'deleted')], default='active', max_length=50),
        ),
    ]
| 23.684211 | 124 | 0.593333 |
5c07e5a13d9a01d41be1c48d2396baca2a71e68b | 15,514 | py | Python | superset/db_engine_specs/hive.py | chqbook/incubator-superset | de6d96343252e56589856d12e22a08be203b856a | [
"Apache-2.0"
] | 1 | 2019-09-10T02:48:59.000Z | 2019-09-10T02:48:59.000Z | superset/db_engine_specs/hive.py | chqbook/incubator-superset | de6d96343252e56589856d12e22a08be203b856a | [
"Apache-2.0"
] | null | null | null | superset/db_engine_specs/hive.py | chqbook/incubator-superset | de6d96343252e56589856d12e22a08be203b856a | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
from datetime import datetime
import logging
import os
import re
import time
from typing import Any, Dict, List, Optional, Tuple
from urllib import parse
from sqlalchemy import Column
from sqlalchemy.engine import create_engine
from sqlalchemy.engine.base import Engine
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.engine.url import make_url
from sqlalchemy.sql.expression import ColumnClause, Select
from werkzeug.utils import secure_filename
from superset import app, conf
from superset.db_engine_specs.base import BaseEngineSpec
from superset.db_engine_specs.presto import PrestoEngineSpec
from superset.utils import core as utils
QueryStatus = utils.QueryStatus
config = app.config
tracking_url_trans = conf.get("TRACKING_URL_TRANSFORMER")
hive_poll_interval = conf.get("HIVE_POLL_INTERVAL")
class HiveEngineSpec(PrestoEngineSpec):
    """Reuses PrestoEngineSpec functionality."""

    engine = "hive"
    max_column_name_length = 767

    # Patterns over Hive job log lines, used by progress() below.
    # Scoping regex at class level to avoid recompiling
    # 17/02/07 19:36:38 INFO ql.Driver: Total jobs = 5
    jobs_stats_r = re.compile(r".*INFO.*Total jobs = (?P<max_jobs>[0-9]+)")
    # 17/02/07 19:37:08 INFO ql.Driver: Launching Job 2 out of 5
    launching_job_r = re.compile(
        ".*INFO.*Launching Job (?P<job_number>[0-9]+) out of " "(?P<max_jobs>[0-9]+)"
    )
    # 17/02/07 19:36:58 INFO exec.Task: 2017-02-07 19:36:58,152 Stage-18
    # map = 0%,  reduce = 0%
    stage_progress_r = re.compile(
        r".*INFO.*Stage-(?P<stage_number>[0-9]+).*"
        r"map = (?P<map_progress>[0-9]+)%.*"
        r"reduce = (?P<reduce_progress>[0-9]+)%.*"
    )
    @classmethod
    def patch(cls):
        """Monkey-patch pyhive with Superset's forked Thrift client and log fetch."""
        from pyhive import hive  # pylint: disable=no-name-in-module
        from superset.db_engines import hive as patched_hive
        from TCLIService import (
            constants as patched_constants,
            ttypes as patched_ttypes,
            TCLIService as patched_TCLIService,
        )

        # Swap in the patched Thrift service/constants/types, plus a
        # fetch_logs implementation that exposes Hive job logs.
        hive.TCLIService = patched_TCLIService
        hive.constants = patched_constants
        hive.ttypes = patched_ttypes
        hive.Cursor.fetch_logs = patched_hive.fetch_logs
    @classmethod
    def get_all_datasource_names(
        cls, db, datasource_type: str
    ) -> List[utils.DatasourceName]:
        # Use the generic BaseEngineSpec implementation, bypassing Presto's.
        return BaseEngineSpec.get_all_datasource_names(db, datasource_type)
    @classmethod
    def fetch_data(cls, cursor, limit: int) -> List[Tuple]:
        """Fetch up to ``limit`` rows, raising if the Hive operation failed.

        Returns [] when the cursor has no result set (pyhive raises
        ProgrammingError in that case).
        """
        import pyhive
        from TCLIService import ttypes

        state = cursor.poll()
        if state.operationState == ttypes.TOperationState.ERROR_STATE:
            raise Exception("Query error", state.errorMessage)
        try:
            return super(HiveEngineSpec, cls).fetch_data(cursor, limit)
        except pyhive.exc.ProgrammingError:
            return []
    @classmethod
    def create_table_from_csv(cls, form, table):
        """Uploads a csv file and creates a superset datasource in Hive.

        The CSV schema is inferred with tableschema, the file is staged on
        S3, and an external Hive TEXTFILE table is created over it.
        """

        def convert_to_hive_type(col_type):
            """maps tableschema's types to hive types"""
            tableschema_to_hive_types = {
                "boolean": "BOOLEAN",
                "integer": "INT",
                "number": "DOUBLE",
                "string": "STRING",
            }
            # Unknown types fall back to STRING.
            return tableschema_to_hive_types.get(col_type, "STRING")

        bucket_path = config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"]

        if not bucket_path:
            logging.info("No upload bucket specified")
            raise Exception(
                "No upload bucket specified. You can specify one in the config file."
            )

        table_name = form.name.data
        schema_name = form.schema.data

        # Resolve the fully qualified table name; a forced upload namespace
        # excludes any user-supplied schema.
        if config.get("UPLOADED_CSV_HIVE_NAMESPACE"):
            if "." in table_name or schema_name:
                raise Exception(
                    "You can't specify a namespace. "
                    "All tables will be uploaded to the `{}` namespace".format(
                        config.get("HIVE_NAMESPACE")
                    )
                )
            full_table_name = "{}.{}".format(
                config.get("UPLOADED_CSV_HIVE_NAMESPACE"), table_name
            )
        else:
            if "." in table_name and schema_name:
                raise Exception(
                    "You can't specify a namespace both in the name of the table "
                    "and in the schema field. Please remove one"
                )

            full_table_name = (
                "{}.{}".format(schema_name, table_name) if schema_name else table_name
            )

        filename = form.csv_file.data.filename

        upload_prefix = config["CSV_TO_HIVE_UPLOAD_DIRECTORY"]
        upload_path = config["UPLOAD_FOLDER"] + secure_filename(filename)

        # Optional dependency
        from tableschema import Table  # pylint: disable=import-error

        # Infer per-column types from the locally uploaded CSV.
        hive_table_schema = Table(upload_path).infer()
        column_name_and_type = []
        for column_info in hive_table_schema["fields"]:
            column_name_and_type.append(
                "`{}` {}".format(
                    column_info["name"], convert_to_hive_type(column_info["type"])
                )
            )
        schema_definition = ", ".join(column_name_and_type)

        # Optional dependency
        import boto3  # pylint: disable=import-error

        # Stage the CSV on S3 where Hive (via s3a://) can read it.
        s3 = boto3.client("s3")
        location = os.path.join("s3a://", bucket_path, upload_prefix, table_name)
        s3.upload_file(
            upload_path, bucket_path, os.path.join(upload_prefix, table_name, filename)
        )
        sql = f"""CREATE TABLE {full_table_name} ( {schema_definition} )
            ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS
            TEXTFILE LOCATION '{location}'
            tblproperties ('skip.header.line.count'='1')"""
        logging.info(form.con.data)
        engine = create_engine(form.con.data.sqlalchemy_uri_decrypted)
        engine.execute(sql)
@classmethod
def convert_dttm(cls, target_type: str, dttm: datetime) -> str:
tt = target_type.upper()
if tt == "DATE":
return "CAST('{}' AS DATE)".format(dttm.isoformat()[:10])
elif tt == "TIMESTAMP":
return "CAST('{}' AS TIMESTAMP)".format(dttm.strftime("%Y-%m-%d %H:%M:%S"))
return "'{}'".format(dttm.strftime("%Y-%m-%d %H:%M:%S"))
    @classmethod
    def adjust_database_uri(cls, uri, selected_schema=None):
        """Point the SQLAlchemy URL at ``selected_schema`` (URL-quoted), if given."""
        if selected_schema:
            uri.database = parse.quote(selected_schema, safe="")
        return uri
@classmethod
def extract_error_message(cls, e):
msg = str(e)
match = re.search(r'errorMessage="(.*?)(?<!\\)"', msg)
if match:
msg = match.group(1)
return msg
    @classmethod
    def progress(cls, log_lines):
        """Estimate overall query progress (0-100) from Hive log lines.

        Parses "Total jobs", "Launching Job i out of n", and per-stage
        map/reduce percentages; each job contributes an equal share of
        the total, with stage progress averaged within the current job.
        """
        total_jobs = 1  # assuming there's at least 1 job
        current_job = 1
        stages = {}
        for line in log_lines:
            match = cls.jobs_stats_r.match(line)
            if match:
                total_jobs = int(match.groupdict()["max_jobs"]) or 1
            match = cls.launching_job_r.match(line)
            if match:
                current_job = int(match.groupdict()["job_number"])
                total_jobs = int(match.groupdict()["max_jobs"]) or 1
                # New job launched: drop the previous job's stage progress.
                stages = {}
            match = cls.stage_progress_r.match(line)
            if match:
                stage_number = int(match.groupdict()["stage_number"])
                map_progress = int(match.groupdict()["map_progress"])
                reduce_progress = int(match.groupdict()["reduce_progress"])
                stages[stage_number] = (map_progress + reduce_progress) / 2
        logging.info(
            "Progress detail: {}, "
            "current job {}, "
            "total jobs: {}".format(stages, current_job, total_jobs)
        )

        stage_progress = sum(stages.values()) / len(stages.values()) if stages else 0

        # Completed jobs count fully; the running job contributes its
        # averaged stage progress scaled by its share.
        progress = 100 * (current_job - 1) / total_jobs + stage_progress / total_jobs
        return int(progress)
@classmethod
def get_tracking_url(cls, log_lines):
lkp = "Tracking URL = "
for line in log_lines:
if lkp in line:
return line.split(lkp)[1]
    @classmethod
    def handle_cursor(cls, cursor, query, session):
        """Updates progress information.

        Polls the running Hive operation, streaming parsed progress, the
        YARN tracking URL and job logs into the query row until the
        operation leaves the INITIALIZED/RUNNING states or is cancelled.
        """
        from pyhive import hive  # pylint: disable=no-name-in-module

        unfinished_states = (
            hive.ttypes.TOperationState.INITIALIZED_STATE,
            hive.ttypes.TOperationState.RUNNING_STATE,
        )
        polled = cursor.poll()
        last_log_line = 0
        tracking_url = None
        job_id = None
        query_id = query.id
        while polled.operationState in unfinished_states:
            # Re-read the query row: the user may have stopped it meanwhile.
            query = session.query(type(query)).filter_by(id=query_id).one()
            if query.status == QueryStatus.STOPPED:
                cursor.cancel()
                break

            log = cursor.fetch_logs() or ""
            if log:
                log_lines = log.splitlines()
                progress = cls.progress(log_lines)
                logging.info(f"Query {query_id}: Progress total: {progress}")
                needs_commit = False
                # Progress only moves forward.
                if progress > query.progress:
                    query.progress = progress
                    needs_commit = True
                if not tracking_url:
                    tracking_url = cls.get_tracking_url(log_lines)
                    if tracking_url:
                        # Second-to-last path segment of the tracking URL.
                        job_id = tracking_url.split("/")[-2]
                        logging.info(
                            f"Query {query_id}: Found the tracking url: {tracking_url}"
                        )
                        tracking_url = tracking_url_trans(tracking_url)
                        logging.info(
                            f"Query {query_id}: Transformation applied: {tracking_url}"
                        )
                        query.tracking_url = tracking_url
                        logging.info(f"Query {query_id}: Job id: {job_id}")
                        needs_commit = True
                if job_id and len(log_lines) > last_log_line:
                    # Wait for job id before logging things out
                    # this allows for prefixing all log lines and becoming
                    # searchable in something like Kibana
                    for l in log_lines[last_log_line:]:
                        logging.info(f"Query {query_id}: [{job_id}] {l}")
                    last_log_line = len(log_lines)
                if needs_commit:
                    session.commit()
            time.sleep(hive_poll_interval)
            polled = cursor.poll()
    @classmethod
    def get_columns(
        cls, inspector: Inspector, table_name: str, schema: Optional[str]
    ) -> List[Dict[str, Any]]:
        # Plain SQLAlchemy reflection; overrides the Presto implementation.
        return inspector.get_columns(table_name, schema)
    @classmethod
    def where_latest_partition(
        cls,
        table_name: str,
        schema: Optional[str],
        database,
        qry: Select,
        columns: Optional[List] = None,
    ) -> Optional[Select]:
        """Append WHERE clauses pinning ``qry`` to the table's latest partition.

        Returns None when the partition lookup fails (table not
        partitioned) so callers can fall back to an unfiltered query.
        """
        try:
            col_names, values = cls.latest_partition(
                table_name, schema, database, show_first=True
            )
        except Exception:
            # table is not partitioned
            return None
        if values is not None and columns is not None:
            for col_name, value in zip(col_names, values):
                for c in columns:
                    # Only filter on partition columns present in the table.
                    if c.get("name") == col_name:
                        qry = qry.where(Column(col_name) == value)
            return qry
        return None
    @classmethod
    def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]:
        # Delegate to the base implementation, bypassing Presto's override.
        return BaseEngineSpec._get_fields(cols)
    @classmethod
    def latest_sub_partition(cls, table_name, schema, database, **kwargs):
        # Not supported for Hive yet; implicitly returns None.
        # TODO(bogdan): implement`
        pass
@classmethod
def _latest_partition_from_df(cls, df) -> Optional[List[str]]:
"""Hive partitions look like ds={partition name}"""
if not df.empty:
return [df.ix[:, 0].max().split("=")[1]]
return None
    @classmethod
    def _partition_query(cls, table_name, limit=0, order_by=None, filters=None):
        # Hive lists partitions natively; limit/order_by/filters are accepted
        # only for signature compatibility and ignored.
        return f"SHOW PARTITIONS {table_name}"
    @classmethod
    def select_star(
        cls,
        database,
        table_name: str,
        engine: Engine,
        schema: str = None,
        limit: int = 100,
        show_cols: bool = False,
        indent: bool = True,
        latest_partition: bool = True,
        cols: Optional[List[Dict[str, Any]]] = None,
    ) -> str:
        """Build a SELECT * statement for the table.

        The two-argument super() deliberately skips PrestoEngineSpec and
        calls BaseEngineSpec's implementation directly.
        """
        return super(  # pylint: disable=bad-super-call
            PrestoEngineSpec, cls
        ).select_star(
            database,
            table_name,
            engine,
            schema,
            limit,
            show_cols,
            indent,
            latest_partition,
            cols,
        )
    @classmethod
    def modify_url_for_impersonation(cls, url, impersonate_user: bool, username: str):
        """
        Modify the SQL Alchemy URL object with the user to impersonate if applicable.
        :param url: SQLAlchemy URL object
        :param impersonate_user: Flag indicating if impersonation is enabled
        :param username: Effective username
        """
        # Intentionally a no-op for Hive: impersonation is expressed through
        # the connection configuration dict instead of the URL.
        # Do nothing in the URL object since instead this should modify
        # the configuraiton dictionary. See get_configuration_for_impersonation
        pass
    @classmethod
    def get_configuration_for_impersonation(
        cls, uri: str, impersonate_user: bool, username: str
    ) -> Dict[str, str]:
        """
        Return a configuration dictionary that can be merged with other configs
        that can set the correct properties for impersonating users

        :param uri: URI string
        :param impersonate_user: Flag indicating if impersonation is enabled
        :param username: Effective username
        :return: Configs required for impersonation (empty if not applicable)
        """
        configuration = {}
        url = make_url(uri)
        backend_name = url.get_backend_name()

        # Must be Hive connection, enable impersonation, and set param
        # auth=LDAP|KERBEROS
        if (
            backend_name == "hive"
            and "auth" in url.query.keys()
            and impersonate_user is True
            and username is not None
        ):
            configuration["hive.server2.proxy.user"] = username
        return configuration
    @staticmethod
    def execute(cursor, query: str, async_: bool = False):
        # "async" is a reserved word in Python 3.7+, so the keyword argument
        # has to be built in a dict and splatted rather than written literally.
        kwargs = {"async": async_}
        cursor.execute(query, **kwargs)
| 37.026253 | 87 | 0.596493 |
00603d45eb69b3cacea39079e99016a482d0bed1 | 14,773 | py | Python | utils/tester.py | HOUYONGKUO/D3Feat | ee0e95fb57e3621d7cbee5a6759eac30a8cd7e14 | [
"MIT"
] | 214 | 2020-03-09T02:28:26.000Z | 2022-03-18T11:15:14.000Z | utils/tester.py | aosheng1996/D3Feat | d005f3811c12764c16d4f5e9a01c6720e7e72392 | [
"MIT"
] | 49 | 2020-03-18T15:38:36.000Z | 2022-03-29T12:46:28.000Z | utils/tester.py | aosheng1996/D3Feat | d005f3811c12764c16d4f5e9a01c6720e7e72392 | [
"MIT"
] | 32 | 2020-03-09T03:12:25.000Z | 2022-03-06T08:08:54.000Z | #
#
# 0=================================0
# | Kernel Point Convolutions |
# 0=================================0
#
#
# ----------------------------------------------------------------------------------------------------------------------
#
# Class handling the test of any model
#
# ----------------------------------------------------------------------------------------------------------------------
#
# Hugues THOMAS - 11/06/2018
#
# ----------------------------------------------------------------------------------------------------------------------
#
# Imports and global variables
# \**********************************/
#
# Basic libs
import open3d
import tensorflow as tf
import numpy as np
import os
import sys
import logging
from os import makedirs
from os.path import exists, join
import time
from datasets.KITTI import make_open3d_point_cloud, make_open3d_feature
import json
# ----------------------------------------------------------------------------------------------------------------------
#
# Tester Class
# \******************/
#
def corr_dist(est, gth, xyz0, xyz1, weight=None, max_dist=1):
    """Mean (optionally weighted) per-point distance, clipped at ``max_dist``,
    between ``xyz0`` warped by the estimated vs. ground-truth 4x4 pose.

    Note: ``xyz1`` is unused; kept for call-site compatibility.
    """
    warped_est = xyz0 @ est[:3, :3].transpose() + est[:3, 3]
    warped_gth = xyz0 @ gth[:3, :3].transpose() + gth[:3, 3]
    per_point = np.sqrt(np.power(warped_est - warped_gth, 2).sum(1))
    per_point = np.clip(per_point, a_min=0, a_max=max_dist)
    if weight is not None:
        per_point = weight * per_point
    return per_point.mean()
class AverageMeter(object):
    """Tracks the running mean and (biased) variance of a value stream."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0.0
        self.sq_sum = 0.0
        self.count = 0

    def update(self, val, n=1):
        """Fold in ``val`` observed ``n`` times."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count
        self.sq_sum += val ** 2 * n
        # E[x^2] - E[x]^2 over everything seen so far.
        self.var = self.sq_sum / self.count - self.avg ** 2
class Timer(object):
    """Accumulating stopwatch: tic() starts an interval, toc() closes it."""

    def __init__(self):
        self.total_time = 0.
        self.calls = 0
        self.start_time = 0.
        self.diff = 0.
        self.avg = 0.

    def reset(self):
        """Zero every counter."""
        self.total_time = 0
        self.calls = 0
        self.start_time = 0
        self.diff = 0
        self.avg = 0

    def tic(self):
        # using time.time instead of time.clock because time time.clock
        # does not normalize for multithreading
        self.start_time = time.time()

    def toc(self, average=True):
        """Close the interval; return the running average (default) or the
        last interval's duration."""
        elapsed = time.time() - self.start_time
        self.diff = elapsed
        self.total_time += elapsed
        self.calls += 1
        self.avg = self.total_time / self.calls
        return self.avg if average else self.diff
class TimeLiner:
    """Merges successive Chrome-trace JSON strings into a single timeline."""

    def __init__(self):
        self._timeline_dict = None

    def update_timeline(self, chrome_trace):
        """Fold one chrome trace (a JSON string) into the merged timeline."""
        parsed = json.loads(chrome_trace)
        if self._timeline_dict is None:
            # First trace: keep it whole, including event definitions.
            self._timeline_dict = parsed
        else:
            # Later traces: append only timed events (those carrying 'ts'),
            # skipping repeated definitions.
            for event in parsed['traceEvents']:
                if 'ts' in event:
                    self._timeline_dict['traceEvents'].append(event)

    def save(self, f_name):
        """Dump the merged timeline to ``f_name`` as JSON."""
        with open(f_name, 'w') as f:
            json.dump(self._timeline_dict, f)
class ModelTester:
# Initiation methods
# ------------------------------------------------------------------------------------------------------------------
    def __init__(self, model, restore_snap=None):
        """Build a TF1 session and optionally restore network weights.

        Args:
            model: network whose 'KernelPointNetwork' variables are restored.
            restore_snap: checkpoint path to restore from (None = fresh init).
        """

        # Tensorflow Saver definition
        my_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='KernelPointNetwork')
        self.saver = tf.train.Saver(my_vars, max_to_keep=100)

        # Create a session for running Ops on the Graph.
        on_CPU = False
        if on_CPU:
            cProto = tf.ConfigProto(device_count={'GPU': 0})
        else:
            cProto = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True)
            cProto.gpu_options.allow_growth = True
        self.sess = tf.Session(config=cProto)

        # Init variables
        self.sess.run(tf.global_variables_initializer())

        # Name of the snapshot to restore to (None if you want to start from beginning)
        # restore_snap = join(self.saving_path, 'snapshots/snap-40000')
        if (restore_snap is not None):
            self.saver.restore(self.sess, restore_snap)
            print("Model restored from " + restore_snap)
            # Tag derived from the snapshot path: 8 chars after the last "_"
            # plus the trailing iteration — assumes "..._YYYYMMDD.../snap-ITER"
            # style checkpoint names; TODO confirm against the training code.
            self.experiment_str = restore_snap.split("_")[-1][:8] + "-" + restore_snap.split("-")[-1]

        # for i, var in enumerate(my_vars):
        #     print(i, var.name)

        # for v in my_vars:
        #     if 'kernel_points' in v.name:
        #         rescale_op = v.assign(tf.multiply(v, 0.10 / 0.03))
        #         self.sess.run(rescale_op)

        # Add a softmax operation for predictions
        # self.prob_logits = tf.nn.softmax(model.logits)
# Test main methods
# ------------------------------------------------------------------------------------------------------------------
def generate_descriptor(self, model, dataset):
self.sess.run(dataset.test_init_op)
use_random_points = False
if use_random_points:
self.experiment_str = self.experiment_str + '-rand'
else:
self.experiment_str = self.experiment_str + '-pred'
descriptor_path = f'geometric_registration/D3Feat_{self.experiment_str}/descriptors'
keypoint_path = f'geometric_registration/D3Feat_{self.experiment_str}/keypoints'
score_path = f'geometric_registration/D3Feat_{self.experiment_str}/scores'
if not exists(descriptor_path):
makedirs(descriptor_path)
if not exists(keypoint_path):
makedirs(keypoint_path)
if not exists(score_path):
makedirs(score_path)
t = []
for i in range(dataset.num_test):
stime = time.time()
ops = [model.anchor_inputs, model.out_features, model.out_scores, model.anc_id]
[inputs, features, scores, anc_id] = self.sess.run(ops, {model.dropout_prob: 1.0})
t += [time.time() - stime]
if use_random_points:
num_points = inputs['in_batches'][0].shape[0] - 1
# keypts_ind = np.random.choice(np.arange(num_points), num_keypts)
keypts_loc = inputs['backup_points'][:]
anc_features = features[:]
else:
# selecet keypoint based on scores
scores_first_pcd = scores[inputs['in_batches'][0][:-1]]
selected_keypoints_id = np.argsort(scores_first_pcd, axis=0)[:].squeeze()
keypts_score = scores[selected_keypoints_id]
keypts_loc = inputs['backup_points'][selected_keypoints_id]
anc_features = features[selected_keypoints_id]
scene = anc_id.decode("utf-8").split("/")[0]
num_frag = int(anc_id.decode("utf-8").split("_")[-1][:-4])
descriptor_path_scene = join(descriptor_path, scene)
keypoint_path_scene = join(keypoint_path, scene)
score_path_scene = join(score_path, scene)
if not exists(descriptor_path_scene):
os.mkdir(descriptor_path_scene)
if not exists(keypoint_path_scene):
os.mkdir(keypoint_path_scene)
if not exists(score_path_scene):
os.mkdir(score_path_scene)
np.save(join(descriptor_path_scene, 'cloud_bin_{}.D3Feat'.format(num_frag)), anc_features.astype(np.float32))
np.save(join(keypoint_path_scene, 'cloud_bin_{}'.format(num_frag)), keypts_loc.astype(np.float32))
np.save(join(score_path_scene, 'cloud_bin_{}'.format(num_frag)), keypts_score.astype(np.float32))
print("Generate cloud_bin_{0} for {1}".format(num_frag, scene))
print("*" * 40)
print("Avergae Feature Extraction Time:", np.mean(t))
def test_kitti(self, model, dataset):
self.sess.run(dataset.test_init_op)
use_random_points = False
if use_random_points:
num_keypts = 5000
icp_save_path = f'geometric_registration_kitti/D3Feat_{self.experiment_str}-rand{num_keypts}'
else:
num_keypts = 250
icp_save_path = f'geometric_registration_kitti/D3Feat_{self.experiment_str}-pred{num_keypts}'
if not exists(icp_save_path):
makedirs(icp_save_path)
ch = logging.StreamHandler(sys.stdout)
logging.getLogger().setLevel(logging.INFO)
logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%m/%d %H:%M:%S', handlers=[ch])
success_meter, loss_meter, rte_meter, rre_meter = AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter()
feat_timer, reg_timer = Timer(), Timer()
for i in range(dataset.num_test):
feat_timer.tic()
ops = [model.anchor_inputs, model.out_features, model.out_scores, model.anc_id, model.pos_id, model.accuracy]
[inputs, features, scores, anc_id, pos_id, accuracy] = self.sess.run(ops, {model.dropout_prob: 1.0})
feat_timer.toc()
# print(accuracy, anc_id)
stack_lengths = inputs['stack_lengths']
first_pcd_indices = np.arange(stack_lengths[0])
second_pcd_indices = np.arange(stack_lengths[1]) + stack_lengths[0]
# anc_points = inputs['points'][0][first_pcd_indices]
# pos_points = inputs['points'][0][second_pcd_indices]
# anc_features = features[first_pcd_indices]
# pos_features = features[second_pcd_indices]
# anc_scores = scores[first_pcd_indices]
# pos_scores = scores[second_pcd_indices]
if use_random_points:
anc_keypoints_id = np.random.choice(stack_lengths[0], num_keypts)
pos_keypoints_id = np.random.choice(stack_lengths[1], num_keypts) + stack_lengths[0]
anc_points = inputs['points'][0][anc_keypoints_id]
pos_points = inputs['points'][0][pos_keypoints_id]
anc_features = features[anc_keypoints_id]
pos_features = features[pos_keypoints_id]
anc_scores = scores[anc_keypoints_id]
pos_scores = scores[pos_keypoints_id]
else:
scores_anc_pcd = scores[first_pcd_indices]
scores_pos_pcd = scores[second_pcd_indices]
anc_keypoints_id = np.argsort(scores_anc_pcd, axis=0)[-num_keypts:].squeeze()
pos_keypoints_id = np.argsort(scores_pos_pcd, axis=0)[-num_keypts:].squeeze() + stack_lengths[0]
anc_points = inputs['points'][0][anc_keypoints_id]
anc_features = features[anc_keypoints_id]
anc_scores = scores[anc_keypoints_id]
pos_points = inputs['points'][0][pos_keypoints_id]
pos_features = features[pos_keypoints_id]
pos_scores = scores[pos_keypoints_id]
pcd0 = make_open3d_point_cloud(anc_points)
pcd1 = make_open3d_point_cloud(pos_points)
feat0 = make_open3d_feature(anc_features, 32, anc_features.shape[0])
feat1 = make_open3d_feature(pos_features, 32, pos_features.shape[0])
reg_timer.tic()
filename = anc_id.decode("utf-8") + "-" + pos_id.decode("utf-8").split("@")[-1] + '.npz'
if os.path.exists(join(icp_save_path, filename)):
data = np.load(join(icp_save_path, filename))
T_ransac = data['trans']
print(f"Read from {join(icp_save_path, filename)}")
else:
distance_threshold = dataset.voxel_size * 1.0
ransac_result = open3d.registration.registration_ransac_based_on_feature_matching(
pcd0, pcd1, feat0, feat1, distance_threshold,
open3d.registration.TransformationEstimationPointToPoint(False), 4, [
open3d.registration.CorrespondenceCheckerBasedOnEdgeLength(0.9),
open3d.registration.CorrespondenceCheckerBasedOnDistance(distance_threshold)
],
open3d.registration.RANSACConvergenceCriteria(50000, 1000)
# open3d.registration.RANSACConvergenceCriteria(4000000, 10000)
)
# print(ransac_result)
T_ransac = ransac_result.transformation.astype(np.float32)
np.savez(join(icp_save_path, filename),
trans=T_ransac,
anc_pts=anc_points,
pos_pts=pos_points,
anc_scores=anc_scores,
pos_scores=pos_scores
)
reg_timer.toc()
T_gth = inputs['trans']
# loss_ransac = corr_dist(T_ransac, T_gth, anc_points, pos_points, weight=None, max_dist=1)
loss_ransac = 0
rte = np.linalg.norm(T_ransac[:3, 3] - T_gth[:3, 3])
rre = np.arccos((np.trace(T_ransac[:3, :3].transpose() @ T_gth[:3, :3]) - 1) / 2)
if rte < 2:
rte_meter.update(rte)
if not np.isnan(rre) and rre < np.pi / 180 * 5:
rre_meter.update(rre * 180 / np.pi)
if rte < 2 and not np.isnan(rre) and rre < np.pi / 180 * 5:
success_meter.update(1)
else:
success_meter.update(0)
logging.info(f"{anc_id} Failed with RTE: {rte}, RRE: {rre * 180 / np.pi}")
loss_meter.update(loss_ransac)
if (i + 1) % 10 == 0:
logging.info(
f"{i+1} / {dataset.num_test}: Feat time: {feat_timer.avg}," +
f" Reg time: {reg_timer.avg}, Loss: {loss_meter.avg}, RTE: {rte_meter.avg}," +
f" RRE: {rre_meter.avg}, Success: {success_meter.sum} / {success_meter.count}" +
f" ({success_meter.avg * 100} %)"
)
feat_timer.reset()
reg_timer.reset()
logging.info(
f"Total loss: {loss_meter.avg}, RTE: {rte_meter.avg}, var: {rte_meter.var}," +
f" RRE: {rre_meter.avg}, var: {rre_meter.var}, Success: {success_meter.sum} " +
f"/ {success_meter.count} ({success_meter.avg * 100} %)"
)
| 40.922438 | 121 | 0.559331 |
83cfe495580a68abe4e2807356d28aec20e1f684 | 6,674 | py | Python | bindings/python/ensmallen_graph/datasets/string/sulfolobusislandicus.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | [
"MIT"
] | null | null | null | bindings/python/ensmallen_graph/datasets/string/sulfolobusislandicus.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | [
"MIT"
] | null | null | null | bindings/python/ensmallen_graph/datasets/string/sulfolobusislandicus.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | [
"MIT"
] | null | null | null | """
This file offers the methods to automatically retrieve the graph Sulfolobus islandicus.
The graph is automatically retrieved from the STRING repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 21:58:00.660756
The undirected graph Sulfolobus islandicus has 2601 nodes and 188974 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.05589 and has 27 connected components, where the component with most
nodes has 2538 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 118, the mean node degree is 145.31, and
the node degree mode is 3. The top 5 most central nodes are 930945.SiRe_1456
(degree 893), 930945.SiRe_2622 (degree 783), 930945.SiRe_1455 (degree 756),
930945.SiRe_1458 (degree 754) and 930945.SiRe_2615 (degree 732).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import SulfolobusIslandicus
# Then load the graph
graph = SulfolobusIslandicus()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Wether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def SulfolobusIslandicus(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Sulfolobus islandicus graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
Wether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
Wether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
Instace of Sulfolobus islandicus graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 21:58:00.660756
The undirected graph Sulfolobus islandicus has 2601 nodes and 188974 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.05589 and has 27 connected components, where the component with most
nodes has 2538 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 118, the mean node degree is 145.31, and
the node degree mode is 3. The top 5 most central nodes are 930945.SiRe_1456
(degree 893), 930945.SiRe_2622 (degree 783), 930945.SiRe_1455 (degree 756),
930945.SiRe_1458 (degree 754) and 930945.SiRe_2615 (degree 732).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import SulfolobusIslandicus
# Then load the graph
graph = SulfolobusIslandicus()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Wether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="SulfolobusIslandicus",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| 35.312169 | 223 | 0.703177 |
74dfdca53b9ea5c835da3d9e29048aebf4cc80d7 | 4,994 | py | Python | src/sim/System.py | vovojh/gem5 | 1ee55788fde4c01ef56c47c4cb2a5b0aa2283d4c | [
"BSD-3-Clause"
] | 3 | 2017-11-19T06:49:54.000Z | 2018-12-26T18:08:11.000Z | src/sim/System.py | vovojh/gem5 | 1ee55788fde4c01ef56c47c4cb2a5b0aa2283d4c | [
"BSD-3-Clause"
] | null | null | null | src/sim/System.py | vovojh/gem5 | 1ee55788fde4c01ef56c47c4cb2a5b0aa2283d4c | [
"BSD-3-Clause"
] | 6 | 2016-07-31T18:48:18.000Z | 2022-03-06T22:41:28.000Z | # Copyright (c) 2005-2007 The Regents of The University of Michigan
# Copyright (c) 2011 Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
# Rick Strong
from m5.SimObject import SimObject
from m5.defines import buildEnv
from m5.params import *
from m5.proxy import *
from DVFSHandler import *
from SimpleMemory import *
class MemoryMode(Enum): vals = ['invalid', 'atomic', 'timing',
'atomic_noncaching']
class System(MemObject):
type = 'System'
cxx_header = "sim/system.hh"
system_port = MasterPort("System port")
@classmethod
def export_method_cxx_predecls(cls, code):
code('#include "sim/system.hh"')
@classmethod
def export_methods(cls, code):
code('''
Enums::MemoryMode getMemoryMode() const;
void setMemoryMode(Enums::MemoryMode mode);
''')
memories = VectorParam.AbstractMemory(Self.all,
"All memories in the system")
mem_mode = Param.MemoryMode('atomic', "The mode the memory system is in")
# When reserving memory on the host, we have the option of
# reserving swap space or not (by passing MAP_NORESERVE to
# mmap). By enabling this flag, we accomodate cases where a large
# (but sparse) memory is simulated.
mmap_using_noreserve = Param.Bool(False, "mmap the backing store " \
"without reserving swap")
# The memory ranges are to be populated when creating the system
# such that these can be passed from the I/O subsystem through an
# I/O bridge or cache
mem_ranges = VectorParam.AddrRange([], "Ranges that constitute main memory")
cache_line_size = Param.Unsigned(64, "Cache line size in bytes")
work_item_id = Param.Int(-1, "specific work item id")
num_work_ids = Param.Int(16, "Number of distinct work item types")
work_begin_cpu_id_exit = Param.Int(-1,
"work started on specific id, now exit simulation")
work_begin_ckpt_count = Param.Counter(0,
"create checkpoint when work items begin count value is reached")
work_begin_exit_count = Param.Counter(0,
"exit simulation when work items begin count value is reached")
work_end_ckpt_count = Param.Counter(0,
"create checkpoint when work items end count value is reached")
work_end_exit_count = Param.Counter(0,
"exit simulation when work items end count value is reached")
work_cpus_ckpt_count = Param.Counter(0,
"create checkpoint when active cpu count value is reached")
init_param = Param.UInt64(0, "numerical value to pass into simulator")
boot_osflags = Param.String("a", "boot flags to pass to the kernel")
kernel = Param.String("", "file that contains the kernel code")
kernel_addr_check = Param.Bool(True,
"whether to address check on kernel (disable for baremetal)")
readfile = Param.String("", "file to read startup script from")
symbolfile = Param.String("", "file to get the symbols from")
load_addr_mask = Param.UInt64(0xffffffffff,
"Address to mask loading binaries with")
load_offset = Param.UInt64(0, "Address to offset loading binaries with")
multi_thread = Param.Bool(False,
"Supports multi-threaded CPUs? Impacts Thread/Context IDs")
# Dynamic voltage and frequency handler for the system, disabled by default
# Provide list of domains that need to be controlled by the handler
dvfs_handler = DVFSHandler()
| 46.240741 | 80 | 0.718863 |
aa27bd1a49dbe32ef634d16bd21135de6298cce0 | 500 | py | Python | python/source/iitm/computation_thinking/sortListInserted.py | dineshkumarsarangapani/leetcode-solutions | 1559e4ed71b7cbb081071434a029bbf1794e022e | [
"Apache-2.0"
] | 1 | 2020-10-18T09:28:17.000Z | 2020-10-18T09:28:17.000Z | python/source/iitm/computation_thinking/sortListInserted.py | dineshkumarsarangapani/leetcode-solutions | 1559e4ed71b7cbb081071434a029bbf1794e022e | [
"Apache-2.0"
] | null | null | null | python/source/iitm/computation_thinking/sortListInserted.py | dineshkumarsarangapani/leetcode-solutions | 1559e4ed71b7cbb081071434a029bbf1794e022e | [
"Apache-2.0"
] | null | null | null | def insertList(l, x):
newList = []
inserted = False
for z in l:
if not inserted and x < z:
newList.append(x)
inserted = True
newList.append(z)
if not inserted:
newList.append(x)
return newList
if __name__ == '__main__':
l = [5,4,3,2,1]
l = insertList(l, 5)
print(l)
l = insertList(l, 4)
print(l)
l = insertList(l, 3)
print(l)
l = insertList(l, 2)
print(l)
l = insertList(l, 1)
print(l)
| 19.230769 | 34 | 0.518 |
5840fe524a17f83008ac9d48d72467167c769754 | 18,278 | py | Python | PRESUBMIT.py | natinusala/skia | 0b75be78f543651c26e49c6c15030c107a16e299 | [
"BSD-3-Clause"
] | 54 | 2016-04-05T17:45:19.000Z | 2022-01-31T06:27:33.000Z | PRESUBMIT.py | natinusala/skia | 0b75be78f543651c26e49c6c15030c107a16e299 | [
"BSD-3-Clause"
] | 25 | 2016-03-18T04:01:06.000Z | 2020-06-27T15:39:35.000Z | PRESUBMIT.py | natinusala/skia | 0b75be78f543651c26e49c6c15030c107a16e299 | [
"BSD-3-Clause"
] | 50 | 2016-03-03T20:31:58.000Z | 2022-03-31T18:26:13.000Z | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for Skia.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
import collections
import csv
import fnmatch
import os
import re
import subprocess
import sys
import traceback
REVERT_CL_SUBJECT_PREFIX = 'Revert '
# Please add the complete email address here (and not just 'xyz@' or 'xyz').
PUBLIC_API_OWNERS = (
'mtklein@google.com',
'reed@chromium.org',
'reed@google.com',
'bsalomon@chromium.org',
'bsalomon@google.com',
'djsollen@chromium.org',
'djsollen@google.com',
'hcm@chromium.org',
'hcm@google.com',
)
AUTHORS_FILE_NAME = 'AUTHORS'
RELEASE_NOTES_FILE_NAME = 'RELEASE_NOTES.txt'
DOCS_PREVIEW_URL = 'https://skia.org/?cl={issue}'
GOLD_TRYBOT_URL = 'https://gold.skia.org/search?issue='
SERVICE_ACCOUNT_SUFFIX = [
'@%s.iam.gserviceaccount.com' % project for project in [
'skia-buildbots.google.com', 'skia-swarming-bots', 'skia-public',
'skia-corp.google.com', 'chops-service-accounts']]
def _CheckChangeHasEol(input_api, output_api, source_file_filter=None):
"""Checks that files end with at least one \n (LF)."""
eof_files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
contents = input_api.ReadFile(f, 'rb')
# Check that the file ends in at least one newline character.
if len(contents) > 1 and contents[-1:] != '\n':
eof_files.append(f.LocalPath())
if eof_files:
return [output_api.PresubmitPromptWarning(
'These files should end in a newline character:',
items=eof_files)]
return []
def _JsonChecks(input_api, output_api):
"""Run checks on any modified json files."""
failing_files = []
for affected_file in input_api.AffectedFiles(None):
affected_file_path = affected_file.LocalPath()
is_json = affected_file_path.endswith('.json')
is_metadata = (affected_file_path.startswith('site/') and
affected_file_path.endswith('/METADATA'))
if is_json or is_metadata:
try:
input_api.json.load(open(affected_file_path, 'r'))
except ValueError:
failing_files.append(affected_file_path)
results = []
if failing_files:
results.append(
output_api.PresubmitError(
'The following files contain invalid json:\n%s\n\n' %
'\n'.join(failing_files)))
return results
def _IfDefChecks(input_api, output_api):
"""Ensures if/ifdef are not before includes. See skbug/3362 for details."""
comment_block_start_pattern = re.compile('^\s*\/\*.*$')
comment_block_middle_pattern = re.compile('^\s+\*.*')
comment_block_end_pattern = re.compile('^\s+\*\/.*$')
single_line_comment_pattern = re.compile('^\s*//.*$')
def is_comment(line):
return (comment_block_start_pattern.match(line) or
comment_block_middle_pattern.match(line) or
comment_block_end_pattern.match(line) or
single_line_comment_pattern.match(line))
empty_line_pattern = re.compile('^\s*$')
def is_empty_line(line):
return empty_line_pattern.match(line)
failing_files = []
for affected_file in input_api.AffectedSourceFiles(None):
affected_file_path = affected_file.LocalPath()
if affected_file_path.endswith('.cpp') or affected_file_path.endswith('.h'):
f = open(affected_file_path)
for line in f.xreadlines():
if is_comment(line) or is_empty_line(line):
continue
# The below will be the first real line after comments and newlines.
if line.startswith('#if 0 '):
pass
elif line.startswith('#if ') or line.startswith('#ifdef '):
failing_files.append(affected_file_path)
break
results = []
if failing_files:
results.append(
output_api.PresubmitError(
'The following files have #if or #ifdef before includes:\n%s\n\n'
'See https://bug.skia.org/3362 for why this should be fixed.' %
'\n'.join(failing_files)))
return results
def _CopyrightChecks(input_api, output_api, source_file_filter=None):
results = []
year_pattern = r'\d{4}'
year_range_pattern = r'%s(-%s)?' % (year_pattern, year_pattern)
years_pattern = r'%s(,%s)*,?' % (year_range_pattern, year_range_pattern)
copyright_pattern = (
r'Copyright (\([cC]\) )?%s \w+' % years_pattern)
for affected_file in input_api.AffectedSourceFiles(source_file_filter):
if ('third_party/' in affected_file.LocalPath() or
'tests/sksl/' in affected_file.LocalPath()):
continue
contents = input_api.ReadFile(affected_file, 'rb')
if not re.search(copyright_pattern, contents):
results.append(output_api.PresubmitError(
'%s is missing a correct copyright header.' % affected_file))
return results
def _InfraTests(input_api, output_api):
"""Run the infra tests."""
results = []
if not any(f.LocalPath().startswith('infra')
for f in input_api.AffectedFiles()):
return results
cmd = ['python', os.path.join('infra', 'bots', 'infra_tests.py')]
try:
subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
results.append(output_api.PresubmitError(
'`%s` failed:\n%s' % (' '.join(cmd), e.output)))
return results
def _CheckGNFormatted(input_api, output_api):
"""Make sure any .gn files we're changing have been formatted."""
files = []
for f in input_api.AffectedFiles(include_deletes=False):
if (f.LocalPath().endswith('.gn') or
f.LocalPath().endswith('.gni')):
files.append(f)
if not files:
return []
cmd = ['python', os.path.join('bin', 'fetch-gn')]
try:
subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
return [output_api.PresubmitError(
'`%s` failed:\n%s' % (' '.join(cmd), e.output))]
results = []
for f in files:
gn = 'gn.exe' if 'win32' in sys.platform else 'gn'
gn = os.path.join(input_api.PresubmitLocalPath(), 'bin', gn)
cmd = [gn, 'format', '--dry-run', f.LocalPath()]
try:
subprocess.check_output(cmd)
except subprocess.CalledProcessError:
fix = 'bin/gn format ' + f.LocalPath()
results.append(output_api.PresubmitError(
'`%s` failed, try\n\t%s' % (' '.join(cmd), fix)))
return results
def _CheckIncludesFormatted(input_api, output_api):
"""Make sure #includes in files we're changing have been formatted."""
files = [str(f) for f in input_api.AffectedFiles() if f.Action() != 'D']
cmd = ['python',
'tools/rewrite_includes.py',
'--dry-run'] + files
if 0 != subprocess.call(cmd):
return [output_api.PresubmitError('`%s` failed' % ' '.join(cmd))]
return []
class _WarningsAsErrors():
def __init__(self, output_api):
self.output_api = output_api
self.old_warning = None
def __enter__(self):
self.old_warning = self.output_api.PresubmitPromptWarning
self.output_api.PresubmitPromptWarning = self.output_api.PresubmitError
return self.output_api
def __exit__(self, ex_type, ex_value, ex_traceback):
self.output_api.PresubmitPromptWarning = self.old_warning
def _CheckDEPSValid(input_api, output_api):
"""Ensure that DEPS contains valid entries."""
results = []
script = os.path.join('infra', 'bots', 'check_deps.py')
relevant_files = ('DEPS', script)
for f in input_api.AffectedFiles():
if f.LocalPath() in relevant_files:
break
else:
return results
cmd = ['python', script]
try:
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
results.append(output_api.PresubmitError(e.output))
return results
def _CommonChecks(input_api, output_api):
"""Presubmit checks common to upload and commit."""
results = []
sources = lambda x: (x.LocalPath().endswith('.h') or
x.LocalPath().endswith('.py') or
x.LocalPath().endswith('.sh') or
x.LocalPath().endswith('.m') or
x.LocalPath().endswith('.mm') or
x.LocalPath().endswith('.go') or
x.LocalPath().endswith('.c') or
x.LocalPath().endswith('.cc') or
x.LocalPath().endswith('.cpp'))
results.extend(_CheckChangeHasEol(
input_api, output_api, source_file_filter=sources))
with _WarningsAsErrors(output_api):
results.extend(input_api.canned_checks.CheckChangeHasNoCR(
input_api, output_api, source_file_filter=sources))
results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
input_api, output_api, source_file_filter=sources))
results.extend(_JsonChecks(input_api, output_api))
results.extend(_IfDefChecks(input_api, output_api))
results.extend(_CopyrightChecks(input_api, output_api,
source_file_filter=sources))
results.extend(_CheckDEPSValid(input_api, output_api))
results.extend(_CheckIncludesFormatted(input_api, output_api))
results.extend(_CheckGNFormatted(input_api, output_api))
return results
def CheckChangeOnUpload(input_api, output_api):
"""Presubmit checks for the change on upload."""
results = []
results.extend(_CommonChecks(input_api, output_api))
# Run on upload, not commit, since the presubmit bot apparently doesn't have
# coverage or Go installed.
results.extend(_InfraTests(input_api, output_api))
results.extend(_CheckReleaseNotesForPublicAPI(input_api, output_api))
return results
class CodeReview(object):
"""Abstracts which codereview tool is used for the specified issue."""
def __init__(self, input_api):
self._issue = input_api.change.issue
self._gerrit = input_api.gerrit
def GetOwnerEmail(self):
return self._gerrit.GetChangeOwner(self._issue)
def GetSubject(self):
return self._gerrit.GetChangeInfo(self._issue)['subject']
def GetDescription(self):
return self._gerrit.GetChangeDescription(self._issue)
def GetReviewers(self):
code_review_label = (
self._gerrit.GetChangeInfo(self._issue)['labels']['Code-Review'])
return [r['email'] for r in code_review_label.get('all', [])]
def GetApprovers(self):
approvers = []
code_review_label = (
self._gerrit.GetChangeInfo(self._issue)['labels']['Code-Review'])
for m in code_review_label.get('all', []):
if m.get("value") == 1:
approvers.append(m["email"])
return approvers
def _CheckOwnerIsInAuthorsFile(input_api, output_api):
results = []
if input_api.change.issue:
cr = CodeReview(input_api)
owner_email = cr.GetOwnerEmail()
# Service accounts don't need to be in AUTHORS.
for suffix in SERVICE_ACCOUNT_SUFFIX:
if owner_email.endswith(suffix):
return results
try:
authors_content = ''
for line in open(AUTHORS_FILE_NAME):
if not line.startswith('#'):
authors_content += line
email_fnmatches = re.findall('<(.*)>', authors_content)
for email_fnmatch in email_fnmatches:
if fnmatch.fnmatch(owner_email, email_fnmatch):
# Found a match, the user is in the AUTHORS file break out of the loop
break
else:
results.append(
output_api.PresubmitError(
'The email %s is not in Skia\'s AUTHORS file.\n'
'Issue owner, this CL must include an addition to the Skia AUTHORS '
'file.'
% owner_email))
except IOError:
# Do not fail if authors file cannot be found.
traceback.print_exc()
input_api.logging.error('AUTHORS file not found!')
return results
def _CheckReleaseNotesForPublicAPI(input_api, output_api):
"""Checks to see if release notes file is updated with public API changes."""
results = []
public_api_changed = False
release_file_changed = False
for affected_file in input_api.AffectedFiles():
affected_file_path = affected_file.LocalPath()
file_path, file_ext = os.path.splitext(affected_file_path)
# We only care about files that end in .h and are under the top-level
# include dir, but not include/private.
if (file_ext == '.h' and
file_path.split(os.path.sep)[0] == 'include' and
'private' not in file_path):
public_api_changed = True
elif affected_file_path == RELEASE_NOTES_FILE_NAME:
release_file_changed = True
if public_api_changed and not release_file_changed:
results.append(output_api.PresubmitPromptWarning(
'If this change affects a client API, please add a summary line '
'to the %s file.' % RELEASE_NOTES_FILE_NAME))
return results
def _CheckLGTMsForPublicAPI(input_api, output_api):
"""Check LGTMs for public API changes.
For public API files make sure there is an LGTM from the list of owners in
PUBLIC_API_OWNERS.
"""
results = []
requires_owner_check = False
for affected_file in input_api.AffectedFiles():
affected_file_path = affected_file.LocalPath()
file_path, file_ext = os.path.splitext(affected_file_path)
# We only care about files that end in .h and are under the top-level
# include dir, but not include/private.
if (file_ext == '.h' and
'include' == file_path.split(os.path.sep)[0] and
'private' not in file_path):
requires_owner_check = True
if not requires_owner_check:
return results
lgtm_from_owner = False
if input_api.change.issue:
cr = CodeReview(input_api)
if re.match(REVERT_CL_SUBJECT_PREFIX, cr.GetSubject(), re.I):
# It is a revert CL, ignore the public api owners check.
return results
if input_api.gerrit:
for reviewer in cr.GetReviewers():
if reviewer in PUBLIC_API_OWNERS:
# If an owner is specified as an reviewer in Gerrit then ignore the
# public api owners check.
return results
else:
match = re.search(r'^TBR=(.*)$', cr.GetDescription(), re.M)
if match:
tbr_section = match.group(1).strip().split(' ')[0]
tbr_entries = tbr_section.split(',')
for owner in PUBLIC_API_OWNERS:
if owner in tbr_entries or owner.split('@')[0] in tbr_entries:
# If an owner is specified in the TBR= line then ignore the public
# api owners check.
return results
if cr.GetOwnerEmail() in PUBLIC_API_OWNERS:
# An owner created the CL that is an automatic LGTM.
lgtm_from_owner = True
for approver in cr.GetApprovers():
if approver in PUBLIC_API_OWNERS:
# Found an lgtm in a message from an owner.
lgtm_from_owner = True
break
if not lgtm_from_owner:
results.append(
output_api.PresubmitError(
"If this CL adds to or changes Skia's public API, you need an LGTM "
"from any of %s. If this CL only removes from or doesn't change "
"Skia's public API, please add a short note to the CL saying so. "
"Add one of the owners as a reviewer to your CL as well as to the "
"TBR= line. If you don't know if this CL affects Skia's public "
"API, treat it like it does." % str(PUBLIC_API_OWNERS)))
return results
def PostUploadHook(gerrit, change, output_api):
    """git cl upload will call this hook after the issue is created/modified.

    This hook does the following:
    * Adds a link to preview docs changes if there are any docs changes in the CL.
    * Adds 'No-Try: true' if the CL contains only docs changes.

    Args:
      gerrit: Gerrit client used to push the updated CL description.
      change: the Change object describing the uploaded CL.
      output_api: presubmit output API used to emit notify results.

    Returns:
      A list of PresubmitNotifyResult objects describing what was changed
      (empty when nothing was done).
    """
    # Nothing to do until an issue actually exists on the code-review server.
    if not change.issue:
        return []

    # Skip PostUploadHooks for all auto-commit service account bots. New
    # patchsets (caused due to PostUploadHooks) invalidates the CQ+2 vote from
    # the "--use-commit-queue" flag to "git cl upload".
    for suffix in SERVICE_ACCOUNT_SUFFIX:
        if change.author_email.endswith(suffix):
            return []

    results = []
    at_least_one_docs_change = False
    all_docs_changes = True
    # Classify the CL: does it touch docs ('site/...') at all, and does it
    # touch *only* docs?  Stop scanning as soon as both answers are known.
    for affected_file in change.AffectedFiles():
        affected_file_path = affected_file.LocalPath()
        file_path, _ = os.path.splitext(affected_file_path)
        if 'site' == file_path.split(os.path.sep)[0]:
            at_least_one_docs_change = True
        else:
            all_docs_changes = False
        if at_least_one_docs_change and not all_docs_changes:
            break

    footers = change.GitFootersFromDescription()
    description_changed = False

    # If the change includes only doc changes then add No-Try: true in the
    # CL's description if it does not exist yet.
    if all_docs_changes and 'true' not in footers.get('No-Try', []):
        description_changed = True
        change.AddDescriptionFooter('No-Try', 'true')
        results.append(
            output_api.PresubmitNotifyResult(
                'This change has only doc changes. Automatically added '
                '\'No-Try: true\' to the CL\'s description'))

    # If there is at least one docs change then add preview link in the CL's
    # description if it does not already exist there.
    docs_preview_link = DOCS_PREVIEW_URL.format(issue=change.issue)
    if (at_least_one_docs_change
        and docs_preview_link not in footers.get('Docs-Preview', [])):
        # Automatically add a link to where the docs can be previewed.
        description_changed = True
        change.AddDescriptionFooter('Docs-Preview', docs_preview_link)
        results.append(
            output_api.PresubmitNotifyResult(
                'Automatically added a link to preview the docs changes to the '
                'CL\'s description'))

    # If the description has changed update it.
    if description_changed:
        gerrit.UpdateDescription(
            change.FullDescriptionText(), change.issue)

    return results
def CheckChangeOnCommit(input_api, output_api):
    """Presubmit checks for the change on commit."""
    # Run every commit-time check in order and concatenate their findings.
    # The last entry scans the CL description and file contents for the
    # forbidden "DO NOT" + "SUBMIT" marker (spelled split here so this file
    # does not trip the check itself).
    checks = (
        _CommonChecks,
        _CheckLGTMsForPublicAPI,
        _CheckOwnerIsInAuthorsFile,
        input_api.canned_checks.CheckDoNotSubmit,
    )
    results = []
    for check in checks:
        results.extend(check(input_api, output_api))
    return results
| 35.76908 | 80 | 0.684211 |
bdd7a91eb15509cf6d86d31974fec324b8e9f39e | 14,609 | py | Python | modules/dbnd/src/dbnd/_core/settings/log.py | dmytrostriletskyi/dbnd | d4a5f5167523e80439c9d64182cdc87b40cbc48f | [
"Apache-2.0"
] | 224 | 2020-01-02T10:46:37.000Z | 2022-03-02T13:54:08.000Z | modules/dbnd/src/dbnd/_core/settings/log.py | dmytrostriletskyi/dbnd | d4a5f5167523e80439c9d64182cdc87b40cbc48f | [
"Apache-2.0"
] | 16 | 2020-03-11T09:37:58.000Z | 2022-01-26T10:22:08.000Z | modules/dbnd/src/dbnd/_core/settings/log.py | dmytrostriletskyi/dbnd | d4a5f5167523e80439c9d64182cdc87b40cbc48f | [
"Apache-2.0"
] | 24 | 2020-03-24T13:53:50.000Z | 2022-03-22T11:55:18.000Z | from __future__ import print_function
import logging
import os
import sys
from logging.config import DictConfigurator
from typing import Callable, List, Optional
from dbnd._core.configuration.environ_config import in_quiet_mode
from dbnd._core.log.config import configure_logging_dictConfig
from dbnd._core.log.logging_utils import find_handler, setup_log_file, try_init_sentry
from dbnd._core.parameter.parameter_builder import parameter
from dbnd._core.task import config
from dbnd._core.utils.basics.format_exception import format_exception_as_str
from dbnd._core.utils.project.project_fs import databand_system_path
from dbnd._vendor.tbvaccine import TBVaccine
logger = logging.getLogger(__name__)
class LoggingConfig(config.Config):
    """Databand's logger configuration"""

    _conf__task_family = "log"

    # --- general switches -------------------------------------------------
    disabled = parameter(description="Should logging be disabled").value(False)
    debug_log_config = parameter(
        description="Debug our logging configuration system"
    ).value(False)
    capture_stdout_stderr = parameter(
        description="Should logger retransmit all output wrtten to stdout/stderr"
    ).value(True)
    capture_task_run_log = parameter.help("Capture task output into log").value(True)
    override_airflow_logging_on_task_run = parameter(
        description="Replace airflow logger with databand logger"
    ).value(True)
    support_jupyter = parameter(
        description="Support logging output to Jupiter UI"
    ).value(False)

    # --- level / format ---------------------------------------------------
    level = parameter(description="Logging level. DEBUG/INFO/WARN/ERROR").value("INFO")
    formatter = parameter(
        description="Log formatting string (logging library convention)"
    )[str]
    formatter_colorlog = parameter(
        description="Log formatting string (logging library convention)"
    )[str]
    formatter_simple = parameter(
        description="Log formatting string (logging library convention)"
    )[str]
    console_formatter_name = parameter(
        description="The name of the formatter logging to console output"
    )[str]
    file_formatter_name = parameter(
        description="The name of the formatter logging to file output"
    )[str]

    # sentry config
    sentry_url = parameter(
        default=None,
        description="URL for setting up sentry logger. Notice - make sure the url is exposed to dbnd run environment",
    )[str]
    sentry_env = parameter(default="dev", description="Environment for sentry logger")[
        str
    ]
    sentry_release = parameter(default="", description="Release for sentry logger")[str]
    sentry_debug = parameter(default=False, description="Unable debug flag for sentry")[
        bool
    ]

    # --- destinations -----------------------------------------------------
    file_log = parameter(default=None, description="Log to file (off by default)")[str]
    stream_stdout = parameter(
        description="Should databand'a logger stream stdout instead of stderr"
    ).value(False)

    custom_dict_config = parameter(
        default=None, description="Advanced: Customized logging configuration"
    )[Callable]

    at_warn = parameter.help("name of loggers to put in WARNING mode").c[List[str]]
    at_debug = parameter.help("name of loggers to put in DEBUG mode").c[List[str]]

    # --- exception rendering ----------------------------------------------
    exception_no_color = parameter(
        default=False, description="Do not use colors in exception handling"
    )[bool]
    exception_simple = parameter(
        default=False, description="Simple mode of exception handling"
    )[bool]

    # --- log shipping to the tracking server -------------------------------
    send_body_to_server = parameter(
        default=True, description="Enable or disable sending log file to server."
    )[bool]

    preview_head_bytes = parameter(
        default=15 * 1024,  # 15KB
        description="Max head size of the log file, bytes to be sent to server.\n"
        "Default: 15KB.",
    )[int]

    preview_tail_bytes = parameter(
        default=15 * 1024,  # 15KB
        description="Max tail size of the log file, bytes to be sent to server.\n"
        "Default: 15KB.",
    )[int]

    remote_logging_disabled = parameter.help(
        "for tasks using a cloud environment, don't copy the task log to cloud storage"
    ).value(False)

    targets_log_level = parameter(
        default="DEBUG",
        description="Should log the time it takes for marshalling and unmarshalling targets",
    )[str]

    disable_colors = parameter(default=False, description="Disabling any colored logs.")

    # --- debugging helpers -------------------------------------------------
    sqlalchemy_print = parameter(description="enable sqlalchemy logger").value(False)
    sqlalchemy_trace = parameter(description="trace sqlalchemy queries").value(False)
    api_profile = parameter(description="profile api calls").value(False)

    def _initialize(self):
        """Extend base initialization; the task-log formatter is built lazily."""
        super(LoggingConfig, self)._initialize()
        self.task_log_file_formatter = None

    def format_exception_as_str(self, exc_info, isolate=True):
        """Render ``exc_info`` as a string, optionally via the TBVaccine pretty-printer.

        Falls back to the plain formatter when ``exception_simple`` is set or
        TBVaccine fails for any reason.
        """
        if self.exception_simple:
            return format_exception_as_str(exc_info)

        try:
            tbvaccine = TBVaccine(
                no_colors=self.exception_no_color,
                show_vars=False,
                skip_non_user_on_isolate=True,
                isolate=isolate,
            )
            return tbvaccine.format_tb(*exc_info)
        except Exception as ex:
            logger.info("Failed to format exception: %s", ex)
            return format_exception_as_str(exc_info)

    def get_dbnd_logging_config(self, filename=None):
        """Return the dictConfig dict: a user-provided one if configured, else the default."""
        if self.custom_dict_config:
            if not in_quiet_mode():
                logger.info("Using user provided logging config")

            self.log_debug("Using log.custom_dict_config")
            return self.settings.log.custom_dict_config()

        return self.get_dbnd_logging_config_base(filename=filename)

    def get_dbnd_logging_config_base(self, filename=None):
        # type: (LoggingConfig, Optional[str]) -> Optional[dict]
        """Build the default ``logging.config.dictConfig``-style dictionary.

        Adds a console handler (stderr or stdout per ``stream_stdout``) and,
        when ``filename`` is given, a UTF-8 file handler on the root logger.
        """
        self.log_debug("Using log.get_dbnd_logging_config_base")
        log_settings = self
        log_level = log_settings.level
        # we want to have "real" output, so nothing can catch our handler
        # in opposite to what airflow is doing
        console_stream = (
            sys.__stdout__ if log_settings.stream_stdout else sys.__stderr__
        )

        if "ipykernel" in sys.modules and self.support_jupyter:
            # we can not use __stdout__ or __stderr__ as it will not be printed into jupyter web UI
            # at the same time using sys.stdout when airflow is active is very dangerous
            # as it can create dangerous loop from airflow redirection into root logger
            self.log_debug("ipykernel: checking on console_stream again")
            console_stream = sys.stdout if log_settings.stream_stdout else sys.stderr

        # dummy path, we will not write to this file
        task_file_handler_file = databand_system_path("logs", "task.log")
        self.log_debug("task_file_handler_file: %s", task_file_handler_file)
        setup_log_file(task_file_handler_file)

        config = {
            "version": 1,
            "disable_existing_loggers": False,
            "filters": {
                "task_context_filter": {
                    "()": "dbnd._core.log.logging_utils.TaskContextFilter"
                }
            },
            "formatters": {
                "formatter": {"format": log_settings.formatter},
                "formatter_simple": {"format": log_settings.formatter_simple},
                "formatter_colorlog": {
                    "()": "dbnd._vendor.colorlog.ColoredFormatter",
                    "format": log_settings.formatter_colorlog,
                    "reset": True,
                },
            },
            "handlers": {
                "console": {
                    "class": "logging.StreamHandler",
                    "stream": console_stream,
                    "formatter": log_settings.console_formatter_name,
                    "filters": ["task_context_filter"],
                }
            },
            "root": {"handlers": ["console"], "level": log_level},
        }
        if filename:
            setup_log_file(filename)
            config["handlers"]["file"] = {
                "class": "logging.FileHandler",
                "formatter": log_settings.file_formatter_name,
                "filename": filename,
                "encoding": "utf-8",
            }
            config["root"]["handlers"].append("file")

        # Per-logger level overrides from at_warn / at_debug.
        loggers = config.setdefault("loggers", {})
        for logger_warn in log_settings.at_warn:
            loggers[logger_warn] = {"level": logging.WARNING, "propagate": True}
        for logger_debug in log_settings.at_debug:
            loggers[logger_debug] = {"level": logging.DEBUG, "propagate": True}

        if log_settings.sqlalchemy_print:
            loggers["sqlalchemy.engine"] = {"level": logging.INFO, "propagate": True}

        self.log_debug("Log config: %s", config)
        return config

    def configure_dbnd_logging(self):
        """Apply the databand logging configuration to the running process."""
        if self.disabled:
            self.log_debug("Log is disabled, skipping configure_dbnd_logging")
            return

        # start by trying to initiate Sentry setup - has side effect of changing the logging config
        self.log_debug("Initialize Sentry setup")
        try_init_sentry()

        if self.disable_colors:
            self.log_debug("Colors are disabled")
            self.disable_color_logs()

        dict_config = self.get_dbnd_logging_config(filename=self.file_log)

        airflow_task_log_handler = None
        if self.override_airflow_logging_on_task_run:
            airflow_task_log_handler = self.dbnd_override_airflow_logging_on_task_run()
        try:
            self.log_debug("configure_logging_dictConfig: %s", dict_config)
            configure_logging_dictConfig(dict_config=dict_config)
        except Exception as e:
            # we print it this way, as it could be that now "logging" is down!
            print(
                "Failed to load reload logging configuration with dbnd settings! Exception: %s"
                % (e,),
                file=sys.__stderr__,
            )
            raise
        if airflow_task_log_handler:
            self.log_debug("logging.root.handlers.append(airflow_task_log_handler)")
            logging.root.handlers.append(airflow_task_log_handler)
        self.log_debug("Databand logging is up!")

    def dbnd_override_airflow_logging_on_task_run(self):
        """Detach airflow's stdout/stderr redirection and re-home its task handler.

        Returns the airflow ``task`` file handler (to be re-attached to root
        after dictConfig runs), or None when there is nothing to override.
        """
        # EXISTING STATE:
        # root logger use Console handler -> prints to current sys.stdout
        # on `airflow run` without interactive -> we have `redirect_stderr` applied that will redirect sys.stdout
        # into logger `airflow.task`, that will save everything into file.
        # EVERY output of root logger will go through CONSOLE handler into AIRFLOW.TASK without being printed to screen
        self.log_debug("dbnd_override_airflow_logging_on_task_run")

        # sys.stderr is only a StreamLogWriter while airflow's redirection is active.
        if not sys.stderr or not _safe_is_typeof(sys.stderr, "StreamLogWriter"):
            self.log_debug(
                "Airflow logging is already replaced by dbnd stream log writer! sys.stderr=%s",
                sys.stderr,
            )
            return

        # NEW STATE
        # we will move airflow.task file handler to root level
        # we will set propogate
        # we will stop redirect of airflow logging

        # this will disable stdout ,stderr redirection
        sys.stderr = sys.__stderr__
        sys.stdout = sys.__stdout__

        airflow_root_console_handler = find_handler(logging.root, "console")
        self.log_debug("airflow_root_console_handler:%s", airflow_root_console_handler)
        if _safe_is_typeof(airflow_root_console_handler, "RedirectStdHandler"):
            # we are removing this console logger
            # this is the logger that capable to create self loop
            # as it writes to "latest" sys.stdout,
            # if you have stdout redirection into any of loggers, that will propogate into root
            # you get very busy message loop that is really hard to debug
            self.log_debug("airflow_root_console_handler has been removed")
            logging.root.handlers.remove(airflow_root_console_handler)

        airflow_task_logger = logging.getLogger("airflow.task")
        self.log_debug("airflow_task_logger: %s", airflow_task_logger)
        airflow_task_log_handler = find_handler(airflow_task_logger, "task")
        if airflow_task_log_handler:
            self.log_debug("airflow_task_log_handler: %s", airflow_task_log_handler)
            logging.root.handlers.append(airflow_task_log_handler)
            airflow_task_logger.propagate = True
            airflow_task_logger.handlers = []
        self.log_debug(
            "dbnd_override_airflow_logging_on_task_run logging.root: %s", logging.root
        )
        return airflow_task_log_handler

    def get_task_log_file_handler(self, log_file):
        """Create a UTF-8 FileHandler for ``log_file`` using the configured file formatter.

        The formatter is built once (from the dictConfig formatters section)
        and cached on ``self.task_log_file_formatter``.
        """
        if not self.task_log_file_formatter:
            config = self.get_dbnd_logging_config()
            configurator = DictConfigurator(config)
            file_formatter_config = configurator.config.get("formatters").get(
                self.file_formatter_name
            )
            self.task_log_file_formatter = configurator.configure_formatter(
                file_formatter_config
            )

        # "formatter": log_settings.file_formatter,
        log_file = str(log_file)
        setup_log_file(log_file)
        handler = logging.FileHandler(filename=log_file, encoding="utf-8")
        handler.setFormatter(self.task_log_file_formatter)
        handler.setLevel(self.level)
        return handler

    def disable_color_logs(self):
        """ Removes colors from any console related config"""
        logger.debug("disabling color logs")

        os.environ["ANSI_COLORS_DISABLED"] = "True"  # disabling termcolor.colored
        self.exception_no_color = True
        if self.console_formatter_name == "formatter_colorlog":
            self.console_formatter_name = "formatter_simple"

    def log_debug(self, msg, *args):
        """Internal logging-of-the-logging: verbose only when debug_log_config is on."""
        if not self.debug_log_config:
            if not self.disabled:
                # we don't want to print ANYTHING if we are disabled
                logger.debug(msg, *args)
            return
        try:
            # we print to stderr as well in case logging is broken
            print("DEBUG_LOG_CONFIG:" + msg % args, file=sys.__stderr__)
            logger.info("DEBUG_LOG_CONFIG:" + msg, *args)
        except Exception:
            pass
def _safe_is_typeof(value, name):
if not value:
return
return isinstance(value, object) and value.__class__.__name__.endswith(name)
| 40.134615 | 120 | 0.657198 |
528c354d97b0e4ed499c457b639e14a70eae3de8 | 645 | py | Python | chapter-6-rnns/util.py | danitrod/chollet-deep-learning | c314f6201e99b560b8133d9fb6fd6f47d224b2fb | [
"MIT"
] | null | null | null | chapter-6-rnns/util.py | danitrod/chollet-deep-learning | c314f6201e99b560b8133d9fb6fd6f47d224b2fb | [
"MIT"
] | null | null | null | chapter-6-rnns/util.py | danitrod/chollet-deep-learning | c314f6201e99b560b8133d9fb6fd6f47d224b2fb | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
def plot_history(hist):
epochs = np.array(hist.epoch) + 1
plt.plot(epochs, hist.history['loss'], 'yo', label='Training loss')
plt.plot(epochs, hist.history['val_loss'], 'r--', label='Validatoin loss')
plt.xticks(epochs)
plt.xlabel = 'Epoch'
plt.title('Loss')
plt.legend()
plt.show()
plt.plot(epochs, hist.history['acc'], 'yo', label='Training accuracy')
plt.plot(epochs, hist.history['val_acc'],
'r--', label='Validation accuracy')
plt.xticks(epochs)
plt.xlabel = 'Epoch'
plt.title('Accuracy')
plt.legend()
plt.show()
| 28.043478 | 78 | 0.629457 |
5c9369eb4f662091793a4c8a4ad565703f43ebec | 13,573 | py | Python | pytorch_pretrained_bert/tokenization.py | yuzcccc/pytorch-pretrained-BERT | 6b2136a8a9f70f99f719a10e793e6a7f4ef26565 | [
"Apache-2.0"
] | 5 | 2019-04-06T13:25:55.000Z | 2021-05-04T01:57:50.000Z | pytorch_pretrained_bert/tokenization.py | yuzcccc/pytorch-pretrained-BERT | 6b2136a8a9f70f99f719a10e793e6a7f4ef26565 | [
"Apache-2.0"
] | null | null | null | pytorch_pretrained_bert/tokenization.py | yuzcccc/pytorch-pretrained-BERT | 6b2136a8a9f70f99f719a10e793e6a7f4ef26565 | [
"Apache-2.0"
] | 6 | 2020-01-20T10:42:50.000Z | 2021-05-17T14:08:38.000Z | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HugginFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import unicodedata
import os
import logging
from .file_utils import cached_path
# Module-level logging setup for tokenization utilities.
logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
                    datefmt = '%m/%d/%Y %H:%M:%S',
                    level = logging.INFO)
logger = logging.getLogger(__name__)

# Canonical download URLs for the vocabulary files of Google's pretrained
# BERT checkpoints, keyed by shortcut model name.
PRETRAINED_VOCAB_ARCHIVE_MAP = {
    'bert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt",
    'bert-large-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-vocab.txt",
    'bert-base-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-vocab.txt",
    'bert-base-multilingual': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-vocab.txt",
    'bert-base-chinese': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese-vocab.txt",
}
def convert_to_unicode(text):
    """Return ``text`` as a unicode string, decoding utf-8 bytes if needed."""
    if isinstance(text, bytes):
        # Undecodable byte sequences are silently dropped.
        return text.decode("utf-8", "ignore")
    if isinstance(text, str):
        return text
    raise ValueError("Unsupported string type: %s" % (type(text)))
def printable_text(text):
    """Return text encoded in a way suitable for print or ``tf.logging``."""
    # These functions want `str` for both Python2 and Python3, but in one case
    # it's a Unicode string and in the other it's a byte string.
    if isinstance(text, (str, bytes)):
        return text.decode("utf-8", "ignore") if isinstance(text, bytes) else text
    raise ValueError("Unsupported string type: %s" % (type(text)))
def load_vocab(vocab_file):
    """Loads a vocabulary file into an OrderedDict mapping token -> index.

    Each line of the file is one token; its (0-based) line number becomes its
    id.  Tokens are stripped of surrounding whitespace, matching the original
    readline loop (so a blank line still consumes an index).
    """
    vocab = collections.OrderedDict()
    with open(vocab_file, "r", encoding="utf8") as reader:
        # The file is opened in text mode with an explicit utf-8 encoding, so
        # every line is already `str` -- the original per-line
        # convert_to_unicode() call was a no-op and has been dropped.
        # Iterating the file object stops at EOF exactly like the original
        # readline-until-empty loop; enumerate replaces the manual counter.
        for index, line in enumerate(reader):
            vocab[line.strip()] = index
    return vocab
def whitespace_tokenize(text):
    """Run basic whitespace cleaning and splitting on a piece of text."""
    cleaned = text.strip()
    # str.split() with no argument collapses runs of whitespace; an empty
    # cleaned string yields an empty token list.
    return cleaned.split() if cleaned else []
class BertTokenizer(object):
    """Runs end-to-end tokenization: punctuation splitting + wordpiece"""

    def __init__(self, vocab_file, do_lower_case=True):
        """Load the vocab from ``vocab_file`` and build both sub-tokenizers.

        Raises:
          ValueError: if ``vocab_file`` does not exist on disk.
        """
        if not os.path.isfile(vocab_file):
            raise ValueError(
                "Can't find a vocabulary file at path '{}'. To load the vocabulary from a Google pretrained "
                "model use `tokenizer = BertTokenizer.from_pretrained(PRETRAINED_MODEL_NAME)`".format(vocab_file))
        self.vocab = load_vocab(vocab_file)
        # Reverse mapping id -> token, preserving vocab order.
        self.ids_to_tokens = collections.OrderedDict(
            [(ids, tok) for tok, ids in self.vocab.items()])
        self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case)
        self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)

    def tokenize(self, text):
        """Split ``text`` into basic tokens, then each into wordpieces."""
        split_tokens = []
        for token in self.basic_tokenizer.tokenize(text):
            for sub_token in self.wordpiece_tokenizer.tokenize(token):
                split_tokens.append(sub_token)
        return split_tokens

    def convert_tokens_to_ids(self, tokens):
        """Converts a sequence of tokens into ids using the vocab."""
        ids = []
        for token in tokens:
            ids.append(self.vocab[token])
        return ids

    def convert_ids_to_tokens(self, ids):
        """Converts a sequence of ids in wordpiece tokens using the vocab."""
        tokens = []
        for i in ids:
            tokens.append(self.ids_to_tokens[i])
        return tokens

    @classmethod
    def from_pretrained(cls, pretrained_model_name, do_lower_case=True):
        """
        Instantiate a PreTrainedBertModel from a pre-trained model file.
        Download and cache the pre-trained model file if needed.

        Returns the tokenizer, or None when the vocab file cannot be resolved.
        """
        if pretrained_model_name in PRETRAINED_VOCAB_ARCHIVE_MAP:
            vocab_file = PRETRAINED_VOCAB_ARCHIVE_MAP[pretrained_model_name]
        else:
            # Treat unknown names as a direct path or URL to a vocab file.
            vocab_file = pretrained_model_name
        # redirect to the cache, if necessary
        try:
            resolved_vocab_file = cached_path(vocab_file)
            if resolved_vocab_file == vocab_file:
                logger.info("loading vocabulary file {}".format(vocab_file))
            else:
                logger.info("loading vocabulary file {} from cache at {}".format(
                    vocab_file, resolved_vocab_file))
            # Instantiate tokenizer.
            tokenizer = cls(resolved_vocab_file, do_lower_case)
        except FileNotFoundError:
            logger.error(
                "Model name '{}' was not found in model name list ({}). "
                "We assumed '{}' was a path or url but couldn't find any file "
                "associated to this path or url.".format(
                    pretrained_model_name,
                    ', '.join(PRETRAINED_VOCAB_ARCHIVE_MAP.keys()),
                    pretrained_model_name))
            tokenizer = None
        return tokenizer
class BasicTokenizer(object):
    """Runs basic tokenization (punctuation splitting, lower casing, etc.)."""

    def __init__(self, do_lower_case=True):
        """Constructs a BasicTokenizer.

        Args:
          do_lower_case: Whether to lower case the input.
        """
        self.do_lower_case = do_lower_case

    def tokenize(self, text):
        """Tokenizes a piece of text.

        Pipeline: clean control chars -> isolate CJK chars -> whitespace split
        -> optional lowercasing + accent stripping -> punctuation split.
        """
        text = convert_to_unicode(text)
        text = self._clean_text(text)
        # This was added on November 1st, 2018 for the multilingual and Chinese
        # models. This is also applied to the English models now, but it doesn't
        # matter since the English models were not trained on any Chinese data
        # and generally don't have any Chinese data in them (there are Chinese
        # characters in the vocabulary because Wikipedia does have some Chinese
        # words in the English Wikipedia.).
        text = self._tokenize_chinese_chars(text)
        orig_tokens = whitespace_tokenize(text)
        split_tokens = []
        for token in orig_tokens:
            if self.do_lower_case:
                token = token.lower()
                token = self._run_strip_accents(token)
            split_tokens.extend(self._run_split_on_punc(token))

        output_tokens = whitespace_tokenize(" ".join(split_tokens))
        return output_tokens

    def _run_strip_accents(self, text):
        """Strips accents from a piece of text."""
        # NFD decomposition separates base characters from combining marks
        # (category "Mn"), which are then dropped.
        text = unicodedata.normalize("NFD", text)
        output = []
        for char in text:
            cat = unicodedata.category(char)
            if cat == "Mn":
                continue
            output.append(char)
        return "".join(output)

    def _run_split_on_punc(self, text):
        """Splits punctuation on a piece of text.

        Returns a list of substrings where every punctuation character is its
        own element, e.g. "a,b" -> ["a", ",", "b"].
        """
        chars = list(text)
        i = 0
        start_new_word = True
        output = []  # list of character lists, joined at the end
        while i < len(chars):
            char = chars[i]
            if _is_punctuation(char):
                output.append([char])
                start_new_word = True
            else:
                if start_new_word:
                    output.append([])
                start_new_word = False
                output[-1].append(char)
            i += 1

        return ["".join(x) for x in output]

    def _tokenize_chinese_chars(self, text):
        """Adds whitespace around any CJK character."""
        output = []
        for char in text:
            cp = ord(char)
            if self._is_chinese_char(cp):
                output.append(" ")
                output.append(char)
                output.append(" ")
            else:
                output.append(char)
        return "".join(output)

    def _is_chinese_char(self, cp):
        """Checks whether CP is the codepoint of a CJK character."""
        # This defines a "chinese character" as anything in the CJK Unicode block:
        #   https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block)
        #
        # Note that the CJK Unicode block is NOT all Japanese and Korean characters,
        # despite its name. The modern Korean Hangul alphabet is a different block,
        # as is Japanese Hiragana and Katakana. Those alphabets are used to write
        # space-separated words, so they are not treated specially and handled
        # like the all of the other languages.
        if ((cp >= 0x4E00 and cp <= 0x9FFF) or  #
                (cp >= 0x3400 and cp <= 0x4DBF) or  #
                (cp >= 0x20000 and cp <= 0x2A6DF) or  #
                (cp >= 0x2A700 and cp <= 0x2B73F) or  #
                (cp >= 0x2B740 and cp <= 0x2B81F) or  #
                (cp >= 0x2B820 and cp <= 0x2CEAF) or
                (cp >= 0xF900 and cp <= 0xFAFF) or  #
                (cp >= 0x2F800 and cp <= 0x2FA1F)):  #
            return True

        return False

    def _clean_text(self, text):
        """Performs invalid character removal and whitespace cleanup on text."""
        output = []
        for char in text:
            cp = ord(char)
            # Drop NUL, the Unicode replacement character, and control chars;
            # normalize every whitespace character to a plain space.
            if cp == 0 or cp == 0xfffd or _is_control(char):
                continue
            if _is_whitespace(char):
                output.append(" ")
            else:
                output.append(char)
        return "".join(output)
class WordpieceTokenizer(object):
    """Runs WordPiece tokenization."""

    def __init__(self, vocab, unk_token="[UNK]", max_input_chars_per_word=100):
        # vocab: mapping from wordpiece string to id; only membership is used here.
        self.vocab = vocab
        self.unk_token = unk_token
        # Words longer than this are mapped straight to unk_token.
        self.max_input_chars_per_word = max_input_chars_per_word

    def tokenize(self, text):
        """Tokenizes a piece of text into its word pieces.

        This uses a greedy longest-match-first algorithm to perform tokenization
        using the given vocabulary.

        For example:
          input = "unaffable"
          output = ["un", "##aff", "##able"]

        Args:
          text: A single token or whitespace separated tokens. This should have
            already been passed through `BasicTokenizer.

        Returns:
          A list of wordpiece tokens.
        """
        text = convert_to_unicode(text)

        output_tokens = []
        for token in whitespace_tokenize(text):
            chars = list(token)
            if len(chars) > self.max_input_chars_per_word:
                output_tokens.append(self.unk_token)
                continue

            is_bad = False
            start = 0
            sub_tokens = []
            while start < len(chars):
                # Greedily take the longest prefix of chars[start:] that is in
                # the vocab (non-initial pieces are prefixed with "##").
                end = len(chars)
                cur_substr = None
                while start < end:
                    substr = "".join(chars[start:end])
                    if start > 0:
                        substr = "##" + substr
                    if substr in self.vocab:
                        cur_substr = substr
                        break
                    end -= 1
                if cur_substr is None:
                    # No prefix matched at all -> the whole word becomes UNK.
                    is_bad = True
                    break
                sub_tokens.append(cur_substr)
                start = end

            if is_bad:
                output_tokens.append(self.unk_token)
            else:
                output_tokens.extend(sub_tokens)
        return output_tokens
def _is_whitespace(char):
"""Checks whether `chars` is a whitespace character."""
# \t, \n, and \r are technically contorl characters but we treat them
# as whitespace since they are generally considered as such.
if char == " " or char == "\t" or char == "\n" or char == "\r":
return True
cat = unicodedata.category(char)
if cat == "Zs":
return True
return False
def _is_control(char):
"""Checks whether `chars` is a control character."""
# These are technically control characters but we count them as whitespace
# characters.
if char == "\t" or char == "\n" or char == "\r":
return False
cat = unicodedata.category(char)
if cat.startswith("C"):
return True
return False
def _is_punctuation(char):
"""Checks whether `chars` is a punctuation character."""
cp = ord(char)
# We treat all non-letter/number ASCII as punctuation.
# Characters such as "^", "$", and "`" are not in the Unicode
# Punctuation class but we treat them as punctuation anyways, for
# consistency.
if ((cp >= 33 and cp <= 47) or (cp >= 58 and cp <= 64) or
(cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126)):
return True
cat = unicodedata.category(char)
if cat.startswith("P"):
return True
return False
| 36.983651 | 117 | 0.601857 |
f4b9894e18e934d8f571cc1b23e6a349c4a821b0 | 1,756 | py | Python | mani_skill/utils/misc.py | ic/ManiSkill | 1944029ec96a4f927d9db302d2fe62832a25192c | [
"Apache-2.0"
] | 71 | 2021-07-29T02:29:54.000Z | 2022-03-24T23:27:58.000Z | mani_skill/utils/misc.py | ic/ManiSkill | 1944029ec96a4f927d9db302d2fe62832a25192c | [
"Apache-2.0"
] | 25 | 2021-08-03T16:12:07.000Z | 2022-03-30T02:40:41.000Z | mani_skill/utils/misc.py | ic/ManiSkill | 1944029ec96a4f927d9db302d2fe62832a25192c | [
"Apache-2.0"
] | 7 | 2021-08-14T09:59:45.000Z | 2021-11-14T00:57:24.000Z | import numpy as np
def sample_from_tuple_or_scalar(rng, x):
    """Return ``x`` unchanged, or a uniform sample from (low, high) if it is a tuple."""
    if not isinstance(x, tuple):
        return x
    low, high = x[0], x[1]
    return rng.uniform(low=low, high=high)
import pathlib, yaml
def get_model_ids_from_yaml(yaml_file_path):
    """Return the top-level keys (model ids) of a YAML mapping file."""
    resolved_path = pathlib.Path(yaml_file_path).resolve()
    with resolved_path.open("r") as f:
        parsed = yaml.load(f, Loader=yaml.SafeLoader)
    return list(parsed.keys())
def get_raw_yaml(yaml_file_path):
    """Parse a YAML file (safe loader) and return the raw parsed object."""
    resolved_path = pathlib.Path(yaml_file_path).resolve()
    with resolved_path.open("r") as f:
        parsed = yaml.load(f, Loader=yaml.SafeLoader)
    return parsed
def get_actor_state(actor):
    '''
    returns actor state with shape (13, )
    actor_state[:3] = pose p
    actor_state[3:7] = pose q
    actor_state[7:10] = velocity
    actor_state[10:13] = angular velocity
    '''
    pose = actor.get_pose()
    parts = [
        pose.p,                        # position (3,)
        pose.q,                        # orientation quaternion (4,)
        actor.get_velocity(),          # linear velocity (3,)
        actor.get_angular_velocity(),  # angular velocity (3,)
    ]
    return np.concatenate(parts, axis=0)
def get_articulation_state(art):
    """Return (base_pos, base_quat, base_vel, base_ang_vel, qpos, qvel) of an articulation."""
    # The first link is the articulation root; its pose/velocities describe the base.
    base = art.get_links()[0]
    base_pose = base.get_pose()
    return (
        base_pose.p,
        base_pose.q,
        base.get_velocity(),
        base.get_angular_velocity(),
        art.get_qpos(),
        art.get_qvel(),
    )
def get_pad_articulation_state(art, max_dof):
    """Return the articulation state with qpos/qvel zero-padded to ``max_dof`` each.

    Layout: [base_pos(3), base_quat(4), base_vel(3), base_ang_vel(3),
             qpos padded to max_dof, qvel padded to max_dof].
    """
    base_pos, base_quat, base_vel, base_ang_vel, qpos, qvel = get_articulation_state(art)
    dof = len(qpos)
    padded_joints = np.zeros(2 * max_dof)
    padded_joints[:dof] = qpos
    padded_joints[max_dof:max_dof + dof] = qvel
    return np.concatenate([base_pos, base_quat, base_vel, base_ang_vel, padded_joints])
d1fd4cf9d39dee83907a1275853f007fa20d58e5 | 19,193 | py | Python | mmdet/models/backbones/seresnet.py | greathope/NDL-image-detection-3rd-solution | 0cb13e38be0a00d7ab4ceed60dac40d60a56a4dd | [
"MIT"
] | 2 | 2020-03-21T06:47:32.000Z | 2021-11-12T07:50:21.000Z | mmdet/models/backbones/seresnet.py | greathope/NDL-image-detection-3rd-solution | 0cb13e38be0a00d7ab4ceed60dac40d60a56a4dd | [
"MIT"
] | 4 | 2021-03-19T11:32:25.000Z | 2022-03-12T00:19:27.000Z | mmdet/models/backbones/seresnet.py | greathope/NDL-image-detection-3rd-solution | 0cb13e38be0a00d7ab4ceed60dac40d60a56a4dd | [
"MIT"
] | 1 | 2020-03-25T02:29:36.000Z | 2020-03-25T02:29:36.000Z | import torch.nn as nn
import torch.utils.checkpoint as cp
from mmcv.cnn import constant_init, kaiming_init
from mmcv.runner import load_checkpoint
from torch.nn.modules.batchnorm import _BatchNorm
from mmdet.models.plugins import GeneralizedAttention
from mmdet.ops import ContextBlock, DeformConv, ModulatedDeformConv
from ..registry import BACKBONES
from ..utils import build_conv_layer, build_norm_layer
class SEModule(nn.Module):
    """Squeeze-and-Excitation channel attention.

    Globally average-pools the input, passes it through a two-layer 1x1-conv
    bottleneck (reduction ratio ``reduction``) with ReLU + sigmoid, and scales
    the input channel-wise by the resulting gate.
    """

    def __init__(self, channels, reduction):
        super(SEModule, self).__init__()
        squeezed_channels = channels // reduction
        # Submodule names match the reference implementation so pretrained
        # state dicts load unchanged.
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Conv2d(channels, squeezed_channels, kernel_size=1,
                             padding=0)
        self.relu = nn.ReLU(inplace=True)
        self.fc2 = nn.Conv2d(squeezed_channels, channels, kernel_size=1,
                             padding=0)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        # Squeeze: (N, C, H, W) -> (N, C, 1, 1), then excite to a per-channel gate.
        gate = self.avg_pool(x)
        gate = self.relu(self.fc1(gate))
        gate = self.sigmoid(self.fc2(gate))
        # Broadcast-multiply the gate back onto the input.
        return x * gate
class BasicBlock(nn.Module):
    """ResNet basic residual block: two 3x3 convs with a skip connection."""

    # Output channels = planes * expansion (1 for the basic block).
    expansion = 1

    def __init__(self,
                 inplanes,
                 planes,
                 stride=1,
                 dilation=1,
                 downsample=None,
                 style='pytorch',
                 with_cp=False,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN'),
                 dcn=None,
                 gcb=None,
                 gen_attention=None):
        super(BasicBlock, self).__init__()
        # DCN / GCB / generalized attention are only supported by Bottleneck.
        assert dcn is None, "Not implemented yet."
        assert gen_attention is None, "Not implemented yet."
        assert gcb is None, "Not implemented yet."

        # Norm layers get unique attribute names (e.g. "bn1", "bn2") so they
        # are registered via add_module and fetched through the properties below.
        self.norm1_name, norm1 = build_norm_layer(norm_cfg, planes, postfix=1)
        self.norm2_name, norm2 = build_norm_layer(norm_cfg, planes, postfix=2)

        self.conv1 = build_conv_layer(
            conv_cfg,
            inplanes,
            planes,
            3,
            stride=stride,
            padding=dilation,
            dilation=dilation,
            bias=False)
        self.add_module(self.norm1_name, norm1)
        self.conv2 = build_conv_layer(
            conv_cfg, planes, planes, 3, padding=1, bias=False)
        self.add_module(self.norm2_name, norm2)

        self.relu = nn.ReLU(inplace=True)
        # Optional projection applied to the identity path when the shapes
        # of input and output differ.
        self.downsample = downsample
        self.stride = stride
        self.dilation = dilation
        # Gradient checkpointing is not supported by the basic block.
        assert not with_cp

    @property
    def norm1(self):
        # Resolve the dynamically-named first norm layer.
        return getattr(self, self.norm1_name)

    @property
    def norm2(self):
        # Resolve the dynamically-named second norm layer.
        return getattr(self, self.norm2_name)

    def forward(self, x):
        """conv-norm-relu -> conv-norm, add the (possibly downsampled) identity, relu."""
        identity = x

        out = self.conv1(x)
        out = self.norm1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.norm2(out)

        if self.downsample is not None:
            identity = self.downsample(x)

        out += identity
        out = self.relu(out)

        return out
class Bottleneck(nn.Module):
    """1x1 -> 3x3 -> 1x1 bottleneck residual block with an SE module.

    Optionally replaces the 3x3 conv with a (modulated) deformable conv
    when `dcn` is given, appends a global-context block (`gcb`) and a
    generalized-attention block (`gen_attention`), and supports gradient
    checkpointing via `with_cp`.
    """
    # Output channels = planes * expansion.
    expansion = 4
    def __init__(self,
                 inplanes,
                 planes,
                 stride=1,
                 dilation=1,
                 downsample=None,
                 style='pytorch',
                 with_cp=False,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN'),
                 dcn=None,
                 gcb=None,
                 gen_attention=None):
        """Bottleneck block for ResNet.
        If style is "pytorch", the stride-two layer is the 3x3 conv layer,
        if it is "caffe", the stride-two layer is the first 1x1 conv layer.
        """
        super(Bottleneck, self).__init__()
        assert style in ['pytorch', 'caffe']
        assert dcn is None or isinstance(dcn, dict)
        assert gcb is None or isinstance(gcb, dict)
        assert gen_attention is None or isinstance(gen_attention, dict)
        self.inplanes = inplanes
        self.planes = planes
        self.stride = stride
        self.dilation = dilation
        self.style = style
        self.with_cp = with_cp
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg
        self.dcn = dcn
        self.with_dcn = dcn is not None
        self.gcb = gcb
        self.with_gcb = gcb is not None
        self.gen_attention = gen_attention
        self.with_gen_attention = gen_attention is not None
        # Place the stride on the 3x3 ("pytorch") or first 1x1 ("caffe").
        if self.style == 'pytorch':
            self.conv1_stride = 1
            self.conv2_stride = stride
        else:
            self.conv1_stride = stride
            self.conv2_stride = 1
        self.norm1_name, norm1 = build_norm_layer(norm_cfg, planes, postfix=1)
        self.norm2_name, norm2 = build_norm_layer(norm_cfg, planes, postfix=2)
        self.norm3_name, norm3 = build_norm_layer(
            norm_cfg, planes * self.expansion, postfix=3)
        self.conv1 = build_conv_layer(
            conv_cfg,
            inplanes,
            planes,
            kernel_size=1,
            stride=self.conv1_stride,
            bias=False)
        self.add_module(self.norm1_name, norm1)
        fallback_on_stride = False
        self.with_modulated_dcn = False
        if self.with_dcn:
            fallback_on_stride = dcn.get('fallback_on_stride', False)
            self.with_modulated_dcn = dcn.get('modulated', False)
        # BUGFIX: remember the fallback decision so forward() can take the
        # plain-conv path.  Previously, with dcn={'fallback_on_stride': True}
        # conv2 was built as a regular conv here, yet forward() still
        # dispatched on `with_dcn` alone and called the never-created
        # `conv2_offset`, crashing with AttributeError.
        self.fallback_on_stride = fallback_on_stride
        if not self.with_dcn or fallback_on_stride:
            self.conv2 = build_conv_layer(
                conv_cfg,
                planes,
                planes,
                kernel_size=3,
                stride=self.conv2_stride,
                padding=dilation,
                dilation=dilation,
                bias=False)
        else:
            assert conv_cfg is None, 'conv_cfg must be None for DCN'
            self.deformable_groups = dcn.get('deformable_groups', 1)
            # Offset channels per group: 2*3*3 for plain DCN; modulated DCN
            # additionally predicts a 3*3 mask (18 + 9 = 27).
            if not self.with_modulated_dcn:
                conv_op = DeformConv
                offset_channels = 18
            else:
                conv_op = ModulatedDeformConv
                offset_channels = 27
            self.conv2_offset = nn.Conv2d(
                planes,
                self.deformable_groups * offset_channels,
                kernel_size=3,
                stride=self.conv2_stride,
                padding=dilation,
                dilation=dilation)
            self.conv2 = conv_op(
                planes,
                planes,
                kernel_size=3,
                stride=self.conv2_stride,
                padding=dilation,
                dilation=dilation,
                deformable_groups=self.deformable_groups,
                bias=False)
        self.add_module(self.norm2_name, norm2)
        self.conv3 = build_conv_layer(
            conv_cfg,
            planes,
            planes * self.expansion,
            kernel_size=1,
            bias=False)
        self.add_module(self.norm3_name, norm3)
        # SE module over the expanded (planes * 4) output channels.
        self.se_module = SEModule(planes * 4, reduction=16)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        if self.with_gcb:
            gcb_inplanes = planes * self.expansion
            self.context_block = ContextBlock(inplanes=gcb_inplanes, **gcb)
        # gen_attention
        if self.with_gen_attention:
            self.gen_attention_block = GeneralizedAttention(
                planes, **gen_attention)
    @property
    def norm1(self):
        # Resolve the generated submodule name back to the module.
        return getattr(self, self.norm1_name)
    @property
    def norm2(self):
        return getattr(self, self.norm2_name)
    @property
    def norm3(self):
        return getattr(self, self.norm3_name)
    def forward(self, x):
        """Run the bottleneck; checkpointed when `with_cp` and grads on."""
        def _inner_forward(x):
            identity = x
            out = self.conv1(x)
            out = self.norm1(out)
            out = self.relu(out)
            # Use the plain conv when DCN is disabled OR when construction
            # fell back to a regular conv (see BUGFIX note in __init__).
            if not self.with_dcn or self.fallback_on_stride:
                out = self.conv2(out)
            elif self.with_modulated_dcn:
                offset_mask = self.conv2_offset(out)
                # First 18*g channels are offsets, last 9*g are the mask.
                offset = offset_mask[:, :18 * self.deformable_groups, :, :]
                mask = offset_mask[:, -9 * self.deformable_groups:, :, :]
                mask = mask.sigmoid()
                out = self.conv2(out, offset, mask)
            else:
                offset = self.conv2_offset(out)
                out = self.conv2(out, offset)
            out = self.norm2(out)
            out = self.relu(out)
            if self.with_gen_attention:
                out = self.gen_attention_block(out)
            out = self.conv3(out)
            out = self.norm3(out)
            if self.with_gcb:
                out = self.context_block(out)
            if self.downsample is not None:
                identity = self.downsample(x)
            # SE rescaling is applied before the residual addition.
            out = self.se_module(out) + identity
            return out
        if self.with_cp and x.requires_grad:
            out = cp.checkpoint(_inner_forward, x)
        else:
            out = _inner_forward(x)
        out = self.relu(out)
        return out
def make_res_layer(block,
                   inplanes,
                   planes,
                   blocks,
                   stride=1,
                   dilation=1,
                   style='pytorch',
                   with_cp=False,
                   conv_cfg=None,
                   norm_cfg=dict(type='BN'),
                   dcn=None,
                   gcb=None,
                   gen_attention=None,
                   gen_attention_blocks=[]):
    """Stack `blocks` residual blocks into one sequential stage.

    Only the first block carries the stage stride and (when shape or
    stride changes) a 1x1-conv + norm downsample for the identity path;
    the remaining blocks run at stride 1 with the expanded channel
    count.  `gen_attention` is attached only to the blocks whose index
    appears in `gen_attention_blocks`.

    NOTE(review): `gen_attention_blocks=[]` is a mutable default; it is
    only read here, never mutated, so it is harmless as written.
    """
    downsample = None
    if stride != 1 or inplanes != planes * block.expansion:
        # The identity path needs a projection whenever spatial size or
        # channel count changes across the stage boundary.
        downsample = nn.Sequential(
            build_conv_layer(
                conv_cfg,
                inplanes,
                planes * block.expansion,
                kernel_size=1,
                stride=stride,
                bias=False),
            build_norm_layer(norm_cfg, planes * block.expansion)[1],
        )
    layers = []
    layers.append(
        block(
            inplanes=inplanes,
            planes=planes,
            stride=stride,
            dilation=dilation,
            downsample=downsample,
            style=style,
            with_cp=with_cp,
            conv_cfg=conv_cfg,
            norm_cfg=norm_cfg,
            dcn=dcn,
            gcb=gcb,
            gen_attention=gen_attention if
            (0 in gen_attention_blocks) else None))
    inplanes = planes * block.expansion
    for i in range(1, blocks):
        layers.append(
            block(
                inplanes=inplanes,
                planes=planes,
                stride=1,
                dilation=dilation,
                style=style,
                with_cp=with_cp,
                conv_cfg=conv_cfg,
                norm_cfg=norm_cfg,
                dcn=dcn,
                gcb=gcb,
                gen_attention=gen_attention if
                (i in gen_attention_blocks) else None))
    return nn.Sequential(*layers)
@BACKBONES.register_module
class SeResNet(nn.Module):
    """SeResNet backbone.
    Args:
        depth (int): Depth of resnet, from {18, 34, 50, 101, 152}.
        in_channels (int): Number of input image channels. Normally 3.
        num_stages (int): Resnet stages, normally 4.
        strides (Sequence[int]): Strides of the first block of each stage.
        dilations (Sequence[int]): Dilation of each stage.
        out_indices (Sequence[int]): Output from which stages.
        style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two
            layer is the 3x3 conv layer, otherwise the stride-two layer is
            the first 1x1 conv layer.
        frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
            -1 means not freezing any parameters.
        norm_cfg (dict): dictionary to construct and config norm layer.
        norm_eval (bool): Whether to set norm layers to eval mode, namely,
            freeze running stats (mean and var). Note: Effect on Batch Norm
            and its variants only.
        with_cp (bool): Use checkpoint or not. Using checkpoint will save some
            memory while slowing down the training speed.
        zero_init_residual (bool): whether to use zero init for last norm layer
            in resblocks to let them behave as identity.
    Example:
        >>> from mmdet.models import SeResNet
        >>> import torch
        >>> self = SeResNet(depth=18)
        >>> self.eval()
        >>> inputs = torch.rand(1, 3, 32, 32)
        >>> level_outputs = self.forward(inputs)
        >>> for level_out in level_outputs:
        ...     print(tuple(level_out.shape))
        (1, 64, 8, 8)
        (1, 128, 4, 4)
        (1, 256, 2, 2)
        (1, 512, 1, 1)
    """
    # depth -> (block class, blocks per stage)
    arch_settings = {
        18: (BasicBlock, (2, 2, 2, 2)),
        34: (BasicBlock, (3, 4, 6, 3)),
        50: (Bottleneck, (3, 4, 6, 3)),
        101: (Bottleneck, (3, 4, 23, 3)),
        152: (Bottleneck, (3, 8, 36, 3))
    }
    def __init__(self,
                 depth,
                 in_channels=3,
                 num_stages=4,
                 strides=(1, 2, 2, 2),
                 dilations=(1, 1, 1, 1),
                 out_indices=(0, 1, 2, 3),
                 style='pytorch',
                 frozen_stages=-1,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN', requires_grad=True),
                 norm_eval=True,
                 dcn=None,
                 stage_with_dcn=(False, False, False, False),
                 gcb=None,
                 stage_with_gcb=(False, False, False, False),
                 gen_attention=None,
                 stage_with_gen_attention=((), (), (), ()),
                 with_cp=False,
                 zero_init_residual=True):
        super(SeResNet, self).__init__()
        if depth not in self.arch_settings:
            raise KeyError('invalid depth {} for resnet'.format(depth))
        self.depth = depth
        self.num_stages = num_stages
        assert num_stages >= 1 and num_stages <= 4
        self.strides = strides
        self.dilations = dilations
        assert len(strides) == len(dilations) == num_stages
        self.out_indices = out_indices
        assert max(out_indices) < num_stages
        self.style = style
        self.frozen_stages = frozen_stages
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg
        self.with_cp = with_cp
        self.norm_eval = norm_eval
        self.dcn = dcn
        self.stage_with_dcn = stage_with_dcn
        if dcn is not None:
            assert len(stage_with_dcn) == num_stages
        self.gen_attention = gen_attention
        self.gcb = gcb
        self.stage_with_gcb = stage_with_gcb
        if gcb is not None:
            assert len(stage_with_gcb) == num_stages
        self.zero_init_residual = zero_init_residual
        self.block, stage_blocks = self.arch_settings[depth]
        self.stage_blocks = stage_blocks[:num_stages]
        self.inplanes = 64
        self._make_stem_layer(in_channels)
        # Build the residual stages; each doubles the channel width.
        self.res_layers = []
        for i, num_blocks in enumerate(self.stage_blocks):
            stride = strides[i]
            dilation = dilations[i]
            # DCN/GCB are enabled per stage via the stage_with_* flags.
            dcn = self.dcn if self.stage_with_dcn[i] else None
            gcb = self.gcb if self.stage_with_gcb[i] else None
            planes = 64 * 2**i
            res_layer = make_res_layer(
                self.block,
                self.inplanes,
                planes,
                num_blocks,
                stride=stride,
                dilation=dilation,
                style=self.style,
                with_cp=with_cp,
                conv_cfg=conv_cfg,
                norm_cfg=norm_cfg,
                dcn=dcn,
                gcb=gcb,
                gen_attention=gen_attention,
                gen_attention_blocks=stage_with_gen_attention[i])
            self.inplanes = planes * self.block.expansion
            # Register under 'layer1'..'layer4' for checkpoint-compatible
            # parameter names.
            layer_name = 'layer{}'.format(i + 1)
            self.add_module(layer_name, res_layer)
            self.res_layers.append(layer_name)
        self._freeze_stages()
        # Channel count of the deepest stage's output.
        self.feat_dim = self.block.expansion * 64 * 2**(
            len(self.stage_blocks) - 1)
    @property
    def norm1(self):
        # Resolve the generated stem-norm name back to the module.
        return getattr(self, self.norm1_name)
    def _make_stem_layer(self, in_channels):
        """Build the 7x7/stride-2 stem conv, norm, ReLU and 3x3 max-pool."""
        self.conv1 = build_conv_layer(
            self.conv_cfg,
            in_channels,
            64,
            kernel_size=7,
            stride=2,
            padding=3,
            bias=False)
        self.norm1_name, norm1 = build_norm_layer(self.norm_cfg, 64, postfix=1)
        self.add_module(self.norm1_name, norm1)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
    def _freeze_stages(self):
        """Freeze the stem and the first `frozen_stages` residual stages
        (eval mode + requires_grad=False); -1 freezes nothing."""
        if self.frozen_stages >= 0:
            self.norm1.eval()
            for m in [self.conv1, self.norm1]:
                for param in m.parameters():
                    param.requires_grad = False
        for i in range(1, self.frozen_stages + 1):
            m = getattr(self, 'layer{}'.format(i))
            m.eval()
            for param in m.parameters():
                param.requires_grad = False
    def init_weights(self, pretrained=None):
        """Kaiming-init convs and constant-init norms; then optionally
        load `pretrained` (a checkpoint path) non-strictly.

        With random init, DCN offset convs are zero-initialized and,
        when `zero_init_residual`, the last norm of each block is zeroed
        so the block starts as an identity mapping.
        """
        if isinstance(pretrained, str):
            # Random init first so parameters missing from the checkpoint
            # (strict=False) still end up sensibly initialized.
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
                    constant_init(m, 1)
            from mmdet.apis import get_root_logger
            logger = get_root_logger()
            load_checkpoint(self, pretrained, strict=False, logger=logger)
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
                    constant_init(m, 1)
            if self.dcn is not None:
                for m in self.modules():
                    if isinstance(m, Bottleneck) and hasattr(
                            m, 'conv2_offset'):
                        constant_init(m.conv2_offset, 0)
            if self.zero_init_residual:
                for m in self.modules():
                    if isinstance(m, Bottleneck):
                        constant_init(m.norm3, 0)
                    elif isinstance(m, BasicBlock):
                        constant_init(m.norm2, 0)
        else:
            raise TypeError('pretrained must be a str or None')
    def forward(self, x):
        """Return a tuple of feature maps from the stages in `out_indices`."""
        x = self.conv1(x)
        x = self.norm1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        outs = []
        for i, layer_name in enumerate(self.res_layers):
            res_layer = getattr(self, layer_name)
            x = res_layer(x)
            if i in self.out_indices:
                outs.append(x)
        return tuple(outs)
    def train(self, mode=True):
        """Switch train/eval mode, re-freezing stages and (if `norm_eval`)
        keeping all BatchNorm layers in eval mode during training."""
        super(SeResNet, self).train(mode)
        self._freeze_stages()
        if mode and self.norm_eval:
            for m in self.modules():
                # trick: eval have effect on BatchNorm only
                if isinstance(m, _BatchNorm):
                    m.eval()
| 33.554196 | 79 | 0.53853 |
d8295008a0a009f192ca6ed8fc3bead67945fbae | 24,869 | py | Python | da/transport/sock.py | shalakhansidmul/distalgo | 47398f16a0bc287966fe0427574c4b2b4d753b7f | [
"MIT"
] | null | null | null | da/transport/sock.py | shalakhansidmul/distalgo | 47398f16a0bc287966fe0427574c4b2b4d753b7f | [
"MIT"
] | null | null | null | da/transport/sock.py | shalakhansidmul/distalgo | 47398f16a0bc287966fe0427574c4b2b4d753b7f | [
"MIT"
] | null | null | null | # Copyright (c) 2010-2017 Bo Lin
# Copyright (c) 2010-2017 Yanhong Annie Liu
# Copyright (c) 2010-2017 Stony Brook University
# Copyright (c) 2010-2017 The Research Foundation of SUNY
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import sys
import time
import random
import socket
import logging
import threading
from .base import *
from .manager import transport
from .mesgloop import SelectorLoop
from ..common import VERSION_BYTES, get_runtime_option
__all__ = [
"SocketTransport", "UdpTransport", "TcpTransport",
"HEADER_SIZE", "BYTEORDER"
]
# Module-level logger; transports derive per-class child loggers from it.
logger = logging.getLogger(__name__)
# Bytes of the length header prefixed to every TCP payload.
HEADER_SIZE = 8
# Bytes of a serialized port number (see `SocketTransport.address_bytes`).
ADDRESS_SIZE = 4
# Byte order for all integers on the wire.
BYTEORDER = 'big'
# Default number of bind/send attempts before giving up.
MAX_RETRIES = 3
# Default receive buffer size; overridable with the 'message_buffer_size'
# runtime option.
DEFAULT_MESSAGE_BUFFER_SIZE = (4 * 1024)
# Default range for randomly chosen listen ports ('min_port'/'max_port'
# runtime options).
DEFAULT_MIN_PORT = 10000
DEFAULT_MAX_PORT = 65535
class SocketTransport(Transport):
    """Base class for socket-based transports.

    Owns the socket (`self.conn`), the local port it is bound to, and
    the message loop that services it; concrete UDP/TCP subclasses
    create the socket and implement send/receive.
    """
    capabilities = ~(ChannelCaps.INTERHOST)
    def __init__(self, authkey):
        super().__init__(authkey)
        self._log = logger.getChild(self.__class__.__name__)
        self.port = None
        self._port_bytes = None   # lazily-cached big-endian port bytes
        self.conn = None          # the underlying socket object
        self.mesgloop = None      # SelectorLoop servicing this socket
        self.shared_loop = False  # True when mesgloop was supplied by caller
        self.buffer_size = 0
    def initialize(self, port=None, strict=False, linear=False,
                   retries=MAX_RETRIES, **rest):
        """Bind `self.conn` to a local address.

        If the socket was inherited from a parent process it is already
        bound and is left alone.  Otherwise bind to `port`; unless
        `strict`, a port is chosen at random (or by incrementing, when
        `linear`) with up to `retries` attempts.

        Raises NoAvailablePortsException when `strict` but no port was
        given, and BindingException when binding ultimately fails.
        """
        super().initialize(**rest)
        self.buffer_size = get_runtime_option('message_buffer_size',
                                              DEFAULT_MESSAGE_BUFFER_SIZE)
        min_port = get_runtime_option('min_port', DEFAULT_MIN_PORT)
        max_port = get_runtime_option('max_port', DEFAULT_MAX_PORT)
        assert self.conn is not None
        try:
            _, bound_port = self.conn.getsockname()
            if bound_port != 0:
                # We've already inherited the socket from the parent
                self.port = bound_port
                return
        except OSError as e:
            # This is what we get on Windows if we call `getsockname()` on an
            # unbound socket...
            pass
        self.port = port
        if self.port is None:
            if not strict:
                self.port = random.randint(min_port, max_port)
            else:
                raise NoAvailablePortsException("Port number not specified!")
        address = None
        retry = 1
        while True:
            address = (self.hostname, self.port)
            try:
                self.conn.bind(address)
                break
            except socket.error as e:
                address = None
                if not strict and retry < retries:
                    # Pick the next candidate port and try again.
                    if linear:
                        self.port += 1
                    else:
                        self.port = random.randint(min_port, max_port)
                    retry += 1
                else:
                    raise BindingException(
                        "Failed to bind to an available port.") from e
        self._log.debug("Transport initialized at address %s", address)
    def serialize(self, pipe, pid):
        """Pass the socket's OS handle to process `pid` over `pipe`."""
        from multiprocessing.reduction import send_handle
        send_handle(pipe, self.conn.fileno(), pid)
    def start(self, queue, mesgloop=None):
        """Start delivering incoming messages into `queue`, creating a
        private SelectorLoop when no shared `mesgloop` is given."""
        if self.conn is None:
            raise InvalidTransportStateException(
                "Transport has not been initialized!")
        self.queue = queue
        self.mesgloop = mesgloop
        if self.mesgloop is None:
            self.mesgloop = SelectorLoop()
            self.shared_loop = False
        else:
            self.shared_loop = True
        self.mesgloop.start()
    def close(self):
        """Tear down the socket; stops a private message loop but only
        deregisters from a shared one."""
        if self.conn is None:
            self._log.debug("Already stopped.")
        else:
            if self.mesgloop:
                if not self.shared_loop:
                    self.mesgloop.stop()
                else:
                    self.mesgloop.deregister(self.conn)
            try:
                self.conn.close()
            except OSError:
                pass
            finally:
                self.conn = None
            self.queue = None
            self._log.debug("Transport stopped.")
    @property
    def address(self):
        # The transport's address is just its bound port number.
        return self.port
    @property
    def address_bytes(self):
        # Wire form of the port, cached after first use.
        if self._port_bytes is None:
            self._port_bytes = int(self.port).to_bytes(ADDRESS_SIZE, BYTEORDER)
        return self._port_bytes
    @property
    def started(self):
        # `start()` sets `queue`; `close()` clears it.
        return self.queue is not None
    def __str__(self):
        fmt = "<{0.__class__.__qualname__}({0.hostname}:{0.port})>"
        return fmt.format(self)
# UDP Implementation:
# Length of an MD5 HMAC digest; every UDP packet carries one (or the
# all-'0' placeholder below when no authkey is configured).
DIGEST_LENGTH = 16
DIGEST_HOLDER = b'0' * DIGEST_LENGTH
@transport
class UdpTransport(SocketTransport):
    """A channel that supports sending and receiving messages via UDP.

    Wire format of each datagram:
        [4-byte version][16-byte HMAC-MD5 digest or placeholder][payload]
    """
    capabilities = ~(ChannelCaps.INTERHOST)
    # Payload starts after the version bytes and the digest field.
    data_offset = 4 + DIGEST_LENGTH
    def __init__(self, authkey):
        super().__init__(authkey)
    def initialize(self, strict=False, pipe=None, **params):
        """Create (or inherit via `pipe`) the UDP socket and bind it."""
        try:
            if pipe is None:
                self.conn = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                # Allow child processes to inherit this socket.
                self.conn.set_inheritable(True)
                if strict:
                    self.conn.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            else:
                from multiprocessing.reduction import recv_handle
                self.conn = socket.fromfd(recv_handle(pipe),
                                          socket.AF_INET, socket.SOCK_DGRAM)
            super().initialize(strict=strict, **params)
        except Exception as e:
            # Don't leak a half-initialized socket.
            if self.conn is not None:
                self.conn.close()
                self.conn = None
            raise e
    def start(self, queue, mesgloop=None):
        """Register the datagram receive callback with the message loop."""
        super().start(queue, mesgloop)
        assert self.mesgloop is not None
        self.mesgloop.register(self.conn, self._recvmesg1)
        self._log.debug("Transport started.")
    def _packet_from(self, chunk):
        """Build the (version, digest, payload) tuple for one datagram."""
        if self.authkey is not None:
            import hmac
            digest = hmac.new(self.authkey, chunk, 'md5').digest()
            return (VERSION_BYTES, digest, chunk)
        else:
            # No key configured: send the fixed placeholder digest.
            return (VERSION_BYTES, DIGEST_HOLDER, chunk)
    def _verify_packet(self, chunk, addr):
        """Check version and HMAC of an incoming datagram; raise on failure."""
        if chunk[:4] != VERSION_BYTES:
            raise VersionMismatchException("wrong version: {}".format(chunk[:4]))
        if self.authkey is not None:
            with memoryview(chunk)[self.data_offset:] as data:
                import hmac
                digest = hmac.new(self.authkey, data, 'md5').digest()
                if digest != chunk[4:self.data_offset]:
                    raise AuthenticationException(
                        "wrong digest from {}: {}"
                        .format(addr, chunk[4:self.data_offset]))
        else:
            # We run keyless, so the peer must have sent the placeholder.
            if chunk[4:self.data_offset] != DIGEST_HOLDER:
                raise AuthenticationException('{} requires a cookie.'
                                              .format(addr))
    def _sendmsg_nix(self, packet, target):
        # POSIX: scatter-gather send avoids concatenating the tuple parts.
        packet_size = sum(len(e) for e in packet)
        return self.conn.sendmsg(packet, [], 0, target) == packet_size
    def _sendmsg_nt(self, packet, target):
        # Windows has no sendmsg; flatten the parts and use sendto.
        from itertools import chain
        buf = bytes(chain(*packet))
        return self.conn.sendto(buf, target) == len(buf)
    # Pick the platform-appropriate send implementation at class-build time.
    if sys.platform == 'win32':
        _sendmsg = _sendmsg_nt
    else:
        _sendmsg = _sendmsg_nix
    def send(self, chunk, target, wait=0.01, retries=MAX_RETRIES, **_):
        """Send one datagram to `target`, retrying briefly when the OS
        throttles us; raises on oversize payloads or final failure."""
        if self.conn is None:
            raise InvalidTransportStateException(
                "Invalid transport state for sending.")
        if len(chunk) > self.buffer_size:
            # Receivers truncate anything larger, so refuse to send it.
            self._log.warning("Data size exceeded maximum buffer size!"
                              " Outgoing packet dropped.")
            self._log.debug("Dropped packet: %s", chunk)
            raise PacketSizeExceededException()
        else:
            if target is None:
                raise NoTargetTransportException()
            packet = self._packet_from(chunk)
            cnt = 0
            while True:
                try:
                    if self._sendmsg(packet, target):
                        return
                    else:
                        raise TransportException("Unable to send full chunk.")
                except PermissionError as e:
                    # The 'conntrack' module of iptables will cause UDP `sendto`
                    # to return `EPERM` if it's sending too fast:
                    self._log.debug("Packet to %s dropped by kernel, "
                                    "reduce send rate.", target)
                    cnt += 1
                    if cnt >= retries:
                        raise TransportException("Packet blocked by OS.") from e
                    else:
                        time.sleep(wait)
    def _recvmsg_nt(self):
        # Windows: no recvmsg; emulate its 4-tuple return with flags=0.
        chunk, remote = self.conn.recvfrom(self.buffer_size)
        return chunk, None, 0, remote
    def _recvmsg_nix(self):
        return self.conn.recvmsg(self.buffer_size)
    # Pick the platform receive implementation; platforms lacking
    # MSG_ERRQUEUE get a zero constant so the flag test below is a no-op.
    if sys.platform == 'win32':
        _recvmsg = _recvmsg_nt
        socket.MSG_ERRQUEUE = 0
    elif sys.platform == 'darwin':
        _recvmsg = _recvmsg_nix
        socket.MSG_ERRQUEUE = 0
    else:
        _recvmsg = _recvmsg_nix
    def _recvmesg1(self, _conn, _data):
        """Message-loop callback: read one datagram, verify it, and queue
        (transport, payload, sender) for the runtime."""
        try:
            chunk, _, flags, remote= self._recvmsg()
            if not chunk:
                # XXX: zero length packet == closed socket??
                self._log.debug("Transport closed, terminating receive loop.")
            elif flags & socket.MSG_TRUNC:
                self._log.debug("Dropped truncated packet. ")
            elif flags & socket.MSG_ERRQUEUE:
                self._log.debug("No data received. ")
            else:
                try:
                    self._verify_packet(chunk, remote)
                    self.queue.append((self, chunk, remote))
                except TransportException as e:
                    # Bad version/digest: drop, but keep the loop alive.
                    self._log.warning("Packet from %s dropped due to: %r",
                                      remote, e)
        except (socket.error, AttributeError) as e:
            # AttributeError happens when close() nulled self.conn/queue.
            self._log.debug("Terminating receive loop due to %r", e)
# TCP Implementation:
# listen() backlog for the TCP accept socket.
MAX_TCP_BACKLOG = 10
# NOTE(review): defined but not referenced in this file — presumably an
# intended cap on cached connections; confirm before relying on it.
MAX_TCP_CONN = 200
# Buffer size used while reading handshake (challenge/reply) messages.
TCP_RECV_BUFFER_SIZE = 256
# Socket timeout (seconds) for accept/connect/handshake operations.
TCP_DEFAULT_TIMEOUT = 5
#
# Authentication stuff
#
# Number of random bytes in an HMAC challenge.
MESSAGE_LENGTH = 20
# Wire markers exchanged during the connection handshake.
KEY_CHALLENGE = b'#KY#'
VER_CHALLENGE = b'#VR#'
WELCOME = b'#WELCOME#'
FAILURE = b'#FAILURE#'
class AuxConnectionData:
    """Per-connection bookkeeping attached to each TCP socket.

    During the handshake this carries the expected HMAC `digest`; once
    `provision()` is called the digest is dropped and a reusable receive
    buffer plus read cursors (`lastptr`, `freeptr`) are set up for data
    mode.
    """

    def __init__(self, peername, message_size, digest=None, provision=False):
        self.peername = peername          # (host, port) of the remote peer
        self.message_size = message_size  # transport's receive buffer size
        self.digest = digest              # expected challenge reply, if any
        if provision:
            self.provision()

    def provision(self):
        """Switch this record from handshake mode to data mode."""
        # The challenge digest is no longer needed once authenticated.
        del self.digest
        # Double-width buffer: a partial message can remain at the tail
        # while the next recv appends after it (see TcpTransport._receive_1).
        self.buf = bytearray(2 * self.message_size)
        self.view = memoryview(self.buf)
        self.lastptr = self.freeptr = 0
class ConnectionClosedException(TransportException): pass
@transport
class TcpTransport(SocketTransport):
    """A channel that supports sending and receiving messages via TCP.

    Outgoing connections are authenticated with an HMAC challenge and
    cached in `self.cache` keyed by the peer's listen address.  Each
    payload on the wire is prefixed with an 8-byte big-endian length
    header.
    """
    capabilities = ~((ChannelCaps.FIFO) |
                     (ChannelCaps.RELIABLE) |
                     (ChannelCaps.INTERHOST))
    data_offset = 0
    def __init__(self, authkey):
        super().__init__(authkey)
        self.cache = None   # peer address -> connected socket
        self.lock = None    # guards `cache`
    def initialize(self, strict=False, pipe=None, **params):
        """Create (or inherit via `pipe`) the listen socket and bind it."""
        try:
            if pipe is None:
                self.conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                if strict and not get_runtime_option('tcp_dont_reuse_addr', False):
                    self.conn.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            else:
                from multiprocessing.reduction import recv_handle
                self.conn = socket.fromfd(recv_handle(pipe),
                                          socket.AF_INET, socket.SOCK_STREAM)
            super().initialize(strict=strict, **params)
        except Exception as e:
            # Don't leak a half-initialized socket.
            if self.conn is not None:
                self.conn.close()
                self.conn = None
            raise e
    def start(self, queue, mesgloop=None):
        """Begin listening and register the accept callback."""
        self.conn.listen(MAX_TCP_BACKLOG)
        self.conn.settimeout(TCP_DEFAULT_TIMEOUT)
        super().start(queue, mesgloop)
        assert self.mesgloop is not None
        self.mesgloop.register(self.conn, self._accept)
        if self.cache is None:
            self.cache = dict()
        if self.lock is None:
            self.lock = threading.Lock()
        self._log.debug("Transport started.")
    def close(self):
        """Close all cached peer connections, then the listen socket."""
        if self.lock:
            with self.lock:
                for conn in self.cache.values():
                    self.mesgloop.deregister(conn)
                    conn.close()
                self.cache.clear()
        super().close()
    def _deliver_challenge(self, conn, addr):
        """Send the initial challenge to a newly accepted peer.

        Returns the expected HMAC digest (None when running keyless).
        """
        import os
        digest = None
        if self.authkey is not None:
            import hmac
            message = os.urandom(MESSAGE_LENGTH)
            self._send_1((KEY_CHALLENGE, VERSION_BYTES, message),
                         conn, addr)
            digest = hmac.new(self.authkey, message, 'md5').digest()
        else:
            self._send_1((VER_CHALLENGE, VERSION_BYTES), conn, addr)
        return digest
    def _verify_challenge(self, conn, auxdata):
        """Verify a remote peer has the proper key and version."""
        addr = auxdata.peername
        # FIXME: is it possible we may not get the whole message in one go?
        message = conn.recv(TCP_RECV_BUFFER_SIZE)
        self.mesgloop.deregister(conn)
        # The reply starts with the peer's listen port, then the digest
        # (or the echoed version marker when keyless).
        port_bytes = message[:ADDRESS_SIZE]
        message = message[ADDRESS_SIZE:]
        if self.authkey is not None:
            if message != auxdata.digest:
                self._send_1((FAILURE,), conn, addr)
                raise AuthenticationException(
                    'Digest from {0.peername} was wrong.'.format(auxdata))
        else:
            if message == KEY_CHALLENGE:
                raise AuthenticationException(
                    '{0.peername} requires a cookie.'.format(auxdata))
            if message != VER_CHALLENGE:
                raise VersionMismatchException(
                    'Version from {0.peername} is different.'.format(auxdata))
        # Set the remote peer's port number to its listen port:
        remote_port = int.from_bytes(port_bytes, BYTEORDER)
        auxdata.peername = addr[0], remote_port
        with self.lock:
            if auxdata.peername in self.cache:
                # We already hold a connection to this peer; keep that one.
                self._log.debug("Dropping duplicate connection from %s.",
                                auxdata.peername)
                conn.close()
                return
            else:
                self.cache[auxdata.peername] = conn
        self._send_1((WELCOME,), conn, addr)
        # Handshake done: switch the connection to data mode.
        auxdata.provision()
        self.mesgloop.register(conn, self._recvmesg_wrapper,
                               (self._receive_1, auxdata))
    def _answer_challenge(self, conn, addr):
        """Client side of the handshake: answer the server's challenge
        and wait for WELCOME; raises on any mismatch."""
        # FIXME: same here...
        message = conn.recv(TCP_RECV_BUFFER_SIZE)
        self._log.debug("=========answering %r", message)
        if self.authkey is not None:
            import hmac
            if message[:len(KEY_CHALLENGE)] != KEY_CHALLENGE:
                self._send_challenge_reply(KEY_CHALLENGE, conn, addr)
                raise AuthenticationException('{} has no cookie.'.
                                              format(addr))
            if message[len(KEY_CHALLENGE):len(KEY_CHALLENGE)+4] != VERSION_BYTES:
                raise VersionMismatchException('Version at {} is different.'.
                                               format(addr))
            message = message[len(KEY_CHALLENGE)+4:]
            digest = hmac.new(self.authkey, message, 'md5').digest()
            self._send_challenge_reply(digest, conn, addr)
        else:
            if message[:len(KEY_CHALLENGE)] == KEY_CHALLENGE:
                self._send_challenge_reply(KEY_CHALLENGE, conn, addr)
                raise AuthenticationException('{} requires a cookie.'.
                                              format(addr))
            elif message != VER_CHALLENGE + VERSION_BYTES:
                self._send_challenge_reply(FAILURE, conn, addr)
                raise VersionMismatchException('Version at {} is different.'.
                                               format(addr))
            else:
                self._send_challenge_reply(VER_CHALLENGE, conn, addr)
        # FIXME: ...and here
        response = conn.recv(len(WELCOME))
        if len(response) == 0:
            # Remote side dropped the connection, either because they
            # terminated, or we already have a connection
            raise ConnectionClosedException()
        elif response != WELCOME:
            raise AuthenticationException('digest was rejected by {}.'.
                                          format(addr))
    def _send_challenge_reply(self, result, conn, addr):
        # Every reply is prefixed with our listen port so the server can
        # key its connection cache by our canonical address.
        self._send_1((self.address_bytes, result), conn, addr)
    def _accept(self, conn, auxdata):
        """Message-loop callback on the listen socket: accept a peer and
        start verifying its challenge reply."""
        conn, addr = self.conn.accept()
        self._log.debug("Accepted connection from %s.", addr)
        digest = self._deliver_challenge(conn, auxdata)
        self.mesgloop.register(conn, self._recvmesg_wrapper,
                               (self._verify_challenge,
                                AuxConnectionData(addr,
                                                  self.buffer_size,
                                                  digest)))
    def _connect(self, target):
        """Open, authenticate, and return a new connection to `target`."""
        self._log.debug("Initiating connection to %s.", target)
        conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        conn.settimeout(TCP_DEFAULT_TIMEOUT)
        conn.connect(target)
        try:
            self._answer_challenge(conn, target)
            self._log.debug("Connection to %s established.", target)
            return conn
        except TransportException as e:
            conn.close()
            raise e
    def _cleanup(self, conn, remote):
        """Deregister, uncache, and close a dead connection."""
        if conn is None:
            return
        self._log.debug("Cleanup connection to %s.", remote)
        if self.mesgloop:
            self.mesgloop.deregister(conn)
        if remote in self.cache:
            with self.lock:
                try:
                    # Only evict if the cache still maps to this very socket.
                    if self.cache.get(remote) is conn:
                        del self.cache[remote]
                except AttributeError:
                    pass
        try:
            conn.close()
        except OSError:
            pass
    def send(self, chunk, target, retries=MAX_RETRIES, wait=0.05,
             retry_refused_connections=False, **_):
        """Send `chunk` to `target`, reusing a cached connection when one
        exists and reconnecting (up to `retries` times) otherwise."""
        if target is None:
            raise NoTargetTransportException()
        header = int(len(chunk)).to_bytes(HEADER_SIZE, BYTEORDER)
        message = (header, chunk)
        retry = 1
        saved = conn = None
        # BUGFIX: the original raised `TransportException(...) from e` after
        # the `except` handlers had exited, but `except ... as e` unbinds
        # `e` when its handler ends (and the ConnectionClosedException path
        # never bound it), so that line raised NameError instead of the
        # intended TransportException.  Track the last failure explicitly.
        last_error = None
        try:
            while True:
                with self.lock:
                    saved = conn = self.cache.get(target)
                try:
                    if conn is None:
                        conn = self._connect(target)
                    self._send_1(message, conn, target)
                    return
                except ConnectionRefusedError as e:
                    last_error = e
                    if (not retry_refused_connections) or retry > retries:
                        raise TransportException(
                            'connection refused by {}'.format(target)) from e
                except (socket.error, socket.timeout) as e:
                    last_error = e
                    self._log.debug("Sending to %s failed on %dth try: %r",
                                    target, retry, e)
                    # Cached connection went bad; drop it and reconnect.
                    if conn is not None:
                        conn.close()
                        conn = None
                except ConnectionClosedException as e:
                    last_error = e
                if retry > retries:
                    raise TransportException(
                        'max retries reached.') from last_error
                if retry > 1:
                    time.sleep(wait)
                retry += 1
        finally:
            if conn is not None:
                if saved is not conn:
                    # A fresh connection replaced the cached one: swap it
                    # in and start receiving on it.
                    self._cleanup(saved, target)
                    with self.lock:
                        self.cache[target] = conn
                    self.mesgloop.register(
                        conn, self._recvmesg_wrapper,
                        (self._receive_1,
                         AuxConnectionData(target, self.buffer_size,
                                           provision=True)))
            else:
                # All attempts failed: make sure no stale entry lingers.
                if target in self.cache:
                    with self.lock:
                        try:
                            del self.cache[target]
                        except KeyError:
                            pass
    def _send_1(self, data, conn, target=None):
        """Write all chunks of `data` to `conn`; raise if short."""
        msglen = sum(len(chunk) for chunk in data)
        sent = conn.sendmsg(data)
        if sent != msglen:
            self._log.debug("_send_1: only sent %d/%d bytes. ", sent, msglen)
            raise socket.error("Unable to send full chunk.")
        else:
            self._log.debug("Sent %d bytes to %s.", msglen, target)
    def _send_1_nt(self, data, conn, target=None):
        # Windows has no sendmsg; flatten and use sendall.
        from itertools import chain
        buf = bytes(chain(*data))
        conn.sendall(buf)
        self._log.debug("Sent %d bytes to %s.", len(buf), target)
    if sys.platform == 'win32':
        _send_1 = _send_1_nt
    def _recvmesg_wrapper(self, conn, job):
        """Run a receive-stage callback, tearing the connection down on
        any transport or socket failure."""
        callback, aux = job
        try:
            callback(conn, aux)
        except TransportException as e:
            self._log.warning("Exception when handling %s: %r",
                              aux.peername, e)
            self._cleanup(conn, aux.peername)
        except socket.error as e:
            self._log.debug(
                "socket.error when receiving from %s: %r",
                aux.peername, e)
            self._cleanup(conn, aux.peername)
    def _receive_1(self, conn, aux):
        """Read available bytes and deliver every complete
        length-prefixed message to the queue; partial trailing data is
        kept in `aux`'s buffer for the next call."""
        buf = aux.buf
        view = aux.view
        fptr = 0
        remote = aux.peername
        if aux.freeptr > 0:
            # Previous call left unread bytes; append after them.
            fptr = aux.freeptr
            aux.freeptr = 0
            rbuf = view[fptr:]
        else:
            rbuf = buf
        rlen = conn.recv_into(rbuf)
        if rlen == 0:
            self._log.debug("Peer disconnected: %s.", remote)
            self._cleanup(conn, remote)
            return
        self._log.debug("%d/%d bytes received from %s.", rlen, len(rbuf), remote)
        datalen = fptr + rlen
        # Resume scanning from where the previous call stopped.
        fptr = aux.lastptr
        aux.lastptr = 0
        cnt = 0
        while fptr < (datalen - HEADER_SIZE):
            pstart = fptr + HEADER_SIZE
            psize = int.from_bytes(view[fptr:pstart], BYTEORDER)
            pend = pstart + psize
            if psize > 0:
                if pend <= datalen:
                    chunk = bytes(view[pstart:pend])
                    self.queue.append((self, chunk, remote))
                    cnt += 1
                else:
                    # Message extends past the data we have; wait for more.
                    break
            else:
                self._log.debug("Invalid message header: %d!", psize)
            fptr = pend
        self._log.debug("%d message(s) received.", cnt)
        if fptr != datalen:
            leftover = datalen - fptr
            self._log.debug("%d bytes leftover.", leftover)
            if fptr > len(buf) / 2:
                # Tail is deep into the buffer: compact it to the front.
                buf[:leftover] = buf[fptr:datalen]
                aux.freeptr = leftover
                self._log.debug("Leftover bytes moved to buffer start.")
            else:
                # Leave in place; next recv appends at `freeptr` and the
                # scan restarts from `lastptr`.
                aux.lastptr = fptr
                aux.freeptr = datalen
| 37.340841 | 83 | 0.553299 |
264800384fb4c02c1f961446aafa1d73e875fc57 | 5,757 | py | Python | airflow/kubernetes/pod.py | discordapp/incubator-airflow | f0f00e58af32d4143854fb0f912953258e547886 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 3 | 2019-03-28T05:59:39.000Z | 2019-10-03T22:05:25.000Z | airflow/kubernetes/pod.py | discord/incubator-airflow | f0f00e58af32d4143854fb0f912953258e547886 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 7 | 2019-03-27T07:58:14.000Z | 2020-02-12T17:42:33.000Z | airflow/kubernetes/pod.py | discordapp/incubator-airflow | f0f00e58af32d4143854fb0f912953258e547886 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 2 | 2018-11-01T22:36:10.000Z | 2019-11-23T13:36:53.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
class Resources:
    """Container for the kubernetes resource requests and limits of a pod."""

    def __init__(
            self,
            request_memory=None,
            request_cpu=None,
            request_ephemeral_storage=None,
            limit_memory=None,
            limit_cpu=None,
            limit_gpu=None,
            limit_ephemeral_storage=None):
        # Requested (guaranteed) resources.
        self.request_memory = request_memory
        self.request_cpu = request_cpu
        self.request_ephemeral_storage = request_ephemeral_storage
        # Upper bounds enforced by kubernetes.
        self.limit_memory = limit_memory
        self.limit_cpu = limit_cpu
        self.limit_gpu = limit_gpu
        self.limit_ephemeral_storage = limit_ephemeral_storage

    def is_empty_resource_request(self):
        """Return True when neither limits nor requests were provided."""
        return not (self.has_limits() or self.has_requests())

    def has_limits(self):
        """Return True when at least one resource limit is set."""
        limits = (self.limit_cpu, self.limit_memory,
                  self.limit_gpu, self.limit_ephemeral_storage)
        return any(value is not None for value in limits)

    def has_requests(self):
        """Return True when at least one resource request is set."""
        requests = (self.request_cpu, self.request_memory,
                    self.request_ephemeral_storage)
        return any(value is not None for value in requests)
class Port:
    """A named container port exposed by a pod."""

    def __init__(self, name=None, container_port=None):
        self.name = name  # port name as used in the pod spec
        self.container_port = container_port  # numeric port inside the container
class Pod:
    """
    Value object describing a single kubernetes pod and the settings used to
    launch it.

    :param image: The docker image
    :type image: str
    :param envs: A dict containing the environment variables
    :type envs: dict
    :param cmds: The command to be run on the pod
    :type cmds: list[str]
    :param secrets: Secrets to be launched to the pod
    :type secrets: list[airflow.contrib.kubernetes.secret.Secret]
    :param result: The result that will be returned to the operator after
        successful execution of the pod
    :type result: any
    :param image_pull_policy: Specify a policy to cache or always pull an image
    :type image_pull_policy: str
    :param image_pull_secrets: Any image pull secrets to be given to the pod.
        If more than one secret is required, provide a comma separated list:
        secret_a,secret_b
    :type image_pull_secrets: str
    :param affinity: A dict containing a group of affinity scheduling rules
    :type affinity: dict
    :param hostnetwork: If True enable host networking on the pod
    :type hostnetwork: bool
    :param tolerations: A list of kubernetes tolerations
    :type tolerations: list
    :param security_context: A dict containing the security context for the pod
    :type security_context: dict
    :param configmaps: A list containing names of configmaps object
        mounting env variables to the pod
    :type configmaps: list[str]
    :param pod_runtime_info_envs: environment variables about
        pod runtime information (ip, namespace, nodeName, podName)
    :type pod_runtime_info_envs: list[PodRuntimeEnv]
    :param dnspolicy: Specify a dnspolicy for the pod
    :type dnspolicy: str
    """
    def __init__(
            self,
            image,
            envs,
            cmds,
            args=None,
            secrets=None,
            labels=None,
            node_selectors=None,
            name=None,
            ports=None,
            volumes=None,
            volume_mounts=None,
            namespace='default',
            result=None,
            image_pull_policy='IfNotPresent',
            image_pull_secrets=None,
            init_containers=None,
            service_account_name=None,
            resources=None,
            annotations=None,
            affinity=None,
            hostnetwork=False,
            tolerations=None,
            security_context=None,
            configmaps=None,
            pod_runtime_info_envs=None,
            dnspolicy=None,
            host_aliases=None
    ):
        # Scalar attributes are stored exactly as supplied.
        self.image = image
        self.cmds = cmds
        self.result = result
        self.name = name
        self.namespace = namespace
        self.image_pull_policy = image_pull_policy
        self.image_pull_secrets = image_pull_secrets
        self.init_containers = init_containers
        self.service_account_name = service_account_name
        self.security_context = security_context
        self.dnspolicy = dnspolicy
        # Collection-valued attributes fall back to fresh empty containers so
        # that instances never share mutable default state.
        self.envs = envs if envs else {}
        self.args = args if args else []
        self.secrets = secrets if secrets else []
        self.labels = labels if labels else {}
        self.ports = ports if ports else []
        self.volumes = volumes if volumes else []
        self.volume_mounts = volume_mounts if volume_mounts else []
        self.node_selectors = node_selectors if node_selectors else {}
        self.annotations = annotations if annotations else {}
        self.affinity = affinity if affinity else {}
        self.tolerations = tolerations if tolerations else []
        self.configmaps = configmaps if configmaps else []
        self.pod_runtime_info_envs = pod_runtime_info_envs if pod_runtime_info_envs else []
        self.host_aliases = host_aliases if host_aliases else []
        # A falsy resources argument is replaced with an empty Resources spec.
        self.resources = resources if resources else Resources()
        # Normalize any falsy value to the boolean False.
        self.hostnetwork = hostnetwork if hostnetwork else False
120441ad8169fb80b62bf2903f7474d56efe3761 | 24,264 | py | Python | neat_local/genome.py | Osrip/Novelty_criticality_PyTorch-NEAT | ff37eede4aea2cbb4075414a960477c215219f73 | [
"Apache-2.0"
] | null | null | null | neat_local/genome.py | Osrip/Novelty_criticality_PyTorch-NEAT | ff37eede4aea2cbb4075414a960477c215219f73 | [
"Apache-2.0"
] | null | null | null | neat_local/genome.py | Osrip/Novelty_criticality_PyTorch-NEAT | ff37eede4aea2cbb4075414a960477c215219f73 | [
"Apache-2.0"
] | null | null | null | """Handles genomes (individuals in the population)."""
from __future__ import division, print_function
from itertools import count
from random import choice, random, shuffle
import sys
from neat.activations import ActivationFunctionSet
from neat.aggregations import AggregationFunctionSet
from neat.config import ConfigParameter, write_pretty_params
from neat.genes import DefaultConnectionGene, DefaultNodeGene
from neat.graphs import creates_cycle
from neat.six_util import iteritems, iterkeys
class DefaultGenomeConfig(object):
    """Sets up and holds configuration information for the DefaultGenome class."""
    # Every value accepted for the 'initial_connection' setting.
    allowed_connectivity = ['unconnected', 'fs_neat_nohidden', 'fs_neat', 'fs_neat_hidden',
                            'full_nodirect', 'full', 'full_direct',
                            'partial_nodirect', 'partial', 'partial_direct']

    def __init__(self, params):
        """Interpret the raw ``params`` mapping (config-file values plus the
        gene classes injected by ``DefaultGenome.parse_config``) and expose
        each interpreted value as an attribute of this object.

        :raises RuntimeError: if 'initial_connection' or
            'structural_mutation_surer' has an invalid value.
        """
        # Create full set of available activation functions.
        self.activation_defs = ActivationFunctionSet()
        # ditto for aggregation functions - name difference for backward compatibility
        self.aggregation_function_defs = AggregationFunctionSet()
        self.aggregation_defs = self.aggregation_function_defs

        self._params = [ConfigParameter('num_inputs', int),
                        ConfigParameter('num_outputs', int),
                        ConfigParameter('num_hidden', int),
                        ConfigParameter('feed_forward', bool),
                        ConfigParameter('compatibility_disjoint_coefficient', float),
                        ConfigParameter('compatibility_weight_coefficient', float),
                        ConfigParameter('conn_add_prob', float),
                        ConfigParameter('conn_delete_prob', float),
                        ConfigParameter('node_add_prob', float),
                        ConfigParameter('node_delete_prob', float),
                        ConfigParameter('single_structural_mutation', bool, 'false'),
                        ConfigParameter('structural_mutation_surer', str, 'default'),
                        ConfigParameter('initial_connection', str, 'unconnected')]

        # Gather configuration data from the gene classes.
        self.node_gene_type = params['node_gene_type']
        self._params += self.node_gene_type.get_config_params()
        self.connection_gene_type = params['connection_gene_type']
        self._params += self.connection_gene_type.get_config_params()

        # Use the configuration data to interpret the supplied parameters.
        for p in self._params:
            setattr(self, p.name, p.interpret(params))

        # By convention, input pins have negative keys, and the output
        # pins have keys 0,1,...
        self.input_keys = [-i - 1 for i in range(self.num_inputs)]
        self.output_keys = list(range(self.num_outputs))

        self.connection_fraction = None

        # Verify that initial connection type is valid.
        # A 'partial' setting carries a second token: the connection fraction.
        # pylint: disable=access-member-before-definition
        if 'partial' in self.initial_connection:
            c, p = self.initial_connection.split()
            self.initial_connection = c
            self.connection_fraction = float(p)
            if not (0 <= self.connection_fraction <= 1):
                raise RuntimeError(
                    "'partial' connection value must be between 0.0 and 1.0, inclusive.")

        assert self.initial_connection in self.allowed_connectivity

        # Normalize structural_mutation_surer to 'true'/'false'/'default'.
        # pylint: disable=access-member-before-definition
        if self.structural_mutation_surer.lower() in ('1', 'yes', 'true', 'on'):
            self.structural_mutation_surer = 'true'
        elif self.structural_mutation_surer.lower() in ('0', 'no', 'false', 'off'):
            self.structural_mutation_surer = 'false'
        elif self.structural_mutation_surer.lower() == 'default':
            self.structural_mutation_surer = 'default'
        else:
            error_string = "Invalid structural_mutation_surer {!r}".format(
                self.structural_mutation_surer)
            raise RuntimeError(error_string)

        # Lazily initialized by get_new_node_key.
        self.node_indexer = None

    def add_activation(self, name, func):
        """Register a user-defined activation function under *name*."""
        self.activation_defs.add(name, func)

    def add_aggregation(self, name, func):
        """Register a user-defined aggregation function under *name*."""
        self.aggregation_function_defs.add(name, func)

    def save(self, f):
        """Write this configuration back to the open file object *f*."""
        if 'partial' in self.initial_connection:
            if not (0 <= self.connection_fraction <= 1):
                raise RuntimeError(
                    "'partial' connection value must be between 0.0 and 1.0, inclusive.")
            f.write('initial_connection      = {0} {1}\n'.format(self.initial_connection,
                                                                 self.connection_fraction))
        else:
            f.write('initial_connection      = {0}\n'.format(self.initial_connection))

        assert self.initial_connection in self.allowed_connectivity

        # 'initial_connection' was already written above, so exclude it here.
        write_pretty_params(f, self, [p for p in self._params
                                      if 'initial_connection' not in p.name])

    def get_new_node_key(self, node_dict):
        """Return a node key guaranteed not to collide with *node_dict*."""
        if self.node_indexer is None:
            # Start numbering just past the largest existing key.
            self.node_indexer = count(max(list(iterkeys(node_dict))) + 1)

        new_id = next(self.node_indexer)

        assert new_id not in node_dict

        return new_id

    def check_structural_mutation_surer(self):
        """Return the effective boolean for structural_mutation_surer.

        'default' defers to the single_structural_mutation setting;
        __init__ has already normalized the value, so the final branch is
        only reachable if the attribute was mutated after construction.
        """
        if self.structural_mutation_surer == 'true':
            return True
        elif self.structural_mutation_surer == 'false':
            return False
        elif self.structural_mutation_surer == 'default':
            return self.single_structural_mutation
        else:
            error_string = "Invalid structural_mutation_surer {!r}".format(
                self.structural_mutation_surer)
            raise RuntimeError(error_string)
class DefaultGenome(object):
    """
    A genome for generalized neural networks.
    Terminology
        pin: Point at which the network is conceptually connected to the external world;
             pins are either input or output.
        node: Analog of a physical neuron.
        connection: Connection between a pin/node output and a node's input, or between a node's
             output and a pin/node input.
        key: Identifier for an object, unique within the set of similar objects.
    Design assumptions and conventions.
        1. Each output pin is connected only to the output of its own unique
           neuron by an implicit connection with weight one. This connection
           is permanently enabled.
        2. The output pin's key is always the same as the key for its
           associated neuron.
        3. Output neurons can be modified but not deleted.
        4. The input values are applied to the input pins unmodified.
    """

    @classmethod
    def parse_config(cls, param_dict):
        """Inject the gene classes and build a DefaultGenomeConfig."""
        param_dict['node_gene_type'] = DefaultNodeGene
        param_dict['connection_gene_type'] = DefaultConnectionGene
        return DefaultGenomeConfig(param_dict)

    @classmethod
    def write_config(cls, f, config):
        """Write the genome configuration to the open file object *f*."""
        config.save(f)

    def __init__(self, key):
        # Unique identifier for a genome instance.
        self.key = key

        # (gene_key, gene) pairs for gene sets.
        self.connections = {}
        self.nodes = {}

        # Fitness results.
        self.fitness = None

        # ADDED: generation in which this genome was created (set externally).
        self.generation = None

    def configure_new(self, config):
        """Configure a new genome based on the given configuration."""
        # Create node genes for the output pins.
        for node_key in config.output_keys:
            self.nodes[node_key] = self.create_node(config, node_key)

        # Add hidden nodes if requested.
        if config.num_hidden > 0:
            for i in range(config.num_hidden):
                node_key = config.get_new_node_key(self.nodes)
                assert node_key not in self.nodes
                node = self.create_node(config, node_key)
                self.nodes[node_key] = node

        # Add connections based on initial connectivity type.
        if 'fs_neat' in config.initial_connection:
            if config.initial_connection == 'fs_neat_nohidden':
                self.connect_fs_neat_nohidden(config)
            elif config.initial_connection == 'fs_neat_hidden':
                self.connect_fs_neat_hidden(config)
            else:
                if config.num_hidden > 0:
                    print(
                        "Warning: initial_connection = fs_neat will not connect to hidden nodes;",
                        "\tif this is desired, set initial_connection = fs_neat_nohidden;",
                        "\tif not, set initial_connection = fs_neat_hidden",
                        sep='\n', file=sys.stderr)
                self.connect_fs_neat_nohidden(config)
        elif 'full' in config.initial_connection:
            if config.initial_connection == 'full_nodirect':
                self.connect_full_nodirect(config)
            elif config.initial_connection == 'full_direct':
                self.connect_full_direct(config)
            else:
                if config.num_hidden > 0:
                    print(
                        "Warning: initial_connection = full with hidden nodes will not do direct input-output connections;",
                        "\tif this is desired, set initial_connection = full_nodirect;",
                        "\tif not, set initial_connection = full_direct",
                        sep='\n', file=sys.stderr)
                self.connect_full_nodirect(config)
        elif 'partial' in config.initial_connection:
            if config.initial_connection == 'partial_nodirect':
                self.connect_partial_nodirect(config)
            elif config.initial_connection == 'partial_direct':
                self.connect_partial_direct(config)
            else:
                if config.num_hidden > 0:
                    print(
                        "Warning: initial_connection = partial with hidden nodes will not do direct input-output connections;",
                        "\tif this is desired, set initial_connection = partial_nodirect {0};".format(
                            config.connection_fraction),
                        "\tif not, set initial_connection = partial_direct {0}".format(
                            config.connection_fraction),
                        sep='\n', file=sys.stderr)
                self.connect_partial_nodirect(config)

    def configure_crossover(self, genome1, genome2, config):
        """ Configure a new genome by crossover from two parent genomes. """
        assert isinstance(genome1.fitness, (int, float))
        assert isinstance(genome2.fitness, (int, float))
        # parent1 is always the fitter parent; excess/disjoint genes are
        # inherited from it only.
        if genome1.fitness > genome2.fitness:
            parent1, parent2 = genome1, genome2
        else:
            parent1, parent2 = genome2, genome1

        # Inherit connection genes
        for key, cg1 in iteritems(parent1.connections):
            cg2 = parent2.connections.get(key)
            if cg2 is None:
                # Excess or disjoint gene: copy from the fittest parent.
                self.connections[key] = cg1.copy()
            else:
                # Homologous gene: combine genes from both parents.
                self.connections[key] = cg1.crossover(cg2)

        # Inherit node genes
        parent1_set = parent1.nodes
        parent2_set = parent2.nodes

        for key, ng1 in iteritems(parent1_set):
            ng2 = parent2_set.get(key)
            assert key not in self.nodes
            if ng2 is None:
                # Extra gene: copy from the fittest parent
                self.nodes[key] = ng1.copy()
            else:
                # Homologous gene: combine genes from both parents.
                self.nodes[key] = ng1.crossover(ng2)

    def mutate(self, config):
        """ Mutates this genome. """

        if config.single_structural_mutation:
            # At most one structural mutation per call; the probabilities are
            # normalized so they partition [0, 1).
            div = max(1, (config.node_add_prob + config.node_delete_prob +
                          config.conn_add_prob + config.conn_delete_prob))
            r = random()
            if r < (config.node_add_prob/div):
                self.mutate_add_node(config)
            elif r < ((config.node_add_prob + config.node_delete_prob)/div):
                self.mutate_delete_node(config)
            elif r < ((config.node_add_prob + config.node_delete_prob +
                       config.conn_add_prob)/div):
                self.mutate_add_connection(config)
            elif r < ((config.node_add_prob + config.node_delete_prob +
                       config.conn_add_prob + config.conn_delete_prob)/div):
                self.mutate_delete_connection()
        else:
            # Each structural mutation is rolled independently.
            if random() < config.node_add_prob:
                self.mutate_add_node(config)

            if random() < config.node_delete_prob:
                self.mutate_delete_node(config)

            if random() < config.conn_add_prob:
                self.mutate_add_connection(config)

            if random() < config.conn_delete_prob:
                self.mutate_delete_connection()

        # Mutate connection genes.
        for cg in self.connections.values():
            cg.mutate(config)

        # Mutate node genes (bias, response, etc.).
        for ng in self.nodes.values():
            ng.mutate(config)

    def mutate_add_node(self, config):
        """Split a randomly chosen connection by inserting a new node."""
        if not self.connections:
            if config.check_structural_mutation_surer():
                # No connection to split; fall back to adding one instead.
                self.mutate_add_connection(config)
            return

        # Choose a random connection to split
        conn_to_split = choice(list(self.connections.values()))
        new_node_id = config.get_new_node_key(self.nodes)
        ng = self.create_node(config, new_node_id)
        self.nodes[new_node_id] = ng

        # Disable this connection and create two new connections joining its nodes via
        # the given node. The new node+connections have roughly the same behavior as
        # the original connection (depending on the activation function of the new node).
        conn_to_split.enabled = False

        i, o = conn_to_split.key
        self.add_connection(config, i, new_node_id, 1.0, True)
        self.add_connection(config, new_node_id, o, conn_to_split.weight, True)

    def add_connection(self, config, input_key, output_key, weight, enabled):
        """Create and store a connection gene (input_key -> output_key)."""
        # TODO: Add further validation of this connection addition?
        assert isinstance(input_key, int)
        assert isinstance(output_key, int)
        assert output_key >= 0
        assert isinstance(enabled, bool)
        key = (input_key, output_key)
        connection = config.connection_gene_type(key)
        connection.init_attributes(config)
        connection.weight = weight
        connection.enabled = enabled
        self.connections[key] = connection

    def mutate_add_connection(self, config):
        """
        Attempt to add a new connection, the only restriction being that the output
        node cannot be one of the network input pins.
        """
        possible_outputs = list(iterkeys(self.nodes))
        out_node = choice(possible_outputs)

        possible_inputs = possible_outputs + config.input_keys
        in_node = choice(possible_inputs)

        # Don't duplicate connections.
        key = (in_node, out_node)
        if key in self.connections:
            # TODO: Should this be using mutation to/from rates? Hairy to configure...
            if config.check_structural_mutation_surer():
                self.connections[key].enabled = True
            return

        # Don't allow connections between two output nodes
        if in_node in config.output_keys and out_node in config.output_keys:
            return

        # No need to check for connections between input nodes:
        # they cannot be the output end of a connection (see above).

        # For feed-forward networks, avoid creating cycles.
        if config.feed_forward and creates_cycle(list(iterkeys(self.connections)), key):
            return

        cg = self.create_connection(config, in_node, out_node)
        self.connections[cg.key] = cg

    def mutate_delete_node(self, config):
        """Delete a random non-output node (and its connections).

        Returns the deleted node's key, or -1 if no node could be deleted.
        """
        # Do nothing if there are no non-output nodes.
        available_nodes = [k for k in iterkeys(self.nodes) if k not in config.output_keys]
        if not available_nodes:
            return -1

        del_key = choice(available_nodes)

        # Remove every connection touching the deleted node.
        connections_to_delete = set()
        for v in self.connections.values():
            if del_key in v.key:
                connections_to_delete.add(v.key)

        for key in connections_to_delete:
            del self.connections[key]

        del self.nodes[del_key]

        return del_key

    def mutate_delete_connection(self):
        """Delete a randomly chosen connection gene, if any exist."""
        if self.connections:
            key = choice(list(self.connections.keys()))
            del self.connections[key]

    def distance(self, other, config):
        """
        Returns the genetic distance between this genome and the other. This distance value
        is used to compute genome compatibility for speciation.
        """

        # Compute node gene distance component.
        node_distance = 0.0
        if self.nodes or other.nodes:
            disjoint_nodes = 0
            for k2 in iterkeys(other.nodes):
                if k2 not in self.nodes:
                    disjoint_nodes += 1

            for k1, n1 in iteritems(self.nodes):
                n2 = other.nodes.get(k1)
                if n2 is None:
                    disjoint_nodes += 1
                else:
                    # Homologous genes compute their own distance value.
                    node_distance += n1.distance(n2, config)

            max_nodes = max(len(self.nodes), len(other.nodes))
            node_distance = (node_distance +
                             (config.compatibility_disjoint_coefficient *
                              disjoint_nodes)) / max_nodes

        # Compute connection gene differences.
        connection_distance = 0.0
        if self.connections or other.connections:
            disjoint_connections = 0
            for k2 in iterkeys(other.connections):
                if k2 not in self.connections:
                    disjoint_connections += 1

            for k1, c1 in iteritems(self.connections):
                c2 = other.connections.get(k1)
                if c2 is None:
                    disjoint_connections += 1
                else:
                    # Homologous genes compute their own distance value.
                    connection_distance += c1.distance(c2, config)

            max_conn = max(len(self.connections), len(other.connections))
            connection_distance = (connection_distance +
                                   (config.compatibility_disjoint_coefficient *
                                    disjoint_connections)) / max_conn

        distance = node_distance + connection_distance
        return distance

    def size(self):
        """
        Returns genome 'complexity', taken to be
        (number of nodes, number of enabled connections)
        """
        num_enabled_connections = sum(1 for cg in self.connections.values() if cg.enabled)
        return len(self.nodes), num_enabled_connections

    def __str__(self):
        s = "Key: {0}\nFitness: {1}\nNodes:".format(self.key, self.fitness)
        for k, ng in iteritems(self.nodes):
            s += "\n\t{0} {1!s}".format(k, ng)
        s += "\nConnections:"
        connections = list(self.connections.values())
        connections.sort()
        for c in connections:
            s += "\n\t" + str(c)
        return s

    @staticmethod
    def create_node(config, node_id):
        """Create and initialize a node gene with the given key."""
        node = config.node_gene_type(node_id)
        node.init_attributes(config)
        return node

    @staticmethod
    def create_connection(config, input_id, output_id):
        """Create and initialize a connection gene (input_id -> output_id)."""
        connection = config.connection_gene_type((input_id, output_id))
        connection.init_attributes(config)
        return connection

    def connect_fs_neat_nohidden(self, config):
        """
        Randomly connect one input to all output nodes
        (FS-NEAT without connections to hidden, if any).
        Originally connect_fs_neat.
        """
        input_id = choice(config.input_keys)
        for output_id in config.output_keys:
            connection = self.create_connection(config, input_id, output_id)
            self.connections[connection.key] = connection

    def connect_fs_neat_hidden(self, config):
        """
        Randomly connect one input to all hidden and output nodes
        (FS-NEAT with connections to hidden, if any).
        """
        input_id = choice(config.input_keys)
        others = [i for i in iterkeys(self.nodes) if i not in config.input_keys]
        for output_id in others:
            connection = self.create_connection(config, input_id, output_id)
            self.connections[connection.key] = connection

    def compute_full_connections(self, config, direct):
        """
        Compute connections for a fully-connected feed-forward genome--each
        input connected to all hidden nodes
        (and output nodes if ``direct`` is set or there are no hidden nodes),
        each hidden node connected to all output nodes.
        (Recurrent genomes will also include node self-connections.)
        """
        hidden = [i for i in iterkeys(self.nodes) if i not in config.output_keys]
        output = [i for i in iterkeys(self.nodes) if i in config.output_keys]
        connections = []
        if hidden:
            for input_id in config.input_keys:
                for h in hidden:
                    connections.append((input_id, h))
            for h in hidden:
                for output_id in output:
                    connections.append((h, output_id))
        if direct or (not hidden):
            for input_id in config.input_keys:
                for output_id in output:
                    connections.append((input_id, output_id))

        # For recurrent genomes, include node self-connections.
        if not config.feed_forward:
            for i in iterkeys(self.nodes):
                connections.append((i, i))

        return connections

    def connect_full_nodirect(self, config):
        """
        Create a fully-connected genome
        (except without direct input-output unless no hidden nodes).
        """
        for input_id, output_id in self.compute_full_connections(config, False):
            connection = self.create_connection(config, input_id, output_id)
            self.connections[connection.key] = connection

    def connect_full_direct(self, config):
        """ Create a fully-connected genome, including direct input-output connections. """
        for input_id, output_id in self.compute_full_connections(config, True):
            connection = self.create_connection(config, input_id, output_id)
            self.connections[connection.key] = connection

    def connect_partial_nodirect(self, config):
        """
        Create a partially-connected genome,
        with (unless no hidden nodes) no direct input-output connections."""
        assert 0 <= config.connection_fraction <= 1
        all_connections = self.compute_full_connections(config, False)
        shuffle(all_connections)
        num_to_add = int(round(len(all_connections) * config.connection_fraction))
        for input_id, output_id in all_connections[:num_to_add]:
            connection = self.create_connection(config, input_id, output_id)
            self.connections[connection.key] = connection

    def connect_partial_direct(self, config):
        """
        Create a partially-connected genome,
        including (possibly) direct input-output connections.
        """
        assert 0 <= config.connection_fraction <= 1
        all_connections = self.compute_full_connections(config, True)
        shuffle(all_connections)
        num_to_add = int(round(len(all_connections) * config.connection_fraction))
        for input_id, output_id in all_connections[:num_to_add]:
            connection = self.create_connection(config, input_id, output_id)
            self.connections[connection.key] = connection
| 42.198261 | 127 | 0.617829 |
3a83dd75b4a662d734ffb315811a5292d52ddf01 | 555 | py | Python | deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/EXT/rescale_normal.py | ShujaKhalid/deep-rl | 99c6ba6c3095d1bfdab81bd01395ced96bddd611 | [
"MIT"
] | 210 | 2016-04-09T14:26:00.000Z | 2022-03-25T18:36:19.000Z | deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/EXT/rescale_normal.py | ShujaKhalid/deep-rl | 99c6ba6c3095d1bfdab81bd01395ced96bddd611 | [
"MIT"
] | 72 | 2016-09-04T09:30:19.000Z | 2022-03-27T17:06:53.000Z | deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/EXT/rescale_normal.py | ShujaKhalid/deep-rl | 99c6ba6c3095d1bfdab81bd01395ced96bddd611 | [
"MIT"
] | 64 | 2016-04-09T14:26:49.000Z | 2022-03-21T11:19:47.000Z | '''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_EXT_rescale_normal'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GL,'GL_EXT_rescale_normal',error_checker=_errors._error_checker)
GL_RESCALE_NORMAL_EXT=_C('GL_RESCALE_NORMAL_EXT',0x803A)
| 34.6875 | 115 | 0.801802 |
181a090c6bffaf528a58d95538f104ff5abd0d14 | 6,462 | py | Python | cryton/lib/util/creator.py | slashsec-edu/cryton-core | 30931c276cf1ed239b50cdb96d5711468d361eea | [
"MIT"
] | null | null | null | cryton/lib/util/creator.py | slashsec-edu/cryton-core | 30931c276cf1ed239b50cdb96d5711468d361eea | [
"MIT"
] | null | null | null | cryton/lib/util/creator.py | slashsec-edu/cryton-core | 30931c276cf1ed239b50cdb96d5711468d361eea | [
"MIT"
] | null | null | null | from typing import Union
from cryton.lib.util import exceptions, logger
from cryton.lib.models import stage, plan, step, worker
from cryton.cryton_rest_api.models import OutputMapping
def create_plan(template: dict) -> int:
    """
    Check if Plan structure is correct and add it to DB.

    NOTE: ``template["plan"]`` is modified in place - its "stages" entry is
    popped, and each stage dict gains a "stage_model_id" key after creation.
    If any Stage (or dependency lookup) fails after the Plan row exists, the
    Plan is deleted again before the error is re-raised (manual rollback).

    :param template: Plan from file ({"plan": ... })
    :return: Added Plan object ID
    :raises
        exceptions.ValidationError
        exceptions.PlanCreationFailedError
    """
    # Validate plan template.
    plan_dict = template.get("plan")
    plan.Plan.validate(plan_dict=plan_dict)
    # Get defaults.
    plan_name = plan_dict.get("name")
    # Stages are removed from the dict so the remaining keys can be passed
    # straight to the Plan constructor below.
    stages_list = plan_dict.pop("stages")
    # Create Plan object.
    plan_obj = plan.Plan(**plan_dict)
    plan_obj_id = plan_obj.model.id
    # Create Stages (first pass - dependencies are resolved afterwards, so
    # forward references between Stages are allowed).
    for stage_dict in stages_list:
        try:
            stage_obj_id = create_stage(stage_dict, plan_obj_id)
            stage_dict.update({"stage_model_id": stage_obj_id})
        except Exception as ex:
            # Roll back the partially created Plan before propagating.
            plan_obj.delete()
            raise exceptions.PlanCreationFailedError(message=ex, plan_name=plan_name)
    # Create Stage dependencies (second pass - all Stage rows now exist).
    for stage_dict in stages_list:
        stage_id = stage_dict.get("stage_model_id")
        stage_dependencies = stage_dict.get("depends_on", [])
        stage_obj = stage.Stage(stage_model_id=stage_id)
        for dependency_name in stage_dependencies:
            try:
                dependency_id = stage.StageModel.objects.get(name=dependency_name, plan_model_id=plan_obj_id).id
            except stage.StageModel.DoesNotExist as ex:
                # Unknown dependency name: roll back the whole Plan.
                plan_obj.delete()
                raise exceptions.DependencyDoesNotExist(message=ex, stage_name=dependency_name)
            stage_obj.add_dependency(dependency_id)
    logger.logger.info("plan created", plan_name=plan_obj.name, plan_id=plan_obj_id, status="success")
    return plan_obj_id
def create_stage(stage_dict: dict, plan_model_id: int) -> int:
    """
    Add Stage to DB.

    NOTE: ``stage_dict`` is modified in place - its "steps" entry is popped,
    "plan_model_id" is added, and each step dict gains a "step_model_id" key.

    :param stage_dict: Stage dictionary
    :param plan_model_id: Plan ID
    :return: Added Stage object ID
    :raises
        exceptions.StageCreationFailedError
    """
    stage_dict.update({"plan_model_id": plan_model_id})
    # Steps are removed so the remaining keys can be passed to the Stage
    # constructor below.
    steps_list = stage_dict.pop("steps")
    # Create Stage object.
    stage_obj = stage.Stage(**stage_dict)
    stage_obj_id = stage_obj.model.id
    # Create Steps (first pass - successors are linked afterwards, so forward
    # references between Steps are allowed).
    for step_dict in steps_list:
        step_obj_id = create_step(step_dict, stage_obj_id)
        step_dict.update({"step_model_id": step_obj_id})
    # Create Successors (second pass - all Step rows now exist).
    for step_dict in steps_list:
        step_id = step_dict.get("step_model_id")
        step_successor_list = step_dict.get("next", [])
        step_obj = step.Step(step_model_id=step_id)
        # Set Step successors.
        for step_successor in step_successor_list:
            # Each successor entry holds "step" (name or list of names),
            # "type" and "value".
            successors, s_type, s_values = (step_successor.get(key) for key in ["step", "type", "value"])
            if not isinstance(successors, list):
                successors = [successors]
            for successor_name in successors:
                create_successor(step_obj, stage_obj_id, successor_name, s_type, s_values)
    logger.logger.info("stage created", stage_name=stage_obj.name, stage_id=stage_obj_id, status="success")
    return stage_obj_id
def create_step(step_dict: dict, stage_model_id: int) -> int:
    """
    Add Step to DB.

    :param step_dict: Step dictionary
    :param stage_model_id: Stage ID
    :return: Added Step object ID
    """
    step_dict.update({"stage_model_id": stage_model_id})

    # A Step with no successors is a terminal ('final') Step of its Stage.
    step_successor_list = step_dict.get("next", [])
    if not step_successor_list:
        step_dict.update({"is_final": True})

    # Create Step object.
    new_step = step.Step(**step_dict)
    new_step_id = new_step.model.id

    # Persist the Step's output mappings, if any were defined.
    for mapping in step_dict.get("output_mapping", []):
        create_output_mapping(mapping, new_step_id)

    logger.logger.info("step created", step_name=new_step.name, step_id=new_step_id, status="success")
    return new_step_id
def create_successor(parent_step: step.Step, stage_id: int, successor_name: str, successor_type: str,
                     successor_values: Union[list, str]):
    """
    Add successor and its links between parent and successor Step to DB.

    :param parent_step: Parent Step
    :param stage_id: Stage ID
    :param successor_name: Successor's name
    :param successor_type: Successor's type
    :param successor_values: Successor's values (or just one)
    :return: None
    :raises:
        exceptions.InvalidSuccessorType
        exceptions.InvalidSuccessorValue
        exceptions.SuccessorCreationFailedError
    """
    # Resolve the successor's DB row within the same Stage.
    try:
        successor_id = step.StepModel.objects.get(name=successor_name, stage_model_id=stage_id).id
    except step.StepModel.DoesNotExist as ex:
        raise exceptions.SuccessorCreationFailedError(message=ex, successor_name=successor_name)

    # Accept a single value or a list of values uniformly.
    values = successor_values if isinstance(successor_values, list) else [successor_values]
    for value in values:
        parent_step.add_successor(successor_id, successor_type, value)
def create_output_mapping(output_mapping: dict, step_model_id: int):
    """
    Add output mapping for Step to DB.

    :param output_mapping: Output mapping ({"name_from": ..., "name_to": ...})
    :param step_model_id: Step ID
    :return: None
    """
    OutputMapping.objects.create(
        step_model_id=step_model_id,
        name_from=output_mapping.get("name_from"),
        name_to=output_mapping.get("name_to"),
    )
def create_worker(name: str, address: str, q_prefix: str = None) -> int:
    """
    Update prefix and add Worker to DB.

    :param name: Worker's name
    :param address: Worker's address
    :param q_prefix: Worker's q_prefix (derived from name/address when empty)
    :return: Added Worker object ID
    :raises exceptions.WrongParameterError: when name or address is empty
    """
    # Mandatory parameters must be neither None nor an empty string
    # (name is checked first, as before).
    for param_name, value in (("name", name), ("address", address)):
        if value is None or value == "":
            raise exceptions.WrongParameterError(message="Parameter cannot be empty", param_name=param_name)

    # Derive a default queue prefix when none was supplied.
    if q_prefix is None or q_prefix == "":
        q_prefix = f"{name}_{address}"

    return worker.Worker(name=name, address=address, q_prefix=q_prefix).model.id
| 34.92973 | 112 | 0.693129 |
865d78b208ce0700dfd6f542b46b58ec2a4213e4 | 184 | py | Python | ccal/add_conda_to_path.py | kberkey/ccal | 92aa8372997dccec2908928f71a11b6c8327d7aa | [
"MIT"
] | 9 | 2017-10-09T16:54:58.000Z | 2018-12-14T19:49:03.000Z | ccal/add_conda_to_path.py | kberkey/ccal | 92aa8372997dccec2908928f71a11b6c8327d7aa | [
"MIT"
] | 8 | 2017-03-11T04:43:04.000Z | 2018-12-10T09:47:14.000Z | ccal/add_conda_to_path.py | kberkey/ccal | 92aa8372997dccec2908928f71a11b6c8327d7aa | [
"MIT"
] | 4 | 2017-03-10T19:12:28.000Z | 2022-01-02T21:11:40.000Z | from os import environ
def add_conda_to_path(conda_directory_path):
environ["PATH"] = "{}:{}".format(
"{}/{}".format(conda_directory_path, "bin"), environ["PATH"]
)
| 20.444444 | 68 | 0.630435 |
91fbbaa2ba6d6726cff9ac3b7bfbc2e5d422fc78 | 3,570 | py | Python | ceilometer/tests/db.py | lexxito/monitoring | bec8dfb8d3610331c7ae5ec543e0b8da0948c164 | [
"Apache-2.0"
] | 1 | 2016-04-15T17:14:59.000Z | 2016-04-15T17:14:59.000Z | ceilometer/tests/db.py | lexxito/monitoring | bec8dfb8d3610331c7ae5ec543e0b8da0948c164 | [
"Apache-2.0"
] | null | null | null | ceilometer/tests/db.py | lexxito/monitoring | bec8dfb8d3610331c7ae5ec543e0b8da0948c164 | [
"Apache-2.0"
] | null | null | null | # -*- encoding: utf-8 -*-
#
# Copyright © 2012 New Dream Network, LLC (DreamHost)
# Copyright © 2013 eNovance
#
# Author: Doug Hellmann <doug.hellmann@dreamhost.com>
# Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base classes for API tests."""
import os
import uuid
import warnings
import six
from ceilometer.openstack.common.fixture import config
from ceilometer import storage
from ceilometer.tests import base as test_base
class TestBase(test_base.BaseTestCase):
def setUp(self):
super(TestBase, self).setUp()
if self.database_connection is None:
self.skipTest("No connection URL set")
self.CONF = self.useFixture(config.Config()).conf
self.CONF.set_override('connection', str(self.database_connection),
group='database')
with warnings.catch_warnings():
warnings.filterwarnings(
action='ignore',
message='.*you must provide a username and password.*')
try:
self.conn = storage.get_connection(self.CONF)
except storage.StorageBadVersion as e:
self.skipTest(str(e))
self.conn.upgrade()
self.CONF([], project='ceilometer')
# Set a default location for the pipeline config file so the
# tests work even if ceilometer is not installed globally on
# the system.
self.CONF.set_override(
'pipeline_cfg_file',
self.path_get('etc/ceilometer/pipeline.yaml')
)
def tearDown(self):
self.conn.clear()
self.conn = None
super(TestBase, self).tearDown()
class MongoDBFakeConnectionUrl(object):
def __init__(self):
self.url = os.environ.get('CEILOMETER_TEST_MONGODB_URL')
if not self.url:
raise RuntimeError(
"No MongoDB test URL set,"
"export CEILOMETER_TEST_MONGODB_URL environment variable")
def __str__(self):
return '%(url)s_%(db)s' % dict(url=self.url, db=uuid.uuid4().hex)
class DB2FakeConnectionUrl(MongoDBFakeConnectionUrl):
def __init__(self):
self.url = (os.environ.get('CEILOMETER_TEST_DB2_URL') or
os.environ.get('CEILOMETER_TEST_MONGODB_URL'))
if not self.url:
raise RuntimeError(
"No DB2 test URL set, "
"export CEILOMETER_TEST_DB2_URL environment variable")
else:
# This is to make sure that the db2 driver is used when
# CEILOMETER_TEST_DB2_URL was not set
self.url = self.url.replace('mongodb:', 'db2:', 1)
@six.add_metaclass(test_base.SkipNotImplementedMeta)
class MixinTestsWithBackendScenarios(object):
scenarios = [
('sqlalchemy', dict(database_connection='sqlite://')),
('mongodb', dict(database_connection=MongoDBFakeConnectionUrl())),
('hbase', dict(database_connection='hbase://__test__')),
('db2', dict(database_connection=DB2FakeConnectionUrl())),
]
| 33.679245 | 75 | 0.651541 |
8f0c0c4ff8eb2cb7e24e03b6b7fe97254ced8f7f | 119 | py | Python | tests/test_charlesweir_subtract.py | longr/git-advance-example | c5d82efff40945af57a01d42a7c43c3e3a591d49 | [
"MIT"
] | null | null | null | tests/test_charlesweir_subtract.py | longr/git-advance-example | c5d82efff40945af57a01d42a7c43c3e3a591d49 | [
"MIT"
] | 15 | 2021-11-29T13:15:19.000Z | 2021-12-16T11:39:02.000Z | tests/test_charlesweir_subtract.py | longr/git-advance-example | c5d82efff40945af57a01d42a7c43c3e3a591d49 | [
"MIT"
] | 5 | 2021-12-15T10:41:18.000Z | 2021-12-16T11:02:56.000Z | from pythoncalculator.charlesweir_subtract import subtract
def test_subtract():
assert subtract(1, 3) == -2
| 19.833333 | 59 | 0.731092 |
f9d2e6bc4d250eb100400b538c011cc4ef0a43a8 | 5,342 | py | Python | maml_examples/test_maml_ant.py | kvas7andy/maml_rl | 2496040a820b34d7982dc9853e3db0b10a309b24 | [
"MIT"
] | null | null | null | maml_examples/test_maml_ant.py | kvas7andy/maml_rl | 2496040a820b34d7982dc9853e3db0b10a309b24 | [
"MIT"
] | null | null | null | maml_examples/test_maml_ant.py | kvas7andy/maml_rl | 2496040a820b34d7982dc9853e3db0b10a309b24 | [
"MIT"
] | null | null | null | from rllab.baselines.linear_feature_baseline import LinearFeatureBaseline
from rllab.envs.mujoco.ant_env_rand import AntEnvRand
from rllab.envs.mujoco.ant_env_oracle import AntEnvOracle
from rllab.envs.normalized_env import normalize
from rllab.misc.instrument import stub, run_experiment_lite
from sandbox.rocky.tf.algos.vpg import VPG
from sandbox.rocky.tf.algos.trpo import TRPO
from sandbox.rocky.tf.policies.minimal_gauss_mlp_policy import GaussianMLPPolicy
from sandbox.rocky.tf.envs.base import TfEnv
import csv
import joblib
import numpy as np
import os
import pickle
import tensorflow as tf
stub(globals())
file1 = 'data/local/posticml-trpo-maml-antpos-200/maml1_fbs20_mbs20_flr_0.1_mlr0.01/itr_375.pkl'
file2 = 'data/local/posticml-trpo-maml-antpos-200/randenv100traj/itr_575.pkl'
file3 = 'data/local/posticml-trpo-maml-antpos-200/oracleenv100traj/itr_550.pkl'
make_video = False # generate results if False, run code to make video if True
run_id = 1 # for if you want to run this script in multiple terminals (need to have different ids for each run)
if not make_video:
test_num_goals = 5
np.random.seed(1)
goals = np.random.uniform(0.0, 3.0, size=(test_num_goals, ))
else:
np.random.seed(1)
test_num_goals = 2
goals = [0.0, 3.0]
file_ext = 'mp4' # can be mp4 or gif
print(goals)
gen_name = 'icml_ant_results_'
names = ['maml','pretrain','random', 'oracle']
exp_names = [gen_name + name for name in names]
step_sizes = [0.1, 0.2, 1.0, 0.0]
initial_params_files = [file1, None, file2, file3]
all_avg_returns = []
for step_i, initial_params_file in zip(range(len(step_sizes)), initial_params_files):
avg_returns = []
for g_i, goal in enumerate(goals):
my_exp_name = 'test' + str(run_id) + '/' + names[step_i] + '/goal' + str(g_i)
if initial_params_file is not None and 'oracle' in initial_params_file:
env = normalize(AntEnvOracle())
n_itr = 1
else:
env = normalize(AntEnvRand())
n_itr = 4
env = TfEnv(env)
policy = GaussianMLPPolicy( # random policy
name='policy',
env_spec=env.spec,
hidden_nonlinearity=tf.nn.relu,
hidden_sizes=(100, 100),
)
if initial_params_file is not None:
policy = None
baseline = LinearFeatureBaseline(env_spec=env.spec)
algo = VPG(
env=env,
policy=policy,
load_policy=initial_params_file,
baseline=baseline,
batch_size=4000, # 2x
max_path_length=200,
n_itr=n_itr,
reset_arg=goal,
optimizer_args={'init_learning_rate': step_sizes[step_i], 'tf_optimizer_args': {'learning_rate': 0.5*step_sizes[step_i]}, 'tf_optimizer_cls': tf.train.GradientDescentOptimizer}
)
run_experiment_lite(
algo.train(),
# Number of parallel workers for sampling
n_parallel=4,
# Only keep the snapshot parameters for the last iteration
snapshot_mode="all",
# Specifies the seed for the experiment. If this is not provided, a random seed
# will be used
seed=1,
exp_prefix='ant_test_posticml',
exp_name=my_exp_name,
plot=True,
)
# get return from the experiment
with open('data/local/ant-test-posticml/' + my_exp_name +'/progress.csv', 'r') as f:
reader = csv.reader(f, delimiter=',')
i = 0
row = None
returns = []
for row in reader:
i+=1
if i ==1:
ret_idx = row.index('AverageReturn')
else:
returns.append(float(row[ret_idx]))
avg_returns.append(returns)
if make_video:
data_loc = 'data/local/ant-test-posticml/test'+str(run_id)+'/'
save_loc = 'data/local/ant-test-posticml/test/'
param_file = initial_params_file
save_prefix = save_loc + names[step_i] + '_goal_' + str(goal)
video_filename = save_prefix + 'prestep.' + file_ext
os.system('python scripts/sim_policy.py ' + param_file + ' --speedup=4 --max_path_length=300 --video_filename='+video_filename)
for itr_i in range(3):
param_file = data_loc + 'itr_' + str(itr_i) + '.pkl'
video_filename = save_prefix + 'step_'+str(itr_i)+'.'+file_ext
os.system('python scripts/sim_policy.py ' + param_file + ' --speedup=4 --max_path_length=300 --video_filename='+video_filename)
all_avg_returns.append(avg_returns)
task_avg_returns = []
for itr in range(len(all_avg_returns[step_i][0])):
task_avg_returns.append([ret[itr] for ret in all_avg_returns[step_i]])
if not make_video:
results = {'task_avg_returns': task_avg_returns}
with open(exp_names[step_i] + '.pkl', 'wb') as f:
pickle.dump(results, f)
for i in range(len(initial_params_files)):
returns = []
std_returns = []
returns.append(np.mean([ret[itr] for ret in all_avg_returns[i]]))
std_returns.append(np.std([ret[itr] for ret in all_avg_returns[i]]))
print(initial_params_files[i])
print(returns)
print(std_returns)
| 34.24359 | 188 | 0.635343 |
7a2890976aff9f919d0b66948340144e5e5c44cd | 897 | py | Python | deepxde/data/constraint.py | zongzi13545329/PINNS | 9adf81ec13668387aa155b24fe509cdadaca9cf2 | [
"Apache-2.0"
] | 1 | 2021-12-02T14:31:38.000Z | 2021-12-02T14:31:38.000Z | deepxde/data/constraint.py | zongzi13545329/PINNS | 9adf81ec13668387aa155b24fe509cdadaca9cf2 | [
"Apache-2.0"
] | null | null | null | deepxde/data/constraint.py | zongzi13545329/PINNS | 9adf81ec13668387aa155b24fe509cdadaca9cf2 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .data import Data
from .. import config
from ..backend import tf
class Constraint(Data):
"""General constraints."""
def __init__(self, constraint, train_x, test_x):
self.constraint = constraint
self.train_x = train_x
self.test_x = test_x
def losses(self, targets, outputs, loss, model):
f = tf.cond(
model.net.training,
lambda: self.constraint(model.net.inputs, outputs, self.train_x),
lambda: self.constraint(model.net.inputs, outputs, self.test_x),
)
return loss(tf.zeros(tf.shape(f), dtype=config.real(tf)), f)
def train_next_batch(self, batch_size=None):
return self.train_x, None
def test(self):
return self.test_x, None
| 28.935484 | 78 | 0.643255 |
0b8890a19a872e9560629839344d3c9ed1653e15 | 3,478 | py | Python | zaqarclient/queues/client.py | mail2nsrajesh/python-zaqarclient | 405399f72b77eb4757199fe5208d7283260431a7 | [
"Apache-2.0"
] | 31 | 2015-01-29T20:10:50.000Z | 2021-03-05T21:39:36.000Z | zaqarclient/queues/client.py | mail2nsrajesh/python-zaqarclient | 405399f72b77eb4757199fe5208d7283260431a7 | [
"Apache-2.0"
] | null | null | null | zaqarclient/queues/client.py | mail2nsrajesh/python-zaqarclient | 405399f72b77eb4757199fe5208d7283260431a7 | [
"Apache-2.0"
] | 7 | 2016-03-02T14:47:24.000Z | 2018-04-09T08:32:34.000Z | # Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A `Client` is a high-level abstraction on top of Zaqar features. It
exposes the server features with an object-oriented interface, which
encourages dot notation and automatic, but lazy, resources
allocation. A `Client` allows you to control everything, from public
interfaces to admin endpoints.
To create a `Client` instance, you supply an url pointing to the
server and a version number::
from zaqarclient.queues import client
cli = client.Client(\'http://zaqar.example.com:8888/\', version=2)
which will load the appropriate client based on the specified
version. Optionally, you can also supply a config dictionary::
from zaqarclient.queues import client
cli = client.Client(\'http://zaqar.example.com:8888/\',
version=2, conf={})
The arguments passed to this function will be passed to the client
instances as well.
It's recommended to use `Client` instances instead of accessing the
lower level API as it has been designed to ease the interaction with
the server and it gives enough control for the most common cases.
A simple example for accessing an existing queue through a client
instance - based on the API v2 - would look like::
from zaqarclient.queues import client
cli = client.Client(\'http://zaqar.example.com:8888/\', version=2)
queue = cli.queue(\'my_queue\')
Through the queue instance will be then possible to access all the
features associated with the queue itself like posting messages,
getting message and deleting messages.
As mentioned previously in this documentation, a client instance
allows you to also access admin endpoints, for example::
from zaqarclient.queues import client
cli = client.Client(\'http://zaqar.example.com:8888/\', version=2)
flavor = cli.flavor(\'tasty\',
pool=\'my-pool-group\',
auto_create=True)
flavor.delete()
`Client` uses the lower-level API to access the server, which means
anything you can do with this client instance can be done by accessing
the underlying API, although not recommended.
"""
from zaqarclient import errors
from zaqarclient.queues.v1 import client as cv1
from zaqarclient.queues.v2 import client as cv2
_CLIENTS = {1: cv1.Client,
1.1: cv1.Client,
2: cv2.Client}
def Client(url=None, version=None, conf=None, session=None):
# NOTE: Please don't mix use the Client object with different version at
# the same time. Because the cache mechanism of queue's metadata will lead
# to unexpected response value.
# Please see zaqarclient.queues.v1.queues.Queue.metadata and
# zaqarclient.queues.v2.queues.Queue.metadata for more detail.
try:
return _CLIENTS[version](url=url, version=version, conf=conf,
session=session)
except KeyError:
raise errors.ZaqarError('Unknown client version')
| 38.21978 | 78 | 0.727142 |
70930955e3a60c94a6c9b9fb7ff71965d69d0528 | 3,451 | py | Python | _01_base/_01_01_type-var/__init__.py | XHS-12302/pycharm-test-project | fa4c4a0490cb7a6390cb3961f9c5167cb337e50c | [
"Apache-2.0"
] | null | null | null | _01_base/_01_01_type-var/__init__.py | XHS-12302/pycharm-test-project | fa4c4a0490cb7a6390cb3961f9c5167cb337e50c | [
"Apache-2.0"
] | null | null | null | _01_base/_01_01_type-var/__init__.py | XHS-12302/pycharm-test-project | fa4c4a0490cb7a6390cb3961f9c5167cb337e50c | [
"Apache-2.0"
] | null | null | null | # _*_ coding: utf-8 _*_
"""
print()
"""
print('hello,world')
print('hello,world', 'xhsgg12302@126.com')
"""
input()
"""
# name = input('please enter your name:')
# color = input()
# print(name, ': you favorite color is ', color)
print(100 + 200)
print(3.91 * 3.2)
print(10/3)
print(10 // 3) # 地板除
print(10 % 3)
print(r'''hello,\n world''')
"""
boolean
"""
print(3 < 2)
print(2 > 1)
print(True and True)
print(False and False)
print(True or False)
print(not True)
"""
if else
"""
age = 10
if age >= 18:
print('adult')
else:
print('teenager')
"""
coding
"""
print('中文')
print('\u4e2d\u6587')
print('ABC'.encode("utf-8"))
print('中文'.encode('utf-8'))
x = b'\xe4\xb8\xad\xe6\x96\x87'
print(x.decode('utf-8'))
c = b'\xe4\xb8\xad\xe6\x96\xff'
print(c.decode('utf-8', errors='ignore'))
print(len("ABC"))
print('ABC'.__len__())
print(len(b'asd'))
print(len('中文'))
print(len('中文'.encode('utf-8')))
"""
格式化
在python中,采用的格式化方法和C语言是一致的,用%实现 %d, %f, %s, %x
"""
print('Hello, %s' % 'world')
print('Hello, %s ,%s' % ('world', 'xhsgg12302@126.com'))
print('%-4d - %02d + %02d' % (2, 1, 11))
print('%.4f' % 3.1415926) # 会四舍五入
print('growth rate: %d %%' % 7) # 需要转义的话用两个%% 表示
"""
格式化二
"""
print('Hello , {0}, 成绩提升了 {1:.1f} %'.format('小明', 17.125))
"""
list 是一种有序集合,可以随时添加和删除其中的元素,数据类型可以不同
"""
classmates = ['Michael', 'Bob', 'Tracy']
complex_list = ['Michael', 'Bob', ['wt', 'site', 'xhsgg12302@126.com'], 'Tracy']
print(classmates)
print(len(classmates))
print(len(complex_list))
print(classmates[1]) # 也会出现list越界
print(classmates[-3]) # 获取倒数第二个
classmates.append('Administrator') # append方法返回None
# append(),insert(pos,element),pop,pop(pos)
"""
tuple 另一种有序列表:tuple。特性,不可变,不可更改 ,用圆括号 来表示 () 在定义变量的时候就需要初始化
"""
teachers = ('Michael', 'Bob', 'Tracy')
# 可以正常取值,但是并不能改变数据了。
# 定义一个元素的tuple,如果x = (1),这样的话并不是元组这儿按照数学公式中的小括号计算了,所以应该 x = (1,)
t2 = (1,)
t3 = (1, ) # 和t2没有区别
print(len(t2))
print(len(t3))
# 可变的tuple 元素中有一个元素是list的这个可以更改
t4 = ('a', 'b', ['A', 'B'])
t4[2][0] = 'X'
t4[2][1] = 'Y'
print(t4)
"""
条件判断 if else
"""
# condition 1
age = 10
if age >= 18:
print('adult')
# condition 2
if age >= 18:
print('adult')
else:
print('teenager')
# condition 3
if age >= 18:
print('adult')
elif age >= 16:
print('teenager')
else:
print('kid')
# if x: 只要 x 为非零数值,非空字符串,非空list等,就判断为True,否则为False
# input() 返回str,如果要和整数进行比较的话,就需要用 方法 int() 进行转换,但是 int() 如果转换非数字类型的会报错。
print()
"""
循环
for(),while()
"""
# for()
names = ['Michael', 'Bob', 'Tracy']
for name in names:
print(name)
rst = 0
for x in (1, 2, 3, 4, 5, 6, 7, 8, 9):
rst += x
print(rst)
print(range(5))
print(list(range(5)))
for x in range(5):
print(x, end='\t') # 根据函数说明猜到如何打印不换行 ^_^
print()
# while()
while_rst = 0
while_n = 99
while while_n > 0:
while_rst += while_n
while_n -= 2
print(while_rst)
# break,continue ignore...
"""
dict and set
"""
d = {'Michael': 95, 'Bob': 75, 'Tracy': 85}
print(d['Michael'])
d['Bob'] = 65
d['Bob'] = 45
print(d['Bob'])
# 判断是否存在某个key,不然干取报错
'Thomas' in d # False
# 还可以通过get()取值 key不存在返回 None ,pop(key) 删除dict中的元素
# 集合
s = set([1, 2, 3])
s.add(4)
s.remove(2)
# 集合可以看作是无序,无重复元素的集合,可以做数学意义伤的交集,并集等操作
s1 = set([1, 2, 3])
s2 = set([2, 3, 4])
s1 & s2 # {2, 3}
s1 | s2 # {1, 2, 3 ,4}
tuple1 = (1, 2, 3)
tuple2 = (1, [2, 3])
d['tuple1'] = tuple1
print(d)
d['tuple1'] = tuple2
print(d)
s.add(tuple1)
# s.add(tuple2) # tuple2 中的元素规定为不可变元素,但是其中有一个是[2, 3]为list。所以报错
print(s)
print(tuple1)
print(tuple2)
| 16.916667 | 80 | 0.604752 |
7fde2231b148743f782bc623ba598dc74595c06d | 23,837 | py | Python | preprocessing/data_conversion.py | KCL-BMEIS/VS_Seg | 8d79f062ca5d1b65d4a1f9f632f981992a7780b5 | [
"Apache-2.0"
] | 19 | 2021-01-04T14:35:22.000Z | 2022-03-23T07:14:53.000Z | preprocessing/data_conversion.py | KCL-BMEIS/VS_Seg | 8d79f062ca5d1b65d4a1f9f632f981992a7780b5 | [
"Apache-2.0"
] | null | null | null | preprocessing/data_conversion.py | KCL-BMEIS/VS_Seg | 8d79f062ca5d1b65d4a1f9f632f981992a7780b5 | [
"Apache-2.0"
] | 3 | 2021-03-27T22:20:38.000Z | 2021-09-15T10:36:10.000Z | """
@authors: reubendo, aaronkujawa
Adapted from: https://github.com/SlicerRt/SlicerRT/edit/master/BatchProcessing/BatchStructureSetConversion.py
usage from command line:
[path_slicer] --python-script [path_to_this_python_file] --input-folder [path_input] --output-folder [path_output]
[optional] --register [T1 or T2] --export_all_structures
description:
--input-folder [path_input] ... path_input is a path to a folder containing sub-folders named vs_gk_<case_number>_t1
and vs_gk_<case_number>_t2, which contain image files in DICOM format and the
contours.json file
--register ... optional keyword:
if not used, no registration will be performed. The T1 and T2 image will be exported as
vs_gk_t1_refT1.nii.gz and vs_gk_t2_refT2.nii.gz . The tumour segmentations will be exported as
vs_gk_seg_refT1.nii.gz with the dimensions of the T1 image and vs_gk_seg_refT2.nii.gz with the
dimensions of the T2 image.
--register T1: The T2 image will be registered to the T1 image. The exported image files will be named
vs_gk_t1_refT1.nii.gz and vs_gk_t1_refT1.nii.gz. Only one segmentation with the dimensions of the T1
image will be exported, named vs_gk_seg_refT1.nii.gz
--register T2: The T1 image will be registered to the T2 image. The exported image files will be named
vs_gk_t1_refT2.nii.gz and vs_gk_t1_refT2.nii.gz. Only one segmentation with the dimensions of the T2
image will be exported, named vs_gk_seg_refT2.nii.gz
--export_all_structures ... optional keyword: if used, all structures in the contours.json file will be exported, not
only the tumour. The exported structures will be named
vs_gk_struc<structure_index>_<structure_name>_refT1.nii.gz and
vs_gk_struc<structure_index>_<structure_name>_refT2.nii.gz where structure_index refers to
the order of the structures in the contours.json file (starting at 1) and structure_name
is the name of the structure as specified in the contours.json file.
If --register T1 or --register T2 is used, only one of the two files is exported.
Remarks:
3D Slicer version has to be 4.13 or newer
For Mac [path_slicer] = /Applications/Slicer.app/Contents/MacOS/Slicer
"""
from __future__ import absolute_import
import os
import vtk, slicer
from slicer.ScriptedLoadableModule import *
import argparse
import sys
import logging
from DICOMLib import DICOMUtils
import re
import glob
import json
import numpy as np
def loadCheckedLoadables(self):
# method copied from https://github.com/Slicer/Slicer/blob/b3a78b1cf7cbe6e832ffe6b149bec39d9539f4c6/Modules/
# Scripted/DICOMLib/DICOMBrowser.py#L495
# to overwrite the source code method
# only change is that the first three lines are commented out, because they reset the "selected" attribute which
# can be set manually before calling this method to decide which series are loaded into slicer
"""Invoke the load method on each plugin for the loadable
(DICOMLoadable or qSlicerDICOMLoadable) instances that are selected"""
# if self.advancedViewButton.checkState() == 0:
# self.examineForLoading()
# self.loadableTable.updateSelectedFromCheckstate()
# TODO: add check that disables all referenced stuff to be considered?
# get all the references from the checked loadables
referencedFileLists = []
for plugin in self.loadablesByPlugin:
for loadable in self.loadablesByPlugin[plugin]:
if hasattr(loadable, "referencedInstanceUIDs"):
instanceFileList = []
for instance in loadable.referencedInstanceUIDs:
instanceFile = slicer.dicomDatabase.fileForInstance(instance)
if instanceFile != "":
instanceFileList.append(instanceFile)
if len(instanceFileList) and not self.isFileListInCheckedLoadables(instanceFileList):
referencedFileLists.append(instanceFileList)
# if applicable, find all loadables from the file lists
loadEnabled = False
if len(referencedFileLists):
(self.referencedLoadables, loadEnabled) = self.getLoadablesFromFileLists(referencedFileLists)
automaticallyLoadReferences = int(
slicer.util.settingsValue("DICOM/automaticallyLoadReferences", qt.QMessageBox.InvalidRole)
)
if slicer.app.commandOptions().testingEnabled:
automaticallyLoadReferences = qt.QMessageBox.No
if loadEnabled and automaticallyLoadReferences == qt.QMessageBox.InvalidRole:
self.showReferenceDialogAndProceed()
elif loadEnabled and automaticallyLoadReferences == qt.QMessageBox.Yes:
self.addReferencesAndProceed()
else:
self.proceedWithReferencedLoadablesSelection()
def import_T1_and_T2_data(input_folder, case_number):
patient_dir1 = f"vs_gk_{case_number}_t1"
patient_dir2 = f"vs_gk_{case_number}_t2"
slicer.dicomDatabase.initializeDatabase()
DICOMUtils.importDicom(os.path.join(input_folder, patient_dir1)) # import T1 folder files
DICOMUtils.importDicom(os.path.join(input_folder, patient_dir2)) # import T2 folder files
logging.info("Import DICOM data from " + os.path.join(input_folder, patient_dir1))
logging.info("Import DICOM data from " + os.path.join(input_folder, patient_dir2))
slicer.mrmlScene.Clear(0) # clear the scene
logging.info(slicer.dicomDatabase.patients())
patient = slicer.dicomDatabase.patients()[0] # select the patient from the database (which has only one patient)
# get all available series for the current patient
studies = slicer.dicomDatabase.studiesForPatient(patient)
series = [slicer.dicomDatabase.seriesForStudy(study) for study in studies]
seriesUIDs = [uid for uidList in series for uid in uidList]
# activate the selection window in the dicom widget
dicomWidget = slicer.modules.dicom.widgetRepresentation().self()
dicomWidget.browserWidget.onSeriesSelected(seriesUIDs)
dicomWidget.browserWidget.examineForLoading()
# get all available series that are loadable
loadables = dicomWidget.browserWidget.loadableTable.loadables
# loop over loadables and select for loading
counter = 0
to_load = []
for key in loadables:
name = loadables[key].name
if "RTSTRUCT" in name or (("t1_" in name or "t2_" in name) and not " MR " in name):
loadables[key].selected = True
counter += 1
to_load.append(loadables[key].name)
else:
loadables[key].selected = False
# check if exactly 4 loadables (2 images and 2 RT structures were selected)
assert counter == 4, (
f"Not exactly 4, but {counter} files selected for loading of case {case_number}. \n"
f"Selected files are {to_load}"
)
# perform loading operation
loadCheckedLoadables(dicomWidget.browserWidget)
# # to load all loadables of the patient use instead:
# DICOMUtils.loadPatientByUID(patient) # load selected patient into slicer
### finished dicom widged operations ###
### now in slicer data module ###
# get RT structure nodes
ns = getNodesByClass("vtkMRMLSegmentationNode") # gets the nodes that correspond to RT structures
assert len(ns) == 2, f"Not exactly 2, but {len(ns)} node of class vtkMRMLSegmentationNode."
RTSS1 = ns[0] # picks the first RT structure
RTSS2 = ns[1]
ref1 = RTSS1.GetNodeReference("referenceImageGeometryRef")
ref2 = RTSS2.GetNodeReference("referenceImageGeometryRef")
ref1_name = ref1.GetName()
ref2_name = ref2.GetName()
print(ref1_name)
print(ref2_name)
# make sure that 1-variables are always related to T1 image/segmentation
if "t1_" in ref1_name and "t2_" in ref2_name:
print("T1 first")
elif "t2_" in ref1_name and "t1_" in ref2_name:
print("T2 first")
RTSS1, RTSS2 = RTSS2, RTSS1
ref1, ref2 = ref2, ref1
else:
raise Error("Series names do not contain proper t1 or t2 identifiers.")
return ref1, ref2, RTSS1, RTSS2
def register_and_resample(input_node, reference_node, transform_node=None, interpolationMode="Linear"):
# when loaded with slicer, the matrix in tfm file is multiplied with LPS_to_RAS transforms from both sides
# furthermore the transformNode will be set to FromParent instead of ToParent, which has the same effect
# as inverting it before application to the volume node
if transform_node:
# make a temporary copy of the input node on which the transform can be hardened
copy_input_node = slicer.modules.volumes.logic().CloneVolume(slicer.mrmlScene, input_node, "translated")
copy_input_node.SetAndObserveTransformNodeID(transform_node.GetID())
logic = slicer.vtkSlicerTransformLogic()
logic.hardenTransform(copy_input_node)
print("hardened transformation")
else:
copy_input_node = slicer.modules.volumes.logic().CloneVolume(slicer.mrmlScene, input_node, "copy")
# resample volume
registered_and_resampled_node = slicer.mrmlScene.AddNewNodeByClass(copy_input_node.GetClassName())
parameters = {
"inputVolume": copy_input_node,
"referenceVolume": reference_node,
"outputVolume": registered_and_resampled_node,
"interpolationMode": interpolationMode,
"defaultValue": 0.0,
}
slicer.cli.run(slicer.modules.brainsresample, None, parameters, wait_for_completion=True)
slicer.mrmlScene.RemoveNode(copy_input_node) # remove temporary copy of input node
registered_and_resampled_node.SetName(input_node.GetName() + "_registered_and_resampled")
return registered_and_resampled_node
def createSegNodeFromContourPoints(segmentationNode, contours, name):
# set up contour objects
contoursPolyData = vtk.vtkPolyData()
contourPoints = vtk.vtkPoints()
contourLines = vtk.vtkCellArray()
contoursPolyData.SetLines(contourLines)
contoursPolyData.SetPoints(contourPoints)
for contour in contours:
startPointIndex = contourPoints.GetNumberOfPoints()
contourLine = vtk.vtkPolyLine()
linePointIds = contourLine.GetPointIds()
for point in contour:
linePointIds.InsertNextId(contourPoints.InsertNextPoint(point))
linePointIds.InsertNextId(startPointIndex) # make the contour line closed
contourLines.InsertNextCell(contourLine)
segment = slicer.vtkSegment()
segment.SetName(name)
# segment.SetColor(segmentColor)
segment.AddRepresentation("Planar contour", contoursPolyData)
segmentationNode.GetSegmentation().SetMasterRepresentationName("Planar contour")
segmentationNode.GetSegmentation().AddSegment(segment)
def load_LPS_contour_points(json_file_path):
with open(json_file_path, "r") as json_file:
structure_contour_list = json.load(json_file)
return structure_contour_list
def transform_contour_points(affine, contour_points):
transformed_contour_points = []
for point in contour_points:
transformed_contour_points.append((affine @ np.append(point, 1))[:3].tolist())
return np.array(transformed_contour_points)
def create_segmentation_node_with_reference_geometry(name, ref_geometry_image_node):
new_segmentation_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLSegmentationNode")
new_segmentation_node.CreateDefaultDisplayNodes()
new_segmentation_node.SetReferenceImageGeometryParameterFromVolumeNode(ref_geometry_image_node)
new_segmentation_node.SetName(name)
return new_segmentation_node
def create_segments_from_structure_contour_list(segmentationNode, structure_contour_list):
RAS_to_LPS = np.array([[-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 1, 0]])
for i, struc in enumerate(structure_contour_list):
# select a structure
contours = struc["LPS_contour_points"]
# transform contours from LPS to RAS
contours_RAS = []
for region in contours:
contours_RAS.append(transform_contour_points(RAS_to_LPS, region))
# create segment from contours_RAS in segmentationNode
createSegNodeFromContourPoints(segmentationNode, contours_RAS, struc["structure_name"])
def save_labelmaps_from_planar_contour(
planar_contour_segmentation_node, ref, export_only_tumour_seg, case_number, output_folder
):
pc_node = planar_contour_segmentation_node
segmentIDs = vtk.vtkStringArray() # create new array
pc_node.GetSegmentation().GetSegmentIDs(
segmentIDs
) # save IDs of all Segmentations in segmentIDs array, e.g. skull, tumor, cochlea
lm_nodes = []
if export_only_tumour_seg:
nb_structures = 1
else:
nb_structures = segmentIDs.GetNumberOfValues()
for segmentIndex in range(0, nb_structures):
# Selecting a structure
segmentID = segmentIDs.GetValue(segmentIndex)
# create new array and store only the ID of current structure segmentation in it
segmentID_a = vtk.vtkStringArray() # new array
segmentID_a.SetNumberOfValues(1) # define length
segmentID_a.SetValue(0, segmentID) # define first value by segmentID
# Creating a Label Map nodes that will store the binary segmentation
lm_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLLabelMapVolumeNode")
# arguments: input node, Segmentation ID (skull?), new label map node, reference volume)
slicer.modules.segmentations.logic().ExportSegmentsToLabelmapNode(pc_node, segmentID_a, lm_node, ref)
# specialcharacters to remove from output filename (they could come from name of segmented structure)
charsRoRemove = ["!", "?", ";", "*", " "]
# create filenames and remove special characters from output filename (they could come from name of segmented structure)
if export_only_tumour_seg:
if "t1_" in ref.GetName():
fileName_rt = os.path.join(
output_folder, f"vs_gk_{case_number}", f"vs_gk_seg" + "_refT1.nii.gz"
).translate({ord(i): None for i in charsRoRemove})
elif "t2_" in ref.GetName():
fileName_rt = os.path.join(
output_folder, f"vs_gk_{case_number}", f"vs_gk_seg" + "_refT2.nii.gz"
).translate({ord(i): None for i in charsRoRemove})
else:
raise Exception("Reference volume not valid.")
else:
if "t1_" in ref.GetName():
fileName_rt = os.path.join(
output_folder,
f"vs_gk_{case_number}",
f"vs_gk_struc{segmentIndex + 1}_" + segmentID + "_refT1.nii.gz",
).translate({ord(i): None for i in charsRoRemove})
elif "t2_" in ref.GetName():
fileName_rt = os.path.join(
output_folder,
f"vs_gk_{case_number}",
f"vs_gk_struc{segmentIndex + 1}_" + segmentID + "_refT2.nii.gz",
).translate({ord(i): None for i in charsRoRemove})
else:
raise Exception("Reference volume not valid.")
# save node
slicer.util.saveNode(lm_node, fileName_rt) # save planar contour points)
lm_nodes.append(lm_node)
return lm_nodes
def main(argv):
    """Batch-convert DICOM structure sets of all vs_gk_* cases to NIfTI labelmaps.

    argv -- command-line arguments, excluding the program name.

    For each case folder pair (vs_gk_<case>_t1 / vs_gk_<case>_t2) under the
    input folder, the T1/T2 volumes and their RT structure sets are imported,
    optionally registered onto the other modality (--register), converted to
    binary labelmaps and written to the output folder together with the images.
    Exits the interpreter: status 0 on success, 1 on an invalid --register value.
    """
    # Parse command-line arguments
    parser = argparse.ArgumentParser(description="Batch Structure Set Conversion")
    parser.add_argument(
        "-i",
        "--input-folder",
        dest="input_folder",
        metavar="PATH",
        default="-",
        required=True,
        help="Folder of input DICOM study (or database path to use existing)",
    )
    parser.add_argument(
        "-o", "--output-folder", dest="output_folder", metavar="PATH", default=".", help="Folder for output labelmaps"
    )
    parser.add_argument(
        "-r",
        "--register",
        dest="register",
        metavar="PATH",
        default="no_registration",
        help='"T1" for registration to T1 image, "T2" for registration to T2 image. ' 'Defaults to "no_registration".',
    )
    parser.add_argument(
        "--export_all_structures",
        dest="export_all_structures",
        action="store_true",
        help="All available structures will be exported. By default, only the VS is exported.",
    )
    parser.set_defaults(export_all_structures=False)
    args = parser.parse_args(argv)

    # Check required arguments
    if args.input_folder == "-":
        logging.warning("Please specify input DICOM study folder!")
    if args.output_folder == ".":
        logging.info(
            "Current directory is selected as output folder (default). To change it, please specify --output-folder"
        )
    if args.register not in ["no_registration", "T1", "T2"]:
        # Bug fix: previously only an error was logged and the script then
        # silently continued as if "no_registration" had been requested.
        logging.error('Invalid value for keyword "--register": choose "T1" or "T2" or "no_registration"')
        sys.exit(1)

    # Convert to python path style
    input_folder = args.input_folder.replace("\\", "/")
    output_folder = args.output_folder.replace("\\", "/")

    # --register T1 means: move the T2 image/contours into the T1 frame
    # (and vice versa); with "no_registration" both stay in their own frames.
    register_T2_image_and_contour_points_to_T1_image = args.register == "T1"
    register_T1_image_and_contour_points_to_T2_image = args.register == "T2"
    # Only the tumour (VS) segmentation is exported unless --export_all_structures.
    export_only_tumour_seg = not args.export_all_structures

    # makedirs(+exist_ok) also creates missing parent folders (os.mkdir did not).
    os.makedirs(output_folder, exist_ok=True)

    DICOMUtils.openTemporaryDatabase()
    patient_dirs = glob.glob(os.path.join(input_folder, "vs_gk_*"))
    # Case folders are named vs_gk_<case>_t1 / vs_gk_<case>_t2.
    pattern = re.compile(r"_([0-9]+)_t[1-2]$")
    case_numbers = []
    for patient_dir in patient_dirs:
        # get case number from folder name
        matches = pattern.findall(patient_dir)
        if not matches:
            # Robustness: the glob may match folders outside the naming scheme;
            # the original code raised IndexError on findall(...)[0] here.
            logging.warning("Skipping folder with unexpected name: %s", patient_dir)
            continue
        case_number = matches[0]
        # each case appears twice (_t1 and _t2 folders): process it only once
        if case_number in case_numbers:
            continue
        case_numbers.append(case_number)
        print(f"case: {case_number}")
        [ref1, ref2, RTSS1, RTSS2] = import_T1_and_T2_data(input_folder, case_number)

        ## REGISTRATION
        if register_T1_image_and_contour_points_to_T2_image:
            transform_path = os.path.join(input_folder, f"vs_gk_{case_number}" + "_t1", "inv_T1_LPS_to_T2_LPS.tfm")
            transformNode = slicer.util.loadNodeFromFile(transform_path, filetype="TransformFile")
            ref1 = register_and_resample(
                input_node=ref1, reference_node=ref2, transform_node=transformNode, interpolationMode="Linear"
            )
            # also transform contour points
            RTSS1.SetAndObserveTransformNodeID(transformNode.GetID())
        elif register_T2_image_and_contour_points_to_T1_image:
            transform_path = os.path.join(input_folder, f"vs_gk_{case_number}" + "_t2", "inv_T2_LPS_to_T1_LPS.tfm")
            transformNode = slicer.util.loadNodeFromFile(transform_path, filetype="TransformFile")
            ref2 = register_and_resample(
                input_node=ref2, reference_node=ref1, transform_node=transformNode, interpolationMode="Linear"
            )
            # also transform contour points
            RTSS2.SetAndObserveTransformNodeID(transformNode.GetID())

        ## Create segments from contour files
        # Create segmentation nodes where we will store segments
        segmentationNode_T1 = create_segmentation_node_with_reference_geometry(
            "SegmentationFromContourPoints_refT1", ref_geometry_image_node=ref1
        )
        segmentationNode_T2 = create_segmentation_node_with_reference_geometry(
            "SegmentationFromContourPoints_refT2", ref_geometry_image_node=ref2
        )
        # load structure contour lists from json files; these are registered to
        # the original T1 and T2 images (they are not affected by registrations
        # of the ref_geometry node, which only defines extent and IJK-to-RAS).
        structure_contour_list_T1 = load_LPS_contour_points(
            os.path.join(input_folder, f"vs_gk_{case_number}" + "_t1", "contours.json")
        )
        structure_contour_list_T2 = load_LPS_contour_points(
            os.path.join(input_folder, f"vs_gk_{case_number}" + "_t2", "contours.json")
        )
        # create segments for all structures
        create_segments_from_structure_contour_list(segmentationNode_T1, structure_contour_list_T1)
        create_segments_from_structure_contour_list(segmentationNode_T2, structure_contour_list_T2)

        ## Export labelmaps in the requested reference frame(s)
        if register_T1_image_and_contour_points_to_T2_image:
            # contours were moved into the T2 frame
            save_labelmaps_from_planar_contour(
                segmentationNode_T2, ref2, export_only_tumour_seg, case_number, output_folder
            )
        elif register_T2_image_and_contour_points_to_T1_image:
            # contours were moved into the T1 frame
            save_labelmaps_from_planar_contour(
                segmentationNode_T1, ref1, export_only_tumour_seg, case_number, output_folder
            )
        else:
            # no registration: export in both native reference frames
            save_labelmaps_from_planar_contour(
                segmentationNode_T2, ref2, export_only_tumour_seg, case_number, output_folder
            )
            save_labelmaps_from_planar_contour(
                segmentationNode_T1, ref1, export_only_tumour_seg, case_number, output_folder
            )

        # save images as nifti files; the filename suffix records the frame
        # each image ends up in (same mapping as the three original branches)
        if register_T1_image_and_contour_points_to_T2_image:
            suffix_t1, suffix_t2 = "refT2", "refT2"
        elif register_T2_image_and_contour_points_to_T1_image:
            suffix_t1, suffix_t2 = "refT1", "refT1"
        else:
            suffix_t1, suffix_t2 = "refT1", "refT2"
        case_folder = os.path.join(output_folder, f"vs_gk_{case_number}")
        slicer.util.saveNode(ref1, os.path.join(case_folder, f"vs_gk_t1_{suffix_t1}.nii.gz"))
        slicer.util.saveNode(ref2, os.path.join(case_folder, f"vs_gk_t2_{suffix_t2}.nii.gz"))
    sys.exit(0)
# Script entry point: forward CLI arguments (without the program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
| 44.806391 | 128 | 0.692117 |
8412f0bfc5e5c0828b5bc96784bae56af00c9ffe | 459 | py | Python | library/migrations/0009_auto_20170214_1426.py | JingyiHU/Django-library | 02f631d82ba2ccba88aab3e5bd53f9a241b84891 | [
"MIT"
] | 1 | 2018-07-23T09:08:54.000Z | 2018-07-23T09:08:54.000Z | library/migrations/0009_auto_20170214_1426.py | JingyiHU/Django-library | 02f631d82ba2ccba88aab3e5bd53f9a241b84891 | [
"MIT"
] | null | null | null | library/migrations/0009_auto_20170214_1426.py | JingyiHU/Django-library | 02f631d82ba2ccba88aab3e5bd53f9a241b84891 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-14 06:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration (1.10.5): alters Book.price to an
    # optional CharField(max_length=20).
    dependencies = [
        # must be applied after the previous migration of the "library" app
        ('library', '0008_auto_20170214_1415'),
    ]
    operations = [
        migrations.AlterField(
            model_name='book',
            name='price',
            # price stored as free-form text; null=True allows NULL in the DB
            field=models.CharField(max_length=20, null=True),
        ),
    ]
| 21.857143 | 61 | 0.616558 |
e7717ba0dba348c1e83e151b72f7a350d2accdbd | 3,433 | py | Python | tests/integration/renderer/test_title.py | jbampton/dashR | edbcc040ffabb7956eeb4774e922794c53c557ce | [
"MIT"
] | null | null | null | tests/integration/renderer/test_title.py | jbampton/dashR | edbcc040ffabb7956eeb4774e922794c53c557ce | [
"MIT"
] | null | null | null | tests/integration/renderer/test_title.py | jbampton/dashR | edbcc040ffabb7956eeb4774e922794c53c557ce | [
"MIT"
] | null | null | null | import pdb
app_test_updating = """
library(dash)
library(dashHtmlComponents)
app <- Dash$new()
app$layout(htmlDiv(list(htmlH3("Press button see document title updating"),
htmlDiv(id="output", children="Awaiting output"),
htmlButton("Update", id="button", n_clicks=0),
htmlButton("Update Page", id="page", n_clicks=0),
htmlDiv(id="dummy"))
)
)
app$callback(output(id = 'output', property = 'children'),
list(input(id = 'page', property = 'n_clicks')),
function(n) {
Sys.sleep(5)
return(paste0("Page ", n))
})
app$run_server()
"""
app_test_no_update_title1 = """
library(dash)
library(dashHtmlComponents)
app <- Dash$new(update_title=NULL)
app$layout(htmlDiv(list(htmlH3("Press button see document title updating"),
htmlDiv(id="output", children="Awaiting output"),
htmlButton("Update", id="button", n_clicks=0),
htmlButton("Update Page", id="page", n_clicks=0),
htmlDiv(id="dummy"))
)
)
app$run_server()
"""
app_test_no_update_title2 = """
library(dash)
library(dashHtmlComponents)
app <- Dash$new(update_title="")
app$layout(htmlDiv(list(htmlH3("Press button see document title updating"),
htmlDiv(id="output", children="Awaiting output"),
htmlButton("Update", id="button", n_clicks=0),
htmlButton("Update Page", id="page", n_clicks=0),
htmlDiv(id="dummy"))
)
)
app$run_server()
"""
app_clientside_title1 = """
library(dash)
library(dashHtmlComponents)
app <- Dash$new(update_title=NULL)
app$layout(htmlDiv(list(htmlH3("Press button see document title updating"),
htmlDiv(id="output", children="Awaiting output"),
htmlButton("Update", id="button", n_clicks=0),
htmlButton("Update Page", id="page", n_clicks=0),
htmlDiv(id="dummy"))
)
)
app$callback(
output('dummy', 'children'),
params=list(input('page', 'n_clicks')),
"
function(n_clicks) {
document.title = 'Page ' + n_clicks;
return 'Page ' + n_clicks;
}"
)
app$run_server()
"""
app_clientside_title2 = """
library(dash)
library(dashHtmlComponents)
app <- Dash$new(update_title="")
app$layout(htmlDiv(list(htmlH3("Press button see document title updating"),
htmlDiv(id="output", children="Awaiting output"),
htmlButton("Update", id="button", n_clicks=0),
htmlButton("Update Page", id="page", n_clicks=0),
htmlDiv(id="dummy"))
)
)
app$callback(
output('dummy', 'children'),
params=list(input('page', 'n_clicks')),
"
function(n_clicks) {
document.title = 'Page ' + n_clicks;
return 'Page ' + n_clicks;
}"
)
app$run_server()
"""
def test_rstt001_update_title(dashr):
    # The "Update Page" callback sleeps 5s (see app_test_updating); while it
    # is pending, the default update_title "Updating..." must be displayed.
    dashr.start_server(app_test_updating)
    dashr.find_element("#page").click()
    assert dashr.driver.title == "Updating..."
def test_rstt002_update_title(dashr):
    # App created with update_title=NULL: the title must stay "Dash".
    dashr.start_server(app_test_no_update_title1)
    assert dashr.driver.title == "Dash"
def test_rstt003_update_title(dashr):
    # App created with update_title="": the title must stay "Dash".
    dashr.start_server(app_test_no_update_title2)
    assert dashr.driver.title == "Dash"
def test_rstt004_update_title(dashr):
    # update_title=NULL plus a clientside callback that sets document.title;
    # once the callback output appears, the title must be the callback's value.
    dashr.start_server(app_clientside_title1)
    dashr.find_element("#page").click()
    dashr.wait_for_text_to_equal("#dummy", "Page 1")
    assert dashr.driver.title == "Page 1"
def test_rstt005_update_title(dashr):
    # Same as rstt004 but with update_title="": the clientside callback's
    # document.title assignment must still win.
    dashr.start_server(app_clientside_title2)
    dashr.find_element("#page").click()
    dashr.wait_for_text_to_equal("#dummy", "Page 1")
    assert dashr.driver.title == "Page 1"
| 25.42963 | 75 | 0.685989 |
d47079b3a3183a6810130b791be1defe6eb09003 | 208 | py | Python | Python/MachineLearning/linearRegression.py | buildcodeblocks/PythonBlocks | 2e0d9fa75016fa0b5d5ab42fe55934c7777b74cd | [
"MIT"
] | 1 | 2020-09-18T05:17:00.000Z | 2020-09-18T05:17:00.000Z | Python/MachineLearning/linearRegression.py | buildcodeblocks/Blocks | 2e0d9fa75016fa0b5d5ab42fe55934c7777b74cd | [
"MIT"
] | null | null | null | Python/MachineLearning/linearRegression.py | buildcodeblocks/Blocks | 2e0d9fa75016fa0b5d5ab42fe55934c7777b74cd | [
"MIT"
] | null | null | null | from sklearn.linear_model import LinearRegression
# linear regression model
def linearRegression(X, y, fit_intercept=True):
    """Fit an ordinary least squares model to (X, y) and return it.

    X -- feature matrix, y -- targets (as accepted by sklearn's fit()).
    fit_intercept -- whether the model should learn an intercept term.

    Bug fix: fit_intercept is now forwarded by keyword. scikit-learn >= 1.0
    makes estimator constructor parameters keyword-only, so the original
    positional call LinearRegression(fit_intercept) raises TypeError there.
    """
    model = LinearRegression(fit_intercept=fit_intercept)
    model.fit(X, y)
    return model
| 20.8 | 49 | 0.764423 |
bc44197c0a7ad0aa18d634b52114d558faff98e0 | 568 | py | Python | setup.py | alimanfoo/dask-kubernetes | 83e4706a439ce827b7bb9784b9e1f5b0486fb254 | [
"BSD-3-Clause"
] | null | null | null | setup.py | alimanfoo/dask-kubernetes | 83e4706a439ce827b7bb9784b9e1f5b0486fb254 | [
"BSD-3-Clause"
] | null | null | null | setup.py | alimanfoo/dask-kubernetes | 83e4706a439ce827b7bb9784b9e1f5b0486fb254 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
from os.path import exists
from setuptools import setup, find_packages
# Package metadata for dask-kubernetes (setuptools entry point).
setup(
    name='dask-kubernetes',
    version='0.5.0',
    description='Native Kubernetes integration for Dask',
    url='https://github.com/dask/dask-kubernetes',
    keywords='dask,kubernetes,distributed',
    license='BSD',
    packages=find_packages(),
    include_package_data=True,
    # use README.rst as the long description when it exists (sdists may lack it)
    long_description=(open('README.rst').read() if exists('README.rst') else ''),
    zip_safe=False,
    # runtime dependencies are maintained in requirements.txt, one per line
    install_requires=list(open('requirements.txt').read().strip().split('\n')),
)
| 29.894737 | 81 | 0.698944 |
de41f1a644aecc6c4897c5ce42492b9d49270818 | 2,642 | py | Python | z_infra_provisioning/cloud_infra_center/ocp_upi/tools/modify-bastion.py | stev-glodowski/z_ansible_collections_samples | 999648b93cc8d38a9e30a8983a6f5fe238338fc7 | [
"Apache-2.0"
] | null | null | null | z_infra_provisioning/cloud_infra_center/ocp_upi/tools/modify-bastion.py | stev-glodowski/z_ansible_collections_samples | 999648b93cc8d38a9e30a8983a6f5fe238338fc7 | [
"Apache-2.0"
] | null | null | null | z_infra_provisioning/cloud_infra_center/ocp_upi/tools/modify-bastion.py | stev-glodowski/z_ansible_collections_samples | 999648b93cc8d38a9e30a8983a6f5fe238338fc7 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# =================================================================
# Licensed Materials - Property of IBM
#
# (c) Copyright IBM Corp. 2021 All Rights Reserved
#
# US Government Users Restricted Rights - Use, duplication or
# disclosure restricted by GSA ADP Schedule Contract with IBM Corp.
# =================================================================
import yaml
import os
"""
This script will get current cluster nodes IP addresses from Ports, and then rewrite them into bastion-template.yaml.
So that you can use bastion-template.yaml to configure DNS and HAProxy on bastion node.
"""
def get_bastion_template():
    """Load bastion-template.yaml from the working directory and return its parsed content."""
    with open("bastion-template.yaml", "r") as f:
        # safe_load: yaml.load() without an explicit Loader is unsafe on
        # untrusted files and is a hard error on PyYAML >= 6. The template is
        # plain data, so safe_load is sufficient and behavior-compatible.
        cont = yaml.safe_load(f)
    return cont
def get_infra_id():
    """Return the cluster infra ID read from metadata.json via the jq CLI.

    The trailing newline emitted by jq is stripped. Raises SystemExit(1)
    when jq produces no output (jq missing, metadata.json missing, or no
    infraID key) — previously this used os._exit(1), which skips interpreter
    cleanup (atexit handlers, buffered output flushing).
    """
    cmd = "jq -r .infraID metadata.json"
    # Strip newlines before the emptiness check, so that bare-newline output
    # is also treated as "no infra ID" instead of being returned.
    infra_id = os.popen(cmd).read().replace("\n", "")
    if infra_id == "":
        raise SystemExit(1)
    return infra_id
def get_nodes_ips(infra_id, node_role):
    """Return {node_name: info} for all cluster nodes of the given role.

    Shells out to `openstack port list` and parses port name and IP address
    for ports named <infra_id>-<node_role>*. For node_role == "master" each
    entry additionally carries the matching etcd member name, e.g.:
    {"master-0": {"ip": "172.26.103.1", "etcd": "etcd-0"}, ...}
    """
    cmd = "openstack port list | grep %s | awk '{print$4,$8}'" % (infra_id+"-"+node_role)
    result = os.popen(cmd).read()
    """
    The example output of above command:
    $ openstack port list | grep 4bzs4-worker | awk '{print$4,$8}'
    ansible-4bzs4-worker-port-1 ip_address='172.26.105.157',
    ansible-4bzs4-worker-port-0 ip_address='172.26.105.34',
    """
    nodes = result.split("\n")
    nodes_dict = {}
    for node in nodes:
        # port name, e.g. "ansible-4bzs4-worker-port-0"
        name = node.split(" ")[0]
        n = name.split("-")
        if len(n) < 4:
            # not a port entry (e.g. the trailing empty line) -> stop parsing
            break
        if len(n) == 5:
            # name with per-node index, e.g. ...-worker-port-0 -> "worker-0"
            name = n[2] + "-" +n[4]
        else:
            name = n[2]
        ip = node.split(" ")[1]
        # strip the ip_address='...' wrapper, keeping only the address
        ip = ip.split("'")[1]
        if node_role == "master":
            # masters additionally reference their matching etcd member name
            nodes_dict[name] = {"ip": ip, "etcd": name.replace("master", "etcd")}
        else:
            nodes_dict[name] = {"ip": ip}
    return nodes_dict
# Build the bastion configuration: collect current node IPs per role and
# write them back into bastion-template.yaml (used for DNS/HAProxy setup).
bastion_dict = get_bastion_template()
infra_id = get_infra_id()
bootstrap = get_nodes_ips(infra_id, "bootstrap")
bastion_dict["cluster_nodes"]["bootstrap"] = bootstrap
master = get_nodes_ips(infra_id, "master")
bastion_dict["cluster_nodes"]["masters"] = master
worker = get_nodes_ips(infra_id, "worker")
# NOTE(review): worker nodes are stored under the "infra" key — presumably the
# template names the worker section "infra"; confirm against bastion-template.yaml.
bastion_dict["cluster_nodes"]["infra"] = worker
with open("bastion-template.yaml", "w") as b:
    result = yaml.dump(bastion_dict)
    b.write(result)
| 31.082353 | 118 | 0.579485 |
02fa554e00692abf52fe107dd991f5f1f16f0eef | 654 | py | Python | histograma/histograma.py | jhonatantft/digital-image-processing | 7f12f0c0665c4e4422d121b8a431bdd00b9bb862 | [
"MIT"
] | null | null | null | histograma/histograma.py | jhonatantft/digital-image-processing | 7f12f0c0665c4e4422d121b8a431bdd00b9bb862 | [
"MIT"
] | null | null | null | histograma/histograma.py | jhonatantft/digital-image-processing | 7f12f0c0665c4e4422d121b8a431bdd00b9bb862 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
from matplotlib import pyplot as plt
# Grayscale histogram demo: compare the full-image histogram with the
# histogram restricted to a rectangular masked region.
# NOTE(review): cv2.imread returns None when the file is missing, in which
# case img.shape below would fail — confirm ../grama.jpg exists when running.
img = cv2.imread('../grama.jpg',0)  # flag 0 -> load as grayscale
# create a mask
mask = np.zeros(img.shape[:2], np.uint8)
mask[100:300, 100:400] = 255  # rectangular region of interest (rows, cols)
masked_img = cv2.bitwise_and(img,img,mask = mask)
# Calculate histogram with mask and without mask
# Check third argument for mask
hist_full = cv2.calcHist([img],[0],None,[256],[0,256])
hist_mask = cv2.calcHist([img],[0],mask,[256],[0,256])
# 2x2 figure: image, mask, masked image, and both histograms overlaid
plt.subplot(221), plt.imshow(img, 'gray')
plt.subplot(222), plt.imshow(mask,'gray')
plt.subplot(223), plt.imshow(masked_img, 'gray')
plt.subplot(224), plt.plot(hist_full), plt.plot(hist_mask)
plt.xlim([0,256])
plt.show() | 28.434783 | 58 | 0.70948 |
3ff14a98bf25151f7aef1941d2a954978728dfe3 | 31,445 | py | Python | projects/UniDet/unidet/data/datasets/inst_categories.py | Othinus099/interior | 5c3eaa59722d71374422d34f42342c659d7ce8b0 | [
"Apache-2.0"
] | 349 | 2021-02-26T01:56:33.000Z | 2022-03-29T06:38:00.000Z | projects/UniDet/unidet/data/datasets/inst_categories.py | Othinus099/interior | 5c3eaa59722d71374422d34f42342c659d7ce8b0 | [
"Apache-2.0"
] | 16 | 2021-03-03T20:02:38.000Z | 2022-02-28T14:50:39.000Z | projects/UniDet/unidet/data/datasets/inst_categories.py | Othinus099/interior | 5c3eaa59722d71374422d34f42342c659d7ce8b0 | [
"Apache-2.0"
] | 33 | 2021-02-26T04:44:57.000Z | 2022-02-19T02:32:03.000Z | categories = {
'coco': [
{"color": [220, 20, 60], "isthing": 1, "id": 1, "name": "person"},
{"color": [119, 11, 32], "isthing": 1, "id": 2, "name": "bicycle"},
{"color": [0, 0, 142], "isthing": 1, "id": 3, "name": "car"},
{"color": [0, 0, 230], "isthing": 1, "id": 4, "name": "motorcycle"},
{"color": [106, 0, 228], "isthing": 1, "id": 5, "name": "airplane"},
{"color": [0, 60, 100], "isthing": 1, "id": 6, "name": "bus"},
{"color": [0, 80, 100], "isthing": 1, "id": 7, "name": "train"},
{"color": [0, 0, 70], "isthing": 1, "id": 8, "name": "truck"},
{"color": [0, 0, 192], "isthing": 1, "id": 9, "name": "boat"},
{"color": [250, 170, 30], "isthing": 1, "id": 10, "name": "traffic light"},
{"color": [100, 170, 30], "isthing": 1, "id": 11, "name": "fire hydrant"},
{"color": [220, 220, 0], "isthing": 1, "id": 13, "name": "stop sign"},
{"color": [175, 116, 175], "isthing": 1, "id": 14, "name": "parking meter"},
{"color": [250, 0, 30], "isthing": 1, "id": 15, "name": "bench"},
{"color": [165, 42, 42], "isthing": 1, "id": 16, "name": "bird"},
{"color": [255, 77, 255], "isthing": 1, "id": 17, "name": "cat"},
{"color": [0, 226, 252], "isthing": 1, "id": 18, "name": "dog"},
{"color": [182, 182, 255], "isthing": 1, "id": 19, "name": "horse"},
{"color": [0, 82, 0], "isthing": 1, "id": 20, "name": "sheep"},
{"color": [120, 166, 157], "isthing": 1, "id": 21, "name": "cow"},
{"color": [110, 76, 0], "isthing": 1, "id": 22, "name": "elephant"},
{"color": [174, 57, 255], "isthing": 1, "id": 23, "name": "bear"},
{"color": [199, 100, 0], "isthing": 1, "id": 24, "name": "zebra"},
{"color": [72, 0, 118], "isthing": 1, "id": 25, "name": "giraffe"},
{"color": [255, 179, 240], "isthing": 1, "id": 27, "name": "backpack"},
{"color": [0, 125, 92], "isthing": 1, "id": 28, "name": "umbrella"},
{"color": [209, 0, 151], "isthing": 1, "id": 31, "name": "handbag"},
{"color": [188, 208, 182], "isthing": 1, "id": 32, "name": "tie"},
{"color": [0, 220, 176], "isthing": 1, "id": 33, "name": "suitcase"},
{"color": [255, 99, 164], "isthing": 1, "id": 34, "name": "frisbee"},
{"color": [92, 0, 73], "isthing": 1, "id": 35, "name": "skis"},
{"color": [133, 129, 255], "isthing": 1, "id": 36, "name": "snowboard"},
{"color": [78, 180, 255], "isthing": 1, "id": 37, "name": "sports ball"},
{"color": [0, 228, 0], "isthing": 1, "id": 38, "name": "kite"},
{"color": [174, 255, 243], "isthing": 1, "id": 39, "name": "baseball bat"},
{"color": [45, 89, 255], "isthing": 1, "id": 40, "name": "baseball glove"},
{"color": [134, 134, 103], "isthing": 1, "id": 41, "name": "skateboard"},
{"color": [145, 148, 174], "isthing": 1, "id": 42, "name": "surfboard"},
{"color": [255, 208, 186], "isthing": 1, "id": 43, "name": "tennis racket"},
{"color": [197, 226, 255], "isthing": 1, "id": 44, "name": "bottle"},
{"color": [171, 134, 1], "isthing": 1, "id": 46, "name": "wine glass"},
{"color": [109, 63, 54], "isthing": 1, "id": 47, "name": "cup"},
{"color": [207, 138, 255], "isthing": 1, "id": 48, "name": "fork"},
{"color": [151, 0, 95], "isthing": 1, "id": 49, "name": "knife"},
{"color": [9, 80, 61], "isthing": 1, "id": 50, "name": "spoon"},
{"color": [84, 105, 51], "isthing": 1, "id": 51, "name": "bowl"},
{"color": [74, 65, 105], "isthing": 1, "id": 52, "name": "banana"},
{"color": [166, 196, 102], "isthing": 1, "id": 53, "name": "apple"},
{"color": [208, 195, 210], "isthing": 1, "id": 54, "name": "sandwich"},
{"color": [255, 109, 65], "isthing": 1, "id": 55, "name": "orange"},
{"color": [0, 143, 149], "isthing": 1, "id": 56, "name": "broccoli"},
{"color": [179, 0, 194], "isthing": 1, "id": 57, "name": "carrot"},
{"color": [209, 99, 106], "isthing": 1, "id": 58, "name": "hot dog"},
{"color": [5, 121, 0], "isthing": 1, "id": 59, "name": "pizza"},
{"color": [227, 255, 205], "isthing": 1, "id": 60, "name": "donut"},
{"color": [147, 186, 208], "isthing": 1, "id": 61, "name": "cake"},
{"color": [153, 69, 1], "isthing": 1, "id": 62, "name": "chair"},
{"color": [3, 95, 161], "isthing": 1, "id": 63, "name": "couch"},
{"color": [163, 255, 0], "isthing": 1, "id": 64, "name": "potted plant"},
{"color": [119, 0, 170], "isthing": 1, "id": 65, "name": "bed"},
{"color": [0, 182, 199], "isthing": 1, "id": 67, "name": "dining table"},
{"color": [0, 165, 120], "isthing": 1, "id": 70, "name": "toilet"},
{"color": [183, 130, 88], "isthing": 1, "id": 72, "name": "tv"},
{"color": [95, 32, 0], "isthing": 1, "id": 73, "name": "laptop"},
{"color": [130, 114, 135], "isthing": 1, "id": 74, "name": "mouse"},
{"color": [110, 129, 133], "isthing": 1, "id": 75, "name": "remote"},
{"color": [166, 74, 118], "isthing": 1, "id": 76, "name": "keyboard"},
{"color": [219, 142, 185], "isthing": 1, "id": 77, "name": "cell phone"},
{"color": [79, 210, 114], "isthing": 1, "id": 78, "name": "microwave"},
{"color": [178, 90, 62], "isthing": 1, "id": 79, "name": "oven"},
{"color": [65, 70, 15], "isthing": 1, "id": 80, "name": "toaster"},
{"color": [127, 167, 115], "isthing": 1, "id": 81, "name": "sink"},
{"color": [59, 105, 106], "isthing": 1, "id": 82, "name": "refrigerator"},
{"color": [142, 108, 45], "isthing": 1, "id": 84, "name": "book"},
{"color": [196, 172, 0], "isthing": 1, "id": 85, "name": "clock"},
{"color": [95, 54, 80], "isthing": 1, "id": 86, "name": "vase"},
{"color": [128, 76, 255], "isthing": 1, "id": 87, "name": "scissors"},
{"color": [201, 57, 1], "isthing": 1, "id": 88, "name": "teddy bear"},
{"color": [246, 0, 122], "isthing": 1, "id": 89, "name": "hair drier"},
{"color": [191, 162, 208], "isthing": 1, "id": 90, "name": "toothbrush"},
],
'cityscapes': [
{'id': i + 1, 'name': x} for i, x in enumerate(
["person", "rider", "car", "truck","bus", "train", "motorcycle", "bicycle"])
],
'mapillary': [
{'id': 1, 'name': 'animal--bird'},
{'id': 2, 'name': 'animal--ground-animal'},
{'id': 9, 'name': 'construction--flat--crosswalk-plain'},
{'id': 20, 'name': 'human--person'},
{'id': 21, 'name': 'human--rider--bicyclist'},
{'id': 22, 'name': 'human--rider--motorcyclist'},
{'id': 23, 'name': 'human--rider--other-rider'},
{'id': 24, 'name': 'marking--crosswalk-zebra'},
{'id': 33, 'name': 'object--banner'},
{'id': 34, 'name': 'object--bench'},
{'id': 35, 'name': 'object--bike-rack'},
{'id': 36, 'name': 'object--billboard'},
{'id': 37, 'name': 'object--catch-basin'},
{'id': 38, 'name': 'object--cctv-camera'},
{'id': 39, 'name': 'object--fire-hydrant'},
{'id': 40, 'name': 'object--junction-box'},
{'id': 41, 'name': 'object--mailbox'},
{'id': 42, 'name': 'object--manhole'},
{'id': 43, 'name': 'object--phone-booth'},
{'id': 45, 'name': 'object--street-light'},
{'id': 46, 'name': 'object--support--pole'},
{'id': 47, 'name': 'object--support--traffic-sign-frame'},
{'id': 48, 'name': 'object--support--utility-pole'},
{'id': 49, 'name': 'object--traffic-light'},
{'id': 50, 'name': 'object--traffic-sign--back'},
{'id': 51, 'name': 'object--traffic-sign--front'},
{'id': 52, 'name': 'object--trash-can'},
{'id': 53, 'name': 'object--vehicle--bicycle'},
{'id': 54, 'name': 'object--vehicle--boat'},
{'id': 55, 'name': 'object--vehicle--bus'},
{'id': 56, 'name': 'object--vehicle--car'},
{'id': 57, 'name': 'object--vehicle--caravan'},
{'id': 58, 'name': 'object--vehicle--motorcycle'},
{'id': 60, 'name': 'object--vehicle--other-vehicle'},
{'id': 61, 'name': 'object--vehicle--trailer'},
{'id': 62, 'name': 'object--vehicle--truck'},
{'id': 63, 'name': 'object--vehicle--wheeled-slow'},
],
'viper': [
{'id': 13, 'name': 'trafficlight', 'supercategory': ''},
{'id': 16, 'name': 'firehydrant', 'supercategory': ''},
{'id': 17, 'name': 'chair', 'supercategory': ''},
{'id': 19, 'name': 'trashcan', 'supercategory': ''},
{'id': 20, 'name': 'person', 'supercategory': ''},
{'id': 23, 'name': 'motorcycle', 'supercategory': ''},
{'id': 24, 'name': 'car', 'supercategory': ''},
{'id': 25, 'name': 'van', 'supercategory': ''},
{'id': 26, 'name': 'bus', 'supercategory': ''},
{'id': 27, 'name': 'truck', 'supercategory': ''},
],
'scannet': [
{'id': 3, 'name': 'cabinet', 'supercategory': 'furniture'},
{'id': 4, 'name': 'bed', 'supercategory': 'furniture'},
{'id': 5, 'name': 'chair', 'supercategory': 'furniture'},
{'id': 6, 'name': 'sofa', 'supercategory': 'furniture'},
{'id': 7, 'name': 'table', 'supercategory': 'furniture'},
{'id': 8, 'name': 'door', 'supercategory': 'furniture'},
{'id': 9, 'name': 'window', 'supercategory': 'furniture'},
{'id': 10, 'name': 'bookshelf', 'supercategory': 'furniture'},
{'id': 11, 'name': 'picture', 'supercategory': 'furniture'},
{'id': 12, 'name': 'counter', 'supercategory': 'furniture'},
{'id': 14, 'name': 'desk', 'supercategory': 'furniture'},
{'id': 16, 'name': 'curtain', 'supercategory': 'furniture'},
{'id': 24, 'name': 'refrigerator', 'supercategory': 'appliance'},
{'id': 28, 'name': 'shower curtain', 'supercategory': 'furniture'},
{'id': 33, 'name': 'toilet', 'supercategory': 'furniture'},
{'id': 34, 'name': 'sink', 'supercategory': 'appliance'},
{'id': 36, 'name': 'bathtub', 'supercategory': 'furniture'},
{'id': 39, 'name': 'otherfurniture', 'supercategory': 'furniture'},
],
'oid': [
{'id': 1, 'name': 'Screwdriver', 'freebase_id': '/m/01bms0'},
{'id': 2, 'name': 'Light switch', 'freebase_id': '/m/03jbxj'},
{'id': 3, 'name': 'Doughnut', 'freebase_id': '/m/0jy4k'},
{'id': 4, 'name': 'Toilet paper', 'freebase_id': '/m/09gtd'},
{'id': 5, 'name': 'Wrench', 'freebase_id': '/m/01j5ks'},
{'id': 6, 'name': 'Toaster', 'freebase_id': '/m/01k6s3'},
{'id': 7, 'name': 'Tennis ball', 'freebase_id': '/m/05ctyq'},
{'id': 8, 'name': 'Radish', 'freebase_id': '/m/015x5n'},
{'id': 9, 'name': 'Pomegranate', 'freebase_id': '/m/0jwn_'},
{'id': 10, 'name': 'Kite', 'freebase_id': '/m/02zt3'},
{'id': 11, 'name': 'Table tennis racket', 'freebase_id': '/m/05_5p_0'},
{'id': 12, 'name': 'Hamster', 'freebase_id': '/m/03qrc'},
{'id': 13, 'name': 'Barge', 'freebase_id': '/m/01btn'},
{'id': 14, 'name': 'Shower', 'freebase_id': '/m/02f9f_'},
{'id': 15, 'name': 'Printer', 'freebase_id': '/m/01m4t'},
{'id': 16, 'name': 'Snowmobile', 'freebase_id': '/m/01x3jk'},
{'id': 17, 'name': 'Fire hydrant', 'freebase_id': '/m/01pns0'},
{'id': 18, 'name': 'Limousine', 'freebase_id': '/m/01lcw4'},
{'id': 19, 'name': 'Whale', 'freebase_id': '/m/084zz'},
{'id': 20, 'name': 'Microwave oven', 'freebase_id': '/m/0fx9l'},
{'id': 21, 'name': 'Asparagus', 'freebase_id': '/m/0cjs7'},
{'id': 22, 'name': 'Lion', 'freebase_id': '/m/096mb'},
{'id': 23, 'name': 'Spatula', 'freebase_id': '/m/02d1br'},
{'id': 24, 'name': 'Torch', 'freebase_id': '/m/07dd4'},
{'id': 25, 'name': 'Volleyball', 'freebase_id': '/m/02rgn06'},
{'id': 26, 'name': 'Ambulance', 'freebase_id': '/m/012n7d'},
{'id': 27, 'name': 'Chopsticks', 'freebase_id': '/m/01_5g'},
{'id': 28, 'name': 'Raccoon', 'freebase_id': '/m/0dq75'},
{'id': 29, 'name': 'Blue jay', 'freebase_id': '/m/01f8m5'},
{'id': 30, 'name': 'Lynx', 'freebase_id': '/m/04g2r'},
{'id': 31, 'name': 'Dice', 'freebase_id': '/m/029b3'},
{'id': 32, 'name': 'Filing cabinet', 'freebase_id': '/m/047j0r'},
{'id': 33, 'name': 'Ruler', 'freebase_id': '/m/0hdln'},
{'id': 34, 'name': 'Power plugs and sockets', 'freebase_id': '/m/03bbps'},
{'id': 35, 'name': 'Bell pepper', 'freebase_id': '/m/0jg57'},
{'id': 36, 'name': 'Binoculars', 'freebase_id': '/m/0lt4_'},
{'id': 37, 'name': 'Pretzel', 'freebase_id': '/m/01f91_'},
{'id': 38, 'name': 'Hot dog', 'freebase_id': '/m/01b9xk'},
{'id': 39, 'name': 'Missile', 'freebase_id': '/m/04ylt'},
{'id': 40, 'name': 'Common fig', 'freebase_id': '/m/043nyj'},
{'id': 41, 'name': 'Croissant', 'freebase_id': '/m/015wgc'},
{'id': 42, 'name': 'Adhesive tape', 'freebase_id': '/m/03m3vtv'},
{'id': 43, 'name': 'Slow cooker', 'freebase_id': '/m/02tsc9'},
{'id': 44, 'name': 'Dog bed', 'freebase_id': '/m/0h8n6f9'},
{'id': 45, 'name': 'Harpsichord', 'freebase_id': '/m/03q5t'},
{'id': 46, 'name': 'Billiard table', 'freebase_id': '/m/04p0qw'},
{'id': 47, 'name': 'Alpaca', 'freebase_id': '/m/0pcr'},
{'id': 48, 'name': 'Harbor seal', 'freebase_id': '/m/02l8p9'},
{'id': 49, 'name': 'Grape', 'freebase_id': '/m/0388q'},
{'id': 50, 'name': 'Nail', 'freebase_id': '/m/05bm6'},
{'id': 51, 'name': 'Paper towel', 'freebase_id': '/m/02w3r3'},
{'id': 52, 'name': 'Alarm clock', 'freebase_id': '/m/046dlr'},
{'id': 53, 'name': 'Guacamole', 'freebase_id': '/m/02g30s'},
{'id': 54, 'name': 'Starfish', 'freebase_id': '/m/01h8tj'},
{'id': 55, 'name': 'Zebra', 'freebase_id': '/m/0898b'},
{'id': 56, 'name': 'Segway', 'freebase_id': '/m/076bq'},
{'id': 57, 'name': 'Sea turtle', 'freebase_id': '/m/0120dh'},
{'id': 58, 'name': 'Scissors', 'freebase_id': '/m/01lsmm'},
{'id': 59, 'name': 'Rhinoceros', 'freebase_id': '/m/03d443'},
{'id': 60, 'name': 'Kangaroo', 'freebase_id': '/m/04c0y'},
{'id': 61, 'name': 'Jaguar', 'freebase_id': '/m/0449p'},
{'id': 62, 'name': 'Leopard', 'freebase_id': '/m/0c29q'},
{'id': 63, 'name': 'Dumbbell', 'freebase_id': '/m/04h8sr'},
{'id': 64, 'name': 'Envelope', 'freebase_id': '/m/0frqm'},
{'id': 65, 'name': 'Winter melon', 'freebase_id': '/m/02cvgx'},
{'id': 66, 'name': 'Teapot', 'freebase_id': '/m/01fh4r'},
{'id': 67, 'name': 'Camel', 'freebase_id': '/m/01x_v'},
{'id': 68, 'name': 'Beaker', 'freebase_id': '/m/0d20w4'},
{'id': 69, 'name': 'Brown bear', 'freebase_id': '/m/01dxs'},
{'id': 70, 'name': 'Toilet', 'freebase_id': '/m/09g1w'},
{'id': 71, 'name': 'Teddy bear', 'freebase_id': '/m/0kmg4'},
{'id': 72, 'name': 'Briefcase', 'freebase_id': '/m/0584n8'},
{'id': 73, 'name': 'Stop sign', 'freebase_id': '/m/02pv19'},
{'id': 74, 'name': 'Tiger', 'freebase_id': '/m/07dm6'},
{'id': 75, 'name': 'Cabbage', 'freebase_id': '/m/0fbw6'},
{'id': 76, 'name': 'Giraffe', 'freebase_id': '/m/03bk1'},
{'id': 77, 'name': 'Polar bear', 'freebase_id': '/m/0633h'},
{'id': 78, 'name': 'Shark', 'freebase_id': '/m/0by6g'},
{'id': 79, 'name': 'Rabbit', 'freebase_id': '/m/06mf6'},
{'id': 80, 'name': 'Swim cap', 'freebase_id': '/m/04tn4x'},
{'id': 81, 'name': 'Pressure cooker', 'freebase_id': '/m/0h8ntjv'},
{'id': 82, 'name': 'Kitchen knife', 'freebase_id': '/m/058qzx'},
{'id': 83, 'name': 'Submarine sandwich', 'freebase_id': '/m/06pcq'},
{'id': 84, 'name': 'Flashlight', 'freebase_id': '/m/01kb5b'},
{'id': 85, 'name': 'Penguin', 'freebase_id': '/m/05z6w'},
{'id': 86, 'name': 'Snake', 'freebase_id': '/m/078jl'},
{'id': 87, 'name': 'Zucchini', 'freebase_id': '/m/027pcv'},
{'id': 88, 'name': 'Bat', 'freebase_id': '/m/01h44'},
{'id': 89, 'name': 'Food processor', 'freebase_id': '/m/03y6mg'},
{'id': 90, 'name': 'Ostrich', 'freebase_id': '/m/05n4y'},
{'id': 91, 'name': 'Sea lion', 'freebase_id': '/m/0gd36'},
{'id': 92, 'name': 'Goldfish', 'freebase_id': '/m/03fj2'},
{'id': 93, 'name': 'Elephant', 'freebase_id': '/m/0bwd_0j'},
{'id': 94, 'name': 'Rocket', 'freebase_id': '/m/09rvcxw'},
{'id': 95, 'name': 'Mouse', 'freebase_id': '/m/04rmv'},
{'id': 96, 'name': 'Oyster', 'freebase_id': '/m/0_cp5'},
{'id': 97, 'name': 'Digital clock', 'freebase_id': '/m/06_72j'},
{'id': 98, 'name': 'Otter', 'freebase_id': '/m/0cn6p'},
{'id': 99, 'name': 'Dolphin', 'freebase_id': '/m/02hj4'},
{'id': 100, 'name': 'Punching bag', 'freebase_id': '/m/0420v5'},
{'id': 101, 'name': 'Corded phone', 'freebase_id': '/m/0h8lkj8'},
{'id': 102, 'name': 'Tennis racket', 'freebase_id': '/m/0h8my_4'},
{'id': 103, 'name': 'Pancake', 'freebase_id': '/m/01dwwc'},
{'id': 104, 'name': 'Mango', 'freebase_id': '/m/0fldg'},
{'id': 105, 'name': 'Crocodile', 'freebase_id': '/m/09f_2'},
{'id': 106, 'name': 'Waffle', 'freebase_id': '/m/01dwsz'},
{'id': 107, 'name': 'Computer mouse', 'freebase_id': '/m/020lf'},
{'id': 108, 'name': 'Kettle', 'freebase_id': '/m/03s_tn'},
{'id': 109, 'name': 'Tart', 'freebase_id': '/m/02zvsm'},
{'id': 110, 'name': 'Oven', 'freebase_id': '/m/029bxz'},
{'id': 111, 'name': 'Banana', 'freebase_id': '/m/09qck'},
{'id': 112, 'name': 'Cheetah', 'freebase_id': '/m/0cd4d'},
{'id': 113, 'name': 'Raven', 'freebase_id': '/m/06j2d'},
{'id': 114, 'name': 'Frying pan', 'freebase_id': '/m/04v6l4'},
{'id': 115, 'name': 'Pear', 'freebase_id': '/m/061_f'},
{'id': 116, 'name': 'Fox', 'freebase_id': '/m/0306r'},
{'id': 117, 'name': 'Skateboard', 'freebase_id': '/m/06_fw'},
{'id': 118, 'name': 'Rugby ball', 'freebase_id': '/m/0wdt60w'},
{'id': 119, 'name': 'Watermelon', 'freebase_id': '/m/0kpqd'},
{'id': 120, 'name': 'Flute', 'freebase_id': '/m/0l14j_'},
{'id': 121, 'name': 'Canary', 'freebase_id': '/m/0ccs93'},
{'id': 122, 'name': 'Door handle', 'freebase_id': '/m/03c7gz'},
{'id': 123, 'name': 'Saxophone', 'freebase_id': '/m/06ncr'},
{'id': 124, 'name': 'Burrito', 'freebase_id': '/m/01j3zr'},
{'id': 125, 'name': 'Suitcase', 'freebase_id': '/m/01s55n'},
{'id': 126, 'name': 'Roller skates', 'freebase_id': '/m/02p3w7d'},
{'id': 127, 'name': 'Dagger', 'freebase_id': '/m/02gzp'},
{'id': 128, 'name': 'Seat belt', 'freebase_id': '/m/0dkzw'},
{'id': 129, 'name': 'Washing machine', 'freebase_id': '/m/0174k2'},
{'id': 130, 'name': 'Jet ski', 'freebase_id': '/m/01xs3r'},
{'id': 131, 'name': 'Sombrero', 'freebase_id': '/m/02jfl0'},
{'id': 132, 'name': 'Pig', 'freebase_id': '/m/068zj'},
{'id': 133, 'name': 'Drinking straw', 'freebase_id': '/m/03v5tg'},
{'id': 134, 'name': 'Peach', 'freebase_id': '/m/0dj6p'},
{'id': 135, 'name': 'Tortoise', 'freebase_id': '/m/011k07'},
{'id': 136, 'name': 'Towel', 'freebase_id': '/m/0162_1'},
{'id': 137, 'name': 'Tablet computer', 'freebase_id': '/m/0bh9flk'},
{'id': 138, 'name': 'Cucumber', 'freebase_id': '/m/015x4r'},
{'id': 139, 'name': 'Mule', 'freebase_id': '/m/0dbzx'},
{'id': 140, 'name': 'Potato', 'freebase_id': '/m/05vtc'},
{'id': 141, 'name': 'Frog', 'freebase_id': '/m/09ld4'},
{'id': 142, 'name': 'Bear', 'freebase_id': '/m/01dws'},
{'id': 143, 'name': 'Lighthouse', 'freebase_id': '/m/04h7h'},
{'id': 144, 'name': 'Belt', 'freebase_id': '/m/0176mf'},
{'id': 145, 'name': 'Baseball bat', 'freebase_id': '/m/03g8mr'},
{'id': 146, 'name': 'Racket', 'freebase_id': '/m/0dv9c'},
{'id': 147, 'name': 'Sword', 'freebase_id': '/m/06y5r'},
{'id': 148, 'name': 'Bagel', 'freebase_id': '/m/01fb_0'},
{'id': 149, 'name': 'Goat', 'freebase_id': '/m/03fwl'},
{'id': 150, 'name': 'Lizard', 'freebase_id': '/m/04m9y'},
{'id': 151, 'name': 'Parrot', 'freebase_id': '/m/0gv1x'},
{'id': 152, 'name': 'Owl', 'freebase_id': '/m/09d5_'},
{'id': 153, 'name': 'Turkey', 'freebase_id': '/m/0jly1'},
{'id': 154, 'name': 'Cello', 'freebase_id': '/m/01xqw'},
{'id': 155, 'name': 'Knife', 'freebase_id': '/m/04ctx'},
{'id': 156, 'name': 'Handgun', 'freebase_id': '/m/0gxl3'},
{'id': 157, 'name': 'Carrot', 'freebase_id': '/m/0fj52s'},
{'id': 158, 'name': 'Hamburger', 'freebase_id': '/m/0cdn1'},
{'id': 159, 'name': 'Grapefruit', 'freebase_id': '/m/0hqkz'},
{'id': 160, 'name': 'Tap', 'freebase_id': '/m/02jz0l'},
{'id': 161, 'name': 'Tea', 'freebase_id': '/m/07clx'},
{'id': 162, 'name': 'Bull', 'freebase_id': '/m/0cnyhnx'},
{'id': 163, 'name': 'Turtle', 'freebase_id': '/m/09dzg'},
{'id': 164, 'name': 'Bust', 'freebase_id': '/m/04yqq2'},
{'id': 165, 'name': 'Monkey', 'freebase_id': '/m/08pbxl'},
{'id': 166, 'name': 'Wok', 'freebase_id': '/m/084rd'},
{'id': 167, 'name': 'Broccoli', 'freebase_id': '/m/0hkxq'},
{'id': 168, 'name': 'Pitcher', 'freebase_id': '/m/054fyh'},
{'id': 169, 'name': 'Whiteboard', 'freebase_id': '/m/02d9qx'},
{'id': 170, 'name': 'Squirrel', 'freebase_id': '/m/071qp'},
{'id': 171, 'name': 'Jug', 'freebase_id': '/m/08hvt4'},
{'id': 172, 'name': 'Woodpecker', 'freebase_id': '/m/01dy8n'},
{'id': 173, 'name': 'Pizza', 'freebase_id': '/m/0663v'},
{'id': 174, 'name': 'Surfboard', 'freebase_id': '/m/019w40'},
{'id': 175, 'name': 'Sofa bed', 'freebase_id': '/m/03m3pdh'},
{'id': 176, 'name': 'Sheep', 'freebase_id': '/m/07bgp'},
{'id': 177, 'name': 'Candle', 'freebase_id': '/m/0c06p'},
{'id': 178, 'name': 'Muffin', 'freebase_id': '/m/01tcjp'},
{'id': 179, 'name': 'Cookie', 'freebase_id': '/m/021mn'},
{'id': 180, 'name': 'Apple', 'freebase_id': '/m/014j1m'},
{'id': 181, 'name': 'Chest of drawers', 'freebase_id': '/m/05kyg_'},
{'id': 182, 'name': 'Skull', 'freebase_id': '/m/016m2d'},
{'id': 183, 'name': 'Chicken', 'freebase_id': '/m/09b5t'},
{'id': 184, 'name': 'Loveseat', 'freebase_id': '/m/0703r8'},
{'id': 185, 'name': 'Baseball glove', 'freebase_id': '/m/03grzl'},
{'id': 186, 'name': 'Piano', 'freebase_id': '/m/05r5c'},
{'id': 187, 'name': 'Waste container', 'freebase_id': '/m/0bjyj5'},
{'id': 188, 'name': 'Barrel', 'freebase_id': '/m/02zn6n'},
{'id': 189, 'name': 'Swan', 'freebase_id': '/m/0dftk'},
{'id': 190, 'name': 'Taxi', 'freebase_id': '/m/0pg52'},
{'id': 191, 'name': 'Lemon', 'freebase_id': '/m/09k_b'},
{'id': 192, 'name': 'Pumpkin', 'freebase_id': '/m/05zsy'},
{'id': 193, 'name': 'Sparrow', 'freebase_id': '/m/0h23m'},
{'id': 194, 'name': 'Orange', 'freebase_id': '/m/0cyhj_'},
{'id': 195, 'name': 'Tank', 'freebase_id': '/m/07cmd'},
{'id': 196, 'name': 'Sandwich', 'freebase_id': '/m/0l515'},
{'id': 197, 'name': 'Coffee', 'freebase_id': '/m/02vqfm'},
{'id': 198, 'name': 'Juice', 'freebase_id': '/m/01z1kdw'},
{'id': 199, 'name': 'Coin', 'freebase_id': '/m/0242l'},
{'id': 200, 'name': 'Pen', 'freebase_id': '/m/0k1tl'},
{'id': 201, 'name': 'Watch', 'freebase_id': '/m/0gjkl'},
{'id': 202, 'name': 'Eagle', 'freebase_id': '/m/09csl'},
{'id': 203, 'name': 'Goose', 'freebase_id': '/m/0dbvp'},
{'id': 204, 'name': 'Falcon', 'freebase_id': '/m/0f6wt'},
{'id': 205, 'name': 'Christmas tree', 'freebase_id': '/m/025nd'},
{'id': 206, 'name': 'Sunflower', 'freebase_id': '/m/0ftb8'},
{'id': 207, 'name': 'Vase', 'freebase_id': '/m/02s195'},
{'id': 208, 'name': 'Football', 'freebase_id': '/m/01226z'},
{'id': 209, 'name': 'Canoe', 'freebase_id': '/m/0ph39'},
{'id': 210, 'name': 'High heels', 'freebase_id': '/m/06k2mb'},
{'id': 211, 'name': 'Spoon', 'freebase_id': '/m/0cmx8'},
{'id': 212, 'name': 'Mug', 'freebase_id': '/m/02jvh9'},
{'id': 213, 'name': 'Swimwear', 'freebase_id': '/m/01gkx_'},
{'id': 214, 'name': 'Duck', 'freebase_id': '/m/09ddx'},
{'id': 215, 'name': 'Cat', 'freebase_id': '/m/01yrx'},
{'id': 216, 'name': 'Tomato', 'freebase_id': '/m/07j87'},
{'id': 217, 'name': 'Cocktail', 'freebase_id': '/m/024g6'},
{'id': 218, 'name': 'Clock', 'freebase_id': '/m/01x3z'},
{'id': 219, 'name': 'Cowboy hat', 'freebase_id': '/m/025rp__'},
{'id': 220, 'name': 'Miniskirt', 'freebase_id': '/m/01cmb2'},
{'id': 221, 'name': 'Cattle', 'freebase_id': '/m/01xq0k1'},
{'id': 222, 'name': 'Strawberry', 'freebase_id': '/m/07fbm7'},
{'id': 223, 'name': 'Bronze sculpture', 'freebase_id': '/m/01yx86'},
{'id': 224, 'name': 'Pillow', 'freebase_id': '/m/034c16'},
{'id': 225, 'name': 'Squash', 'freebase_id': '/m/0dv77'},
{'id': 226, 'name': 'Traffic light', 'freebase_id': '/m/015qff'},
{'id': 227, 'name': 'Saucer', 'freebase_id': '/m/03q5c7'},
{'id': 228, 'name': 'Reptile', 'freebase_id': '/m/06bt6'},
{'id': 229, 'name': 'Cake', 'freebase_id': '/m/0fszt'},
{'id': 230, 'name': 'Plastic bag', 'freebase_id': '/m/05gqfk'},
{'id': 231, 'name': 'Studio couch', 'freebase_id': '/m/026qbn5'},
{'id': 232, 'name': 'Beer', 'freebase_id': '/m/01599'},
{'id': 233, 'name': 'Scarf', 'freebase_id': '/m/02h19r'},
{'id': 234, 'name': 'Coffee cup', 'freebase_id': '/m/02p5f1q'},
{'id': 235, 'name': 'Wine', 'freebase_id': '/m/081qc'},
{'id': 236, 'name': 'Mushroom', 'freebase_id': '/m/052sf'},
{'id': 237, 'name': 'Traffic sign', 'freebase_id': '/m/01mqdt'},
{'id': 238, 'name': 'Camera', 'freebase_id': '/m/0dv5r'},
{'id': 239, 'name': 'Rose', 'freebase_id': '/m/06m11'},
{'id': 240, 'name': 'Couch', 'freebase_id': '/m/02crq1'},
{'id': 241, 'name': 'Handbag', 'freebase_id': '/m/080hkjn'},
{'id': 242, 'name': 'Fedora', 'freebase_id': '/m/02fq_6'},
{'id': 243, 'name': 'Sock', 'freebase_id': '/m/01nq26'},
{'id': 244, 'name': 'Computer keyboard', 'freebase_id': '/m/01m2v'},
{'id': 245, 'name': 'Mobile phone', 'freebase_id': '/m/050k8'},
{'id': 246, 'name': 'Ball', 'freebase_id': '/m/018xm'},
{'id': 247, 'name': 'Balloon', 'freebase_id': '/m/01j51'},
{'id': 248, 'name': 'Horse', 'freebase_id': '/m/03k3r'},
{'id': 249, 'name': 'Boot', 'freebase_id': '/m/01b638'},
{'id': 250, 'name': 'Fish', 'freebase_id': '/m/0ch_cf'},
{'id': 251, 'name': 'Backpack', 'freebase_id': '/m/01940j'},
{'id': 252, 'name': 'Skirt', 'freebase_id': '/m/02wv6h6'},
{'id': 253, 'name': 'Van', 'freebase_id': '/m/0h2r6'},
{'id': 254, 'name': 'Bread', 'freebase_id': '/m/09728'},
{'id': 255, 'name': 'Glove', 'freebase_id': '/m/0174n1'},
{'id': 256, 'name': 'Dog', 'freebase_id': '/m/0bt9lr'},
{'id': 257, 'name': 'Airplane', 'freebase_id': '/m/0cmf2'},
{'id': 258, 'name': 'Motorcycle', 'freebase_id': '/m/04_sv'},
{'id': 259, 'name': 'Drink', 'freebase_id': '/m/0271t'},
{'id': 260, 'name': 'Book', 'freebase_id': '/m/0bt_c3'},
{'id': 261, 'name': 'Train', 'freebase_id': '/m/07jdr'},
{'id': 262, 'name': 'Flower', 'freebase_id': '/m/0c9ph5'},
{'id': 263, 'name': 'Carnivore', 'freebase_id': '/m/01lrl'},
{'id': 264, 'name': 'Human ear', 'freebase_id': '/m/039xj_'},
{'id': 265, 'name': 'Toy', 'freebase_id': '/m/0138tl'},
{'id': 266, 'name': 'Box', 'freebase_id': '/m/025dyy'},
{'id': 267, 'name': 'Truck', 'freebase_id': '/m/07r04'},
{'id': 268, 'name': 'Wheel', 'freebase_id': '/m/083wq'},
{'id': 269, 'name': 'Aircraft', 'freebase_id': '/m/0k5j'},
{'id': 270, 'name': 'Bus', 'freebase_id': '/m/01bjv'},
{'id': 271, 'name': 'Human mouth', 'freebase_id': '/m/0283dt1'},
{'id': 272, 'name': 'Sculpture', 'freebase_id': '/m/06msq'},
{'id': 273, 'name': 'Shirt', 'freebase_id': '/m/01n4qj'},
{'id': 274, 'name': 'Hat', 'freebase_id': '/m/02dl1y'},
{'id': 275, 'name': 'Vehicle registration plate', 'freebase_id': '/m/01jfm_'},
{'id': 276, 'name': 'Guitar', 'freebase_id': '/m/0342h'},
{'id': 277, 'name': 'Sun hat', 'freebase_id': '/m/02wbtzl'},
{'id': 278, 'name': 'Bottle', 'freebase_id': '/m/04dr76w'},
{'id': 279, 'name': 'Luggage and bags', 'freebase_id': '/m/0hf58v5'},
{'id': 280, 'name': 'Trousers', 'freebase_id': '/m/07mhn'},
{'id': 281, 'name': 'Bicycle wheel', 'freebase_id': '/m/01bqk0'},
{'id': 282, 'name': 'Suit', 'freebase_id': '/m/01xyhv'},
{'id': 283, 'name': 'Bowl', 'freebase_id': '/m/04kkgm'},
{'id': 284, 'name': 'Man', 'freebase_id': '/m/04yx4'},
{'id': 285, 'name': 'Flowerpot', 'freebase_id': '/m/0fm3zh'},
{'id': 286, 'name': 'Laptop', 'freebase_id': '/m/01c648'},
{'id': 287, 'name': 'Boy', 'freebase_id': '/m/01bl7v'},
{'id': 288, 'name': 'Picture frame', 'freebase_id': '/m/06z37_'},
{'id': 289, 'name': 'Bird', 'freebase_id': '/m/015p6'},
{'id': 290, 'name': 'Car', 'freebase_id': '/m/0k4j'},
{'id': 291, 'name': 'Shorts', 'freebase_id': '/m/01bfm9'},
{'id': 292, 'name': 'Woman', 'freebase_id': '/m/03bt1vf'},
{'id': 293, 'name': 'Platter', 'freebase_id': '/m/099ssp'},
{'id': 294, 'name': 'Tie', 'freebase_id': '/m/01rkbr'},
{'id': 295, 'name': 'Girl', 'freebase_id': '/m/05r655'},
{'id': 296, 'name': 'Skyscraper', 'freebase_id': '/m/079cl'},
{'id': 297, 'name': 'Person', 'freebase_id': '/m/01g317'},
{'id': 298, 'name': 'Flag', 'freebase_id': '/m/03120'},
{'id': 299, 'name': 'Jeans', 'freebase_id': '/m/0fly7'},
{'id': 300, 'name': 'Dress', 'freebase_id': '/m/01d40f'},
],
'kitti':[
{'id': 24, 'name': 'person'},
{'id': 25, 'name': 'rider'},
{'id': 26, 'name': 'car'},
{'id': 27, 'name': 'truck'},
{'id': 28, 'name': 'bus'},
{'id': 31, 'name': 'train'},
{'id': 32, 'name': 'motorcycle'},
{'id': 33, 'name': 'bicycle'},
],
'wilddash': [
{'id': 1, 'name': 'ego vehicle'},
{'id': 24, 'name': 'person'},
{'id': 25, 'name': 'rider'},
{'id': 26, 'name': 'car'},
{'id': 27, 'name': 'truck'},
{'id': 28, 'name': 'bus'},
{'id': 29, 'name': 'caravan'},
{'id': 30, 'name': 'trailer'},
{'id': 31, 'name': 'train'},
{'id': 32, 'name': 'motorcycle'},
{'id': 33, 'name': 'bicycle'},
{'id': 34, 'name': 'pickup'},
{'id': 35, 'name': 'van'},
]
} | 64.701646 | 92 | 0.48103 |
c34604433b4bc38bfaaee8237a854b2640e6bb60 | 2,678 | py | Python | service/tasks/redo_tasks.py | CottageLabs/lodestone | 2e60f2138a49633398655bb7f728fd3d6ac92c43 | [
"Apache-2.0"
] | null | null | null | service/tasks/redo_tasks.py | CottageLabs/lodestone | 2e60f2138a49633398655bb7f728fd3d6ac92c43 | [
"Apache-2.0"
] | null | null | null | service/tasks/redo_tasks.py | CottageLabs/lodestone | 2e60f2138a49633398655bb7f728fd3d6ac92c43 | [
"Apache-2.0"
] | null | null | null | from octopus.core import app
from redis import Redis
class RedoTasks:
"""
from service.tasks.redo_tasks import RedoTasks
r = RedoTasks(model, task)
# Model has to be one of 'ethesis' or 'dataset'
# Task has to be one of 'deposit', 'poll' or 'ticket create'
r.all() # to redo all jobs in given model and task
r.job(id) # to redo a specific job in a given model and task
"""
def __init__(self, model, task):
if model not in ['ethesis', 'dataset']:
raise ValueError("Model has to be one of 'ethesis' or 'dataset'")
if task not in ['deposit', 'poll']:
raise ValueError("Task has to be one of 'deposit' or 'poll'")
self.r = Redis()
self.model = model
self.task = task
self.queue = None
self.error_queue = None
if model == 'dataset':
if task == 'deposit':
self.queue = app.config.get('DATASET_SUBMIT_QUEUE')
elif task == 'poll':
self.queue = app.config.get('DATASET_POLL_QUEUE')
elif model == 'ethesis':
if task == 'deposit':
self.queue = app.config.get('ETHESIS_SUBMIT_QUEUE')
elif task == 'poll':
self.queue = app.config.get('ETHESIS_POLL_QUEUE')
if not self.queue:
raise ValueError("There is no queue defined for this model and task")
self.error_queue = "%s_error" % self.queue
def all(self):
while self.r.llen(self.error_queue) > 0:
self.r.rpoplpush(self.error_queue, self.queue)
def job(self, job_id):
if self.r.lrem(self.error_queue, job_id) > 0:
self.r.lpush(self.queue, job_id)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-m", "--model", help="ethesis or dataset")
parser.add_argument("-t", "--task", help="task to restart")
parser.add_argument("-j", "--job", help="optional job id from error queue; you must have this or --all")
parser.add_argument("-a", "--all", action="store_true", help="restart the task for all existing errors; you must have this or --job")
args = parser.parse_args()
if args.model is None or args.task is None:
parser.print_help()
exit(0)
if args.job is None and args.all is None:
parser.print_help()
exit(0)
r = RedoTasks(args.model, args.task)
if args.job is not None:
print "Acting on job {x} in {y} error queue".format(x=args.job, y=args.model)
r.job(args.job)
else:
print "Acting on all jobs in {x} error queue".format(x=args.model)
r.all() | 36.684932 | 137 | 0.598208 |
99914399c8ac0ac1d53bec2148ea81dfac1b5f48 | 695 | py | Python | Python/Fundamentals/Functions(lab-exercise)/Palindrome Integers.py | EduardV777/Softuni-Python-Exercises | 79db667028aea7dfecb3dbbd834c752180c50f44 | [
"Unlicense"
] | null | null | null | Python/Fundamentals/Functions(lab-exercise)/Palindrome Integers.py | EduardV777/Softuni-Python-Exercises | 79db667028aea7dfecb3dbbd834c752180c50f44 | [
"Unlicense"
] | null | null | null | Python/Fundamentals/Functions(lab-exercise)/Palindrome Integers.py | EduardV777/Softuni-Python-Exercises | 79db667028aea7dfecb3dbbd834c752180c50f44 | [
"Unlicense"
] | null | null | null | seq=input()
def IsItPalindrome(seq):
k=0; list1=[]
while k<len(seq):
num=" "
if seq[k]!=",":
for j in range(k,len(seq)):
if seq[j]!=",":
num+=seq[j]
k+=1
else:
k+=2
break
list1.append(num)
else:
k+=2
for k in range(0,len(list1)):
reverseVal=""
for j in range(len(list1[k])-1,-1,-1):
val=int(list1[k])
reverseVal+=list1[k][j]
if int(reverseVal)==val:
print("True")
else:
print("False")
IsItPalindrome(seq)
| 25.740741 | 47 | 0.374101 |
0f0ed2f1538d9326a8a8b92f2f552bd2873b0ede | 590 | py | Python | task02_a.py | mboehn/aoc2017 | 1bf5302c6e566e8454d3e567cfac38945c8fe955 | [
"MIT"
] | null | null | null | task02_a.py | mboehn/aoc2017 | 1bf5302c6e566e8454d3e567cfac38945c8fe955 | [
"MIT"
] | null | null | null | task02_a.py | mboehn/aoc2017 | 1bf5302c6e566e8454d3e567cfac38945c8fe955 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import csv
import func
INPUTFILE = './task02.input'
checksum = 0
with open(INPUTFILE, mode='r') as csvfile:
reader = csv.reader(csvfile, delimiter="\t")
for row in reader:
row = list(map(int, row))
print(row)
rowdiff = int(max(row)) - int(min(row))
print("{}current checksum: {}\t|max: {}\t| min: {}\t| diff: {}\t| new checksum: {}{}".format(func.bcolors.BOLD, checksum, max(row), min(row), rowdiff, checksum + rowdiff, func.bcolors.ENDC))
checksum = checksum + rowdiff
print("checksum is: {}".format(checksum))
| 26.818182 | 198 | 0.618644 |
cf17002d8887977f08725c498c86933ff720acc4 | 4,618 | py | Python | deslib/static/single_best.py | mrtrunghieu1/Mutil-DesLib-Algorithm | 4fd82c553adc34561f6698b18a08ad89a58deee6 | [
"BSD-3-Clause"
] | null | null | null | deslib/static/single_best.py | mrtrunghieu1/Mutil-DesLib-Algorithm | 4fd82c553adc34561f6698b18a08ad89a58deee6 | [
"BSD-3-Clause"
] | null | null | null | deslib/static/single_best.py | mrtrunghieu1/Mutil-DesLib-Algorithm | 4fd82c553adc34561f6698b18a08ad89a58deee6 | [
"BSD-3-Clause"
] | null | null | null | # coding=utf-8
# Author: Rafael Menelau Oliveira e Cruz <rafaelmenelau@gmail.com>
#
# License: BSD 3 clause
import numpy as np
from .base import BaseStaticEnsemble
from sklearn.utils.validation import check_X_y, check_is_fitted, check_array
class SingleBest(BaseStaticEnsemble):
"""Classification method that selects the classifier in the pool with
highest score to be used for classification. Usually, the performance of
the single best classifier is estimated based on the validation data.
Parameters
----------
pool_classifiers : list of classifiers (Default = None)
The generated_pool of classifiers trained for the corresponding
classification problem. Each base classifiers should support the method
"predict". If None, then the pool of classifiers is a bagging
classifier.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
References
----------
Britto, Alceu S., Robert Sabourin, and Luiz ES Oliveira. "Dynamic selection
of classifiers—a comprehensive review."
Pattern Recognition 47.11 (2014): 3665-3680.
Kuncheva, Ludmila I. Combining pattern classifiers: methods and algorithms.
John Wiley & Sons, 2004.
R. M. O. Cruz, R. Sabourin, and G. D. Cavalcanti, “Dynamic classifier
selection: Recent advances and perspectives,”
Information Fusion, vol. 41, pp. 195 – 216, 2018.
"""
def __init__(self, pool_classifiers=None, random_state=None):
super(SingleBest, self).__init__(pool_classifiers=pool_classifiers,
random_state=random_state)
def fit(self, X, y):
"""Fit the model by selecting the base classifier with the highest
accuracy in the dataset. The single best classifier is kept in
self.best_clf and its index is kept in self.best_clf_index.
Parameters
----------
X : array of shape = [n_samples, n_features]
Data used to fit the model.
y : array of shape = [n_samples]
class labels of each example in X.
"""
X, y = check_X_y(X, y)
super(SingleBest, self).fit(X, y)
if not self.base_already_encoded_:
y_encoded = y
else:
y_encoded = self.enc_.transform(y)
performances = self._estimate_performances(X, y_encoded)
self.best_clf_index_ = np.argmax(performances)
self.best_clf_ = self.pool_classifiers_[self.best_clf_index_]
return self
def _estimate_performances(self, X, y):
performances = np.zeros(self.n_classifiers_)
for idx, clf in enumerate(self.pool_classifiers_):
performances[idx] = clf.score(X, y)
return performances
def predict(self, X):
"""Predict the label of each sample in X and returns the predicted
label.
Parameters
----------
X : array of shape = [n_samples, n_features]
The data to be classified
Returns
-------
predicted_labels : array of shape = [n_samples]
Predicted class for each sample in X.
"""
X = check_array(X)
self._check_is_fitted()
predicted_labels = self._encode_base_labels(self.best_clf_.predict(X))
return self.classes_.take(predicted_labels)
def predict_proba(self, X):
"""Estimates the posterior probabilities for each class for each sample
in X. The returned probability estimates for all classes are ordered by
the label of classes.
Parameters
----------
X : array of shape = [n_samples, n_features]
The data to be classified
Returns
-------
predicted_proba : array of shape = [n_samples, n_classes]
Posterior probabilities estimates for each class.
"""
self._check_is_fitted()
if "predict_proba" not in dir(self.best_clf_):
raise ValueError(
"Base classifier must support the predict_proba function.")
predicted_proba = self.best_clf_.predict_proba(X)
return predicted_proba
def _check_is_fitted(self):
"""Verify if the estimator algorithm was fitted. Raises an error if it
is not fitted.
"""
check_is_fitted(self, "best_clf_")
| 34.721805 | 79 | 0.646167 |
4af81656b71017a2ef42ef5ca8631a9c96231c2c | 13,261 | py | Python | pyzoo/zoo/automl/model/Seq2Seq_pytorch.py | pinggao187/zoo-sphinx-test | c2033c79b25eba0e9f6f48be56a335aae994ea54 | [
"Apache-2.0"
] | null | null | null | pyzoo/zoo/automl/model/Seq2Seq_pytorch.py | pinggao187/zoo-sphinx-test | c2033c79b25eba0e9f6f48be56a335aae994ea54 | [
"Apache-2.0"
] | null | null | null | pyzoo/zoo/automl/model/Seq2Seq_pytorch.py | pinggao187/zoo-sphinx-test | c2033c79b25eba0e9f6f48be56a335aae994ea54 | [
"Apache-2.0"
] | 1 | 2020-09-01T06:53:08.000Z | 2020-09-01T06:53:08.000Z | #
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import random
import torch
import torch.nn as nn
from torch.utils.data import TensorDataset, DataLoader
from zoo.automl.model.abstract import BaseModel
from zoo.automl.common.util import *
from zoo.automl.common.metrics import Evaluator
class Encoder(nn.Module):
def __init__(self, input_dim, hidden_dim, layer_num, dropout):
super().__init__()
self.hidden_dim = hidden_dim
self.layer_num = layer_num
self.rnn = nn.LSTM(input_dim, hidden_dim, layer_num, dropout=dropout, batch_first=True)
for name, param in self.rnn.named_parameters():
if 'bias' in name:
nn.init.constant_(param, 0.0)
elif 'weight_ih' in name:
nn.init.xavier_normal_(param)
elif 'weight_hh' in name:
nn.init.orthogonal_(param)
def forward(self, input_seq):
# input_seq = [batch_size, seq_len, feature_num]
outputs, (hidden, cell) = self.rnn(input_seq)
# outputs = [batch size, seq len, hidden dim]
# hidden = [batch size, layer num, hidden dim]
# cell = [batch size, layer num, hidden dim]
# outputs are always from the top hidden layer
return hidden, cell
class Decoder(nn.Module):
def __init__(self, output_dim, hidden_dim, layer_num, dropout):
super().__init__()
self.output_dim = output_dim
self.hidden_dim = hidden_dim
self.layer_num = layer_num
self.rnn = nn.LSTM(output_dim, hidden_dim, layer_num, dropout=dropout, batch_first=True)
self.fc_out = nn.Linear(hidden_dim, output_dim)
for name, param in self.rnn.named_parameters():
if 'bias' in name:
nn.init.constant_(param, 0.0)
elif 'weight_ih' in name:
nn.init.xavier_normal_(param)
elif 'weight_hh' in name:
nn.init.orthogonal_(param)
def forward(self, decoder_input, hidden, cell):
# input = [batch size]
# hidden = [batch size, layer num, hidden dim]
# cell = [batch size, layer num, hidden dim]
# input = decoder_input.view(-1, 1)
# decoder_input = [batch size, 1], since output_dim is 1
decoder_input = decoder_input.unsqueeze(1)
# decoder_input = [batch_size, 1, 1]
output, (hidden, cell) = self.rnn(decoder_input, (hidden, cell))
# output = [batch size, seq len, hidden dim]
# hidden = [batch size, layer num, hidden dim]
# cell = [batch size, layer num, hidden dim]
# seq len will always be 1 in the decoder, therefore:
# output = [batch size, 1, hidden dim]
# hidden = [batch size, layer num, hidden dim]
# cell = [batch size, layer num, hidden dim]
prediction = self.fc_out(output.squeeze())
# prediction = [batch size, output dim]
return prediction, hidden, cell
class Seq2Seq(nn.Module):
def __init__(self, encoder, decoder, target_seq_len=1):
super().__init__()
self.encoder = encoder
self.decoder = decoder
self.target_seq_len = target_seq_len
def forward(self, source, target=None):
# past_seq_len
batch_size = source.shape[0]
output_dim = self.decoder.output_dim
# tensor to store the predicted outputs
target_seq = torch.zeros(batch_size, self.target_seq_len, output_dim)
# last hidden state of the encoder is used as the initial hidden state of the decoder
hidden, cell = self.encoder(source)
# Populate the first target sequence with end of encoding series value
# decoder_input : [batch_size, output_dim]
decoder_input = source[:, -1, :output_dim]
for i in range(self.target_seq_len):
decoder_output, hidden, cell = self.decoder(decoder_input, hidden, cell)
target_seq[:, i] = decoder_output
if target is None:
# in test mode
decoder_input = decoder_output
else:
decoder_input = target[:, i]
return target_seq
class Seq2SeqPytorch(BaseModel):
def __init__(self, check_optional_config=True, future_seq_len=1):
"""
Constructor of Vanilla LSTM model
"""
self.model = None
self.check_optional_config = check_optional_config
self.future_seq_len = future_seq_len
self.feature_num = None
self.output_dim = None
self.metric = None
self.batch_size = None
self.criterion = None
self.optimizer = None
def _get_configs(self, config):
super()._check_config(**config)
self.metric = config.get('metric', 'mean_squared_error')
self.batch_size = config.get('batch_size', 32)
self.hidden_dim = config.get('hidden_dim', 32)
self.layer_num = config.get('layer_num', 2)
self.dropout = config.get('dropout', 0.2)
self.lr = config.get("lr", 0.001)
def _load_data(self, input_data, batch_size):
x, y = input_data
data = TensorDataset(torch.from_numpy(x), torch.from_numpy(y))
data_loader = DataLoader(data, shuffle=True, batch_size=batch_size)
return data_loader
def _train_one_epoch(self, train_loader):
self.model.train()
train_losses = []
for input_seqs, target_seqs in train_loader:
self.model.zero_grad()
outputs = self.model(input_seqs, target_seqs)
loss = self.criterion(outputs, target_seqs)
# get gradients
loss.backward()
# update parameters
self.optimizer.step()
train_losses.append(loss.item())
return np.mean(train_losses)
def _val_one_epoch(self, val_loader):
self.model.eval()
val_losses = []
for val_input, val_target in val_loader:
val_out = self.model(val_input)
val_loss = self.criterion(val_out, val_target)
val_losses.append(val_loss.item())
return np.mean(val_losses)
def _test_one_epoch(self, test_loader, mc=False):
if not mc:
self.model.eval()
else:
self.model.train()
test_out_list = []
for test_input in test_loader:
# test_input is a list with one element
test_out = self.model(test_input[0])
test_out_list.append(test_out.detach().numpy())
predictions = np.concatenate(test_out_list)
y_pred = np.squeeze(predictions, axis=2)
return y_pred
def _print_model(self):
# print model and parameters
print(self.model)
print(len(list(self.model.parameters())))
for i in range(len(list(self.model.parameters()))):
print(list(self.model.parameters())[i].size())
def _expand_y(self, y):
"""
expand dims for y.
:param y:
:return:
"""
while len(y.shape) < 3:
y = np.expand_dims(y, axis=2)
return y
def _pre_processing(self, x, y, validation_data):
"""
pre_process input data
1. expand dims for y and vay_y
2. get input lengths
:param x:
:param y:
:param validation_data:
:return:
"""
y = self._expand_y(y)
self.feature_num = x.shape[2]
self.output_dim = y.shape[2]
if validation_data is not None:
val_x, val_y = validation_data
val_y = self._expand_y(val_y)
return x, y, (val_x, val_y)
def fit_eval(self, x, y, validation_data=None, mc=False, verbose=0, **config):
self._get_configs(config)
x, y, validation_data = self._pre_processing(x, y, validation_data)
# get data
train_loader = self._load_data((x, y), self.batch_size)
if validation_data:
val_loader = self._load_data(validation_data, self.batch_size)
encoder = Encoder(self.feature_num, self.hidden_dim, self.layer_num, self.dropout)
decoder = Decoder(self.output_dim, self.hidden_dim, self.layer_num, self.dropout)
self.model = Seq2Seq(encoder, decoder, target_seq_len=self.future_seq_len)
print(encoder)
print(decoder)
# self._print_model()
self.criterion = nn.MSELoss()
self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.lr)
epochs = config.get('epochs', 20)
assert (epochs > 0)
val_epoch = 1
for i in range(epochs):
train_loss = self._train_one_epoch(train_loader)
if verbose == 1:
print("Epoch : {}/{}...".format(i, epochs),
"Loss: {:.6f}...".format(train_loss),
)
if i % val_epoch == 0:
if validation_data:
val_loss = self._val_one_epoch(val_loader)
if verbose == 1:
print("Val loss: {:.6f}...".format(val_loss))
if validation_data:
result = val_loss
else:
result = train_loss
return result
def evaluate(self, x, y, metric=['mse']):
"""
Evaluate on x, y
:param x: input
:param y: target
:param metric: a list of metrics in string format
:return: a list of metric evaluation results
"""
y_pred = self.predict(x)
assert y_pred.shape == y.shape
return [Evaluator.evaluate(m, y, y_pred) for m in metric]
def predict(self, x, mc=False):
"""
Prediction on x.
:param x: input
:return: predicted y
"""
test_x = TensorDataset(torch.from_numpy(x))
test_loader = DataLoader(test_x, shuffle=False, batch_size=self.batch_size)
y_pred = self._test_one_epoch(test_loader, mc=mc)
return y_pred
def predict_with_uncertainty(self, x, n_iter=100):
    """
    Monte-Carlo prediction: run ``n_iter`` stochastic forward passes
    (``mc=True``) over the same data and aggregate them.

    :param x: input numpy array
    :param n_iter: number of stochastic passes
    :return: (mean prediction, per-point standard deviation)
    """
    loader = DataLoader(
        TensorDataset(torch.from_numpy(x)),
        shuffle=False,
        batch_size=self.batch_size,
    )
    samples = np.zeros((n_iter, x.shape[0], self.future_seq_len))
    for it in range(n_iter):
        samples[it, :, :] = self._test_one_epoch(loader, mc=True)
    return samples.mean(axis=0), samples.std(axis=0)
def save(self, model_path, config_path):
    """
    Persist the model weights and its configuration to disk.

    :param model_path: destination file for the model ``state_dict``
    :param config_path: destination file for the saved configuration
    :return: None
    """
    torch.save(self.model.state_dict(), model_path)
    # Everything `restore` needs to rebuild the network topology.
    persisted = {
        "future_seq_len": self.future_seq_len,
        "feature_num": self.feature_num,
        "metric": self.metric,
        "batch_size": self.batch_size,
        "hidden_dim": self.hidden_dim,
        "dropout": self.dropout,
        "layer_num": self.layer_num,
        "output_dim": self.output_dim,
    }
    save_config(config_path, persisted)
def restore(self, model_path, **config):
    """
    Rebuild the seq2seq model from a saved state dict and config.

    :param model_path: file holding the saved ``state_dict``
    :param config: the trial config; must contain every persisted key
    :return: None; ``self.model`` is left in eval mode
    """
    self.future_seq_len = config["future_seq_len"]
    self.feature_num = config["feature_num"]
    self.output_dim = config["output_dim"]
    # Continuing training requires the full set of keys written by `save`.
    required_keys = ("future_seq_len", "metric", "batch_size", "hidden_dim",
                     "dropout", "layer_num", "output_dim")
    assert all(key in config for key in required_keys)
    self._get_configs(config)
    enc = Encoder(self.feature_num, self.hidden_dim, self.layer_num, self.dropout)
    dec = Decoder(self.output_dim, self.hidden_dim, self.layer_num, self.dropout)
    self.model = Seq2Seq(enc, dec, target_seq_len=self.future_seq_len)
    self.model.load_state_dict(torch.load(model_path))
    self.model.eval()
def _get_required_parameters(self):
    """
    Return the set of config keys that must be supplied by the caller.

    Fix: the previous body returned ``{}``, which is an empty *dict*, not
    an empty set — inconsistent with ``_get_optional_parameters`` returning
    a set. ``set()`` keeps iteration/membership behaviour identical while
    matching the sibling method's type.
    """
    # No parameter is strictly required for this model at the moment.
    return set()
def _get_optional_parameters(self):
    """
    Return the set of config keys this model understands but does not
    require.

    Fix: ``'hidden_dim'`` appeared twice in the original set literal; a
    set de-duplicates it anyway, so removing the repeat is
    behaviour-neutral but eliminates the typo.
    """
    return {
        'hidden_dim',
        'layer_num',
        'dropout',
        'lr',
        'epochs',
        'batch_size'
    }
| 34.444156 | 96 | 0.603499 |
91fdb1d8e7c53c1194f7c0ed85efc802e047b61c | 170 | py | Python | snu/snu.py | MomsFriendlyRobotCompany/snu | 14bea044540b0ccbd4c557f56329bdea7ee22b82 | [
"MIT"
] | null | null | null | snu/snu.py | MomsFriendlyRobotCompany/snu | 14bea044540b0ccbd4c557f56329bdea7ee22b82 | [
"MIT"
] | null | null | null | snu/snu.py | MomsFriendlyRobotCompany/snu | 14bea044540b0ccbd4c557f56329bdea7ee22b82 | [
"MIT"
] | null | null | null |
# from snu.network.ip import get_ip
from snu.messages import Vector
from snu.messages import Twist
from snu.messages import Wrench
from snu.messages import Quaternion
| 18.888889 | 35 | 0.817647 |
e308e76faea0d272c858a5e4342d0e629e4e82cb | 138,936 | py | Python | Lib/site-packages/OCC/GeomPlate.py | JWerbrouck/RWTH_M1_Projekt | 7ae63a2277361fa3273cf0677b297379482b8240 | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/OCC/GeomPlate.py | JWerbrouck/RWTH_M1_Projekt | 7ae63a2277361fa3273cf0677b297379482b8240 | [
"bzip2-1.0.6"
] | 1 | 2022-03-17T16:46:04.000Z | 2022-03-17T16:46:04.000Z | Lib/site-packages/OCC/GeomPlate.py | JWerbrouck/RWTH_M1_Projekt | 7ae63a2277361fa3273cf0677b297379482b8240 | [
"bzip2-1.0.6"
] | null | null | null | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.1
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# Bind C-extension functions as instance methods in a way that works on
# both Python 3 (plain wrapper over the extension function) and
# Python 2 (the legacy `new.instancemethod` factory).
if version_info >= (3,0,0):
    new_instancemethod = lambda func, inst, cls: _GeomPlate.SWIG_PyInstanceMethod_New(func)
else:
    from new import instancemethod as new_instancemethod
# SWIG-generated bootstrap: locate and load the compiled `_GeomPlate`
# extension module that lives next to this wrapper.
if version_info >= (2,6,0):
    def swig_import_helper():
        # Search for the extension alongside this file using the (pre-importlib)
        # `imp` machinery; fall back to a normal sys.path import.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_GeomPlate', [dirname(__file__)])
        except ImportError:
            import _GeomPlate
            return _GeomPlate
        if fp is not None:
            try:
                _mod = imp.load_module('_GeomPlate', fp, pathname, description)
            finally:
                # Always close the file handle returned by find_module.
                fp.close()
            return _mod
    _GeomPlate = swig_import_helper()
    del swig_import_helper
else:
    import _GeomPlate
del version_info
# SWIG-generated attribute-access helpers shared by all proxy classes below.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    # SWIG setter: route 'thisown'/'this' to the underlying SwigPyObject,
    # honour generated setter tables, and (for non-dynamic classes) refuse
    # to create brand-new attributes.
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Dynamic-class variant: always allows new attributes.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # SWIG getter: 'thisown' reflects C++ ownership; other names are looked
    # up in the generated getter table.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    # Best-effort repr that shows the wrapped C++ pointer when available.
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
    _object = object
    _newclass = 1
except AttributeError:
    # Very old Pythons without new-style classes.
    class _object : pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
    # Wrap a setattr implementation so that only existing attributes
    # (plus 'this'/'thisown') may be assigned.
    def set_attr(self,name,value):
        if (name == "thisown"): return self.this.own(value)
        if hasattr(self,name) or (name == "this"):
            set(self,name,value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr
# SWIG proxy for the C++ iterator used by wrapped STL-style containers.
# Auto-generated: do not edit by hand.
class SwigPyIterator(object):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _GeomPlate.delete_SwigPyIterator
    def __iter__(self): return self
# Attach the C-level iterator operations as bound methods.
SwigPyIterator.value = new_instancemethod(_GeomPlate.SwigPyIterator_value,None,SwigPyIterator)
SwigPyIterator.incr = new_instancemethod(_GeomPlate.SwigPyIterator_incr,None,SwigPyIterator)
SwigPyIterator.decr = new_instancemethod(_GeomPlate.SwigPyIterator_decr,None,SwigPyIterator)
SwigPyIterator.distance = new_instancemethod(_GeomPlate.SwigPyIterator_distance,None,SwigPyIterator)
SwigPyIterator.equal = new_instancemethod(_GeomPlate.SwigPyIterator_equal,None,SwigPyIterator)
SwigPyIterator.copy = new_instancemethod(_GeomPlate.SwigPyIterator_copy,None,SwigPyIterator)
SwigPyIterator.next = new_instancemethod(_GeomPlate.SwigPyIterator_next,None,SwigPyIterator)
SwigPyIterator.__next__ = new_instancemethod(_GeomPlate.SwigPyIterator___next__,None,SwigPyIterator)
SwigPyIterator.previous = new_instancemethod(_GeomPlate.SwigPyIterator_previous,None,SwigPyIterator)
SwigPyIterator.advance = new_instancemethod(_GeomPlate.SwigPyIterator_advance,None,SwigPyIterator)
SwigPyIterator.__eq__ = new_instancemethod(_GeomPlate.SwigPyIterator___eq__,None,SwigPyIterator)
SwigPyIterator.__ne__ = new_instancemethod(_GeomPlate.SwigPyIterator___ne__,None,SwigPyIterator)
SwigPyIterator.__iadd__ = new_instancemethod(_GeomPlate.SwigPyIterator___iadd__,None,SwigPyIterator)
SwigPyIterator.__isub__ = new_instancemethod(_GeomPlate.SwigPyIterator___isub__,None,SwigPyIterator)
SwigPyIterator.__add__ = new_instancemethod(_GeomPlate.SwigPyIterator___add__,None,SwigPyIterator)
SwigPyIterator.__sub__ = new_instancemethod(_GeomPlate.SwigPyIterator___sub__,None,SwigPyIterator)
SwigPyIterator_swigregister = _GeomPlate.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
import OCC.Standard
import OCC.gp
import OCC.Adaptor3d
import OCC.GeomAbs
import OCC.TColStd
import OCC.TCollection
import OCC.MMgt
import OCC.Geom
import OCC.TColgp
import OCC.Adaptor2d
import OCC.Geom2d
import OCC.TopAbs
import OCC.math
import OCC.TColGeom2d
import OCC.Law
import OCC.GeomLProp
import OCC.AdvApp2Var
import OCC.AdvApprox
import OCC.PLib
import OCC.Plate
# SWIG proxy for OCCT's GeomPlate_Aij. Auto-generated: do not edit by hand.
class GeomPlate_Aij(object):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        :param anInd1:
        :type anInd1: int
        :param anInd2:
        :type anInd2: int
        :param aVec:
        :type aVec: gp_Vec
        :rtype: None
        """
        _GeomPlate.GeomPlate_Aij_swiginit(self,_GeomPlate.new_GeomPlate_Aij(*args))
    def __del__(self):
        # Hand the object to pythonocc's GarbageCollector; never raise
        # from a destructor.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
GeomPlate_Aij._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_Aij__kill_pointed,None,GeomPlate_Aij)
GeomPlate_Aij_swigregister = _GeomPlate.GeomPlate_Aij_swigregister
GeomPlate_Aij_swigregister(GeomPlate_Aij)
# SWIG proxy for the fixed-range OCCT array of Handle_Adaptor3d_HCurveOnSurface.
# Auto-generated: do not edit by hand.
class GeomPlate_Array1OfHCurveOnSurface(object):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :rtype: None
        :param Item:
        :type Item: Handle_Adaptor3d_HCurveOnSurface &
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :rtype: None
        """
        _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_swiginit(self,_GeomPlate.new_GeomPlate_Array1OfHCurveOnSurface(*args))
    def Init(self, *args):
        """
        :param V:
        :type V: Handle_Adaptor3d_HCurveOnSurface &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Init(self, *args)
    def Destroy(self, *args):
        """
        :rtype: None
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Destroy(self, *args)
    def IsAllocated(self, *args):
        """
        :rtype: bool
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_IsAllocated(self, *args)
    def Assign(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_Array1OfHCurveOnSurface &
        :rtype: GeomPlate_Array1OfHCurveOnSurface
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Assign(self, *args)
    def Set(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_Array1OfHCurveOnSurface &
        :rtype: GeomPlate_Array1OfHCurveOnSurface
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Set(self, *args)
    def Length(self, *args):
        """
        :rtype: int
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Length(self, *args)
    def Lower(self, *args):
        """
        :rtype: int
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Lower(self, *args)
    def Upper(self, *args):
        """
        :rtype: int
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Upper(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param Value:
        :type Value: Handle_Adaptor3d_HCurveOnSurface &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_SetValue(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_Adaptor3d_HCurveOnSurface
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Value(self, *args)
    def ChangeValue(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_Adaptor3d_HCurveOnSurface
        """
        return _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_ChangeValue(self, *args)
    def __del__(self):
        # Hand the object to pythonocc's GarbageCollector; never raise
        # from a destructor.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach the C-level operations as bound methods and register the proxy type.
GeomPlate_Array1OfHCurveOnSurface.Init = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Init,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.Destroy = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Destroy,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.IsAllocated = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_IsAllocated,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.Assign = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Assign,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.Set = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Set,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.Length = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Length,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.Lower = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Lower,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.Upper = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Upper,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.SetValue = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_SetValue,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.Value = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_Value,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface.ChangeValue = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface_ChangeValue,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_Array1OfHCurveOnSurface__kill_pointed,None,GeomPlate_Array1OfHCurveOnSurface)
GeomPlate_Array1OfHCurveOnSurface_swigregister = _GeomPlate.GeomPlate_Array1OfHCurveOnSurface_swigregister
GeomPlate_Array1OfHCurveOnSurface_swigregister(GeomPlate_Array1OfHCurveOnSurface)
# SWIG proxy for the fixed-range OCCT array of TColStd_SequenceOfReal.
# Auto-generated: do not edit by hand.
class GeomPlate_Array1OfSequenceOfReal(object):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :rtype: None
        :param Item:
        :type Item: TColStd_SequenceOfReal &
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :rtype: None
        """
        _GeomPlate.GeomPlate_Array1OfSequenceOfReal_swiginit(self,_GeomPlate.new_GeomPlate_Array1OfSequenceOfReal(*args))
    def Init(self, *args):
        """
        :param V:
        :type V: TColStd_SequenceOfReal &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_Init(self, *args)
    def Destroy(self, *args):
        """
        :rtype: None
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_Destroy(self, *args)
    def IsAllocated(self, *args):
        """
        :rtype: bool
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_IsAllocated(self, *args)
    def Assign(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_Array1OfSequenceOfReal &
        :rtype: GeomPlate_Array1OfSequenceOfReal
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_Assign(self, *args)
    def Set(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_Array1OfSequenceOfReal &
        :rtype: GeomPlate_Array1OfSequenceOfReal
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_Set(self, *args)
    def Length(self, *args):
        """
        :rtype: int
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_Length(self, *args)
    def Lower(self, *args):
        """
        :rtype: int
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_Lower(self, *args)
    def Upper(self, *args):
        """
        :rtype: int
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_Upper(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param Value:
        :type Value: TColStd_SequenceOfReal &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_SetValue(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: TColStd_SequenceOfReal
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_Value(self, *args)
    def ChangeValue(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: TColStd_SequenceOfReal
        """
        return _GeomPlate.GeomPlate_Array1OfSequenceOfReal_ChangeValue(self, *args)
    def __del__(self):
        # Hand the object to pythonocc's GarbageCollector; never raise
        # from a destructor.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach the C-level operations as bound methods and register the proxy type.
GeomPlate_Array1OfSequenceOfReal.Init = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_Init,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.Destroy = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_Destroy,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.IsAllocated = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_IsAllocated,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.Assign = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_Assign,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.Set = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_Set,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.Length = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_Length,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.Lower = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_Lower,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.Upper = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_Upper,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.SetValue = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_SetValue,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.Value = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_Value,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal.ChangeValue = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal_ChangeValue,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_Array1OfSequenceOfReal__kill_pointed,None,GeomPlate_Array1OfSequenceOfReal)
GeomPlate_Array1OfSequenceOfReal_swigregister = _GeomPlate.GeomPlate_Array1OfSequenceOfReal_swigregister
GeomPlate_Array1OfSequenceOfReal_swigregister(GeomPlate_Array1OfSequenceOfReal)
# SWIG proxy for OCCT's GeomPlate_BuildAveragePlane (best-fit plane/line
# through a point cloud). Auto-generated: do not edit by hand.
class GeomPlate_BuildAveragePlane(object):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Tol is a Tolerance to make the difference between the result plane and the result line. if POption = 1 : automatical parametrisation if POption = 2 : parametrisation by eigen vectors if NOption = 1 : the average plane is the inertial plane. if NOption = 2 : the average plane is the plane of max. flux.
        :param Pts:
        :type Pts: Handle_TColgp_HArray1OfPnt
        :param NbBoundPoints:
        :type NbBoundPoints: int
        :param Tol:
        :type Tol: float
        :param POption:
        :type POption: int
        :param NOption:
        :type NOption: int
        :rtype: None
        * Creates the plane from the 'best vector'
        :param Normals:
        :type Normals: TColgp_SequenceOfVec
        :param Pts:
        :type Pts: Handle_TColgp_HArray1OfPnt
        :rtype: None
        """
        _GeomPlate.GeomPlate_BuildAveragePlane_swiginit(self,_GeomPlate.new_GeomPlate_BuildAveragePlane(*args))
    def Plane(self, *args):
        """
        * Return the average Plane.
        :rtype: Handle_Geom_Plane
        """
        return _GeomPlate.GeomPlate_BuildAveragePlane_Plane(self, *args)
    def Line(self, *args):
        """
        * Return a Line when 2 eigenvalues are null.
        :rtype: Handle_Geom_Line
        """
        return _GeomPlate.GeomPlate_BuildAveragePlane_Line(self, *args)
    def IsPlane(self, *args):
        """
        * return OK if is a plane.
        :rtype: bool
        """
        return _GeomPlate.GeomPlate_BuildAveragePlane_IsPlane(self, *args)
    def IsLine(self, *args):
        """
        * return OK if is a line.
        :rtype: bool
        """
        return _GeomPlate.GeomPlate_BuildAveragePlane_IsLine(self, *args)
    def MinMaxBox(self, *args):
        """
        * computes the minimal box to include all normal projection points of the initial array on the plane.
        :param Umin:
        :type Umin: float &
        :param Umax:
        :type Umax: float &
        :param Vmin:
        :type Vmin: float &
        :param Vmax:
        :type Vmax: float &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_BuildAveragePlane_MinMaxBox(self, *args)
    def HalfSpace(*args):
        """
        :param NewNormals:
        :type NewNormals: TColgp_SequenceOfVec
        :param Normals:
        :type Normals: TColgp_SequenceOfVec
        :param Bset:
        :type Bset: GeomPlate_SequenceOfAij &
        :param LinTol:
        :type LinTol: float
        :param AngTol:
        :type AngTol: float
        :rtype: bool
        """
        return _GeomPlate.GeomPlate_BuildAveragePlane_HalfSpace(*args)
    # Static binding so HalfSpace is callable on the class as well.
    HalfSpace = staticmethod(HalfSpace)
    def __del__(self):
        # Hand the object to pythonocc's GarbageCollector; never raise
        # from a destructor.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach the C-level operations as bound methods and register the proxy type.
GeomPlate_BuildAveragePlane.Plane = new_instancemethod(_GeomPlate.GeomPlate_BuildAveragePlane_Plane,None,GeomPlate_BuildAveragePlane)
GeomPlate_BuildAveragePlane.Line = new_instancemethod(_GeomPlate.GeomPlate_BuildAveragePlane_Line,None,GeomPlate_BuildAveragePlane)
GeomPlate_BuildAveragePlane.IsPlane = new_instancemethod(_GeomPlate.GeomPlate_BuildAveragePlane_IsPlane,None,GeomPlate_BuildAveragePlane)
GeomPlate_BuildAveragePlane.IsLine = new_instancemethod(_GeomPlate.GeomPlate_BuildAveragePlane_IsLine,None,GeomPlate_BuildAveragePlane)
GeomPlate_BuildAveragePlane.MinMaxBox = new_instancemethod(_GeomPlate.GeomPlate_BuildAveragePlane_MinMaxBox,None,GeomPlate_BuildAveragePlane)
GeomPlate_BuildAveragePlane._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_BuildAveragePlane__kill_pointed,None,GeomPlate_BuildAveragePlane)
GeomPlate_BuildAveragePlane_swigregister = _GeomPlate.GeomPlate_BuildAveragePlane_swigregister
GeomPlate_BuildAveragePlane_swigregister(GeomPlate_BuildAveragePlane)
# Module-level alias for the static method above; forwards straight to
# the C extension. Auto-generated: do not edit by hand.
def GeomPlate_BuildAveragePlane_HalfSpace(*args):
    """
    :param NewNormals:
    :type NewNormals: TColgp_SequenceOfVec
    :param Normals:
    :type Normals: TColgp_SequenceOfVec
    :param Bset:
    :type Bset: GeomPlate_SequenceOfAij &
    :param LinTol:
    :type LinTol: float
    :param AngTol:
    :type AngTol: float
    :rtype: bool
    """
    return _GeomPlate.GeomPlate_BuildAveragePlane_HalfSpace(*args)
# SWIG proxy for OCCT's GeomPlate_BuildPlateSurface (plate-surface builder
# driven by curve and point constraints). Auto-generated: do not edit by hand.
class GeomPlate_BuildPlateSurface(object):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Constructor compatible with the old version with this constructor the constraint are given in a Array of Curve on Surface The array NbPoints contains the number of points for each constraint. The Array Tang contains the order of constraint for each Constraint: The possible values for this order has to be -1 , 0 , 1 , 2 . Order i means constraint Gi. NbIter is the maximum number of iteration to optimise the number of points for resolution Degree is the degree of resolution for Plate Tol2d is the tolerance used to test if two points of different constraint are identical in the parametric space of the initial surface Tol3d is used to test if two identical points in the 2d space are identical in 3d space TolAng is used to compare the angle between normal of two identical points in the 2d space Raises ConstructionError;
        :param NPoints:
        :type NPoints: Handle_TColStd_HArray1OfInteger &
        :param TabCurve:
        :type TabCurve: Handle_GeomPlate_HArray1OfHCurveOnSurface &
        :param Tang:
        :type Tang: Handle_TColStd_HArray1OfInteger &
        :param Degree:
        :type Degree: int
        :param NbIter: default value is 3
        :type NbIter: int
        :param Tol2d: default value is 0.00001
        :type Tol2d: float
        :param Tol3d: default value is 0.0001
        :type Tol3d: float
        :param TolAng: default value is 0.01
        :type TolAng: float
        :param TolCurv: default value is 0.1
        :type TolCurv: float
        :param Anisotropie: default value is Standard_False
        :type Anisotropie: bool
        :rtype: None
        :param Surf:
        :type Surf: Handle_Geom_Surface &
        :param Degree: default value is 3
        :type Degree: int
        :param NbPtsOnCur: default value is 10
        :type NbPtsOnCur: int
        :param NbIter: default value is 3
        :type NbIter: int
        :param Tol2d: default value is 0.00001
        :type Tol2d: float
        :param Tol3d: default value is 0.0001
        :type Tol3d: float
        :param TolAng: default value is 0.01
        :type TolAng: float
        :param TolCurv: default value is 0.1
        :type TolCurv: float
        :param Anisotropie: default value is Standard_False
        :type Anisotropie: bool
        :rtype: None
        * Initializes the BuildPlateSurface framework for deforming plate surfaces using curve and point constraints. You use the first constructor if you have an initial surface to work with at construction time. If not, you use the second. You can add one later by using the method LoadInitSurface. If no initial surface is loaded, one will automatically be computed. The curve and point constraints will be defined by using the method Add. Before the call to the algorithm, the curve constraints will be transformed into sequences of discrete points. Each curve defined as a constraint will be given the value of NbPtsOnCur as the average number of points on it. Several arguments serve to improve performance of the algorithm. NbIter, for example, expresses the number of iterations allowed and is used to control the duration of computation. To optimize resolution, Degree will have the default value of 3. The surface generated must respect several tolerance values: - 2d tolerance given by Tol2d, with a default value of 0.00001 - 3d tolerance expressed by Tol3d, with a default value of 0.0001 - angular tolerance given by TolAng, with a default value of 0.01, defining the greatest angle allowed between the constraint and the target surface. Exceptions Standard_ConstructionError if NbIter is less than 1 or Degree is less than 3.
        :param Degree: default value is 3
        :type Degree: int
        :param NbPtsOnCur: default value is 10
        :type NbPtsOnCur: int
        :param NbIter: default value is 3
        :type NbIter: int
        :param Tol2d: default value is 0.00001
        :type Tol2d: float
        :param Tol3d: default value is 0.0001
        :type Tol3d: float
        :param TolAng: default value is 0.01
        :type TolAng: float
        :param TolCurv: default value is 0.1
        :type TolCurv: float
        :param Anisotropie: default value is Standard_False
        :type Anisotropie: bool
        :rtype: None
        """
        _GeomPlate.GeomPlate_BuildPlateSurface_swiginit(self,_GeomPlate.new_GeomPlate_BuildPlateSurface(*args))
    def Init(self, *args):
        """
        * Resets all constraints
        :rtype: None
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_Init(self, *args)
    def LoadInitSurface(self, *args):
        """
        * Loads the initial Surface
        :param Surf:
        :type Surf: Handle_Geom_Surface &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_LoadInitSurface(self, *args)
    def SetNbBounds(self, *args):
        """
        :param NbBounds:
        :type NbBounds: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_SetNbBounds(self, *args)
    def Add(self, *args):
        """
        * Adds the linear constraint cont.
        :param Cont:
        :type Cont: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        * Adds the point constraint cont.
        :param Cont:
        :type Cont: Handle_GeomPlate_PointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_Add(self, *args)
    def Perform(self, *args):
        """
        * Calls the algorithm and computes the plate surface using the loaded constraints. If no initial surface is given, the algorithm automatically computes one. Exceptions Standard_RangeError if the value of the constraint is null or if plate is not done.
        :rtype: None
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_Perform(self, *args)
    def CurveConstraint(self, *args):
        """
        * returns the CurveConstraints of order order
        :param order:
        :type order: int
        :rtype: Handle_GeomPlate_CurveConstraint
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_CurveConstraint(self, *args)
    def PointConstraint(self, *args):
        """
        * returns the PointConstraint of order order
        :param order:
        :type order: int
        :rtype: Handle_GeomPlate_PointConstraint
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_PointConstraint(self, *args)
    def Disc2dContour(self, *args):
        """
        :param nbp:
        :type nbp: int
        :param Seq2d:
        :type Seq2d: TColgp_SequenceOfXY
        :rtype: None
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_Disc2dContour(self, *args)
    def Disc3dContour(self, *args):
        """
        :param nbp:
        :type nbp: int
        :param iordre:
        :type iordre: int
        :param Seq3d:
        :type Seq3d: TColgp_SequenceOfXYZ
        :rtype: None
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_Disc3dContour(self, *args)
    def IsDone(self, *args):
        """
        * Tests whether computation of the plate has been completed.
        :rtype: bool
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_IsDone(self, *args)
    def Surface(self, *args):
        """
        * Returns the result of the computation. This surface can then be used by GeomPlate_MakeApprox for converting the resulting surface into a BSpline.
        :rtype: Handle_GeomPlate_Surface
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_Surface(self, *args)
    def SurfInit(self, *args):
        """
        * Returns the initial surface
        :rtype: Handle_Geom_Surface
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_SurfInit(self, *args)
    def Sense(self, *args):
        """
        * Allows you to ensure that the array of curves returned by Curves2d has the correct orientation. Returns the orientation of the curves in the the array returned by Curves2d. Computation changes the orientation of these curves. Consequently, this method returns the orientation prior to computation.
        :rtype: Handle_TColStd_HArray1OfInteger
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_Sense(self, *args)
    def Curves2d(self, *args):
        """
        * Extracts the array of curves on the plate surface which correspond to the curve constraints set in Add.
        :rtype: Handle_TColGeom2d_HArray1OfCurve
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_Curves2d(self, *args)
    def Order(self, *args):
        """
        * Returns the order of the curves in the array returned by Curves2d. Computation changes this order. Consequently, this method returns the order of the curves prior to computation.
        :rtype: Handle_TColStd_HArray1OfInteger
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_Order(self, *args)
    def G0Error(self, *args):
        """
        * Returns the max distance betwen the result and the constraints
        :rtype: float
        * Returns the max distance between the result and the constraint Index
        :param Index:
        :type Index: int
        :rtype: float
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_G0Error(self, *args)
    def G1Error(self, *args):
        """
        * Returns the max angle betwen the result and the constraints
        :rtype: float
        * Returns the max angle between the result and the constraint Index
        :param Index:
        :type Index: int
        :rtype: float
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_G1Error(self, *args)
    def G2Error(self, *args):
        """
        * Returns the max difference of curvature betwen the result and the constraints
        :rtype: float
        * Returns the max difference of curvature between the result and the constraint Index
        :param Index:
        :type Index: int
        :rtype: float
        """
        return _GeomPlate.GeomPlate_BuildPlateSurface_G2Error(self, *args)
    def __del__(self):
        # Hand the object to pythonocc's GarbageCollector; never raise
        # from a destructor.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach the C-level operations as bound methods and register the proxy type.
GeomPlate_BuildPlateSurface.Init = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_Init,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.LoadInitSurface = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_LoadInitSurface,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.SetNbBounds = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_SetNbBounds,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.Add = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_Add,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.Perform = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_Perform,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.CurveConstraint = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_CurveConstraint,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.PointConstraint = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_PointConstraint,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.Disc2dContour = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_Disc2dContour,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.Disc3dContour = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_Disc3dContour,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.IsDone = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_IsDone,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.Surface = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_Surface,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.SurfInit = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_SurfInit,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.Sense = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_Sense,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.Curves2d = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_Curves2d,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.Order = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_Order,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.G0Error = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_G0Error,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.G1Error = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_G1Error,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface.G2Error = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface_G2Error,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_BuildPlateSurface__kill_pointed,None,GeomPlate_BuildPlateSurface)
GeomPlate_BuildPlateSurface_swigregister = _GeomPlate.GeomPlate_BuildPlateSurface_swigregister
GeomPlate_BuildPlateSurface_swigregister(GeomPlate_BuildPlateSurface)
class GeomPlate_CurveConstraint(OCC.MMgt.MMgt_TShared):
    """SWIG proxy for the OpenCASCADE GeomPlate_CurveConstraint class.

    Defines a curve-based constraint (G0/G1/G2 continuity along a boundary
    curve) for use by GeomPlate_BuildPlateSurface.  Every method is a thin
    pass-through to the _GeomPlate C extension; the real logic lives in the
    native OCC library.  Generated code — do not hand-edit.
    """
    # SWIG memory-ownership flag: True when Python owns the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Initializes an empty curve constraint object.
        :rtype: None
        * Create a constraint Order is the order of the constraint. The possible values for order are -1,0,1,2. Order i means constraints Gi Npt is the number of points associated with the constraint. TolDist is the maximum error to satisfy for G0 constraints TolAng is the maximum error to satisfy for G1 constraints TolCurv is the maximum error to satisfy for G2 constraints These errors can be replaced by laws of criterion. Raises ConstructionError if Order is not -1 , 0, 1, 2
        :param Boundary:
        :type Boundary: Handle_Adaptor3d_HCurveOnSurface &
        :param Order:
        :type Order: int
        :param NPt: default value is 10
        :type NPt: int
        :param TolDist: default value is 0.0001
        :type TolDist: float
        :param TolAng: default value is 0.01
        :type TolAng: float
        :param TolCurv: default value is 0.1
        :type TolCurv: float
        :rtype: None
        * Create a constraint Order is the order of the constraint. The possible values for order are -1,0. Order i means constraints Gi Npt is the number of points associated with the constraint. TolDist is the maximum error to satisfy for G0 constraints These errors can be replaced by laws of criterion. Raises ConstructionError if Order is not 0 or -1
        :param Boundary:
        :type Boundary: Handle_Adaptor3d_HCurve &
        :param Tang:
        :type Tang: int
        :param NPt: default value is 10
        :type NPt: int
        :param TolDist: default value is 0.0001
        :type TolDist: float
        :rtype: None
        """
        # Overload resolution between the C++ constructors happens inside SWIG.
        _GeomPlate.GeomPlate_CurveConstraint_swiginit(self,_GeomPlate.new_GeomPlate_CurveConstraint(*args))
    def SetOrder(self, *args):
        """
        * Allows you to set the order of continuity required for the constraints: G0, G1, and G2, controlled respectively by G0Criterion G1Criterion and G2Criterion.
        :param Order:
        :type Order: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_SetOrder(self, *args)
    def Order(self, *args):
        """
        * Returns the order of constraint, one of G0, G1 or G2.
        :rtype: int
        """
        return _GeomPlate.GeomPlate_CurveConstraint_Order(self, *args)
    def NbPoints(self, *args):
        """
        * Returns the number of points on the curve used as a constraint. The default setting is 10. This parameter affects computation time, which increases by the cube of the number of points.
        :rtype: int
        """
        return _GeomPlate.GeomPlate_CurveConstraint_NbPoints(self, *args)
    def SetNbPoints(self, *args):
        """
        * Allows you to set the number of points on the curve constraint. The default setting is 10. This parameter affects computation time, which increases by the cube of the number of points.
        :param NewNb:
        :type NewNb: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_SetNbPoints(self, *args)
    def SetG0Criterion(self, *args):
        """
        * Allows you to set the G0 criterion. This is the law defining the greatest distance allowed between the constraint and the target surface for each point of the constraint. If this criterion is not set, TolDist, the distance tolerance from the constructor, is used.
        :param G0Crit:
        :type G0Crit: Handle_Law_Function &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_SetG0Criterion(self, *args)
    def SetG1Criterion(self, *args):
        """
        * Allows you to set the G1 criterion. This is the law defining the greatest angle allowed between the constraint and the target surface. If this criterion is not set, TolAng, the angular tolerance from the constructor, is used. Raises ConstructionError if the curve is not on a surface
        :param G1Crit:
        :type G1Crit: Handle_Law_Function &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_SetG1Criterion(self, *args)
    def SetG2Criterion(self, *args):
        """
        * Allows you to set the G2 criterion (law for the greatest curvature difference allowed between constraint and target surface).
        :param G2Crit:
        :type G2Crit: Handle_Law_Function &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_SetG2Criterion(self, *args)
    def G0Criterion(self, *args):
        """
        * Returns the G0 criterion at the parametric point U on the curve. This is the greatest distance allowed between the constraint and the target surface at U.
        :param U:
        :type U: float
        :rtype: float
        """
        return _GeomPlate.GeomPlate_CurveConstraint_G0Criterion(self, *args)
    def G1Criterion(self, *args):
        """
        * Returns the G1 criterion at the parametric point U on the curve. This is the greatest angle allowed between the constraint and the target surface at U. Raises ConstructionError if the curve is not on a surface
        :param U:
        :type U: float
        :rtype: float
        """
        return _GeomPlate.GeomPlate_CurveConstraint_G1Criterion(self, *args)
    def G2Criterion(self, *args):
        """
        * Returns the G2 criterion at the parametric point U on the curve. This is the greatest difference in curvature allowed between the constraint and the target surface at U. Raises ConstructionError if the curve is not on a surface
        :param U:
        :type U: float
        :rtype: float
        """
        return _GeomPlate.GeomPlate_CurveConstraint_G2Criterion(self, *args)
    def FirstParameter(self, *args):
        """
        * Returns the first parameter of the constraint curve.
        :rtype: float
        """
        return _GeomPlate.GeomPlate_CurveConstraint_FirstParameter(self, *args)
    def LastParameter(self, *args):
        """
        * Returns the last parameter of the constraint curve.
        :rtype: float
        """
        return _GeomPlate.GeomPlate_CurveConstraint_LastParameter(self, *args)
    def Length(self, *args):
        """
        * Returns the length of the constraint curve.
        :rtype: float
        """
        return _GeomPlate.GeomPlate_CurveConstraint_Length(self, *args)
    def LPropSurf(self, *args):
        """
        * Returns the local surface properties at parameter U.
        :param U:
        :type U: float
        :rtype: GeomLProp_SLProps
        """
        return _GeomPlate.GeomPlate_CurveConstraint_LPropSurf(self, *args)
    def D0(self, *args):
        """
        * Evaluates the point P on the constraint curve at parameter U.
        :param U:
        :type U: float
        :param P:
        :type P: gp_Pnt
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_D0(self, *args)
    def D1(self, *args):
        """
        * Evaluates the point and first derivative vectors at parameter U.
        :param U:
        :type U: float
        :param P:
        :type P: gp_Pnt
        :param V1:
        :type V1: gp_Vec
        :param V2:
        :type V2: gp_Vec
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_D1(self, *args)
    def D2(self, *args):
        """
        * Evaluates the point and derivative vectors up to second order at parameter U.
        :param U:
        :type U: float
        :param P:
        :type P: gp_Pnt
        :param V1:
        :type V1: gp_Vec
        :param V2:
        :type V2: gp_Vec
        :param V3:
        :type V3: gp_Vec
        :param V4:
        :type V4: gp_Vec
        :param V5:
        :type V5: gp_Vec
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_D2(self, *args)
    def Curve3d(self, *args):
        """
        * Returns the 3d curve adaptor underlying this constraint.
        :rtype: Handle_Adaptor3d_HCurve
        """
        return _GeomPlate.GeomPlate_CurveConstraint_Curve3d(self, *args)
    def SetCurve2dOnSurf(self, *args):
        """
        * loads a 2d curve associated the surface resulting of the constraints
        :param Curve2d:
        :type Curve2d: Handle_Geom2d_Curve &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_SetCurve2dOnSurf(self, *args)
    def Curve2dOnSurf(self, *args):
        """
        * Returns a 2d curve associated the surface resulting of the constraints
        :rtype: Handle_Geom2d_Curve
        """
        return _GeomPlate.GeomPlate_CurveConstraint_Curve2dOnSurf(self, *args)
    def SetProjectedCurve(self, *args):
        """
        * loads a 2d curve resulting from the normal projection of the curve on the initial surface
        :param Curve2d:
        :type Curve2d: Handle_Adaptor2d_HCurve2d &
        :param TolU:
        :type TolU: float
        :param TolV:
        :type TolV: float
        :rtype: None
        """
        return _GeomPlate.GeomPlate_CurveConstraint_SetProjectedCurve(self, *args)
    def ProjectedCurve(self, *args):
        """
        * Returns the projected curve resulting from the normal projection of the curve on the initial surface
        :rtype: Handle_Adaptor2d_HCurve2d
        """
        return _GeomPlate.GeomPlate_CurveConstraint_ProjectedCurve(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_CurveConstraint self)"""
        # Explicitly releases the wrapped C++ object (SWIG helper).
        return _GeomPlate.GeomPlate_CurveConstraint__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_CurveConstraint self) -> Handle_GeomPlate_CurveConstraint"""
        # Returns the OCC reference-counted handle wrapping this object.
        return _GeomPlate.GeomPlate_CurveConstraint_GetHandle(self)
    def __del__(self):
        # Hand ownership to the pythonOCC GarbageCollector; the bare except is
        # deliberate SWIG-generated shutdown guarding (GarbageCollector may
        # already be torn down at interpreter exit).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG boilerplate: bind the flat _GeomPlate functions onto
# GeomPlate_CurveConstraint as instance methods and register the proxy class
# with the SWIG runtime.  Generated code — do not hand-edit.
GeomPlate_CurveConstraint.SetOrder = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_SetOrder,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.Order = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_Order,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.NbPoints = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_NbPoints,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.SetNbPoints = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_SetNbPoints,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.SetG0Criterion = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_SetG0Criterion,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.SetG1Criterion = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_SetG1Criterion,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.SetG2Criterion = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_SetG2Criterion,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.G0Criterion = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_G0Criterion,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.G1Criterion = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_G1Criterion,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.G2Criterion = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_G2Criterion,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.FirstParameter = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_FirstParameter,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.LastParameter = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_LastParameter,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.Length = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_Length,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.LPropSurf = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_LPropSurf,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.D0 = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_D0,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.D1 = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_D1,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.D2 = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_D2,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.Curve3d = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_Curve3d,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.SetCurve2dOnSurf = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_SetCurve2dOnSurf,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.Curve2dOnSurf = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_Curve2dOnSurf,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.SetProjectedCurve = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_SetProjectedCurve,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.ProjectedCurve = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_ProjectedCurve,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint__kill_pointed,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_CurveConstraint_GetHandle,None,GeomPlate_CurveConstraint)
GeomPlate_CurveConstraint_swigregister = _GeomPlate.GeomPlate_CurveConstraint_swigregister
GeomPlate_CurveConstraint_swigregister(GeomPlate_CurveConstraint)
class Handle_GeomPlate_CurveConstraint(OCC.MMgt.Handle_MMgt_TShared):
    """SWIG proxy for the OCC reference-counted handle to GeomPlate_CurveConstraint."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _GeomPlate.Handle_GeomPlate_CurveConstraint_swiginit(self,_GeomPlate.new_Handle_GeomPlate_CurveConstraint(*args))
    # Static safe-downcast from a base-class handle; returns a null handle on failure.
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_CurveConstraint_DownCast)
    def __del__(self):
        # Bare except is deliberate SWIG shutdown guarding.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
Handle_GeomPlate_CurveConstraint.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_CurveConstraint_Nullify,None,Handle_GeomPlate_CurveConstraint)
Handle_GeomPlate_CurveConstraint.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_CurveConstraint_IsNull,None,Handle_GeomPlate_CurveConstraint)
Handle_GeomPlate_CurveConstraint.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_CurveConstraint_GetObject,None,Handle_GeomPlate_CurveConstraint)
Handle_GeomPlate_CurveConstraint._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_CurveConstraint__kill_pointed,None,Handle_GeomPlate_CurveConstraint)
Handle_GeomPlate_CurveConstraint_swigregister = _GeomPlate.Handle_GeomPlate_CurveConstraint_swigregister
Handle_GeomPlate_CurveConstraint_swigregister(Handle_GeomPlate_CurveConstraint)
def Handle_GeomPlate_CurveConstraint_DownCast(*args):
    """Module-level alias for Handle_GeomPlate_CurveConstraint.DownCast."""
    return _GeomPlate.Handle_GeomPlate_CurveConstraint_DownCast(*args)
# Immediately rebound to the raw C function (the def above is SWIG scaffolding).
Handle_GeomPlate_CurveConstraint_DownCast = _GeomPlate.Handle_GeomPlate_CurveConstraint_DownCast
class GeomPlate_HArray1OfHCurveOnSurface(OCC.MMgt.MMgt_TShared):
    """SWIG proxy for an OCC handle-managed 1D array of Adaptor3d_HCurveOnSurface.

    Indexing follows OCC conventions: valid indices run from Lower() to
    Upper() inclusive (typically 1-based).  Every method delegates to the
    _GeomPlate C extension.  Generated code — do not hand-edit.
    """
    # SWIG memory-ownership flag: True when Python owns the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :rtype: None
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :param V:
        :type V: Handle_Adaptor3d_HCurveOnSurface &
        :rtype: None
        """
        _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_swiginit(self,_GeomPlate.new_GeomPlate_HArray1OfHCurveOnSurface(*args))
    def Init(self, *args):
        """
        * Fills every slot of the array with the given value.
        :param V:
        :type V: Handle_Adaptor3d_HCurveOnSurface &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Init(self, *args)
    def Length(self, *args):
        """
        * Number of elements (Upper - Lower + 1).
        :rtype: int
        """
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Length(self, *args)
    def Lower(self, *args):
        """
        * Lowest valid index.
        :rtype: int
        """
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Lower(self, *args)
    def Upper(self, *args):
        """
        * Highest valid index.
        :rtype: int
        """
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Upper(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param Value:
        :type Value: Handle_Adaptor3d_HCurveOnSurface &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_SetValue(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_Adaptor3d_HCurveOnSurface
        """
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Value(self, *args)
    def ChangeValue(self, *args):
        """
        * Like Value but returns a modifiable reference.
        :param Index:
        :type Index: int
        :rtype: Handle_Adaptor3d_HCurveOnSurface
        """
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_ChangeValue(self, *args)
    def Array1(self, *args):
        """
        * Read-only access to the underlying non-handle array.
        :rtype: GeomPlate_Array1OfHCurveOnSurface
        """
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Array1(self, *args)
    def ChangeArray1(self, *args):
        """
        * Modifiable access to the underlying non-handle array.
        :rtype: GeomPlate_Array1OfHCurveOnSurface
        """
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_ChangeArray1(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_HArray1OfHCurveOnSurface self)"""
        # Explicitly releases the wrapped C++ object (SWIG helper).
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_HArray1OfHCurveOnSurface self) -> Handle_GeomPlate_HArray1OfHCurveOnSurface"""
        return _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_GetHandle(self)
    def __del__(self):
        # Bare except is deliberate SWIG shutdown guarding.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG boilerplate: method bindings and runtime registration for
# GeomPlate_HArray1OfHCurveOnSurface and its OCC handle proxy.
# Generated code — do not hand-edit.
GeomPlate_HArray1OfHCurveOnSurface.Init = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Init,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface.Length = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Length,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface.Lower = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Lower,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface.Upper = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Upper,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface.SetValue = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_SetValue,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface.Value = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Value,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface.ChangeValue = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_ChangeValue,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface.Array1 = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_Array1,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface.ChangeArray1 = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_ChangeArray1,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface__kill_pointed,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_GetHandle,None,GeomPlate_HArray1OfHCurveOnSurface)
GeomPlate_HArray1OfHCurveOnSurface_swigregister = _GeomPlate.GeomPlate_HArray1OfHCurveOnSurface_swigregister
GeomPlate_HArray1OfHCurveOnSurface_swigregister(GeomPlate_HArray1OfHCurveOnSurface)
class Handle_GeomPlate_HArray1OfHCurveOnSurface(OCC.MMgt.Handle_MMgt_TShared):
    """SWIG proxy for the OCC handle to GeomPlate_HArray1OfHCurveOnSurface."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _GeomPlate.Handle_GeomPlate_HArray1OfHCurveOnSurface_swiginit(self,_GeomPlate.new_Handle_GeomPlate_HArray1OfHCurveOnSurface(*args))
    # Static safe-downcast from a base-class handle; returns a null handle on failure.
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_HArray1OfHCurveOnSurface_DownCast)
    def __del__(self):
        # Bare except is deliberate SWIG shutdown guarding.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
Handle_GeomPlate_HArray1OfHCurveOnSurface.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_HArray1OfHCurveOnSurface_Nullify,None,Handle_GeomPlate_HArray1OfHCurveOnSurface)
Handle_GeomPlate_HArray1OfHCurveOnSurface.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_HArray1OfHCurveOnSurface_IsNull,None,Handle_GeomPlate_HArray1OfHCurveOnSurface)
Handle_GeomPlate_HArray1OfHCurveOnSurface.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_HArray1OfHCurveOnSurface_GetObject,None,Handle_GeomPlate_HArray1OfHCurveOnSurface)
Handle_GeomPlate_HArray1OfHCurveOnSurface._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_HArray1OfHCurveOnSurface__kill_pointed,None,Handle_GeomPlate_HArray1OfHCurveOnSurface)
Handle_GeomPlate_HArray1OfHCurveOnSurface_swigregister = _GeomPlate.Handle_GeomPlate_HArray1OfHCurveOnSurface_swigregister
Handle_GeomPlate_HArray1OfHCurveOnSurface_swigregister(Handle_GeomPlate_HArray1OfHCurveOnSurface)
def Handle_GeomPlate_HArray1OfHCurveOnSurface_DownCast(*args):
    """Module-level alias for Handle_GeomPlate_HArray1OfHCurveOnSurface.DownCast."""
    return _GeomPlate.Handle_GeomPlate_HArray1OfHCurveOnSurface_DownCast(*args)
# Immediately rebound to the raw C function (the def above is SWIG scaffolding).
Handle_GeomPlate_HArray1OfHCurveOnSurface_DownCast = _GeomPlate.Handle_GeomPlate_HArray1OfHCurveOnSurface_DownCast
class GeomPlate_HArray1OfSequenceOfReal(OCC.MMgt.MMgt_TShared):
    """SWIG proxy for an OCC handle-managed 1D array of TColStd_SequenceOfReal.

    Indexing follows OCC conventions: valid indices run from Lower() to
    Upper() inclusive (typically 1-based).  Every method delegates to the
    _GeomPlate C extension.  Generated code — do not hand-edit.
    """
    # SWIG memory-ownership flag: True when Python owns the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :rtype: None
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :param V:
        :type V: TColStd_SequenceOfReal &
        :rtype: None
        """
        _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_swiginit(self,_GeomPlate.new_GeomPlate_HArray1OfSequenceOfReal(*args))
    def Init(self, *args):
        """
        * Fills every slot of the array with the given value.
        :param V:
        :type V: TColStd_SequenceOfReal &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Init(self, *args)
    def Length(self, *args):
        """
        * Number of elements (Upper - Lower + 1).
        :rtype: int
        """
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Length(self, *args)
    def Lower(self, *args):
        """
        * Lowest valid index.
        :rtype: int
        """
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Lower(self, *args)
    def Upper(self, *args):
        """
        * Highest valid index.
        :rtype: int
        """
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Upper(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param Value:
        :type Value: TColStd_SequenceOfReal &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_SetValue(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: TColStd_SequenceOfReal
        """
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Value(self, *args)
    def ChangeValue(self, *args):
        """
        * Like Value but returns a modifiable reference.
        :param Index:
        :type Index: int
        :rtype: TColStd_SequenceOfReal
        """
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_ChangeValue(self, *args)
    def Array1(self, *args):
        """
        * Read-only access to the underlying non-handle array.
        :rtype: GeomPlate_Array1OfSequenceOfReal
        """
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Array1(self, *args)
    def ChangeArray1(self, *args):
        """
        * Modifiable access to the underlying non-handle array.
        :rtype: GeomPlate_Array1OfSequenceOfReal
        """
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_ChangeArray1(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_HArray1OfSequenceOfReal self)"""
        # Explicitly releases the wrapped C++ object (SWIG helper).
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_HArray1OfSequenceOfReal self) -> Handle_GeomPlate_HArray1OfSequenceOfReal"""
        return _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_GetHandle(self)
    def __del__(self):
        # Bare except is deliberate SWIG shutdown guarding.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG boilerplate: method bindings and runtime registration for
# GeomPlate_HArray1OfSequenceOfReal and its OCC handle proxy.
# Generated code — do not hand-edit.
GeomPlate_HArray1OfSequenceOfReal.Init = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Init,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal.Length = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Length,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal.Lower = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Lower,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal.Upper = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Upper,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal.SetValue = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_SetValue,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal.Value = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Value,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal.ChangeValue = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_ChangeValue,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal.Array1 = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_Array1,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal.ChangeArray1 = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_ChangeArray1,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal__kill_pointed,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_HArray1OfSequenceOfReal_GetHandle,None,GeomPlate_HArray1OfSequenceOfReal)
GeomPlate_HArray1OfSequenceOfReal_swigregister = _GeomPlate.GeomPlate_HArray1OfSequenceOfReal_swigregister
GeomPlate_HArray1OfSequenceOfReal_swigregister(GeomPlate_HArray1OfSequenceOfReal)
class Handle_GeomPlate_HArray1OfSequenceOfReal(OCC.MMgt.Handle_MMgt_TShared):
    """SWIG proxy for the OCC handle to GeomPlate_HArray1OfSequenceOfReal."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _GeomPlate.Handle_GeomPlate_HArray1OfSequenceOfReal_swiginit(self,_GeomPlate.new_Handle_GeomPlate_HArray1OfSequenceOfReal(*args))
    # Static safe-downcast from a base-class handle; returns a null handle on failure.
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_HArray1OfSequenceOfReal_DownCast)
    def __del__(self):
        # Bare except is deliberate SWIG shutdown guarding.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
Handle_GeomPlate_HArray1OfSequenceOfReal.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_HArray1OfSequenceOfReal_Nullify,None,Handle_GeomPlate_HArray1OfSequenceOfReal)
Handle_GeomPlate_HArray1OfSequenceOfReal.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_HArray1OfSequenceOfReal_IsNull,None,Handle_GeomPlate_HArray1OfSequenceOfReal)
Handle_GeomPlate_HArray1OfSequenceOfReal.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_HArray1OfSequenceOfReal_GetObject,None,Handle_GeomPlate_HArray1OfSequenceOfReal)
Handle_GeomPlate_HArray1OfSequenceOfReal._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_HArray1OfSequenceOfReal__kill_pointed,None,Handle_GeomPlate_HArray1OfSequenceOfReal)
Handle_GeomPlate_HArray1OfSequenceOfReal_swigregister = _GeomPlate.Handle_GeomPlate_HArray1OfSequenceOfReal_swigregister
Handle_GeomPlate_HArray1OfSequenceOfReal_swigregister(Handle_GeomPlate_HArray1OfSequenceOfReal)
def Handle_GeomPlate_HArray1OfSequenceOfReal_DownCast(*args):
    """Module-level alias for Handle_GeomPlate_HArray1OfSequenceOfReal.DownCast."""
    return _GeomPlate.Handle_GeomPlate_HArray1OfSequenceOfReal_DownCast(*args)
# Immediately rebound to the raw C function (the def above is SWIG scaffolding).
Handle_GeomPlate_HArray1OfSequenceOfReal_DownCast = _GeomPlate.Handle_GeomPlate_HArray1OfSequenceOfReal_DownCast
class GeomPlate_HSequenceOfCurveConstraint(OCC.MMgt.MMgt_TShared):
    """SWIG proxy for an OCC handle-managed sequence of GeomPlate_CurveConstraint handles.

    OCC sequences are 1-based, variable-length ordered collections.  Every
    method delegates to the _GeomPlate C extension.  Generated code — do not
    hand-edit.
    """
    # SWIG memory-ownership flag: True when Python owns the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_swiginit(self,_GeomPlate.new_GeomPlate_HSequenceOfCurveConstraint(*args))
    def IsEmpty(self, *args):
        """
        :rtype: bool
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_IsEmpty(self, *args)
    def Length(self, *args):
        """
        :rtype: int
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Length(self, *args)
    def Clear(self, *args):
        """
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Clear(self, *args)
    def Append(self, *args):
        """
        * Appends a single item or a whole sequence at the end (two C++ overloads).
        :param anItem:
        :type anItem: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        :param aSequence:
        :type aSequence: Handle_GeomPlate_HSequenceOfCurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Append(self, *args)
    def Prepend(self, *args):
        """
        * Inserts a single item or a whole sequence at the front (two C++ overloads).
        :param anItem:
        :type anItem: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        :param aSequence:
        :type aSequence: Handle_GeomPlate_HSequenceOfCurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Prepend(self, *args)
    def Reverse(self, *args):
        """
        * Reverses the order of the sequence in place.
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Reverse(self, *args)
    def InsertBefore(self, *args):
        """
        * Inserts an item or a sequence before position anIndex (two C++ overloads).
        :param anIndex:
        :type anIndex: int
        :param anItem:
        :type anItem: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        :param anIndex:
        :type anIndex: int
        :param aSequence:
        :type aSequence: Handle_GeomPlate_HSequenceOfCurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_InsertBefore(self, *args)
    def InsertAfter(self, *args):
        """
        * Inserts an item or a sequence after position anIndex (two C++ overloads).
        :param anIndex:
        :type anIndex: int
        :param anItem:
        :type anItem: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        :param anIndex:
        :type anIndex: int
        :param aSequence:
        :type aSequence: Handle_GeomPlate_HSequenceOfCurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_InsertAfter(self, *args)
    def Exchange(self, *args):
        """
        * Swaps the elements at the two given positions.
        :param anIndex:
        :type anIndex: int
        :param anOtherIndex:
        :type anOtherIndex: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Exchange(self, *args)
    def Split(self, *args):
        """
        * Splits the sequence at anIndex and returns the tail as a new sequence.
        :param anIndex:
        :type anIndex: int
        :rtype: Handle_GeomPlate_HSequenceOfCurveConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Split(self, *args)
    def SetValue(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :param anItem:
        :type anItem: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_SetValue(self, *args)
    def Value(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :rtype: Handle_GeomPlate_CurveConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Value(self, *args)
    def ChangeValue(self, *args):
        """
        * Like Value but returns a modifiable reference.
        :param anIndex:
        :type anIndex: int
        :rtype: Handle_GeomPlate_CurveConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_ChangeValue(self, *args)
    def Remove(self, *args):
        """
        * Removes one element or an index range (two C++ overloads).
        :param anIndex:
        :type anIndex: int
        :rtype: None
        :param fromIndex:
        :type fromIndex: int
        :param toIndex:
        :type toIndex: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Remove(self, *args)
    def Sequence(self, *args):
        """
        * Read-only access to the underlying non-handle sequence.
        :rtype: GeomPlate_SequenceOfCurveConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Sequence(self, *args)
    def ChangeSequence(self, *args):
        """
        * Modifiable access to the underlying non-handle sequence.
        :rtype: GeomPlate_SequenceOfCurveConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_ChangeSequence(self, *args)
    def ShallowCopy(self, *args):
        """
        :rtype: Handle_GeomPlate_HSequenceOfCurveConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_ShallowCopy(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_HSequenceOfCurveConstraint self)"""
        # Explicitly releases the wrapped C++ object (SWIG helper).
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_HSequenceOfCurveConstraint self) -> Handle_GeomPlate_HSequenceOfCurveConstraint"""
        return _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_GetHandle(self)
    def __del__(self):
        # Bare except is deliberate SWIG shutdown guarding.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
GeomPlate_HSequenceOfCurveConstraint.IsEmpty = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_IsEmpty,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Length = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Length,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Clear = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Clear,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Append = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Append,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Prepend = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Prepend,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Reverse = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Reverse,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.InsertBefore = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_InsertBefore,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.InsertAfter = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_InsertAfter,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Exchange = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Exchange,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Split = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Split,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.SetValue = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_SetValue,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Value = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Value,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.ChangeValue = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_ChangeValue,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Remove = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Remove,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.Sequence = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_Sequence,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.ChangeSequence = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_ChangeSequence,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.ShallowCopy = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_ShallowCopy,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint__kill_pointed,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfCurveConstraint_GetHandle,None,GeomPlate_HSequenceOfCurveConstraint)
GeomPlate_HSequenceOfCurveConstraint_swigregister = _GeomPlate.GeomPlate_HSequenceOfCurveConstraint_swigregister
GeomPlate_HSequenceOfCurveConstraint_swigregister(GeomPlate_HSequenceOfCurveConstraint)
class Handle_GeomPlate_HSequenceOfCurveConstraint(OCC.MMgt.Handle_MMgt_TShared):
    """SWIG proxy for the OCCT smart pointer (handle) to GeomPlate_HSequenceOfCurveConstraint."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """Construct a handle; *args* are forwarded to the C++ handle constructor."""
        _GeomPlate.Handle_GeomPlate_HSequenceOfCurveConstraint_swiginit(self,_GeomPlate.new_Handle_GeomPlate_HSequenceOfCurveConstraint(*args))
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_HSequenceOfCurveConstraint_DownCast)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring for the handle proxy: bind the C-extension handle
# operations (Nullify/IsNull/GetObject/_kill_pointed) and register the class.
Handle_GeomPlate_HSequenceOfCurveConstraint.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_HSequenceOfCurveConstraint_Nullify,None,Handle_GeomPlate_HSequenceOfCurveConstraint)
Handle_GeomPlate_HSequenceOfCurveConstraint.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_HSequenceOfCurveConstraint_IsNull,None,Handle_GeomPlate_HSequenceOfCurveConstraint)
Handle_GeomPlate_HSequenceOfCurveConstraint.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_HSequenceOfCurveConstraint_GetObject,None,Handle_GeomPlate_HSequenceOfCurveConstraint)
Handle_GeomPlate_HSequenceOfCurveConstraint._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_HSequenceOfCurveConstraint__kill_pointed,None,Handle_GeomPlate_HSequenceOfCurveConstraint)
Handle_GeomPlate_HSequenceOfCurveConstraint_swigregister = _GeomPlate.Handle_GeomPlate_HSequenceOfCurveConstraint_swigregister
Handle_GeomPlate_HSequenceOfCurveConstraint_swigregister(Handle_GeomPlate_HSequenceOfCurveConstraint)
def Handle_GeomPlate_HSequenceOfCurveConstraint_DownCast(*args):
    """Safely down-cast a base-class handle to Handle_GeomPlate_HSequenceOfCurveConstraint."""
    return _GeomPlate.Handle_GeomPlate_HSequenceOfCurveConstraint_DownCast(*args)
# SWIG idiom: immediately rebind the name to the C-extension function,
# bypassing the Python wrapper defined just above.
Handle_GeomPlate_HSequenceOfCurveConstraint_DownCast = _GeomPlate.Handle_GeomPlate_HSequenceOfCurveConstraint_DownCast
class GeomPlate_HSequenceOfPointConstraint(OCC.MMgt.MMgt_TShared):
    """SWIG proxy for OCCT's handle-managed sequence of GeomPlate_PointConstraint.

    Every method delegates to the _GeomPlate C extension; indices follow the
    OCCT convention (1-based).
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _GeomPlate.GeomPlate_HSequenceOfPointConstraint_swiginit(self,_GeomPlate.new_GeomPlate_HSequenceOfPointConstraint(*args))
    def IsEmpty(self, *args):
        """
        :rtype: bool
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_IsEmpty(self, *args)
    def Length(self, *args):
        """
        :rtype: int
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Length(self, *args)
    def Clear(self, *args):
        """
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Clear(self, *args)
    def Append(self, *args):
        """
        :param anItem:
        :type anItem: Handle_GeomPlate_PointConstraint &
        :rtype: None
        :param aSequence:
        :type aSequence: Handle_GeomPlate_HSequenceOfPointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Append(self, *args)
    def Prepend(self, *args):
        """
        :param anItem:
        :type anItem: Handle_GeomPlate_PointConstraint &
        :rtype: None
        :param aSequence:
        :type aSequence: Handle_GeomPlate_HSequenceOfPointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Prepend(self, *args)
    def Reverse(self, *args):
        """
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Reverse(self, *args)
    def InsertBefore(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :param anItem:
        :type anItem: Handle_GeomPlate_PointConstraint &
        :rtype: None
        :param anIndex:
        :type anIndex: int
        :param aSequence:
        :type aSequence: Handle_GeomPlate_HSequenceOfPointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_InsertBefore(self, *args)
    def InsertAfter(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :param anItem:
        :type anItem: Handle_GeomPlate_PointConstraint &
        :rtype: None
        :param anIndex:
        :type anIndex: int
        :param aSequence:
        :type aSequence: Handle_GeomPlate_HSequenceOfPointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_InsertAfter(self, *args)
    def Exchange(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :param anOtherIndex:
        :type anOtherIndex: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Exchange(self, *args)
    def Split(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :rtype: Handle_GeomPlate_HSequenceOfPointConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Split(self, *args)
    def SetValue(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :param anItem:
        :type anItem: Handle_GeomPlate_PointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_SetValue(self, *args)
    def Value(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :rtype: Handle_GeomPlate_PointConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Value(self, *args)
    def ChangeValue(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :rtype: Handle_GeomPlate_PointConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_ChangeValue(self, *args)
    def Remove(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :rtype: None
        :param fromIndex:
        :type fromIndex: int
        :param toIndex:
        :type toIndex: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Remove(self, *args)
    def Sequence(self, *args):
        """
        :rtype: GeomPlate_SequenceOfPointConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_Sequence(self, *args)
    def ChangeSequence(self, *args):
        """
        :rtype: GeomPlate_SequenceOfPointConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_ChangeSequence(self, *args)
    def ShallowCopy(self, *args):
        """
        :rtype: Handle_GeomPlate_HSequenceOfPointConstraint
        """
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_ShallowCopy(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_HSequenceOfPointConstraint self)"""
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_HSequenceOfPointConstraint self) -> Handle_GeomPlate_HSequenceOfPointConstraint"""
        return _GeomPlate.GeomPlate_HSequenceOfPointConstraint_GetHandle(self)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring: attach the C-extension implementations as bound methods
# of GeomPlate_HSequenceOfPointConstraint and register the class with SWIG.
GeomPlate_HSequenceOfPointConstraint.IsEmpty = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_IsEmpty,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Length = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Length,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Clear = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Clear,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Append = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Append,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Prepend = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Prepend,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Reverse = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Reverse,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.InsertBefore = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_InsertBefore,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.InsertAfter = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_InsertAfter,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Exchange = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Exchange,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Split = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Split,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.SetValue = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_SetValue,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Value = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Value,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.ChangeValue = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_ChangeValue,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Remove = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Remove,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.Sequence = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_Sequence,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.ChangeSequence = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_ChangeSequence,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.ShallowCopy = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_ShallowCopy,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint__kill_pointed,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_HSequenceOfPointConstraint_GetHandle,None,GeomPlate_HSequenceOfPointConstraint)
GeomPlate_HSequenceOfPointConstraint_swigregister = _GeomPlate.GeomPlate_HSequenceOfPointConstraint_swigregister
GeomPlate_HSequenceOfPointConstraint_swigregister(GeomPlate_HSequenceOfPointConstraint)
class Handle_GeomPlate_HSequenceOfPointConstraint(OCC.MMgt.Handle_MMgt_TShared):
    """SWIG proxy for the OCCT smart pointer (handle) to GeomPlate_HSequenceOfPointConstraint."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """Construct a handle; *args* are forwarded to the C++ handle constructor."""
        _GeomPlate.Handle_GeomPlate_HSequenceOfPointConstraint_swiginit(self,_GeomPlate.new_Handle_GeomPlate_HSequenceOfPointConstraint(*args))
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_HSequenceOfPointConstraint_DownCast)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring for the handle proxy: bind the C-extension handle
# operations and register the class with SWIG's type system.
Handle_GeomPlate_HSequenceOfPointConstraint.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_HSequenceOfPointConstraint_Nullify,None,Handle_GeomPlate_HSequenceOfPointConstraint)
Handle_GeomPlate_HSequenceOfPointConstraint.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_HSequenceOfPointConstraint_IsNull,None,Handle_GeomPlate_HSequenceOfPointConstraint)
Handle_GeomPlate_HSequenceOfPointConstraint.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_HSequenceOfPointConstraint_GetObject,None,Handle_GeomPlate_HSequenceOfPointConstraint)
Handle_GeomPlate_HSequenceOfPointConstraint._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_HSequenceOfPointConstraint__kill_pointed,None,Handle_GeomPlate_HSequenceOfPointConstraint)
Handle_GeomPlate_HSequenceOfPointConstraint_swigregister = _GeomPlate.Handle_GeomPlate_HSequenceOfPointConstraint_swigregister
Handle_GeomPlate_HSequenceOfPointConstraint_swigregister(Handle_GeomPlate_HSequenceOfPointConstraint)
def Handle_GeomPlate_HSequenceOfPointConstraint_DownCast(*args):
    """Safely down-cast a base-class handle to Handle_GeomPlate_HSequenceOfPointConstraint."""
    return _GeomPlate.Handle_GeomPlate_HSequenceOfPointConstraint_DownCast(*args)
# SWIG idiom: immediately rebind the name to the C-extension function,
# bypassing the Python wrapper defined just above.
Handle_GeomPlate_HSequenceOfPointConstraint_DownCast = _GeomPlate.Handle_GeomPlate_HSequenceOfPointConstraint_DownCast
class GeomPlate_MakeApprox(object):
    """SWIG proxy for OCCT's GeomPlate_MakeApprox: approximates a GeomPlate
    surface by a Geom_BSplineSurface within given tolerances."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Converts SurfPlate into a Geom_BSplineSurface with n Bezier pieces (n<=Nbmax) of degree <= dgmax and an approximation error < Tol3d if possible the criterion CritPlate is satisfied if possible
        :param SurfPlate:
        :type SurfPlate: Handle_GeomPlate_Surface &
        :param PlateCrit:
        :type PlateCrit: AdvApp2Var_Criterion &
        :param Tol3d:
        :type Tol3d: float
        :param Nbmax:
        :type Nbmax: int
        :param dgmax:
        :type dgmax: int
        :param Continuity: default value is GeomAbs_C1
        :type Continuity: GeomAbs_Shape
        :param EnlargeCoeff: default value is 1.1
        :type EnlargeCoeff: float
        :rtype: None
        * Converts SurfPlate into a Geom_BSplineSurface with n Bezier pieces (n<=Nbmax) of degree <= dgmax and an approximation error < Tol3d if possible if CritOrder = -1 , no criterion is used if CritOrder = 0 , a PlateG0Criterion is used with max value > 10*dmax if CritOrder = 1 , a PlateG1Criterion is used with max value > 10*dmax WARNING : for CritOrder = 0 or 1, only the constraints points of SurfPlate are used to evaluate the value of the criterion
        :param SurfPlate:
        :type SurfPlate: Handle_GeomPlate_Surface &
        :param Tol3d:
        :type Tol3d: float
        :param Nbmax:
        :type Nbmax: int
        :param dgmax:
        :type dgmax: int
        :param dmax:
        :type dmax: float
        :param CritOrder: default value is 0
        :type CritOrder: int
        :param Continuity: default value is GeomAbs_C1
        :type Continuity: GeomAbs_Shape
        :param EnlargeCoeff: default value is 1.1
        :type EnlargeCoeff: float
        :rtype: None
        """
        _GeomPlate.GeomPlate_MakeApprox_swiginit(self,_GeomPlate.new_GeomPlate_MakeApprox(*args))
    def Surface(self, *args):
        """
        * Returns the BSpline surface extracted from the GeomPlate_MakeApprox object.
        :rtype: Handle_Geom_BSplineSurface
        """
        return _GeomPlate.GeomPlate_MakeApprox_Surface(self, *args)
    def ApproxError(self, *args):
        """
        * Returns the error in computation of the approximation surface. This is the distance between the entire target BSpline surface and the entire original surface generated by BuildPlateSurface and converted by GeomPlate_Surface.
        :rtype: float
        """
        return _GeomPlate.GeomPlate_MakeApprox_ApproxError(self, *args)
    def CriterionError(self, *args):
        """
        * Returns the criterion error in computation of the approximation surface. This is estimated relative to the curve and point constraints only.
        :rtype: float
        """
        return _GeomPlate.GeomPlate_MakeApprox_CriterionError(self, *args)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring: bind the C-extension implementations and register
# GeomPlate_MakeApprox with SWIG's type system.
GeomPlate_MakeApprox.Surface = new_instancemethod(_GeomPlate.GeomPlate_MakeApprox_Surface,None,GeomPlate_MakeApprox)
GeomPlate_MakeApprox.ApproxError = new_instancemethod(_GeomPlate.GeomPlate_MakeApprox_ApproxError,None,GeomPlate_MakeApprox)
GeomPlate_MakeApprox.CriterionError = new_instancemethod(_GeomPlate.GeomPlate_MakeApprox_CriterionError,None,GeomPlate_MakeApprox)
GeomPlate_MakeApprox._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_MakeApprox__kill_pointed,None,GeomPlate_MakeApprox)
GeomPlate_MakeApprox_swigregister = _GeomPlate.GeomPlate_MakeApprox_swigregister
GeomPlate_MakeApprox_swigregister(GeomPlate_MakeApprox)
class GeomPlate_PlateG0Criterion(OCC.AdvApp2Var.AdvApp2Var_Criterion):
    """SWIG proxy for OCCT's GeomPlate_PlateG0Criterion (G0 approximation criterion)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param Data:
        :type Data: TColgp_SequenceOfXY
        :param G0Data:
        :type G0Data: TColgp_SequenceOfXYZ
        :param Maximum:
        :type Maximum: float
        :param Type: default value is AdvApp2Var_Absolute
        :type Type: AdvApp2Var_CriterionType
        :param Repart: default value is AdvApp2Var_Regular
        :type Repart: AdvApp2Var_CriterionRepartition
        :rtype: None
        """
        _GeomPlate.GeomPlate_PlateG0Criterion_swiginit(self,_GeomPlate.new_GeomPlate_PlateG0Criterion(*args))
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring: bind _kill_pointed and register the class.
GeomPlate_PlateG0Criterion._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_PlateG0Criterion__kill_pointed,None,GeomPlate_PlateG0Criterion)
GeomPlate_PlateG0Criterion_swigregister = _GeomPlate.GeomPlate_PlateG0Criterion_swigregister
GeomPlate_PlateG0Criterion_swigregister(GeomPlate_PlateG0Criterion)
class GeomPlate_PlateG1Criterion(OCC.AdvApp2Var.AdvApp2Var_Criterion):
    """SWIG proxy for OCCT's GeomPlate_PlateG1Criterion (G1 approximation criterion)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param Data:
        :type Data: TColgp_SequenceOfXY
        :param G1Data:
        :type G1Data: TColgp_SequenceOfXYZ
        :param Maximum:
        :type Maximum: float
        :param Type: default value is AdvApp2Var_Absolute
        :type Type: AdvApp2Var_CriterionType
        :param Repart: default value is AdvApp2Var_Regular
        :type Repart: AdvApp2Var_CriterionRepartition
        :rtype: None
        """
        _GeomPlate.GeomPlate_PlateG1Criterion_swiginit(self,_GeomPlate.new_GeomPlate_PlateG1Criterion(*args))
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring: bind _kill_pointed and register the class.
GeomPlate_PlateG1Criterion._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_PlateG1Criterion__kill_pointed,None,GeomPlate_PlateG1Criterion)
GeomPlate_PlateG1Criterion_swigregister = _GeomPlate.GeomPlate_PlateG1Criterion_swigregister
GeomPlate_PlateG1Criterion_swigregister(GeomPlate_PlateG1Criterion)
class GeomPlate_PointConstraint(OCC.MMgt.MMgt_TShared):
    """SWIG proxy for OCCT's GeomPlate_PointConstraint: a point constraint
    (G0/G1/G2) used when building a plate surface."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Constructs a point constraint object defined by Pt, a 3D point Order gives the order of constraint, one of: - -1 i.e. none, or 0 i.e.G0 when assigned to Pt - -1 i.e. none, 0 i.e. G0, 1 i.e. G1, 2 i.e. G2 when assigned to U, V and Surf. In this constructor, only TolDist is given. Distance tolerance represents the greatest distance allowed between the constraint and the target surface. Angular tolerance represents the largest angle allowed between the constraint and the target surface. Curvature tolerance represents the greatest difference in curvature allowed between the constraint and the target surface. Raises ConstructionError if Order is not 0 or -1
        :param Pt:
        :type Pt: gp_Pnt
        :param Order:
        :type Order: int
        :param TolDist: default value is 0.0001
        :type TolDist: float
        :rtype: None
        * Constructs a point constraint object defined by the intersection point of U and V on the surface Surf. Order gives the order of constraint, one of: - -1 i.e. none, or 0 i.e.G0 when assigned to Pt - -1 i.e. none, 0 i.e. G0, 1 i.e. G1, 2 i.e. G2 when assigned to U, V and Surf. In this constructor the surface to be generated must respect several tolerance values only: - the distance tolerance TolDist - the angular tolerance TolAng - the curvature tolerance, TolCurv. Distance tolerance represents the greatest distance allowed between the constraint and the target surface. Angular tolerance represents the largest angle allowed between the constraint and the target surface. Curvature tolerance represents the greatest difference in curvature allowed between the constraint and the target surface.Creates a punctual constraint.
        :param U:
        :type U: float
        :param V:
        :type V: float
        :param Surf:
        :type Surf: Handle_Geom_Surface &
        :param Order:
        :type Order: int
        :param TolDist: default value is 0.0001
        :type TolDist: float
        :param TolAng: default value is 0.01
        :type TolAng: float
        :param TolCurv: default value is 0.1
        :type TolCurv: float
        :rtype: None
        """
        _GeomPlate.GeomPlate_PointConstraint_swiginit(self,_GeomPlate.new_GeomPlate_PointConstraint(*args))
    def SetOrder(self, *args):
        """
        :param Order:
        :type Order: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_PointConstraint_SetOrder(self, *args)
    def Order(self, *args):
        """
        * Returns the order of constraint: G0, G1, and G2, controlled respectively by G0Criterion G1Criterion and G2Criterion.
        :rtype: int
        """
        return _GeomPlate.GeomPlate_PointConstraint_Order(self, *args)
    def SetG0Criterion(self, *args):
        """
        * Allows you to set the G0 criterion. This is the law defining the greatest distance allowed between the constraint and the target surface. If this criterion is not set, TolDist, the distance tolerance from the constructor, is used
        :param TolDist:
        :type TolDist: float
        :rtype: None
        """
        return _GeomPlate.GeomPlate_PointConstraint_SetG0Criterion(self, *args)
    def SetG1Criterion(self, *args):
        """
        * Allows you to set the G1 criterion. This is the law defining the greatest angle allowed between the constraint and the target surface. If this criterion is not set, TolAng, the angular tolerance from the constructor, is used. Raises ConstructionError if the point is not on the surface
        :param TolAng:
        :type TolAng: float
        :rtype: None
        """
        return _GeomPlate.GeomPlate_PointConstraint_SetG1Criterion(self, *args)
    def SetG2Criterion(self, *args):
        """
        * Allows you to set the G2 criterion. This is the law defining the greatest difference in curvature allowed between the constraint and the target surface. If this criterion is not set, TolCurv, the curvature tolerance from the constructor, is used. Raises ConstructionError if the point is not on the surface
        :param TolCurv:
        :type TolCurv: float
        :rtype: None
        """
        return _GeomPlate.GeomPlate_PointConstraint_SetG2Criterion(self, *args)
    def G0Criterion(self, *args):
        """
        * Returns the G0 criterion. This is the greatest distance allowed between the constraint and the target surface.
        :rtype: float
        """
        return _GeomPlate.GeomPlate_PointConstraint_G0Criterion(self, *args)
    def G1Criterion(self, *args):
        """
        * Returns the G1 criterion. This is the greatest angle allowed between the constraint and the target surface. Raises ConstructionError if the point is not on the surface.
        :rtype: float
        """
        return _GeomPlate.GeomPlate_PointConstraint_G1Criterion(self, *args)
    def G2Criterion(self, *args):
        """
        * Returns the G2 criterion. This is the greatest difference in curvature allowed between the constraint and the target surface. Raises ConstructionError if the point is not on the surface
        :rtype: float
        """
        return _GeomPlate.GeomPlate_PointConstraint_G2Criterion(self, *args)
    def D0(self, *args):
        """
        :param P:
        :type P: gp_Pnt
        :rtype: None
        """
        return _GeomPlate.GeomPlate_PointConstraint_D0(self, *args)
    def D1(self, *args):
        """
        :param P:
        :type P: gp_Pnt
        :param V1:
        :type V1: gp_Vec
        :param V2:
        :type V2: gp_Vec
        :rtype: None
        """
        return _GeomPlate.GeomPlate_PointConstraint_D1(self, *args)
    def D2(self, *args):
        """
        :param P:
        :type P: gp_Pnt
        :param V1:
        :type V1: gp_Vec
        :param V2:
        :type V2: gp_Vec
        :param V3:
        :type V3: gp_Vec
        :param V4:
        :type V4: gp_Vec
        :param V5:
        :type V5: gp_Vec
        :rtype: None
        """
        return _GeomPlate.GeomPlate_PointConstraint_D2(self, *args)
    def HasPnt2dOnSurf(self, *args):
        """
        :rtype: bool
        """
        return _GeomPlate.GeomPlate_PointConstraint_HasPnt2dOnSurf(self, *args)
    def SetPnt2dOnSurf(self, *args):
        """
        :param Pnt:
        :type Pnt: gp_Pnt2d
        :rtype: None
        """
        return _GeomPlate.GeomPlate_PointConstraint_SetPnt2dOnSurf(self, *args)
    def Pnt2dOnSurf(self, *args):
        """
        :rtype: gp_Pnt2d
        """
        return _GeomPlate.GeomPlate_PointConstraint_Pnt2dOnSurf(self, *args)
    def LPropSurf(self, *args):
        """
        :rtype: GeomLProp_SLProps
        """
        return _GeomPlate.GeomPlate_PointConstraint_LPropSurf(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_PointConstraint self)"""
        return _GeomPlate.GeomPlate_PointConstraint__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_PointConstraint self) -> Handle_GeomPlate_PointConstraint"""
        return _GeomPlate.GeomPlate_PointConstraint_GetHandle(self)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring: bind the C-extension implementations as methods of
# GeomPlate_PointConstraint and register the class with SWIG.
GeomPlate_PointConstraint.SetOrder = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_SetOrder,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.Order = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_Order,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.SetG0Criterion = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_SetG0Criterion,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.SetG1Criterion = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_SetG1Criterion,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.SetG2Criterion = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_SetG2Criterion,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.G0Criterion = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_G0Criterion,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.G1Criterion = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_G1Criterion,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.G2Criterion = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_G2Criterion,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.D0 = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_D0,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.D1 = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_D1,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.D2 = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_D2,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.HasPnt2dOnSurf = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_HasPnt2dOnSurf,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.SetPnt2dOnSurf = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_SetPnt2dOnSurf,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.Pnt2dOnSurf = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_Pnt2dOnSurf,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.LPropSurf = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_LPropSurf,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint__kill_pointed,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_PointConstraint_GetHandle,None,GeomPlate_PointConstraint)
GeomPlate_PointConstraint_swigregister = _GeomPlate.GeomPlate_PointConstraint_swigregister
GeomPlate_PointConstraint_swigregister(GeomPlate_PointConstraint)
class Handle_GeomPlate_PointConstraint(OCC.MMgt.Handle_MMgt_TShared):
    """SWIG proxy for the OCCT smart pointer (handle) to GeomPlate_PointConstraint."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """Construct a handle; *args* are forwarded to the C++ handle constructor."""
        _GeomPlate.Handle_GeomPlate_PointConstraint_swiginit(self,_GeomPlate.new_Handle_GeomPlate_PointConstraint(*args))
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_PointConstraint_DownCast)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring for the handle proxy: bind the C-extension handle
# operations and register the class with SWIG's type system.
Handle_GeomPlate_PointConstraint.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_PointConstraint_Nullify,None,Handle_GeomPlate_PointConstraint)
Handle_GeomPlate_PointConstraint.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_PointConstraint_IsNull,None,Handle_GeomPlate_PointConstraint)
Handle_GeomPlate_PointConstraint.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_PointConstraint_GetObject,None,Handle_GeomPlate_PointConstraint)
Handle_GeomPlate_PointConstraint._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_PointConstraint__kill_pointed,None,Handle_GeomPlate_PointConstraint)
Handle_GeomPlate_PointConstraint_swigregister = _GeomPlate.Handle_GeomPlate_PointConstraint_swigregister
Handle_GeomPlate_PointConstraint_swigregister(Handle_GeomPlate_PointConstraint)
def Handle_GeomPlate_PointConstraint_DownCast(*args):
    """Safely down-cast a base-class handle to Handle_GeomPlate_PointConstraint."""
    return _GeomPlate.Handle_GeomPlate_PointConstraint_DownCast(*args)
# SWIG idiom: immediately rebind the name to the C-extension function,
# bypassing the Python wrapper defined just above.
Handle_GeomPlate_PointConstraint_DownCast = _GeomPlate.Handle_GeomPlate_PointConstraint_DownCast
class GeomPlate_SequenceNodeOfSequenceOfAij(OCC.TCollection.TCollection_SeqNode):
    """SWIG proxy for a node of OCCT's GeomPlate_SequenceOfAij linked sequence."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param I:
        :type I: GeomPlate_Aij &
        :param n:
        :type n: TCollection_SeqNodePtr &
        :param p:
        :type p: TCollection_SeqNodePtr &
        :rtype: None
        """
        _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfAij_swiginit(self,_GeomPlate.new_GeomPlate_SequenceNodeOfSequenceOfAij(*args))
    def Value(self, *args):
        """
        :rtype: GeomPlate_Aij
        """
        return _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfAij_Value(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_SequenceNodeOfSequenceOfAij self)"""
        return _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfAij__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_SequenceNodeOfSequenceOfAij self) -> Handle_GeomPlate_SequenceNodeOfSequenceOfAij"""
        return _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfAij_GetHandle(self)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring: bind the C-extension implementations and register the class.
GeomPlate_SequenceNodeOfSequenceOfAij.Value = new_instancemethod(_GeomPlate.GeomPlate_SequenceNodeOfSequenceOfAij_Value,None,GeomPlate_SequenceNodeOfSequenceOfAij)
GeomPlate_SequenceNodeOfSequenceOfAij._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_SequenceNodeOfSequenceOfAij__kill_pointed,None,GeomPlate_SequenceNodeOfSequenceOfAij)
GeomPlate_SequenceNodeOfSequenceOfAij.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_SequenceNodeOfSequenceOfAij_GetHandle,None,GeomPlate_SequenceNodeOfSequenceOfAij)
GeomPlate_SequenceNodeOfSequenceOfAij_swigregister = _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfAij_swigregister
GeomPlate_SequenceNodeOfSequenceOfAij_swigregister(GeomPlate_SequenceNodeOfSequenceOfAij)
class Handle_GeomPlate_SequenceNodeOfSequenceOfAij(OCC.TCollection.Handle_TCollection_SeqNode):
    """SWIG proxy for the OCCT smart pointer (handle) to GeomPlate_SequenceNodeOfSequenceOfAij."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """Construct a handle; *args* are forwarded to the C++ handle constructor."""
        _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfAij_swiginit(self,_GeomPlate.new_Handle_GeomPlate_SequenceNodeOfSequenceOfAij(*args))
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfAij_DownCast)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# SWIG runtime wiring for the handle proxy: bind the C-extension handle
# operations and register the class with SWIG's type system.
Handle_GeomPlate_SequenceNodeOfSequenceOfAij.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfAij_Nullify,None,Handle_GeomPlate_SequenceNodeOfSequenceOfAij)
Handle_GeomPlate_SequenceNodeOfSequenceOfAij.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfAij_IsNull,None,Handle_GeomPlate_SequenceNodeOfSequenceOfAij)
Handle_GeomPlate_SequenceNodeOfSequenceOfAij.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfAij_GetObject,None,Handle_GeomPlate_SequenceNodeOfSequenceOfAij)
Handle_GeomPlate_SequenceNodeOfSequenceOfAij._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfAij__kill_pointed,None,Handle_GeomPlate_SequenceNodeOfSequenceOfAij)
Handle_GeomPlate_SequenceNodeOfSequenceOfAij_swigregister = _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfAij_swigregister
Handle_GeomPlate_SequenceNodeOfSequenceOfAij_swigregister(Handle_GeomPlate_SequenceNodeOfSequenceOfAij)
def Handle_GeomPlate_SequenceNodeOfSequenceOfAij_DownCast(*args):
    """Safely down-cast a base-class handle to Handle_GeomPlate_SequenceNodeOfSequenceOfAij."""
    return _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfAij_DownCast(*args)
# SWIG idiom: immediately rebind the name to the C-extension function,
# bypassing the Python wrapper defined just above.
Handle_GeomPlate_SequenceNodeOfSequenceOfAij_DownCast = _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfAij_DownCast
class GeomPlate_SequenceNodeOfSequenceOfCurveConstraint(OCC.TCollection.TCollection_SeqNode):
    """SWIG proxy for a node of OCCT's GeomPlate_SequenceOfCurveConstraint linked sequence."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param I:
        :type I: Handle_GeomPlate_CurveConstraint &
        :param n:
        :type n: TCollection_SeqNodePtr &
        :param p:
        :type p: TCollection_SeqNodePtr &
        :rtype: None
        """
        _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_swiginit(self,_GeomPlate.new_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint(*args))
    def Value(self, *args):
        """
        :rtype: Handle_GeomPlate_CurveConstraint
        """
        return _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_Value(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_SequenceNodeOfSequenceOfCurveConstraint self)"""
        return _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfCurveConstraint__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_SequenceNodeOfSequenceOfCurveConstraint self) -> Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint"""
        return _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_GetHandle(self)
    def __del__(self):
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except Exception:
            # GarbageCollector may already be torn down at interpreter exit;
            # a destructor must never raise.  Catch Exception (not a bare
            # except) so SystemExit/KeyboardInterrupt are not swallowed.
            pass
# Rebind proxy methods through SWIG's new_instancemethod and register the
# class with the C runtime.
GeomPlate_SequenceNodeOfSequenceOfCurveConstraint.Value = new_instancemethod(_GeomPlate.GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_Value,None,GeomPlate_SequenceNodeOfSequenceOfCurveConstraint)
GeomPlate_SequenceNodeOfSequenceOfCurveConstraint._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_SequenceNodeOfSequenceOfCurveConstraint__kill_pointed,None,GeomPlate_SequenceNodeOfSequenceOfCurveConstraint)
GeomPlate_SequenceNodeOfSequenceOfCurveConstraint.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_GetHandle,None,GeomPlate_SequenceNodeOfSequenceOfCurveConstraint)
GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_swigregister = _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_swigregister
GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_swigregister(GeomPlate_SequenceNodeOfSequenceOfCurveConstraint)
# SWIG proxy for the OCCT smart pointer (Handle) to the sequence node above.
class Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint(OCC.TCollection.Handle_TCollection_SeqNode):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_swiginit(self,_GeomPlate.new_Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint(*args))
    # Static downcast from a base handle to this concrete handle type.
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_DownCast)
    def __del__(self):
        # Deferred destruction via the module-level GarbageCollector; errors
        # (e.g. at interpreter shutdown) are deliberately ignored.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind handle operations (Nullify / IsNull / GetObject / _kill_pointed) to the
# C implementations and register the handle class with the SWIG runtime.
Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_Nullify,None,Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint)
Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_IsNull,None,Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint)
Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_GetObject,None,Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint)
Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint__kill_pointed,None,Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint)
Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_swigregister = _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_swigregister
Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_swigregister(Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint)
# NOTE: the def below is immediately shadowed by the rebinding to the raw C
# function, so the Python wrapper is never the one actually called.
def Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_DownCast(*args):
  return _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_DownCast(*args)
Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_DownCast = _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfCurveConstraint_DownCast
# SWIG proxy for the OCCT linked-list node that stores one
# Handle_GeomPlate_PointConstraint inside a GeomPlate_SequenceOfPointConstraint.
class GeomPlate_SequenceNodeOfSequenceOfPointConstraint(OCC.TCollection.TCollection_SeqNode):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param I:
        :type I: Handle_GeomPlate_PointConstraint &
        :param n:
        :type n: TCollection_SeqNodePtr &
        :param p:
        :type p: TCollection_SeqNodePtr &
        :rtype: None
        """
        _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfPointConstraint_swiginit(self,_GeomPlate.new_GeomPlate_SequenceNodeOfSequenceOfPointConstraint(*args))
    def Value(self, *args):
        """
        :rtype: Handle_GeomPlate_PointConstraint
        """
        return _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfPointConstraint_Value(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_SequenceNodeOfSequenceOfPointConstraint self)"""
        return _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfPointConstraint__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_SequenceNodeOfSequenceOfPointConstraint self) -> Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint"""
        return _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfPointConstraint_GetHandle(self)
    def __del__(self):
        # Deferred destruction via the module-level GarbageCollector; errors
        # are deliberately swallowed.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Rebind proxy methods through SWIG's new_instancemethod and register the
# class with the C runtime.
GeomPlate_SequenceNodeOfSequenceOfPointConstraint.Value = new_instancemethod(_GeomPlate.GeomPlate_SequenceNodeOfSequenceOfPointConstraint_Value,None,GeomPlate_SequenceNodeOfSequenceOfPointConstraint)
GeomPlate_SequenceNodeOfSequenceOfPointConstraint._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_SequenceNodeOfSequenceOfPointConstraint__kill_pointed,None,GeomPlate_SequenceNodeOfSequenceOfPointConstraint)
GeomPlate_SequenceNodeOfSequenceOfPointConstraint.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_SequenceNodeOfSequenceOfPointConstraint_GetHandle,None,GeomPlate_SequenceNodeOfSequenceOfPointConstraint)
GeomPlate_SequenceNodeOfSequenceOfPointConstraint_swigregister = _GeomPlate.GeomPlate_SequenceNodeOfSequenceOfPointConstraint_swigregister
GeomPlate_SequenceNodeOfSequenceOfPointConstraint_swigregister(GeomPlate_SequenceNodeOfSequenceOfPointConstraint)
# SWIG proxy for the OCCT smart pointer (Handle) to the point-constraint
# sequence node above.
class Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint(OCC.TCollection.Handle_TCollection_SeqNode):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_swiginit(self,_GeomPlate.new_Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint(*args))
    # Static downcast from a base handle to this concrete handle type.
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_DownCast)
    def __del__(self):
        # Deferred destruction via the module-level GarbageCollector; errors
        # are deliberately swallowed.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind handle operations to the C implementations and register the handle
# class with the SWIG runtime.
Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_Nullify,None,Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint)
Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_IsNull,None,Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint)
Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_GetObject,None,Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint)
Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint__kill_pointed,None,Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint)
Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_swigregister = _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_swigregister
Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_swigregister(Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint)
# NOTE: the def below is immediately shadowed by the rebinding to the raw C
# function, so the Python wrapper is never the one actually called.
def Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_DownCast(*args):
  return _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_DownCast(*args)
Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_DownCast = _GeomPlate.Handle_GeomPlate_SequenceNodeOfSequenceOfPointConstraint_DownCast
# SWIG proxy for the OCCT sequence container of GeomPlate_Aij values.
# Indices in OCCT sequences are 1-based. Methods whose docstrings list two
# :rtype: entries wrap two C++ overloads (single element vs. whole sequence).
class GeomPlate_SequenceOfAij(OCC.TCollection.TCollection_BaseSequence):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _GeomPlate.GeomPlate_SequenceOfAij_swiginit(self,_GeomPlate.new_GeomPlate_SequenceOfAij(*args))
    def Clear(self, *args):
        """
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_Clear(self, *args)
    def Assign(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_SequenceOfAij &
        :rtype: GeomPlate_SequenceOfAij
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_Assign(self, *args)
    def Set(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_SequenceOfAij &
        :rtype: GeomPlate_SequenceOfAij
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_Set(self, *args)
    def Append(self, *args):
        """
        :param T:
        :type T: GeomPlate_Aij &
        :rtype: None
        :param S:
        :type S: GeomPlate_SequenceOfAij &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_Append(self, *args)
    def Prepend(self, *args):
        """
        :param T:
        :type T: GeomPlate_Aij &
        :rtype: None
        :param S:
        :type S: GeomPlate_SequenceOfAij &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_Prepend(self, *args)
    def InsertBefore(self, *args):
        """
        :param Index:
        :type Index: int
        :param T:
        :type T: GeomPlate_Aij &
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: GeomPlate_SequenceOfAij &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_InsertBefore(self, *args)
    def InsertAfter(self, *args):
        """
        :param Index:
        :type Index: int
        :param T:
        :type T: GeomPlate_Aij &
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: GeomPlate_SequenceOfAij &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_InsertAfter(self, *args)
    def First(self, *args):
        """
        :rtype: GeomPlate_Aij
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_First(self, *args)
    def Last(self, *args):
        """
        :rtype: GeomPlate_Aij
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_Last(self, *args)
    def Split(self, *args):
        """
        :param Index:
        :type Index: int
        :param Sub:
        :type Sub: GeomPlate_SequenceOfAij &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_Split(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: GeomPlate_Aij
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_Value(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param I:
        :type I: GeomPlate_Aij &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_SetValue(self, *args)
    def ChangeValue(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: GeomPlate_Aij
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_ChangeValue(self, *args)
    def Remove(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: None
        :param FromIndex:
        :type FromIndex: int
        :param ToIndex:
        :type ToIndex: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfAij_Remove(self, *args)
    def __del__(self):
        # Deferred destruction via the module-level GarbageCollector; errors
        # are deliberately swallowed.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Rebind every container method through SWIG's new_instancemethod and
# register the class with the C runtime.
GeomPlate_SequenceOfAij.Clear = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_Clear,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.Assign = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_Assign,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.Set = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_Set,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.Append = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_Append,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.Prepend = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_Prepend,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.InsertBefore = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_InsertBefore,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.InsertAfter = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_InsertAfter,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.First = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_First,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.Last = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_Last,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.Split = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_Split,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.Value = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_Value,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.SetValue = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_SetValue,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.ChangeValue = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_ChangeValue,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij.Remove = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij_Remove,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfAij__kill_pointed,None,GeomPlate_SequenceOfAij)
GeomPlate_SequenceOfAij_swigregister = _GeomPlate.GeomPlate_SequenceOfAij_swigregister
GeomPlate_SequenceOfAij_swigregister(GeomPlate_SequenceOfAij)
# SWIG proxy for the OCCT sequence container of Handle_GeomPlate_CurveConstraint.
# Indices in OCCT sequences are 1-based. Methods whose docstrings list two
# :rtype: entries wrap two C++ overloads (single element vs. whole sequence).
class GeomPlate_SequenceOfCurveConstraint(OCC.TCollection.TCollection_BaseSequence):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _GeomPlate.GeomPlate_SequenceOfCurveConstraint_swiginit(self,_GeomPlate.new_GeomPlate_SequenceOfCurveConstraint(*args))
    def Clear(self, *args):
        """
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_Clear(self, *args)
    def Assign(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_SequenceOfCurveConstraint &
        :rtype: GeomPlate_SequenceOfCurveConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_Assign(self, *args)
    def Set(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_SequenceOfCurveConstraint &
        :rtype: GeomPlate_SequenceOfCurveConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_Set(self, *args)
    def Append(self, *args):
        """
        :param T:
        :type T: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        :param S:
        :type S: GeomPlate_SequenceOfCurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_Append(self, *args)
    def Prepend(self, *args):
        """
        :param T:
        :type T: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        :param S:
        :type S: GeomPlate_SequenceOfCurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_Prepend(self, *args)
    def InsertBefore(self, *args):
        """
        :param Index:
        :type Index: int
        :param T:
        :type T: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: GeomPlate_SequenceOfCurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_InsertBefore(self, *args)
    def InsertAfter(self, *args):
        """
        :param Index:
        :type Index: int
        :param T:
        :type T: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: GeomPlate_SequenceOfCurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_InsertAfter(self, *args)
    def First(self, *args):
        """
        :rtype: Handle_GeomPlate_CurveConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_First(self, *args)
    def Last(self, *args):
        """
        :rtype: Handle_GeomPlate_CurveConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_Last(self, *args)
    def Split(self, *args):
        """
        :param Index:
        :type Index: int
        :param Sub:
        :type Sub: GeomPlate_SequenceOfCurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_Split(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_GeomPlate_CurveConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_Value(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param I:
        :type I: Handle_GeomPlate_CurveConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_SetValue(self, *args)
    def ChangeValue(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_GeomPlate_CurveConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_ChangeValue(self, *args)
    def Remove(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: None
        :param FromIndex:
        :type FromIndex: int
        :param ToIndex:
        :type ToIndex: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfCurveConstraint_Remove(self, *args)
    def __del__(self):
        # Deferred destruction via the module-level GarbageCollector; errors
        # are deliberately swallowed.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Rebind every container method through SWIG's new_instancemethod and
# register the class with the C runtime.
GeomPlate_SequenceOfCurveConstraint.Clear = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_Clear,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.Assign = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_Assign,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.Set = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_Set,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.Append = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_Append,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.Prepend = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_Prepend,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.InsertBefore = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_InsertBefore,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.InsertAfter = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_InsertAfter,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.First = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_First,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.Last = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_Last,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.Split = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_Split,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.Value = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_Value,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.SetValue = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_SetValue,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.ChangeValue = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_ChangeValue,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint.Remove = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint_Remove,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfCurveConstraint__kill_pointed,None,GeomPlate_SequenceOfCurveConstraint)
GeomPlate_SequenceOfCurveConstraint_swigregister = _GeomPlate.GeomPlate_SequenceOfCurveConstraint_swigregister
GeomPlate_SequenceOfCurveConstraint_swigregister(GeomPlate_SequenceOfCurveConstraint)
# SWIG proxy for the OCCT sequence container of Handle_GeomPlate_PointConstraint.
# Indices in OCCT sequences are 1-based. Methods whose docstrings list two
# :rtype: entries wrap two C++ overloads (single element vs. whole sequence).
class GeomPlate_SequenceOfPointConstraint(OCC.TCollection.TCollection_BaseSequence):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _GeomPlate.GeomPlate_SequenceOfPointConstraint_swiginit(self,_GeomPlate.new_GeomPlate_SequenceOfPointConstraint(*args))
    def Clear(self, *args):
        """
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_Clear(self, *args)
    def Assign(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_SequenceOfPointConstraint &
        :rtype: GeomPlate_SequenceOfPointConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_Assign(self, *args)
    def Set(self, *args):
        """
        :param Other:
        :type Other: GeomPlate_SequenceOfPointConstraint &
        :rtype: GeomPlate_SequenceOfPointConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_Set(self, *args)
    def Append(self, *args):
        """
        :param T:
        :type T: Handle_GeomPlate_PointConstraint &
        :rtype: None
        :param S:
        :type S: GeomPlate_SequenceOfPointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_Append(self, *args)
    def Prepend(self, *args):
        """
        :param T:
        :type T: Handle_GeomPlate_PointConstraint &
        :rtype: None
        :param S:
        :type S: GeomPlate_SequenceOfPointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_Prepend(self, *args)
    def InsertBefore(self, *args):
        """
        :param Index:
        :type Index: int
        :param T:
        :type T: Handle_GeomPlate_PointConstraint &
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: GeomPlate_SequenceOfPointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_InsertBefore(self, *args)
    def InsertAfter(self, *args):
        """
        :param Index:
        :type Index: int
        :param T:
        :type T: Handle_GeomPlate_PointConstraint &
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: GeomPlate_SequenceOfPointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_InsertAfter(self, *args)
    def First(self, *args):
        """
        :rtype: Handle_GeomPlate_PointConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_First(self, *args)
    def Last(self, *args):
        """
        :rtype: Handle_GeomPlate_PointConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_Last(self, *args)
    def Split(self, *args):
        """
        :param Index:
        :type Index: int
        :param Sub:
        :type Sub: GeomPlate_SequenceOfPointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_Split(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_GeomPlate_PointConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_Value(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param I:
        :type I: Handle_GeomPlate_PointConstraint &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_SetValue(self, *args)
    def ChangeValue(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_GeomPlate_PointConstraint
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_ChangeValue(self, *args)
    def Remove(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: None
        :param FromIndex:
        :type FromIndex: int
        :param ToIndex:
        :type ToIndex: int
        :rtype: None
        """
        return _GeomPlate.GeomPlate_SequenceOfPointConstraint_Remove(self, *args)
    def __del__(self):
        # Deferred destruction via the module-level GarbageCollector; errors
        # are deliberately swallowed.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Rebind every container method through SWIG's new_instancemethod and
# register the class with the C runtime.
GeomPlate_SequenceOfPointConstraint.Clear = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_Clear,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.Assign = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_Assign,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.Set = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_Set,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.Append = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_Append,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.Prepend = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_Prepend,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.InsertBefore = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_InsertBefore,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.InsertAfter = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_InsertAfter,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.First = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_First,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.Last = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_Last,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.Split = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_Split,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.Value = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_Value,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.SetValue = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_SetValue,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.ChangeValue = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_ChangeValue,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint.Remove = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint_Remove,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_SequenceOfPointConstraint__kill_pointed,None,GeomPlate_SequenceOfPointConstraint)
GeomPlate_SequenceOfPointConstraint_swigregister = _GeomPlate.GeomPlate_SequenceOfPointConstraint_swigregister
GeomPlate_SequenceOfPointConstraint_swigregister(GeomPlate_SequenceOfPointConstraint)
# SWIG proxy for GeomPlate_Surface: a Geom_Surface defined by an initial
# surface plus a Plate_Plate deformation. All work is delegated to the
# _GeomPlate C extension.
class GeomPlate_Surface(OCC.Geom.Geom_Surface):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param Surfinit:
        :type Surfinit: Handle_Geom_Surface &
        :param Surfinter:
        :type Surfinter: Plate_Plate &
        :rtype: None
        """
        _GeomPlate.GeomPlate_Surface_swiginit(self,_GeomPlate.new_GeomPlate_Surface(*args))
    def CallSurfinit(self, *args):
        """
        :rtype: Handle_Geom_Surface
        """
        return _GeomPlate.GeomPlate_Surface_CallSurfinit(self, *args)
    def SetBounds(self, *args):
        """
        :param Umin:
        :type Umin: float
        :param Umax:
        :type Umax: float
        :param Vmin:
        :type Vmin: float
        :param Vmax:
        :type Vmax: float
        :rtype: None
        """
        return _GeomPlate.GeomPlate_Surface_SetBounds(self, *args)
    def RealBounds(self, *args):
        """
        :param U1:
        :type U1: float &
        :param U2:
        :type U2: float &
        :param V1:
        :type V1: float &
        :param V2:
        :type V2: float &
        :rtype: None
        """
        return _GeomPlate.GeomPlate_Surface_RealBounds(self, *args)
    def Constraints(self, *args):
        """
        :param Seq:
        :type Seq: TColgp_SequenceOfXY
        :rtype: None
        """
        return _GeomPlate.GeomPlate_Surface_Constraints(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(GeomPlate_Surface self)"""
        return _GeomPlate.GeomPlate_Surface__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(GeomPlate_Surface self) -> Handle_GeomPlate_Surface"""
        return _GeomPlate.GeomPlate_Surface_GetHandle(self)
    def __del__(self):
        # Deferred destruction via the module-level GarbageCollector; errors
        # are deliberately swallowed.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Rebind proxy methods through SWIG's new_instancemethod and register the
# class with the C runtime.
GeomPlate_Surface.CallSurfinit = new_instancemethod(_GeomPlate.GeomPlate_Surface_CallSurfinit,None,GeomPlate_Surface)
GeomPlate_Surface.SetBounds = new_instancemethod(_GeomPlate.GeomPlate_Surface_SetBounds,None,GeomPlate_Surface)
GeomPlate_Surface.RealBounds = new_instancemethod(_GeomPlate.GeomPlate_Surface_RealBounds,None,GeomPlate_Surface)
GeomPlate_Surface.Constraints = new_instancemethod(_GeomPlate.GeomPlate_Surface_Constraints,None,GeomPlate_Surface)
GeomPlate_Surface._kill_pointed = new_instancemethod(_GeomPlate.GeomPlate_Surface__kill_pointed,None,GeomPlate_Surface)
GeomPlate_Surface.GetHandle = new_instancemethod(_GeomPlate.GeomPlate_Surface_GetHandle,None,GeomPlate_Surface)
GeomPlate_Surface_swigregister = _GeomPlate.GeomPlate_Surface_swigregister
GeomPlate_Surface_swigregister(GeomPlate_Surface)
# SWIG proxy for the OCCT smart pointer (Handle) to GeomPlate_Surface.
class Handle_GeomPlate_Surface(OCC.Geom.Handle_Geom_Surface):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _GeomPlate.Handle_GeomPlate_Surface_swiginit(self,_GeomPlate.new_Handle_GeomPlate_Surface(*args))
    # Static downcast from a base handle to this concrete handle type.
    DownCast = staticmethod(_GeomPlate.Handle_GeomPlate_Surface_DownCast)
    def __del__(self):
        # Deferred destruction via the module-level GarbageCollector; errors
        # are deliberately swallowed.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind handle operations to the C implementations and register the handle
# class with the SWIG runtime.
Handle_GeomPlate_Surface.Nullify = new_instancemethod(_GeomPlate.Handle_GeomPlate_Surface_Nullify,None,Handle_GeomPlate_Surface)
Handle_GeomPlate_Surface.IsNull = new_instancemethod(_GeomPlate.Handle_GeomPlate_Surface_IsNull,None,Handle_GeomPlate_Surface)
Handle_GeomPlate_Surface.GetObject = new_instancemethod(_GeomPlate.Handle_GeomPlate_Surface_GetObject,None,Handle_GeomPlate_Surface)
Handle_GeomPlate_Surface._kill_pointed = new_instancemethod(_GeomPlate.Handle_GeomPlate_Surface__kill_pointed,None,Handle_GeomPlate_Surface)
Handle_GeomPlate_Surface_swigregister = _GeomPlate.Handle_GeomPlate_Surface_swigregister
Handle_GeomPlate_Surface_swigregister(Handle_GeomPlate_Surface)
# NOTE: the def below is immediately shadowed by the rebinding to the raw C
# function, so the Python wrapper is never the one actually called.
def Handle_GeomPlate_Surface_DownCast(*args):
  return _GeomPlate.Handle_GeomPlate_Surface_DownCast(*args)
Handle_GeomPlate_Surface_DownCast = _GeomPlate.Handle_GeomPlate_Surface_DownCast
| 42.475084 | 1,340 | 0.75059 |
cd2329d9b14613081582025a26bead9d9863340f | 1,294 | py | Python | test/py_tests/test_cnt.py | kalyanam-FMTGA/ray-original | f4f57896015c1c29ca571069b007c409d74824e0 | [
"BSD-3-Clause-LBNL"
] | null | null | null | test/py_tests/test_cnt.py | kalyanam-FMTGA/ray-original | f4f57896015c1c29ca571069b007c409d74824e0 | [
"BSD-3-Clause-LBNL"
] | null | null | null | test/py_tests/test_cnt.py | kalyanam-FMTGA/ray-original | f4f57896015c1c29ca571069b007c409d74824e0 | [
"BSD-3-Clause-LBNL"
] | null | null | null |
import os
import math
import string
import unittest
from unit_tools import support
from unit_tools import lcompare
class CntTestCase(unittest.TestCase):
	# Functional tests for the "cnt" counting utility (Python 2 era code:
	# map/string-module idioms and "except Error, e" syntax).
	def setUp(self):
		# Prepend nothing: PATH is replaced wholesale so os.popen() resolves
		# "cnt" from the test binary directory only; restored in tearDown.
		self.oldpath = os.environ['PATH']
		os.environ['PATH'] = os.path.abspath(support.BINDIR)
	def tearDown(self):
		os.environ['PATH'] = self.oldpath
	def test_1(self):
		# One argument: counts 0..4, one value per line (trailing '' is the
		# final newline after splitting).
		cmd = 'cnt 5'
		res0 = os.popen(cmd).read()
		res = map(string.strip,string.split(res0, '\n'))
		exp = [0, 1, 2, 3, 4, '']
		try: lcompare.lcompare(res, exp)
		except lcompare.error, e: self.fail(str(e))
	def test_2(self):
		# Two arguments: cartesian counting over a 3x2 grid.
		cmd = 'cnt 3 2'
		res0 = os.popen(cmd).read()
		res = map(string.split,string.split(res0, '\n'))
		exp = [[0,0], [0,1], [1,0], [1,1], [2,0], [2,1], []]
		try: lcompare.llcompare(res, exp)
		except lcompare.error, e: self.fail(str(e))
	def test_3(self):
		# Three arguments: cartesian counting over a 3x2x3 grid.
		cmd = 'cnt 3 2 3'
		res0 = os.popen(cmd).read()
		res = map(string.split,string.split(res0, '\n'))
		exp = [[0,0,0],[0,0,1],[0,0,2],
		[0,1,0],[0,1,1],[0,1,2],
		[1,0,0],[1,0,1],[1,0,2],
		[1,1,0],[1,1,1],[1,1,2],
		[2,0,0],[2,0,1],[2,0,2],
		[2,1,0],[2,1,1],[2,1,2],
		[]]
		try: lcompare.llcompare(res, exp)
		except lcompare.error, e: self.fail(str(e))
def main():
	# Delegate to the project's shared test runner.
	support.run_case(CntTestCase)
if __name__ == '__main__':
	main()
# vi: set ts=4 sw=4 :
| 22.701754 | 54 | 0.602782 |
34bf1bd511fd03c2f56a24944085039a0a4f7c1c | 1,667 | py | Python | Pwnable/BKSEC2019/deadnote1/dead_note_1.py | kamithanthanh/hacmao.github.io | 87b06df827cc65f737831301bae1d5f3a2d014ff | [
"MIT"
] | 1 | 2019-06-02T02:52:32.000Z | 2019-06-02T02:52:32.000Z | Pwnable/BKSEC2019/deadnote1/dead_note_1.py | kamithanthanh/hacmao.github.io | 87b06df827cc65f737831301bae1d5f3a2d014ff | [
"MIT"
] | null | null | null | Pwnable/BKSEC2019/deadnote1/dead_note_1.py | kamithanthanh/hacmao.github.io | 87b06df827cc65f737831301bae1d5f3a2d014ff | [
"MIT"
] | 1 | 2019-06-01T16:10:59.000Z | 2019-06-01T16:10:59.000Z | from pwn import *
# Spawn the vulnerable binary locally; the commented-out line switches the
# exploit to the remote CTF service instead.
sh = process("./Dead_Note_Lv1")
# sh = remote("bksec.team", 4326)
def get_PIE(proc):
    # Recover the process's PIE base (ASLR slide) by reading the first
    # mapping in /proc/<pid>/maps and parsing its start address as hex.
    memory_map = open("/proc/{}/maps".format(proc.pid),"rb").readlines()
    return int(memory_map[0].split("-")[0],16)
def debug(bp):
    # Attach gdb to the target with breakpoints at the given PIE-relative
    # offsets (bp is a list of ints); prints two fixed data addresses first.
    #bp = [0xea0,0xd31,0xc52]
    #bp = [0x00000dfb,0x00000b7c,0x00000d10]
    script = ""
    PIE = get_PIE(sh)
    PAPA = PIE
    print hex(PIE + 0x202018) # free
    print hex(PIE + 0x2020E0) # node
    # script += "x/gx 0x%x\n"%(PIE + 0x202018)
    for x in bp:
        script += "b *0x%x\n"%(PIE+x)
    gdb.attach(sh,gdbscript=script)
def add(id, count, content) :
    # Drive menu option 1: create a note at the given (possibly negative,
    # out-of-bounds) index with the given size and raw content bytes.
    sh.recv()
    sh.sendline("1")
    sh.recv()
    sh.sendline(str(id))
    sh.recv()
    sh.sendline(str(count))
    sh.recv()
    sh.send(content)
def add_shell(id, count, content) :
    # Like add(), but pads the content with a short-jump instruction
    # (0xEB <offset>) so consecutive chunks chain together when executed.
    sh.recv()
    sh.sendline("1")
    sh.recv()
    sh.sendline(str(id))
    sh.recv()
    sh.sendline(str(count))
    sh.recv()
    jmp = "\xEB"
    jmp += chr(30 -len(content))
    sh.send(content + jmp)
def free(id) :
    # Drive menu option 2: delete the note at the given index.
    sh.recv()
    sh.sendline("2")
    sh.recv()
    sh.sendline(str(id))
# Exploit body: abuse negative note indices to write out of bounds into the
# GOT/data section, then pivot execution to pop a shell.
debug([0xC3A, 0x629]) #
shellcode = "\x31\xc0\x48\xbb\xd1\x9d\x96\x91\xd0\x8c\x97\xff\x48\xf7\xdb\x53\x54\x5f\x99\x52\x57\x54\x5e\xb0\x3b\x0f\x05"
add(-23, 1, "\x31\xc0\xc3") # overwrite strlen
# add_shell(0, 1, "\xBA\x2F\x2F\x73\x68")
add(-14, 1, "\x50\x5A\x50\x5E\x34\x3B\x0F\x05") # overwrite atoi -> heap
#for i in range(1, len(shellcode)) :
#    add(i , 2, shellcode[i])
#add(0, 1, "aaaaaaaa")
# free(0)
print sh.recv()
sh.send("/bin/sh")
sh.interactive()
a382e9d06c62ee7d49fe69319bf08ba3af605f6f | 457 | py | Python | venv/Scripts/easy_install-3.7-script.py | gauravsarkar97/JukeBox | e67c80bcb934703ff00531bd4d32a5c2dafb473c | [
"MIT"
] | 2 | 2019-07-29T15:45:31.000Z | 2019-11-17T23:33:58.000Z | venv/Scripts/easy_install-3.7-script.py | gauravsarkar97/JukeBox | e67c80bcb934703ff00531bd4d32a5c2dafb473c | [
"MIT"
] | null | null | null | venv/Scripts/easy_install-3.7-script.py | gauravsarkar97/JukeBox | e67c80bcb934703ff00531bd4d32a5c2dafb473c | [
"MIT"
] | 1 | 2019-01-06T15:18:58.000Z | 2019-01-06T15:18:58.000Z | #!C:\Users\gaura\PycharmProjects\Melody\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
)
| 35.153846 | 87 | 0.693654 |
6dcf1a7744204c063c40e451d1f0b52f69ec1dc4 | 2,119 | py | Python | przyklady/przyklady/0/Listing_17-2.py | bitounu/Nauka-Pythona | e02dd239ba2c294d6fdd98958301b3ece44f44d1 | [
"Unlicense"
] | null | null | null | przyklady/przyklady/0/Listing_17-2.py | bitounu/Nauka-Pythona | e02dd239ba2c294d6fdd98958301b3ece44f44d1 | [
"Unlicense"
] | null | null | null | przyklady/przyklady/0/Listing_17-2.py | bitounu/Nauka-Pythona | e02dd239ba2c294d6fdd98958301b3ece44f44d1 | [
"Unlicense"
] | null | null | null | # Listing 17-2.py
# Copyright Warren & Carter Sande, 2013
# Released under MIT license http://www.opensource.org/licenses/mit-license.php
# Version $version ----------------------------
# Program służący do poruszania piłkami za pomocą sprajtów
import sys, pygame
from random import *
#-----definicja klasy pochodnej -----------------------------
class KlasaMojaPilka(pygame.sprite.Sprite):
def __init__(self, plik_obrazka, polozenie, predkosc):
pygame.sprite.Sprite.__init__(self) # wywołanie inicjalizatora klasy Sprite
self.image = pygame.image.load(plik_obrazka)
self.rect = self.image.get_rect()
self.rect.left, self.rect.top = polozenie
self.predkosc = predkosc
def przesun(self):
self.rect = self.rect.move(self.predkosc)
# sprawdzamy, czy piłka uderzyła w boczną krawędź okna
# jeżeli tak, odwracamy kierunek ruchu piłki w osi x
if self.rect.left < 0 or self.rect.right > szerokosc:
self.predkosc[0] = -self.predkosc[0]
# sprawdzamy, czy piłka uderzyła w górną bądź dolną krawędź okna
# jeżeli tak, odwracamy kierunek ruchu piłki w osi y
if self.rect.top < 0 or self.rect.bottom > wysokosc:
self.predkosc[1] = -self.predkosc[1]
#----- główny program -----------------------------
rozmiar = szerokosc, wysokosc = 640, 480
ekran = pygame.display.set_mode(rozmiar)
ekran.fill([255, 255, 255])
plik_obrazka = "beach_ball.png"
pilki = []
for wiersz in range (0, 3):
for kolumna in range (0, 3):
polozenie = [kolumna * 180 + 10, wiersz * 180 + 10]
predkosc = [choice([-2, 2]), choice([-2, 2])]
pilka = KlasaMojaPilka(plik_obrazka, polozenie, predkosc)
pilki.append(pilka) # dodajemy piłkę do listy
uruchomiony = True
while uruchomiony:
for event in pygame.event.get():
if event.type == pygame.QUIT:
uruchomiony = False
pygame.time.delay(20)
ekran.fill([255, 255, 255])
for pilka in pilki:
pilka.przesun()
ekran.blit(pilka.image, pilka.rect)
pygame.display.flip()
pygame.quit()
| 37.175439 | 84 | 0.634261 |
4d8162a34fde5fc42f0cb479fc9ca65470a4f95e | 3,202 | py | Python | tests/algorithms/test_fqi.py | k4ntz/mushroom-rl | 17c8e9b2a9648a59169f3599c4ef8d259afc39f4 | [
"MIT"
] | 1 | 2020-11-06T18:32:32.000Z | 2020-11-06T18:32:32.000Z | tests/algorithms/test_fqi.py | AmmarFahmy/mushroom-rl | 2625ee7f64d5613b3b9fba00f0b7a39fece88ca5 | [
"MIT"
] | null | null | null | tests/algorithms/test_fqi.py | AmmarFahmy/mushroom-rl | 2625ee7f64d5613b3b9fba00f0b7a39fece88ca5 | [
"MIT"
] | null | null | null | import numpy as np
from sklearn.ensemble import ExtraTreesRegressor
from datetime import datetime
from helper.utils import TestUtils as tu
from mushroom_rl.algorithms import Agent
from mushroom_rl.algorithms.value import DoubleFQI, FQI
from mushroom_rl.core import Core
from mushroom_rl.environments import *
from mushroom_rl.policy import EpsGreedy
from mushroom_rl.utils.dataset import compute_J
from mushroom_rl.utils.parameters import Parameter
def learn(alg, alg_params):
mdp = CarOnHill()
np.random.seed(1)
# Policy
epsilon = Parameter(value=1.)
pi = EpsGreedy(epsilon=epsilon)
# Approximator
approximator_params = dict(input_shape=mdp.info.observation_space.shape,
n_actions=mdp.info.action_space.n,
n_estimators=50,
min_samples_split=5,
min_samples_leaf=2)
approximator = ExtraTreesRegressor
# Agent
agent = alg(mdp.info, pi, approximator,
approximator_params=approximator_params, **alg_params)
# Algorithm
core = Core(agent, mdp)
# Train
core.learn(n_episodes=5, n_episodes_per_fit=5)
test_epsilon = Parameter(0.75)
agent.policy.set_epsilon(test_epsilon)
dataset = core.evaluate(n_episodes=2)
return agent, np.mean(compute_J(dataset, mdp.info.gamma))
def test_fqi():
params = dict(n_iterations=10)
_, j = learn(FQI, params)
j_test = -0.06763797713952796
assert j == j_test
def test_fqi_save(tmpdir):
agent_path = tmpdir / 'agent_{}'.format(datetime.now().strftime("%H%M%S%f"))
params = dict(n_iterations=10)
agent_save, _ = learn(FQI, params)
agent_save.save(agent_path)
agent_load = Agent.load(agent_path)
for att, method in vars(agent_save).items():
save_attr = getattr(agent_save, att)
load_attr = getattr(agent_load, att)
tu.assert_eq(save_attr, load_attr)
def test_fqi_boosted():
params = dict(n_iterations=10, boosted=True)
_, j = learn(FQI, params)
j_test = -0.04487241596542538
assert j == j_test
def test_fqi_boosted_save(tmpdir):
agent_path = tmpdir / 'agent_{}'.format(datetime.now().strftime("%H%M%S%f"))
params = dict(n_iterations=10, boosted=True)
agent_save, _ = learn(FQI, params)
agent_save.save(agent_path)
agent_load = Agent.load(agent_path)
for att, method in vars(agent_save).items():
save_attr = getattr(agent_save, att)
load_attr = getattr(agent_load, att)
tu.assert_eq(save_attr, load_attr)
def test_double_fqi():
params = dict(n_iterations=10)
_, j = learn(DoubleFQI, params)
j_test = -0.19933233708925654
assert j == j_test
def test_double_fqi_save(tmpdir):
agent_path = tmpdir / 'agent_{}'.format(datetime.now().strftime("%H%M%S%f"))
params = dict(n_iterations=10)
agent_save, _ = learn(DoubleFQI, params)
agent_save.save(agent_path)
agent_load = Agent.load(agent_path)
for att, method in vars(agent_save).items():
save_attr = getattr(agent_save, att)
load_attr = getattr(agent_load, att)
tu.assert_eq(save_attr, load_attr)
| 26.907563 | 80 | 0.677077 |
8de834e389056ef4be7979b6477ab4e0272176ef | 1,694 | py | Python | pypressruby/main_widget.py | gfabbris/pypressruby | ce5b89821b0bd829c0aac85f1b364a6f0202716e | [
"BSD-3-Clause"
] | null | null | null | pypressruby/main_widget.py | gfabbris/pypressruby | ce5b89821b0bd829c0aac85f1b364a6f0202716e | [
"BSD-3-Clause"
] | null | null | null | pypressruby/main_widget.py | gfabbris/pypressruby | ce5b89821b0bd829c0aac85f1b364a6f0202716e | [
"BSD-3-Clause"
] | null | null | null | """
Copyright (c) 2018-2021, UChicago Argonne, LLC
See LICENSE file.
"""
from PyQt5.QtWidgets import (
QMainWindow,
QApplication,
QWidget,
QGridLayout,
QMessageBox,
)
from pypressruby.plot_widget import PlotWidget
from pypressruby.options_widget import OptionsWidget
from pypressruby.widgets_logic import LogicWidgets
class MainWindow(QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
self.spec_fname = ""
self.statusBar().showMessage("Ready")
self.setWindowTitle("Ruby Pressure Calibration")
self.setGeometry(200, 200, 1000, 600)
optwidth = 300
self.plot_widget = PlotWidget()
self.plot_widget.setFixedWidth(1000 - optwidth)
self.options_widget = OptionsWidget()
self.options_widget.setFixedWidth(optwidth)
self._layout = QGridLayout()
self._layout.addWidget(self.options_widget, 0, 0)
self._layout.addWidget(self.plot_widget, 0, 1, 1, 5)
wid = QWidget(self)
self.setCentralWidget(wid)
wid.setLayout(self._layout)
self.connections = LogicWidgets(
self.statusBar(), self.options_widget, self.plot_widget
)
def closeEvent(self, event):
close = QMessageBox()
close.setText("Are you sure?")
close.setStandardButtons(QMessageBox.Yes | QMessageBox.Cancel)
close = close.exec()
if close == QMessageBox.Yes:
self.connections.stop_spectrometer()
event.accept()
else:
event.ignore()
if __name__ == "__main__":
app = QApplication([])
widget = MainWindow()
widget.show()
app.exec_()
| 24.911765 | 70 | 0.646989 |
ceeb2287a1b317cdd310011375e8150d63090d4b | 1,833 | py | Python | Crawlers/news_sites/spiders/neth.py | ashu8912/fact-Bounty | 9ecf86faa1829753c285424d68ac2adfd626ba1f | [
"Apache-2.0"
] | 57 | 2019-01-23T16:19:12.000Z | 2020-07-05T09:19:52.000Z | Crawlers/news_sites/spiders/neth.py | ashu8912/fact-Bounty | 9ecf86faa1829753c285424d68ac2adfd626ba1f | [
"Apache-2.0"
] | 402 | 2019-02-09T10:41:57.000Z | 2020-07-05T06:51:29.000Z | Crawlers/news_sites/spiders/neth.py | ashu8912/fact-Bounty | 9ecf86faa1829753c285424d68ac2adfd626ba1f | [
"Apache-2.0"
] | 173 | 2019-01-24T06:15:04.000Z | 2020-09-08T14:07:38.000Z | # -*- coding: utf-8 -*-
import scrapy
import dateutil.parser as dparser
from news_sites.items import NewsSitesItem
class NethGossipSpider(scrapy.Spider):
name = "nethgossip"
allowed_domains = ["nethgossip.lk"]
start_urls = ["http://nethgossip.lk/category/9"]
def __init__(self, date=None, *args, **kwargs):
super(NethGossipSpider, self).__init__(*args, **kwargs)
if date is not None:
self.dateToMatch = dparser.parse(date).date()
else:
self.dateToMatch = None
def parse(self, response):
for news_url in response.css('.col-sm-9 a::attr("href")').extract():
yield response.follow(news_url, callback=self.parse_article)
# next_page = response.css('.active+ li a::attr("href")').extract_first()
# if next_page is not None:
# yield response.follow(next_page, callback=self.parse)
def parse_article(self, response):
item = NewsSitesItem()
item["author"] = "http://nethgossip.lk"
item["title"] = response.css(".entry-title::text").extract_first()
date = response.css(
".td-post-date .td-module-date::text"
).extract_first()
if date is None:
return
date = date.replace("\r", "")
date = date.replace("\t", "")
date = date.replace("\n", "")
date = dparser.parse(date, fuzzy=True).date()
# don't add news if we are using dateToMatch and date of news
if self.dateToMatch is not None and self.dateToMatch != date:
return
img_link = response.css(".col-sm-3 img::attr(src)").extract()
item["date"] = date.strftime("%d %B, %Y")
item["imageLink"] = img_link
item["source"] = "http://nethnews.lk/"
item["news_url"] = response.url
yield item
| 32.157895 | 81 | 0.596836 |
789190fb6859251156dff8c0e5d500b66f9e7301 | 254 | py | Python | peas/util/make_test_constants.py | 13thProgression/peas-blockchain | 8e058cbfe0c1ab73f7c1ec41bedb39071c63141c | [
"Apache-2.0"
] | 2 | 2021-08-16T17:45:07.000Z | 2021-09-18T19:00:58.000Z | peas/util/make_test_constants.py | 13thProgression/peas-blockchain | 8e058cbfe0c1ab73f7c1ec41bedb39071c63141c | [
"Apache-2.0"
] | 4 | 2021-09-26T15:50:20.000Z | 2021-10-06T06:18:51.000Z | peas/util/make_test_constants.py | 13thProgression/peas-blockchain | 8e058cbfe0c1ab73f7c1ec41bedb39071c63141c | [
"Apache-2.0"
] | 3 | 2021-09-29T19:08:41.000Z | 2022-03-15T08:47:28.000Z | from typing import Dict
from peas.consensus.default_constants import DEFAULT_CONSTANTS, ConsensusConstants
def make_test_constants(test_constants_overrides: Dict) -> ConsensusConstants:
return DEFAULT_CONSTANTS.replace(**test_constants_overrides)
| 31.75 | 82 | 0.854331 |
a7bba3b6da0eab8599d6289b93c39432c901c846 | 152 | py | Python | MAC/gustavoapp/apps.py | LindseyVA/TrabA2-LP-2021 | a24dd0b5349abb77069f7f9e8ab9eb4cc57d136e | [
"MIT"
] | 5 | 2021-12-01T03:23:34.000Z | 2022-03-30T00:33:02.000Z | MAC/gustavoapp/apps.py | LindseyVA/TrabA2-LP-2021 | a24dd0b5349abb77069f7f9e8ab9eb4cc57d136e | [
"MIT"
] | null | null | null | MAC/gustavoapp/apps.py | LindseyVA/TrabA2-LP-2021 | a24dd0b5349abb77069f7f9e8ab9eb4cc57d136e | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class GustavoappConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'gustavoapp'
| 21.714286 | 56 | 0.769737 |
89a450059bf773fc8fe5c06a1ac01cd9d329169d | 4,546 | py | Python | senlin-7.0.0/senlin/objects/receiver.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 45 | 2015-10-18T02:56:50.000Z | 2022-03-01T15:28:02.000Z | senlin-7.0.0/senlin/objects/receiver.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | senlin-7.0.0/senlin/objects/receiver.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 45 | 2015-10-19T02:35:57.000Z | 2021-09-28T09:01:42.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Receiver object."""
from oslo_utils import uuidutils
from senlin.common import exception
from senlin.common import utils
from senlin.db import api as db_api
from senlin.objects import base
from senlin.objects import fields
@base.SenlinObjectRegistry.register
class Receiver(base.SenlinObject, base.VersionedObjectDictCompat):
"""Senlin receiver object."""
fields = {
'id': fields.UUIDField(),
'name': fields.StringField(),
'type': fields.StringField(),
'cluster_id': fields.StringField(nullable=True),
'actor': fields.JsonField(nullable=True),
'action': fields.StringField(nullable=True),
'params': fields.JsonField(nullable=True),
'channel': fields.JsonField(nullable=True),
'created_at': fields.DateTimeField(nullable=True),
'updated_at': fields.DateTimeField(nullable=True),
'user': fields.StringField(),
'project': fields.StringField(),
'domain': fields.StringField(nullable=True),
}
@classmethod
def create(cls, context, values):
obj = db_api.receiver_create(context, values)
return cls._from_db_object(context, cls(context), obj)
@classmethod
def find(cls, context, identity, **kwargs):
"""Find a receiver with the given identity.
:param context: An instance of the request context.
:param identity: The UUID, name or short-id of a receiver.
:param project_safe: A boolean indicating whether receiver from other
projects other than the requesting one can be
returned.
:return: A DB object of receiver or an exception `ResourceNotFound`
if no matching receiver is found.
"""
if uuidutils.is_uuid_like(identity):
receiver = cls.get(context, identity, **kwargs)
if not receiver:
receiver = cls.get_by_name(context, identity, **kwargs)
else:
receiver = cls.get_by_name(context, identity, **kwargs)
if not receiver:
receiver = cls.get_by_short_id(context, identity, **kwargs)
if not receiver:
raise exception.ResourceNotFound(type='receiver', id=identity)
return receiver
@classmethod
def get(cls, context, receiver_id, **kwargs):
obj = db_api.receiver_get(context, receiver_id, **kwargs)
return cls._from_db_object(context, cls(), obj)
@classmethod
def get_by_name(cls, context, name, **kwargs):
obj = db_api.receiver_get_by_name(context, name, **kwargs)
return cls._from_db_object(context, cls(), obj)
@classmethod
def get_by_short_id(cls, context, short_id, **kwargs):
obj = db_api.receiver_get_by_short_id(context, short_id, **kwargs)
return cls._from_db_object(context, cls(), obj)
@classmethod
def get_all(cls, context, **kwargs):
objs = db_api.receiver_get_all(context, **kwargs)
return [cls._from_db_object(context, cls(), obj) for obj in objs]
@classmethod
def update(cls, context, receiver_id, values):
values = cls._transpose_metadata(values)
obj = db_api.receiver_update(context, receiver_id, values)
return cls._from_db_object(context, cls(), obj)
@classmethod
def delete(cls, context, receiver_id):
db_api.receiver_delete(context, receiver_id)
def to_dict(self):
receiver_dict = {
'id': self.id,
'name': self.name,
'type': self.type,
'user': self.user,
'project': self.project,
'domain': self.domain,
'created_at': utils.isotime(self.created_at),
'updated_at': utils.isotime(self.updated_at),
'cluster_id': self.cluster_id,
'actor': self.actor,
'action': self.action,
'params': self.params,
'channel': self.channel,
}
return receiver_dict
| 37.262295 | 77 | 0.645842 |
7a5c2c560686f568989cc62cb2edcf9856373aaf | 16,525 | py | Python | src/helper.py | jackred/CW2_GERMAN_SIGN | 988a99d6012ae95bec778a91785c76a2ca40ba87 | [
"MIT"
] | null | null | null | src/helper.py | jackred/CW2_GERMAN_SIGN | 988a99d6012ae95bec778a91785c76a2ca40ba87 | [
"MIT"
] | null | null | null | src/helper.py | jackred/CW2_GERMAN_SIGN | 988a99d6012ae95bec778a91785c76a2ca40ba87 | [
"MIT"
] | null | null | null | # -*- Mode: Python; tab-width: 8; indent-tabs-mode: nil; python-indent-offset: 4 -*-
# vim:set et sts=4 ts=4 tw=80:
# This Source Code Form is subject to the terms of the MIT License.
# If a copy of the ML was not distributed with this
# file, You can obtain one at https://opensource.org/licenses/MIT
# author: JackRed <jackred@tuta.io>
import numpy as np
import preprocess
from sklearn.metrics import confusion_matrix, precision_score, \
recall_score, roc_auc_score, f1_score
import matplotlib.pyplot as plt
import pydotplus
from sklearn.metrics import confusion_matrix, precision_score, recall_score, \
roc_auc_score, f1_score, classification_report, confusion_matrix
from sklearn import tree
DELI = ','
FOLDER = '../data/random/'
DATA_FILE = 'r_x_train_gr_smpl'
DATA_FILE_TEST = 'r_x_test_gr_smpl'
LABEL_FILE = 'r_y_train_smpl'
LABEL_FILE_TEST = 'r_y_test_smpl'
IMG_FOLDER = '../data/img/'
SEP = '_'
TEST = 'test' + SEP
LEARN = 'learn' + SEP
EXT = '.csv'
L = 8
####
# WRITE DATA
####
def write_data_to_file(name, data, fmt='%.3f', h='', cmt=''):
# print(name)
# print(data.shape)
np.savetxt(name, data, delimiter=',', fmt=fmt, header=h, comments=cmt)
####
# READ DATA
####
def get_data_from_file(name, deli=DELI, dtype=float, col=None):
if col is None:
return np.loadtxt(name, delimiter=deli, skiprows=1, dtype=dtype)
else:
return np.loadtxt(name, delimiter=deli, skiprows=1, dtype=dtype,
usecols=col)
# give label as numpy array of integer
def get_label(sep='', i='', folder=FOLDER, label_file=LABEL_FILE,
ext=EXT):
folder = folder or FOLDER
label_file = label_file or LABEL_FILE
label = get_data_from_file(folder+label_file+sep+str(i)+ext, dtype=int)
return label
# give data as numpy array of integer
def get_data(folder=FOLDER, data_file=DATA_FILE, deli=DELI, ext=EXT, col=None):
folder = folder or FOLDER
data_file = data_file or DATA_FILE
return get_data_from_file(folder+data_file+ext, deli, col=col)
# give data as numpy of string (one cell = one row)
def get_data_raw(folder=FOLDER, data_file=DATA_FILE, ext=EXT):
folder = folder or FOLDER
data_file = data_file or DATA_FILE
return np.genfromtxt(folder+data_file+ext, dtype='str', skip_header=1)
####
# CREATE IMAGE
####
def create_image(name, d, w, h):
write_data_to_file(name, d, fmt='%d', h='P2\n%d %d 255\n' % (w, h))
# convert an array of integer to a ppm stirng
# ex: [1.0, 2.0, 9.4] => '1\n2\n9\n'
def convert_ppm_raw(row):
return '\n'.join([str(int(round(i))) for i in row])+'\n'
# convert csv data to ppm string
# ex: '1.0,5.0,255.0' => '1\n5\n255'
def from_csv_to_ppm_raw(row):
return row.replace(DELI, '\n').replace('.0', '')+'\n'
def from_row_to_csv(row):
return ', '.join(str(i) for i in row)+'\n'
def from_rows_to_csv(rows):
return [from_row_to_csv(i) for i in rows]
def create_images_from_rows(name, rows):
for i in range(len(rows)):
wh = len(rows[i]) ** (1/2)
create_image(IMG_FOLDER+'%s%i.ppm' % (name, i), rows[i], wh, wh)
####
# UTILITY
####
def print_dry(m, dry):
if not dry:
print(m)
####
# PREPROCESSING
####
def randomize_file(file_value, option, rand=0):
if option.randomize:
file_value = preprocess.randomize(file_value, rand)
return file_value
def split_file(file_value, option):
if option.split is not None:
file_value, file_value_test = preprocess.split_data(file_value,
option.split)
else:
file_value_test = None
return file_value, file_value_test
def pre_processed_file(file_value, option, rand=0):
file_value = randomize_file(file_value, option, rand)
if option.cross_validate is not None:
file_value_test = file_value
else:
file_value, file_value_test = split_file(file_value, option)
return file_value, file_value_test
def pre_processed_data_arg(data, option, rand, dry=True):
data = randomize_file(data, option, rand)
if option.contrast:
data = preprocess.contrast_images(data, option.contrast)
print_dry('data contrasted', dry)
if option.equalize:
data = preprocess.equalize_histograms(data)
print_dry('histogram equalized', dry)
if option.histogram:
data = preprocess.adjust_histograms(data)
print_dry('histogram matched', dry)
if option.size is not None:
data = preprocess.resize_batch(data, option.size)
print_dry('data resized', dry)
if option.pooling is not None:
data = preprocess.pooling_images(data, option.pooling)
print_dry('data resized', dry)
if option.segment is not None:
data = preprocess.segment_images(data, option.segment)
print_dry('data segmented', dry)
# if option.kmeans is not None:
# data = preprocess.old_segment_images(data, option.kmeans)
# print_dry('data kmeans-ed', dry)
if option.filters is not None:
data = preprocess.filter_images(data, option.filters)
print_dry('image_filterized', dry)
if option.binarise:
data = preprocess.binarise_images(data)
print_dry('data binarised', dry)
if option.extract is not None:
data = preprocess.extract_col(data, option.extract)
return data
def pre_processed_label(option, rand, sep='', i='', dry=True):
label = get_label(sep=sep, i=i, folder=option.folder,
label_file=option.label)
print_dry('label loaded', dry)
return pre_processed_file(label, option, rand)
def pre_processed_label_test(option, rand, sep='', i='', dry=True):
label = get_label(sep=sep, i=i, folder=option.folder,
label_file=option.label_test or LABEL_FILE_TEST)
print_dry('label test loaded', dry)
return randomize_file(label, option, rand)
def pre_processed_data(option, rand, dry=True):
data = get_data(folder=option.folder, data_file=option.data)
print_dry('data loaded', dry)
return pre_processed_file(data, option, rand)
def pre_processed_data_test(option, rand, dry=True):
data = get_data(folder=option.folder,
data_file=option.data_test or DATA_FILE_TEST)
print_dry('data test loaded', dry)
return randomize_file(data, option, rand)
def pre_processed_data_all(option, rand, dry=True):
data_train, data_train_test = pre_processed_data(option, rand, dry)
data_train = pre_processed_data_arg(data_train, option, rand, dry)
if option.cross_validate is not None:
return data_train, data_train
if option.test:
data_test = pre_processed_data_test(option, rand, dry)
if data_train_test is not None:
data_test = np.concatenate((data_test, data_train_test))
data_test = pre_processed_data_arg(data_test, option, rand, dry)
else:
if data_train_test is not None:
data_test = pre_processed_data_arg(data_train_test, option, rand,
dry)
else:
data_test = data_train
print(data_train.shape, data_test.shape)
return data_train, data_test
def pre_processed_label_all(option, rand, sep='', i='', dry=True):
label_train, label_train_test = pre_processed_label(option, rand, sep, i,
dry)
if option.cross_validate is not None:
return label_train, label_train
if option.test:
label_test = pre_processed_label_test(option, rand, sep, i, dry)
if label_train_test is not None:
label_test = np.concatenate((label_test, label_train_test))
else:
label_test = label_train \
if label_train_test is None \
else label_train_test
print(label_train.shape, label_test.shape)
return label_train, label_test
####
# MATRIX PRINTING
####
def print_line_matrix(lng):
print('-' * ((L+1) * (lng+2) + 1))
def format_string(a):
return str(a)[:L].center(L)
def format_row(l):
return '|'.join([format_string(i) for i in l])
def print_matrix(m, lb):
print_line_matrix(len(lb))
print('|' + format_string('lb\pr') + '|' + format_row(lb) + '|'
+ format_string('total') + '|')
print_line_matrix(len(lb))
for i in range(len(m)):
print('|' + format_string(lb[i]) + '|' + format_row(m[i]) + '|'
+ format_string(sum(m[i])) + '|')
print_line_matrix(len(lb))
print('|' + format_string('total') + '|'
+ format_row(sum(m)) + '|'
+ format_string(m.sum()) + '|')
print_line_matrix(len(lb))
####
# Measure and Precision
####
def matrix_confusion(label, predicted, lb):
matrix = confusion_matrix(label, predicted)
# print(100 * sum(max(matrix[:,i]) for i in range(len(matrix))) / len(label))
# print(list(max(matrix[:,i]) for i in range(len(matrix))))
print_matrix(matrix, lb)
def compare_class(predicted, label):
unique_p, counts_p = np.unique(predicted, return_counts=True)
found = dict(zip(unique_p, counts_p))
unique_l, counts_l = np.unique(label, return_counts=True)
label_nb = dict(zip(unique_l, counts_l))
print('found: ', found)
print('label: ', label_nb)
matrix_confusion(label, predicted, unique_l)
def compare_class_true_positive(predicted, label, specific=[]):
u_matrix = _get_true_matrix(predicted, label)
precision = precision_score(label, predicted, average=None)
recall = recall_score(label, predicted, average=None)
f1 = f1_score(label, predicted, average=None)
for elem, p, r, f in zip(u_matrix, precision, recall, f1):
if len(specific) == 0 or elem in specific:
print('matrix true', elem)
print_matrix(np.array(u_matrix[elem]), np.array([0, 1]))
print('precision score', p)
print('recall score', r)
print('F measure', f)
print()
def score_to_class(score):
return np.array([np.argmax(i) for i in score])
def print_detail_measure(name, arr, detail=False):
print(name, ':', sum(arr) / len(arr))
if detail:
for i in range(len(arr)):
print('\t', i, ':', arr[i])
def measure(predicted, label, confidence, detail=False):
print_detail_measure('precision score',
precision_score(label, predicted, average=None),
detail)
print_detail_measure('recall score',
recall_score(label, predicted, average=None),
detail)
print_detail_measure('F measure',
f1_score(label, predicted, average=None),
detail)
scores = np.array([])
for elem in confidence:
scores = np.append(scores, np.amax(elem))
true = np.array([], dtype=int)
for exp, got in zip(label, predicted):
val = 0
if exp == got:
val = 1
true = np.append(true, int(val))
print('ROC Area score', roc_auc_score(true, scores))
def precision(predicted, label, detail=False):
if not detail:
return sum(predicted == label) / len(label)
else:
unique = np.unique(label)
res = []
for i in unique:
res.append(np.logical_and(predicted == i, label == i))
return res
def f_measure(predicted, label, detail=False):
score = f1_score(label, predicted, average=None)
if not detail:
return sum(score) / len(np.union1d(predicted, label))
return score
def _get_true_matrix(predicted, label):
unique_p, counts_p = np.unique(predicted, return_counts=True)
unique_l, counts_l = np.unique(label, return_counts=True)
matrix = confusion_matrix(label, predicted, unique_l)
u_matrix = {}
for elem in unique_l:
u_matrix[elem] = [[0, 0], [0, 0]]
for elem in u_matrix:
for i in range(len(unique_l)):
for j in range(len(unique_l)):
if i == j and i == elem:
u_matrix[elem][0][0] = matrix[i][j]
elif i == elem:
u_matrix[elem][0][1] += matrix[i][j]
elif j == elem:
u_matrix[elem][1][0] += matrix[i][j]
else:
u_matrix[elem][1][1] += matrix[i][j]
return u_matrix
def true_and_false_positive(predicted, label, detail=False):
score = recall_score(label, predicted, average=None)
print('score', score)
if not detail:
res = sum(score) / len(np.union1d(predicted, label))
return res, 1 - res
return score, [1 - i for i in score]
def roc_score(predicted, label, confidence):
scores = np.array([])
for elem in confidence:
scores = np.append(scores, np.amax(elem))
true = np.array([], dtype=int)
for exp, got in zip(label, predicted):
val = 0
if exp == got:
val = 1
true = np.append(true, int(val))
return roc_auc_score(true, scores)
def all_measure(predicted, label_test):
accs = precision(predicted, label_test)
f_measures = f_measure(predicted, label_test)
true_positives, false_positives = true_and_false_positive(predicted,
label_test)
return {'accs': accs,
'f_measures': f_measures,
'true_positives': true_positives,
'false_positives': false_positives}
def extract_measure(measures, key):
return [measure[key] for measure in measures]
def extract_measures(measures):
return {key: extract_measure(measures, key) for key in measures[0]}
def print_measures(measures):
for k in measures:
print(k, measures[k])
def mean_measures(measures):
return {k[:-1]: np.mean(measures[k]) for k in measures}
####
# Cross Validation
####
def cross_validate(fn, data, label, k=10, dry=False,
**kwargs):
measures = []
datas = np.array(np.array_split(data, k))
print(len(datas), [i.shape for i in datas])
labels = np.array(np.array_split(label, k))
for i in range(k):
print('fold %d' % i)
data_train = np.concatenate(np.concatenate((datas[:i], datas[i+1:])))
label_train = np.concatenate(np.concatenate((labels[:i],
labels[i+1:])))
data_test = datas[i]
label_test = labels[i]
predicted = fn(data_train, label_train, data_test, **kwargs)
measures.append(all_measure(predicted, label_test))
measures = extract_measures(measures)
print_measures(measures)
return mean_measures(measures)
def run_function(fn, cross, data_train, label_train, data_test, label_test,
**kwargs):
if cross is not None:
print('cross_validating')
return cross_validate(fn, data_train, label_train, cross, **kwargs)
else:
print('training then testing')
predicted = fn(data_train, label_train, data_test, **kwargs)
measures = [all_measure(predicted, label_test)]
measures = extract_measures(measures)
compare_class(predicted, label_test)
return mean_measures(measures)
COLORS = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'w']
def plot_experiment(title, x_label, to_plot):
i = 0
plt.title(title)
plt.xlabel(x_label)
plt.ylabel('%')
for k in to_plot:
plt.plot(to_plot[k], color=COLORS[i], label=k)
i += 1
plt.legend()
plt.show()
def plot_experiment_server(title, x_label, to_plot):
i = 0
#plt.title(title)
plt.xlabel(x_label)
plt.ylabel('%')
for k in to_plot:
plt.plot(to_plot[k], color=COLORS[i], label=k)
i += 1
plt.legend()
plt.savefig(title + ".png")
def print_result(label, predicted):
print(classification_report(label, predicted))
print(confusion_matrix(label, predicted))
def tree_to_png(model):
className = ["speed limit 60", "speed limit 80", "speed limit 80 lifted", "right of way at crossing",
"right of way in general", "give way", "stop", "no speed limit general", "turn right down",
"turn left down"]
featureName = []
for i in range(0, 2304):
featureName.append(i)
dot_data = tree.export_graphviz(
model,
out_file=None,
feature_names=featureName,
class_names=className
)
graph = pydotplus.graph_from_dot_data(dot_data)
graph.write_png("tree.png")
| 32.149805 | 108 | 0.630802 |
0eb05c8c90a3cbc40502e181c7995281d1308b0e | 2,414 | py | Python | app/models.py | OwinoLucas/blog | 3ef357c6421c2963dba8ba07aa6abee897f81721 | [
"Unlicense"
] | null | null | null | app/models.py | OwinoLucas/blog | 3ef357c6421c2963dba8ba07aa6abee897f81721 | [
"Unlicense"
] | null | null | null | app/models.py | OwinoLucas/blog | 3ef357c6421c2963dba8ba07aa6abee897f81721 | [
"Unlicense"
] | null | null | null | from . import db
from werkzeug.security import generate_password_hash,check_password_hash
from flask_login import UserMixin
from . import login_manager
from datetime import datetime
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: reload a User from the id stored in the session."""
    return User.query.get(int(user_id))
class User(UserMixin,db.Model):
    """
    class that contains user objects
    """
    __tablename__ = 'users'
    # Primary key.
    id = db.Column(db.Integer,primary_key = True)
    username = db.Column(db.String(255))
    # E-mail is unique and indexed; presumably used as the login identifier.
    email = db.Column(db.String(255),unique = True,index = True)
    bio = db.Column(db.String(255))
    # One-to-many links to content authored by this user.
    post = db.relationship('Post', backref='user', lazy='dynamic')
    comment = db.relationship('Comment', backref='user', lazy='dynamic')
    profile_pic_path = db.Column(db.String())
    # Werkzeug password hash; the clear-text password is never stored.
    pass_secure = db.Column(db.String(255))

    @property
    def password(self):
        # Write-only attribute: reading the password always raises.
        raise AttributeError('You cannot read the password attribute')

    @password.setter
    def password(self, password):
        # Hash on assignment so only the digest is persisted.
        self.pass_secure = generate_password_hash(password)

    def verify_password(self,password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.pass_secure,password)

    def __repr__(self):
        return f'User {self.username}'
class Post(db.Model):
    """
    Blog post model.

    Columns: auto-increment primary key ``id``, ``title``, body ``content``,
    UTC ``date_posted`` timestamp, the author's ``user_id`` foreign key, and
    a dynamic one-to-many ``comment`` relationship.
    """
    __tablename__ = 'posts'
    id = db.Column(db.Integer, primary_key = True)
    title = db.Column(db.String(255))
    content = db.Column(db.Text)
    date_posted = db.Column(db.DateTime, default = datetime.utcnow)
    user_id = db.Column(db.Integer,db.ForeignKey('users.id'))
    comment = db.relationship('Comment', backref='post', lazy='dynamic')

    def save_post(self):
        """Persist this post in the database."""
        db.session.add(self)
        db.session.commit()

    @classmethod
    def get_post(cls,id):
        """Return the posts whose primary key matches *id*, as a list.

        Bug fix: Post has no ``post_id`` column, so the previous
        ``filter_by(post_id=id)`` raised on every call; filter on the
        primary key ``id`` instead (still returns a list for callers).
        """
        posts = cls.query.filter_by(id=id).all()
        return posts

    def __repr__(self):
        return f"Post('{self.title}', '{self.date_posted}')"
class Comment(db.Model):
    """
    class that contains comment objects
    """
    __tablename__ = 'comments'
    # Primary key.
    id = db.Column(db.Integer, primary_key = True)
    # Comment body text.
    comment = db.Column(db.Text)
    # Creation timestamp (UTC).
    date_posted = db.Column(db.DateTime, default = datetime.utcnow)
    # Author and parent-post foreign keys.
    user_id = db.Column(db.Integer,db.ForeignKey('users.id'))
    post_id = db.Column(db.Integer,db.ForeignKey('posts.id'))

    def __repr__(self):
        return f"Comment('{self.comment}','{self.date_posted}')"
| 28.069767 | 72 | 0.673571 |
2801058b1511acbf898d6ac5fe585203cdc91b07 | 7,011 | py | Python | onmt/Trainer.py | NaLiuAnna/OpenNMT-py | 881969aa2c662d0e578341d98db72ad276a9ee53 | [
"MIT"
] | null | null | null | onmt/Trainer.py | NaLiuAnna/OpenNMT-py | 881969aa2c662d0e578341d98db72ad276a9ee53 | [
"MIT"
] | null | null | null | onmt/Trainer.py | NaLiuAnna/OpenNMT-py | 881969aa2c662d0e578341d98db72ad276a9ee53 | [
"MIT"
] | null | null | null | from __future__ import division
"""
This is the loadable seq2seq trainer library that is
in charge of training details, loss compute, and statistics.
See train.py for a use case of this library.
Note!!! To make this a general library, we implement *only*
mechanism things here(i.e. what to do), and leave the strategy
things to users(i.e. how to do it). Also see train.py(one of the
users of this library) for the strategy things we do.
"""
import time
import sys
import math
import torch
import torch.nn as nn
import onmt
import onmt.modules
class Statistics(object):
    """Accumulator for loss/accuracy bookkeeping during training and validation.

    Tracks total loss, target-word and correct-word counts, plus source-word
    throughput and wall-clock time since construction.
    """
    def __init__(self, loss=0, n_words=0, n_correct=0):
        self.loss = loss
        self.n_words = n_words
        self.n_correct = n_correct
        self.n_src_words = 0
        self.start_time = time.time()

    def update(self, stat):
        """Fold another Statistics object into this one."""
        self.loss = self.loss + stat.loss
        self.n_words = self.n_words + stat.n_words
        self.n_correct = self.n_correct + stat.n_correct

    def accuracy(self):
        """Word-level accuracy as a percentage."""
        return (self.n_correct / self.n_words) * 100

    def ppl(self):
        """Perplexity; the per-word loss is clamped at 100 to avoid overflow."""
        per_word_loss = min(self.loss / self.n_words, 100)
        return math.exp(per_word_loss)

    def elapsed_time(self):
        """Seconds elapsed since this object was created."""
        return time.time() - self.start_time

    def output(self, epoch, batch, n_batches, start):
        """Print a one-line progress report and flush stdout."""
        elapsed = self.elapsed_time()
        template = ("Epoch %2d, %5d/%5d; acc: %6.2f; ppl: %6.2f; " +
                    "%3.0f src tok/s; %3.0f tgt tok/s; %6.0f s elapsed")
        values = (epoch, batch, n_batches,
                  self.accuracy(),
                  self.ppl(),
                  self.n_src_words / (elapsed + 1e-5),
                  self.n_words / (elapsed + 1e-5),
                  time.time() - start)
        print(template % values)
        sys.stdout.flush()

    def log(self, prefix, experiment, lr):
        """Push the current metrics to an external experiment tracker."""
        elapsed = self.elapsed_time()
        experiment.add_scalar_value(prefix + "_ppl", self.ppl())
        experiment.add_scalar_value(prefix + "_accuracy", self.accuracy())
        experiment.add_scalar_value(prefix + "_tgtper", self.n_words / elapsed)
        experiment.add_scalar_value(prefix + "_lr", lr)
class Trainer(object):
    # Orchestrates one training job: epoch loop with truncated BPTT,
    # validation, learning-rate updates and checkpointing. The actual loss
    # computation is delegated to the train/valid LossCompute objects.
    def __init__(self, model, train_iter, valid_iter,
                 train_loss, valid_loss, optim,
                 trunc_size, shard_size):
        """
        Args:
            model: the seq2seq model.
            train_iter: the train data iterator.
            valid_iter: the validate data iterator.
            train_loss: the train side LossCompute object for computing loss.
            valid_loss: the valid side LossCompute object for computing loss.
            optim: the optimizer responsible for lr update.
            trunc_size: a batch is divided by several truncs of this size.
            shard_size: compute loss in shards of this size for efficiency.
        """
        # Basic attributes.
        self.model = model
        self.train_iter = train_iter
        self.valid_iter = valid_iter
        self.train_loss = train_loss
        self.valid_loss = valid_loss
        self.optim = optim
        self.trunc_size = trunc_size
        self.shard_size = shard_size

        # Set model in training mode.
        self.model.train()

    def train(self, epoch, report_func=None):
        """ Called for each epoch to train.

        Returns the accumulated Statistics for the whole epoch; report_func
        (if given) is invoked after each batch with running statistics.
        """
        total_stats = Statistics()
        report_stats = Statistics()

        for i, batch in enumerate(self.train_iter):
            target_size = batch.tgt.size(0)
            # Truncated BPTT: 0/None trunc_size means no truncation.
            trunc_size = self.trunc_size if self.trunc_size else target_size

            dec_state = None
            _, src_lengths = batch.src

            src = onmt.IO.make_features(batch, 'src')
            tgt_outer = onmt.IO.make_features(batch, 'tgt')
            report_stats.n_src_words += src_lengths.sum()

            for j in range(0, target_size-1, trunc_size):
                # 1. Create truncated target.
                tgt = tgt_outer[j: j + trunc_size]

                # 2. F-prop all but generator.
                self.model.zero_grad()
                outputs, attns, dec_state = \
                    self.model(src, tgt, src_lengths, dec_state)

                # 3. Compute loss in shards for memory efficiency.
                batch_stats = self.train_loss.sharded_compute_loss(
                        batch, outputs, attns, j,
                        trunc_size, self.shard_size)

                # 4. Update the parameters and statistics.
                self.optim.step()
                total_stats.update(batch_stats)
                report_stats.update(batch_stats)

            # If truncated, don't backprop fully.
            if dec_state is not None:
                # dec_state.detach()
                dec_state.requires_grad = False

            if report_func is not None:
                report_stats = report_func(
                        epoch, i, len(self.train_iter),
                        total_stats.start_time, self.optim.lr, report_stats)

        return total_stats

    def validate(self):
        """ Called for each epoch to validate.

        Runs the model in eval mode over the validation iterator and
        returns the accumulated Statistics; restores train mode afterwards.
        """
        # Set model in validating mode.
        self.model.eval()

        stats = Statistics()
        for batch in self.valid_iter:
            _, src_lengths = batch.src
            src = onmt.IO.make_features(batch, 'src')
            tgt = onmt.IO.make_features(batch, 'tgt')

            # F-prop through the model.
            outputs, attns, _ = self.model(src, tgt, src_lengths)

            # Compute loss.
            batch_stats = self.valid_loss.monolithic_compute_loss(
                    batch, outputs, attns)

            # Update statistics.
            stats.update(batch_stats)

        # Set model back to training mode.
        self.model.train()

        return stats

    def epoch_step(self, ppl, epoch):
        """ Called for each epoch to update learning rate. """
        return self.optim.updateLearningRate(ppl, epoch)

    def drop_checkpoint(self, opt, epoch, fields, valid_stats):
        """ Called conditionally each epoch to save a snapshot. """
        # Unwrap DataParallel if present so the raw module is serialized.
        real_model = (self.model.module
                      if isinstance(self.model, nn.DataParallel)
                      else self.model)
        real_generator = (real_model.generator.module
                          if isinstance(real_model.generator, nn.DataParallel)
                          else real_model.generator)

        model_state_dict = real_model.state_dict()
        # Generator weights are stored separately from the rest of the model.
        model_state_dict = {k: v for k, v in model_state_dict.items()
                            if 'generator' not in k}
        generator_state_dict = real_generator.state_dict()
        checkpoint = {
            'model': model_state_dict,
            'generator': generator_state_dict,
            'vocab': onmt.IO.save_vocab(fields),
            'opt': opt,
            'epoch': epoch,
            'optim': self.optim
        }
        torch.save(checkpoint,
                   '%s_acc_%.2f_ppl_%.2f_e%d.pt'
                   % (opt.save_model, valid_stats.accuracy(),
                      valid_stats.ppl(), epoch))
| 35.231156 | 78 | 0.578519 |
7ace633af44d0f44272e4f8361ae44876aef8da5 | 14,411 | py | Python | wf_psf/train_utils.py | tobias-liaudat/wf-psf | 0ff1a12d06c46bd8599061d227785393fb528d76 | [
"MIT"
] | 7 | 2022-03-10T10:49:01.000Z | 2022-03-17T16:06:12.000Z | wf_psf/train_utils.py | tobias-liaudat/wf-psf | 0ff1a12d06c46bd8599061d227785393fb528d76 | [
"MIT"
] | null | null | null | wf_psf/train_utils.py | tobias-liaudat/wf-psf | 0ff1a12d06c46bd8599061d227785393fb528d76 | [
"MIT"
] | null | null | null | import numpy as np
import tensorflow as tf
from wf_psf.tf_psf_field import build_PSF_model
from wf_psf.utils import NoiseEstimator
class L1ParamScheduler(tf.keras.callbacks.Callback):
    """Keras callback that re-schedules the model's L1 rate every epoch.

    Arguments:
      l1_schedule_rule: callable ``(epoch_index, current_l1_rate) -> new_rate``
          where the epoch index is an integer counted from 0 and both rates
          are floats.
    """

    def __init__(self, l1_schedule_rule):
        super().__init__()
        self.l1_schedule_rule = l1_schedule_rule

    def on_epoch_begin(self, epoch, logs=None):
        # Read the model's current L1 rate, map it through the schedule,
        # and write the result back before this epoch starts.
        current_rate = float(tf.keras.backend.get_value(self.model.l1_rate))
        self.model.set_l1_rate(self.l1_schedule_rule(epoch, current_rate))
def l1_schedule_rule(epoch_n, l1_rate):
    """Halve the L1 rate on every 10th epoch (except epoch 0), else keep it.

    Prints the new rate whenever it changes.
    """
    if epoch_n == 0 or epoch_n % 10 != 0:
        return l1_rate
    scheduled_l1_rate = l1_rate / 2
    print("\nEpoch %05d: L1 rate is %0.4e." % (epoch_n, scheduled_l1_rate))
    return scheduled_l1_rate
def general_train_cycle(
    tf_semiparam_field,
    inputs,
    outputs,
    val_data,
    batch_size,
    l_rate_param,
    l_rate_non_param,
    n_epochs_param,
    n_epochs_non_param,
    param_optim=None,
    non_param_optim=None,
    param_loss=None,
    non_param_loss=None,
    param_metrics=None,
    non_param_metrics=None,
    param_callback=None,
    non_param_callback=None,
    general_callback=None,
    first_run=False,
    cycle_def='complete',
    use_sample_weights=False,
    verbose=1
):
    """ Function to do a BCD iteration on the model.
    Define the model optimisation.

    For the parametric part we are using:
    ``l_rate_param = 1e-2``, ``n_epochs_param = 20``.
    For the non-parametric part we are using:
    ``l_rate_non_param = 1.0``, ``n_epochs_non_param = 100``.

    Parameters
    ----------
    tf_semiparam_field: tf.keras.Model
        The model to be trained.
    inputs: Tensor or list of tensors
        Inputs used for Model.fit()
    outputs: Tensor
        Outputs used for Model.fit()
    val_data: Tuple
        Validation data used for Model.fit().
        Tuple of input, output validation data
    batch_size: int
        Batch size for the training.
    l_rate_param: float
        Learning rate for the parametric part
    l_rate_non_param: float
        Learning rate for the non-parametric part
    n_epochs_param: int
        Number of epochs for the parametric part
    n_epochs_non_param: int
        Number of epochs for the non-parametric part
    param_optim: Tensorflow optimizer
        Optimizer for the parametric part.
        Optional, default is the Adam optimizer
    non_param_optim: Tensorflow optimizer
        Optimizer for the non-parametric part.
        Optional, default is the Adam optimizer
    param_loss: Tensorflow loss
        Loss function for the parametric part.
        Optional, default is the MeanSquaredError() loss
    non_param_loss: Tensorflow loss
        Loss function for the non-parametric part.
        Optional, default is the MeanSquaredError() loss
    param_metrics: Tensorflow metrics
        Metrics for the parametric part.
        Optional, default is the MeanSquaredError() metric
    non_param_metrics: Tensorflow metrics
        Metrics for the non-parametric part.
        Optional, default is the MeanSquaredError() metric
    param_callback: Tensorflow callback
        Callback for the parametric part only.
        Optional, default is no callback
    non_param_callback: Tensorflow callback
        Callback for the non-parametric part only.
        Optional, default is no callback
    general_callback: Tensorflow callback
        Callback shared for both the parametric and non-parametric parts.
        Optional, default is no callback
    first_run: bool
        If True, it is the first iteration of the model training.
        The Non-parametric part is not considered in the first parametric training.
    cycle_def: string
        Train cycle definition. It can be: `parametric`, `non-parametric`, `complete`.
        Default is `complete`.
    use_sample_weights: bool
        If True, the sample weights are used for the training.
        The sample weights are computed as the inverse noise estimated variance
    verbose: int
        Verbosity mode used for the training procedure.
        If a log of the training is being saved, `verbose=2` is recommended.

    Returns
    -------
    tf_semiparam_field: tf.keras.Model
        Trained Tensorflow model.
    hist_param: Tensorflow's History object
        History of the parametric training.
    hist_non_param: Tensorflow's History object
        History of the non-parametric training.
    """
    # Initialize return variables
    hist_param = None
    hist_non_param = None

    # Parametric train

    # Define Loss
    if param_loss is None:
        loss = tf.keras.losses.MeanSquaredError()
    else:
        loss = param_loss

    # Define optimisers
    if param_optim is None:
        optimizer = tf.keras.optimizers.Adam(
            learning_rate=l_rate_param,
            beta_1=0.9,
            beta_2=0.999,
            epsilon=1e-07,
            amsgrad=False,
        )
    else:
        optimizer = param_optim

    # Define metrics
    if param_metrics is None:
        metrics = [tf.keras.metrics.MeanSquaredError()]
    else:
        metrics = param_metrics

    # Define callbacks
    # Shared (general) callbacks are concatenated with the part-specific ones.
    if param_callback is None and general_callback is None:
        callbacks = None
    else:
        if general_callback is None:
            callbacks = param_callback
        elif param_callback is None:
            callbacks = general_callback
        else:
            callbacks = general_callback + param_callback

    # Calculate sample weights
    if use_sample_weights:
        # Generate standard deviation estimator
        img_dim = (outputs.shape[1], outputs.shape[2])
        win_rad = np.ceil(outputs.shape[1] / 3.33)
        std_est = NoiseEstimator(img_dim=img_dim, win_rad=win_rad)
        # Estimate noise std_dev
        imgs_std = np.array([std_est.estimate_noise(_im) for _im in outputs])
        # Calculate weights
        variances = imgs_std**2
        # Define sample weight strategy
        # NOTE(review): strategy_opt is hard-coded to 1 (inverse variance,
        # median-scaled); strategy 0 is an unreachable alternative kept here.
        strategy_opt = 1
        if strategy_opt == 0:
            # Parameters
            max_w = 2.
            min_w = 0.1
            # Epsilon is to avoid outliers
            epsilon = np.median(variances) * 0.1
            w = 1 / (variances + epsilon)
            scaled_w = (w - np.min(w)) / (np.max(w) - np.min(w))  # Transform to [0,1]
            scaled_w = scaled_w * (max_w - min_w) + min_w  # Transform to [min_w, max_w]
            scaled_w = scaled_w + (1 - np.mean(scaled_w))  # Adjust the mean to 1
            scaled_w[scaled_w < min_w] = min_w
            # Save the weights
            sample_weight = scaled_w
        elif strategy_opt == 1:
            # Use inverse variance for weights
            # Then scale the values by the median
            sample_weight = 1 / variances
            sample_weight /= np.median(sample_weight)
    else:
        sample_weight = None

    # Define the training cycle
    if cycle_def == 'parametric' or cycle_def == 'complete' or cycle_def == 'only-parametric':
        # If it is the first run
        if first_run:
            # Set the non-parametric model to zero
            # With alpha to zero its already enough
            tf_semiparam_field.set_zero_nonparam()

        if cycle_def == 'only-parametric':
            # Set the non-parametric part to zero
            tf_semiparam_field.set_zero_nonparam()

        # Set the trainable layer
        tf_semiparam_field.set_trainable_layers(param_bool=True, nonparam_bool=False)

        # Compile the model for the first optimisation
        tf_semiparam_field = build_PSF_model(
            tf_semiparam_field,
            optimizer=optimizer,
            loss=loss,
            metrics=metrics,
        )

        # Train the parametric part
        print('Starting parametric update..')
        hist_param = tf_semiparam_field.fit(
            x=inputs,
            y=outputs,
            batch_size=batch_size,
            epochs=n_epochs_param,
            validation_data=val_data,
            callbacks=callbacks,
            sample_weight=sample_weight,
            verbose=verbose
        )

    ## Non parametric train
    # Define the training cycle
    if cycle_def == 'non-parametric' or cycle_def == 'complete' or cycle_def == 'only-non-parametric':
        # If it is the first run
        if first_run:
            # Set the non-parametric model to non-zero
            # With alpha to zero its already enough
            tf_semiparam_field.set_nonzero_nonparam()

        if cycle_def == 'only-non-parametric':
            # Set the parametric layer to zero
            coeff_mat = tf_semiparam_field.get_coeff_matrix()
            tf_semiparam_field.assign_coeff_matrix(tf.zeros_like(coeff_mat))

        # Set the non parametric layer to non trainable
        tf_semiparam_field.set_trainable_layers(param_bool=False, nonparam_bool=True)

        # Define Loss
        if non_param_loss is None:
            loss = tf.keras.losses.MeanSquaredError()
        else:
            loss = non_param_loss

        # Define optimiser
        if non_param_optim is None:
            optimizer = tf.keras.optimizers.Adam(
                learning_rate=l_rate_non_param,
                beta_1=0.9,
                beta_2=0.999,
                epsilon=1e-07,
                amsgrad=False,
            )
        else:
            optimizer = non_param_optim

        # Define metric
        if non_param_metrics is None:
            metrics = [tf.keras.metrics.MeanSquaredError()]
        else:
            metrics = non_param_metrics

        # Define callbacks
        if non_param_callback is None and general_callback is None:
            callbacks = None
        else:
            if general_callback is None:
                callbacks = non_param_callback
            elif non_param_callback is None:
                callbacks = general_callback
            else:
                callbacks = general_callback + non_param_callback

        # Compile the model again for the second optimisation
        tf_semiparam_field = build_PSF_model(
            tf_semiparam_field,
            optimizer=optimizer,
            loss=loss,
            metrics=metrics,
        )

        # Train the parametric part
        print('Starting non-parametric update..')
        hist_non_param = tf_semiparam_field.fit(
            x=inputs,
            y=outputs,
            batch_size=batch_size,
            epochs=n_epochs_non_param,
            validation_data=val_data,
            callbacks=callbacks,
            sample_weight=sample_weight,
            verbose=verbose
        )

    return tf_semiparam_field, hist_param, hist_non_param
def param_train_cycle(
    tf_semiparam_field,
    inputs,
    outputs,
    val_data,
    batch_size,
    l_rate,
    n_epochs,
    param_optim=None,
    param_loss=None,
    param_metrics=None,
    param_callback=None,
    general_callback=None,
    use_sample_weights=False,
    verbose=1
):
    """ Training cycle for parametric model.

    Single-part counterpart of general_train_cycle: compiles the model with
    the given (or default Adam/MSE) optimizer, loss and metrics, optionally
    weights samples by their inverse estimated noise variance, fits, and
    returns ``(tf_semiparam_field, hist_param)``.
    """
    # Define Loss
    if param_loss is None:
        loss = tf.keras.losses.MeanSquaredError()
    else:
        loss = param_loss

    # Define optimiser
    if param_optim is None:
        optimizer = tf.keras.optimizers.Adam(
            learning_rate=l_rate, beta_1=0.9, beta_2=0.999, epsilon=1e-07, amsgrad=False
        )
    else:
        optimizer = param_optim

    # Define metrics
    if param_metrics is None:
        metrics = [tf.keras.metrics.MeanSquaredError()]
    else:
        metrics = param_metrics

    # Define callbacks
    # Shared (general) callbacks are concatenated with the part-specific ones.
    if param_callback is None and general_callback is None:
        callbacks = None
    else:
        if general_callback is None:
            callbacks = param_callback
        elif param_callback is None:
            callbacks = general_callback
        else:
            callbacks = general_callback + param_callback

    # Calculate sample weights
    if use_sample_weights:
        # Generate standard deviation estimator
        img_dim = (outputs.shape[1], outputs.shape[2])
        win_rad = np.ceil(outputs.shape[1] / 3.33)
        std_est = NoiseEstimator(img_dim=img_dim, win_rad=win_rad)
        # Estimate noise std_dev
        imgs_std = np.array([std_est.estimate_noise(_im) for _im in outputs])
        # Calculate weights
        variances = imgs_std**2

        # NOTE(review): strategy_opt is hard-coded to 1 (inverse variance,
        # median-scaled); strategy 0 is an unreachable alternative kept here.
        strategy_opt = 1
        if strategy_opt == 0:
            # Parameters
            max_w = 2.
            min_w = 0.1
            # Epsilon is to avoid outliers
            epsilon = np.median(variances) * 0.1
            w = 1 / (variances + epsilon)
            scaled_w = (w - np.min(w)) / (np.max(w) - np.min(w))  # Transform to [0,1]
            scaled_w = scaled_w * (max_w - min_w) + min_w  # Transform to [min_w, max_w]
            scaled_w = scaled_w + (1 - np.mean(scaled_w))  # Adjust the mean to 1
            scaled_w[scaled_w < min_w] = min_w
            # Save the weights
            sample_weight = scaled_w
        elif strategy_opt == 1:
            # Use inverse variance for weights
            # Then scale the values by the median
            sample_weight = 1 / variances
            sample_weight /= np.median(sample_weight)
    else:
        sample_weight = None

    # Compile the model for the first optimisation
    tf_semiparam_field = build_PSF_model(
        tf_semiparam_field, optimizer=optimizer, loss=loss, metrics=metrics
    )

    # Train the parametric part
    print('Starting parametric update..')
    hist_param = tf_semiparam_field.fit(
        x=inputs,
        y=outputs,
        batch_size=batch_size,
        epochs=n_epochs,
        validation_data=val_data,
        callbacks=callbacks,
        sample_weight=sample_weight,
        verbose=verbose
    )

    return tf_semiparam_field, hist_param
| 33.281755 | 102 | 0.631185 |
e0c09534fcb763316069bcce83c4e94dc2843c8a | 5,211 | py | Python | test/functional/wallet-hd.py | PERSHYANCOIN/PERSHYANCOIN | bbadf90495732ecdbf5ab9a27e84e1dbdaff117d | [
"MIT"
] | 1 | 2018-02-21T07:10:01.000Z | 2018-02-21T07:10:01.000Z | test/functional/wallet-hd.py | pershyancoin/pershyancoin | bbadf90495732ecdbf5ab9a27e84e1dbdaff117d | [
"MIT"
] | 2 | 2018-02-12T22:00:38.000Z | 2018-02-12T22:01:03.000Z | test/functional/wallet-hd.py | PERSHYANCOIN/PERSHYANCOIN | bbadf90495732ecdbf5ab9a27e84e1dbdaff117d | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2016 The Pershyancoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
from test_framework.test_framework import PershyancoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
)
import shutil
import os
class WalletHDTest(PershyancoinTestFramework):
    # Functional test of HD wallet behaviour on two connected regtest nodes:
    # key-path derivation, backup/restore determinism, and rescans.
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 2
        # Node 1 runs with an empty keypool so every address is derived fresh.
        self.extra_args = [[], ['-keypool=0']]

    def run_test (self):
        tmpdir = self.options.tmpdir

        # Make sure can't switch off usehd after wallet creation
        self.stop_node(1)
        self.assert_start_raises_init_error(1, ['-usehd=0'], 'already existing HD wallet')
        self.start_node(1)
        connect_nodes_bi(self.nodes, 0, 1)

        # Make sure we use hd, keep masterkeyid
        masterkeyid = self.nodes[1].getwalletinfo()['hdmasterkeyid']
        assert_equal(len(masterkeyid), 40)

        # create an internal key
        change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV= self.nodes[1].validateaddress(change_addr)
        assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key

        # Import a non-HD private key in the HD wallet
        non_hd_add = self.nodes[0].getnewaddress()
        self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))

        # This should be enough to keep the master key and the non-HD key
        self.nodes[1].backupwallet(tmpdir + "/hd.bak")
        #self.nodes[1].dumpwallet(tmpdir + "/hd.dump")

        # Derive some HD addresses and remember the last
        # Also send funds to each add
        self.nodes[0].generate(101)
        hd_add = None
        num_hd_adds = 300
        for i in range(num_hd_adds):
            hd_add = self.nodes[1].getnewaddress()
            hd_info = self.nodes[1].validateaddress(hd_add)
            assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'")
            assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
            self.nodes[0].sendtoaddress(hd_add, 1)
            self.nodes[0].generate(1)
        self.nodes[0].sendtoaddress(non_hd_add, 1)
        self.nodes[0].generate(1)

        # create an internal key (again)
        change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV= self.nodes[1].validateaddress(change_addr)
        assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key

        self.sync_all()
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)

        self.log.info("Restore backup ...")
        self.stop_node(1)
        # we need to delete the complete regtest directory
        # otherwise node1 would auto-recover all funds in flag the keypool keys as used
        shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
        shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
        shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallets/wallet.dat"))
        self.start_node(1)

        # Assert that derivation is deterministic
        hd_add_2 = None
        for _ in range(num_hd_adds):
            hd_add_2 = self.nodes[1].getnewaddress()
            hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
            assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(_)+"'")
            assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
        assert_equal(hd_add, hd_add_2)
        connect_nodes_bi(self.nodes, 0, 1)
        self.sync_all()

        # Needs rescan
        self.stop_node(1)
        self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)

        # Try a RPC based rescan
        self.stop_node(1)
        shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
        shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
        shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallet.dat"))
        self.start_node(1, extra_args=self.extra_args[1])
        connect_nodes_bi(self.nodes, 0, 1)
        self.sync_all()
        out = self.nodes[1].rescanblockchain(0, 1)
        assert_equal(out['start_height'], 0)
        assert_equal(out['stop_height'], 1)
        out = self.nodes[1].rescanblockchain()
        assert_equal(out['start_height'], 0)
        assert_equal(out['stop_height'], self.nodes[1].getblockcount())
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)

        # send a tx and make sure its using the internal chain for the changeoutput
        txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout']
        keypath = ""
        for out in outs:
            if out['value'] != 1:
                keypath = self.nodes[1].validateaddress(out['scriptPubKey']['addresses'][0])['hdkeypath']

        assert_equal(keypath[0:7], "m/0'/1'")
# Allow running this functional test directly from the command line.
if __name__ == '__main__':
    WalletHDTest().main ()
| 42.365854 | 113 | 0.647285 |
862b2d22c0e49336cae76b485aabce243cbc6922 | 1,773 | py | Python | main.py | Senith-Chandul/Image-UploadBot | 0b3c393335159140c3e09fd23e09ecf7b685f58c | [
"MIT"
] | null | null | null | main.py | Senith-Chandul/Image-UploadBot | 0b3c393335159140c3e09fd23e09ecf7b685f58c | [
"MIT"
] | null | null | null | main.py | Senith-Chandul/Image-UploadBot | 0b3c393335159140c3e09fd23e09ecf7b685f58c | [
"MIT"
] | 1 | 2022-03-10T02:49:33.000Z | 2022-03-10T02:49:33.000Z | import os
import logging
from pyrogram import Client, filters
from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton
from creds import Credentials
from telegraph import upload_file
# Keep library log noise down; only warnings and above are emitted.
logging.basicConfig(level=logging.WARNING)

# Pyrogram bot client; credentials come from the local `creds` module.
tgraph = Client(
    "Image upload bot",
    bot_token=Credentials.BOT_TOKEN,
    api_id=Credentials.API_ID,
    api_hash=Credentials.API_HASH
)
@tgraph.on_message(filters.command("start"))
async def start(client, message):
    """Reply to /start with a short greeting that mentions the user."""
    await message.reply_text(
        text=f"Hello {message.from_user.mention},\nI'm a telegram to telegra.ph image uploader bot by @SenithChandul",
        disable_web_page_preview=True
    )
@tgraph.on_message(filters.photo)
async def getimage(client, message):
    """Download an incoming photo, upload it to telegra.ph, reply with the link.

    The photo is first saved to the local disk, then uploaded; the temporary
    file is removed on every exit path (previously it leaked whenever the
    telegra.ph upload failed).
    """
    dwn = await message.reply_text("Downloading to my server...", True)
    img_path = await message.download()
    try:
        await dwn.edit_text("Uploading as telegra.ph link...")
        try:
            url_path = upload_file(img_path)[0]
        except Exception as error:
            # Surface the failure to the user instead of crashing the handler.
            await dwn.edit_text(f"Oops something went wrong\n{error}")
            return
        await dwn.edit_text(
            text=f"<b>Link :-</b> <code>https://telegra.ph{url_path}</code>",
            disable_web_page_preview=True,
            reply_markup=InlineKeyboardMarkup(
                [
                    [
                        InlineKeyboardButton(
                            text="Open Link", url=f"https://telegra.ph{url_path}"
                        ),
                        InlineKeyboardButton(
                            text="Share Link",
                            url=f"https://telegram.me/share/url?url=https://telegra.ph{url_path}",
                        )
                    ]
                ]
            )
        )
    finally:
        # Always clean up the downloaded file, even if the upload failed.
        os.remove(img_path)
# Start long-polling; blocks until the bot is stopped.
tgraph.run()
| 30.568966 | 119 | 0.608009 |
fea176da6318c2e15a703fdd54b8c9a7c3d7c4ec | 1,408 | py | Python | Chapter10/regression.py | PacktPublishing/Hands-On-Ensemble-Learning-with-Python | db9b90189dbebbc6ab5ebba0e2e173ba80197c35 | [
"MIT"
] | 31 | 2019-07-21T00:36:52.000Z | 2022-02-25T15:38:21.000Z | Chapter10/regression.py | tokiran/Hands-On-Ensemble-Learning-with-Python | 739ecda33fb75dc1df1366abf4a79c34cc0c2026 | [
"MIT"
] | null | null | null | Chapter10/regression.py | tokiran/Hands-On-Ensemble-Learning-with-Python | 739ecda33fb75dc1df1366abf4a79c34cc0c2026 | [
"MIT"
] | 30 | 2019-07-06T00:22:44.000Z | 2022-02-04T02:44:17.000Z | import numpy as np
import pandas as pd
from simulator import simulate
from sklearn import metrics
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
# Fix the RNG seed so runs are reproducible.
np.random.seed(123456)
lr = LinearRegression()
# Daily BTC-USD data, indexed by date after dropping missing rows.
data = pd.read_csv('BTC-USD.csv')
data = data.dropna()
data.Date = pd.to_datetime(data.Date)
data.set_index('Date', drop=True, inplace=True)
# Relative day-to-day close-price changes; [1:] drops the initial NaN diff.
diffs = (data.Close.diff()/data.Close).values[1:]
diff_len = len(diffs)
def create_x_data(lags=1):
    """Build a (diff_len, lags) lagged feature matrix from the global series.

    Column ``c`` holds the series shifted forward by ``c + 1`` steps; the
    first ``c + 1`` rows of that column stay zero (no history available).
    """
    lagged = np.zeros((diff_len, lags))
    for col in range(lags):
        shift = col + 1
        lagged[shift:, col] = diffs[:-shift]
    return lagged
# REPRODUCIBILITY
# Work in percentage units and round so results are bit-reproducible.
x_data = create_x_data(lags=20)*100
y_data = diffs*100

x_data = np.around(x_data, decimals=8)
y_data = np.around(y_data, decimals=8)

# =============================================================================
# WALK FORWARD
# =============================================================================
# Refit the linear model on a sliding 150-day window and predict one step out.
window = 150
preds = np.zeros(diff_len-window)
for i in range(diff_len-window-1):
    x_train = x_data[i:i+window, :]
    y_train = y_data[i:i+window]
    lr.fit(x_train, y_train)
    preds[i] = lr.predict(x_data[i+window+1, :].reshape(1, -1))

# Bug fix: the metric computed below is the mean absolute error, not the MSE,
# so label it as MAE.
print('Percentages MAE: %.2f'%metrics.mean_absolute_error(y_data[window:], preds))
simulate(data, preds)
| 25.6 | 83 | 0.607244 |
3b0f87f97010e7106a74c33e629b28ff077e2df5 | 397 | py | Python | easy/1748-sum-of-unique-elements.py | wanglongjiang/leetcode | c61d2e719e81575cfb5bde9d64e15cee7cf01ef3 | [
"MIT"
] | 2 | 2021-03-14T11:38:26.000Z | 2021-03-14T11:38:30.000Z | easy/1748-sum-of-unique-elements.py | wanglongjiang/leetcode | c61d2e719e81575cfb5bde9d64e15cee7cf01ef3 | [
"MIT"
] | null | null | null | easy/1748-sum-of-unique-elements.py | wanglongjiang/leetcode | c61d2e719e81575cfb5bde9d64e15cee7cf01ef3 | [
"MIT"
] | 1 | 2022-01-17T19:33:23.000Z | 2022-01-17T19:33:23.000Z | '''
唯一元素的和
给你一个整数数组 nums 。数组中唯一元素是那些只出现 恰好一次 的元素。
请你返回 nums 中唯一元素的 和 。
'''
from typing import List
'''
思路:用哈希表存储所有元素
时间复杂度:O(n)
空间复杂度:O(n)
'''
class Solution:
    def sumOfUnique(self, nums: List[int]) -> int:
        """Return the sum of the elements that appear exactly once in *nums*.

        Bug fix: the previous version summed every distinct value (instead of
        only those occurring exactly once) and returned the loop variable
        ``num`` rather than the accumulated sum.

        Time:  O(n)
        Space: O(n)
        """
        counts = {}
        for num in nums:
            counts[num] = counts.get(num, 0) + 1
        # Only values seen exactly once contribute to the result.
        return sum(val for val, cnt in counts.items() if cnt == 1)
| 15.88 | 50 | 0.551637 |
04e2a8450902f008221760cae6d89ab061f68e79 | 591 | py | Python | As_2.py | TR-MEILL/AboutFlaskNote | 6bda8b1fe702441f295179c4fc1e9c276300ed9a | [
"MIT"
] | null | null | null | As_2.py | TR-MEILL/AboutFlaskNote | 6bda8b1fe702441f295179c4fc1e9c276300ed9a | [
"MIT"
] | null | null | null | As_2.py | TR-MEILL/AboutFlaskNote | 6bda8b1fe702441f295179c4fc1e9c276300ed9a | [
"MIT"
] | null | null | null | from flask import Flask
from flask import request
from flask import render_template
# WSGI application instance for this module.
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def home():
    """Render the landing page (same response for GET and POST)."""
    return render_template('home.htm')
@app.route('/login', methods=['GET'])
def signin_form():
    """Show the empty login form."""
    return render_template('login.htm')
@app.route('/login', methods=['POST'])
def signin():
    """Validate the submitted login form; render the calendar on success."""
    # Read the submitted form fields from the request object.
    # SECURITY NOTE(review): credentials are hard-coded and compared in plain
    # text; replace with a real user store and hashed passwords.
    if request.form['username']=='admin' and request.form['password']=='password':
        return render_template('calendar.html')
    return render_template('error.htm')
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
| 24.625 | 82 | 0.683587 |
3b11da9f9f6245881abdbf5a0ccd8d4a6d079619 | 534 | py | Python | layers/HorizontalConv2DTranspose.py | eyalbetzalel/PixelCNNPPTF2 | 94db05ae1161ab2975792e50b5e89bcac041bcda | [
"MIT"
] | 8 | 2020-01-09T01:03:31.000Z | 2022-01-09T11:08:57.000Z | layers/HorizontalConv2DTranspose.py | JesseFarebro/PixelCNNPP | 7df9fe22099a28ce2096a2b67d72f8d819581856 | [
"MIT"
] | 2 | 2021-07-12T12:13:42.000Z | 2022-02-09T23:33:46.000Z | layers/HorizontalConv2DTranspose.py | eyalbetzalel/PixelCNNPPTF2 | 94db05ae1161ab2975792e50b5e89bcac041bcda | [
"MIT"
] | 4 | 2020-04-09T19:31:21.000Z | 2020-10-15T11:50:54.000Z | import tensorflow as tf
class HorizontalConv2DTranspose(tf.keras.layers.Conv2DTranspose):
    # Transposed convolution used by the PixelCNN "horizontal" stack: the
    # kernel is shrunk to kernel_size // 2 + 1 and the extra rows/columns
    # produced at the bottom/right are cropped away afterwards.
    # NOTE(review): assumes `kernel_size` is an int (not a tuple) — confirm.
    def __init__(self, filters, kernel_size, **kwargs):
        super(HorizontalConv2DTranspose, self).__init__(
            filters, kernel_size // 2 + 1, output_padding=1, **kwargs
        )
        # Crop (0, kernel_size // 2) from both height and width, i.e. only
        # from the bottom and right edges of the upsampled output.
        self.crop = tf.keras.layers.Cropping2D(
            ((0, kernel_size // 2), (0, kernel_size // 2))
        )

    def call(self, inputs):
        # Run the regular transposed convolution, then trim the overhang.
        output = super(HorizontalConv2DTranspose, self).call(inputs)
        return self.crop(output)
81fba4cb952d1ea3420d0e0e2ce5175bd7d4ff7d | 5,231 | py | Python | kyu_4/most_frequently_used_words/test_top_3_words.py | ikostan/codewars | 69f0ae66e849df102d921cfad3ce2e74bdeda1fc | [
"Unlicense"
] | 1 | 2022-02-12T05:56:04.000Z | 2022-02-12T05:56:04.000Z | kyu_4/most_frequently_used_words/test_top_3_words.py | iKostanOrg/codewars | 69f0ae66e849df102d921cfad3ce2e74bdeda1fc | [
"Unlicense"
] | 182 | 2020-04-30T00:51:36.000Z | 2021-09-07T04:15:05.000Z | kyu_4/most_frequently_used_words/test_top_3_words.py | iKostanOrg/codewars | 69f0ae66e849df102d921cfad3ce2e74bdeda1fc | [
"Unlicense"
] | 4 | 2020-04-29T22:04:20.000Z | 2021-07-13T20:04:14.000Z | # Created by Egor Kostan.
# GitHub: https://github.com/ikostan
# LinkedIn: https://www.linkedin.com/in/egor-kostan/
# ALGORITHMS STRINGS PARSING RANKING FILTERING
import allure
import unittest
from utils.log_func import print_log
from kyu_4.most_frequently_used_words.solution import top_3_words
@allure.epic('4 kyu')
@allure.parent_suite('Competent')
@allure.suite("Algorithms")
@allure.sub_suite("Unit Tests")
@allure.feature("String")
@allure.story('Most frequently used words in a text')
@allure.tag('ALGORITHMS', 'STRINGS', 'PARSING', 'RANKING', 'FILTERING')
@allure.link(
    url='https://www.codewars.com/kata/51e056fe544cf36c410000fb/train/python',
    name='Source/Kata')
class Top3WordsTestCase(unittest.TestCase):
    """
    Testing top_3_words
    """

    def test_top_3_words(self):
        """
        Test top_3_words function

        Each entry of ``test_data`` is a (text, expected top-3 words) pair;
        the expected list is in descending order of occurrence count and
        may be shorter than 3 when fewer distinct words exist.
        """
        allure.dynamic.title("Testing top_3_words function")
        allure.dynamic.severity(allure.severity_level.NORMAL)
        allure.dynamic.description_html(
            '<h3>Codewars badge:</h3>'
            '<img src="https://www.codewars.com/users/myFirstCode'
            '/badges/large">'
            '<h3>Test Description:</h3>'
            "<p>Given a string of text (possibly with punctuation and line-breaks), "
            "the function should return an array of the top-3 most occurring words, "
            "in descending order of the number of occurrences.</p>")

        # (input text, expected result) pairs; the final entry is a large
        # fuzz-style string exercising punctuation/apostrophe handling and
        # case-insensitive counting (expected words are lower-cased).
        test_data = (
            ("a a a b c c d d d d e e e e e", ["e", "d", "a"]),
            ("e e e e DDD ddd DdD: ddd ddd aa aA Aa, bb cc cC e e e", ["e", "ddd", "aa"]),
            (" //wont won't won't ", ["won't", "wont"]),
            (" , e .. ", ["e"]),
            (" ... ", []),
            (" ' ", []),
            (" ''' ", []),
            ('AqyWZhe /ubx,,/!VCDeGux,uYVx -/-;CUQhpOptV/xoiIHRwI-!-xoiIHRwI!CUQhpOptV /;'
             'aqNSrVD--//VCDeGux_!AqyWZhe? uYVx,-/:ubx?aqNSrVD_.PVDdpw,;!AqyWZhe/aqNSrVD_A'
             'nZnlq:/ubx_!-wWHf;:,rKlP/VCDeGux; !_:xoiIHRwI;-AqyWZhe !KdAFZ!:;;PVDdpw!_.;x'
             'oiIHRwI!KdAFZ/??!QsF_?/?!PVDdpw;.-_-aqNSrVD:?uYVx._AnZnlq-,,wWHf; ;AqyWZhe ? '
             '_,uYVx_uYVx?KdAFZ?AqyWZhe:?;/rKlP.aqNSrVD,!-/QsF/??uYVx.PVDdpw . ._xoiIHRwI/?'
             '/;!GLU-? aqNSrVD/. ?aqNSrVD.?AqyWZhe !FdalHwCCUr wWHf_:-?rKlP,.?/ PVDdpw?!Fda'
             'lHwCCUr.!wWHf :QsF?KdAFZ-::::AnZnlq_QsF/.qyd,/VCDeGux:_;wWHf.!/uYVx!;KdAFZ??_'
             ':?QsF:,AqyWZhe:::;ubx: ?QsF;;KdAFZ;; _-ubx./_;,rKlP? :?_CUQhpOptV/-KdAFZ?:uYV'
             'x??.ecMq\'CMuEx_ecMq\'CMuEx;,-/ qyd., CUQhpOptV:::wWHf-__QsF:/-.VCDeGux?/:-uY'
             'Vx-_,-QsF;xoiIHRwI;._AqyWZhe-;-.VCDeGux.aqNSrVD/.AqyWZhe;aqNSrVD:KdAFZ-; .Aqy'
             'WZhe!/_;?KdAFZ;;/,KdAFZ-GLU,,.KdAFZ!AqyWZhe//?-rKlP/ ://QsF.!!QsF!;:,ubx:/-?P'
             'VDdpw?!AqyWZhe,;!?/xoiIHRwI.. ubx:aqNSrVD/,_:.AnZnlq /_;xoiIHRwI.- .aqNSrVD-'
             ' , ubx_-?VCDeGux;:rKlP;VCDeGux ?.AnZnlq,PVDdpw-., KdAFZ_QsF/QsF_; uYVx_?;?wW'
             'Hf/-,;/GLU_;rKlP -_,wWHf?:QsF!!_rKlP-;aqNSrVD ? KdAFZ?::. AqyWZhe_AnZnlq, ub'
             'x,? !GLU_:aqNSrVD.!;AqyWZhe_?,uYVx-CUQhpOptV/..AnZnlq;QsF?rKlP!QsF,_!AnZnlq!-'
             '/,uYVx.uYVx ?uYVx-ubx,.,_FdalHwCCUr !/QsF.AqyWZhe./:_KdAFZ ,KdAFZ._ _ ubx?CUQ'
             'hpOptV :.KdAFZ;!;:,KdAFZ,_!-?PVDdpw.!CUQhpOptV;.;;KdAFZ,,-VCDeGux_-,ubx ;uYVx'
             ':;! :VCDeGux/.;_uYVx.!FdalHwCCUr!:,uYVx:!;_uYVx:!CUQhpOptV?!.KdAFZ!-:-uYVx_?w'
             'WHf:uYVx.?-.:rKlP?.VCDeGux:?:?uYVx:wWHf?,AnZnlq::FdalHwCCUr.-,_,VCDeGux.;rKlP'
             ',rKlP. VCDeGux,/_aqNSrVD? AqyWZhe.. /:AqyWZhe_.CUQhpOptV ?;- AqyWZhe! ;/qyd !'
             ',:AqyWZhe:!?.CUQhpOptV / _uYVx-_:!CUQhpOptV_/AqyWZhe;VCDeGux__?PVDdpw.,_?,VCD'
             'eGux wWHf;,_PVDdpw;,-!?AqyWZhe,??AqyWZhe:?/!VCDeGux :;.KdAFZ ?/GLU- AqyWZhe,_'
             '?_:FdalHwCCUr.:AqyWZhe. ,,xoiIHRwI;;.;AnZnlq;!;aqNSrVD:.;_/ubx;-,qyd ?uYVx qy'
             'd-;wWHf ;! VCDeGux -rKlP:! KdAFZ/??.?rKlP,//:VCDeGux!,ecMq\'CMuEx:VCDeGux.QsF'
             '_VCDeGux;?-aqNSrVD,.- uYVx?:KdAFZ;VCDeGux.wWHf_:-/QsF!_.VCDeGux. xoiIHRwI,-An'
             'Znlq/aqNSrVD!? -AnZnlq_!qyd _?,.FdalHwCCUr!?!wWHf- ;:rKlP--:AqyWZhe -:/wWHf-K'
             'dAFZ_!?;VCDeGux:_?/qyd:uYVx;;FdalHwCCUr .! uYVx?;.,rKlP,AqyWZhe?-!,AnZnlq! ?V'
             'CDeGux, ;,aqNSrVD;/::/QsF__.QsF!rKlP?.;AqyWZhe;-?uYVx/_rKlP::ubx!!_PVDdpw._/,'
             'AnZnlq!/?. rKlP.;rKlP-,,/:CUQhpOptV; ubx;:-;KdAFZ:AqyWZhe/_GLU!!/PVDdpw,_QsF '
             '_ _QsF!;/CUQhpOptV ;-,PVDdpw?aqNSrVD_;?,AqyWZhe;.CUQhpOptV_;!-aqNSrVD!KdAFZ;!'
             'KdAFZ/KdAFZ-!_.:aqNSrVD._;VCDeGux_!QsF PVDdpw_,KdAFZ/ ;_/CUQhpOptV;.PVDdpw?/ '
             ',rKlP:,uYVx? _-QsF-.VCDeGux-;;.wWHf,- QsF_rKlP:?.,/PVDdpw!,VCDeGux-:wWHf __;Q'
             'sF,_.QsF:VCDeGux:',
             ['aqywzhe', 'vcdegux', 'uyvx'])
        )

        for text, expected in test_data:
            actual_result: list = top_3_words(text)
            with allure.step("Enter a test string and verify the output"):
                print_log(text=text,
                          expected=expected,
                          result=actual_result)
                self.assertListEqual(expected, actual_result)
| 53.927835 | 92 | 0.575033 |
888cea6597cae2865a8318369db1d468c7ff9469 | 365 | py | Python | noisy_image/gen_noise.py | simonfong6/micro-projects | 5be195ea72ce117df6da041446f11c18e102b5df | [
"MIT"
] | null | null | null | noisy_image/gen_noise.py | simonfong6/micro-projects | 5be195ea72ce117df6da041446f11c18e102b5df | [
"MIT"
] | null | null | null | noisy_image/gen_noise.py | simonfong6/micro-projects | 5be195ea72ce117df6da041446f11c18e102b5df | [
"MIT"
] | null | null | null | import cv2
from random import randint
from time import sleep
from sys import argv
# Repeatedly scroll the image one column to the right, wrapping the last
# column around to the front, and display each intermediate frame.
image = cv2.imread('noise.png')
max_y, max_x, channels = image.shape
print(image.shape)

# Number of scroll/display iterations, taken from the command line.
NOISE_POINTS = int(argv[1])

for i in range(NOISE_POINTS):
    # BUG FIX: slicing a numpy array returns a *view*, so without .copy()
    # `temp` would already reflect the shift performed on the next line and
    # the wrap-around column would be corrupted (columns 0 and 1 ended up
    # identical). Copy the last column before shifting.
    temp = image[:, -1, :].copy()
    image[:, 1:, :] = image[:, :-1, :]
    image[:, 0, :] = temp
    cv2.imshow('noisy', image)
    cv2.waitKey(1)
| 19.210526 | 35 | 0.652055 |
78c168c0437de7eeaf14eac2f371b89b337cab6c | 1,942 | py | Python | backend/paperchase/helpers/favicon.py | dedalusj/PaperChase | 728cd2f742275b12223d91613275358fb4a92feb | [
"MIT"
] | 3 | 2015-02-13T02:42:39.000Z | 2016-11-22T08:03:45.000Z | backend/paperchase/helpers/favicon.py | dedalusj/PaperChase | 728cd2f742275b12223d91613275358fb4a92feb | [
"MIT"
] | null | null | null | backend/paperchase/helpers/favicon.py | dedalusj/PaperChase | 728cd2f742275b12223d91613275358fb4a92feb | [
"MIT"
] | 1 | 2020-10-10T08:35:16.000Z | 2020-10-10T08:35:16.000Z | import re
import requests
from urlparse import urlparse, urljoin
from bs4 import BeautifulSoup
class FaviconFetcher():
def _htc(self, m):
return chr(int(m.group(1), 16))
def _url_decode(self, url):
rex = re.compile('%([0-9a-hA-H][0-9a-hA-H])', re.M)
return rex.sub(self._htc, url)
def _extract_path(self, url):
return self._url_decode(url.lstrip("/"))
def _extract_domain(self, url):
return "http://" + urlparse(self._extract_path(url))[1]
def icon_at_root(self, domain):
root_icon_path = domain + "/favicon.ico"
r = requests.get(root_icon_path)
if r.status_code == 200:
return root_icon_path
return None
def icon_in_page(self, url):
path = self._extract_path(url)
r = requests.get(path)
if r.status_code == 200:
page_soup = BeautifulSoup(r.content)
page_soup_icon = page_soup.find(
"link", rel=re.compile("^(shortcut|icon|shortcut icon)$",
re.IGNORECASE))
if page_soup_icon:
page_icon_href = page_soup_icon.get("href")
if page_icon_href:
page_icon_path = urljoin(path, page_icon_href)
else:
return None
page_path_favicon_result = requests.get(page_icon_path)
if page_path_favicon_result.status_code == 200:
return page_icon_path
return None
def find_favicon(self, url):
domain = self._extract_domain(url)
candidate_url = self.icon_at_root(domain)
if candidate_url:
return candidate_url
candidate_url = self.icon_in_page(domain)
if candidate_url:
return candidate_url
candidate_url = self.icon_in_page(url)
if candidate_url:
return candidate_url
return None
| 32.366667 | 73 | 0.587024 |
069830b05a702813aca1dcf9dd7e4a53367c93c1 | 4,211 | py | Python | continous_system (copy)/ContSurv/VideoProcess/process.py | sekharkaredla/ASAGS | 142173fc23bd42dcfdf74cf3850db445864f906a | [
"MIT"
] | 10 | 2018-08-02T09:31:19.000Z | 2022-01-27T19:46:30.000Z | continous_system/ContSurv/VideoProcess/process.py | cir7/ASAGS | 142173fc23bd42dcfdf74cf3850db445864f906a | [
"MIT"
] | 3 | 2019-02-08T17:49:54.000Z | 2019-10-31T14:11:58.000Z | continous_system/ContSurv/VideoProcess/process.py | cir7/ASAGS | 142173fc23bd42dcfdf74cf3850db445864f906a | [
"MIT"
] | 12 | 2018-04-08T07:48:59.000Z | 2021-12-10T16:44:22.000Z | import numpy
import cv2
import sys
import time
class PreProcess:
    """Frame-grabbing helper that yields triples of grayscale frames spaced
    MOVEMENT_INTERVAL frames apart, from either a video file or a camera.
    """

    def __init__(self):
        #constants----------------
        self.FRAME_RATE = 25 #25 frames per second
        self.MOVEMENT_INTERVAL = 3 #difference between considered frames
        self.N = 4 #number of vertical blocks per frame
        self.M = 4 #number of horizontal blocks per frame
        self.FRAME_GAP = 2 * self.MOVEMENT_INTERVAL
        #-------------------------
        # Capture-source state; cap becomes a cv2.VideoCapture once a
        # source is opened (the '' placeholder is never used as a capture).
        self.cap = ''
        self.total_frames = 0
        self.fps = 0
        self.time = 0  # video duration in seconds (set by read_video)
        #-------------------------
        self.dim = 100  # target width (pixels) used by resize_frame
        #-------------------------
        self.frame_number = 0  # index of the next PREV frame to read

    def read_video(self, video_name):
        """Open a video file and cache its frame count, fps and duration."""
        self.cap = cv2.VideoCapture(video_name)
        self.total_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))
        self.fps = self.cap.get(cv2.CAP_PROP_FPS)
        self.time = self.total_frames / self.fps
        # self.last_frame = self.read_frame()

    def getFrameFromIndex(self, frame_no):
        """Seek to `frame_no` and return that frame converted to grayscale.

        Exits the process with 'Done' when the index is past the end of the
        video (cap.read() returns None).
        """
        #Number 2 defines flag CV_CAP_PROP_POS_FRAMES which is a 0-based index of the frame to be decoded/captured next.
        #The second argument defines the frame number in range 0.0-1.0
        self.cap.set(1, frame_no)
        ret, img = self.cap.read()
        if img is None:
            sys.exit('Done')
        img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        return img

    def resize_frame(self, frame):
        """Shrink `frame` to width self.dim (preserving aspect ratio), but
        only when the required rescale factor is below 0.8; otherwise the
        frame is returned unchanged."""
        rescale = float(self.dim) / (frame.shape[1])
        if rescale < 0.8:
            dim = (self.dim, int(frame.shape[0] * rescale))
            frame = cv2.resize(frame, dim, interpolation=cv2.INTER_AREA)
        return frame

    def setVideoDimension(self, dim):
        # Set the target width used by resize_frame.
        self.dim = dim

    def useCamera(self):
        """Open the default camera (device 0) and prime last_frame."""
        self.cap = cv2.VideoCapture(0)
        self.last_frame = self.read_frame()

    def showInputFromCamera(self):
        """Debug loop: display raw and resized grayscale camera frames until
        'q' is pressed."""
        while True:
            ret, frame = self.cap.read()
            cv2.imshow('camera_frame', frame)
            cv2.imshow('resized_camera_frame', self.resize_frame(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)))
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

    def read_frame(self):
        # Grab one frame from the current capture (None at end of stream).
        ret, frame = self.cap.read()
        return frame

    def getFPS(self):
        return self.cap.get(cv2.CAP_PROP_FPS)

    def getFramesFromVideoSource(self):
        """Return (prev, current, next, frame_number) grayscale frames taken
        at frame_number, +MOVEMENT_INTERVAL and +2*MOVEMENT_INTERVAL, then
        advance frame_number by 6 (== 2 * MOVEMENT_INTERVAL).

        NOTE(review): an older sequential-read implementation was removed
        here in favor of index-based seeking via getFrameFromIndex.
        """
        PREV_F = self.getFrameFromIndex(self.frame_number)
        CURRENT_F = self.getFrameFromIndex(self.frame_number + self.MOVEMENT_INTERVAL)
        NEXT_F = self.getFrameFromIndex(self.frame_number + (2 * self.MOVEMENT_INTERVAL))
        frames = (PREV_F, CURRENT_F, NEXT_F, self.frame_number)
        self.frame_number += 6
        return frames

    def getFramesFromCameraSource(self):
        """Return (frame1, frame2, frame3, timestamp) from the live camera:
        three resized grayscale frames MOVEMENT_INTERVAL captures apart,
        plus the current wall-clock time.

        NOTE(review): requires useCamera() to have been called first, since
        it reads self.last_frame.
        """
        frame1 = self.last_frame
        # Skip MOVEMENT_INTERVAL-1 frames between the ones we keep.
        for i in range(0, self.MOVEMENT_INTERVAL - 1):
            self.read_frame()
        frame2 = self.read_frame()
        for i in range(0, self.MOVEMENT_INTERVAL - 1):
            self.read_frame()
        frame3 = self.read_frame()
        self.last_frame = frame3
        # cv2.imshow('camera_frame',frame2)
        frame1 = self.resize_frame(frame1)
        frame2 = self.resize_frame(frame2)
        frame3 = self.resize_frame(frame3)
        frame1 = cv2.cvtColor(frame1, cv2.COLOR_BGR2GRAY)
        frame2 = cv2.cvtColor(frame2, cv2.COLOR_BGR2GRAY)
        frame3 = cv2.cvtColor(frame3, cv2.COLOR_BGR2GRAY)
        # cv2.imshow('resized_camera_frame',frame2)
        frames = (frame1, frame2, frame3, time.time())
        return frames
| 32.898438 | 120 | 0.597008 |
901143c167e304f6e1ea75ae87668c56545e8be5 | 39,600 | py | Python | kay/lib/babel/dates.py | Letractively/kay-framework | a4cfabe3497e13c3785e5ec381b9cff11a378df3 | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2015-11-05T08:30:09.000Z | 2015-11-05T08:30:09.000Z | kay/lib/babel/dates.py | ianlewis/kay | 1bf28487dc2a273eaa44d442aec8baa6453240b7 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | kay/lib/babel/dates.py | ianlewis/kay | 1bf28487dc2a273eaa44d442aec8baa6453240b7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2016-05-23T16:30:15.000Z | 2016-05-23T16:30:15.000Z | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
"""Locale dependent formatting and parsing of dates and times.
The default locale for the functions in this module is determined by the
following environment variables, in that order:
* ``LC_TIME``,
* ``LC_ALL``, and
* ``LANG``
"""
from __future__ import division
from datetime import date, datetime, time, timedelta, tzinfo
import re
from babel.core import default_locale, get_global, Locale
from babel.util import UTC
__all__ = ['format_date', 'format_datetime', 'format_time', 'format_timedelta',
'get_timezone_name', 'parse_date', 'parse_datetime', 'parse_time']
__docformat__ = 'restructuredtext en'
LC_TIME = default_locale('LC_TIME')
# Aliases for use in scopes where the modules are shadowed by local variables
date_ = date
datetime_ = datetime
time_ = time
def get_period_names(locale=LC_TIME):
    """Return the localized day-period (AM/PM) display names.

    >>> get_period_names(locale='en_US')['am']
    u'AM'

    :param locale: the `Locale` object, or a locale string
    :return: the dictionary of period names
    :rtype: `dict`
    """
    loc = Locale.parse(locale)
    return loc.periods
def get_day_names(width='wide', context='format', locale=LC_TIME):
    """Return the localized weekday names for the given width and context.

    >>> get_day_names('wide', locale='en_US')[1]
    u'Tuesday'
    >>> get_day_names('abbreviated', locale='es')[1]
    u'mar'
    >>> get_day_names('narrow', context='stand-alone', locale='de_DE')[1]
    u'D'

    :param width: one of "wide", "abbreviated", or "narrow"
    :param context: either "format" or "stand-alone"
    :param locale: the `Locale` object, or a locale string
    :return: the dictionary of day names
    :rtype: `dict`
    """
    days = Locale.parse(locale).days
    return days[context][width]
def get_month_names(width='wide', context='format', locale=LC_TIME):
    """Return the localized month names for the given width and context.

    >>> get_month_names('wide', locale='en_US')[1]
    u'January'
    >>> get_month_names('abbreviated', locale='es')[1]
    u'ene'
    >>> get_month_names('narrow', context='stand-alone', locale='de_DE')[1]
    u'J'

    :param width: one of "wide", "abbreviated", or "narrow"
    :param context: either "format" or "stand-alone"
    :param locale: the `Locale` object, or a locale string
    :return: the dictionary of month names
    :rtype: `dict`
    """
    months = Locale.parse(locale).months
    return months[context][width]
def get_quarter_names(width='wide', context='format', locale=LC_TIME):
    """Return the localized quarter names for the given width and context.

    >>> get_quarter_names('wide', locale='en_US')[1]
    u'1st quarter'
    >>> get_quarter_names('abbreviated', locale='de_DE')[1]
    u'Q1'

    :param width: one of "wide", "abbreviated", or "narrow"
    :param context: either "format" or "stand-alone"
    :param locale: the `Locale` object, or a locale string
    :return: the dictionary of quarter names
    :rtype: `dict`
    """
    quarters = Locale.parse(locale).quarters
    return quarters[context][width]
def get_era_names(width='wide', locale=LC_TIME):
    """Return the localized era names (e.g. BC/AD) for the given width.

    >>> get_era_names('wide', locale='en_US')[1]
    u'Anno Domini'
    >>> get_era_names('abbreviated', locale='de_DE')[1]
    u'n. Chr.'

    :param width: one of "wide", "abbreviated", or "narrow"
    :param locale: the `Locale` object, or a locale string
    :return: the dictionary of era names
    :rtype: `dict`
    """
    loc = Locale.parse(locale)
    return loc.eras[width]
def get_date_format(format='medium', locale=LC_TIME):
    """Return the locale's date formatting pattern for the given style.

    >>> get_date_format(locale='en_US')
    <DateTimePattern u'MMM d, y'>
    >>> get_date_format('full', locale='de_DE')
    <DateTimePattern u'EEEE, d. MMMM y'>

    :param format: one of "full", "long", "medium", or "short"
    :param locale: the `Locale` object, or a locale string
    :return: the date format pattern
    :rtype: `DateTimePattern`
    """
    formats = Locale.parse(locale).date_formats
    return formats[format]
def get_datetime_format(format='medium', locale=LC_TIME):
    """Return the locale's pattern for combining a date and a time.

    The result is a template string with ``{0}`` (time) and ``{1}`` (date)
    placeholders.

    >>> get_datetime_format(locale='en_US')
    u'{1} {0}'

    :param format: one of "full", "long", "medium", or "short"
    :param locale: the `Locale` object, or a locale string
    :return: the datetime format pattern
    :rtype: `unicode`
    """
    patterns = Locale.parse(locale).datetime_formats
    # Fall back to the locale's default (keyed by None) for unknown styles.
    key = format if format in patterns else None
    return patterns[key]
def get_time_format(format='medium', locale=LC_TIME):
    """Return the locale's time formatting pattern for the given style.

    >>> get_time_format(locale='en_US')
    <DateTimePattern u'h:mm:ss a'>
    >>> get_time_format('full', locale='de_DE')
    <DateTimePattern u'HH:mm:ss zzzz'>

    :param format: one of "full", "long", "medium", or "short"
    :param locale: the `Locale` object, or a locale string
    :return: the time format pattern
    :rtype: `DateTimePattern`
    """
    formats = Locale.parse(locale).time_formats
    return formats[format]
def get_timezone_gmt(datetime=None, width='long', locale=LC_TIME):
    """Return the timezone of `datetime` formatted as an offset from GMT.

    >>> dt = datetime(2007, 4, 1, 15, 30)
    >>> get_timezone_gmt(dt, locale='en')
    u'GMT+00:00'
    >>> from pytz import timezone
    >>> tz = timezone('America/Los_Angeles')
    >>> dt = datetime(2007, 4, 1, 15, 30, tzinfo=tz)
    >>> get_timezone_gmt(dt, locale='en')
    u'GMT-08:00'
    >>> get_timezone_gmt(dt, 'short', locale='en')
    u'-0800'

    The long format depends on the locale; for example French uses "UTC"
    instead of "GMT":

    >>> get_timezone_gmt(dt, 'long', locale='fr_FR')
    u'UTC-08:00'

    :param datetime: the ``datetime`` object; if `None`, the current date and
                     time in UTC is used
    :param width: either "long" or "short"
    :param locale: the `Locale` object, or a locale string
    :return: the GMT offset representation of the timezone
    :rtype: `unicode`
    :since: version 0.9
    """
    if datetime is None:
        datetime = datetime_.utcnow()
    elif isinstance(datetime, (int, long)):
        # NOTE(review): a POSIX timestamp is reduced to a ``time`` object;
        # the date is irrelevant since only the (UTC, constant) offset is
        # formatted below.
        datetime = datetime_.utcfromtimestamp(datetime).time()
    if datetime.tzinfo is None:
        datetime = datetime.replace(tzinfo=UTC)
    locale = Locale.parse(locale)

    delta = datetime.utcoffset()
    total_seconds = delta.days * 24 * 60 * 60 + delta.seconds
    hours, remainder = divmod(total_seconds, 3600)
    minutes = remainder // 60
    if width == 'short':
        pattern = u'%+03d%02d'
    else:
        pattern = locale.zone_formats['gmt'] % '%+03d:%02d'
    return pattern % (hours, minutes)
def get_timezone_location(dt_or_tzinfo=None, locale=LC_TIME):
    """Return a representation of the given timezone using "location format".

    The result depends on both the local display name of the country and the
    city associated with the time zone:

    >>> from pytz import timezone
    >>> tz = timezone('America/St_Johns')
    >>> get_timezone_location(tz, locale='de_DE')
    u"Kanada (St. John's)"
    >>> tz = timezone('America/Mexico_City')
    >>> get_timezone_location(tz, locale='de_DE')
    u'Mexiko (Mexiko-Stadt)'

    If the timezone is associated with a country that uses only a single
    timezone, just the localized country name is returned:

    >>> tz = timezone('Europe/Berlin')
    >>> get_timezone_name(tz, locale='de_DE')
    u'Deutschland'

    :param dt_or_tzinfo: the ``datetime`` or ``tzinfo`` object that determines
                         the timezone; if `None`, the current date and time in
                         UTC is assumed
    :param locale: the `Locale` object, or a locale string
    :return: the localized timezone name using location format
    :rtype: `unicode`
    :since: version 0.9
    """
    # Normalize the argument into a (dt, tzinfo) pair.
    if dt_or_tzinfo is None or isinstance(dt_or_tzinfo, (int, long)):
        dt = None
        tzinfo = UTC
    elif isinstance(dt_or_tzinfo, (datetime, time)):
        dt = dt_or_tzinfo
        if dt.tzinfo is not None:
            tzinfo = dt.tzinfo
        else:
            tzinfo = UTC
    else:
        dt = None
        tzinfo = dt_or_tzinfo
    locale = Locale.parse(locale)

    if hasattr(tzinfo, 'zone'):
        zone = tzinfo.zone
    else:
        zone = tzinfo.tzname(dt or datetime.utcnow())

    # Get the canonical time-zone code
    zone = get_global('zone_aliases').get(zone, zone)

    info = locale.time_zones.get(zone, {})

    # If there is only one timezone for the country, return the
    # localized country name
    region_format = locale.zone_formats['region']
    territory = get_global('zone_territories').get(zone)
    if territory not in locale.territories:
        territory = 'ZZ' # invalid/unknown
    territory_name = locale.territories[territory]
    if territory and len(get_global('territory_zones').get(territory, [])) == 1:
        return region_format % (territory_name)

    # Otherwise, include the city in the output
    fallback_format = locale.zone_formats['fallback']
    if 'city' in info:
        city_name = info['city']
    else:
        metazone = get_global('meta_zones').get(zone)
        metazone_info = locale.meta_zones.get(metazone, {})
        if 'city' in metazone_info:
            # BUG FIX: this read an undefined name `metainfo`, raising
            # NameError whenever a meta-zone supplied the city name.
            city_name = metazone_info['city']
        elif '/' in zone:
            city_name = zone.split('/', 1)[1].replace('_', ' ')
        else:
            city_name = zone.replace('_', ' ')

    return region_format % (fallback_format % {
        '0': city_name,
        '1': territory_name
    })
def get_timezone_name(dt_or_tzinfo=None, width='long', uncommon=False,
                      locale=LC_TIME):
    r"""Return the localized display name for the given timezone. The timezone
    may be specified using a ``datetime`` or `tzinfo` object.

    >>> from pytz import timezone
    >>> dt = time(15, 30, tzinfo=timezone('America/Los_Angeles'))
    >>> get_timezone_name(dt, locale='en_US')
    u'Pacific Standard Time'
    >>> get_timezone_name(dt, width='short', locale='en_US')
    u'PST'

    If this function gets passed only a `tzinfo` object and no concrete
    `datetime`, the returned display name is independent of daylight savings
    time. This can be used for example for selecting timezones, or to set the
    time of events that recur across DST changes:

    >>> tz = timezone('America/Los_Angeles')
    >>> get_timezone_name(tz, locale='en_US')
    u'Pacific Time'
    >>> get_timezone_name(tz, 'short', locale='en_US')
    u'PT'

    If no localized display name for the timezone is available, and the timezone
    is associated with a country that uses only a single timezone, the name of
    that country is returned, formatted according to the locale:

    >>> tz = timezone('Europe/Berlin')
    >>> get_timezone_name(tz, locale='de_DE')
    u'Deutschland'
    >>> get_timezone_name(tz, locale='pt_BR')
    u'Hor\xe1rio Alemanha'

    On the other hand, if the country uses multiple timezones, the city is also
    included in the representation:

    >>> tz = timezone('America/St_Johns')
    >>> get_timezone_name(tz, locale='de_DE')
    u"Kanada (St. John's)"

    The `uncommon` parameter can be set to `True` to enable the use of timezone
    representations that are not commonly used by the requested locale. For
    example, while in French the central European timezone is usually
    abbreviated as "HEC", in Canadian French, this abbreviation is not in
    common use, so a generic name would be chosen by default:

    >>> tz = timezone('Europe/Paris')
    >>> get_timezone_name(tz, 'short', locale='fr_CA')
    u'France'
    >>> get_timezone_name(tz, 'short', uncommon=True, locale='fr_CA')
    u'HEC'

    :param dt_or_tzinfo: the ``datetime`` or ``tzinfo`` object that determines
                         the timezone; if a ``tzinfo`` object is used, the
                         resulting display name will be generic, i.e.
                         independent of daylight savings time; if `None`, the
                         current date in UTC is assumed
    :param width: either "long" or "short"
    :param uncommon: whether even uncommon timezone abbreviations should be used
    :param locale: the `Locale` object, or a locale string
    :return: the timezone display name
    :rtype: `unicode`
    :since: version 0.9
    :see: `LDML Appendix J: Time Zone Display Names
          <http://www.unicode.org/reports/tr35/#Time_Zone_Fallback>`_
    """
    # Normalize the argument into a (dt, tzinfo) pair.
    if dt_or_tzinfo is None or isinstance(dt_or_tzinfo, (int, long)):
        dt = None
        tzinfo = UTC
    elif isinstance(dt_or_tzinfo, (datetime, time)):
        dt = dt_or_tzinfo
        if dt.tzinfo is not None:
            tzinfo = dt.tzinfo
        else:
            tzinfo = UTC
    else:
        dt = None
        tzinfo = dt_or_tzinfo
    locale = Locale.parse(locale)

    if hasattr(tzinfo, 'zone'):
        zone = tzinfo.zone
    else:
        # CONSISTENCY FIX: fall back to "now" when no datetime was given,
        # matching get_timezone_location; some tzinfo implementations cannot
        # handle tzname(None).
        zone = tzinfo.tzname(dt or datetime.utcnow())

    # Get the canonical time-zone code
    zone = get_global('zone_aliases').get(zone, zone)

    info = locale.time_zones.get(zone, {})
    # Try explicitly translated zone names first
    if width in info:
        if dt is None:
            field = 'generic'
        else:
            dst = tzinfo.dst(dt)
            if dst is None:
                field = 'generic'
            elif dst == 0:
                field = 'standard'
            else:
                field = 'daylight'
        if field in info[width]:
            return info[width][field]

    # Fall back to the meta-zone name, unless it is uncommon for this locale.
    metazone = get_global('meta_zones').get(zone)
    if metazone:
        metazone_info = locale.meta_zones.get(metazone, {})
        if width in metazone_info and (uncommon or metazone_info.get('common')):
            if dt is None:
                field = 'generic'
            else:
                field = tzinfo.dst(dt) and 'daylight' or 'standard'
            if field in metazone_info[width]:
                return metazone_info[width][field]

    # If we have a concrete datetime, we assume that the result can't be
    # independent of daylight savings time, so we return the GMT offset
    if dt is not None:
        return get_timezone_gmt(dt, width=width, locale=locale)

    return get_timezone_location(dt_or_tzinfo, locale=locale)
def format_date(date=None, format='medium', locale=LC_TIME):
    """Return a date formatted according to the given pattern.

    >>> d = date(2007, 04, 01)
    >>> format_date(d, locale='en_US')
    u'Apr 1, 2007'
    >>> format_date(d, format='full', locale='de_DE')
    u'Sonntag, 1. April 2007'

    If you don't want to use the locale default formats, you can specify a
    custom date pattern:

    >>> format_date(d, "EEE, MMM d, ''yy", locale='en')
    u"Sun, Apr 1, '07"

    :param date: the ``date`` or ``datetime`` object; if `None`, the current
                 date is used
    :param format: one of "full", "long", "medium", or "short", or a custom
                   date/time pattern
    :param locale: a `Locale` object or a locale identifier
    :rtype: `unicode`

    :note: If the pattern contains time fields, an `AttributeError` will be
           raised when trying to apply the formatting. This is also true if
           the value of ``date`` parameter is actually a ``datetime`` object,
           as this function automatically converts that to a ``date``.
    """
    if date is None:
        date = date_.today()
    elif isinstance(date, datetime):
        date = date.date()

    locale = Locale.parse(locale)
    if format in ('full', 'long', 'medium', 'short'):
        format = get_date_format(format, locale=locale)
    # CLEANUP: the pattern was previously parsed twice, once into an unused
    # local variable; parse it exactly once.
    return parse_pattern(format).apply(date, locale)
def format_datetime(datetime=None, format='medium', tzinfo=None,
                    locale=LC_TIME):
    r"""Return a date/time value formatted according to the given pattern.

    >>> dt = datetime(2007, 04, 01, 15, 30)
    >>> format_datetime(dt, locale='en_US')
    u'Apr 1, 2007 3:30:00 PM'

    For any pattern requiring the display of the time-zone, the third-party
    ``pytz`` package is needed to explicitly specify the time-zone:

    >>> from pytz import timezone
    >>> format_datetime(dt, 'full', tzinfo=timezone('Europe/Paris'),
    ...                 locale='fr_FR')
    u'dimanche 1 avril 2007 17:30:00 Heure avanc\xe9e de l\u2019Europe centrale'
    >>> format_datetime(dt, "yyyy.MM.dd G 'at' HH:mm:ss zzz",
    ...                 tzinfo=timezone('US/Eastern'), locale='en')
    u'2007.04.01 AD at 11:30:00 EDT'

    :param datetime: the `datetime` object; if `None`, the current date and
                     time is used
    :param format: one of "full", "long", "medium", or "short", or a custom
                   date/time pattern
    :param tzinfo: the timezone to apply to the time for display
    :param locale: a `Locale` object or a locale identifier
    :rtype: `unicode`
    """
    # Coerce the input into a timezone-aware datetime (naive values are
    # treated as UTC; ints are POSIX timestamps; times get today's date).
    if datetime is None:
        datetime = datetime_.utcnow()
    elif isinstance(datetime, (int, long)):
        datetime = datetime_.utcfromtimestamp(datetime)
    elif isinstance(datetime, time):
        datetime = datetime_.combine(date.today(), datetime)
    if datetime.tzinfo is None:
        datetime = datetime.replace(tzinfo=UTC)
    if tzinfo is not None:
        datetime = datetime.astimezone(tzinfo)
        if hasattr(tzinfo, 'normalize'): # pytz
            datetime = tzinfo.normalize(datetime)

    locale = Locale.parse(locale)
    if format not in ('full', 'long', 'medium', 'short'):
        return parse_pattern(format).apply(datetime, locale)
    # Named style: combine the separately-formatted time ({0}) and
    # date ({1}) parts using the locale's datetime template.
    template = get_datetime_format(format, locale=locale)
    time_part = format_time(datetime, format, tzinfo=None, locale=locale)
    date_part = format_date(datetime, format, locale=locale)
    return template.replace('{0}', time_part).replace('{1}', date_part)
def format_time(time=None, format='medium', tzinfo=None, locale=LC_TIME):
    r"""Return a time formatted according to the given pattern.

    >>> t = time(15, 30)
    >>> format_time(t, locale='en_US')
    u'3:30:00 PM'
    >>> format_time(t, format='short', locale='de_DE')
    u'15:30'

    If you don't want to use the locale default formats, you can specify a
    custom time pattern:

    >>> format_time(t, "hh 'o''clock' a", locale='en')
    u"03 o'clock PM"

    When a ``datetime.datetime`` value is passed, the formatted time is
    adjusted to the timezone given by `tzinfo` (naive datetimes are assumed
    to be in UTC). A plain ``datetime.time`` value carries no date, so no
    timezone conversion is possible; the value is left as-is and `tzinfo`
    only affects the displayed timezone name:

    >>> from pytz import timezone
    >>> t = datetime(2007, 4, 1, 15, 30)
    >>> tzinfo = timezone('Europe/Paris')
    >>> t = tzinfo.localize(t)
    >>> format_time(t, format='full', tzinfo=tzinfo, locale='fr_FR')
    u'15:30:00 Heure avanc\xe9e de l\u2019Europe centrale'
    >>> format_time(t, "hh 'o''clock' a, zzzz", tzinfo=timezone('US/Eastern'),
    ...             locale='en')
    u"09 o'clock AM, Eastern Daylight Time"
    >>> t = time(15, 30)
    >>> format_time(t, format='full', tzinfo=timezone('Europe/Paris'),
    ...             locale='fr_FR')
    u'15:30:00 Heure normale de l\u2019Europe centrale'
    >>> format_time(t, format='full', tzinfo=timezone('US/Eastern'),
    ...             locale='en_US')
    u'3:30:00 PM Eastern Standard Time'

    :param time: the ``time`` or ``datetime`` object; if `None`, the current
                 time in UTC is used
    :param format: one of "full", "long", "medium", or "short", or a custom
                   date/time pattern
    :param tzinfo: the time-zone to apply to the time for display
    :param locale: a `Locale` object or a locale identifier
    :rtype: `unicode`

    :note: If the pattern contains date fields, an `AttributeError` will be
           raised when trying to apply the formatting. This is also true if
           the value of ``time`` parameter is actually a ``datetime`` object,
           as this function automatically converts that to a ``time``.
    """
    # Coerce the input: default to "now" in UTC, interpret ints as POSIX
    # timestamps, and treat naive values as UTC.
    if time is None:
        time = datetime.utcnow()
    elif isinstance(time, (int, long)):
        time = datetime.utcfromtimestamp(time)
    if time.tzinfo is None:
        time = time.replace(tzinfo=UTC)

    if isinstance(time, datetime):
        # Full datetimes can be converted to the display timezone.
        if tzinfo is not None:
            time = time.astimezone(tzinfo)
            if hasattr(tzinfo, 'normalize'): # pytz
                time = tzinfo.normalize(time)
        time = time.timetz()
    elif tzinfo is not None:
        # Bare times cannot be converted; only attach the tzinfo for display.
        time = time.replace(tzinfo=tzinfo)

    locale = Locale.parse(locale)
    if format not in ('full', 'long', 'medium', 'short'):
        return parse_pattern(format).apply(time, locale)
    pattern = get_time_format(format, locale=locale)
    return parse_pattern(pattern).apply(time, locale)
#: (unit name, seconds per unit) pairs in decreasing order of size, used by
#: format_timedelta; months are approximated as 30 days and years as 365.
TIMEDELTA_UNITS = (
    ('year', 3600 * 24 * 365),
    ('month', 3600 * 24 * 30),
    ('week', 3600 * 24 * 7),
    ('day', 3600 * 24),
    ('hour', 3600),
    ('minute', 60),
    ('second', 1)
)
def format_timedelta(delta, granularity='second', threshold=.85, locale=LC_TIME):
    """Format a time delta as a localized, human-readable string.

    >>> format_timedelta(timedelta(weeks=12), locale='en_US')
    u'3 mths'
    >>> format_timedelta(timedelta(seconds=1), locale='es')
    u'1 s'

    The ``granularity`` argument names the smallest unit that may appear in
    the output, defaulting to a second:

    >>> format_timedelta(timedelta(hours=3), granularity='day',
    ...                  locale='en_US')
    u'1 day'

    ``threshold`` controls how large a value must be, as a fraction of the
    next bigger unit, before the presentation switches to that unit — a
    higher factor means the switch happens later:

    >>> format_timedelta(timedelta(hours=23), threshold=0.9, locale='en_US')
    u'1 day'
    >>> format_timedelta(timedelta(hours=23), threshold=1.1, locale='en_US')
    u'23 hrs'

    :param delta: a ``timedelta`` object representing the time difference to
                  format, or the delta in seconds as an `int` value
    :param granularity: the smallest unit that should be displayed; one of
                        "year", "month", "week", "day", "hour", "minute" or
                        "second"
    :param threshold: factor that determines at which point the presentation
                      switches to the next higher unit
    :param locale: a `Locale` object or a locale identifier
    :rtype: `unicode`
    """
    if isinstance(delta, timedelta):
        total_secs = int((delta.days * 86400) + delta.seconds)
    else:
        total_secs = delta
    loc = Locale.parse(locale)
    for unit_name, unit_secs in TIMEDELTA_UNITS:
        amount = abs(total_secs) / unit_secs
        # Skip units that are both too large for the value and coarser than
        # the requested granularity.
        if amount < threshold and unit_name != granularity:
            continue
        # Never render "0 <granularity>" for a non-zero remainder.
        if unit_name == granularity and amount > 0:
            amount = max(1, amount)
        amount = int(round(amount))
        plural = loc.plural_form(amount)
        pattern = loc._data['unit_patterns'][unit_name][plural]
        return pattern.replace('{0}', str(amount))
    return u''
def parse_date(string, locale=LC_TIME):
    """Parse a date from a string.

    This function uses the date format for the locale as a hint to determine
    the order in which the date fields appear in the string.

    >>> parse_date('4/1/04', locale='en_US')
    datetime.date(2004, 4, 1)
    >>> parse_date('01.04.2004', locale='de_DE')
    datetime.date(2004, 4, 1)

    :param string: the string containing the date
    :param locale: a `Locale` object or a locale identifier
    :return: the parsed date
    :rtype: `date`
    """
    # TODO: try ISO format first?
    format = get_date_format(locale=locale).pattern.lower()
    year_idx = format.index('y')
    # str.index() raises ValueError instead of returning -1, so use find()
    # here: otherwise the fallback to the stand-alone month field 'l' below
    # would be unreachable dead code.
    month_idx = format.find('m')
    if month_idx < 0:
        month_idx = format.index('l')
    day_idx = format.index('d')
    # Sort the fields by their position in the pattern and map each logical
    # field (Y/M/D) to its rank, i.e. its index in the numbers found below.
    indexes = [(year_idx, 'Y'), (month_idx, 'M'), (day_idx, 'D')]
    indexes.sort()
    indexes = dict([(item[1], idx) for idx, item in enumerate(indexes)])
    # FIXME: this currently only supports numbers, but should also support month
    # names, both in the requested locale, and english
    numbers = re.findall(r'(\d+)', string)
    year = numbers[indexes['Y']]
    if len(year) == 2:
        # Two-digit years are assumed to fall in the 21st century.
        year = 2000 + int(year)
    else:
        year = int(year)
    month = int(numbers[indexes['M']])
    day = int(numbers[indexes['D']])
    if month > 12:
        # The month slot holds an impossible value; assume the input had
        # day and month in the opposite order and swap them.
        month, day = day, month
    return date(year, month, day)
def parse_datetime(string, locale=LC_TIME):
    """Parse a combined date and time from a string.

    The date and time formats of the given locale are intended to serve as
    a hint for the order in which the individual fields appear in the
    string.

    :param string: the string containing the date and time
    :param locale: a `Locale` object or a locale identifier
    :return: the parsed date/time
    :rtype: `datetime`
    """
    # Not implemented yet; see parse_date/parse_time for the field-order
    # heuristic this is expected to follow.
    raise NotImplementedError
def parse_time(string, locale=LC_TIME):
    """Parse a time from a string.

    This function uses the time format for the locale as a hint to determine
    the order in which the time fields appear in the string.

    >>> parse_time('15:30:00', locale='en_US')
    datetime.time(15, 30)

    :param string: the string containing the time
    :param locale: a `Locale` object or a locale identifier
    :return: the parsed time
    :rtype: `time`
    """
    # TODO: try ISO format first?
    format = get_time_format(locale=locale).pattern.lower()
    # str.index() raises ValueError instead of returning -1, so use find()
    # here: otherwise the fallback to the 1-24 hour field 'k' below would
    # be unreachable dead code.
    hour_idx = format.find('h')
    if hour_idx < 0:
        hour_idx = format.index('k')
    min_idx = format.index('m')
    sec_idx = format.index('s')
    # Sort the fields by their position in the pattern and map each logical
    # field (H/M/S) to its rank, i.e. its index in the numbers found below.
    indexes = [(hour_idx, 'H'), (min_idx, 'M'), (sec_idx, 'S')]
    indexes.sort()
    indexes = dict([(item[1], idx) for idx, item in enumerate(indexes)])
    # FIXME: support 12 hour clock, and 0-based hour specification
    # and seconds should be optional, maybe minutes too
    # oh, and time-zones, of course
    numbers = re.findall(r'(\d+)', string)
    hour = int(numbers[indexes['H']])
    minute = int(numbers[indexes['M']])
    second = int(numbers[indexes['S']])
    return time(hour, minute, second)
class DateTimePattern(object):
    """A parsed CLDR date/time pattern.

    Pairs the original pattern string with the derived ``%``-style format
    string; applying the pattern expands its fields against a
    ``DateTimeFormat`` mapping.
    """

    def __init__(self, pattern, format):
        self.pattern = pattern
        self.format = format

    def __repr__(self):
        return '<{0} {1!r}>'.format(type(self).__name__, self.pattern)

    def __unicode__(self):
        return self.pattern

    def __mod__(self, other):
        # Only a DateTimeFormat mapping knows how to expand the fields.
        if type(other) is DateTimeFormat:
            return self.format % other
        return NotImplemented

    def apply(self, datetime, locale):
        """Format the given date/time value for the given locale."""
        return self % DateTimeFormat(datetime, locale)
class DateTimeFormat(object):
    """Field-expansion mapping used to apply a `DateTimePattern` to a
    concrete date/time value for a given locale.

    Each field token of a pattern — a run of one repeated character such as
    ``'yyyy'`` or ``'MMM'`` — is resolved through ``__getitem__``, which
    dispatches on the pattern character.
    """

    def __init__(self, value, locale):
        # Only date/datetime/time values are accepted; naive datetime and
        # time values are assumed to be in UTC.
        assert isinstance(value, (date, datetime, time))
        if isinstance(value, (datetime, time)) and value.tzinfo is None:
            value = value.replace(tzinfo=UTC)
        self.value = value
        self.locale = Locale.parse(locale)

    def __getitem__(self, name):
        # ``name`` is a run of identical pattern characters: the character
        # selects the field, the run length selects the display width.
        char = name[0]
        num = len(name)
        if char == 'G':
            return self.format_era(char, num)
        elif char in ('y', 'Y', 'u'):
            return self.format_year(char, num)
        elif char in ('Q', 'q'):
            return self.format_quarter(char, num)
        elif char in ('M', 'L'):
            return self.format_month(char, num)
        elif char in ('w', 'W'):
            return self.format_week(char, num)
        elif char == 'd':
            return self.format(self.value.day, num)
        elif char == 'D':
            return self.format_day_of_year(num)
        elif char == 'F':
            return self.format_day_of_week_in_month()
        elif char in ('E', 'e', 'c'):
            return self.format_weekday(char, num)
        elif char == 'a':
            return self.format_period(char)
        elif char == 'h':
            # 'h': 12-hour clock, 1-12 (noon/midnight rendered as 12).
            if self.value.hour % 12 == 0:
                return self.format(12, num)
            else:
                return self.format(self.value.hour % 12, num)
        elif char == 'H':
            # 'H': 24-hour clock, 0-23.
            return self.format(self.value.hour, num)
        elif char == 'K':
            # 'K': 12-hour clock, 0-11.
            return self.format(self.value.hour % 12, num)
        elif char == 'k':
            # 'k': 24-hour clock, 1-24 (midnight rendered as 24).
            if self.value.hour == 0:
                return self.format(24, num)
            else:
                return self.format(self.value.hour, num)
        elif char == 'm':
            return self.format(self.value.minute, num)
        elif char == 's':
            return self.format(self.value.second, num)
        elif char == 'S':
            return self.format_frac_seconds(num)
        elif char == 'A':
            return self.format_milliseconds_in_day(num)
        elif char in ('z', 'Z', 'v', 'V'):
            return self.format_timezone(char, num)
        else:
            raise KeyError('Unsupported date/time field %r' % char)

    def format_era(self, char, num):
        # Widths 1-2 fall back to the abbreviated form; era 0 is "before",
        # era 1 is "after" (year >= 0).
        width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[max(3, num)]
        era = int(self.value.year >= 0)
        return get_era_names(width, self.locale)[era]

    def format_year(self, char, num):
        value = self.value.year
        if char.isupper():
            # 'Y'/'U' are week-based years: a date falling in week 0
            # belongs to the previous year's last week.
            week = self.get_week_number(self.get_day_of_year())
            if week == 0:
                value -= 1
        year = self.format(value, num)
        if num == 2:
            # Two-digit years are truncated, e.g. 2006 -> '06'.
            year = year[-2:]
        return year

    def format_quarter(self, char, num):
        quarter = (self.value.month - 1) // 3 + 1
        if num <= 2:
            # Numeric quarter, zero-padded to the requested width.
            return ('%%0%dd' % num) % quarter
        width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[num]
        context = {'Q': 'format', 'q': 'stand-alone'}[char]
        return get_quarter_names(width, context, self.locale)[quarter]

    def format_month(self, char, num):
        if num <= 2:
            # Numeric month, zero-padded to the requested width.
            return ('%%0%dd' % num) % self.value.month
        width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[num]
        context = {'M': 'format', 'L': 'stand-alone'}[char]
        return get_month_names(width, context, self.locale)[self.value.month]

    def format_week(self, char, num):
        if char.islower(): # week of year
            day_of_year = self.get_day_of_year()
            week = self.get_week_number(day_of_year)
            if week == 0:
                # Week 0 means the day counts as part of the previous
                # year's last week; recompute relative to that year.
                date = self.value - timedelta(days=day_of_year)
                week = self.get_week_number(self.get_day_of_year(date),
                                            date.weekday())
            return self.format(week, num)
        else: # week of month
            week = self.get_week_number(self.value.day)
            if week == 0:
                # Same idea for week 0 of the month: it belongs to the
                # previous month's last week.
                date = self.value - timedelta(days=self.value.day)
                week = self.get_week_number(date.day, date.weekday())
                pass
            return '%d' % week

    def format_weekday(self, char, num):
        if num < 3:
            if char.islower():
                # Numeric local weekday ('e'/'c'): 1-based, rotated so the
                # locale's first week day maps to 1.
                value = 7 - self.locale.first_week_day + self.value.weekday()
                return self.format(value % 7 + 1, num)
            num = 3
        weekday = self.value.weekday()
        width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[num]
        # NOTE(review): context is selected by width here, whereas CLDR ties
        # "stand-alone" to the 'c' pattern character -- confirm intended.
        context = {3: 'format', 4: 'format', 5: 'stand-alone'}[num]
        return get_day_names(width, context, self.locale)[weekday]

    def format_day_of_year(self, num):
        return self.format(self.get_day_of_year(), num)

    def format_day_of_week_in_month(self):
        # e.g. the 2nd Wednesday in July renders as '2'.
        return '%d' % ((self.value.day - 1) // 7 + 1)

    def format_period(self, char):
        # 'am' for hours 0-11, 'pm' for hours 12-23.
        period = {0: 'am', 1: 'pm'}[int(self.value.hour >= 12)]
        return get_period_names(locale=self.locale)[period]

    def format_frac_seconds(self, num):
        # NOTE(review): str(microsecond) is not zero-padded to 6 digits, so
        # e.g. microsecond=1000 yields '.1000' (0.1s) rather than 0.001s --
        # verify against the intended fractional-seconds semantics.
        value = str(self.value.microsecond)
        return self.format(round(float('.%s' % value), num) * 10**num, num)

    def format_milliseconds_in_day(self, num):
        # Milliseconds elapsed since the start of the day.
        msecs = self.value.microsecond // 1000 + self.value.second * 1000 + \
                self.value.minute * 60000 + self.value.hour * 3600000
        return self.format(msecs, num)

    def format_timezone(self, char, num):
        # Widths 1-3 select the short form, width 4 the long form.
        width = {3: 'short', 4: 'long'}[max(3, num)]
        if char == 'z':
            # Specific (DST-aware) zone name for the wrapped value.
            return get_timezone_name(self.value, width, locale=self.locale)
        elif char == 'Z':
            # GMT offset representation, e.g. "GMT+01:00".
            return get_timezone_gmt(self.value, width, locale=self.locale)
        elif char == 'v':
            # Generic (non-DST-specific) zone name.
            return get_timezone_name(self.value.tzinfo, width,
                                     locale=self.locale)
        elif char == 'V':
            if num == 1:
                return get_timezone_name(self.value.tzinfo, width,
                                         uncommon=True, locale=self.locale)
            return get_timezone_location(self.value.tzinfo, locale=self.locale)

    def format(self, value, length):
        # Zero-pad ``value`` to at least ``length`` digits.
        return ('%%0%dd' % length) % value

    def get_day_of_year(self, date=None):
        # 1-based ordinal day within the year of ``date`` (defaults to the
        # wrapped value).
        if date is None:
            date = self.value
        return (date - date_(date.year, 1, 1)).days + 1

    def get_week_number(self, day_of_period, day_of_week=None):
        """Return the number of the week of a day within a period. This may be
        the week number in a year or the week number in a month.

        Usually this will return a value equal to or greater than 1, but if the
        first week of the period is so short that it actually counts as the last
        week of the previous period, this function will return 0.

        >>> format = DateTimeFormat(date(2006, 1, 8), Locale.parse('de_DE'))
        >>> format.get_week_number(6)
        1
        >>> format = DateTimeFormat(date(2006, 1, 8), Locale.parse('en_US'))
        >>> format.get_week_number(6)
        2

        :param day_of_period: the number of the day in the period (usually
                              either the day of month or the day of year)
        :param day_of_week: the week day; if omitted, the week day of the
                            current date is assumed
        """
        if day_of_week is None:
            day_of_week = self.value.weekday()
        # Weekday index of the first day of the period, relative to the
        # locale's first week day.
        first_day = (day_of_week - self.locale.first_week_day -
                     day_of_period + 1) % 7
        if first_day < 0:
            first_day += 7
        week_number = (day_of_period + first_day - 1) // 7
        # Count the partial first week only if it meets the locale's
        # minimum-days-in-first-week requirement.
        if 7 - first_day >= self.locale.min_week_days:
            week_number += 1
        return week_number
#: Maps each supported pattern character to the list of field lengths that
#: are valid for it, or ``None`` when any length is accepted.  Used by
#: ``parse_pattern`` to validate fields.
PATTERN_CHARS = {
    'G': [1, 2, 3, 4, 5],                                           # era
    'y': None, 'Y': None, 'u': None,                                # year
    'Q': [1, 2, 3, 4], 'q': [1, 2, 3, 4],                           # quarter
    'M': [1, 2, 3, 4, 5], 'L': [1, 2, 3, 4, 5],                     # month
    'w': [1, 2], 'W': [1],                                          # week
    'd': [1, 2], 'D': [1, 2, 3], 'F': [1], 'g': None,               # day
    'E': [1, 2, 3, 4, 5], 'e': [1, 2, 3, 4, 5], 'c': [1, 3, 4, 5],  # week day
    'a': [1],                                                       # period
    'h': [1, 2], 'H': [1, 2], 'K': [1, 2], 'k': [1, 2],             # hour
    'm': [1, 2],                                                    # minute
    's': [1, 2], 'S': None, 'A': None,                              # second
    'z': [1, 2, 3, 4], 'Z': [1, 2, 3, 4], 'v': [1, 4], 'V': [1, 4]  # zone
}
def parse_pattern(pattern):
    """Parse date, time, and datetime format patterns.

    >>> parse_pattern("MMMMd").format
    u'%(MMMM)s%(d)s'
    >>> parse_pattern("MMM d, yyyy").format
    u'%(MMM)s %(d)s, %(yyyy)s'

    Pattern can contain literal strings in single quotes:

    >>> parse_pattern("H:mm' Uhr 'z").format
    u'%(H)s:%(mm)s Uhr %(z)s'

    An actual single quote can be used by using two adjacent single quote
    characters:

    >>> parse_pattern("hh' o''clock'").format
    u"%(hh)s o'clock"

    :param pattern: the formatting pattern to parse
    :return: a `DateTimePattern` pairing the original pattern with the
             derived ``%``-style format string
    :raises ValueError: if a field in the pattern has an invalid length
    """
    if type(pattern) is DateTimePattern:
        # Already parsed; nothing to do.
        return pattern
    result = []
    quotebuf = None  # chars collected inside a quoted literal, or None
    charbuf = []     # pending unquoted literal chars
    # One-element lists serve as mutable cells so the nested helpers can
    # rebind the current field state (this code predates ``nonlocal``).
    fieldchar = ['']
    fieldnum = [0]
    def append_chars():
        # Flush pending literal text, escaping '%' for the %-format string.
        result.append(''.join(charbuf).replace('%', '%%'))
        del charbuf[:]
    def append_field():
        # Flush the current field, validating its length against
        # PATTERN_CHARS first.
        limit = PATTERN_CHARS[fieldchar[0]]
        if limit and fieldnum[0] not in limit:
            raise ValueError('Invalid length for field: %r'
                             % (fieldchar[0] * fieldnum[0]))
        result.append('%%(%s)s' % (fieldchar[0] * fieldnum[0]))
        fieldchar[0] = ''
        fieldnum[0] = 0
    # "''" is an escaped single quote; hide it behind NUL while scanning so
    # it cannot be mistaken for a quote delimiter, and restore it at the end.
    for char in pattern.replace("''", '\0'):
        if quotebuf is None:
            if char == "'": # quote started
                if fieldchar[0]:
                    append_field()
                elif charbuf:
                    append_chars()
                quotebuf = []
            elif char in PATTERN_CHARS:
                if charbuf:
                    append_chars()
                if char == fieldchar[0]:
                    # Same field character repeated: widen the field.
                    fieldnum[0] += 1
                else:
                    if fieldchar[0]:
                        append_field()
                    fieldchar[0] = char
                    fieldnum[0] = 1
            else:
                if fieldchar[0]:
                    append_field()
                charbuf.append(char)
        else:
            if char == "'": # end of quote
                charbuf.extend(quotebuf)
                quotebuf = None
            else: # inside quote
                quotebuf.append(char)
    # Flush whatever state remains at end of input.
    if fieldchar[0]:
        append_field()
    elif charbuf:
        append_chars()
    return DateTimePattern(pattern, u''.join(result).replace('\0', "'"))
| 37.5 | 81 | 0.599545 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.