blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 4
721
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 5
91
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 321
values | visit_date
timestamp[ns]date 2016-08-12 09:31:09
2023-09-06 10:45:07
| revision_date
timestamp[ns]date 2010-09-28 14:01:40
2023-09-06 06:22:19
| committer_date
timestamp[ns]date 2010-09-28 14:01:40
2023-09-06 06:22:19
| github_id
int64 426
681M
| star_events_count
int64 101
243k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[ns]date 2012-06-28 18:51:49
2023-09-14 21:59:16
⌀ | gha_created_at
timestamp[ns]date 2008-02-11 22:55:26
2023-08-10 11:14:58
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 26
values | language
stringclasses 2
values | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 6
10.2M
| extension
stringclasses 115
values | filename
stringlengths 3
113
| content
stringlengths 6
10.2M
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
abeff4c1fe56632bd1a1386e703f8df49c2f8a09
|
96dcea595e7c16cec07b3f649afd65f3660a0bad
|
/tests/components/rflink/test_utils.py
|
9a9caebab174a6b4204b536f8491d50b4e0e3f30
|
[
"Apache-2.0"
] |
permissive
|
home-assistant/core
|
3455eac2e9d925c92d30178643b1aaccf3a6484f
|
80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743
|
refs/heads/dev
| 2023-08-31T15:41:06.299469
| 2023-08-31T14:50:53
| 2023-08-31T14:50:53
| 12,888,993
| 35,501
| 20,617
|
Apache-2.0
| 2023-09-14T21:50:15
| 2013-09-17T07:29:48
|
Python
|
UTF-8
|
Python
| false
| false
| 1,182
|
py
|
test_utils.py
|
"""Test for RFLink utils methods."""
from homeassistant.components.rflink.utils import (
brightness_to_rflink,
rflink_to_brightness,
)
from homeassistant.core import HomeAssistant
async def test_utils(hass: HomeAssistant, monkeypatch) -> None:
    """Test all utils methods."""
    # brightness_to_rflink: map a 0-255 brightness onto the 0-15 RFLink scale.
    brightness_cases = (
        (0, 0),
        (17, 1),
        (34, 2),
        (85, 5),
        (170, 10),
        (255, 15),
        (10, 0),
        (20, 1),
        (30, 1),
        (40, 2),
        (50, 2),
        (60, 3),
        (70, 4),
        (80, 4),
    )
    for brightness, rflink_level in brightness_cases:
        assert brightness_to_rflink(brightness) == rflink_level

    # rflink_to_brightness: map the 0-15 RFLink scale back onto 0-255.
    rflink_cases = ((0, 0), (1, 17), (5, 85), (10, 170), (12, 204), (15, 255))
    for rflink_level, brightness in rflink_cases:
        assert rflink_to_brightness(rflink_level) == brightness
|
b09bdd2d30733bffcee6651b732186ad286c9640
|
f450e9146c7bf938ee3ec306655df64da3cef686
|
/clickhouse_sqlalchemy/sql/ddl.py
|
d651646198d822fa287511e9d0b0e89ee7cb9acf
|
[
"MIT"
] |
permissive
|
xzkostyan/clickhouse-sqlalchemy
|
7bd3a7f5b1df8225943a156146eed2e6080bff42
|
060c60131a7a830cb691c23bc9c9931f5e3e19cc
|
refs/heads/master
| 2023-09-01T14:09:43.331638
| 2023-07-27T07:33:27
| 2023-07-27T07:33:27
| 86,639,356
| 355
| 133
|
NOASSERTION
| 2023-07-27T07:33:28
| 2017-03-29T23:45:58
|
Python
|
UTF-8
|
Python
| false
| false
| 2,650
|
py
|
ddl.py
|
from sqlalchemy.sql.ddl import (
SchemaDropper as SchemaDropperBase, DropTable as DropTableBase,
SchemaGenerator as SchemaGeneratorBase, _CreateDropBase
)
from sqlalchemy.sql.expression import UnaryExpression
from sqlalchemy.sql.operators import custom_op
class DropTable(DropTableBase):
    """DROP TABLE statement that carries ClickHouse-specific options."""

    def __init__(self, element, bind=None, if_exists=False):
        # The cluster name is taken from the table's clickhouse dialect options.
        self.on_cluster = element.dialect_options['clickhouse']['cluster']
        super(DropTable, self).__init__(element, bind=bind, if_exists=if_exists)
class DropView(DropTableBase):
    """DROP statement for (materialized) views.

    Unlike tables, views expose their cluster directly on the element.
    """

    def __init__(self, element, bind=None, if_exists=False):
        self.on_cluster = element.cluster
        super(DropView, self).__init__(
            element, bind=bind, if_exists=if_exists
        )
class SchemaDropper(SchemaDropperBase):
    """Schema dropper with DROP ... IF EXISTS and materialized-view support."""

    def __init__(self, dialect, connection, if_exists=False, **kwargs):
        # Propagate the IF EXISTS preference to every DROP statement we emit.
        self.if_exists = if_exists
        super(SchemaDropper, self).__init__(dialect, connection, **kwargs)

    def visit_table(self, table, **kwargs):
        # Fire the standard before/after drop DDL events around our custom
        # DropTable statement so event listeners keep working.
        table.dispatch.before_drop(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )
        self.connection.execute(DropTable(table, if_exists=self.if_exists))
        table.dispatch.after_drop(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

    def visit_materialized_view(self, table, **kwargs):
        # Note: unlike visit_table, no before/after drop events are fired here.
        self.connection.execute(DropView(table, if_exists=self.if_exists))
class CreateMaterializedView(_CreateDropBase):
    """Represent a CREATE MATERIALIZED VIEW statement."""

    __visit_name__ = "create_materialized_view"

    def __init__(self, element, if_not_exists=False):
        # NOTE: consumed by the statement compiler elsewhere in the package,
        # presumably to emit the IF NOT EXISTS clause — confirm there.
        self.if_not_exists = if_not_exists
        super(CreateMaterializedView, self).__init__(element)
class SchemaGenerator(SchemaGeneratorBase):
    """Schema generator aware of IF NOT EXISTS and materialized views."""

    def __init__(self, dialect, connection, if_not_exists=False, **kwargs):
        # Propagated into every CreateMaterializedView this generator emits.
        self.if_not_exists = if_not_exists
        super(SchemaGenerator, self).__init__(dialect, connection, **kwargs)

    def visit_materialized_view(self, table, **kwargs):
        statement = CreateMaterializedView(table, if_not_exists=self.if_not_exists)
        self.connection.execute(statement)
def ttl_delete(expr):
    """Wrap *expr* as a ClickHouse ``<expr> DELETE`` TTL clause expression."""
    return UnaryExpression(expr, modifier=custom_op('DELETE'))
def ttl_to_disk(expr, disk):
    """Build a ClickHouse ``<expr> TO DISK '<disk>'`` TTL clause.

    Arguments:
        expr: TTL expression the clause applies to.
        disk: name of the target disk (must be a ``str``).

    Raises:
        TypeError: if ``disk`` is not a string.
    """
    # A bare assert is stripped under ``python -O``; raise explicitly so the
    # validation always runs.
    if not isinstance(disk, str):
        raise TypeError('Disk must be str')
    return expr.op('TO DISK')(disk)
def ttl_to_volume(expr, volume):
    """Build a ClickHouse ``<expr> TO VOLUME '<volume>'`` TTL clause.

    Arguments:
        expr: TTL expression the clause applies to.
        volume: name of the target volume (must be a ``str``).

    Raises:
        TypeError: if ``volume`` is not a string.
    """
    # A bare assert is stripped under ``python -O``; raise explicitly so the
    # validation always runs.
    if not isinstance(volume, str):
        raise TypeError('Volume must be str')
    return expr.op('TO VOLUME')(volume)
|
3aa0600acc794d387ee9185b5141b10661eae2a7
|
bb33e6be8316f35decbb2b81badf2b6dcf7df515
|
/source/res/battle_royale/scripts/client/battle_royale/gui/Scaleform/daapi/view/lobby/hangar/carousel/handlers.py
|
5912ab6856a13c15b819646a6644d1112808344b
|
[] |
no_license
|
StranikS-Scan/WorldOfTanks-Decompiled
|
999c9567de38c32c760ab72c21c00ea7bc20990c
|
d2fe9c195825ececc728e87a02983908b7ea9199
|
refs/heads/1.18
| 2023-08-25T17:39:27.718097
| 2022-09-22T06:49:44
| 2022-09-22T06:49:44
| 148,696,315
| 103
| 39
| null | 2022-09-14T17:50:03
| 2018-09-13T20:49:11
|
Python
|
UTF-8
|
Python
| false
| false
| 3,853
|
py
|
handlers.py
|
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: battle_royale/scripts/client/battle_royale/gui/Scaleform/daapi/view/lobby/hangar/carousel/handlers.py
from logging import getLogger
from stats_params import BATTLE_ROYALE_STATS_ENABLED
from gui.shared import event_dispatcher as shared_events
from gui.impl import backport
from gui.impl.gen import R
from helpers import dependency
from gui.Scaleform.locale.MENU import MENU
from gui.prb_control import prbDispatcherProperty
from gui.Scaleform.daapi.view.lobby.hangar.hangar_cm_handlers import SimpleVehicleCMHandler
from gui.impl.gen.view_models.views.battle_royale.equipment_panel_cmp_rent_states import EquipmentPanelCmpRentStates
from skeletons.gui.game_control import IBattleRoyaleRentVehiclesController
_logger = getLogger(__name__)
class VEHICLE(object):
    """Flash context-menu option identifiers for the carousel handler."""
    STATS = 'showVehicleStatistics'
    # NOTE(review): TAKE_TEST_DRIVE and TAKE_RENT share the same string value,
    # so a dict keyed by them collapses to a single entry (see the handlers
    # dict in BRVehicleContextMenuHandler.__init__). This is decompiled code —
    # presumably matching the original game client; confirm before changing.
    TAKE_TEST_DRIVE = 'takeToRent'
    TAKE_RENT = 'takeToRent'
class BRVehicleContextMenuHandler(SimpleVehicleCMHandler):
    """Context-menu handler for Battle Royale hangar-carousel vehicles."""
    __rentVehiclesController = dependency.descriptor(IBattleRoyaleRentVehiclesController)

    def __init__(self, cmProxy, ctx=None):
        # Maps Flash option ids to handler method names on this class.
        # NOTE(review): VEHICLE.TAKE_TEST_DRIVE == VEHICLE.TAKE_RENT, so the
        # 'takeToTestDrive' mapping is overwritten by 'takeToRent' here.
        handlers = {VEHICLE.STATS: 'showVehicleStats',
         VEHICLE.TAKE_TEST_DRIVE: 'takeToTestDrive',
         VEHICLE.TAKE_RENT: 'takeToRent'}
        super(BRVehicleContextMenuHandler, self).__init__(cmProxy, ctx, handlers)

    @prbDispatcherProperty
    def prbDispatcher(self):
        return None

    def getVehCD(self):
        # Compact descriptor of the selected vehicle (set in _initFlashValues).
        return self.vehCD

    def getVehInvID(self):
        # Inventory id of the selected vehicle (set in _initFlashValues).
        return self.vehInvID

    def _initFlashValues(self, ctx):
        # Cache the vehicle's inventory id and compact descriptor from the
        # Flash context; vehCD stays None when the vehicle is not in cache.
        self.vehInvID = int(ctx.inventoryId)
        vehicle = self.itemsCache.items.getVehicle(self.vehInvID)
        self.vehCD = vehicle.intCD if vehicle is not None else None
        return

    def _clearFlashValues(self):
        self.vehInvID = None
        self.vehCD = None
        return

    def _generateOptions(self, ctx=None):
        # Build the context-menu option list based on the vehicle's rent state.
        options = []
        vehicle = self.itemsCache.items.getVehicle(self.getVehInvID())
        if vehicle is None:
            return options
        else:
            if BATTLE_ROYALE_STATS_ENABLED:
                options.extend([self._makeItem(VEHICLE.STATS, MENU.contextmenu(VEHICLE.STATS), {'enabled': True})])
            rentState = self.__rentVehiclesController.getRentState(self.vehCD)
            if rentState != EquipmentPanelCmpRentStates.STATE_NORMAL:
                testDriveStates = (EquipmentPanelCmpRentStates.STATE_TEST_DRIVE_AVAILABLE, EquipmentPanelCmpRentStates.STATE_TEST_DRIVE_ACTIVE)
                rentStates = (EquipmentPanelCmpRentStates.STATE_RENT_AVAILABLE, EquipmentPanelCmpRentStates.STATE_RENT_ACTIVE)
                if rentState in testDriveStates:
                    # Test-drive option is enabled only while still AVAILABLE.
                    days = self.__rentVehiclesController.getNextTestDriveDaysTotal(self.vehCD)
                    text = backport.text(R.strings.menu.battleRoyale.contextMenu.takeTestDrive(), days=days)
                    options.extend([self._makeItem(VEHICLE.TAKE_TEST_DRIVE, text, {'enabled': rentState == EquipmentPanelCmpRentStates.STATE_TEST_DRIVE_AVAILABLE})])
                elif rentState in rentStates:
                    # Rent option additionally requires the player to afford it.
                    isEnough = self.__rentVehiclesController.isEnoughMoneyToPurchase(self.vehCD)
                    isEnabled = rentState == EquipmentPanelCmpRentStates.STATE_RENT_AVAILABLE and isEnough
                    days = self.__rentVehiclesController.getNextRentDaysTotal(self.vehCD)
                    text = backport.text(R.strings.menu.battleRoyale.contextMenu.takeRent(), days=days)
                    options.extend([self._makeItem(VEHICLE.TAKE_RENT, text, {'enabled': isEnabled})])
            return options

    def takeToRent(self):
        self.__rentVehiclesController.purchaseRent(self.vehCD)

    def showVehicleStats(self):
        shared_events.showVehicleStats(self.getVehCD(), 'battleRoyale')
|
125cbb8c5431cf90595322036bc270df38d03944
|
3ca67d69abd4e74b7145b340cdda65532f90053b
|
/BOJ/1213.팰린드롬 만들기/6047198844.py
|
a01ddd5495b1fc5c8ead71611d03d3ae45c60bc1
|
[] |
no_license
|
DKU-STUDY/Algorithm
|
19549516984b52a1c5cd73e1ed1e58f774d6d30e
|
6f78efdbefd8eedab24e43d74c7dae7f95c2893b
|
refs/heads/master
| 2023-02-18T06:48:39.309641
| 2023-02-09T07:16:14
| 2023-02-09T07:16:14
| 258,455,710
| 175
| 49
| null | 2023-02-09T07:16:16
| 2020-04-24T08:42:27
|
Python
|
UTF-8
|
Python
| false
| false
| 338
|
py
|
6047198844.py
|
# BOJ 1213: build the lexicographically smallest palindrome from the input's
# letters, or report failure when no palindrome is possible.
from collections import Counter

s = input()
res = ''
even_alpha = ''
# Iterate letters in sorted order so the left half is lexicographically minimal.
for alpha, cnt in sorted(Counter(s).items()):
    res += alpha * (cnt // 2)
    if cnt % 2 != 0:
        # At most one letter may occur an odd number of times (the middle one).
        if even_alpha == '':
            even_alpha = alpha
        else:
            print("I'm Sorry Hansoo")
            break
else:
    # for/else: runs only when the loop finished without break.
    print(res+even_alpha+res[::-1])
|
dd03f7f9446eb400d88dab5d84d19bfa3bce2aaa
|
d7d63d3a36f147b9f03032f5890546248599ebce
|
/util/mongodb_binaries/errors.py
|
9e3fbe2ca302ccc79c12352a922e0dd9f4a86555
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
no_license
|
mongodb/mongo-perf
|
fb1f5bce96b8387b3c539225b92435d9068644e4
|
d4562b5d2d941613af2424192557b10b9cc24934
|
refs/heads/master
| 2023-08-31T09:02:22.271105
| 2023-06-27T20:46:25
| 2023-06-27T20:46:25
| 404,397
| 259
| 123
| null | 2023-09-14T20:54:36
| 2009-12-07T20:29:03
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 265
|
py
|
errors.py
|
class BinariesNotAvailableError(Exception):
    """Raised when no binary package matching the request can be found."""
class DownloadDirectoryExistsError(Exception):
    """Raised when a download directory exists but does not appear to be a
    binaries directory.
    """
|
cf2ae015a17c43a9df2111edb11f347040c6f1b5
|
5e9576c368e98927e2965bd2fb23bd35d9993d69
|
/featuretools/primitives/standard/aggregation/count_below_mean.py
|
c64a5d1f325c582237ab0292a45595d9e0c5fa1a
|
[
"BSD-3-Clause"
] |
permissive
|
alteryx/featuretools
|
c6e319e063e8e84e7684bf232376f95dc5272160
|
c284c2d27a95b81e0bae913ac90df2b02c8f3b37
|
refs/heads/main
| 2023-08-25T12:21:33.945418
| 2023-08-23T16:30:25
| 2023-08-23T16:30:25
| 102,908,804
| 1,783
| 201
|
BSD-3-Clause
| 2023-09-07T18:53:19
| 2017-09-08T22:15:17
|
Python
|
UTF-8
|
Python
| false
| false
| 1,265
|
py
|
count_below_mean.py
|
import numpy as np
from woodwork.column_schema import ColumnSchema
from woodwork.logical_types import IntegerNullable
from featuretools.primitives.base.aggregation_primitive_base import AggregationPrimitive
class CountBelowMean(AggregationPrimitive):
    """Determines the number of values that are below the mean.

    Args:
        skipna (bool): Determines if to use NA/null values. Defaults to
            True to skip NA/null.

    Examples:
        >>> count_below_mean = CountBelowMean()
        >>> count_below_mean([1, 2, 3, 4, 10])
        3

        The way NaNs are treated can be controlled.

        >>> count_below_mean_skipna = CountBelowMean(skipna=False)
        >>> count_below_mean_skipna([1, 2, 3, 4, 5, None])
        nan
    """

    name = "count_below_mean"
    input_types = [ColumnSchema(semantic_tags={"numeric"})]
    return_type = ColumnSchema(logical_type=IntegerNullable, semantic_tags={"numeric"})
    stack_on_self = False

    def __init__(self, skipna=True):
        self.skipna = skipna

    def get_function(self):
        def count_below_mean(x):
            # Mean computation honors the configured NaN handling.
            series_mean = x.mean(skipna=self.skipna)
            if np.isnan(series_mean):
                # With skipna=False any NaN poisons the mean; mirror that.
                return np.nan
            below = x < series_mean
            return len(x[below])

        return count_below_mean
|
b37b771abc1b8d7e7e4f65d1eb9eb8cb5cbbb0a0
|
e910cca862905577212a514727ca8cbfa9213839
|
/policy_sentry/querying/arns.py
|
43078c17b24fb101595de52443dd3ceccb4295b3
|
[
"MIT"
] |
permissive
|
salesforce/policy_sentry
|
1b0da6af01dd5507087ae7b266389036855ddb94
|
a06d95c4c20722e4a1c51da9943c52bac2154b41
|
refs/heads/master
| 2023-08-31T15:24:58.480240
| 2023-08-28T20:47:16
| 2023-08-29T20:52:56
| 209,652,627
| 1,864
| 151
|
MIT
| 2023-09-09T19:55:39
| 2019-09-19T21:35:53
|
Python
|
UTF-8
|
Python
| false
| false
| 6,020
|
py
|
arns.py
|
"""
Methods that execute specific queries against the SQLite database for the ARN table.
This supports the policy_sentry query functionality
"""
from __future__ import annotations
import logging
import functools
import warnings
from typing import Any
from policy_sentry.querying.arns_v1 import get_arn_type_details_v1
from policy_sentry.shared.constants import POLICY_SENTRY_SCHEMA_VERSION_V2
from policy_sentry.shared.iam_data import (
get_service_prefix_data,
get_iam_definition_schema_version,
)
from policy_sentry.util.arns import does_arn_match, get_service_from_arn
logger = logging.getLogger(__name__)
def get_arn_data(service_prefix: str, resource_type_name: str) -> list[dict[str, Any]]:
    """
    DEPRECATED: Please use get_arn_type_details() instead!

    Get details about ARNs in JSON format.

    Arguments:
        service_prefix: An AWS service prefix, like `s3` or `kms`
        resource_type_name: The name of a resource type, like `bucket` or `object`. To get details on ALL arns in a service, specify "*" here.

    Returns:
        List: A list of dictionaries of metadata about matching ARN types
    """
    warnings.warn("Please use get_arn_type_details() instead", DeprecationWarning)
    service_prefix_data = get_service_prefix_data(service_prefix)
    # Case-insensitive match on the resource type; iterate values() directly
    # since the resource-name keys were never used.
    target = resource_type_name.lower()
    return [
        {
            "resource_type_name": resource_data["resource"],
            "raw_arn": resource_data["arn"],
            "condition_keys": resource_data["condition_keys"],
        }
        for resource_data in service_prefix_data["resources"].values()
        if resource_data["resource"].lower() == target
    ]
@functools.lru_cache(maxsize=1024)
def get_raw_arns_for_service(service_prefix: str) -> list[str]:
    """
    Get a list of available raw ARNs per AWS service

    Arguments:
        service_prefix: An AWS service prefix, like `s3` or `kms`
    Returns:
        List: A list of raw ARNs
    """
    prefix_data = get_service_prefix_data(service_prefix)
    resources = prefix_data["resources"]
    return [resource["arn"] for resource in resources.values()]
@functools.lru_cache(maxsize=1024)
def get_arn_types_for_service(service_prefix: str) -> dict[str, str]:
    """
    Get the available ARN short names per AWS service.

    Arguments:
        service_prefix: An AWS service prefix, like `s3` or `kms`
    Returns:
        Dict: A mapping of ARN type name (like `bucket` or `object`) to its raw ARN
    """
    service_prefix_data = get_service_prefix_data(service_prefix)
    return {
        resource_name: resource_data["arn"]
        for resource_name, resource_data in service_prefix_data["resources"].items()
    }
def get_arn_type_details(
    service_prefix: str, resource_type_name: str
) -> dict[str, Any]:
    """
    Get details about ARNs in JSON format.

    Arguments:
        service_prefix: An AWS service prefix, like `s3` or `kms`
        resource_type_name: The name of a resource type, like `bucket` or `object`. To get details on ALL arns in a service, specify "*" here.

    Returns:
        Dictionary: Metadata about an ARN type
    """
    # Dispatch to the implementation matching the loaded IAM definition schema.
    if get_iam_definition_schema_version() == POLICY_SENTRY_SCHEMA_VERSION_V2:
        lookup = get_arn_type_details_v2
    else:
        lookup = get_arn_type_details_v1
    return lookup(
        service_prefix=service_prefix, resource_type_name=resource_type_name
    )
def get_arn_type_details_v2(
    service_prefix: str, resource_type_name: str
) -> dict[str, Any]:
    """
    Get details about ARNs in JSON format (v2).

    Arguments:
        service_prefix: An AWS service prefix, like `s3` or `kms`
        resource_type_name: The name of a resource type, like `bucket` or `object`. To get details on ALL arns in a service, specify "*" here.

    Returns:
        Dictionary: Metadata about an ARN type (empty dict when not found)
    """
    service_prefix_data = get_service_prefix_data(service_prefix)
    # v2 schema keeps a lowercase-name index for case-insensitive lookup.
    canonical_name = service_prefix_data["resources_lower_name"].get(
        resource_type_name.lower()
    )
    if not canonical_name:
        return {}
    resource_data = service_prefix_data["resources"][canonical_name]
    return {
        "resource_type_name": canonical_name,
        "raw_arn": resource_data["arn"],
        "condition_keys": resource_data["condition_keys"],
    }
# pylint: disable=inconsistent-return-statements
def get_resource_type_name_with_raw_arn(raw_arn: str) -> str | None:
    """
    Given a raw ARN, return the resource type name as shown in the database.

    Arguments:
        raw_arn: The raw ARN stored in the database, like 'arn:${Partition}:s3:::${BucketName}'
    Returns:
        String: The resource type name, like bucket (None when no match)
    """
    # arn:partition:service:... — the service prefix is the third element.
    service_prefix = raw_arn.split(":", 5)[2]
    resources = get_service_prefix_data(service_prefix)["resources"]
    lowered_arn = raw_arn.lower()
    for resource_name, resource_data in resources.items():
        if resource_data["arn"].lower() == lowered_arn:
            return resource_name
    return None
def get_matching_raw_arns(arn: str) -> list[str]:
    """
    Given a user-supplied ARN, return the list of raw_arns since that is used as a unique identifier throughout this library

    Arguments:
        arn: The user-supplied arn, like arn:aws:s3:::mybucket
    Returns:
        list(str): The list of raw ARNs stored in the database, like 'arn:${Partition}:s3:::${BucketName}'
    """
    matches: list[str] = []
    service_in_scope = get_service_from_arn(arn)
    # Collect every raw ARN template of the service that matches, de-duplicated
    # while preserving first-seen order.
    for raw_arn in get_raw_arns_for_service(service_in_scope):
        if does_arn_match(arn, raw_arn) and raw_arn not in matches:
            matches.append(raw_arn)
    return matches
|
5bb3815ae55bb1e05fa04d9c6b8dfa47e9775558
|
a1d6c7e0489938bb197f0571ff19f43a9757b069
|
/src/sphinx/conf.py
|
70dcef5e880412742f1e8c73112ba36585159d43
|
[
"BSD-3-Clause",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
harrah/xsbt
|
79b122ce88d937c2b54a3e7e2058fa90001de198
|
92a38c604ee8d449135684b0d5887b6769bffc32
|
refs/heads/0.13
| 2023-05-27T12:45:42.000007
| 2013-01-07T14:58:58
| 2013-01-07T14:58:58
| 279,553
| 829
| 123
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,133
|
py
|
conf.py
|
# -*- coding: utf-8 -*-
# Sphinx configuration for the sbt documentation site.
import sys, os

# Make the bundled extension directory importable (provides 'howto').
sys.path.append(os.path.abspath('_sphinx/exts'))

extensions = ['sphinxcontrib.issuetracker', 'sphinx.ext.extlinks', 'howto']

# Project variables
project = 'sbt'
version = '0.13'
release = '0.13.0-SNAPSHOT'
scalaVersion = "2.10"
scalaRelease = "2.10.0"

# General settings
needs_sphinx = '1.1'
nitpicky = True
default_role = 'literal'
master_doc = 'home'
highlight_language = 'scala'
add_function_parentheses = False

# HTML
html_theme = 'sbt'
html_theme_path = ['_sphinx/themes']
html_title = 'sbt Documentation'
html_domain_indices = False
html_use_index = False
html_show_sphinx = False
htmlhelp_basename = 'sbtdoc'
html_use_smartypants = False

# if true:
#   the Home link is to scala-sbt.org
# if false:
#   the Home link is to home.html for the current documentation version
# TODO: pass this as an argument to sphinx
home_site = True

# Passed to Google as site:<site_search_base>
# If empty, no search box is included
# TODO: pass this as an argument to sphinx, use actual version instead of release
site_search_base = 'http://www.scala-sbt.org/release/docs'

# passes variables to the template
html_context = {'home_site': home_site, 'site_search_base': site_search_base}

# Latex (PDF)
# Tuples are (source start doc, target name, title, author, documentclass, toctree_only).
latex_documents = [
    ('pdf_index', 'sbt.tex', html_title, '', 'manual', True),
    ('Getting-Started/index', 'sbt-Getting-Started.tex', html_title, '', 'manual', True),
]

# Issues role
issuetracker = 'github'
issuetracker_project = 'sbt/sbt'
issuetracker_plaintext_issues = True
issuetracker_issue_pattern = r'\bgh-(\d+)\b'
issuetracker_title_template = '#{issue.id}'

# links, substitutions
typesafe_base = 'http://repo.typesafe.com/typesafe/'
typesafe_ivy_snapshots = typesafe_base + 'ivy-snapshots/'
typesafe_ivy_releases = typesafe_base + 'ivy-releases/'
launcher_release_base = typesafe_ivy_releases + 'org.scala-sbt/sbt-launch/'
launcher_snapshots_base = typesafe_ivy_snapshots + 'org.scala-sbt/sbt-launch/'
sbt_native_package_base = 'http://scalasbt.artifactoryonline.com/scalasbt/sbt-native-packages/org/scala-sbt/sbt/'

# Appended to every source file; defines download-link substitutions via
# old-style %-interpolation over the URL bases above.
rst_epilog = """
.. |scalaVersion| replace:: %(scalaVersion)s
.. |scalaRelease| replace:: %(scalaRelease)s
.. _typesafe-snapshots: %(typesafe_ivy_snapshots)s
.. |typesafe-snapshots| replace:: Typesafe Snapshots
.. _sbt-launch.jar: %(launcher_release_base)s/%(version)s/sbt-launch.jar
.. _MSI: %(sbt_native_package_base)s/%(version)s/sbt.msi
.. _TGZ: %(sbt_native_package_base)s/%(version)s/sbt.tgz
.. _ZIP: %(sbt_native_package_base)s/%(version)s/sbt.zip
.. _DEB: %(sbt_native_package_base)s/%(version)s/sbt.deb
.. _RPM: %(sbt_native_package_base)s/%(version)s/sbt.rpm
.. |nightly-launcher| replace:: <%(launcher_snapshots_base)s
.. _mailing list: http://groups.google.com/group/simple-build-tool/topics
.. _source code: http://github.com/sbt/sbt
""" % {
    'launcher_release_base': launcher_release_base,
    'launcher_snapshots_base': launcher_snapshots_base,
    'version': release,
    'typesafe_ivy_snapshots': typesafe_ivy_snapshots,
    'sbt_native_package_base': sbt_native_package_base,
    'scalaRelease': scalaRelease,
    'scalaVersion': scalaVersion
}
|
862a4caa6c22e37c38bff1714d59a9c8fde50aab
|
4a9b074e9cafebcffa4c8212bb966658c476a33b
|
/neo/Core/UInt256.py
|
1dc611e242604bb5d2440232cc9755ab3829900b
|
[
"LicenseRef-scancode-free-unknown",
"MIT"
] |
permissive
|
CityOfZion/neo-python
|
66a46c2509044d9b52ccce4643b4df74a1f14da2
|
99783bc8310982a5380081ec41a6ee07ba843f3f
|
refs/heads/master
| 2023-07-19T00:04:36.415986
| 2021-11-15T12:15:55
| 2021-11-15T12:15:55
| 97,416,422
| 396
| 288
|
MIT
| 2021-06-01T21:42:39
| 2017-07-16T23:05:32
|
Python
|
UTF-8
|
Python
| false
| false
| 688
|
py
|
UInt256.py
|
from neo.Core.UIntBase import UIntBase
class UInt256(UIntBase):
    """32-byte unsigned integer wrapper (e.g. NEO transaction/block hashes)."""

    def __init__(self, data=None):
        super(UInt256, self).__init__(num_bytes=32, data=data)

    @staticmethod
    def ParseString(value):
        """
        Parse the input str `value` into UInt256

        Raises:
            ValueError: if the input `value` length (after '0x' if present) != 64
        """
        if value.startswith('0x'):
            value = value[2:]
        if len(value) != 64:
            raise ValueError(f"Invalid UInt256 input: {len(value)} chars != 64 chars")
        # The hex string is big-endian; the stored bytes are the reverse.
        return UInt256(data=bytearray.fromhex(value)[::-1])
|
22275fe59726a110709ac65b6c484096ed293d55
|
771fa3a01480527df835c593bf99c5b8ae702b6a
|
/tests/pylt/tests/t-mod-auth.py
|
bdad4152745ab4669d06aa347ed045deb55cf1a6
|
[
"CC0-1.0",
"Apache-2.0",
"MIT"
] |
permissive
|
lighttpd/lighttpd2
|
cee558174b0a07174a4488591c16882845d43240
|
23164d557f10d4ccd948b34c5b1323e22ce5088f
|
refs/heads/master
| 2023-09-05T03:39:50.805344
| 2023-07-23T00:03:44
| 2023-07-23T00:03:44
| 1,767,422
| 459
| 135
|
NOASSERTION
| 2023-01-06T20:16:37
| 2011-05-18T18:13:11
|
C
|
UTF-8
|
Python
| false
| false
| 3,111
|
py
|
t-mod-auth.py
|
# -*- coding: utf-8 -*-
from pylt.base import ModuleTest
from pylt.requests import CurlRequest
# userI:passI for I in [1..4] with [apr-md5, crypt, plain and apr-sha]
PASSWORDS = """user1:$apr1$mhpONdUp$xSRcAbK2F6hLFUzW59tzW/
# user2:JTMoqfZHCS0aI
user3:pass3
user4:{SHA}LbTBgR9CRYKpD41+53mVzwGNlEM=
"""

# user5:pass5 in realm 'Realm1'
DIGESTPASSWORDS = """user5:Realm1:b0590e8c95605dd708226b552fc86a22
"""


# Wrong apr-md5 password is rejected.
class TestAprMd5Fail(CurlRequest):
    URL = "/test.txt"
    EXPECT_RESPONSE_CODE = 401
    AUTH = "user1:test1"


# Correct apr-md5 password is accepted.
class TestAprMd5Success(CurlRequest):
    URL = "/test.txt"
    EXPECT_RESPONSE_CODE = 200
    AUTH = "user2:test2" if False else "user1:pass1"


class TestCryptFail(CurlRequest):
    URL = "/test.txt"
    EXPECT_RESPONSE_CODE = 401
    AUTH = "user2:test2"


# no-prefix crypt deprecated
# class TestCryptSuccess(CurlRequest):
#     URL = "/test.txt"
#     EXPECT_RESPONSE_CODE = 200
#     AUTH = "user2:pass2"


class TestPlainFail(CurlRequest):
    URL = "/test.txt?plain"
    EXPECT_RESPONSE_CODE = 401
    AUTH = "user3:test3"


class TestPlainSuccess(CurlRequest):
    URL = "/test.txt?plain"
    EXPECT_RESPONSE_CODE = 200
    AUTH = "user3:pass3"


class TestAprSha1Fail(CurlRequest):
    URL = "/test.txt"
    EXPECT_RESPONSE_CODE = 401
    AUTH = "user4:test4"


class TestAprSha1Success(CurlRequest):
    URL = "/test.txt"
    EXPECT_RESPONSE_CODE = 200
    AUTH = "user4:pass4"


class TestDigestFail(CurlRequest):
    URL = "/test.txt?digest"
    EXPECT_RESPONSE_CODE = 401
    AUTH = "user5:test5"


class TestDigestSuccess(CurlRequest):
    URL = "/test.txt?digest"
    EXPECT_RESPONSE_CODE = 200
    AUTH = "user5:pass5"


# user4 authenticates but is not in the require_user list -> 403.
class TestRequireUserDeny(CurlRequest):
    URL = "/test.txt?require1"
    EXPECT_RESPONSE_CODE = 403
    AUTH = "user4:pass4"


class TestRequireUserSuccess(CurlRequest):
    URL = "/test.txt?require1"
    EXPECT_RESPONSE_CODE = 200
    AUTH = "user1:pass1"


# auth.deny rejects regardless of credentials.
class TestDeny(CurlRequest):
    URL = "/test.txt?deny"
    EXPECT_RESPONSE_CODE = 403
class Test(ModuleTest):
    def prepare_test(self) -> None:
        # Write the htpasswd/htdigest fixtures and build the lighttpd2 config
        # that the request classes above exercise. Query-string selects the
        # auth backend: ?plain, ?digest, ?deny, otherwise htpasswd.
        passwdfile = self.prepare_file("conf/mod-auth.htpasswd", PASSWORDS)
        digestfile = self.prepare_file("conf/mod-auth.htdigest", DIGESTPASSWORDS)
        self.config = f"""
setup {{ module_load ( "mod_auth" ); }}

auth.debug true;
if req.query == "plain" {{
    auth.plain ["method" => "basic", "realm" => "Basic Auth Realm", "file" => "{passwdfile}", "ttl" => 10];
}} else if req.query == "digest" {{
    auth.htdigest ["method" => "basic", "realm" => "Realm1", "file" => "{digestfile}", "ttl" => 10];
}} else if req.query == "deny" {{
    auth.deny;
}} else {{
    auth.htpasswd [
        "method" => "basic",
        "realm" => "Basic Auth Realm",
        "file" => "{passwdfile}",
        "ttl" => 10,
    ];
}}
if req.query == "require1" {{
    auth.require_user ("user1");
}}
defaultaction;
"""
|
5a80efdd3830c6e2fa2d5f930dcf09f89395559e
|
a1c7055f3e66fb802ae4c3ecdb952ff45579914f
|
/cmd2/rl_utils.py
|
52feacb9b26c02dcb24936595953e643ab333f7f
|
[
"MIT"
] |
permissive
|
python-cmd2/cmd2
|
3e4ef2a1804554e8acd50898636a5685eab292c1
|
9886b82c71face043e1fac871a6cdbebbf0e864c
|
refs/heads/master
| 2023-09-05T14:55:50.702311
| 2023-09-04T16:44:01
| 2023-09-04T16:44:01
| 51,442,999
| 571
| 154
|
MIT
| 2023-09-04T16:44:03
| 2016-02-10T13:34:49
|
Python
|
UTF-8
|
Python
| false
| false
| 10,377
|
py
|
rl_utils.py
|
# coding=utf-8
"""
Imports the proper Readline for the platform and provides utility functions for it
"""
import sys
from enum import (
Enum,
)
from typing import (
Union,
)
#########################################################################################################################
# NOTE ON LIBEDIT:
#
# On Linux/Mac, the underlying readline API may be implemented by libedit instead of GNU readline.
# We don't support libedit because it doesn't implement all the readline features cmd2 needs.
#
# For example:
# cmd2 sets a custom display function using Python's readline.set_completion_display_matches_hook() to
# support many of its advanced tab completion features (e.g. tab completion tables, displaying path basenames,
# colored results, etc.). This function "sets or clears the rl_completion_display_matches_hook callback in the
# underlying library". libedit has never implemented rl_completion_display_matches_hook. It merely sets it to NULL
# and never references it.
#
# The workaround for Python environments using libedit is to install the gnureadline Python library.
#########################################################################################################################
# Prefer statically linked gnureadline if installed due to compatibility issues with libedit
try:
# noinspection PyPackageRequirements
import gnureadline as readline # type: ignore[import]
except ImportError:
# Note: If this actually fails, you should install gnureadline on Linux/Mac or pyreadline3 on Windows.
try:
# noinspection PyUnresolvedReferences
import readline # type: ignore[no-redef]
except ImportError: # pragma: no cover
pass
class RlType(Enum):
    """Readline library types we recognize"""

    GNU = 1  # GNU readline (or the statically linked gnureadline package)
    PYREADLINE = 2  # pyreadline3 (Windows)
    NONE = 3  # no supported readline implementation found
# Check what implementation of Readline we are using
rl_type = RlType.NONE
# Tells if the terminal we are running in supports vt100 control characters
vt100_support = False
# Explanation for why Readline wasn't loaded
_rl_warn_reason = ''
# The order of this check matters since importing pyreadline3 will also show readline in the modules list
if 'pyreadline3' in sys.modules:
rl_type = RlType.PYREADLINE
import atexit
from ctypes import (
byref,
)
from ctypes.wintypes import (
DWORD,
HANDLE,
)
# Check if we are running in a terminal
if sys.stdout is not None and sys.stdout.isatty(): # pragma: no cover
# noinspection PyPep8Naming,PyUnresolvedReferences
def enable_win_vt100(handle: HANDLE) -> bool:
"""
Enables VT100 character sequences in a Windows console
This only works on Windows 10 and up
:param handle: the handle on which to enable vt100
:return: True if vt100 characters are enabled for the handle
"""
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
# Get the current mode for this handle in the console
cur_mode = DWORD(0)
readline.rl.console.GetConsoleMode(handle, byref(cur_mode))
retVal = False
# Check if ENABLE_VIRTUAL_TERMINAL_PROCESSING is already enabled
if (cur_mode.value & ENABLE_VIRTUAL_TERMINAL_PROCESSING) != 0:
retVal = True
elif readline.rl.console.SetConsoleMode(handle, cur_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING):
# Restore the original mode when we exit
atexit.register(readline.rl.console.SetConsoleMode, handle, cur_mode)
retVal = True
return retVal
# Enable VT100 sequences for stdout and stderr
STD_OUT_HANDLE = -11
STD_ERROR_HANDLE = -12
# noinspection PyUnresolvedReferences
vt100_stdout_support = enable_win_vt100(readline.rl.console.GetStdHandle(STD_OUT_HANDLE))
# noinspection PyUnresolvedReferences
vt100_stderr_support = enable_win_vt100(readline.rl.console.GetStdHandle(STD_ERROR_HANDLE))
vt100_support = vt100_stdout_support and vt100_stderr_support
############################################################################################################
# pyreadline3 is incomplete in terms of the Python readline API. Add the missing functions we need.
############################################################################################################
# readline.redisplay()
try:
getattr(readline, 'redisplay')
except AttributeError:
# noinspection PyProtectedMember,PyUnresolvedReferences
readline.redisplay = readline.rl.mode._update_line
# readline.remove_history_item()
try:
getattr(readline, 'remove_history_item')
except AttributeError:
# noinspection PyProtectedMember,PyUnresolvedReferences
def pyreadline_remove_history_item(pos: int) -> None:
"""
An implementation of remove_history_item() for pyreadline3
:param pos: The 0-based position in history to remove
"""
# Save of the current location of the history cursor
saved_cursor = readline.rl.mode._history.history_cursor
# Delete the history item
del readline.rl.mode._history.history[pos]
# Update the cursor if needed
if saved_cursor > pos:
readline.rl.mode._history.history_cursor -= 1
readline.remove_history_item = pyreadline_remove_history_item
elif 'gnureadline' in sys.modules or 'readline' in sys.modules:
# We don't support libedit. See top of this file for why.
if readline.__doc__ is not None and 'libedit' not in readline.__doc__:
try:
# Load the readline lib so we can access members of it
import ctypes
readline_lib = ctypes.CDLL(readline.__file__)
except (AttributeError, OSError): # pragma: no cover
_rl_warn_reason = (
"this application is running in a non-standard Python environment in\n"
"which GNU readline is not loaded dynamically from a shared library file."
)
else:
rl_type = RlType.GNU
vt100_support = sys.stdout.isatty()
# Check if readline was loaded
if rl_type == RlType.NONE: # pragma: no cover
if not _rl_warn_reason:
_rl_warn_reason = (
"no supported version of readline was found. To resolve this, install\n"
"pyreadline3 on Windows or gnureadline on Linux/Mac."
)
rl_warning = "Readline features including tab completion have been disabled because\n" + _rl_warn_reason + '\n\n'
else:
rl_warning = ''
# noinspection PyProtectedMember,PyUnresolvedReferences
def rl_force_redisplay() -> None: # pragma: no cover
"""
Causes readline to display the prompt and input text wherever the cursor is and start
reading input from this location. This is the proper way to restore the input line after
printing to the screen
"""
if not sys.stdout.isatty():
return
if rl_type == RlType.GNU:
readline_lib.rl_forced_update_display()
# After manually updating the display, readline asks that rl_display_fixed be set to 1 for efficiency
display_fixed = ctypes.c_int.in_dll(readline_lib, "rl_display_fixed")
display_fixed.value = 1
elif rl_type == RlType.PYREADLINE:
# Call _print_prompt() first to set the new location of the prompt
readline.rl.mode._print_prompt()
readline.rl.mode._update_line()
# noinspection PyProtectedMember, PyUnresolvedReferences
def rl_get_point() -> int: # pragma: no cover
"""
Returns the offset of the current cursor position in rl_line_buffer
"""
if rl_type == RlType.GNU:
return ctypes.c_int.in_dll(readline_lib, "rl_point").value
elif rl_type == RlType.PYREADLINE:
return int(readline.rl.mode.l_buffer.point)
else:
return 0
# noinspection PyUnresolvedReferences
def rl_get_prompt() -> str: # pragma: no cover
"""Gets Readline's current prompt"""
if rl_type == RlType.GNU:
encoded_prompt = ctypes.c_char_p.in_dll(readline_lib, "rl_prompt").value
if encoded_prompt is None:
prompt = ''
else:
prompt = encoded_prompt.decode(encoding='utf-8')
elif rl_type == RlType.PYREADLINE:
prompt_data: Union[str, bytes] = readline.rl.prompt
if isinstance(prompt_data, bytes):
prompt = prompt_data.decode(encoding='utf-8')
else:
prompt = prompt_data
else:
prompt = ''
return rl_unescape_prompt(prompt)
# noinspection PyUnresolvedReferences
def rl_set_prompt(prompt: str) -> None: # pragma: no cover
"""
Sets Readline's prompt
:param prompt: the new prompt value
"""
escaped_prompt = rl_escape_prompt(prompt)
if rl_type == RlType.GNU:
encoded_prompt = bytes(escaped_prompt, encoding='utf-8')
readline_lib.rl_set_prompt(encoded_prompt)
elif rl_type == RlType.PYREADLINE:
readline.rl.prompt = escaped_prompt
def rl_escape_prompt(prompt: str) -> str:
    """Overcome bug in GNU Readline in relation to calculation of prompt length in presence of ANSI escape codes

    ANSI sequences are zero-width on screen; bracketing them with \x01/\x02
    tells GNU Readline not to count them toward the prompt width.

    :param prompt: original prompt
    :return: prompt safe to pass to GNU Readline
    """
    if rl_type != RlType.GNU:
        return prompt

    # start code to tell GNU Readline about beginning of invisible characters
    escape_start = "\x01"

    # end code to tell GNU Readline about end of invisible characters
    escape_end = "\x02"

    pieces = []
    in_escape = False
    for ch in prompt:
        if ch == "\x1b" and not in_escape:
            # ESC opens an ANSI sequence; mark the invisible span's start.
            pieces.append(escape_start + ch)
            in_escape = True
        elif ch.isalpha() and in_escape:
            # A letter terminates the ANSI sequence; close the invisible span.
            pieces.append(ch + escape_end)
            in_escape = False
        else:
            pieces.append(ch)
    return "".join(pieces)
def rl_unescape_prompt(prompt: str) -> str:
    """Remove escape characters from a Readline prompt"""
    if rl_type == RlType.GNU:
        # Strip both the invisible-start (\x01) and invisible-end (\x02) markers.
        for marker in ("\x01", "\x02"):
            prompt = prompt.replace(marker, "")
    return prompt
|
46400685c5b4a730fba3e232a476157aba2104ac
|
61a148d684047323f866017c6c95e0dc78682c43
|
/core/amber/src/main/python/core/architecture/handlers/update_input_linking_handler.py
|
95b9985dc146bd6ec68abf6f108458d29e207d7c
|
[
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] |
permissive
|
Texera/texera
|
9dd92dd0999fd78ff37cb6241f3395d475549e27
|
ca554ecad8e161b489aa17bdb17c9249ef888b6d
|
refs/heads/master
| 2023-09-03T21:46:42.147647
| 2023-08-31T21:42:30
| 2023-08-31T21:42:30
| 53,976,910
| 129
| 61
|
Apache-2.0
| 2023-09-14T15:53:52
| 2016-03-15T20:38:46
|
Scala
|
UTF-8
|
Python
| false
| false
| 517
|
py
|
update_input_linking_handler.py
|
from core.architecture.handlers.handler_base import Handler
from core.architecture.managers.context import Context
from proto.edu.uci.ics.amber.engine.architecture.worker import UpdateInputLinkingV2
class UpdateInputLinkingHandler(Handler):
    """Handles UpdateInputLinkingV2 commands by registering the new input link
    with the worker's batch-to-tuple converter."""

    # Command type this handler is dispatched for.
    cmd = UpdateInputLinkingV2

    def __call__(
        self, context: Context, command: UpdateInputLinkingV2, *args, **kwargs
    ):
        # Tell the converter which upstream identifier feeds which input link.
        converter = context.batch_to_tuple_converter
        converter.register_input(command.identifier, command.input_link)
        return None
|
6ffbed51119bb8b6b58c1505c00bd20a0ed7e167
|
fa1ad2e2ac7e376fc7cb3b3a6e1bb88eed3e80be
|
/olap/ByConity/tests/testflows/rbac/tests/views/view.py
|
3fd12164ad6d51fe9085390c14146e15629dd7dc
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
alldatacenter/alldata
|
7bc7713c9f1d56ad6b8e59ea03206d1073b7e047
|
8d5f9a2d49ab8f9e85ccf058cb02c2fda287afc6
|
refs/heads/master
| 2023-08-05T07:32:25.442740
| 2023-08-03T13:17:24
| 2023-08-03T13:17:24
| 213,321,771
| 774
| 250
|
Apache-2.0
| 2023-09-06T17:35:32
| 2019-10-07T07:36:18
| null |
UTF-8
|
Python
| false
| false
| 61,176
|
py
|
view.py
|
from testflows.core import *
from testflows.asserts import error
from rbac.requirements import *
from rbac.helper.common import *
import rbac.helper.errors as errors
@TestSuite
@Requirements(
    RQ_SRS_006_RBAC_View_Create("1.0"),
)
def create(self, node=None):
    """Test the RBAC functionality of the `CREATE VIEW` command.

    Runs every CREATE VIEW privilege scenario in sequence.
    """
    Scenario(run=create_without_create_view_privilege, setup=instrument_clickhouse_server_log)
    Scenario(run=create_with_create_view_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=create_with_revoked_create_view_privilege_revoked_directly_or_from_role, setup=instrument_clickhouse_server_log)
    Scenario(run=create_without_source_table_privilege, setup=instrument_clickhouse_server_log)
    Scenario(run=create_with_source_table_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=create_with_subquery_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=create_with_join_query_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=create_with_union_query_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=create_with_join_union_subquery_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=create_with_nested_views_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
@TestScenario
def create_without_create_view_privilege(self, node=None):
    """Check that user is unable to create a view without CREATE VIEW privilege.
    """
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    # Expected "not enough privileges" exitcode/message for this user.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    with user(node, f"{user_name}"):

        with When("I try to create a view without CREATE VIEW privilege as the user"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(f"CREATE VIEW {view_name} AS SELECT 1", settings = [("user", f"{user_name}")],
                exitcode=exitcode, message=message)
@TestScenario
def create_with_create_view_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view with CREATE VIEW privilege, either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Variant 1: privilege granted directly to the user.
    with user(node, f"{user_name}"):

        Scenario(test=create_with_create_view_privilege,
            name="create with create view privilege granted directly")(grant_target_name=user_name, user_name=user_name)

    # Variant 2: privilege granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):

        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(test=create_with_create_view_privilege,
            name="create with create view privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_create_view_privilege(self, grant_target_name, user_name, node=None):
    """Check that user is able to create a view with the granted privileges.

    :param grant_target_name: user or role that receives the privilege
    :param user_name: user that executes the CREATE VIEW statement
    """
    view_name = f"view_{getuid()}"

    if node is None:
        node = self.context.node

    try:
        with When("I grant the CREATE VIEW privilege"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")

        with Then("I try to create a view without privilege as the user"):
            node.query(f"CREATE VIEW {view_name} AS SELECT 1", settings = [("user", f"{user_name}")])

    finally:
        # Cleanup runs even if the CREATE succeeded or an assertion failed.
        with Then("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_revoked_create_view_privilege_revoked_directly_or_from_role(self, node=None):
    """Check that user is unable to create view after the CREATE VIEW privilege is revoked, either directly or from a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Variant 1: privilege revoked directly from the user.
    with user(node, f"{user_name}"):

        Scenario(test=create_with_revoked_create_view_privilege,
            name="create with create view privilege revoked directly")(grant_target_name=user_name, user_name=user_name)

    # Variant 2: privilege revoked from a role the user holds.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):

        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(test=create_with_revoked_create_view_privilege,
            name="create with create view privilege revoked from a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_revoked_create_view_privilege(self, grant_target_name, user_name, node=None):
    """Revoke CREATE VIEW privilege and check the user is unable to create a view.

    :param grant_target_name: user or role the privilege is granted to and revoked from
    :param user_name: user that executes the CREATE VIEW statement
    """
    view_name = f"view_{getuid()}"
    # Expected "not enough privileges" exitcode/message for this user.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    with When("I grant CREATE VIEW privilege"):
        node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")

    with And("I revoke CREATE VIEW privilege"):
        node.query(f"REVOKE CREATE VIEW ON {view_name} FROM {grant_target_name}")

    with Then("I try to create a view on the table as the user"):
        node.query(f"CREATE VIEW {view_name} AS SELECT 1", settings = [("user", f"{user_name}")],
            exitcode=exitcode, message=message)
@TestScenario
def create_without_source_table_privilege(self, node=None):
    """Check that user is unable to create a view without select
    privilege on the source table.
    """
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    # Expected "not enough privileges" exitcode/message for this user.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    with table(node, f"{table_name}"):
        with user(node, f"{user_name}"):

            # CREATE VIEW alone is not sufficient: SELECT on the source table is also required.
            with When("I grant CREATE VIEW privilege to a user"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {user_name}")

            with Then("I try to create a view without select privilege on the table"):
                node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table_name}", settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)
@TestScenario
def create_with_source_table_privilege_granted_directly_or_via_role(self, node=None):
    """Check that a user is able to create a view if and only if the user has create view privilege and
    select privilege on the source table, either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Variant 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):

        Scenario(test=create_with_source_table_privilege,
            name="create with create view and select privilege granted directly")(grant_target_name=user_name, user_name=user_name)

    # Variant 2: privileges granted via a role.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):

        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(test=create_with_source_table_privilege,
            name="create with create view and select privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_source_table_privilege(self, user_name, grant_target_name, node=None):
    """Check that user is able to create a view when they have both CREATE VIEW
    privilege on the view and SELECT privilege on the source table.

    :param user_name: user that executes the CREATE VIEW statement
    :param grant_target_name: user or role that receives the privileges
    """
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"

    if node is None:
        node = self.context.node

    with table(node, f"{table_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")

            with And("I grant SELECT privilege"):
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")

            with And("I try to create a view on the table as the user"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table_name}", settings = [("user", f"{user_name}")])

            # Source table is freshly created and empty, so the view must return 0 rows.
            with Then("I check the view"):
                output = node.query(f"SELECT count(*) FROM {view_name}").output
                assert output == '0', error()

        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_subquery_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view where the stored query has two subqueries
    if and only if the user has SELECT privilege on all of the tables,
    either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Variant 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):

        Scenario(test=create_with_subquery,
            name="create with subquery, privilege granted directly")(grant_target_name=user_name, user_name=user_name)

    # Variant 2: privileges granted via a role.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):

        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(test=create_with_subquery,
            name="create with subquery, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_subquery(self, user_name, grant_target_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    Iterates over all SELECT-grant permutations of the three source tables:
    creation must fail until SELECT is granted on every table referenced
    by the stored query.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Stored query with two nested subqueries; requires SELECT on all three tables.
    create_view_query = "CREATE VIEW {view_name} AS SELECT * FROM {table0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table2_name} WHERE y<2))"

    if node is None:
        node = self.context.node

    with table(node, f"{table0_name},{table1_name},{table2_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")

            # With no SELECT grants at all, creation must fail.
            with Then("I attempt to CREATE VIEW as the user with create privilege"):
                node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name), settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)

            # Every partial grant combination must still fail.
            for permutation in permutations(table_count=3):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name, table2_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name), settings = [("user", f"{user_name}")],
                                exitcode=exitcode, message=message)

            # Only the full grant on all three tables allows creation.
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=3))+1, grant_target_name, table0_name, table1_name, table2_name):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name), settings = [("user", f"{user_name}")])

        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_join_query_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view where the stored query includes a `JOIN` statement
    if and only if the user has SELECT privilege on all of the tables,
    either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Variant 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):

        Scenario(test=create_with_join_query,
            name="create with join query, privilege granted directly")(grant_target_name=user_name, user_name=user_name)

    # Variant 2: privileges granted via a role.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):

        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(test=create_with_join_query,
            name="create with join query, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_join_query(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    Iterates over all SELECT-grant permutations of the two joined tables.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Stored query joining two tables; requires SELECT on both.
    create_view_query = "CREATE VIEW {view_name} AS SELECT * FROM {table0_name} JOIN {table1_name} USING d"

    if node is None:
        node = self.context.node

    with table(node, f"{table0_name},{table1_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")

            # With no SELECT grants, creation must fail.
            with Then("I attempt to create view as the user"):
                node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)

            # Every partial grant combination must still fail.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")],
                                exitcode=exitcode, message=message)

            # Only the full grant on both tables allows creation.
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=2))+1, grant_target_name, table0_name, table1_name):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")])

        finally:
            with Then("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_union_query_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view where the stored query includes a `UNION ALL` statement
    if and only if the user has SELECT privilege on all of the tables,
    either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Variant 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):

        Scenario(test=create_with_union_query,
            name="create with union query, privilege granted directly")(grant_target_name=user_name, user_name=user_name)

    # Variant 2: privileges granted via a role.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):

        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(test=create_with_union_query,
            name="create with union query, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_union_query(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    Iterates over all SELECT-grant permutations of the two unioned tables.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Stored query with UNION ALL; requires SELECT on both tables.
    create_view_query = "CREATE VIEW {view_name} AS SELECT * FROM {table0_name} UNION ALL SELECT * FROM {table1_name}"

    if node is None:
        node = self.context.node

    with table(node, f"{table0_name},{table1_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")

            # With no SELECT grants, creation must fail.
            with Then("I attempt to create view as the user"):
                node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)

            # Every partial grant combination must still fail.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")],
                                exitcode=exitcode, message=message)

            # Only the full grant on both tables allows creation.
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=2))+1, grant_target_name, table0_name, table1_name):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name), settings = [("user", f"{user_name}")])

        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_join_union_subquery_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view with a stored query that includes `UNION ALL`, `JOIN` and two subqueries
    if and only if the user has SELECT privilege on all of the tables, either granted directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Variant 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):

        Scenario(test=create_with_join_union_subquery,
            name="create with join union subquery, privilege granted directly")(grant_target_name=user_name, user_name=user_name)

    # Variant 2: privileges granted via a role.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):

        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(test=create_with_join_union_subquery,
            name="create with join union subquery, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_join_union_subquery(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    The stored query combines JOIN, UNION ALL and two nested subqueries.
    NOTE(review): table2 is created and granted in the final "all tables" step
    but does not appear in the stored query — presumably intentional upstream;
    verify against the original test suite.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    table4_name = f"table4_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    create_view_query = "CREATE VIEW {view_name} AS SELECT y FROM {table0_name} JOIN {table1_name} USING y UNION ALL SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table3_name} WHERE y IN (SELECT y FROM {table4_name} WHERE y<2))"

    if node is None:
        node = self.context.node

    with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name},{table4_name}"):
        with user(node, f"{user_name}"):
            try:
                with When("I grant CREATE VIEW privilege"):
                    node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")

                # With no SELECT grants, creation must fail.
                with Then("I attempt to create view as the user with CREATE VIEW privilege"):
                    node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name, table3_name=table3_name, table4_name=table4_name),
                        settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)

                # Every partial grant combination must still fail.
                for permutation in permutations(table_count=5):
                    with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name, table3_name, table4_name) as tables_granted:
                        with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                            with Given("I don't have a view"):
                                node.query(f"DROP VIEW IF EXISTS {view_name}")
                            with Then("I attempt to create a view as the user"):
                                node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name, table3_name=table3_name, table4_name=table4_name),
                                    settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)

                # Only the full grant allows creation.
                with When("I grant select on all tables"):
                    with grant_select_on_table(node, max(permutations(table_count=5))+1, grant_target_name, table0_name, table1_name, table2_name, table3_name, table4_name):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view_name=view_name, table0_name=table0_name, table1_name=table1_name, table2_name=table2_name, table3_name=table3_name, table4_name=table4_name),
                                settings = [("user", f"{user_name}")])

            finally:
                with Finally("I drop the view"):
                    node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_nested_views_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view with a stored query that includes other views if and only if
    they have SELECT privilege on all the views and the source tables for those views.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Variant 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):

        Scenario(test=create_with_nested_views,
            name="create with nested views, privilege granted directly")(grant_target_name=user_name, user_name=user_name)

    # Variant 2: privileges granted via a role.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):

        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(test=create_with_nested_views,
            name="create with nested views, privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_nested_views(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    Builds a three-deep chain of views (view0 -> view1 -> view2) over four
    tables, then checks that creating view3 (which reads view2 and table3)
    requires SELECT on the whole chain.

    :param grant_target_name: user or role that receives the privileges
    :param user_name: user that executes the CREATE VIEW statement
    """
    view0_name = f"view0_{getuid()}"
    view1_name = f"view1_{getuid()}"
    view2_name = f"view2_{getuid()}"
    view3_name = f"view3_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    create_view_query = "CREATE VIEW {view3_name} AS SELECT y FROM {table3_name} UNION ALL SELECT y FROM {view2_name}"

    if node is None:
        node = self.context.node

    with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name}"):
        try:
            with Given("I have some views"):
                node.query(f"CREATE VIEW {view0_name} AS SELECT y FROM {table0_name}")
                node.query(f"CREATE VIEW {view1_name} AS SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {view0_name} WHERE y<2)")
                node.query(f"CREATE VIEW {view2_name} AS SELECT y FROM {table2_name} JOIN {view1_name} USING y")

            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view3_name} TO {grant_target_name}")

            # With no SELECT grants, creation must fail.
            with Then("I attempt to create view as the user with CREATE VIEW privilege"):
                node.query(create_view_query.format(view3_name=view3_name, view2_name=view2_name, table3_name=table3_name),
                    settings = [("user",f"{user_name}")], exitcode=exitcode, message=message)

            # Under stress run the full permutation set; otherwise a representative sample.
            for permutation in ([0,1,2,3,7,11,15,31,39,79,95],permutations(table_count=7))[self.context.stress]:
                with grant_select_on_table(node, permutation, grant_target_name, view2_name, table3_name, view1_name, table2_name, view0_name, table1_name, table0_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view3_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(create_view_query.format(view3_name=view3_name, view2_name=view2_name, table3_name=table3_name),
                                settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)

            # Only SELECT on all views and tables allows creation.
            with When("I grant select on all views"):
                with grant_select_on_table(node, max(permutations(table_count=7))+1, grant_target_name, view0_name, view1_name, view2_name, table0_name, table1_name, table2_name, table3_name):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view3_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(create_view_query.format(view3_name=view3_name, view2_name=view2_name, table3_name=table3_name),
                            settings = [("user", f"{user_name}")])

        finally:
            with Finally("I drop the views"):
                # Fix: step labels previously said view0..view3 while actually
                # dropping view3..view0; labels now match the views being dropped.
                with When("I drop view3", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view3_name}")
                with And("I drop view2", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view2_name}")
                with And("I drop view1", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view1_name}")
                with And("I drop view0", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view0_name}")
@TestSuite
@Requirements(
    RQ_SRS_006_RBAC_View_Select("1.0"),
)
def select(self, node=None):
    """Test the RBAC functionality of the `SELECT FROM view` command.

    Runs every SELECT-on-view privilege scenario in sequence.
    """
    Scenario(run=select_without_select_privilege, setup=instrument_clickhouse_server_log)
    Scenario(run=select_with_select_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=select_with_select_privilege_revoked_directly_or_from_role, setup=instrument_clickhouse_server_log)
    Scenario(run=select_without_source_table_privilege, setup=instrument_clickhouse_server_log)
    Scenario(run=select_with_source_table_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=select_with_subquery_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=select_with_join_query_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=select_with_union_query_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=select_with_join_union_subquery_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
    Scenario(run=select_with_nested_views_privilege_granted_directly_or_via_role, setup=instrument_clickhouse_server_log)
@TestScenario
def select_without_select_privilege(self, node=None):
    """Check that user is unable to select on a view without view SELECT privilege.
    """
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    # Expected "not enough privileges" exitcode/message for this user.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    with user(node, f"{user_name}"):
        try:
            # View is created by the default (privileged) user; the test user gets no grants.
            with When("I have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(f"CREATE VIEW {view_name} AS SELECT 1")

            with Then("I try to select from view without privilege as the user"):
                node.query(f"SELECT * FROM {view_name}", settings = [("user",f"{user_name}")],
                    exitcode=exitcode, message=message)

        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_select_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to select from a view if and only if they have select privilege on that view, either directly or from a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Variant 1: privilege granted directly to the user.
    with user(node, f"{user_name}"):

        Scenario(test=select_with_select_privilege,
            name="select with select privilege granted directly")(grant_target_name=user_name, user_name=user_name)

    # Variant 2: privilege granted via a role.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):

        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(test=select_with_select_privilege,
            name="select with select privilege granted through a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_select_privilege(self, user_name, grant_target_name, node=None):
    """With SELECT granted on the view, the user can read it; the single-row
    view yields count(*) == 1.
    """
    node = self.context.node if node is None else node
    viewname = f"view_{getuid()}"
    try:
        with When("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {viewname}")
            node.query(f"CREATE VIEW {viewname} AS SELECT 1")
        with And("I grant SELECT privilege for the view"):
            node.query(f"GRANT SELECT ON {viewname} TO {grant_target_name}")
        with Then("I attempt to select from view with privilege as the user"):
            result = node.query(f"SELECT count(*) FROM {viewname}", settings=[("user", user_name)])
            assert result.output == '1', error()
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {viewname}")
@TestScenario
def select_with_select_privilege_revoked_directly_or_from_role(self, node=None):
    """Check that user is unable to select from a view if their SELECT privilege is revoked, either directly or from a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    with user(node, f"{user_name}"):
        # BUG FIX: previously ran select_with_select_privilege (the grant-only
        # outline), so the revoke path was never exercised and the
        # select_with_revoked_select_privilege outline was dead code.
        Scenario(test=select_with_revoked_select_privilege,
            name="select with select privilege revoked directly")(grant_target_name=user_name, user_name=user_name)
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(test=select_with_revoked_select_privilege,
            name="select with select privilege revoked from a role")(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_revoked_select_privilege(self, user_name, grant_target_name, node=None):
    """Grant and revoke SELECT privilege on a view and check the user is unable to SELECT from it.
    """
    view_name = f"view_{getuid()}"
    # BUG FIX: exitcode/message were referenced in the final query below but
    # never defined, so the outline raised NameError on first run. Derive them
    # the same way the sibling outlines in this file do.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    try:
        with When("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(f"CREATE VIEW {view_name} AS SELECT 1")
        with And("I grant SELECT privilege for the view"):
            node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
        with And("I revoke SELECT privilege for the view"):
            node.query(f"REVOKE SELECT ON {view_name} FROM {grant_target_name}")
        # Step label fixed too: at this point the privilege has been revoked.
        with Then("I attempt to select from view without privilege as the user"):
            node.query(f"SELECT count(*) FROM {view_name}", settings = [("user", f"{user_name}")],
                exitcode=exitcode, message=message)
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_without_source_table_privilege(self, node=None):
    """SELECT on the view alone is not enough: without SELECT on the view's
    source table the read must be denied.
    """
    node = self.context.node if node is None else node
    username = f"user_{getuid()}"
    viewname = f"view_{getuid()}"
    tablename = f"table_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{username}")
    with table(node, f"{tablename}"):
        with user(node, f"{username}"):
            try:
                with When("I create a view from the source table"):
                    node.query(f"DROP VIEW IF EXISTS {viewname}")
                    node.query(f"CREATE VIEW {viewname} AS SELECT * FROM {tablename}")
                with And("I grant view select privilege to the user"):
                    node.query(f"GRANT SELECT ON {viewname} TO {username}")
                with Then("I attempt to select from view without privilege on the source table"):
                    node.query(f"SELECT count(*) FROM {viewname}", settings=[("user", username)],
                        exitcode=exitcode, message=message)
            finally:
                with Finally("I drop the view"):
                    node.query(f"DROP VIEW IF EXISTS {viewname}")
@TestScenario
def select_with_source_table_privilege_granted_directly_or_via_role(self, node=None):
    """Reading a view requires SELECT on both the view and its source table;
    both grants may arrive directly or through a role.
    """
    node = self.context.node if node is None else node
    username = f"user_{getuid()}"
    rolename = f"role_{getuid()}"
    with user(node, f"{username}"):
        Scenario(test=select_with_source_table_privilege,
            name="select with source table, privilege granted directly")(grant_target_name=username, user_name=username)
    with user(node, f"{username}"), role(node, f"{rolename}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {rolename} TO {username}")
        Scenario(test=select_with_source_table_privilege,
            name="select with source table, privilege granted through a role")(grant_target_name=rolename, user_name=username)
@TestOutline
def select_with_source_table_privilege(self, user_name, grant_target_name, node=None):
    """With SELECT granted on both the view and its (empty) source table, the
    user's count(*) read succeeds and returns 0.
    """
    node = self.context.node if node is None else node
    viewname = f"view_{getuid()}"
    tablename = f"table_{getuid()}"
    with table(node, f"{tablename}"):
        try:
            with Given("I have a view with a source table"):
                node.query(f"DROP VIEW IF EXISTS {viewname}")
                node.query(f"CREATE VIEW {viewname} AS SELECT * FROM {tablename}")
            with And("I grant select privileges"):
                node.query(f"GRANT SELECT ON {viewname} TO {grant_target_name}")
                node.query(f"GRANT SELECT ON {tablename} TO {grant_target_name}")
            with Then("I check the user is able to select from the view"):
                result = node.query(f"SELECT count(*) FROM {viewname}", settings=[("user", user_name)])
                assert result.output == '0', error()
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {viewname}")
@TestScenario
def select_with_subquery_privilege_granted_directly_or_via_role(self, node=None):
    """A view whose stored query nests two subqueries is readable iff SELECT is
    held on the view and on every referenced table, directly or via a role.
    """
    node = self.context.node if node is None else node
    username = f"user_{getuid()}"
    rolename = f"role_{getuid()}"
    with user(node, f"{username}"):
        Scenario(test=select_with_subquery,
            name="select with subquery, privilege granted directly")(grant_target_name=username, user_name=username)
    with user(node, f"{username}"), role(node, f"{rolename}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {rolename} TO {username}")
        Scenario(test=select_with_subquery,
            name="select with subquery, privilege granted through a role")(grant_target_name=rolename, user_name=username)
@TestOutline
def select_with_subquery(self, user_name, grant_target_name, node=None):
    """Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.

    The stored query references three tables through nested IN-subqueries;
    every proper subset of table grants must be rejected, and only the full
    grant set must allow the read.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    # Expected failure for every insufficient-privilege attempt below.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    select_view_query = "SELECT count(*) FROM {view_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name}"):
        try:
            with Given("I have a view with a subquery"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table2_name} WHERE y<2))")
            with When("I grant SELECT privilege on view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
            # View privilege alone must not suffice: no table is granted yet.
            with Then("I attempt to select from the view as the user"):
                node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # permutations(table_count=3) presumably enumerates the partial
            # grant bitmasks over the 3 tables — confirm against the helper.
            for permutation in permutations(table_count=3):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name, table2_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Then("I attempt to select from a view as the user"):
                            node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # max(permutations(...))+1 appears to be the all-bits-set mask,
            # i.e. SELECT granted on every table: the read must now succeed,
            # and the freshly-created tables are empty, hence count 0.
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=3))+1, grant_target_name, table0_name, table1_name, table2_name):
                    with Then("I attempt to select from a view as the user"):
                        output = node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")]).output
                        assert output == '0', error()
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_join_query_privilege_granted_directly_or_via_role(self, node=None):
    """A view whose stored query contains a JOIN is readable iff SELECT is held
    on the view and on both joined tables, directly or via a role.
    """
    node = self.context.node if node is None else node
    username = f"user_{getuid()}"
    rolename = f"role_{getuid()}"
    with user(node, f"{username}"):
        Scenario(test=select_with_join_query,
            name="select with join, privilege granted directly")(grant_target_name=username, user_name=username)
    with user(node, f"{username}"), role(node, f"{rolename}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {rolename} TO {username}")
        Scenario(test=select_with_join_query,
            name="select with join, privilege granted through a role")(grant_target_name=rolename, user_name=username)
@TestOutline
def select_with_join_query(self, user_name, grant_target_name, node=None):
    """Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.

    The stored query JOINs two tables; partial table grants must fail and the
    full grant set must succeed.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    # Expected failure for every insufficient-privilege attempt below.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    select_view_query = "SELECT count(*) FROM {view_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            with Given("I have a view with a JOIN statement"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table0_name} JOIN {table1_name} USING d")
            with When("I grant SELECT privilege on view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
            # View privilege alone must not suffice: no table is granted yet.
            with Then("I attempt to select from the view as the user"):
                node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # Partial grant bitmasks over the 2 tables must all be rejected.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Then("I attempt to select from a view as the user"):
                            node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # All-bits-set mask: SELECT on both tables — the read must succeed
            # (no output assertion here; success is absence of an error).
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=2))+1, grant_target_name, table0_name, table1_name):
                    with Then("I attempt to select from a view as the user"):
                        node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")])
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_union_query_privilege_granted_directly_or_via_role(self, node=None):
    """A view whose stored query contains UNION ALL is readable iff SELECT is
    held on the view and on both unioned tables, directly or via a role.
    """
    node = self.context.node if node is None else node
    username = f"user_{getuid()}"
    rolename = f"role_{getuid()}"
    with user(node, f"{username}"):
        Scenario(test=select_with_union_query,
            name="select with union, privilege granted directly")(grant_target_name=username, user_name=username)
    with user(node, f"{username}"), role(node, f"{rolename}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {rolename} TO {username}")
        Scenario(test=select_with_union_query,
            name="select with union, privilege granted through a role")(grant_target_name=rolename, user_name=username)
@TestOutline
def select_with_union_query(self, user_name, grant_target_name, node=None):
    """Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.

    The stored query is a UNION ALL over two tables; partial table grants must
    fail and the full grant set must succeed.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    # Expected failure for every insufficient-privilege attempt below.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    select_view_query = "SELECT count(*) FROM {view_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            with Given("I have a view with a UNION statement"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(f"CREATE VIEW {view_name} AS SELECT * FROM {table0_name} UNION ALL SELECT * FROM {table1_name}")
            with When("I grant SELECT privilege on view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
            # View privilege alone must not suffice: no table is granted yet.
            with Then("I attempt to select from the view as the user"):
                node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # Partial grant bitmasks over the 2 tables must all be rejected.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Then("I attempt to select from a view as the user"):
                            node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # All-bits-set mask: SELECT on both tables — the read must succeed
            # (no output assertion; success is absence of an error).
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=2))+1, grant_target_name, table0_name, table1_name):
                    with Then("I attempt to select from a view as the user"):
                        node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")])
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_join_union_subquery_privilege_granted_directly_or_via_role(self, node=None):
    """A view mixing JOIN, UNION ALL and subqueries is readable iff SELECT is
    held on the view and on all referenced tables, directly or via a role.
    """
    node = self.context.node if node is None else node
    username = f"user_{getuid()}"
    rolename = f"role_{getuid()}"
    with user(node, f"{username}"):
        Scenario(test=select_with_join_union_subquery,
            name="select with join union subquery, privilege granted directly")(grant_target_name=username, user_name=username)
    with user(node, f"{username}"), role(node, f"{rolename}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {rolename} TO {username}")
        Scenario(test=select_with_join_union_subquery,
            name="select with join union subquery, privilege granted through a role")(grant_target_name=rolename, user_name=username)
@TestOutline
def select_with_join_union_subquery(self, grant_target_name, user_name, node=None):
    """Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.

    The stored query combines a JOIN, a UNION ALL and nested IN-subqueries
    over five created tables.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    table4_name = f"table4_{getuid()}"
    # Expected failure for every insufficient-privilege attempt below.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    select_view_query = "SELECT count(*) FROM {view_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name},{table4_name}"):
        try:
            with Given("I have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                # NOTE(review): table2_name is created and included in the grant
                # permutations but never referenced by this stored query (table1
                # appears twice instead) — confirm whether that is intentional.
                node.query(f"CREATE VIEW {view_name} AS SELECT y FROM {table0_name} JOIN {table1_name} USING y UNION ALL SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table3_name} WHERE y IN (SELECT y FROM {table4_name} WHERE y<2))")
            with When("I grant SELECT privilege on view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
            # View privilege alone must not suffice: no table is granted yet.
            with Then("I attempt to select from the view as the user"):
                node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # Partial grant bitmasks over the 5 tables must all be rejected.
            for permutation in permutations(table_count=5):
                with grant_select_on_table(node, permutation, grant_target_name, table0_name, table1_name, table2_name, table3_name, table4_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Then("I attempt to select from a view as the user"):
                            node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # All-bits-set mask: SELECT on every table — the read must succeed
            # (no output assertion; success is absence of an error).
            with When("I grant select on all tables"):
                with grant_select_on_table(node, max(permutations(table_count=5))+1, grant_target_name, table0_name, table1_name, table2_name, table3_name, table4_name):
                    with Then("I attempt to select from a view as the user"):
                        node.query(select_view_query.format(view_name=view_name), settings = [("user", f"{user_name}")])
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_nested_views_privilege_granted_directly_or_via_role(self, node=None):
    """A view built on a chain of other views is readable iff SELECT is held on
    every view in the chain and every source table, directly or via a role.
    """
    node = self.context.node if node is None else node
    username = f"user_{getuid()}"
    rolename = f"role_{getuid()}"
    with user(node, f"{username}"):
        Scenario(test=select_with_nested_views,
            name="select with nested views, privilege granted directly")(grant_target_name=username, user_name=username)
    with user(node, f"{username}"), role(node, f"{rolename}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {rolename} TO {username}")
        Scenario(test=select_with_nested_views,
            name="select with nested views, privilege granted through a role")(grant_target_name=rolename, user_name=username)
@TestOutline
def select_with_nested_views(self, grant_target_name, user_name, node=None):
    """Grant SELECT on views and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.

    view0 reads table0; view1 filters view0 through table1; view2 JOINs view1
    with table2; view3 UNIONs view2 with table3. Selecting from view3 therefore
    requires SELECT on all four views and all four tables.
    """
    view0_name = f"view0_{getuid()}"
    view1_name = f"view1_{getuid()}"
    view2_name = f"view2_{getuid()}"
    view3_name = f"view3_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    # Expected failure for every insufficient-privilege attempt below.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    select_view_query = "SELECT count(*) FROM {view3_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name}"):
        try:
            with Given("I have some views"):
                node.query(f"CREATE VIEW {view0_name} AS SELECT y FROM {table0_name}")
                node.query(f"CREATE VIEW {view1_name} AS SELECT y FROM {view0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y<2)")
                node.query(f"CREATE VIEW {view2_name} AS SELECT y FROM {view1_name} JOIN {table2_name} USING y")
                node.query(f"CREATE VIEW {view3_name} AS SELECT y FROM {view2_name} UNION ALL SELECT y FROM {table3_name}")
            # No grants yet: the read from view3 must be rejected.
            with Then("I attempt to select from a view as the user"):
                node.query(select_view_query.format(view3_name=view3_name),
                    settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # Index 1 (stress truthy) runs all 8-object grant permutations;
            # index 0 runs a hand-picked sample of partial-grant bitmasks.
            for permutation in ([0,1,3,5,7,13,15,23,31,45,63,95,127,173,237,247,253],permutations(table_count=8))[self.context.stress]:
                with grant_select_on_table(node, permutation, grant_target_name, view3_name, table3_name, view2_name, view1_name, table2_name, view0_name, table1_name, table0_name) as tables_granted:
                    with When(f"permutation={permutation}, tables granted = {tables_granted}"):
                        with Then("I attempt to select from a view as the user"):
                            node.query(select_view_query.format(view3_name=view3_name),
                                settings = [("user", f"{user_name}")], exitcode=exitcode, message=message)
            # All-bits-set mask: SELECT on every view and table — must succeed.
            with When("I grant select on all views"):
                with grant_select_on_table(node, max(permutations(table_count=8))+1, grant_target_name, view0_name, view1_name, view2_name, view3_name, table0_name, table1_name, table2_name, table3_name):
                    with Then("I attempt to select from a view as the user"):
                        node.query(select_view_query.format(view3_name=view3_name), settings = [("user", f"{user_name}")])
        finally:
            # NOTE(review): the step labels are reversed relative to the views
            # actually dropped ("I drop view0" drops view3, etc.) — the drop
            # order (view3 first, down to view0) is correct for the dependency
            # chain, only the labels are misleading.
            with Finally("I drop the views"):
                with When("I drop view0", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view3_name}")
                with And("I drop view1", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view2_name}")
                with And("I drop view2", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view1_name}")
                with And("I drop view3", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view0_name}")
@TestSuite
@Requirements(
    RQ_SRS_006_RBAC_View_Drop("1.0"),
)
def drop(self, node=None):
    """Exercise RBAC behavior of `DROP VIEW` by running both drop scenarios."""
    scenarios = (
        drop_with_privilege_granted_directly_or_via_role,
        drop_with_revoked_privilege_revoked_directly_or_from_role,
    )
    for scenario in scenarios:
        Scenario(run=scenario, setup=instrument_clickhouse_server_log)
@TestScenario
def drop_with_privilege_granted_directly_or_via_role(self, node=None):
    """DROP VIEW succeeds when the privilege is held directly or via a role."""
    node = self.context.node if node is None else node
    username = f"user_{getuid()}"
    rolename = f"role_{getuid()}"
    with user(node, f"{username}"):
        Scenario(test=drop_with_privilege,
            name="drop privilege granted directly")(grant_target_name=username, user_name=username)
    with user(node, f"{username}"), role(node, f"{rolename}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {rolename} TO {username}")
        Scenario(test=drop_with_privilege,
            name="drop privilege granted through a role")(grant_target_name=rolename, user_name=username)
@TestOutline
def drop_with_privilege(self, grant_target_name, user_name, node=None):
    """With DROP VIEW granted, the user can drop the view; a subsequent SELECT
    must then fail with "table does not exist".
    """
    node = self.context.node if node is None else node
    viewname = f"view_{getuid()}"
    exitcode, message = errors.table_does_not_exist(name=f"default.{viewname}")
    try:
        with Given("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {viewname}")
            node.query(f"CREATE VIEW {viewname} AS SELECT 1")
        with When("I grant DROP VIEW privilege"):
            node.query(f"GRANT DROP VIEW ON {viewname} TO {grant_target_name}")
        with And("I drop the view as the user"):
            node.query(f"DROP VIEW {viewname}", settings=[("user", user_name)])
        with Then("I check the table does not exist"):
            node.query(f"SELECT * FROM {viewname}", exitcode=exitcode, message=message)
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {viewname}")
@TestScenario
def drop_with_revoked_privilege_revoked_directly_or_from_role(self, node=None):
    """DROP VIEW must fail once the privilege has been revoked, whether it was
    held directly or via a role.
    """
    node = self.context.node if node is None else node
    username = f"user_{getuid()}"
    rolename = f"role_{getuid()}"
    with user(node, f"{username}"):
        Scenario(test=drop_with_revoked_privilege,
            name="drop privilege revoked directly")(grant_target_name=username, user_name=username)
    with user(node, f"{username}"), role(node, f"{rolename}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {rolename} TO {username}")
        Scenario(test=drop_with_revoked_privilege,
            name="drop privilege revoked from a role")(grant_target_name=rolename, user_name=username)
@TestOutline
def drop_with_revoked_privilege(self, grant_target_name, user_name, node=None):
    """After DROP VIEW is granted and then revoked, dropping the view as the
    user must be rejected with a not-enough-privileges error.
    """
    node = self.context.node if node is None else node
    viewname = f"view_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    try:
        with Given("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {viewname}")
            node.query(f"CREATE VIEW {viewname} AS SELECT 1")
        with When("I grant DROP VIEW privilege"):
            node.query(f"GRANT DROP VIEW ON {viewname} TO {grant_target_name}")
        with And("I revoke DROP VIEW privilege"):
            node.query(f"REVOKE DROP VIEW ON {viewname} FROM {grant_target_name}")
        with Then("I drop the view as the user"):
            node.query(f"DROP VIEW {viewname}", settings=[("user", user_name)],
                exitcode=exitcode, message=message)
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {viewname}")
@TestFeature
@Requirements(
    RQ_SRS_006_RBAC_View("1.0"),
)
@Name("view")
def feature(self, stress=None, parallel=None, node="clickhouse1"):
    """Entry point for the RBAC view feature: configure context flags and run
    every suite in this module in a pool of 3 workers.
    """
    self.context.node = self.context.cluster.node(node)
    if stress is not None:
        self.context.stress = stress
    if parallel is not None:
        # BUG FIX: previously assigned parallel into self.context.stress,
        # silently clobbering the stress flag and never setting parallel.
        self.context.parallel = parallel
    tasks = []
    with Pool(3) as pool:
        try:
            for suite in loads(current_module(), Suite):
                run_scenario(pool, tasks, suite)
        finally:
            # Always wait for in-flight suites before leaving the pool.
            join(tasks)
|
73dc13544e3bb49938642a19abdf129c08a57dbe
|
850fb312d6cfa25546369b4950c47b04231dce8e
|
/src/gt4sd/training_pipelines/torchdrug/__init__.py
|
64a9cc4fbda26e900ae552c3d81971ce862646d8
|
[
"MIT"
] |
permissive
|
GT4SD/gt4sd-core
|
825418303547c36cf64575ac4f8711877fd7e16b
|
0b69b7d5b261f2f9af3984793c1295b9b80cd01a
|
refs/heads/main
| 2023-09-02T21:23:46.156469
| 2023-08-30T08:28:40
| 2023-08-30T08:28:40
| 458,309,249
| 239
| 50
|
MIT
| 2023-08-25T06:14:52
| 2022-02-11T19:06:58
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,578
|
py
|
__init__.py
|
#
# MIT License
#
# Copyright (c) 2022 GT4SD team
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from .unpatch import ( # isort:skip
fix_datasets,
sane_datasets,
fix_schedulers,
sane_schedulers,
TORCH_HAS_OPENMP,
)
import torch
from torchdrug.datasets import (
BACE,
BBBP,
CEP,
HIV,
MOSES,
MUV,
OPV,
PCQM4M,
QM8,
QM9,
SIDER,
ChEMBLFiltered,
ClinTox,
Delaney,
FreeSolv,
Lipophilicity,
Malaria,
PubChem110m,
Tox21,
ToxCast,
ZINC2m,
ZINC250k,
)
from .dataset import TorchDrugDataset
# isort: off
from torch import nn
"""
Necessary because torchdrug silently overwrites the default nn.Module. This is quite
invasive and causes significant side-effects in the rest of the code.
See: https://github.com/DeepGraphLearning/torchdrug/issues/77
"""
# Undo torchdrug's global monkey-patch of nn.Module; nn._Module presumably
# holds the original torch Module saved by torchdrug — see the linked issue.
nn.Module = nn._Module # type: ignore
# Apply the local compatibility patches imported from .unpatch above.
fix_datasets(sane_datasets)
fix_schedulers(sane_schedulers)
# Registry: lowercase dataset key -> torchdrug dataset class ("custom" maps to
# the local TorchDrugDataset for user-supplied data).
DATASET_FACTORY = {
    "bace": BACE,
    "bbbp": BBBP,
    "custom": TorchDrugDataset,
    "cep": CEP,
    "chembl": ChEMBLFiltered,
    "clintox": ClinTox,
    "delaney": Delaney,
    "freesolv": FreeSolv,
    "hiv": HIV,
    "lipophilicity": Lipophilicity,
    "malaria": Malaria,
    "moses": MOSES,
    "muv": MUV,
    "opv": OPV,
    "pcqm4m": PCQM4M,
    "pubchem": PubChem110m,
    "qm8": QM8,
    "qm9": QM9,
    "sider": SIDER,
    "tox21": Tox21,
    "toxcast": ToxCast,
    "zinc250k": ZINC250k,
    "zinc2m": ZINC2m,
}
# NOTE: restore original OpenMP settings
# (TORCH_HAS_OPENMP was captured in .unpatch before any patching ran)
torch._C.has_openmp = TORCH_HAS_OPENMP
|
bd599eee828d00db3dfa05a3b88093de38530f97
|
db43055180800e54526affb7941fc77e0226c4b3
|
/tests/test_chi_state_university.py
|
6dcc667b8898857a02f50f79f40047b47d29e021
|
[
"MIT"
] |
permissive
|
City-Bureau/city-scrapers
|
53724d4e973d71ec331cd9749e350c460aa6fb9d
|
611fce6a2705446e25a2fc33e32090a571eb35d1
|
refs/heads/main
| 2023-08-15T01:06:49.336847
| 2023-07-01T20:39:28
| 2023-07-01T20:39:28
| 97,890,025
| 308
| 366
|
MIT
| 2023-09-07T07:30:12
| 2017-07-21T00:45:13
|
Python
|
UTF-8
|
Python
| false
| false
| 2,560
|
py
|
test_chi_state_university.py
|
from datetime import date, datetime
from os.path import dirname, join
import pytest
from city_scrapers_core.constants import BOARD
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from city_scrapers.spiders.chi_state_university import ChiStateUniversitySpider
# Cached HTML fixture standing in for the live board-of-trustees dates page.
test_response = file_response(
    join(dirname(__file__), "files", "chi_state_university.html"),
    url="https://www.csu.edu/boardoftrustees/dates.htm",
)
# Same fixture reused as the minutes/agendas page for the current year.
test_minutes_response = file_response(
    join(dirname(__file__), "files", "chi_state_university.html"),
    url=f"https://www.csu.edu/boardoftrustees/\
meetingagendas/year{date.today().year}.htm",
)
spider = ChiStateUniversitySpider()
# Freeze "now" so the computed meeting status (upcoming/passed) is
# deterministic regardless of when the tests run.
freezer = freeze_time("2020-09-15")
freezer.start()
spider.minutes_map = spider._parse_minutes(test_minutes_response)
parsed_items = [item for item in spider._parse_meetings(test_response)]
freezer.stop()
# Expected field values for the first parsed meeting, asserted field-by-field
# in the test functions below.
expected = {
    "title": "Special Board Meeting",
    "classification": BOARD,
    "start": datetime(2020, 7, 27, 15, 0),
    "end": None,
    "all_day": False,
    "time_notes": "",
    "location": {
        "address": "9501 S. King Drive Chicago, IL 60628",
        "name": "Room 15, 4th Floor, Gwendolyn Brooks Library Auditorium",
    },
    "links": [
        {
            "href": "https://attendee.gotowebinar.com/register/1203282843839078926",
            "title": "Virtual meeting link",
        }
    ],
    "source": "https://www.csu.edu/boardoftrustees/dates.htm",
    "status": "passed",
    "id": "chi_state_university/202007271500/x/special_board_meeting",
}
def test_items():
    """The fixture page should yield exactly 24 parsed meetings."""
    assert len(parsed_items) == 24
def test_title():
    """First parsed meeting's title matches the fixture expectation."""
    first = parsed_items[0]
    assert first["title"] == expected["title"]
def test_start():
    """First parsed meeting's start datetime matches the fixture expectation."""
    first = parsed_items[0]
    assert first["start"] == expected["start"]
def test_end():
    """First parsed meeting's end time matches the fixture expectation (None)."""
    first = parsed_items[0]
    assert first["end"] == expected["end"]
def test_time_notes():
    """First parsed meeting's time notes match the fixture expectation."""
    first = parsed_items[0]
    assert first["time_notes"] == expected["time_notes"]
def test_id():
    """First parsed meeting's generated id matches the fixture expectation."""
    first = parsed_items[0]
    assert first["id"] == expected["id"]
def test_status():
    """First parsed meeting's status matches the fixture expectation."""
    first = parsed_items[0]
    assert first["status"] == expected["status"]
def test_location():
    """First parsed meeting's location dict matches the fixture expectation."""
    first = parsed_items[0]
    assert first["location"] == expected["location"]
def test_source():
    """First parsed meeting's source URL matches the fixture expectation."""
    first = parsed_items[0]
    assert first["source"] == expected["source"]
def test_links():
    """First parsed meeting's links list matches the fixture expectation."""
    first = parsed_items[0]
    assert first["links"] == expected["links"]
def test_classification():
    """First parsed meeting's classification matches the fixture expectation."""
    first = parsed_items[0]
    assert first["classification"] == expected["classification"]
@pytest.mark.parametrize("item", parsed_items)
def test_all_day(item):
    """No meeting on the page is an all-day event."""
    assert item["all_day"] is False
|
35211fde81e8c6f19e88f0f72411cb558442bfbe
|
35b6013c1943f37d1428afd2663c8aba0a02628d
|
/appengine/standard_python3/bigquery/main.py
|
d1d41b5a6e2ce5440c3f7f2178435d029ef72d2d
|
[
"Apache-2.0"
] |
permissive
|
GoogleCloudPlatform/python-docs-samples
|
d2a251805fbeab15d76ed995cf200727f63f887d
|
44e819e713c3885e38c99c16dc73b7d7478acfe8
|
refs/heads/main
| 2023-08-28T12:52:01.712293
| 2023-08-28T11:18:28
| 2023-08-28T11:18:28
| 35,065,876
| 7,035
| 7,593
|
Apache-2.0
| 2023-09-14T20:20:56
| 2015-05-04T23:26:13
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,375
|
py
|
main.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START gae_python38_bigquery]
# [START gae_python3_bigquery]
import concurrent.futures
import flask
from google.cloud import bigquery
# Module-level Flask app and BigQuery client, shared by all request handlers.
# The client picks up credentials/project from the App Engine environment.
app = flask.Flask(__name__)
bigquery_client = bigquery.Client()
@app.route("/")
def main():
    """Start the sample Stack Overflow query and redirect to its results page.

    The query runs asynchronously; we hand its identifiers to /results,
    which polls for completion and renders the rows.
    """
    sql = """
        SELECT
          CONCAT(
            'https://stackoverflow.com/questions/',
            CAST(id as STRING)) as url,
          view_count
        FROM `bigquery-public-data.stackoverflow.posts_questions`
        WHERE tags like '%google-bigquery%'
        ORDER BY view_count DESC
        LIMIT 10
        """
    job = bigquery_client.query(sql)
    # Encode the job's coordinates in the redirect so /results can find it.
    target = flask.url_for(
        "results",
        project_id=job.project,
        job_id=job.job_id,
        location=job.location,
    )
    return flask.redirect(target)
@app.route("/results")
def results():
    """Render the rows of a previously started BigQuery job.

    Query parameters:
        project_id, job_id, location: together identify the job to poll.

    Returns the rendered results page, a timeout page if the job is still
    running after 30 seconds, or a 400 error when any identifier is missing.
    """
    project_id = flask.request.args.get("project_id")
    job_id = flask.request.args.get("job_id")
    location = flask.request.args.get("location")
    if not (project_id and job_id and location):
        # Without all three identifiers get_job() would raise a TypeError
        # and surface as a 500; report the client error explicitly instead.
        return "Missing project_id, job_id, or location parameter", 400
    query_job = bigquery_client.get_job(
        job_id,
        project=project_id,
        location=location,
    )
    try:
        # Bound the wait: long-running queries fall through to the
        # timeout page rather than hanging the request.
        results = query_job.result(timeout=30)
    except concurrent.futures.TimeoutError:
        return flask.render_template("timeout.html", job_id=query_job.job_id)
    return flask.render_template("query_result.html", results=results)
if __name__ == "__main__":
    # This is used when running locally only. When deploying to Google App
    # Engine, a webserver process such as Gunicorn will serve the app. This
    # can be configured by adding an `entrypoint` to app.yaml.
    # debug=True enables the reloader/debugger; never use it in production.
    app.run(host="127.0.0.1", port=8080, debug=True)
# [END gae_python3_bigquery]
# [END gae_python38_bigquery]
|
cb90a7855b2fb76d25d0bfe43b6899fe03681224
|
5b8fdeb482307b05b43c51784b41b2fa92112d0a
|
/Code of Winning Methods/A1/3. code/1. preprocessing/1. preprocessing.py
|
b8105e49ddbbbc2ee2814c5d1444987c0ab1f43c
|
[] |
no_license
|
Mcompetitions/M5-methods
|
77546b51161ea26e6aea6b96af9ed90e7792149b
|
18e4e776cfa1ded3b387fabe008c6c2c44922007
|
refs/heads/master
| 2023-04-28T04:30:05.475564
| 2023-04-19T05:55:10
| 2023-04-19T05:55:10
| 235,292,116
| 525
| 225
| null | 2022-11-22T06:21:28
| 2020-01-21T08:37:29
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 16,116
|
py
|
1. preprocessing.py
|
#!/usr/bin/env python
# coding: utf-8
# ## Please input your directory for the top level folder
# folder name : SUBMISSION MODEL
# In[ ]:
dir_ = '/home/artemis/M5/A1-Yeon/' # input only here
# In[ ]:
# Raw competition CSVs live in raw_data_dir; intermediate pickles produced
# by this script are written to processed_data_dir.
raw_data_dir = dir_+'2. data/'
processed_data_dir = dir_+'2. data/processed/'
# # 1. Main setup
# In[ ]:
# General imports
import numpy as np
import pandas as pd
import os, sys, gc, time, warnings, pickle, psutil, random
from math import ceil
from sklearn.preprocessing import LabelEncoder
from tqdm import tqdm
warnings.filterwarnings('ignore')
# In[ ]:
## Simple "Memory profilers" to see memory usage
def get_memory_usage():
    """Return this process's resident set size in GiB, rounded to 2 places."""
    rss_bytes = psutil.Process(os.getpid()).memory_info()[0]
    return np.round(rss_bytes / 2.**30, 2)
def sizeof_fmt(num, suffix='B'):
    """Format a byte count with binary (1024-based) unit prefixes.

    :param num: size in bytes (may be negative)
    :param suffix: unit suffix appended after the prefix, default 'B'
    :return: human-readable string such as '1.5KiB'
    """
    value = num
    for prefix in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
        if abs(value) < 1024.0:
            return "%3.1f%s%s" % (value, prefix, suffix)
        value /= 1024.0
    # Anything past zebibytes collapses to the 'Yi' prefix.
    return "%.1f%s%s" % (value, 'Yi', suffix)
# In[ ]:
## Memory Reducer
# :df pandas dataframe to reduce size # type: pd.DataFrame()
# :verbose # type: bool
def reduce_mem_usage(df, verbose=True):
    """Downcast numeric columns of ``df`` in place to the narrowest dtype
    whose range strictly contains the column's observed min/max.

    :param df: pandas DataFrame to shrink (mutated and also returned)
    :param verbose: print the achieved memory reduction
    :return: the same DataFrame with downcast columns
    """
    numerics = ['int16', 'int32', 'int64', 'float16', 'float32', 'float64']
    start_mem = df.memory_usage().sum() / 1024**2
    for col in df.columns:
        col_type = df[col].dtypes
        if col_type not in numerics:
            continue
        c_min = df[col].min()
        c_max = df[col].max()
        if str(col_type)[:3] == 'int':
            # Try integer widths narrowest-first; leave the column alone
            # if even int64's open interval does not contain the range.
            for candidate in (np.int8, np.int16, np.int32, np.int64):
                limits = np.iinfo(candidate)
                if c_min > limits.min and c_max < limits.max:
                    df[col] = df[col].astype(candidate)
                    break
        else:
            # Floats: try float16 then float32, else fall back to float64.
            for candidate in (np.float16, np.float32):
                limits = np.finfo(candidate)
                if c_min > limits.min and c_max < limits.max:
                    df[col] = df[col].astype(candidate)
                    break
            else:
                df[col] = df[col].astype(np.float64)
    end_mem = df.memory_usage().sum() / 1024**2
    if verbose: print('Mem. usage decreased to {:5.2f} Mb ({:.1f}% reduction)'.format(end_mem, 100 * (start_mem - end_mem) / start_mem))
    return df
# In[ ]:
## Merging by concat to not lose dtypes
def merge_by_concat(df1, df2, merge_on):
    """Left-merge ``df2`` onto ``df1`` without disturbing df1's dtypes.

    A plain ``df1.merge`` can upcast existing columns; merging only the key
    columns and concatenating the new ones sidesteps that.

    :param df1: left DataFrame (kept intact)
    :param df2: right DataFrame providing new columns
    :param merge_on: list of key column names
    :return: df1 with df2's non-key columns appended
    """
    lookup = df1[merge_on].merge(df2, on=merge_on, how='left')
    extra = [c for c in list(lookup) if c not in merge_on]
    return pd.concat([df1, lookup[extra]], axis=1)
# In[ ]:
########################### Vars
#################################################################################
# Global constants used throughout the pipeline below.
TARGET = 'sales'         # Our main target
END_TRAIN = 1941         # Last day in train set
MAIN_INDEX = ['id','d']  # We can identify item by these columns
# # 2. Part 1
# - Melting train data => grid_part_1
# - creating price features => grid_part_2
# - creating calendar features => grid_part_3
# In[ ]:
########################### Load Data
#################################################################################
print('Load Main Data')
# Here we are reading all our data
# without any limitations and dtype modification
train_df = pd.read_csv(raw_data_dir+'sales_train_evaluation.csv')
prices_df = pd.read_csv(raw_data_dir+'sell_prices.csv')
calendar_df = pd.read_csv(raw_data_dir+'calendar.csv')
# In[ ]:
########################### Make Grid
#################################################################################
print('Create Grid')
# We can transform the horizontal representation
# to vertical "view"
# Our "index" will be 'id','item_id','dept_id','cat_id','store_id','state_id'
# and labels are 'd_' columns
index_columns = ['id','item_id','dept_id','cat_id','store_id','state_id']
grid_df = pd.melt(train_df,
                  id_vars = index_columns,
                  var_name = 'd',
                  value_name = TARGET)
# If we look at train_df we see that
# we don't have a lot of training rows
# but each day can provide more train data
print('Train rows:', len(train_df), len(grid_df))
# To be able to make predictions
# we need to add "test set" (28 forecast days) to our grid
add_grid = pd.DataFrame()
for i in range(1,29):
    temp_df = train_df[index_columns]
    temp_df = temp_df.drop_duplicates()
    temp_df['d'] = 'd_'+ str(END_TRAIN+i)
    temp_df[TARGET] = np.nan
    add_grid = pd.concat([add_grid,temp_df])
grid_df = pd.concat([grid_df,add_grid])
grid_df = grid_df.reset_index(drop=True)
# Remove some temporary DFs
del temp_df, add_grid
# We will not need original train_df
# anymore and can remove it
del train_df
# You don't have to use df = df construction
# you can use inplace=True instead.
# like this
# grid_df.reset_index(drop=True, inplace=True)
# Let's check our memory usage
print("{:>20}: {:>8}".format('Original grid_df',sizeof_fmt(grid_df.memory_usage(index=True).sum())))
# We can free some memory
# by converting "strings" to categorical
# it will not affect merging and
# we will not lose any valuable data
for col in index_columns:
    grid_df[col] = grid_df[col].astype('category')
# Let's check again memory usage
print("{:>20}: {:>8}".format('Reduced grid_df',sizeof_fmt(grid_df.memory_usage(index=True).sum())))
# In[ ]:
########################### Product Release date
#################################################################################
print('Release week')
# It seems that leading zero values
# in each train_df item row
# are not real 0 sales but mean
# absence for the item in the store
# we can save some memory by removing
# such zeros
# Prices are set by week
# so we will have a not very accurate release week
release_df = prices_df.groupby(['store_id','item_id'])['wm_yr_wk'].agg(['min']).reset_index()
release_df.columns = ['store_id','item_id','release']
# Now we can merge release_df
grid_df = merge_by_concat(grid_df, release_df, ['store_id','item_id'])
del release_df
# We want to remove some "zeros" rows
# from grid_df
# to do it we need wm_yr_wk column
# let's merge partly calendar_df to have it
grid_df = merge_by_concat(grid_df, calendar_df[['wm_yr_wk','d']], ['d'])
# Now we can cut off rows before the item's release week
# and save memory
grid_df = grid_df[grid_df['wm_yr_wk']>=grid_df['release']]
grid_df = grid_df.reset_index(drop=True)
# Let's check our memory usage
print("{:>20}: {:>8}".format('Original grid_df',sizeof_fmt(grid_df.memory_usage(index=True).sum())))
# Should we keep release week
# as one of the features?
# Only good CV can give the answer.
# Let's minify the release values.
# Min transformation will not help here
# as int16 -> Integer (-32768 to 32767)
# and our grid_df['release'].max() serves for int16
# but we have an idea how to transform
# other columns in case we will need it
grid_df['release'] = grid_df['release'] - grid_df['release'].min()
grid_df['release'] = grid_df['release'].astype(np.int16)
# Let's check again memory usage
print("{:>20}: {:>8}".format('Reduced grid_df',sizeof_fmt(grid_df.memory_usage(index=True).sum())))
# In[ ]:
########################### Save part 1
#################################################################################
print('Save Part 1')
# We have our BASE grid ready
# and can save it as pickle file
# for future use (model training)
grid_df.to_pickle(processed_data_dir+'grid_part_1.pkl')
print('Size:', grid_df.shape)
# In[ ]:
########################### Prices
#################################################################################
print('Prices')
# We can do some basic aggregations per (store, item)
prices_df['price_max'] = prices_df.groupby(['store_id','item_id'])['sell_price'].transform('max')
prices_df['price_min'] = prices_df.groupby(['store_id','item_id'])['sell_price'].transform('min')
prices_df['price_std'] = prices_df.groupby(['store_id','item_id'])['sell_price'].transform('std')
prices_df['price_mean'] = prices_df.groupby(['store_id','item_id'])['sell_price'].transform('mean')
# and do price normalization (min/max scaling)
prices_df['price_norm'] = prices_df['sell_price']/prices_df['price_max']
# Some items can be inflation dependent
# and some items are very "stable"
prices_df['price_nunique'] = prices_df.groupby(['store_id','item_id'])['sell_price'].transform('nunique')
prices_df['item_nunique'] = prices_df.groupby(['store_id','sell_price'])['item_id'].transform('nunique')
# I would like some "rolling" aggregations
# but would like months and years as "window"
calendar_prices = calendar_df[['wm_yr_wk','month','year']]
calendar_prices = calendar_prices.drop_duplicates(subset=['wm_yr_wk']) # distinct(.keep_all = True)
prices_df = prices_df.merge(calendar_prices[['wm_yr_wk','month','year']], on=['wm_yr_wk'], how='left')
del calendar_prices
# Now we can add price "momentum" (some sort of)
# Shifted by week
# by month mean
# by year mean
prices_df['price_momentum'] = prices_df['sell_price']/prices_df.groupby(['store_id','item_id'])['sell_price'].transform(lambda x: x.shift(1))
prices_df['price_momentum_m'] = prices_df['sell_price']/prices_df.groupby(['store_id','item_id','month'])['sell_price'].transform('mean')
prices_df['price_momentum_y'] = prices_df['sell_price']/prices_df.groupby(['store_id','item_id','year'])['sell_price'].transform('mean')
# month/year were only needed for the momentum features above
del prices_df['month'], prices_df['year']
# In[ ]:
grid_df = reduce_mem_usage(grid_df)
prices_df = reduce_mem_usage(prices_df)
# In[ ]:
########################### Merge prices and save part 2
#################################################################################
print('Merge prices and save part 2')
# Merge Prices
original_columns = list(grid_df)
grid_df = grid_df.merge(prices_df, on=['store_id','item_id','wm_yr_wk'], how='left')
keep_columns = [col for col in list(grid_df) if col not in original_columns]
grid_df = grid_df[MAIN_INDEX+keep_columns]
grid_df = reduce_mem_usage(grid_df)
# Save part 2
grid_df.to_pickle(processed_data_dir+'grid_part_2.pkl')
print('Size:', grid_df.shape)
# In[ ]:
# We don't need prices_df anymore
del prices_df, grid_df
# We can remove new columns
# or just load part_1
grid_df = pd.read_pickle(processed_data_dir+'grid_part_1.pkl')
# In[ ]:
########################### Merge calendar
#################################################################################
grid_df = grid_df[MAIN_INDEX]
# Merge calendar partly
icols = ['date',
         'd',
         'event_name_1',
         'event_type_1',
         'event_name_2',
         'event_type_2',
         'snap_CA',
         'snap_TX',
         'snap_WI']
grid_df = grid_df.merge(calendar_df[icols], on=['d'], how='left')
# Minify data
# 'snap_' columns we can convert to bool or int8
icols = ['event_name_1',
         'event_type_1',
         'event_name_2',
         'event_type_2',
         'snap_CA',
         'snap_TX',
         'snap_WI']
for col in icols:
    grid_df[col] = grid_df[col].astype('category')
# Convert to DateTime
grid_df['date'] = pd.to_datetime(grid_df['date'])
# Make some features from date
grid_df['tm_d'] = grid_df['date'].dt.day.astype(np.int8)
grid_df['tm_w'] = grid_df['date'].dt.week.astype(np.int8)
grid_df['tm_m'] = grid_df['date'].dt.month.astype(np.int8)
grid_df['tm_y'] = grid_df['date'].dt.year
grid_df['tm_y'] = (grid_df['tm_y'] - grid_df['tm_y'].min()).astype(np.int8)
grid_df['tm_wm'] = grid_df['tm_d'].apply(lambda x: ceil(x/7)).astype(np.int8) # week of the month (1-5)
grid_df['tm_dw'] = grid_df['date'].dt.dayofweek.astype(np.int8)
grid_df['tm_w_end'] = (grid_df['tm_dw']>=5).astype(np.int8)
# Remove date
del grid_df['date']
# In[ ]:
########################### Save part 3 (Dates)
#################################################################################
print('Save part 3')
# Save part 3
grid_df.to_pickle(processed_data_dir+'grid_part_3.pkl')
print('Size:', grid_df.shape)
# We don't need calendar_df anymore
del calendar_df
del grid_df
# In[ ]:
########################### Some additional cleaning
#################################################################################
## Part 1
# Convert 'd' to int (strip the 'd_' prefix)
grid_df = pd.read_pickle(processed_data_dir+'grid_part_1.pkl')
grid_df['d'] = grid_df['d'].apply(lambda x: x[2:]).astype(np.int16)
# Remove 'wm_yr_wk'
# as test values are not in train set
del grid_df['wm_yr_wk']
grid_df.to_pickle(processed_data_dir+'grid_part_1.pkl')
del grid_df
# # 3. Part2
# - Lag feature
# - Lag rolling feature
# In[ ]:
grid_df = pd.read_pickle(processed_data_dir+'grid_part_1.pkl')
# We need only 'id','d','sales'
# to make lags and rollings
grid_df = grid_df[['id','d','sales']]
SHIFT_DAY = 28
# Lags
# with 28 day shift (the forecast horizon, so lags never leak test days)
start_time = time.time()
print('Create lags')
LAG_DAYS = [col for col in range(SHIFT_DAY,SHIFT_DAY+15)]
grid_df = grid_df.assign(**{
        '{}_lag_{}'.format(col, l): grid_df.groupby(['id'])[col].transform(lambda x: x.shift(l))
        for l in LAG_DAYS
        for col in [TARGET]
    })
# Minify lag columns
for col in list(grid_df):
    if 'lag' in col:
        grid_df[col] = grid_df[col].astype(np.float16)
print('%0.2f min: Lags' % ((time.time() - start_time) / 60))
# Rollings
# with 28 day shift
start_time = time.time()
print('Create rolling aggs')
for i in [7,14,30,60,180]:
    print('Rolling period:', i)
    grid_df['rolling_mean_'+str(i)] = grid_df.groupby(['id'])[TARGET].transform(lambda x: x.shift(SHIFT_DAY).rolling(i).mean()).astype(np.float16)
    grid_df['rolling_std_'+str(i)] = grid_df.groupby(['id'])[TARGET].transform(lambda x: x.shift(SHIFT_DAY).rolling(i).std()).astype(np.float16)
# Rollings
# with sliding shift (short shifts: only usable for recursive prediction)
for d_shift in [1,7,14]:
    print('Shifting period:', d_shift)
    for d_window in [7,14,30,60]:
        col_name = 'rolling_mean_tmp_'+str(d_shift)+'_'+str(d_window)
        grid_df[col_name] = grid_df.groupby(['id'])[TARGET].transform(lambda x: x.shift(d_shift).rolling(d_window).mean()).astype(np.float16)
print('%0.2f min: Lags' % ((time.time() - start_time) / 60))
# In[ ]:
########################### Export
#################################################################################
print('Save lags and rollings')
grid_df.to_pickle(processed_data_dir+'lags_df_'+str(SHIFT_DAY)+'.pkl')
# # 4. Part3
# - Mean encoding feature
# In[ ]:
########################### Apply on grid_df
#################################################################################
# lets read grid from
# https://www.kaggle.com/kyakovlev/m5-simple-fe
# to be sure that our grids are aligned by index
grid_df = pd.read_pickle(processed_data_dir+'grid_part_1.pkl')
# Blank out the 28 forecast days so encodings are computed on train data only
grid_df['sales'][grid_df['d']>(1941-28)] = np.nan
base_cols = list(grid_df)
# Grouping keys to mean/std-encode, from coarsest to finest
icols = [
            ['state_id'],
            ['store_id'],
            ['cat_id'],
            ['dept_id'],
            ['state_id', 'cat_id'],
            ['state_id', 'dept_id'],
            ['store_id', 'cat_id'],
            ['store_id', 'dept_id'],
            ['item_id'],
            ['item_id', 'state_id'],
            ['item_id', 'store_id']
            ]
for col in icols:
    print('Encoding', col)
    col_name = '_'+'_'.join(col)+'_'
    grid_df['enc'+col_name+'mean'] = grid_df.groupby(col)['sales'].transform('mean').astype(np.float16)
    grid_df['enc'+col_name+'std'] = grid_df.groupby(col)['sales'].transform('std').astype(np.float16)
# Keep only the new encoded columns (plus the MAIN_INDEX keys)
keep_cols = [col for col in list(grid_df) if col not in base_cols]
grid_df = grid_df[['id','d']+keep_cols]
# In[ ]:
#################################################################################
print('Save Mean/Std encoding')
grid_df.to_pickle(processed_data_dir+'mean_encoding_df.pkl')
|
ed163c499ca8456f99f4397aeaf27634d99cb575
|
993252f0bab4d37b1ea0f0b9a95dbb96a200808f
|
/examples/sparse_eigen.py
|
ee331e49284f46a6184a50d84c859c9e8d4a758b
|
[] |
no_license
|
cvxgrp/dccp
|
4dba3e8e5c517a7c314a20d6144e2268c4cd61a1
|
c94aba9881e9f39119c1a693a5a752f23cf0d403
|
refs/heads/master
| 2023-06-09T08:50:12.835704
| 2022-07-09T21:22:12
| 2022-07-09T21:22:12
| 55,815,696
| 115
| 29
| null | 2020-12-02T04:47:42
| 2016-04-08T23:28:30
|
Python
|
UTF-8
|
Python
| false
| false
| 2,105
|
py
|
sparse_eigen.py
|
__author__ = "Xinyue"
from cvxpy import *
import numpy as np
import matplotlib.pyplot as plt
import dccp
np.random.seed(2)
# Build a 10x10 matrix with linearly dependent columns, then lift all
# singular values by 1 so A is full rank but has structured near-sparsity.
m = 10
n = 10
A0 = np.random.randn(m, n) / 3
A0[:, -1] = A0[:, -2] + A0[:, -3]  # spark(A) = 3
for j in range(6, 8):
    A0[:, j] = A0[:, j - 1] + A0[:, j - 3]
U, Sigma, V = np.linalg.svd(A0, 0)
Sigma += 1
A = np.dot(U, np.dot(np.diag(Sigma), V))
######################smallest singular value
# Minimize ||Ax||_2 over unit-norm x, with an l1 bound mu to induce sparsity.
# norm(x,2) == 1 is a nonconvex constraint, hence the DCCP solve below.
mu_min = Parameter(nonneg=True)
x_min = Variable(n)
# cost_min = norm(A*x_min)+lambd_min*norm(x_min,1)
cost_min = norm(A @ x_min)
constr_min = [norm(x_min, 2) == 1, norm(x_min, 1) <= mu_min]
obj_min = Minimize(cost_min)
prob_min = Problem(obj_min, constr_min)
singular_min_value = []
card_min = []
x_min_result = []
# lambd_min_vals = gamma_vals = np.logspace(-2,2,40)
# Sweep the l1 budget from 1 (max sparsity) to sqrt(n) (unconstrained).
mu_vals = np.linspace(1, np.sqrt(n), 50)
for val in mu_vals:
    mu_min.value = val
    prob_min.solve(method="dccp")
    singular_min_value.append(norm(A @ x_min).value)
    # Cardinality: entries above a 1e-2 threshold count as nonzero.
    card_min.append(np.sum(np.abs(x_min.value) >= 1e-2))
    x_min_result.append(x_min.value)
plt.figure(figsize=(5, 5))
# plt.subplot(121)
# for i in range(n):
#    plt.plot(mu_vals, [np.abs(xi[i,0]) for xi in x_min_result])
##for ind in range(len(card)-1):
##    plt.axvspan(lambd_vals[ind], lambd_vals[ind+1], facecolor=str(card[ind]/float(10)), edgecolor = 'none', alpha=0.3)
# plt.xlabel(r'$\mu$', fontsize=16)
# plt.ylabel(r'$\|x_{i}\|$', fontsize=16)
# plt.ylim([0,1])
plt.subplot(111)
# For each achieved cardinality keep the smallest ||Ax|| found in the sweep.
card_plot = []
s_value_plot = []
count = []
for ind in range(len(card_min)):
    if card_min[ind] not in card_plot:
        card_plot.append(card_min[ind])
        s_value_plot.append(singular_min_value[ind])
        count.append(1)
    else:
        temp_ind = card_plot.index(card_min[ind])
        s_value_plot[temp_ind] = np.min(
            [s_value_plot[temp_ind], singular_min_value[ind]]
        )
        count[temp_ind] += 1
plt.plot(s_value_plot, card_plot, "r o")
plt.ylim([0, 6])
plt.grid()
plt.xlabel(r"$\|\|Ax\|\|_2/\sigma_{\mathrm{min}}$", fontsize=16)
plt.ylabel("card($x$)", fontsize=16)
print("singular values = ", Sigma)
plt.show()
|
e1a214080ca1b1b78443771a50bd96b6a293d5d4
|
93713f46f16f1e29b725f263da164fed24ebf8a8
|
/Library/lib/python3.7/site-packages/sympy/integrals/rubi/rules/special_functions.py
|
02013023b396332adffd9b72a575834b4efd4dd5
|
[
"BSD-3-Clause"
] |
permissive
|
holzschu/Carnets
|
b83d15136d25db640cea023abb5c280b26a9620e
|
1ad7ec05fb1e3676ac879585296c513c3ee50ef9
|
refs/heads/master
| 2023-02-20T12:05:14.980685
| 2023-02-13T15:59:23
| 2023-02-13T15:59:23
| 167,671,526
| 541
| 36
|
BSD-3-Clause
| 2022-11-29T03:08:22
| 2019-01-26T09:26:46
|
Python
|
UTF-8
|
Python
| false
| false
| 89,418
|
py
|
special_functions.py
|
"""
This code is automatically generated. Never edit it manually.
For details of generating the code see `rubi_parsing_guide.md` in `parsetools`.
"""
from sympy.external import import_module
matchpy = import_module("matchpy")
if matchpy:
from matchpy import Pattern, ReplacementRule, CustomConstraint, is_match
from sympy.integrals.rubi.utility_function import (
Int, Sum, Set, With, Module, Scan, MapAnd, FalseQ,
ZeroQ, NegativeQ, NonzeroQ, FreeQ, NFreeQ, List, Log, PositiveQ,
PositiveIntegerQ, NegativeIntegerQ, IntegerQ, IntegersQ,
ComplexNumberQ, PureComplexNumberQ, RealNumericQ, PositiveOrZeroQ,
NegativeOrZeroQ, FractionOrNegativeQ, NegQ, Equal, Unequal, IntPart,
FracPart, RationalQ, ProductQ, SumQ, NonsumQ, Subst, First, Rest,
SqrtNumberQ, SqrtNumberSumQ, LinearQ, Sqrt, ArcCosh, Coefficient,
Denominator, Hypergeometric2F1, Not, Simplify, FractionalPart,
IntegerPart, AppellF1, EllipticPi, EllipticE, EllipticF, ArcTan,
ArcCot, ArcCoth, ArcTanh, ArcSin, ArcSinh, ArcCos, ArcCsc, ArcSec,
ArcCsch, ArcSech, Sinh, Tanh, Cosh, Sech, Csch, Coth, LessEqual, Less,
Greater, GreaterEqual, FractionQ, IntLinearcQ, Expand, IndependentQ,
PowerQ, IntegerPowerQ, PositiveIntegerPowerQ, FractionalPowerQ, AtomQ,
ExpQ, LogQ, Head, MemberQ, TrigQ, SinQ, CosQ, TanQ, CotQ, SecQ, CscQ,
Sin, Cos, Tan, Cot, Sec, Csc, HyperbolicQ, SinhQ, CoshQ, TanhQ, CothQ,
SechQ, CschQ, InverseTrigQ, SinCosQ, SinhCoshQ, LeafCount, Numerator,
NumberQ, NumericQ, Length, ListQ, Im, Re, InverseHyperbolicQ,
InverseFunctionQ, TrigHyperbolicFreeQ, InverseFunctionFreeQ, RealQ,
EqQ, FractionalPowerFreeQ, ComplexFreeQ, PolynomialQ, FactorSquareFree,
PowerOfLinearQ, Exponent, QuadraticQ, LinearPairQ, BinomialParts,
TrinomialParts, PolyQ, EvenQ, OddQ, PerfectSquareQ, NiceSqrtAuxQ,
NiceSqrtQ, Together, PosAux, PosQ, CoefficientList, ReplaceAll,
ExpandLinearProduct, GCD, ContentFactor, NumericFactor,
NonnumericFactors, MakeAssocList, GensymSubst, KernelSubst,
ExpandExpression, Apart, SmartApart, MatchQ,
PolynomialQuotientRemainder, FreeFactors, NonfreeFactors,
RemoveContentAux, RemoveContent, FreeTerms, NonfreeTerms,
ExpandAlgebraicFunction, CollectReciprocals, ExpandCleanup,
AlgebraicFunctionQ, Coeff, LeadTerm, RemainingTerms, LeadFactor,
RemainingFactors, LeadBase, LeadDegree, Numer, Denom, hypergeom, Expon,
MergeMonomials, PolynomialDivide, BinomialQ, TrinomialQ,
GeneralizedBinomialQ, GeneralizedTrinomialQ, FactorSquareFreeList,
PerfectPowerTest, SquareFreeFactorTest, RationalFunctionQ,
RationalFunctionFactors, NonrationalFunctionFactors, Reverse,
RationalFunctionExponents, RationalFunctionExpand, ExpandIntegrand,
SimplerQ, SimplerSqrtQ, SumSimplerQ, BinomialDegree, TrinomialDegree,
CancelCommonFactors, SimplerIntegrandQ, GeneralizedBinomialDegree,
GeneralizedBinomialParts, GeneralizedTrinomialDegree,
GeneralizedTrinomialParts, MonomialQ, MonomialSumQ,
MinimumMonomialExponent, MonomialExponent, LinearMatchQ,
PowerOfLinearMatchQ, QuadraticMatchQ, CubicMatchQ, BinomialMatchQ,
TrinomialMatchQ, GeneralizedBinomialMatchQ, GeneralizedTrinomialMatchQ,
QuotientOfLinearsMatchQ, PolynomialTermQ, PolynomialTerms,
NonpolynomialTerms, PseudoBinomialParts, NormalizePseudoBinomial,
PseudoBinomialPairQ, PseudoBinomialQ, PolynomialGCD, PolyGCD,
AlgebraicFunctionFactors, NonalgebraicFunctionFactors,
QuotientOfLinearsP, QuotientOfLinearsParts, QuotientOfLinearsQ,
Flatten, Sort, AbsurdNumberQ, AbsurdNumberFactors,
NonabsurdNumberFactors, SumSimplerAuxQ, Prepend, Drop,
CombineExponents, FactorInteger, FactorAbsurdNumber,
SubstForInverseFunction, SubstForFractionalPower,
SubstForFractionalPowerOfQuotientOfLinears,
FractionalPowerOfQuotientOfLinears, SubstForFractionalPowerQ,
SubstForFractionalPowerAuxQ, FractionalPowerOfSquareQ,
FractionalPowerSubexpressionQ, Apply, FactorNumericGcd,
MergeableFactorQ, MergeFactor, MergeFactors, TrigSimplifyQ,
TrigSimplify, TrigSimplifyRecur, Order, FactorOrder, Smallest,
OrderedQ, MinimumDegree, PositiveFactors, Sign, NonpositiveFactors,
PolynomialInAuxQ, PolynomialInQ, ExponentInAux, ExponentIn,
PolynomialInSubstAux, PolynomialInSubst, Distrib, DistributeDegree,
FunctionOfPower, DivideDegreesOfFactors, MonomialFactor, FullSimplify,
FunctionOfLinearSubst, FunctionOfLinear, NormalizeIntegrand,
NormalizeIntegrandAux, NormalizeIntegrandFactor,
NormalizeIntegrandFactorBase, NormalizeTogether,
NormalizeLeadTermSigns, AbsorbMinusSign, NormalizeSumFactors,
SignOfFactor, NormalizePowerOfLinear, SimplifyIntegrand, SimplifyTerm,
TogetherSimplify, SmartSimplify, SubstForExpn, ExpandToSum, UnifySum,
UnifyTerms, UnifyTerm, CalculusQ, FunctionOfInverseLinear,
PureFunctionOfSinhQ, PureFunctionOfTanhQ, PureFunctionOfCoshQ,
IntegerQuotientQ, OddQuotientQ, EvenQuotientQ, FindTrigFactor,
FunctionOfSinhQ, FunctionOfCoshQ, OddHyperbolicPowerQ, FunctionOfTanhQ,
FunctionOfTanhWeight, FunctionOfHyperbolicQ, SmartNumerator,
SmartDenominator, SubstForAux, ActivateTrig, ExpandTrig, TrigExpand,
SubstForTrig, SubstForHyperbolic, InertTrigFreeQ, LCM,
SubstForFractionalPowerOfLinear, FractionalPowerOfLinear,
InverseFunctionOfLinear, InertTrigQ, InertReciprocalQ, DeactivateTrig,
FixInertTrigFunction, DeactivateTrigAux, PowerOfInertTrigSumQ,
PiecewiseLinearQ, KnownTrigIntegrandQ, KnownSineIntegrandQ,
KnownTangentIntegrandQ, KnownCotangentIntegrandQ,
KnownSecantIntegrandQ, TryPureTanSubst, TryTanhSubst, TryPureTanhSubst,
AbsurdNumberGCD, AbsurdNumberGCDList, ExpandTrigExpand,
ExpandTrigReduce, ExpandTrigReduceAux, NormalizeTrig, TrigToExp,
ExpandTrigToExp, TrigReduce, FunctionOfTrig, AlgebraicTrigFunctionQ,
FunctionOfHyperbolic, FunctionOfQ, FunctionOfExpnQ, PureFunctionOfSinQ,
PureFunctionOfCosQ, PureFunctionOfTanQ, PureFunctionOfCotQ,
FunctionOfCosQ, FunctionOfSinQ, OddTrigPowerQ, FunctionOfTanQ,
FunctionOfTanWeight, FunctionOfTrigQ, FunctionOfDensePolynomialsQ,
FunctionOfLog, PowerVariableExpn, PowerVariableDegree,
PowerVariableSubst, EulerIntegrandQ, FunctionOfSquareRootOfQuadratic,
SquareRootOfQuadraticSubst, Divides, EasyDQ, ProductOfLinearPowersQ,
Rt, NthRoot, AtomBaseQ, SumBaseQ, NegSumBaseQ, AllNegTermQ,
SomeNegTermQ, TrigSquareQ, RtAux, TrigSquare, IntSum, IntTerm, Map2,
ConstantFactor, SameQ, ReplacePart, CommonFactors,
MostMainFactorPosition, FunctionOfExponentialQ, FunctionOfExponential,
FunctionOfExponentialFunction, FunctionOfExponentialFunctionAux,
FunctionOfExponentialTest, FunctionOfExponentialTestAux, stdev,
rubi_test, If, IntQuadraticQ, IntBinomialQ, RectifyTangent,
RectifyCotangent, Inequality, Condition, Simp, SimpHelp, SplitProduct,
SplitSum, SubstFor, SubstForAux, FresnelS, FresnelC, Erfc, Erfi, Gamma,
FunctionOfTrigOfLinearQ, ElementaryFunctionQ, Complex, UnsameQ,
_SimpFixFactor, SimpFixFactor, _FixSimplify, FixSimplify,
_SimplifyAntiderivativeSum, SimplifyAntiderivativeSum,
_SimplifyAntiderivative, SimplifyAntiderivative, _TrigSimplifyAux,
TrigSimplifyAux, Cancel, Part, PolyLog, D, Dist, Sum_doit, PolynomialQuotient, Floor,
PolynomialRemainder, Factor, PolyLog, CosIntegral, SinIntegral, LogIntegral, SinhIntegral,
CoshIntegral, Rule, Erf, PolyGamma, ExpIntegralEi, ExpIntegralE, LogGamma , UtilityOperator, Factorial,
Zeta, ProductLog, DerivativeDivides, HypergeometricPFQ, IntHide, OneQ, Null, rubi_exp as exp, rubi_log as log, Discriminant,
Negative, Quotient
)
from sympy import (Integral, S, sqrt, And, Or, Integer, Float, Mod, I, Abs, simplify, Mul,
Add, Pow, sign, EulerGamma)
from sympy.integrals.rubi.symbol import WC
from sympy.core.symbol import symbols, Symbol
from sympy.functions import (sin, cos, tan, cot, csc, sec, sqrt, erf)
from sympy.functions.elementary.hyperbolic import (acosh, asinh, atanh, acoth, acsch, asech, cosh, sinh, tanh, coth, sech, csch)
from sympy.functions.elementary.trigonometric import (atan, acsc, asin, acot, acos, asec, atan2)
from sympy import pi as Pi
A_, B_, C_, F_, G_, H_, a_, b_, c_, d_, e_, f_, g_, h_, i_, j_, k_, l_, m_, n_, p_, q_, r_, t_, u_, v_, s_, w_, x_, y_, z_ = [WC(i) for i in 'ABCFGHabcdefghijklmnpqrtuvswxyz']
a1_, a2_, b1_, b2_, c1_, c2_, d1_, d2_, n1_, n2_, e1_, e2_, f1_, f2_, g1_, g2_, n1_, n2_, n3_, Pq_, Pm_, Px_, Qm_, Qr_, Qx_, jn_, mn_, non2_, RFx_, RGx_ = [WC(i) for i in ['a1', 'a2', 'b1', 'b2', 'c1', 'c2', 'd1', 'd2', 'n1', 'n2', 'e1', 'e2', 'f1', 'f2', 'g1', 'g2', 'n1', 'n2', 'n3', 'Pq', 'Pm', 'Px', 'Qm', 'Qr', 'Qx', 'jn', 'mn', 'non2', 'RFx', 'RGx']]
i, ii, Pqq, Q, R, r, C, k, u = symbols('i ii Pqq Q R r C k u')
_UseGamma = False
ShowSteps = False
StepCounter = None
def special_functions():
from sympy.integrals.rubi.constraints import cons69, cons2, cons3, cons68, cons19, cons1266, cons8, cons29, cons20, cons168, cons1959, cons1960, cons96, cons263, cons1961, cons1834, cons64, cons1962, cons1963, cons1964, cons249, cons1965, cons1966, cons1967, cons1833, cons4, cons1257, cons21, cons1361, cons1968, cons1969, cons170, cons1970, cons1971, cons33, cons1972, cons1973, cons1974, cons802, cons89, cons90, cons5, cons52, cons91, cons385, cons50, cons1975, cons1976, cons1977, cons54, cons1978, cons1101, cons127, cons1245, cons13, cons139, cons1381, cons1979, cons1980, cons198, cons1981, cons1982, cons1983, cons152, cons465, cons1767, cons165, cons950, cons951, cons1984, cons1985, cons805, cons1986, cons1987, cons1988, cons1989, cons340, cons1990, cons1991, cons1992, cons1993, cons1994, cons1995, cons40, cons1996, cons349, cons1997, cons1998, cons1999, cons2000, cons2001, cons2002, cons2003
pattern6742 = Pattern(Integral(Erf(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6742 = ReplacementRule(pattern6742, replacement6742)
pattern6743 = Pattern(Integral(Erfc(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6743 = ReplacementRule(pattern6743, replacement6743)
pattern6744 = Pattern(Integral(Erfi(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6744 = ReplacementRule(pattern6744, replacement6744)
pattern6745 = Pattern(Integral(Erf(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6745 = ReplacementRule(pattern6745, replacement6745)
pattern6746 = Pattern(Integral(Erfc(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6746 = ReplacementRule(pattern6746, replacement6746)
pattern6747 = Pattern(Integral(Erfi(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6747 = ReplacementRule(pattern6747, replacement6747)
pattern6748 = Pattern(Integral(x_**WC('m', S(1))*Erf(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6748 = ReplacementRule(pattern6748, replacement6748)
pattern6749 = Pattern(Integral(x_**WC('m', S(1))*Erfc(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6749 = ReplacementRule(pattern6749, replacement6749)
pattern6750 = Pattern(Integral(x_**WC('m', S(1))*Erfi(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6750 = ReplacementRule(pattern6750, replacement6750)
pattern6751 = Pattern(Integral(x_*Erf(x_*WC('b', S(1)) + WC('a', S(0)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6751 = ReplacementRule(pattern6751, replacement6751)
pattern6752 = Pattern(Integral(x_*Erfc(x_*WC('b', S(1)) + WC('a', S(0)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6752 = ReplacementRule(pattern6752, replacement6752)
pattern6753 = Pattern(Integral(x_*Erfi(x_*WC('b', S(1)) + WC('a', S(0)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6753 = ReplacementRule(pattern6753, replacement6753)
pattern6754 = Pattern(Integral(x_**m_*Erf(x_*WC('b', S(1)) + WC('a', S(0)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons168)
rule6754 = ReplacementRule(pattern6754, replacement6754)
pattern6755 = Pattern(Integral(x_**m_*Erfc(x_*WC('b', S(1)) + WC('a', S(0)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons168)
rule6755 = ReplacementRule(pattern6755, replacement6755)
pattern6756 = Pattern(Integral(x_**m_*Erfi(x_*WC('b', S(1)) + WC('a', S(0)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons168)
rule6756 = ReplacementRule(pattern6756, replacement6756)
pattern6757 = Pattern(Integral(Erf(x_*WC('b', S(1)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0)))/x_, x_), cons3, cons1959)
rule6757 = ReplacementRule(pattern6757, replacement6757)
pattern6758 = Pattern(Integral(Erfc(x_*WC('b', S(1)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0)))/x_, x_), cons3, cons1959)
rule6758 = ReplacementRule(pattern6758, replacement6758)
pattern6759 = Pattern(Integral(Erfi(x_*WC('b', S(1)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0)))/x_, x_), cons3, cons1960)
rule6759 = ReplacementRule(pattern6759, replacement6759)
pattern6760 = Pattern(Integral(x_**m_*Erf(x_*WC('b', S(1)) + WC('a', S(0)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6760 = ReplacementRule(pattern6760, replacement6760)
pattern6761 = Pattern(Integral(x_**m_*Erfc(x_*WC('b', S(1)) + WC('a', S(0)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6761 = ReplacementRule(pattern6761, replacement6761)
pattern6762 = Pattern(Integral(x_**m_*Erfi(x_*WC('b', S(1)) + WC('a', S(0)))*exp(x_**S(2)*WC('d', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6762 = ReplacementRule(pattern6762, replacement6762)
pattern6763 = Pattern(Integral(Erf(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6763 = ReplacementRule(pattern6763, replacement6763)
pattern6764 = Pattern(Integral(Erfc(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6764 = ReplacementRule(pattern6764, replacement6764)
pattern6765 = Pattern(Integral(Erfi(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6765 = ReplacementRule(pattern6765, replacement6765)
pattern6766 = Pattern(Integral(x_**WC('m', S(1))*Erf(x_*WC('b', S(1)))**S(2), x_), cons3, cons20, cons263, cons1961)
rule6766 = ReplacementRule(pattern6766, replacement6766)
pattern6767 = Pattern(Integral(x_**WC('m', S(1))*Erfc(x_*WC('b', S(1)))**S(2), x_), cons3, cons20, cons1834, cons1961)
rule6767 = ReplacementRule(pattern6767, replacement6767)
pattern6768 = Pattern(Integral(x_**WC('m', S(1))*Erfi(x_*WC('b', S(1)))**S(2), x_), cons3, cons20, cons1834, cons1961)
rule6768 = ReplacementRule(pattern6768, replacement6768)
pattern6769 = Pattern(Integral(x_**WC('m', S(1))*Erf(a_ + x_*WC('b', S(1)))**S(2), x_), cons2, cons3, cons64)
rule6769 = ReplacementRule(pattern6769, replacement6769)
pattern6770 = Pattern(Integral(x_**WC('m', S(1))*Erfc(a_ + x_*WC('b', S(1)))**S(2), x_), cons2, cons3, cons64)
rule6770 = ReplacementRule(pattern6770, replacement6770)
pattern6771 = Pattern(Integral(x_**WC('m', S(1))*Erfi(a_ + x_*WC('b', S(1)))**S(2), x_), cons2, cons3, cons64)
rule6771 = ReplacementRule(pattern6771, replacement6771)
pattern6772 = Pattern(Integral(FresnelS(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6772 = ReplacementRule(pattern6772, replacement6772)
pattern6773 = Pattern(Integral(FresnelC(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6773 = ReplacementRule(pattern6773, replacement6773)
pattern6774 = Pattern(Integral(FresnelS(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6774 = ReplacementRule(pattern6774, replacement6774)
pattern6775 = Pattern(Integral(FresnelC(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6775 = ReplacementRule(pattern6775, replacement6775)
pattern6776 = Pattern(Integral(x_**WC('m', S(1))*FresnelS(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6776 = ReplacementRule(pattern6776, replacement6776)
pattern6777 = Pattern(Integral(x_**WC('m', S(1))*FresnelC(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6777 = ReplacementRule(pattern6777, replacement6777)
pattern6778 = Pattern(Integral(FresnelS(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6778 = ReplacementRule(pattern6778, replacement6778)
pattern6779 = Pattern(Integral(FresnelC(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6779 = ReplacementRule(pattern6779, replacement6779)
pattern6780 = Pattern(Integral(x_**m_*FresnelS(x_*WC('b', S(1)))**S(2), x_), cons3, cons20, cons1834, cons1962)
rule6780 = ReplacementRule(pattern6780, replacement6780)
pattern6781 = Pattern(Integral(x_**m_*FresnelC(x_*WC('b', S(1)))**S(2), x_), cons3, cons20, cons1834, cons1962)
rule6781 = ReplacementRule(pattern6781, replacement6781)
pattern6782 = Pattern(Integral(x_*FresnelS(x_*WC('b', S(1)))*sin(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963)
rule6782 = ReplacementRule(pattern6782, replacement6782)
pattern6783 = Pattern(Integral(x_*FresnelC(x_*WC('b', S(1)))*cos(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963)
rule6783 = ReplacementRule(pattern6783, replacement6783)
pattern6784 = Pattern(Integral(x_**m_*FresnelS(x_*WC('b', S(1)))*sin(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963, cons20, cons168, cons1964)
rule6784 = ReplacementRule(pattern6784, replacement6784)
pattern6785 = Pattern(Integral(x_**m_*FresnelC(x_*WC('b', S(1)))*cos(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963, cons20, cons168, cons1964)
rule6785 = ReplacementRule(pattern6785, replacement6785)
pattern6786 = Pattern(Integral(x_**m_*FresnelS(x_*WC('b', S(1)))*sin(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963, cons20, cons249, cons1965)
rule6786 = ReplacementRule(pattern6786, replacement6786)
pattern6787 = Pattern(Integral(x_**m_*FresnelC(x_*WC('b', S(1)))*cos(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963, cons20, cons249, cons1965)
rule6787 = ReplacementRule(pattern6787, replacement6787)
pattern6788 = Pattern(Integral(x_*FresnelS(x_*WC('b', S(1)))*cos(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963)
rule6788 = ReplacementRule(pattern6788, replacement6788)
pattern6789 = Pattern(Integral(x_*FresnelC(x_*WC('b', S(1)))*sin(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963)
rule6789 = ReplacementRule(pattern6789, replacement6789)
pattern6790 = Pattern(Integral(x_**m_*FresnelS(x_*WC('b', S(1)))*cos(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963, cons20, cons168, cons1966)
rule6790 = ReplacementRule(pattern6790, replacement6790)
pattern6791 = Pattern(Integral(x_**m_*FresnelC(x_*WC('b', S(1)))*sin(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963, cons20, cons168, cons1966)
rule6791 = ReplacementRule(pattern6791, replacement6791)
pattern6792 = Pattern(Integral(x_**m_*FresnelS(x_*WC('b', S(1)))*cos(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963, cons20, cons96, cons1967)
rule6792 = ReplacementRule(pattern6792, replacement6792)
pattern6793 = Pattern(Integral(x_**m_*FresnelC(x_*WC('b', S(1)))*sin(x_**S(2)*WC('c', S(1))), x_), cons3, cons8, cons1963, cons20, cons96, cons1967)
rule6793 = ReplacementRule(pattern6793, replacement6793)
pattern6794 = Pattern(Integral(ExpIntegralE(n_, x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons4, cons1833)
rule6794 = ReplacementRule(pattern6794, replacement6794)
pattern6795 = Pattern(Integral(x_**WC('m', S(1))*ExpIntegralE(n_, x_*WC('b', S(1))), x_), cons3, cons1257, cons64)
rule6795 = ReplacementRule(pattern6795, replacement6795)
pattern6796 = Pattern(Integral(ExpIntegralE(S(1), x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6796 = ReplacementRule(pattern6796, replacement6796)
pattern6797 = Pattern(Integral(x_**m_*ExpIntegralE(n_, x_*WC('b', S(1))), x_), cons3, cons1257, cons20, cons96)
rule6797 = ReplacementRule(pattern6797, replacement6797)
pattern6798 = Pattern(Integral(x_**m_*ExpIntegralE(n_, x_*WC('b', S(1))), x_), cons3, cons19, cons4, cons1257, cons21)
rule6798 = ReplacementRule(pattern6798, replacement6798)
pattern6799 = Pattern(Integral(x_**WC('m', S(1))*ExpIntegralE(n_, x_*WC('b', S(1))), x_), cons3, cons19, cons4, cons1361)
rule6799 = ReplacementRule(pattern6799, replacement6799)
pattern6800 = Pattern(Integral(x_**WC('m', S(1))*ExpIntegralE(n_, a_ + x_*WC('b', S(1))), x_), cons2, cons3, cons19, cons4, cons1968)
rule6800 = ReplacementRule(pattern6800, replacement6800)
pattern6801 = Pattern(Integral(x_**WC('m', S(1))*ExpIntegralE(n_, a_ + x_*WC('b', S(1))), x_), cons2, cons3, cons19, cons1969, cons68)
rule6801 = ReplacementRule(pattern6801, replacement6801)
pattern6802 = Pattern(Integral(ExpIntegralEi(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6802 = ReplacementRule(pattern6802, replacement6802)
pattern6803 = Pattern(Integral(x_**WC('m', S(1))*ExpIntegralEi(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6803 = ReplacementRule(pattern6803, replacement6803)
pattern6804 = Pattern(Integral(ExpIntegralEi(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6804 = ReplacementRule(pattern6804, replacement6804)
pattern6805 = Pattern(Integral(x_**WC('m', S(1))*ExpIntegralEi(x_*WC('b', S(1)))**S(2), x_), cons3, cons64)
rule6805 = ReplacementRule(pattern6805, replacement6805)
pattern6806 = Pattern(Integral(x_**WC('m', S(1))*ExpIntegralEi(a_ + x_*WC('b', S(1)))**S(2), x_), cons2, cons3, cons64)
rule6806 = ReplacementRule(pattern6806, replacement6806)
pattern6807 = Pattern(Integral(ExpIntegralEi(x_*WC('d', S(1)) + WC('c', S(0)))*exp(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6807 = ReplacementRule(pattern6807, replacement6807)
pattern6808 = Pattern(Integral(x_**WC('m', S(1))*ExpIntegralEi(x_*WC('d', S(1)) + WC('c', S(0)))*exp(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons64)
rule6808 = ReplacementRule(pattern6808, replacement6808)
pattern6809 = Pattern(Integral(x_**m_*ExpIntegralEi(x_*WC('d', S(1)) + WC('c', S(0)))*exp(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6809 = ReplacementRule(pattern6809, replacement6809)
pattern6810 = Pattern(Integral(LogIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6810 = ReplacementRule(pattern6810, replacement6810)
pattern6811 = Pattern(Integral(LogIntegral(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6811 = ReplacementRule(pattern6811, replacement6811)
pattern6812 = Pattern(Integral(x_**WC('m', S(1))*LogIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6812 = ReplacementRule(pattern6812, replacement6812)
pattern6813 = Pattern(Integral(SinIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6813 = ReplacementRule(pattern6813, replacement6813)
pattern6814 = Pattern(Integral(CosIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6814 = ReplacementRule(pattern6814, replacement6814)
pattern6815 = Pattern(Integral(SinIntegral(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6815 = ReplacementRule(pattern6815, replacement6815)
pattern6816 = Pattern(Integral(CosIntegral(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6816 = ReplacementRule(pattern6816, replacement6816)
pattern6817 = Pattern(Integral(x_**WC('m', S(1))*SinIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6817 = ReplacementRule(pattern6817, replacement6817)
pattern6818 = Pattern(Integral(x_**WC('m', S(1))*CosIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6818 = ReplacementRule(pattern6818, replacement6818)
pattern6819 = Pattern(Integral(SinIntegral(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6819 = ReplacementRule(pattern6819, replacement6819)
pattern6820 = Pattern(Integral(CosIntegral(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6820 = ReplacementRule(pattern6820, replacement6820)
pattern6821 = Pattern(Integral(x_**WC('m', S(1))*SinIntegral(x_*WC('b', S(1)))**S(2), x_), cons3, cons64)
rule6821 = ReplacementRule(pattern6821, replacement6821)
pattern6822 = Pattern(Integral(x_**WC('m', S(1))*CosIntegral(x_*WC('b', S(1)))**S(2), x_), cons3, cons64)
rule6822 = ReplacementRule(pattern6822, replacement6822)
pattern6823 = Pattern(Integral(x_**WC('m', S(1))*SinIntegral(a_ + x_*WC('b', S(1)))**S(2), x_), cons2, cons3, cons64)
rule6823 = ReplacementRule(pattern6823, replacement6823)
pattern6824 = Pattern(Integral(x_**WC('m', S(1))*CosIntegral(a_ + x_*WC('b', S(1)))**S(2), x_), cons2, cons3, cons64)
rule6824 = ReplacementRule(pattern6824, replacement6824)
pattern6825 = Pattern(Integral(SinIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sin(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6825 = ReplacementRule(pattern6825, replacement6825)
pattern6826 = Pattern(Integral(CosIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cos(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6826 = ReplacementRule(pattern6826, replacement6826)
pattern6827 = Pattern(Integral(x_**WC('m', S(1))*SinIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sin(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons64)
rule6827 = ReplacementRule(pattern6827, replacement6827)
pattern6828 = Pattern(Integral(x_**WC('m', S(1))*CosIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cos(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons64)
rule6828 = ReplacementRule(pattern6828, replacement6828)
pattern6829 = Pattern(Integral(x_**m_*SinIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sin(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6829 = ReplacementRule(pattern6829, replacement6829)
pattern6830 = Pattern(Integral(x_**WC('m', S(1))*CosIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cos(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6830 = ReplacementRule(pattern6830, replacement6830)
pattern6831 = Pattern(Integral(SinIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cos(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6831 = ReplacementRule(pattern6831, replacement6831)
pattern6832 = Pattern(Integral(CosIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sin(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6832 = ReplacementRule(pattern6832, replacement6832)
pattern6833 = Pattern(Integral(x_**WC('m', S(1))*SinIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cos(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons64)
rule6833 = ReplacementRule(pattern6833, replacement6833)
pattern6834 = Pattern(Integral(x_**WC('m', S(1))*CosIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sin(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons64)
rule6834 = ReplacementRule(pattern6834, replacement6834)
pattern6835 = Pattern(Integral(x_**WC('m', S(1))*SinIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cos(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6835 = ReplacementRule(pattern6835, replacement6835)
pattern6836 = Pattern(Integral(x_**m_*CosIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sin(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6836 = ReplacementRule(pattern6836, replacement6836)
pattern6837 = Pattern(Integral(SinhIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6837 = ReplacementRule(pattern6837, replacement6837)
pattern6838 = Pattern(Integral(CoshIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6838 = ReplacementRule(pattern6838, replacement6838)
pattern6839 = Pattern(Integral(SinhIntegral(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6839 = ReplacementRule(pattern6839, replacement6839)
pattern6840 = Pattern(Integral(CoshIntegral(x_*WC('b', S(1)))/x_, x_), cons3, cons3)
rule6840 = ReplacementRule(pattern6840, replacement6840)
pattern6841 = Pattern(Integral(x_**WC('m', S(1))*SinhIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6841 = ReplacementRule(pattern6841, replacement6841)
pattern6842 = Pattern(Integral(x_**WC('m', S(1))*CoshIntegral(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons19, cons68)
rule6842 = ReplacementRule(pattern6842, replacement6842)
pattern6843 = Pattern(Integral(SinhIntegral(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6843 = ReplacementRule(pattern6843, replacement6843)
pattern6844 = Pattern(Integral(CoshIntegral(x_*WC('b', S(1)) + WC('a', S(0)))**S(2), x_), cons2, cons3, cons69)
rule6844 = ReplacementRule(pattern6844, replacement6844)
pattern6845 = Pattern(Integral(x_**WC('m', S(1))*SinhIntegral(x_*WC('b', S(1)))**S(2), x_), cons3, cons64)
rule6845 = ReplacementRule(pattern6845, replacement6845)
pattern6846 = Pattern(Integral(x_**WC('m', S(1))*CoshIntegral(x_*WC('b', S(1)))**S(2), x_), cons3, cons64)
rule6846 = ReplacementRule(pattern6846, replacement6846)
pattern6847 = Pattern(Integral(x_**WC('m', S(1))*SinhIntegral(a_ + x_*WC('b', S(1)))**S(2), x_), cons2, cons3, cons64)
rule6847 = ReplacementRule(pattern6847, replacement6847)
pattern6848 = Pattern(Integral(x_**WC('m', S(1))*CoshIntegral(a_ + x_*WC('b', S(1)))**S(2), x_), cons2, cons3, cons64)
rule6848 = ReplacementRule(pattern6848, replacement6848)
pattern6849 = Pattern(Integral(SinhIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sinh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6849 = ReplacementRule(pattern6849, replacement6849)
pattern6850 = Pattern(Integral(CoshIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cosh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6850 = ReplacementRule(pattern6850, replacement6850)
pattern6851 = Pattern(Integral(x_**WC('m', S(1))*SinhIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sinh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons170)
rule6851 = ReplacementRule(pattern6851, replacement6851)
pattern6852 = Pattern(Integral(x_**WC('m', S(1))*CoshIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cosh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons170)
rule6852 = ReplacementRule(pattern6852, replacement6852)
pattern6853 = Pattern(Integral(x_**m_*SinhIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sinh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6853 = ReplacementRule(pattern6853, replacement6853)
pattern6854 = Pattern(Integral(x_**WC('m', S(1))*CoshIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cosh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6854 = ReplacementRule(pattern6854, replacement6854)
pattern6855 = Pattern(Integral(SinhIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cosh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6855 = ReplacementRule(pattern6855, replacement6855)
pattern6856 = Pattern(Integral(CoshIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sinh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons1266)
rule6856 = ReplacementRule(pattern6856, replacement6856)
pattern6857 = Pattern(Integral(x_**WC('m', S(1))*SinhIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cosh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons64)
rule6857 = ReplacementRule(pattern6857, replacement6857)
pattern6858 = Pattern(Integral(x_**WC('m', S(1))*CoshIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sinh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons64)
rule6858 = ReplacementRule(pattern6858, replacement6858)
pattern6859 = Pattern(Integral(x_**WC('m', S(1))*SinhIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*cosh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6859 = ReplacementRule(pattern6859, replacement6859)
pattern6860 = Pattern(Integral(x_**m_*CoshIntegral(x_*WC('d', S(1)) + WC('c', S(0)))*sinh(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons8, cons29, cons20, cons96)
rule6860 = ReplacementRule(pattern6860, replacement6860)
pattern6861 = Pattern(Integral(Gamma(n_, x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6861 = ReplacementRule(pattern6861, replacement6861)
pattern6862 = Pattern(Integral(Gamma(n_, b_*x_)/x_, x_), cons3, cons4, cons1970)
rule6862 = ReplacementRule(pattern6862, replacement6862)
pattern6863 = Pattern(Integral(x_**WC('m', S(1))*Gamma(n_, b_*x_), x_), cons3, cons19, cons4, cons68)
rule6863 = ReplacementRule(pattern6863, replacement6863)
pattern6864 = Pattern(Integral(x_**WC('m', S(1))*Gamma(n_, a_ + x_*WC('b', S(1))), x_), cons2, cons3, cons19, cons4, cons1971, cons68)
rule6864 = ReplacementRule(pattern6864, With6864)
pattern6865 = Pattern(Integral(LogGamma(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6865 = ReplacementRule(pattern6865, replacement6865)
pattern6866 = Pattern(Integral(x_**WC('m', S(1))*LogGamma(x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons33, cons170)
rule6866 = ReplacementRule(pattern6866, replacement6866)
pattern6867 = Pattern(Integral(PolyGamma(n_, x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons4, cons1833)
rule6867 = ReplacementRule(pattern6867, replacement6867)
pattern6868 = Pattern(Integral(x_**WC('m', S(1))*PolyGamma(n_, x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons4, cons33, cons170)
rule6868 = ReplacementRule(pattern6868, replacement6868)
pattern6869 = Pattern(Integral(x_**WC('m', S(1))*PolyGamma(n_, x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons4, cons33, cons96)
rule6869 = ReplacementRule(pattern6869, replacement6869)
pattern6870 = Pattern(Integral(Gamma(x_*WC('b', S(1)) + WC('a', S(0)))**WC('n', S(1))*PolyGamma(S(0), x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons4, cons1833)
rule6870 = ReplacementRule(pattern6870, replacement6870)
pattern6871 = Pattern(Integral(Factorial(x_*WC('b', S(1)) + WC('a', S(0)))**WC('n', S(1))*PolyGamma(S(0), x_*WC('b', S(1)) + WC('c', S(0))), x_), cons2, cons3, cons8, cons4, cons1972)
rule6871 = ReplacementRule(pattern6871, replacement6871)
pattern6872 = Pattern(Integral(Zeta(S(2), x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons69)
rule6872 = ReplacementRule(pattern6872, replacement6872)
pattern6873 = Pattern(Integral(Zeta(s_, x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons802, cons1973, cons1974)
rule6873 = ReplacementRule(pattern6873, replacement6873)
pattern6874 = Pattern(Integral(x_**WC('m', S(1))*Zeta(S(2), x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons33)
rule6874 = ReplacementRule(pattern6874, replacement6874)
pattern6875 = Pattern(Integral(x_**WC('m', S(1))*Zeta(s_, x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons802, cons1973, cons1974, cons33, cons170)
rule6875 = ReplacementRule(pattern6875, replacement6875)
pattern6876 = Pattern(Integral(x_**WC('m', S(1))*Zeta(s_, x_*WC('b', S(1)) + WC('a', S(0))), x_), cons2, cons3, cons802, cons1973, cons1974, cons33, cons96)
rule6876 = ReplacementRule(pattern6876, replacement6876)
pattern6877 = Pattern(Integral(PolyLog(n_, (x_**WC('p', S(1))*WC('b', S(1)))**WC('q', S(1))*WC('a', S(1))), x_), cons2, cons3, cons5, cons52, cons89, cons90)
rule6877 = ReplacementRule(pattern6877, replacement6877)
pattern6878 = Pattern(Integral(PolyLog(n_, (x_**WC('p', S(1))*WC('b', S(1)))**WC('q', S(1))*WC('a', S(1))), x_), cons2, cons3, cons5, cons52, cons89, cons91)
rule6878 = ReplacementRule(pattern6878, replacement6878)
pattern6879 = Pattern(Integral(PolyLog(n_, (x_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1))*WC('c', S(1)))/(x_*WC('e', S(1)) + WC('d', S(0))), x_), cons2, cons3, cons8, cons29, cons50, cons4, cons5, cons385)
rule6879 = ReplacementRule(pattern6879, replacement6879)
pattern6880 = Pattern(Integral(PolyLog(n_, (x_**WC('p', S(1))*WC('b', S(1)))**WC('q', S(1))*WC('a', S(1)))/x_, x_), cons2, cons3, cons4, cons5, cons52, cons1975)
rule6880 = ReplacementRule(pattern6880, replacement6880)
pattern6881 = Pattern(Integral(x_**WC('m', S(1))*PolyLog(n_, (x_**WC('p', S(1))*WC('b', S(1)))**WC('q', S(1))*WC('a', S(1))), x_), cons2, cons3, cons19, cons5, cons52, cons68, cons89, cons90)
rule6881 = ReplacementRule(pattern6881, replacement6881)
pattern6882 = Pattern(Integral(x_**WC('m', S(1))*PolyLog(n_, (x_**WC('p', S(1))*WC('b', S(1)))**WC('q', S(1))*WC('a', S(1))), x_), cons2, cons3, cons19, cons5, cons52, cons68, cons89, cons91)
rule6882 = ReplacementRule(pattern6882, replacement6882)
pattern6883 = Pattern(Integral(PolyLog(n_, (x_**WC('p', S(1))*WC('b', S(1)))**WC('q', S(1))*WC('a', S(1)))*log(x_**WC('m', S(1))*WC('c', S(1)))**WC('r', S(1))/x_, x_), cons2, cons3, cons8, cons19, cons4, cons52, cons54, cons1976, cons1977)
rule6883 = ReplacementRule(pattern6883, replacement6883)
pattern6884 = Pattern(Integral(PolyLog(n_, (x_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1))*WC('c', S(1))), x_), cons2, cons3, cons8, cons5, cons89, cons90)
rule6884 = ReplacementRule(pattern6884, replacement6884)
pattern6885 = Pattern(Integral(x_**WC('m', S(1))*PolyLog(n_, (x_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1))*WC('c', S(1))), x_), cons2, cons3, cons8, cons19, cons5, cons89, cons90, cons64)
rule6885 = ReplacementRule(pattern6885, replacement6885)
pattern6886 = Pattern(Integral(PolyLog(n_, (F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1))))**WC('p', S(1))*WC('d', S(1))), x_), cons1101, cons2, cons3, cons8, cons29, cons4, cons5, cons1978)
rule6886 = ReplacementRule(pattern6886, replacement6886)
pattern6887 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*PolyLog(n_, (F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1))))**WC('p', S(1))*WC('d', S(1))), x_), cons1101, cons2, cons3, cons8, cons29, cons50, cons127, cons4, cons5, cons33, cons170)
rule6887 = ReplacementRule(pattern6887, replacement6887)
pattern6888 = Pattern(Integral(u_*PolyLog(n_, v_), x_), cons4, cons4, CustomConstraint(With6888))
rule6888 = ReplacementRule(pattern6888, replacement6888)
pattern6889 = Pattern(Integral(u_*PolyLog(n_, v_)*log(w_), x_), cons4, cons1245, CustomConstraint(With6889))
rule6889 = ReplacementRule(pattern6889, replacement6889)
pattern6890 = Pattern(Integral((ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons13, cons139)
rule6890 = ReplacementRule(pattern6890, replacement6890)
pattern6891 = Pattern(Integral((ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons1381)
rule6891 = ReplacementRule(pattern6891, replacement6891)
pattern6892 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(a_ + x_*WC('b', S(1)))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons5, cons64)
rule6892 = ReplacementRule(pattern6892, replacement6892)
pattern6893 = Pattern(Integral((ProductLog(x_**n_*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons8, cons4, cons5, cons1979)
rule6893 = ReplacementRule(pattern6893, replacement6893)
pattern6894 = Pattern(Integral((ProductLog(x_**n_*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons8, cons4, cons1980)
rule6894 = ReplacementRule(pattern6894, replacement6894)
pattern6895 = Pattern(Integral((ProductLog(x_**n_*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons8, cons5, cons198)
rule6895 = ReplacementRule(pattern6895, replacement6895)
pattern6896 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons8, cons19, cons4, cons5, cons68, cons1981)
rule6896 = ReplacementRule(pattern6896, replacement6896)
pattern6897 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons8, cons19, cons4, cons5, cons1982)
rule6897 = ReplacementRule(pattern6897, replacement6897)
pattern6898 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons8, cons19, cons1983)
rule6898 = ReplacementRule(pattern6898, replacement6898)
pattern6899 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_**n_*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons8, cons5, cons152, cons465, cons68)
rule6899 = ReplacementRule(pattern6899, replacement6899)
pattern6900 = Pattern(Integral(S(1)/(d_ + ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('d', S(1))), x_), cons2, cons3, cons29, cons1767)
rule6900 = ReplacementRule(pattern6900, replacement6900)
pattern6901 = Pattern(Integral(ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))/(d_ + ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('d', S(1))), x_), cons2, cons3, cons29, cons1767)
rule6901 = ReplacementRule(pattern6901, replacement6901)
pattern6902 = Pattern(Integral((ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))**p_/(d_ + ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('d', S(1))), x_), cons2, cons3, cons8, cons29, cons13, cons165)
rule6902 = ReplacementRule(pattern6902, replacement6902)
pattern6903 = Pattern(Integral(S(1)/((d_ + ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('d', S(1)))*ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))), x_), cons2, cons3, cons29, cons1767)
rule6903 = ReplacementRule(pattern6903, replacement6903)
pattern6904 = Pattern(Integral(S(1)/(sqrt(ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))*(d_ + ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons950)
rule6904 = ReplacementRule(pattern6904, replacement6904)
pattern6905 = Pattern(Integral(S(1)/(sqrt(ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))*(d_ + ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons951)
rule6905 = ReplacementRule(pattern6905, replacement6905)
pattern6906 = Pattern(Integral((ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))**p_/(d_ + ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('d', S(1))), x_), cons2, cons3, cons8, cons29, cons13, cons139)
rule6906 = ReplacementRule(pattern6906, replacement6906)
pattern6907 = Pattern(Integral((ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_*WC('b', S(1)) + WC('a', S(0)))*WC('d', S(1))), x_), cons2, cons3, cons8, cons29, cons5, cons1984)
rule6907 = ReplacementRule(pattern6907, replacement6907)
pattern6908 = Pattern(Integral(x_**WC('m', S(1))/(d_ + ProductLog(a_ + x_*WC('b', S(1)))*WC('d', S(1))), x_), cons2, cons3, cons29, cons64)
rule6908 = ReplacementRule(pattern6908, replacement6908)
pattern6909 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(a_ + x_*WC('b', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(a_ + x_*WC('b', S(1)))*WC('d', S(1))), x_), cons2, cons3, cons8, cons29, cons5, cons64)
rule6909 = ReplacementRule(pattern6909, replacement6909)
pattern6910 = Pattern(Integral(S(1)/(d_ + ProductLog(x_**n_*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons29, cons198)
rule6910 = ReplacementRule(pattern6910, replacement6910)
pattern6911 = Pattern(Integral((ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons4, cons5, cons1985)
rule6911 = ReplacementRule(pattern6911, replacement6911)
pattern6912 = Pattern(Integral(ProductLog(x_**WC('n', S(1))*WC('a', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons29, cons805, cons1986)
rule6912 = ReplacementRule(pattern6912, replacement6912)
pattern6913 = Pattern(Integral((ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**p_/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons805, cons1987, cons1988)
rule6913 = ReplacementRule(pattern6913, replacement6913)
pattern6914 = Pattern(Integral((ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**p_/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons805, cons1987, cons1989)
rule6914 = ReplacementRule(pattern6914, replacement6914)
pattern6915 = Pattern(Integral((ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons340, cons90, cons1990)
rule6915 = ReplacementRule(pattern6915, replacement6915)
pattern6916 = Pattern(Integral((ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons340, cons90, cons1991)
rule6916 = ReplacementRule(pattern6916, replacement6916)
pattern6917 = Pattern(Integral((ProductLog(x_**n_*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**n_*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons5, cons198)
rule6917 = ReplacementRule(pattern6917, replacement6917)
pattern6918 = Pattern(Integral(x_**WC('m', S(1))/(d_ + ProductLog(x_*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons29, cons33, cons170)
rule6918 = ReplacementRule(pattern6918, replacement6918)
pattern6919 = Pattern(Integral(S(1)/(x_*(d_ + ProductLog(x_*WC('a', S(1)))*WC('d', S(1)))), x_), cons2, cons29, cons1992)
rule6919 = ReplacementRule(pattern6919, replacement6919)
pattern6920 = Pattern(Integral(x_**WC('m', S(1))/(d_ + ProductLog(x_*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons29, cons33, cons96)
rule6920 = ReplacementRule(pattern6920, replacement6920)
pattern6921 = Pattern(Integral(x_**WC('m', S(1))/(d_ + ProductLog(x_*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons29, cons19, cons21)
rule6921 = ReplacementRule(pattern6921, replacement6921)
pattern6922 = Pattern(Integral(S(1)/(x_*(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1)))), x_), cons2, cons29, cons4, cons1993)
rule6922 = ReplacementRule(pattern6922, replacement6922)
pattern6923 = Pattern(Integral(x_**WC('m', S(1))/(d_ + ProductLog(x_**n_*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons29, cons152, cons465, cons68)
rule6923 = ReplacementRule(pattern6923, replacement6923)
pattern6924 = Pattern(Integral((ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(x_*(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1)))), x_), cons2, cons8, cons29, cons4, cons5, cons1994)
rule6924 = ReplacementRule(pattern6924, replacement6924)
pattern6925 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons19, cons4, cons5, cons68, cons1995)
rule6925 = ReplacementRule(pattern6925, replacement6925)
pattern6926 = Pattern(Integral(x_**WC('m', S(1))*ProductLog(x_**WC('n', S(1))*WC('a', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons29, cons19, cons4, cons40, cons1996)
rule6926 = ReplacementRule(pattern6926, replacement6926)
pattern6927 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**p_/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons19, cons4, cons68, cons349, cons1997, cons1998)
rule6927 = ReplacementRule(pattern6927, replacement6927)
pattern6928 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**p_/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons19, cons4, cons68, cons349, cons1997, cons1999)
rule6928 = ReplacementRule(pattern6928, replacement6928)
pattern6929 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons19, cons4, cons5, cons68, cons2000, cons2001)
rule6929 = ReplacementRule(pattern6929, replacement6929)
pattern6930 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons19, cons4, cons5, cons68, cons2000, cons2002)
rule6930 = ReplacementRule(pattern6930, replacement6930)
pattern6931 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons19, cons5, cons68)
rule6931 = ReplacementRule(pattern6931, replacement6931)
pattern6932 = Pattern(Integral(x_**WC('m', S(1))*(ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('c', S(1)))**WC('p', S(1))/(d_ + ProductLog(x_**WC('n', S(1))*WC('a', S(1)))*WC('d', S(1))), x_), cons2, cons8, cons29, cons5, cons68, cons152, cons465)
rule6932 = ReplacementRule(pattern6932, replacement6932)
pattern6933 = Pattern(Integral(u_, x_), cons2003)
rule6933 = ReplacementRule(pattern6933, replacement6933)
return [rule6742, rule6743, rule6744, rule6745, rule6746, rule6747, rule6748, rule6749, rule6750, rule6751, rule6752, rule6753, rule6754, rule6755, rule6756, rule6757, rule6758, rule6759, rule6760, rule6761, rule6762, rule6763, rule6764, rule6765, rule6766, rule6767, rule6768, rule6769, rule6770, rule6771, rule6772, rule6773, rule6774, rule6775, rule6776, rule6777, rule6778, rule6779, rule6780, rule6781, rule6782, rule6783, rule6784, rule6785, rule6786, rule6787, rule6788, rule6789, rule6790, rule6791, rule6792, rule6793, rule6794, rule6795, rule6796, rule6797, rule6798, rule6799, rule6800, rule6801, rule6802, rule6803, rule6804, rule6805, rule6806, rule6807, rule6808, rule6809, rule6810, rule6811, rule6812, rule6813, rule6814, rule6815, rule6816, rule6817, rule6818, rule6819, rule6820, rule6821, rule6822, rule6823, rule6824, rule6825, rule6826, rule6827, rule6828, rule6829, rule6830, rule6831, rule6832, rule6833, rule6834, rule6835, rule6836, rule6837, rule6838, rule6839, rule6840, rule6841, rule6842, rule6843, rule6844, rule6845, rule6846, rule6847, rule6848, rule6849, rule6850, rule6851, rule6852, rule6853, rule6854, rule6855, rule6856, rule6857, rule6858, rule6859, rule6860, rule6861, rule6862, rule6863, rule6864, rule6865, rule6866, rule6867, rule6868, rule6869, rule6870, rule6871, rule6872, rule6873, rule6874, rule6875, rule6876, rule6877, rule6878, rule6879, rule6880, rule6881, rule6882, rule6883, rule6884, rule6885, rule6886, rule6887, rule6888, rule6889, rule6890, rule6891, rule6892, rule6893, rule6894, rule6895, rule6896, rule6897, rule6898, rule6899, rule6900, rule6901, rule6902, rule6903, rule6904, rule6905, rule6906, rule6907, rule6908, rule6909, rule6910, rule6911, rule6912, rule6913, rule6914, rule6915, rule6916, rule6917, rule6918, rule6919, rule6920, rule6921, rule6922, rule6923, rule6924, rule6925, rule6926, rule6927, rule6928, rule6929, rule6930, rule6931, rule6932, rule6933, ]
# ---------------------------------------------------------------------------
# RUBI replacement bodies for the error-function rules (6742-6771).
# Each ``replacementNNNN`` supplies the right-hand side of the
# ``ReplacementRule`` paired with ``patternNNNN`` elsewhere in this module.
# The helpers (Dist, Simp, Int, Subst, List) and special functions
# (Erf, Erfc, Erfi, HypergeometricPFQ) come from module-level imports.
# NOTE(review): this code is machine-generated from the Mathematica Rubi
# rule set -- the formulas should not be hand-edited.
# ---------------------------------------------------------------------------
# Rules 6742-6744: antiderivatives of Erf/Erfc/Erfi(a + b*x)
# (integration by parts; the exp term is the derivative's Gaussian factor).
def replacement6742(a, b, x):
    return Simp(exp(-(a + b*x)**S(2))/(sqrt(Pi)*b), x) + Simp((a + b*x)*Erf(a + b*x)/b, x)
def replacement6743(a, b, x):
    return -Simp(exp(-(a + b*x)**S(2))/(sqrt(Pi)*b), x) + Simp((a + b*x)*Erfc(a + b*x)/b, x)
def replacement6744(a, b, x):
    return -Simp(exp((a + b*x)**S(2))/(sqrt(Pi)*b), x) + Simp((a + b*x)*Erfi(a + b*x)/b, x)
# Rules 6745-6747: erf-family integrand over x, expressed via 2F2 series
# (6746 reduces Erfc via Erfc = 1 - Erf, leaving a log plus an Erf integral).
def replacement6745(b, x):
    return Simp(S(2)*b*x*HypergeometricPFQ(List(S(1)/2, S(1)/2), List(S(3)/2, S(3)/2), -b**S(2)*x**S(2))/sqrt(Pi), x)
def replacement6746(b, x):
    return -Int(Erf(b*x)/x, x) + Simp(log(x), x)
def replacement6747(b, x):
    return Simp(S(2)*b*x*HypergeometricPFQ(List(S(1)/2, S(1)/2), List(S(3)/2, S(3)/2), b**S(2)*x**S(2))/sqrt(Pi), x)
# Rules 6748-6750: x**m times Erf/Erfc/Erfi -- integration by parts leaving
# a Gaussian-weighted power integral.
def replacement6748(a, b, m, x):
    return -Dist(S(2)*b/(sqrt(Pi)*(m + S(1))), Int(x**(m + S(1))*exp(-(a + b*x)**S(2)), x), x) + Simp(x**(m + S(1))*Erf(a + b*x)/(m + S(1)), x)
def replacement6749(a, b, m, x):
    return Dist(S(2)*b/(sqrt(Pi)*(m + S(1))), Int(x**(m + S(1))*exp(-(a + b*x)**S(2)), x), x) + Simp(x**(m + S(1))*Erfc(a + b*x)/(m + S(1)), x)
def replacement6750(a, b, m, x):
    return -Dist(S(2)*b/(sqrt(Pi)*(m + S(1))), Int(x**(m + S(1))*exp((a + b*x)**S(2)), x), x) + Simp(x**(m + S(1))*Erfi(a + b*x)/(m + S(1)), x)
# Rules 6751-6753: erf-family function times exp(c + d*x**2).
def replacement6751(a, b, c, d, x):
    return -Dist(b/(sqrt(Pi)*d), Int(exp(-a**S(2) - S(2)*a*b*x + c - x**S(2)*(b**S(2) - d)), x), x) + Simp(Erf(a + b*x)*exp(c + d*x**S(2))/(S(2)*d), x)
def replacement6752(a, b, c, d, x):
    return Dist(b/(sqrt(Pi)*d), Int(exp(-a**S(2) - S(2)*a*b*x + c - x**S(2)*(b**S(2) - d)), x), x) + Simp(Erfc(a + b*x)*exp(c + d*x**S(2))/(S(2)*d), x)
def replacement6753(a, b, c, d, x):
    return -Dist(b/(sqrt(Pi)*d), Int(exp(a**S(2) + S(2)*a*b*x + c + x**S(2)*(b**S(2) + d)), x), x) + Simp(Erfi(a + b*x)*exp(c + d*x**S(2))/(S(2)*d), x)
# Rules 6754-6756: x**m * erf-family * exp(c + d*x**2) with positive m --
# recursion that steps the power m down by 2.
def replacement6754(a, b, c, d, m, x):
    return -Dist((m + S(-1))/(S(2)*d), Int(x**(m + S(-2))*Erf(a + b*x)*exp(c + d*x**S(2)), x), x) - Dist(b/(sqrt(Pi)*d), Int(x**(m + S(-1))*exp(-a**S(2) - S(2)*a*b*x + c - x**S(2)*(b**S(2) - d)), x), x) + Simp(x**(m + S(-1))*Erf(a + b*x)*exp(c + d*x**S(2))/(S(2)*d), x)
def replacement6755(a, b, c, d, m, x):
    return -Dist((m + S(-1))/(S(2)*d), Int(x**(m + S(-2))*Erfc(a + b*x)*exp(c + d*x**S(2)), x), x) + Dist(b/(sqrt(Pi)*d), Int(x**(m + S(-1))*exp(-a**S(2) - S(2)*a*b*x + c - x**S(2)*(b**S(2) - d)), x), x) + Simp(x**(m + S(-1))*Erfc(a + b*x)*exp(c + d*x**S(2))/(S(2)*d), x)
def replacement6756(a, b, c, d, m, x):
    return -Dist((m + S(-1))/(S(2)*d), Int(x**(m + S(-2))*Erfi(a + b*x)*exp(c + d*x**S(2)), x), x) - Dist(b/(sqrt(Pi)*d), Int(x**(m + S(-1))*exp(a**S(2) + S(2)*a*b*x + c + x**S(2)*(b**S(2) + d)), x), x) + Simp(x**(m + S(-1))*Erfi(a + b*x)*exp(c + d*x**S(2))/(S(2)*d), x)
# Rules 6757-6759: erf-family * exp(c + d*x**2) over x, via 3F3-type series
# (6758 again reduces Erfc through Erfc = 1 - Erf).
def replacement6757(b, c, d, x):
    return Simp(S(2)*b*x*HypergeometricPFQ(List(S(1)/2, S(1)), List(S(3)/2, S(3)/2), d*x**S(2))*exp(c)/sqrt(Pi), x)
def replacement6758(b, c, d, x):
    return Int(exp(c + d*x**S(2))/x, x) - Int(Erf(b*x)*exp(c + d*x**S(2))/x, x)
def replacement6759(b, c, d, x):
    return Simp(S(2)*b*x*HypergeometricPFQ(List(S(1)/2, S(1)), List(S(3)/2, S(3)/2), d*x**S(2))*exp(c)/sqrt(Pi), x)
# Rules 6760-6762: x**m * erf-family * exp(c + d*x**2) with negative m --
# recursion that steps the power m up by 2.
def replacement6760(a, b, c, d, m, x):
    return -Dist(S(2)*d/(m + S(1)), Int(x**(m + S(2))*Erf(a + b*x)*exp(c + d*x**S(2)), x), x) - Dist(S(2)*b/(sqrt(Pi)*(m + S(1))), Int(x**(m + S(1))*exp(-a**S(2) - S(2)*a*b*x + c - x**S(2)*(b**S(2) - d)), x), x) + Simp(x**(m + S(1))*Erf(a + b*x)*exp(c + d*x**S(2))/(m + S(1)), x)
def replacement6761(a, b, c, d, m, x):
    return -Dist(S(2)*d/(m + S(1)), Int(x**(m + S(2))*Erfc(a + b*x)*exp(c + d*x**S(2)), x), x) + Dist(S(2)*b/(sqrt(Pi)*(m + S(1))), Int(x**(m + S(1))*exp(-a**S(2) - S(2)*a*b*x + c - x**S(2)*(b**S(2) - d)), x), x) + Simp(x**(m + S(1))*Erfc(a + b*x)*exp(c + d*x**S(2))/(m + S(1)), x)
def replacement6762(a, b, c, d, m, x):
    return -Dist(S(2)*d/(m + S(1)), Int(x**(m + S(2))*Erfi(a + b*x)*exp(c + d*x**S(2)), x), x) - Dist(S(2)*b/(sqrt(Pi)*(m + S(1))), Int(x**(m + S(1))*exp(a**S(2) + S(2)*a*b*x + c + x**S(2)*(b**S(2) + d)), x), x) + Simp(x**(m + S(1))*Erfi(a + b*x)*exp(c + d*x**S(2))/(m + S(1)), x)
# Rules 6763-6765: squares of Erf/Erfc/Erfi -- integration by parts.
def replacement6763(a, b, x):
    return -Dist(S(4)/sqrt(Pi), Int((a + b*x)*Erf(a + b*x)*exp(-(a + b*x)**S(2)), x), x) + Simp((a + b*x)*Erf(a + b*x)**S(2)/b, x)
def replacement6764(a, b, x):
    return Dist(S(4)/sqrt(Pi), Int((a + b*x)*Erfc(a + b*x)*exp(-(a + b*x)**S(2)), x), x) + Simp((a + b*x)*Erfc(a + b*x)**S(2)/b, x)
def replacement6765(a, b, x):
    return -Dist(S(4)/sqrt(Pi), Int((a + b*x)*Erfi(a + b*x)*exp((a + b*x)**S(2)), x), x) + Simp((a + b*x)*Erfi(a + b*x)**S(2)/b, x)
# Rules 6766-6768: x**m times squared erf-family (a = 0 case).
def replacement6766(b, m, x):
    return -Dist(S(4)*b/(sqrt(Pi)*(m + S(1))), Int(x**(m + S(1))*Erf(b*x)*exp(-b**S(2)*x**S(2)), x), x) + Simp(x**(m + S(1))*Erf(b*x)**S(2)/(m + S(1)), x)
def replacement6767(b, m, x):
    return Dist(S(4)*b/(sqrt(Pi)*(m + S(1))), Int(x**(m + S(1))*Erfc(b*x)*exp(-b**S(2)*x**S(2)), x), x) + Simp(x**(m + S(1))*Erfc(b*x)**S(2)/(m + S(1)), x)
def replacement6768(b, m, x):
    return -Dist(S(4)*b/(sqrt(Pi)*(m + S(1))), Int(x**(m + S(1))*Erfi(b*x)*exp(b**S(2)*x**S(2)), x), x) + Simp(x**(m + S(1))*Erfi(b*x)**S(2)/(m + S(1)), x)
# Rules 6769-6771: general x**m times squared erf-family -- substitute
# u = a + b*x to reduce to the a = 0 form.
def replacement6769(a, b, m, x):
    return Dist(S(1)/b, Subst(Int((-a/b + x/b)**m*Erf(x)**S(2), x), x, a + b*x), x)
def replacement6770(a, b, m, x):
    return Dist(S(1)/b, Subst(Int((-a/b + x/b)**m*Erfc(x)**S(2), x), x, a + b*x), x)
def replacement6771(a, b, m, x):
    return Dist(S(1)/b, Subst(Int((-a/b + x/b)**m*Erfi(x)**S(2), x), x, a + b*x), x)
# ---------------------------------------------------------------------------
# RUBI replacement bodies for the Fresnel-integral rules (6772-6793).
# FresnelS/FresnelC use the "pi-normalized" convention -- note the
# Pi*(...)**2/2 arguments of sin/cos throughout.
# NOTE(review): machine-generated from the Mathematica Rubi rule set.
# ---------------------------------------------------------------------------
# Rules 6772-6773: antiderivatives of FresnelS/FresnelC(a + b*x).
def replacement6772(a, b, x):
    return Simp(cos(Pi*(a + b*x)**S(2)/S(2))/(Pi*b), x) + Simp((a + b*x)*FresnelS(a + b*x)/b, x)
def replacement6773(a, b, x):
    return -Simp(sin(Pi*(a + b*x)**S(2)/S(2))/(Pi*b), x) + Simp((a + b*x)*FresnelC(a + b*x)/b, x)
# Rules 6774-6775: Fresnel integrand over x, written as a combination of
# hypergeometric 2F2 terms with imaginary arguments.
def replacement6774(b, x):
    return Simp(I*b*x*HypergeometricPFQ(List(S(1)/2, S(1)/2), List(S(3)/2, S(3)/2), -I*Pi*b**S(2)*x**S(2)/S(2))/S(2), x) - Simp(I*b*x*HypergeometricPFQ(List(S(1)/2, S(1)/2), List(S(3)/2, S(3)/2), I*Pi*b**S(2)*x**S(2)/S(2))/S(2), x)
def replacement6775(b, x):
    return Simp(b*x*HypergeometricPFQ(List(S(1)/2, S(1)/2), List(S(3)/2, S(3)/2), -I*Pi*b**S(2)*x**S(2)/S(2))/S(2), x) + Simp(b*x*HypergeometricPFQ(List(S(1)/2, S(1)/2), List(S(3)/2, S(3)/2), I*Pi*b**S(2)*x**S(2)/S(2))/S(2), x)
# Rules 6776-6777: x**m times FresnelS/FresnelC -- integration by parts.
def replacement6776(a, b, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*sin(Pi*(a + b*x)**S(2)/S(2)), x), x) + Simp(x**(m + S(1))*FresnelS(a + b*x)/(m + S(1)), x)
def replacement6777(a, b, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*cos(Pi*(a + b*x)**S(2)/S(2)), x), x) + Simp(x**(m + S(1))*FresnelC(a + b*x)/(m + S(1)), x)
# Rules 6778-6781: squares of FresnelS/FresnelC, with and without x**m.
def replacement6778(a, b, x):
    return -Dist(S(2), Int((a + b*x)*FresnelS(a + b*x)*sin(Pi*(a + b*x)**S(2)/S(2)), x), x) + Simp((a + b*x)*FresnelS(a + b*x)**S(2)/b, x)
def replacement6779(a, b, x):
    return -Dist(S(2), Int((a + b*x)*FresnelC(a + b*x)*cos(Pi*(a + b*x)**S(2)/S(2)), x), x) + Simp((a + b*x)*FresnelC(a + b*x)**S(2)/b, x)
def replacement6780(b, m, x):
    return -Dist(S(2)*b/(m + S(1)), Int(x**(m + S(1))*FresnelS(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2)), x), x) + Simp(x**(m + S(1))*FresnelS(b*x)**S(2)/(m + S(1)), x)
def replacement6781(b, m, x):
    return -Dist(S(2)*b/(m + S(1)), Int(x**(m + S(1))*FresnelC(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2)), x), x) + Simp(x**(m + S(1))*FresnelC(b*x)**S(2)/(m + S(1)), x)
# Rules 6782-6787: x**m * Fresnel function times the *matching* sin/cos of
# the quadratic argument (double-angle identities produce the sin/cos of
# Pi*b**2*x**2 terms).
def replacement6782(b, c, x):
    return Dist(S(1)/(S(2)*Pi*b), Int(sin(Pi*b**S(2)*x**S(2)), x), x) - Simp(FresnelS(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2))/(Pi*b**S(2)), x)
def replacement6783(b, c, x):
    return -Dist(S(1)/(S(2)*Pi*b), Int(sin(Pi*b**S(2)*x**S(2)), x), x) + Simp(FresnelC(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2))/(Pi*b**S(2)), x)
def replacement6784(b, c, m, x):
    return Dist(S(1)/(S(2)*Pi*b), Int(x**(m + S(-1))*sin(Pi*b**S(2)*x**S(2)), x), x) + Dist((m + S(-1))/(Pi*b**S(2)), Int(x**(m + S(-2))*FresnelS(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2)), x), x) - Simp(x**(m + S(-1))*FresnelS(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2))/(Pi*b**S(2)), x)
def replacement6785(b, c, m, x):
    return -Dist(S(1)/(S(2)*Pi*b), Int(x**(m + S(-1))*sin(Pi*b**S(2)*x**S(2)), x), x) - Dist((m + S(-1))/(Pi*b**S(2)), Int(x**(m + S(-2))*FresnelC(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2)), x), x) + Simp(x**(m + S(-1))*FresnelC(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2))/(Pi*b**S(2)), x)
def replacement6786(b, c, m, x):
    return Dist(b/(S(2)*m + S(2)), Int(x**(m + S(1))*cos(Pi*b**S(2)*x**S(2)), x), x) - Dist(Pi*b**S(2)/(m + S(1)), Int(x**(m + S(2))*FresnelS(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2)), x), x) - Simp(b*x**(m + S(2))/(S(2)*(m + S(1))*(m + S(2))), x) + Simp(x**(m + S(1))*FresnelS(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2))/(m + S(1)), x)
def replacement6787(b, c, m, x):
    return -Dist(b/(S(2)*m + S(2)), Int(x**(m + S(1))*cos(Pi*b**S(2)*x**S(2)), x), x) + Dist(Pi*b**S(2)/(m + S(1)), Int(x**(m + S(2))*FresnelC(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2)), x), x) - Simp(b*x**(m + S(2))/(S(2)*(m + S(1))*(m + S(2))), x) + Simp(x**(m + S(1))*FresnelC(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2))/(m + S(1)), x)
# Rules 6788-6793: x**m * Fresnel function times the *opposite* sin/cos of
# the quadratic argument.
def replacement6788(b, c, x):
    return Dist(S(1)/(S(2)*Pi*b), Int(cos(Pi*b**S(2)*x**S(2)), x), x) - Simp(x/(S(2)*Pi*b), x) + Simp(FresnelS(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2))/(Pi*b**S(2)), x)
def replacement6789(b, c, x):
    return Dist(S(1)/(S(2)*Pi*b), Int(cos(Pi*b**S(2)*x**S(2)), x), x) + Simp(x/(S(2)*Pi*b), x) - Simp(FresnelC(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2))/(Pi*b**S(2)), x)
def replacement6790(b, c, m, x):
    return Dist(S(1)/(S(2)*Pi*b), Int(x**(m + S(-1))*cos(Pi*b**S(2)*x**S(2)), x), x) - Dist((m + S(-1))/(Pi*b**S(2)), Int(x**(m + S(-2))*FresnelS(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2)), x), x) - Simp(x**m/(S(2)*Pi*b*m), x) + Simp(x**(m + S(-1))*FresnelS(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2))/(Pi*b**S(2)), x)
def replacement6791(b, c, m, x):
    return Dist(S(1)/(S(2)*Pi*b), Int(x**(m + S(-1))*cos(Pi*b**S(2)*x**S(2)), x), x) + Dist((m + S(-1))/(Pi*b**S(2)), Int(x**(m + S(-2))*FresnelC(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2)), x), x) + Simp(x**m/(S(2)*Pi*b*m), x) - Simp(x**(m + S(-1))*FresnelC(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2))/(Pi*b**S(2)), x)
def replacement6792(b, c, m, x):
    return -Dist(b/(S(2)*m + S(2)), Int(x**(m + S(1))*sin(Pi*b**S(2)*x**S(2)), x), x) + Dist(Pi*b**S(2)/(m + S(1)), Int(x**(m + S(2))*FresnelS(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2)), x), x) + Simp(x**(m + S(1))*FresnelS(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2))/(m + S(1)), x)
def replacement6793(b, c, m, x):
    return -Dist(b/(S(2)*m + S(2)), Int(x**(m + S(1))*sin(Pi*b**S(2)*x**S(2)), x), x) - Dist(Pi*b**S(2)/(m + S(1)), Int(x**(m + S(2))*FresnelC(b*x)*cos(Pi*b**S(2)*x**S(2)/S(2)), x), x) + Simp(x**(m + S(1))*FresnelC(b*x)*sin(Pi*b**S(2)*x**S(2)/S(2))/(m + S(1)), x)
# ---------------------------------------------------------------------------
# RUBI replacement bodies for the exponential-integral and log-integral
# rules (6794-6812): ExpIntegralE(n, z), ExpIntegralEi(z), LogIntegral(z).
# NOTE(review): machine-generated from the Mathematica Rubi rule set.
# ---------------------------------------------------------------------------
# Rule 6794: antiderivative of ExpIntegralE(n, a + b*x) raises the order n.
def replacement6794(a, b, n, x):
    return -Simp(ExpIntegralE(n + S(1), a + b*x)/b, x)
# Rules 6795, 6797: x**m * ExpIntegralE(n, b*x) -- recursions stepping m
# down / n down via integration by parts.
def replacement6795(b, m, n, x):
    return Dist(m/b, Int(x**(m + S(-1))*ExpIntegralE(n + S(1), b*x), x), x) - Simp(x**m*ExpIntegralE(n + S(1), b*x)/b, x)
# Rule 6796: closed form with EulerGamma, a 3F3 term and log(b*x)**2.
def replacement6796(b, x):
    return -Simp(EulerGamma*log(x), x) + Simp(b*x*HypergeometricPFQ(List(S(1), S(1), S(1)), List(S(2), S(2), S(2)), -b*x), x) - Simp(log(b*x)**S(2)/S(2), x)
def replacement6797(b, m, n, x):
    return Dist(b/(m + S(1)), Int(x**(m + S(1))*ExpIntegralE(n + S(-1), b*x), x), x) + Simp(x**(m + S(1))*ExpIntegralE(n, b*x)/(m + S(1)), x)
# Rules 6798-6799: special m/n combinations expressed in closed form
# (Gamma and hypergeometric terms, or a difference of ExpIntegralE orders).
def replacement6798(b, m, n, x):
    return -Simp(x**(m + S(1))*HypergeometricPFQ(List(m + S(1), m + S(1)), List(m + S(2), m + S(2)), -b*x)/(m + S(1))**S(2), x) + Simp(x**m*(b*x)**(-m)*Gamma(m + S(1))*log(x)/b, x)
def replacement6799(b, m, n, x):
    return -Simp(x**(m + S(1))*ExpIntegralE(-m, b*x)/(m + n), x) + Simp(x**(m + S(1))*ExpIntegralE(n, b*x)/(m + n), x)
# Rules 6800-6801: the same two recursions for the shifted argument a + b*x.
def replacement6800(a, b, m, n, x):
    return Dist(m/b, Int(x**(m + S(-1))*ExpIntegralE(n + S(1), a + b*x), x), x) - Simp(x**m*ExpIntegralE(n + S(1), a + b*x)/b, x)
def replacement6801(a, b, m, n, x):
    return Dist(b/(m + S(1)), Int(x**(m + S(1))*ExpIntegralE(n + S(-1), a + b*x), x), x) + Simp(x**(m + S(1))*ExpIntegralE(n, a + b*x)/(m + S(1)), x)
# Rules 6802-6803: ExpIntegralEi(a + b*x), alone and with x**m.
def replacement6802(a, b, x):
    return -Simp(exp(a + b*x)/b, x) + Simp((a + b*x)*ExpIntegralEi(a + b*x)/b, x)
def replacement6803(a, b, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*exp(a + b*x)/(a + b*x), x), x) + Simp(x**(m + S(1))*ExpIntegralEi(a + b*x)/(m + S(1)), x)
# Rules 6804-6806: squared ExpIntegralEi, alone and with x**m.
def replacement6804(a, b, x):
    return -Dist(S(2), Int(ExpIntegralEi(a + b*x)*exp(a + b*x), x), x) + Simp((a + b*x)*ExpIntegralEi(a + b*x)**S(2)/b, x)
def replacement6805(b, m, x):
    return -Dist(S(2)/(m + S(1)), Int(x**m*ExpIntegralEi(b*x)*exp(b*x), x), x) + Simp(x**(m + S(1))*ExpIntegralEi(b*x)**S(2)/(m + S(1)), x)
def replacement6806(a, b, m, x):
    return -Dist(a*m/(b*(m + S(1))), Int(x**(m + S(-1))*ExpIntegralEi(a + b*x)**S(2), x), x) - Dist(S(2)/(m + S(1)), Int(x**m*ExpIntegralEi(a + b*x)*exp(a + b*x), x), x) + Simp(x**(m + S(1))*ExpIntegralEi(a + b*x)**S(2)/(m + S(1)), x) + Simp(a*x**m*ExpIntegralEi(a + b*x)**S(2)/(b*(m + S(1))), x)
# Rules 6807-6809: exp(a + b*x) * ExpIntegralEi(c + d*x), with x**m
# recursions stepping m down (6808) or up (6809).
def replacement6807(a, b, c, d, x):
    return -Dist(d/b, Int(exp(a + c + x*(b + d))/(c + d*x), x), x) + Simp(ExpIntegralEi(c + d*x)*exp(a + b*x)/b, x)
def replacement6808(a, b, c, d, m, x):
    return -Dist(d/b, Int(x**m*exp(a + c + x*(b + d))/(c + d*x), x), x) - Dist(m/b, Int(x**(m + S(-1))*ExpIntegralEi(c + d*x)*exp(a + b*x), x), x) + Simp(x**m*ExpIntegralEi(c + d*x)*exp(a + b*x)/b, x)
def replacement6809(a, b, c, d, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*ExpIntegralEi(c + d*x)*exp(a + b*x), x), x) - Dist(d/(m + S(1)), Int(x**(m + S(1))*exp(a + c + x*(b + d))/(c + d*x), x), x) + Simp(x**(m + S(1))*ExpIntegralEi(c + d*x)*exp(a + b*x)/(m + S(1)), x)
# Rules 6810-6812: LogIntegral(a + b*x), alone and with x**m
# (uses li(z) = Ei(2*log(z)) structure in 6810).
def replacement6810(a, b, x):
    return -Simp(ExpIntegralEi(S(2)*log(a + b*x))/b, x) + Simp((a + b*x)*LogIntegral(a + b*x)/b, x)
def replacement6811(b, x):
    return -Simp(b*x, x) + Simp(LogIntegral(b*x)*log(b*x), x)
def replacement6812(a, b, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))/log(a + b*x), x), x) + Simp(x**(m + S(1))*LogIntegral(a + b*x)/(m + S(1)), x)
# ---------------------------------------------------------------------------
# RUBI replacement bodies for the sine-integral / cosine-integral rules
# (6813-6836): SinIntegral(z) = Si(z), CosIntegral(z) = Ci(z).
# NOTE(review): machine-generated from the Mathematica Rubi rule set.
# ---------------------------------------------------------------------------
# Rules 6813-6814: antiderivatives of SinIntegral/CosIntegral(a + b*x).
def replacement6813(a, b, x):
    return Simp(cos(a + b*x)/b, x) + Simp((a + b*x)*SinIntegral(a + b*x)/b, x)
def replacement6814(a, b, x):
    return -Simp(sin(a + b*x)/b, x) + Simp((a + b*x)*CosIntegral(a + b*x)/b, x)
# Rules 6815-6816: Si/Ci integrand over x via 3F3 terms with +/- I*b*x
# arguments (6816 also carries the EulerGamma and log**2 terms of Ci).
def replacement6815(b, x):
    return Simp(b*x*HypergeometricPFQ(List(S(1), S(1), S(1)), List(S(2), S(2), S(2)), -I*b*x)/S(2), x) + Simp(b*x*HypergeometricPFQ(List(S(1), S(1), S(1)), List(S(2), S(2), S(2)), I*b*x)/S(2), x)
def replacement6816(b, x):
    return Simp(EulerGamma*log(x), x) - Simp(I*b*x*HypergeometricPFQ(List(S(1), S(1), S(1)), List(S(2), S(2), S(2)), -I*b*x)/S(2), x) + Simp(I*b*x*HypergeometricPFQ(List(S(1), S(1), S(1)), List(S(2), S(2), S(2)), I*b*x)/S(2), x) + Simp(log(b*x)**S(2)/S(2), x)
# Rules 6817-6818: x**m times Si/Ci -- integration by parts.
def replacement6817(a, b, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*sin(a + b*x)/(a + b*x), x), x) + Simp(x**(m + S(1))*SinIntegral(a + b*x)/(m + S(1)), x)
def replacement6818(a, b, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*cos(a + b*x)/(a + b*x), x), x) + Simp(x**(m + S(1))*CosIntegral(a + b*x)/(m + S(1)), x)
# Rules 6819-6824: squares of Si/Ci, alone and with x**m.
def replacement6819(a, b, x):
    return -Dist(S(2), Int(SinIntegral(a + b*x)*sin(a + b*x), x), x) + Simp((a + b*x)*SinIntegral(a + b*x)**S(2)/b, x)
def replacement6820(a, b, x):
    return -Dist(S(2), Int(CosIntegral(a + b*x)*cos(a + b*x), x), x) + Simp((a + b*x)*CosIntegral(a + b*x)**S(2)/b, x)
def replacement6821(b, m, x):
    return -Dist(S(2)/(m + S(1)), Int(x**m*SinIntegral(b*x)*sin(b*x), x), x) + Simp(x**(m + S(1))*SinIntegral(b*x)**S(2)/(m + S(1)), x)
def replacement6822(b, m, x):
    return -Dist(S(2)/(m + S(1)), Int(x**m*CosIntegral(b*x)*cos(b*x), x), x) + Simp(x**(m + S(1))*CosIntegral(b*x)**S(2)/(m + S(1)), x)
def replacement6823(a, b, m, x):
    return -Dist(a*m/(b*(m + S(1))), Int(x**(m + S(-1))*SinIntegral(a + b*x)**S(2), x), x) - Dist(S(2)/(m + S(1)), Int(x**m*SinIntegral(a + b*x)*sin(a + b*x), x), x) + Simp(x**(m + S(1))*SinIntegral(a + b*x)**S(2)/(m + S(1)), x) + Simp(a*x**m*SinIntegral(a + b*x)**S(2)/(b*(m + S(1))), x)
def replacement6824(a, b, m, x):
    return -Dist(a*m/(b*(m + S(1))), Int(x**(m + S(-1))*CosIntegral(a + b*x)**S(2), x), x) - Dist(S(2)/(m + S(1)), Int(x**m*CosIntegral(a + b*x)*cos(a + b*x), x), x) + Simp(x**(m + S(1))*CosIntegral(a + b*x)**S(2)/(m + S(1)), x) + Simp(a*x**m*CosIntegral(a + b*x)**S(2)/(b*(m + S(1))), x)
# Rules 6825-6830: sin(a + b*x) * SinIntegral(c + d*x) (and Ci analogue),
# with x**m recursions stepping m down (6827-6828) or up (6829-6830).
def replacement6825(a, b, c, d, x):
    return Dist(d/b, Int(sin(c + d*x)*cos(a + b*x)/(c + d*x), x), x) - Simp(SinIntegral(c + d*x)*cos(a + b*x)/b, x)
def replacement6826(a, b, c, d, x):
    return -Dist(d/b, Int(sin(a + b*x)*cos(c + d*x)/(c + d*x), x), x) + Simp(CosIntegral(c + d*x)*sin(a + b*x)/b, x)
def replacement6827(a, b, c, d, m, x):
    return Dist(d/b, Int(x**m*sin(c + d*x)*cos(a + b*x)/(c + d*x), x), x) + Dist(m/b, Int(x**(m + S(-1))*SinIntegral(c + d*x)*cos(a + b*x), x), x) - Simp(x**m*SinIntegral(c + d*x)*cos(a + b*x)/b, x)
def replacement6828(a, b, c, d, m, x):
    return -Dist(d/b, Int(x**m*sin(a + b*x)*cos(c + d*x)/(c + d*x), x), x) - Dist(m/b, Int(x**(m + S(-1))*CosIntegral(c + d*x)*sin(a + b*x), x), x) + Simp(x**m*CosIntegral(c + d*x)*sin(a + b*x)/b, x)
def replacement6829(a, b, c, d, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*SinIntegral(c + d*x)*cos(a + b*x), x), x) - Dist(d/(m + S(1)), Int(x**(m + S(1))*sin(a + b*x)*sin(c + d*x)/(c + d*x), x), x) + Simp(x**(m + S(1))*SinIntegral(c + d*x)*sin(a + b*x)/(m + S(1)), x)
def replacement6830(a, b, c, d, m, x):
    return Dist(b/(m + S(1)), Int(x**(m + S(1))*CosIntegral(c + d*x)*sin(a + b*x), x), x) - Dist(d/(m + S(1)), Int(x**(m + S(1))*cos(a + b*x)*cos(c + d*x)/(c + d*x), x), x) + Simp(x**(m + S(1))*CosIntegral(c + d*x)*cos(a + b*x)/(m + S(1)), x)
# Rules 6831-6836: cos(a + b*x) * SinIntegral(c + d*x) (and Ci analogue),
# same m-down / m-up recursion structure.
def replacement6831(a, b, c, d, x):
    return -Dist(d/b, Int(sin(a + b*x)*sin(c + d*x)/(c + d*x), x), x) + Simp(SinIntegral(c + d*x)*sin(a + b*x)/b, x)
def replacement6832(a, b, c, d, x):
    return Dist(d/b, Int(cos(a + b*x)*cos(c + d*x)/(c + d*x), x), x) - Simp(CosIntegral(c + d*x)*cos(a + b*x)/b, x)
def replacement6833(a, b, c, d, m, x):
    return -Dist(d/b, Int(x**m*sin(a + b*x)*sin(c + d*x)/(c + d*x), x), x) - Dist(m/b, Int(x**(m + S(-1))*SinIntegral(c + d*x)*sin(a + b*x), x), x) + Simp(x**m*SinIntegral(c + d*x)*sin(a + b*x)/b, x)
def replacement6834(a, b, c, d, m, x):
    return Dist(d/b, Int(x**m*cos(a + b*x)*cos(c + d*x)/(c + d*x), x), x) + Dist(m/b, Int(x**(m + S(-1))*CosIntegral(c + d*x)*cos(a + b*x), x), x) - Simp(x**m*CosIntegral(c + d*x)*cos(a + b*x)/b, x)
def replacement6835(a, b, c, d, m, x):
    return Dist(b/(m + S(1)), Int(x**(m + S(1))*SinIntegral(c + d*x)*sin(a + b*x), x), x) - Dist(d/(m + S(1)), Int(x**(m + S(1))*sin(c + d*x)*cos(a + b*x)/(c + d*x), x), x) + Simp(x**(m + S(1))*SinIntegral(c + d*x)*cos(a + b*x)/(m + S(1)), x)
def replacement6836(a, b, c, d, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*CosIntegral(c + d*x)*cos(a + b*x), x), x) - Dist(d/(m + S(1)), Int(x**(m + S(1))*sin(a + b*x)*cos(c + d*x)/(c + d*x), x), x) + Simp(x**(m + S(1))*CosIntegral(c + d*x)*sin(a + b*x)/(m + S(1)), x)
# ---------------------------------------------------------------------------
# RUBI replacement bodies for the hyperbolic sine/cosine-integral rules
# (6837-6860): SinhIntegral(z) = Shi(z), CoshIntegral(z) = Chi(z).
# These mirror the Si/Ci rules above with sinh/cosh in place of sin/cos.
# NOTE(review): machine-generated from the Mathematica Rubi rule set.
# ---------------------------------------------------------------------------
# Rules 6837-6838: antiderivatives of SinhIntegral/CoshIntegral(a + b*x).
def replacement6837(a, b, x):
    return -Simp(cosh(a + b*x)/b, x) + Simp((a + b*x)*SinhIntegral(a + b*x)/b, x)
def replacement6838(a, b, x):
    return -Simp(sinh(a + b*x)/b, x) + Simp((a + b*x)*CoshIntegral(a + b*x)/b, x)
# Rules 6839-6840: Shi/Chi integrand over x via 3F3 terms with +/- b*x
# (6840 carries the EulerGamma and log**2 terms of Chi).
def replacement6839(b, x):
    return Simp(b*x*HypergeometricPFQ(List(S(1), S(1), S(1)), List(S(2), S(2), S(2)), -b*x)/S(2), x) + Simp(b*x*HypergeometricPFQ(List(S(1), S(1), S(1)), List(S(2), S(2), S(2)), b*x)/S(2), x)
def replacement6840(b, x):
    return Simp(EulerGamma*log(x), x) - Simp(b*x*HypergeometricPFQ(List(S(1), S(1), S(1)), List(S(2), S(2), S(2)), -b*x)/S(2), x) + Simp(b*x*HypergeometricPFQ(List(S(1), S(1), S(1)), List(S(2), S(2), S(2)), b*x)/S(2), x) + Simp(log(b*x)**S(2)/S(2), x)
# Rules 6841-6842: x**m times Shi/Chi -- integration by parts.
def replacement6841(a, b, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*sinh(a + b*x)/(a + b*x), x), x) + Simp(x**(m + S(1))*SinhIntegral(a + b*x)/(m + S(1)), x)
def replacement6842(a, b, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*cosh(a + b*x)/(a + b*x), x), x) + Simp(x**(m + S(1))*CoshIntegral(a + b*x)/(m + S(1)), x)
# Rules 6843-6848: squares of Shi/Chi, alone and with x**m.
def replacement6843(a, b, x):
    return -Dist(S(2), Int(SinhIntegral(a + b*x)*sinh(a + b*x), x), x) + Simp((a + b*x)*SinhIntegral(a + b*x)**S(2)/b, x)
def replacement6844(a, b, x):
    return -Dist(S(2), Int(CoshIntegral(a + b*x)*cosh(a + b*x), x), x) + Simp((a + b*x)*CoshIntegral(a + b*x)**S(2)/b, x)
def replacement6845(b, m, x):
    return -Dist(S(2)/(m + S(1)), Int(x**m*SinhIntegral(b*x)*sinh(b*x), x), x) + Simp(x**(m + S(1))*SinhIntegral(b*x)**S(2)/(m + S(1)), x)
def replacement6846(b, m, x):
    return -Dist(S(2)/(m + S(1)), Int(x**m*CoshIntegral(b*x)*cosh(b*x), x), x) + Simp(x**(m + S(1))*CoshIntegral(b*x)**S(2)/(m + S(1)), x)
def replacement6847(a, b, m, x):
    return -Dist(a*m/(b*(m + S(1))), Int(x**(m + S(-1))*SinhIntegral(a + b*x)**S(2), x), x) - Dist(S(2)/(m + S(1)), Int(x**m*SinhIntegral(a + b*x)*sinh(a + b*x), x), x) + Simp(x**(m + S(1))*SinhIntegral(a + b*x)**S(2)/(m + S(1)), x) + Simp(a*x**m*SinhIntegral(a + b*x)**S(2)/(b*(m + S(1))), x)
def replacement6848(a, b, m, x):
    return -Dist(a*m/(b*(m + S(1))), Int(x**(m + S(-1))*CoshIntegral(a + b*x)**S(2), x), x) - Dist(S(2)/(m + S(1)), Int(x**m*CoshIntegral(a + b*x)*cosh(a + b*x), x), x) + Simp(x**(m + S(1))*CoshIntegral(a + b*x)**S(2)/(m + S(1)), x) + Simp(a*x**m*CoshIntegral(a + b*x)**S(2)/(b*(m + S(1))), x)
# Rules 6849-6854: sinh(a + b*x) * SinhIntegral(c + d*x) (and Chi analogue),
# with x**m recursions stepping m down (6851-6852) or up (6853-6854).
def replacement6849(a, b, c, d, x):
    return -Dist(d/b, Int(sinh(c + d*x)*cosh(a + b*x)/(c + d*x), x), x) + Simp(SinhIntegral(c + d*x)*cosh(a + b*x)/b, x)
def replacement6850(a, b, c, d, x):
    return -Dist(d/b, Int(sinh(a + b*x)*cosh(c + d*x)/(c + d*x), x), x) + Simp(CoshIntegral(c + d*x)*sinh(a + b*x)/b, x)
def replacement6851(a, b, c, d, m, x):
    return -Dist(d/b, Int(x**m*sinh(c + d*x)*cosh(a + b*x)/(c + d*x), x), x) - Dist(m/b, Int(x**(m + S(-1))*SinhIntegral(c + d*x)*cosh(a + b*x), x), x) + Simp(x**m*SinhIntegral(c + d*x)*cosh(a + b*x)/b, x)
def replacement6852(a, b, c, d, m, x):
    return -Dist(d/b, Int(x**m*sinh(a + b*x)*cosh(c + d*x)/(c + d*x), x), x) - Dist(m/b, Int(x**(m + S(-1))*CoshIntegral(c + d*x)*sinh(a + b*x), x), x) + Simp(x**m*CoshIntegral(c + d*x)*sinh(a + b*x)/b, x)
def replacement6853(a, b, c, d, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*SinhIntegral(c + d*x)*cosh(a + b*x), x), x) - Dist(d/(m + S(1)), Int(x**(m + S(1))*sinh(a + b*x)*sinh(c + d*x)/(c + d*x), x), x) + Simp(x**(m + S(1))*SinhIntegral(c + d*x)*sinh(a + b*x)/(m + S(1)), x)
def replacement6854(a, b, c, d, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*CoshIntegral(c + d*x)*sinh(a + b*x), x), x) - Dist(d/(m + S(1)), Int(x**(m + S(1))*cosh(a + b*x)*cosh(c + d*x)/(c + d*x), x), x) + Simp(x**(m + S(1))*CoshIntegral(c + d*x)*cosh(a + b*x)/(m + S(1)), x)
# Rules 6855-6860: cosh(a + b*x) * SinhIntegral(c + d*x) (and Chi analogue),
# same m-down / m-up recursion structure.
def replacement6855(a, b, c, d, x):
    return -Dist(d/b, Int(sinh(a + b*x)*sinh(c + d*x)/(c + d*x), x), x) + Simp(SinhIntegral(c + d*x)*sinh(a + b*x)/b, x)
def replacement6856(a, b, c, d, x):
    return -Dist(d/b, Int(cosh(a + b*x)*cosh(c + d*x)/(c + d*x), x), x) + Simp(CoshIntegral(c + d*x)*cosh(a + b*x)/b, x)
def replacement6857(a, b, c, d, m, x):
    return -Dist(d/b, Int(x**m*sinh(a + b*x)*sinh(c + d*x)/(c + d*x), x), x) - Dist(m/b, Int(x**(m + S(-1))*SinhIntegral(c + d*x)*sinh(a + b*x), x), x) + Simp(x**m*SinhIntegral(c + d*x)*sinh(a + b*x)/b, x)
def replacement6858(a, b, c, d, m, x):
    return -Dist(d/b, Int(x**m*cosh(a + b*x)*cosh(c + d*x)/(c + d*x), x), x) - Dist(m/b, Int(x**(m + S(-1))*CoshIntegral(c + d*x)*cosh(a + b*x), x), x) + Simp(x**m*CoshIntegral(c + d*x)*cosh(a + b*x)/b, x)
def replacement6859(a, b, c, d, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*SinhIntegral(c + d*x)*sinh(a + b*x), x), x) - Dist(d/(m + S(1)), Int(x**(m + S(1))*sinh(c + d*x)*cosh(a + b*x)/(c + d*x), x), x) + Simp(x**(m + S(1))*SinhIntegral(c + d*x)*cosh(a + b*x)/(m + S(1)), x)
def replacement6860(a, b, c, d, m, x):
    return -Dist(b/(m + S(1)), Int(x**(m + S(1))*CoshIntegral(c + d*x)*cosh(a + b*x), x), x) - Dist(d/(m + S(1)), Int(x**(m + S(1))*sinh(a + b*x)*cosh(c + d*x)/(c + d*x), x), x) + Simp(x**(m + S(1))*CoshIntegral(c + d*x)*sinh(a + b*x)/(m + S(1)), x)
def replacement6861(a, b, n, x):
return -Simp(Gamma(n + S(1), a + b*x)/b, x) + Simp((a + b*x)*Gamma(n, a + b*x)/b, x)
def replacement6862(b, n, x):
return Simp(Gamma(n)*log(x), x) - Simp((b*x)**n*HypergeometricPFQ(List(n, n), List(n + S(1), n + S(1)), -b*x)/n**S(2), x)
def replacement6863(b, m, n, x):
return Simp(x**(m + S(1))*Gamma(n, b*x)/(m + S(1)), x) - Simp(x**m*(b*x)**(-m)*Gamma(m + n + S(1), b*x)/(b*(m + S(1))), x)
def With6864(a, b, m, n, x):
_UseGamma = True
return Dist(b/(m + S(1)), Int(x**(m + S(1))*(a + b*x)**(n + S(-1))*exp(-a - b*x), x), x) + Simp(x**(m + S(1))*Gamma(n, a + b*x)/(m + S(1)), x)
def replacement6865(a, b, x):
return Simp(PolyGamma(S(-2), a + b*x)/b, x)
def replacement6866(a, b, m, x):
return -Dist(m/b, Int(x**(m + S(-1))*PolyGamma(S(-2), a + b*x), x), x) + Simp(x**m*PolyGamma(S(-2), a + b*x)/b, x)
def replacement6867(a, b, n, x):
return Simp(PolyGamma(n + S(-1), a + b*x)/b, x)
def replacement6868(a, b, m, n, x):
return -Dist(m/b, Int(x**(m + S(-1))*PolyGamma(n + S(-1), a + b*x), x), x) + Simp(x**m*PolyGamma(n + S(-1), a + b*x)/b, x)
def replacement6869(a, b, m, n, x):
return -Dist(b/(m + S(1)), Int(x**(m + S(1))*PolyGamma(n + S(1), a + b*x), x), x) + Simp(x**(m + S(1))*PolyGamma(n, a + b*x)/(m + S(1)), x)
def replacement6870(a, b, n, x):
return Simp(Gamma(a + b*x)**n/(b*n), x)
def replacement6871(a, b, c, n, x):
return Simp(Factorial(a + b*x)**n/(b*n), x)
def replacement6872(a, b, x):
return Int(PolyGamma(S(1), a + b*x), x)
def replacement6873(a, b, s, x):
return -Simp(Zeta(s + S(-1), a + b*x)/(b*(s + S(-1))), x)
def replacement6874(a, b, m, x):
return Int(x**m*PolyGamma(S(1), a + b*x), x)
def replacement6875(a, b, m, s, x):
return Dist(m/(b*(s + S(-1))), Int(x**(m + S(-1))*Zeta(s + S(-1), a + b*x), x), x) - Simp(x**m*Zeta(s + S(-1), a + b*x)/(b*(s + S(-1))), x)
def replacement6876(a, b, m, s, x):
return Dist(b*s/(m + S(1)), Int(x**(m + S(1))*Zeta(s + S(1), a + b*x), x), x) + Simp(x**(m + S(1))*Zeta(s, a + b*x)/(m + S(1)), x)
def replacement6877(a, b, n, p, q, x):
return -Dist(p*q, Int(PolyLog(n + S(-1), a*(b*x**p)**q), x), x) + Simp(x*PolyLog(n, a*(b*x**p)**q), x)
def replacement6878(a, b, n, p, q, x):
return -Dist(S(1)/(p*q), Int(PolyLog(n + S(1), a*(b*x**p)**q), x), x) + Simp(x*PolyLog(n + S(1), a*(b*x**p)**q)/(p*q), x)
def replacement6879(a, b, c, d, e, n, p, x):
return Simp(PolyLog(n + S(1), c*(a + b*x)**p)/(e*p), x)
def replacement6880(a, b, n, p, q, x):
return Simp(PolyLog(n + S(1), a*(b*x**p)**q)/(p*q), x)
def replacement6881(a, b, m, n, p, q, x):
return -Dist(p*q/(m + S(1)), Int(x**m*PolyLog(n + S(-1), a*(b*x**p)**q), x), x) + Simp(x**(m + S(1))*PolyLog(n, a*(b*x**p)**q)/(m + S(1)), x)
def replacement6882(a, b, m, n, p, q, x):
return -Dist((m + S(1))/(p*q), Int(x**m*PolyLog(n + S(1), a*(b*x**p)**q), x), x) + Simp(x**(m + S(1))*PolyLog(n + S(1), a*(b*x**p)**q)/(p*q), x)
def replacement6883(a, b, c, m, n, p, q, r, x):
return -Dist(m*r/(p*q), Int(PolyLog(n + S(1), a*(b*x**p)**q)*log(c*x**m)**(r + S(-1))/x, x), x) + Simp(PolyLog(n + S(1), a*(b*x**p)**q)*log(c*x**m)**r/(p*q), x)
def replacement6884(a, b, c, n, p, x):
return -Dist(p, Int(PolyLog(n + S(-1), c*(a + b*x)**p), x), x) + Dist(a*p, Int(PolyLog(n + S(-1), c*(a + b*x)**p)/(a + b*x), x), x) + Simp(x*PolyLog(n, c*(a + b*x)**p), x)
def replacement6885(a, b, c, m, n, p, x):
return -Dist(b*p/(m + S(1)), Int(x**(m + S(1))*PolyLog(n + S(-1), c*(a + b*x)**p)/(a + b*x), x), x) + Simp(x**(m + S(1))*PolyLog(n, c*(a + b*x)**p)/(m + S(1)), x)
def replacement6886(F, a, b, c, d, n, p, x):
return Simp(PolyLog(n + S(1), d*(F**(c*(a + b*x)))**p)/(b*c*p*log(F)), x)
def replacement6887(F, a, b, c, d, e, f, m, n, p, x):
return -Dist(f*m/(b*c*p*log(F)), Int((e + f*x)**(m + S(-1))*PolyLog(n + S(1), d*(F**(c*(a + b*x)))**p), x), x) + Simp((e + f*x)**m*PolyLog(n + S(1), d*(F**(c*(a + b*x)))**p)/(b*c*p*log(F)), x)
def With6888(n, u, v, x):
if isinstance(x, (int, Integer, float, Float)):
return False
try:
w = DerivativeDivides(v, u*v, x)
res = Not(FalseQ(w))
except (TypeError, AttributeError):
return False
if res:
return True
return False
def replacement6888(n, u, v, x):
w = DerivativeDivides(v, u*v, x)
return Simp(w*PolyLog(n + S(1), v), x)
def With6889(n, u, v, w, x):
if isinstance(x, (int, Integer, float, Float)):
return False
try:
z = DerivativeDivides(v, u*v, x)
res = Not(FalseQ(z))
except (TypeError, AttributeError):
return False
if res:
return True
return False
def replacement6889(n, u, v, w, x):
z = DerivativeDivides(v, u*v, x)
return -Int(SimplifyIntegrand(z*D(w, x)*PolyLog(n + S(1), v)/w, x), x) + Simp(z*PolyLog(n + S(1), v)*log(w), x)
def replacement6890(a, b, c, p, x):
return Dist(p/(c*(p + S(1))), Int((c*ProductLog(a + b*x))**(p + S(1))/(ProductLog(a + b*x) + S(1)), x), x) + Simp((c*ProductLog(a + b*x))**p*(a + b*x)/(b*(p + S(1))), x)
def replacement6891(a, b, c, p, x):
return -Dist(p, Int((c*ProductLog(a + b*x))**p/(ProductLog(a + b*x) + S(1)), x), x) + Simp((c*ProductLog(a + b*x))**p*(a + b*x)/b, x)
def replacement6892(a, b, c, m, p, x):
return Dist(S(1)/b, Subst(Int(ExpandIntegrand((c*ProductLog(x))**p, (-a/b + x/b)**m, x), x), x, a + b*x), x)
def replacement6893(a, c, n, p, x):
return -Dist(n*p, Int((c*ProductLog(a*x**n))**p/(ProductLog(a*x**n) + S(1)), x), x) + Simp(x*(c*ProductLog(a*x**n))**p, x)
def replacement6894(a, c, n, p, x):
return Dist(n*p/(c*(n*p + S(1))), Int((c*ProductLog(a*x**n))**(p + S(1))/(ProductLog(a*x**n) + S(1)), x), x) + Simp(x*(c*ProductLog(a*x**n))**p/(n*p + S(1)), x)
def replacement6895(a, c, n, p, x):
return -Subst(Int((c*ProductLog(a*x**(-n)))**p/x**S(2), x), x, S(1)/x)
def replacement6896(a, c, m, n, p, x):
return -Dist(n*p/(m + S(1)), Int(x**m*(c*ProductLog(a*x**n))**p/(ProductLog(a*x**n) + S(1)), x), x) + Simp(x**(m + S(1))*(c*ProductLog(a*x**n))**p/(m + S(1)), x)
def replacement6897(a, c, m, n, p, x):
return Dist(n*p/(c*(m + n*p + S(1))), Int(x**m*(c*ProductLog(a*x**n))**(p + S(1))/(ProductLog(a*x**n) + S(1)), x), x) + Simp(x**(m + S(1))*(c*ProductLog(a*x**n))**p/(m + n*p + S(1)), x)
def replacement6898(a, c, m, p, x):
return Dist(S(1)/c, Int(x**m*(c*ProductLog(a*x))**(p + S(1))/(ProductLog(a*x) + S(1)), x), x) + Int(x**m*(c*ProductLog(a*x))**p/(ProductLog(a*x) + S(1)), x)
def replacement6899(a, c, m, n, p, x):
return -Subst(Int(x**(-m + S(-2))*(c*ProductLog(a*x**(-n)))**p, x), x, S(1)/x)
def replacement6900(a, b, d, x):
return Simp((a + b*x)/(b*d*ProductLog(a + b*x)), x)
def replacement6901(a, b, d, x):
return -Int(S(1)/(d*ProductLog(a + b*x) + d), x) + Simp(d*x, x)
def replacement6902(a, b, c, d, p, x):
return -Dist(c*p, Int((c*ProductLog(a + b*x))**(p + S(-1))/(d*ProductLog(a + b*x) + d), x), x) + Simp(c*(c*ProductLog(a + b*x))**(p + S(-1))*(a + b*x)/(b*d), x)
def replacement6903(a, b, d, x):
return Simp(ExpIntegralEi(ProductLog(a + b*x))/(b*d), x)
def replacement6904(a, b, c, d, x):
return Simp(Erfi(sqrt(c*ProductLog(a + b*x))/Rt(c, S(2)))*Rt(Pi*c, S(2))/(b*c*d), x)
def replacement6905(a, b, c, d, x):
return Simp(Erf(sqrt(c*ProductLog(a + b*x))/Rt(-c, S(2)))*Rt(-Pi*c, S(2))/(b*c*d), x)
def replacement6906(a, b, c, d, p, x):
return -Dist(S(1)/(c*(p + S(1))), Int((c*ProductLog(a + b*x))**(p + S(1))/(d*ProductLog(a + b*x) + d), x), x) + Simp((c*ProductLog(a + b*x))**p*(a + b*x)/(b*d*(p + S(1))), x)
def replacement6907(a, b, c, d, p, x):
return Simp((-ProductLog(a + b*x))**(-p)*(c*ProductLog(a + b*x))**p*Gamma(p + S(1), -ProductLog(a + b*x))/(b*d), x)
def replacement6908(a, b, d, m, x):
return Dist(S(1)/b, Subst(Int(ExpandIntegrand(S(1)/(d*ProductLog(x) + d), (-a/b + x/b)**m, x), x), x, a + b*x), x)
def replacement6909(a, b, c, d, m, p, x):
return Dist(S(1)/b, Subst(Int(ExpandIntegrand((c*ProductLog(x))**p/(d*ProductLog(x) + d), (-a/b + x/b)**m, x), x), x, a + b*x), x)
def replacement6910(a, d, n, x):
return -Subst(Int(S(1)/(x**S(2)*(d*ProductLog(a*x**(-n)) + d)), x), x, S(1)/x)
def replacement6911(a, c, d, n, p, x):
return Simp(c*x*(c*ProductLog(a*x**n))**(p + S(-1))/d, x)
def replacement6912(a, d, n, p, x):
return Simp(a**p*ExpIntegralEi(-p*ProductLog(a*x**n))/(d*n), x)
def replacement6913(a, c, d, n, p, x):
return Simp(a**(-S(1)/n)*c**(-S(1)/n)*Erfi(sqrt(c*ProductLog(a*x**n))/Rt(c*n, S(2)))*Rt(Pi*c*n, S(2))/(d*n), x)
def replacement6914(a, c, d, n, p, x):
return Simp(a**(-S(1)/n)*c**(-S(1)/n)*Erf(sqrt(c*ProductLog(a*x**n))/Rt(-c*n, S(2)))*Rt(-Pi*c*n, S(2))/(d*n), x)
def replacement6915(a, c, d, n, p, x):
return -Dist(c*(n*(p + S(-1)) + S(1)), Int((c*ProductLog(a*x**n))**(p + S(-1))/(d*ProductLog(a*x**n) + d), x), x) + Simp(c*x*(c*ProductLog(a*x**n))**(p + S(-1))/d, x)
def replacement6916(a, c, d, n, p, x):
return -Dist(S(1)/(c*(n*p + S(1))), Int((c*ProductLog(a*x**n))**(p + S(1))/(d*ProductLog(a*x**n) + d), x), x) + Simp(x*(c*ProductLog(a*x**n))**p/(d*(n*p + S(1))), x)
def replacement6917(a, c, d, n, p, x):
return -Subst(Int((c*ProductLog(a*x**(-n)))**p/(x**S(2)*(d*ProductLog(a*x**(-n)) + d)), x), x, S(1)/x)
def replacement6918(a, d, m, x):
return -Dist(m/(m + S(1)), Int(x**m/((d*ProductLog(a*x) + d)*ProductLog(a*x)), x), x) + Simp(x**(m + S(1))/(d*(m + S(1))*ProductLog(a*x)), x)
def replacement6919(a, d, x):
return Simp(log(ProductLog(a*x))/d, x)
def replacement6920(a, d, m, x):
return -Int(x**m*ProductLog(a*x)/(d*ProductLog(a*x) + d), x) + Simp(x**(m + S(1))/(d*(m + S(1))), x)
def replacement6921(a, d, m, x):
return Simp(x**m*(-(m + S(1))*ProductLog(a*x))**(-m)*Gamma(m + S(1), -(m + S(1))*ProductLog(a*x))*exp(-m*ProductLog(a*x))/(a*d*(m + S(1))), x)
def replacement6922(a, d, n, x):
return Simp(log(ProductLog(a*x**n))/(d*n), x)
def replacement6923(a, d, m, n, x):
return -Subst(Int(x**(-m + S(-2))/(d*ProductLog(a*x**(-n)) + d), x), x, S(1)/x)
def replacement6924(a, c, d, n, p, x):
return Simp((c*ProductLog(a*x**n))**p/(d*n*p), x)
def replacement6925(a, c, d, m, n, p, x):
return Simp(c*x**(m + S(1))*(c*ProductLog(a*x**n))**(p + S(-1))/(d*(m + S(1))), x)
def replacement6926(a, d, m, n, p, x):
return Simp(a**p*ExpIntegralEi(-p*ProductLog(a*x**n))/(d*n), x)
def replacement6927(a, c, d, m, n, p, x):
return Simp(a**(p + S(-1)/2)*c**(p + S(-1)/2)*Erf(sqrt(c*ProductLog(a*x**n))/Rt(c/(p + S(-1)/2), S(2)))*Rt(Pi*c/(p + S(-1)/2), S(2))/(d*n), x)
def replacement6928(a, c, d, m, n, p, x):
return Simp(a**(p + S(-1)/2)*c**(p + S(-1)/2)*Erfi(sqrt(c*ProductLog(a*x**n))/Rt(-c/(p + S(-1)/2), S(2)))*Rt(-Pi*c/(p + S(-1)/2), S(2))/(d*n), x)
def replacement6929(a, c, d, m, n, p, x):
return -Dist(c*(m + n*(p + S(-1)) + S(1))/(m + S(1)), Int(x**m*(c*ProductLog(a*x**n))**(p + S(-1))/(d*ProductLog(a*x**n) + d), x), x) + Simp(c*x**(m + S(1))*(c*ProductLog(a*x**n))**(p + S(-1))/(d*(m + S(1))), x)
def replacement6930(a, c, d, m, n, p, x):
return -Dist((m + S(1))/(c*(m + n*p + S(1))), Int(x**m*(c*ProductLog(a*x**n))**(p + S(1))/(d*ProductLog(a*x**n) + d), x), x) + Simp(x**(m + S(1))*(c*ProductLog(a*x**n))**p/(d*(m + n*p + S(1))), x)
def replacement6931(a, c, d, m, p, x):
return Simp(x**m*(c*ProductLog(a*x))**p*(-(m + S(1))*ProductLog(a*x))**(-m - p)*Gamma(m + p + S(1), -(m + S(1))*ProductLog(a*x))*exp(-m*ProductLog(a*x))/(a*d*(m + S(1))), x)
def replacement6932(a, c, d, m, n, p, x):
return -Subst(Int(x**(-m + S(-2))*(c*ProductLog(a*x**(-n)))**p/(d*ProductLog(a*x**(-n)) + d), x), x, S(1)/x)
def replacement6933(u, x):
return Subst(Int(SimplifyIntegrand((x + S(1))*SubstFor(ProductLog(x), u, x)*exp(x), x), x), x, ProductLog(x))
|
60c418a3ea5af9e28fa4ba5a085980e81856d838
|
a3d6556180e74af7b555f8d47d3fea55b94bcbda
|
/components/optimization_guide/core/DEPS
|
416a75578789f033666300c982acecd958d6c235
|
[
"BSD-3-Clause"
] |
permissive
|
chromium/chromium
|
aaa9eda10115b50b0616d2f1aed5ef35d1d779d6
|
a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c
|
refs/heads/main
| 2023-08-24T00:35:12.585945
| 2023-08-23T22:01:11
| 2023-08-23T22:01:11
| 120,360,765
| 17,408
| 7,102
|
BSD-3-Clause
| 2023-09-10T23:44:27
| 2018-02-05T20:55:32
| null |
UTF-8
|
Python
| false
| false
| 468
|
DEPS
|
include_rules = [
"+components/crx_file",
"+components/download/public/background_service",
"+components/services/unzip",
"+components/ukm/test_ukm_recorder.h",
"+crypto",
"+mojo/public/cpp",
"+services/metrics/public/cpp",
"+third_party/mediapipe",
"+third_party/tensorflow_models/src",
"+third_party/zlib/google",
"+ui/base/l10n",
]
specific_include_rules = {
".*_unittest\.cc": [
"+third_party/zlib/google/zip.h",
"+ui/base",
]
}
|
|
48adce20c42147d758ea78c657f0357b7996933d
|
3c65819102cf0bed7c044096b2ffac09ef48b81d
|
/tu_07_basic_shading.py
|
67d83810a6ac7c947752c313e93e8898ff2d17b5
|
[] |
no_license
|
jcteng/python-opengl-tutorial
|
abbb42d8adb463d08b4e2dcf39fc84c8a382f914
|
d2045f7e6349628d4290c383607b21e530cb1f52
|
refs/heads/master
| 2022-11-24T01:15:35.796283
| 2021-09-14T04:17:57
| 2021-09-14T04:17:57
| 140,151,029
| 136
| 40
| null | 2022-11-22T08:48:48
| 2018-07-08T08:37:39
|
Python
|
UTF-8
|
Python
| false
| false
| 4,929
|
py
|
tu_07_basic_shading.py
|
# import os,sys
# sys.path.append(os.path.abspath(os.path.dirname(__file__)))
from OpenGL.GL import * # pylint: disable=W0614
from utils.meshViewer import MeshViewWindow, meshWithRender
from utils.shaderLoader import Shader
import glm
class basicShading(meshWithRender):
    """Textured mesh renderer using the tu07 "StandardShading" GLSL program
    (per-fragment lighting with a single hard-coded point light).

    The mesh is loaded from a Wavefront OBJ file and drawn as indexed
    triangles; an optional world-space offset positions the object.
    """

    def __init__(self, meshName, textureName, location=None):
        """
        :param meshName: path to the .obj mesh file
        :param textureName: path to the texture image (e.g. a .dds file)
        :param location: optional [x, y, z] world-space offset (origin if omitted)
        """
        self.meshName = meshName
        self.textureName = textureName
        # A list literal used as a default argument would be shared between
        # instances (mutable-default pitfall), so build a fresh one here.
        self.location = [0.0, 0.0, 0.0] if location is None else location

    def loadShader(self):
        """Compile/link the shader program and cache its uniform locations."""
        self.shader = Shader()
        self.shader.initShaderFromGLSL(
            ["glsl/tu07/StandardShading.vertexshader.glsl"],
            ["glsl/tu07/StandardShading.fragmentshader.glsl"])
        self.MVP_ID = glGetUniformLocation(self.shader.program, "MVP")
        self.ModelMatrix_ID = glGetUniformLocation(self.shader.program, "M")
        self.ViewMatrix_ID = glGetUniformLocation(self.shader.program, "V")
        self.Texture_ID = glGetUniformLocation(self.shader.program, "myTextureSampler")
        self.Light_ID = glGetUniformLocation(self.shader.program, "LightPosition_worldspace")
        self.OFFSET_ID = glGetUniformLocation(self.shader.program, "LOCATION_OFFSET")

    def loadObject(self):
        """Upload vertex positions, normals and triangle indices to GPU buffers."""
        from utils.objLoader import objLoader
        model = objLoader(self.meshName).to_single_index_style()
        self.model = model
        # Vertex attribute data belongs on GL_ARRAY_BUFFER.  The original code
        # uploaded it through GL_ELEMENT_ARRAY_BUFFER, which happens to work
        # (buffer objects are untyped) but is misleading and fragile if a VAO
        # is ever bound.
        self.vertexbuffer = glGenBuffers(1)
        glBindBuffer(GL_ARRAY_BUFFER, self.vertexbuffer)
        glBufferData(GL_ARRAY_BUFFER, len(model.vertexs) * 4,
                     (GLfloat * len(model.vertexs))(*model.vertexs), GL_STATIC_DRAW)
        self.normalbuffer = glGenBuffers(1)
        glBindBuffer(GL_ARRAY_BUFFER, self.normalbuffer)
        glBufferData(GL_ARRAY_BUFFER, len(model.normals) * 4,
                     (GLfloat * len(model.normals))(*model.normals), GL_STATIC_DRAW)
        # Triangle indices (unsigned shorts) do belong on GL_ELEMENT_ARRAY_BUFFER.
        self.indicesbufferSize = len(model.indices)
        self.indicesbuffer = glGenBuffers(1)
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.indicesbuffer)
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, len(model.indices) * 2,
                     (GLushort * len(model.indices))(*model.indices), GL_STATIC_DRAW)

    def loadTexture(self):
        """Load the texture and upload (possibly V-flipped) UV coordinates."""
        from utils.textureLoader import textureLoader
        texture = textureLoader(self.textureName)
        model = self.model
        if texture.inversedVCoords:
            # Flip every V coordinate (odd indices in the interleaved u,v list)
            # when the image loader reports an inverted vertical axis.
            for index in range(0, len(model.texcoords)):
                if index % 2:
                    model.texcoords[index] = 1.0 - model.texcoords[index]
        self.texturebuffer = texture.textureGLID
        self.uvbuffer = glGenBuffers(1)
        glBindBuffer(GL_ARRAY_BUFFER, self.uvbuffer)
        glBufferData(GL_ARRAY_BUFFER, len(model.texcoords) * 4,
                     (GLfloat * len(model.texcoords))(*model.texcoords), GL_STATIC_DRAW)

    def rendering(self, MVP, View, Projection):
        """Draw the mesh with the given matrices.

        ``Projection`` is unused here: the caller has already folded it into
        ``MVP``; it is kept to preserve the meshWithRender call signature.
        """
        self.shader.begin()
        glUniformMatrix4fv(self.MVP_ID, 1, GL_FALSE, glm.value_ptr(MVP))
        glUniformMatrix4fv(self.ModelMatrix_ID, 1, GL_FALSE, glm.value_ptr(glm.mat4(1.0)))
        glUniformMatrix4fv(self.ViewMatrix_ID, 1, GL_FALSE, glm.value_ptr(View))
        glUniform3f(self.OFFSET_ID, self.location[0], self.location[1], self.location[2])
        # Single hard-coded point light in world space.
        lightPos = glm.vec3(0.0, 0.0, 4.0)
        glUniform3f(self.Light_ID, lightPos.x, lightPos.y, lightPos.z)

        glActiveTexture(GL_TEXTURE0)
        glBindTexture(GL_TEXTURE_2D, self.texturebuffer)
        glUniform1i(self.Texture_ID, 0)  # "myTextureSampler" reads texture unit 0

        # Attribute layout: 0 = positions, 1 = UVs, 2 = normals.
        glEnableVertexAttribArray(0)
        glBindBuffer(GL_ARRAY_BUFFER, self.vertexbuffer)
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, None)
        glEnableVertexAttribArray(1)
        glBindBuffer(GL_ARRAY_BUFFER, self.uvbuffer)
        glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, None)
        glEnableVertexAttribArray(2)
        glBindBuffer(GL_ARRAY_BUFFER, self.normalbuffer)
        glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, None)

        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.indicesbuffer)
        glDrawElements(
            GL_TRIANGLES,            # mode
            self.indicesbufferSize,  # count
            GL_UNSIGNED_SHORT,       # type
            None                     # element array buffer offset
        )

        # BUG FIX: the original disabled attribute 1 twice and never disabled
        # attribute 2, leaking the normal attribute state into later draws.
        glDisableVertexAttribArray(0)
        glDisableVertexAttribArray(1)
        glDisableVertexAttribArray(2)
from tu_06_multobjs import meshFromObj
if __name__ == "__main__":
win = MeshViewWindow().init_default()
win.add_mesh(meshFromObj(meshName="resources/tu04/suzanne.obj",textureName="resources/tu04/uvmap.dds",location=[0.0,3.0,0.0]))
win.add_mesh(basicShading(meshName="resources/tu04/suzanne.obj",textureName="resources/tu04/uvmap.dds"))
win.run()
|
8d6120b0505dfbbbdcf75f1694dee2768ec85737
|
6f2fef1b207299681f8d67d3831c400bb91de04b
|
/data_collection/gazette/spiders/sc_santa_rosa_de_lima.py
|
725191dd0a78ee667b35765593798fd86f50fbfe
|
[
"MIT"
] |
permissive
|
okfn-brasil/querido-diario
|
76177747aa5ad47e99514f38402e6bc747b9a715
|
548a9b1b2718dc78ba8ccb06b36cf337543ad71d
|
refs/heads/main
| 2023-08-22T04:26:30.798196
| 2023-08-18T14:12:37
| 2023-08-18T14:12:37
| 127,598,755
| 402
| 233
|
MIT
| 2023-09-14T18:56:02
| 2018-04-01T05:01:21
|
Python
|
UTF-8
|
Python
| false
| false
| 212
|
py
|
sc_santa_rosa_de_lima.py
|
from gazette.spiders.base.fecam import FecamGazetteSpider
class ScSantaRosaDeLimaSpider(FecamGazetteSpider):
    """Scrapes official gazettes of Santa Rosa de Lima (SC) from the FECAM portal."""

    # Scrapy spider identifier.
    name = "sc_santa_rosa_de_lima"
    # FECAM search filter selecting this municipality's entity on the portal.
    FECAM_QUERY = "cod_entidade:233"
    # Municipality code — presumably the 7-digit IBGE code; verify against project docs.
    TERRITORY_ID = "4215604"
|
5c9308ce94a4561840fcff5ec2fb5f4a955a362c
|
e64f4410da97e445a61e1a78baf7fdbaa8296547
|
/flask_rest_jsonapi/decorators.py
|
adab2256880a642d2f3f07c206e76b22425b35b9
|
[
"MIT"
] |
permissive
|
miLibris/flask-rest-jsonapi
|
b8eb3148be026d58e3720107d0b7222f452b761a
|
a4ff3f4d5be78071f015efe003e976d31d4eba10
|
refs/heads/master
| 2023-08-17T08:21:07.601145
| 2020-10-08T07:09:20
| 2020-10-08T07:09:20
| 71,563,227
| 693
| 147
|
MIT
| 2023-06-28T11:42:03
| 2016-10-21T12:29:30
|
Python
|
UTF-8
|
Python
| false
| false
| 4,629
|
py
|
decorators.py
|
# -*- coding: utf-8 -*-
"""Decorators to check headers and method requirements for each Api calls"""
import json
from functools import wraps
from flask import request, make_response, jsonify, current_app
from flask_rest_jsonapi.errors import jsonapi_errors
from flask_rest_jsonapi.exceptions import JsonApiException
from flask_rest_jsonapi.utils import JSONEncoder
def check_headers(func):
    """Check request headers against the JSON:API content-negotiation rules.

    POST/PATCH requests must carry exactly ``Content-Type:
    application/vnd.api+json`` (no media type parameters), otherwise a 415
    response is returned.  When an ``Accept`` header is present, 406 is
    returned if some entry mentions ``application/vnd.api+json`` with media
    type parameters and no exact ``application/vnd.api+json`` entry exists.

    :param callable func: the function to decorate
    :return callable: the wrapped function
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if request.method in ('POST', 'PATCH'):
            # NOTE(review): the middle clause is redundant — the exact
            # inequality test that follows already rejects any value that
            # does not contain 'application/vnd.api+json'.
            if 'Content-Type' not in request.headers or\
               'application/vnd.api+json' not in request.headers['Content-Type'] or\
               request.headers['Content-Type'] != 'application/vnd.api+json':
                error = json.dumps(jsonapi_errors([{'source': '',
                                                    'detail': "Content-Type header must be application/vnd.api+json",
                                                    'title': 'Invalid request header',
                                                    'status': '415'}]), cls=JSONEncoder)
                return make_response(error, 415, {'Content-Type': 'application/vnd.api+json'})
        if 'Accept' in request.headers:
            flag = False
            for accept in request.headers['Accept'].split(','):
                # An exact JSON:API media type anywhere in the list makes the
                # request acceptable; stop scanning immediately.
                if accept.strip() == 'application/vnd.api+json':
                    flag = False
                    break
                # A JSON:API media type with parameters is only an error when
                # no exact entry is found before the loop ends.
                if 'application/vnd.api+json' in accept and accept.strip() != 'application/vnd.api+json':
                    flag = True
            if flag is True:
                error = json.dumps(jsonapi_errors([{'source': '',
                                                    'detail': ('Accept header must be application/vnd.api+json without'
                                                               'media type parameters'),
                                                    'title': 'Invalid request header',
                                                    'status': '406'}]), cls=JSONEncoder)
                return make_response(error, 406, {'Content-Type': 'application/vnd.api+json'})
        return func(*args, **kwargs)
    return wrapper
def check_method_requirements(func):
    """Check methods requirements

    Non-DELETE requests require the decorated resource (``args[0]``) to define
    a ``schema`` attribute; otherwise a descriptive ``Exception`` is raised.

    :param callable func: the function to decorate
    :return callable: the wrapped function
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        # DELETE needs no schema; every other default method does.
        if request.method != 'DELETE' and not hasattr(args[0], 'schema'):
            template = "You must provide {error_field} in {cls} to get access to the default {method} method"
            raise Exception(template.format(error_field='a schema class',
                                            cls=type(args[0]).__name__,
                                            method=request.method.lower()))
        return func(*args, **kwargs)
    return wrapper
def jsonapi_exception_formatter(func):
    """Convert exceptions raised by the wrapped view into JSON:API error responses.

    ``JsonApiException`` instances are serialized with their own status code.
    Any other exception is re-raised when Flask is in debug mode or configured
    to propagate exceptions; otherwise it is reported to Sentry (when the
    extension is installed) and wrapped in a generic ``JsonApiException``.

    :param callable func: the view function to decorate
    :return callable: the wrapped function
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        headers = {'Content-Type': 'application/vnd.api+json'}
        try:
            return func(*args, **kwargs)
        except JsonApiException as e:
            return make_response(jsonify(jsonapi_errors([e.to_dict()])),
                                 e.status,
                                 headers)
        except Exception as e:
            # In development/test, surface the original traceback instead of
            # masking it with a JSON:API payload.
            if current_app.config['DEBUG'] is True or current_app.config.get('PROPAGATE_EXCEPTIONS') is True:
                raise
            if 'sentry' in current_app.extensions:
                current_app.extensions['sentry'].captureException()
            # Prefer GLOBAL_ERROR_MESSAGE (when configured) so internal error
            # details are not leaked to API clients.
            exc = JsonApiException(getattr(e,
                                           'detail',
                                           current_app.config.get('GLOBAL_ERROR_MESSAGE') or str(e)),
                                   source=getattr(e, 'source', ''),
                                   title=getattr(e, 'title', None),
                                   status=getattr(e, 'status', None),
                                   code=getattr(e, 'code', None),
                                   id_=getattr(e, 'id', None),
                                   links=getattr(e, 'links', None),
                                   meta=getattr(e, 'meta', None))
            return make_response(jsonify(jsonapi_errors([exc.to_dict()])),
                                 exc.status,
                                 headers)
    return wrapper
|
f599a4680de6a369e2fd21a02984e7ee5daf0593
|
263170e7dca79883314273bb35aef1449e018361
|
/tests/testcase/execution/test_executiontrace.py
|
85d434527ee9e7f26422b4029962baac6d194708
|
[
"CC-BY-4.0",
"LGPL-2.1-or-later",
"MIT"
] |
permissive
|
se2p/pynguin
|
029cfd9c43c08a2f687a816749828054e409646e
|
cc083252c7054824bfaf200533a8b7ad45f7c4fb
|
refs/heads/main
| 2023-08-23T16:58:04.568755
| 2023-08-18T13:11:44
| 2023-08-18T13:11:44
| 282,944,472
| 1,223
| 65
|
MIT
| 2023-08-18T13:12:29
| 2020-07-27T15:50:19
|
Python
|
UTF-8
|
Python
| false
| false
| 2,598
|
py
|
test_executiontrace.py
|
# This file is part of Pynguin.
#
# SPDX-FileCopyrightText: 2019–2023 Pynguin Contributors
#
# SPDX-License-Identifier: MIT
#
from unittest.mock import MagicMock
from pynguin.slicer.executedinstruction import ExecutedInstruction
from pynguin.testcase.execution import ExecutedAssertion
from pynguin.testcase.execution import ExecutionTrace
def test_merge():
    """Merging two empty traces must leave an empty trace."""
    trace0 = ExecutionTrace()
    trace1 = ExecutionTrace()
    trace0.merge(trace1)
    assert trace0 == ExecutionTrace()
def test_merge_full():
    """Merging fully populated traces: code objects are unioned, predicate
    counts summed, true/false distances kept minimal, instruction lists
    concatenated, and assertion positions shifted by the prepended length."""
    instr0 = ExecutedInstruction("foo", 0, 1, 2, 3, 4, 5)
    stmt0 = MagicMock()
    assert0 = ExecutedAssertion(0, 1, 2, stmt0)
    trace0 = ExecutionTrace()
    trace0.executed_code_objects.add(0)
    trace0.executed_code_objects.add(1)
    trace0.executed_predicates[0] = 9
    trace0.executed_predicates[1] = 7
    trace0.true_distances[0] = 6
    trace0.true_distances[1] = 3
    trace0.false_distances[0] = 0
    trace0.false_distances[1] = 1
    trace0.covered_line_ids = {0}
    trace0.executed_instructions = [instr0]
    trace0.executed_assertions = [assert0]
    instr1 = ExecutedInstruction("bar", 1, 2, 3, 4, 5, 6)
    stmt1 = MagicMock()
    assert1 = ExecutedAssertion(1, 2, 3, stmt1)
    trace1 = ExecutionTrace()
    trace1.executed_code_objects.add(1)
    trace1.executed_code_objects.add(2)
    trace1.executed_predicates[1] = 5
    trace1.executed_predicates[2] = 8
    trace1.true_distances[1] = 19
    trace1.true_distances[2] = 3
    trace1.false_distances[1] = 234
    trace1.false_distances[2] = 0
    trace1.covered_line_ids = {1}
    trace1.executed_instructions = [instr0, instr1]
    trace1.executed_assertions = [assert1]
    # Shifted by one
    assert2 = ExecutedAssertion(1, 2, 4, stmt1)
    # Expected merge result built by hand below.
    result = ExecutionTrace()
    result.executed_code_objects.add(0)
    result.executed_code_objects.add(1)
    result.executed_code_objects.add(2)
    # Predicate 1 appears in both traces: counts 7 + 5 are summed.
    result.executed_predicates[0] = 9
    result.executed_predicates[1] = 12
    result.executed_predicates[2] = 8
    # Overlapping distances keep the minimum of the two traces.
    result.true_distances[0] = 6
    result.true_distances[1] = 3
    result.true_distances[2] = 3
    result.false_distances[0] = 0
    result.false_distances[1] = 1
    result.false_distances[2] = 0
    result.covered_line_ids = {0, 1}
    # instr0 is prepended
    result.executed_instructions = [instr0, instr0, instr1]
    result.executed_assertions = [assert0, assert2]
    trace0.merge(trace1)
    assert trace0 == result
def test_merge_min():
    """_merge_min must keep the element-wise minimum of two distance dicts."""
    target = {0: 0.5, 1: 0.2}
    other = {0: 0.3, 1: 0.6}
    ExecutionTrace._merge_min(target, other)
    assert target == {0: 0.3, 1: 0.2}
|
e01f804b95834b37a03f03b0ee6e04a3a7ea05d5
|
12091b1c0723759464f949b0a47b305c76549278
|
/tests/test_geneticmap.py
|
382df9112a56d97ff9dc35e56a7d9c8c610343fb
|
[
"MIT"
] |
permissive
|
whatshap/whatshap
|
6311e13d36210f395206683bb00b2054ef639653
|
15c9ff8c4f5b04b86195396dbc6620c874b5ceb8
|
refs/heads/main
| 2023-09-04T07:58:09.567203
| 2023-08-31T08:45:45
| 2023-08-31T08:45:45
| 276,673,862
| 254
| 27
|
MIT
| 2023-09-10T06:47:19
| 2020-07-02T14:53:00
|
Python
|
UTF-8
|
Python
| false
| false
| 1,082
|
py
|
test_geneticmap.py
|
import pytest
from whatshap.pedigree import GeneticMapRecombinationCostComputer, ParseError
def test_read_genetic_map(tmp_path):
    """A well-formed map (header + 3-column rows) must parse without error."""
    path = tmp_path / "genetic.map"
    # First line is ignored as a header; data rows are position / rate / cM.
    path.write_text("ignored header\n" "568527 0 0\n" "723891 2.9813105581 0.417644215424158\n")
    _ = GeneticMapRecombinationCostComputer(str(path))
def test_read_wrong_number_of_fields(tmp_path):
    """A row with four columns (second data row) must raise ParseError."""
    path = tmp_path / "genetic.map"
    path.write_text(
        "ignored header\n" "55550 0 0\n" "568322 0 0 17\n" "723891 2.9813105581 0.417644215424158\n"
    )
    with pytest.raises(ParseError):
        _ = GeneticMapRecombinationCostComputer(str(path))
def test_invalid_int(tmp_path):
    """A non-integer position column ("abc") must raise ParseError."""
    path = tmp_path / "genetic.map"
    path.write_text("ignored header\n" "55550 0 0\n" "abc 0 0\n")
    with pytest.raises(ParseError):
        _ = GeneticMapRecombinationCostComputer(str(path))
def test_invalid_float(tmp_path):
    """A non-numeric float column ("abc") must raise ParseError."""
    path = tmp_path / "genetic.map"
    path.write_text("ignored header\n" "55550 0 abc\n")
    with pytest.raises(ParseError):
        _ = GeneticMapRecombinationCostComputer(str(path))
|
e8ba9352258b3311aa2925010a950345baaaa768
|
e95d1d4690d8d0bbcfa38f1e2e569497028a946b
|
/src/benchmarks/gc/src/mypy_stubs/flask_cors.pyi
|
ff2220cd8f49231f0654a698cc947f06e182a8de
|
[
"MIT"
] |
permissive
|
dotnet/performance
|
003b5f3a407099ea2dbda5d325aa83901eb5e8a2
|
24bfc1a4bb8726bd8531036923e462117d5e6f87
|
refs/heads/main
| 2023-09-04T11:51:34.513827
| 2023-09-04T09:17:54
| 2023-09-04T09:17:54
| 124,948,838
| 672
| 262
|
MIT
| 2023-09-14T18:25:17
| 2018-03-12T20:41:10
|
F#
|
UTF-8
|
Python
| false
| false
| 59
|
pyi
|
flask_cors.pyi
|
from flask import Flask
def CORS(app: Flask) -> None: ...
|
fec17a098c79dd7f5cf9267767519159c961a721
|
3dc647cd07a7361ed401e40d2b7cce8c826c8f6c
|
/Lib/test/test_importlib/test_path.py
|
6fc41f301d1cab196a26bacb7b8bd1a1535a62c3
|
[
"CC-BY-4.0",
"MIT",
"Python-2.0"
] |
permissive
|
RustPython/RustPython
|
5ddce4a9848b9de8c041ffd2634f83c0105d3f39
|
b864e5da1f18897fc884180b7093df5aa170024f
|
refs/heads/main
| 2023-09-04T12:38:29.458699
| 2023-09-03T12:33:42
| 2023-09-03T12:33:42
| 135,201,145
| 15,815
| 1,302
|
MIT
| 2023-09-14T08:11:45
| 2018-05-28T19:27:01
|
Rust
|
UTF-8
|
Python
| false
| false
| 2,086
|
py
|
test_path.py
|
import io
import unittest
from importlib import resources
from . import data01
from .resources import util
class CommonTests(util.CommonTests, unittest.TestCase):
    """Run the shared resource-access checks through ``resources.as_file``."""

    def execute(self, package, path):
        # The shared suite only requires that entering the context succeeds.
        with resources.as_file(resources.files(package).joinpath(path)):
            pass
class PathTests:
    """Mixin of checks run against every backend; subclasses supply ``self.data``."""

    def test_reading(self):
        # Path should be readable.
        # Test also implicitly verifies the returned object is a pathlib.Path
        # instance.
        target = resources.files(self.data) / 'utf-8.file'
        with resources.as_file(target) as path:
            self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
            # pathlib.Path.read_text() was introduced in Python 3.5.
            with path.open('r', encoding='utf-8') as file:
                text = file.read()
            self.assertEqual('Hello, UTF-8 world!\n', text)
class PathDiskTests(PathTests, unittest.TestCase):
    """PathTests against a real on-disk package (the ``data01`` fixture)."""

    data = data01

    def test_natural_path(self):
        # Guarantee the internal implementation detail that
        # file-system-backed resources do not get the tempdir
        # treatment.
        target = resources.files(self.data) / 'utf-8.file'
        with resources.as_file(target) as path:
            assert 'data' in str(path)
class PathMemoryTests(PathTests, unittest.TestCase):
    """PathTests against a purely in-memory package (no filesystem origin)."""

    def setUp(self):
        file = io.BytesIO(b'Hello, UTF-8 world!\n')
        self.addCleanup(file.close)
        self.data = util.create_package(
            file=file, path=FileNotFoundError("package exists only in memory")
        )
        # Strip location info so as_file must fall back to a temporary copy.
        self.data.__spec__.origin = None
        self.data.__spec__.has_location = False
class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
    """PathTests against a zip-backed package (resources extracted to a tempdir)."""

    def test_remove_in_context_manager(self):
        # It is not an error if the file that was temporarily stashed on the
        # file system is removed inside the `with` stanza.
        target = resources.files(self.data) / 'utf-8.file'
        with resources.as_file(target) as path:
            path.unlink()
if __name__ == '__main__':
unittest.main()
|
d81153b21cff696f1e1052a35386c8b6a9f4ae81
|
8d1c7fba7cd15f8a1e33fd27d11eefd1c67d579f
|
/third_party/py/dataclasses/dataclasses/__init__.py
|
15d9756f5d4af619e926cec221599e799aa38dad
|
[
"Python-2.0",
"Apache-2.0"
] |
permissive
|
bazelbuild/bazel
|
5896162455f032efc899b8de60aa39b9d2cad4a6
|
171aae3f9c57b41089e25ec61fc84c35baa3079d
|
refs/heads/master
| 2023-08-22T22:52:48.714735
| 2023-08-22T18:01:53
| 2023-08-22T18:01:53
| 20,773,773
| 20,294
| 4,383
|
Apache-2.0
| 2023-09-14T18:38:44
| 2014-06-12T16:00:38
|
Java
|
UTF-8
|
Python
| false
| false
| 40,539
|
py
|
__init__.py
|
import sys
import copy
import types
import inspect
__all__ = ['dataclass',
'field',
'Field',
'FrozenInstanceError',
'InitVar',
'MISSING',
# Helper functions.
'fields',
'asdict',
'astuple',
'make_dataclass',
'replace',
'is_dataclass',
]
# Conditions for adding methods. The boxes indicate what action the
# dataclass decorator takes. For all of these tables, when I talk
# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm
# referring to the arguments to the @dataclass decorator. When
# checking if a dunder method already exists, I mean check for an
# entry in the class's __dict__. I never check to see if an
# attribute is defined in a base class.
# Key:
# +=========+=========================================+
# + Value | Meaning |
# +=========+=========================================+
# | <blank> | No action: no method is added. |
# +---------+-----------------------------------------+
# | add | Generated method is added. |
# +---------+-----------------------------------------+
# | raise | TypeError is raised. |
# +---------+-----------------------------------------+
# | None | Attribute is set to None. |
# +=========+=========================================+
# __init__
#
# +--- init= parameter
# |
# v | | |
# | no | yes | <--- class has __init__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __repr__
#
# +--- repr= parameter
# |
# v | | |
# | no | yes | <--- class has __repr__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __setattr__
# __delattr__
#
# +--- frozen= parameter
# |
# v | | |
# | no | yes | <--- class has __setattr__ or __delattr__ in __dict__?
# +=======+=======+=======+
# | False | | | <- the default
# +-------+-------+-------+
# | True | add | raise |
# +=======+=======+=======+
# Raise because not adding these methods would break the "frozen-ness"
# of the class.
# __eq__
#
# +--- eq= parameter
# |
# v | | |
# | no | yes | <--- class has __eq__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __lt__
# __le__
# __gt__
# __ge__
#
# +--- order= parameter
# |
# v | | |
# | no | yes | <--- class has any comparison method in __dict__?
# +=======+=======+=======+
# | False | | | <- the default
# +-------+-------+-------+
# | True | add | raise |
# +=======+=======+=======+
# Raise because to allow this case would interfere with using
# functools.total_ordering.
# __hash__
# +------------------- unsafe_hash= parameter
# | +----------- eq= parameter
# | | +--- frozen= parameter
# | | |
# v v v | | |
# | no | yes | <--- class has explicitly defined __hash__
# +=======+=======+=======+========+========+
# | False | False | False | | | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | False | True | | | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | True | False | None | | <-- the default, not hashable
# +-------+-------+-------+--------+--------+
# | False | True | True | add | | Frozen, so hashable, allows override
# +-------+-------+-------+--------+--------+
# | True | False | False | add | raise | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True | False | True | add | raise | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True | True | False | add | raise | Not frozen, but hashable
# +-------+-------+-------+--------+--------+
# | True | True | True | add | raise | Frozen, so hashable
# +=======+=======+=======+========+========+
# For boxes that are blank, __hash__ is untouched and therefore
# inherited from the base class. If the base is object, then
# id-based hashing is used.
# Note that a class may already have __hash__=None if it specified an
# __eq__ method in the class body (not one that was created by
# @dataclass).
# See _hash_action (below) for a coded version of this table.
# Raised when an attempt is made to modify a frozen class.
class FrozenInstanceError(AttributeError):
    """Raised on attribute assignment/deletion on a frozen dataclass instance.

    Subclasses AttributeError so pre-existing except clauses keep working.
    """
# A sentinel object for default values to signal that a
# default factory will be used.
# This is given a nice repr() which will appear in the function
# signature of dataclasses' constructors.
class _HAS_DEFAULT_FACTORY_CLASS:
def __repr__(self):
return '<factory>'
_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS()
# A sentinel object to detect if a parameter is supplied or not. Use
# a class to give it a better repr.
class _MISSING_TYPE:
    """Sentinel type: MISSING (its sole instance) means "no value supplied"."""
    pass
MISSING = _MISSING_TYPE()
# Since most per-field metadata will be unused, create an empty
# read-only proxy that can be shared among all fields.
_EMPTY_METADATA = types.MappingProxyType({})
# Markers for the various kinds of fields and pseudo-fields.
_FIELD = object() # An actual field.
_FIELD_CLASSVAR = object() # Not a field, but a ClassVar.
_FIELD_INITVAR = object() # Not a field, but an InitVar.
# The name of an attribute on the class where we store the Field
# objects. Also used to check if a class is a Data Class.
_FIELDS = '__dataclass_fields__'
# The name of an attribute on the class that stores the parameters to
# @dataclass.
_PARAMS = '__dataclass_params__'
# The name of the function, that if it exists, is called at the end of
# __init__.
_POST_INIT_NAME = '__post_init__'
class _InitVarMeta(type):
    """Metaclass making InitVar subscriptable (InitVar[T]) without typing."""
    def __getitem__(self, params):
        # The subscripted type is documentation only; every InitVar[T]
        # collapses to InitVar itself.
        return self
class InitVar(metaclass=_InitVarMeta):
    """Annotation marker for an __init__-only pseudo-field.

    Such a field becomes an __init__ parameter (and is forwarded to
    __post_init__) but is not stored on the instance.
    """
    pass
# Instances of Field are only ever created from within this module,
# and only from the field() function, although Field instances are
# exposed externally as (conceptually) read-only objects.
# name and type are filled in after the fact, not in __init__. They're
# not known at the time this class is instantiated, but it's
# convenient if they're available later.
# When cls._FIELDS is filled in with a list of Field objects, the name
# and type fields will have been populated.
class Field:
    """Per-field specification: default, factory, and per-method opt-ins.

    Instances are created only via field().  name and type are filled
    in later by _get_field(), once the owning class's annotations are
    known.
    """
    __slots__ = ('name',
                 'type',
                 'default',
                 'default_factory',
                 'repr',
                 'hash',
                 'init',
                 'compare',
                 'metadata',
                 '_field_type',  # Private: not to be used by user code.
                 )
    def __init__(self, default, default_factory, init, repr, hash, compare,
                 metadata):
        self.name = None
        self.type = None
        self.default = default
        self.default_factory = default_factory
        self.init = init
        self.repr = repr
        self.hash = hash
        self.compare = compare
        # Share one empty read-only mapping when no metadata is given.
        self.metadata = (_EMPTY_METADATA
                         if metadata is None or len(metadata) == 0 else
                         types.MappingProxyType(metadata))
        self._field_type = None
    def __repr__(self):
        return ('Field('
                f'name={self.name!r},'
                f'type={self.type},'
                f'default={self.default},'
                f'default_factory={self.default_factory},'
                f'init={self.init},'
                f'repr={self.repr},'
                f'hash={self.hash},'
                f'compare={self.compare},'
                f'metadata={self.metadata}'
                ')')
    # This is used to support the PEP 487 __set_name__ protocol in the
    # case where we're using a field that contains a descriptor as a
    # default value.  For details on __set_name__, see
    # https://www.python.org/dev/peps/pep-0487/#implementation-details.
    # Note that in _process_class, this Field object is overwritten with
    # the default value, so the end result is a descriptor that had
    # __set_name__ called on it at the right time.
    def __set_name__(self, owner, name):
        func = getattr(type(self.default), '__set_name__', None)
        if func:
            # There is a __set_name__ method on the descriptor,
            # call it.
            func(self.default, owner, name)
class _DataclassParams:
    """Record of the @dataclass decorator arguments, stored on the class
    under the _PARAMS attribute (used e.g. for frozen-inheritance checks)."""
    __slots__ = ('init',
                 'repr',
                 'eq',
                 'order',
                 'unsafe_hash',
                 'frozen',
                 )
    def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
        self.init = init
        self.repr = repr
        self.eq = eq
        self.order = order
        self.unsafe_hash = unsafe_hash
        self.frozen = frozen
    def __repr__(self):
        return ('_DataclassParams('
                f'init={self.init},'
                f'repr={self.repr},'
                f'eq={self.eq},'
                f'order={self.order},'
                f'unsafe_hash={self.unsafe_hash},'
                f'frozen={self.frozen}'
                ')')
# This function is used instead of exposing Field creation directly,
# so that a type checker can be told (via overloads) that this is a
# function whose type depends on its parameters.
def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True,
          hash=None, compare=True, metadata=None):
    """Return an object to identify dataclass fields.

    default is the default value of the field.  default_factory is a
    0-argument function called to initialize a field's value.  If init
    is True, the field will be a parameter to the class's __init__()
    function.  If repr is True, the field will be included in the
    object's repr().  If hash is True, the field will be included in
    the object's hash().  If compare is True, the field will be used in
    comparison functions.  metadata, if specified, must be a mapping
    which is stored but not otherwise examined by dataclass.

    It is an error (ValueError) to specify both default and
    default_factory.
    """
    if default is not MISSING and default_factory is not MISSING:
        raise ValueError('cannot specify both default and default_factory')
    # name and type are not passed here: _get_field() fills them in once
    # the owning class's annotations are known.
    return Field(default, default_factory, init, repr, hash, compare,
                 metadata)
def _tuple_str(obj_name, fields):
# Return a string representing each field of obj_name as a tuple
# member. So, if fields is ['x', 'y'] and obj_name is "self",
# return "(self.x,self.y)".
# Special case for the 0-tuple.
if not fields:
return '()'
# Note the trailing comma, needed if this turns out to be a 1-tuple.
return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)'
def _create_fn(name, args, body, *, globals=None, locals=None,
               return_type=MISSING):
    """Build a function named ``name`` by exec-ing generated source.

    args is an iterable of parameter strings, body an iterable of
    statement strings (one level of indentation is added here).  The
    resulting function object is fetched back out of ``locals``.
    """
    # Note that we mutate locals when exec() is called.  Caller beware!
    # The only callers are internal to this module, so no worries
    # about external callers.
    if locals is None:
        locals = {}
    return_annotation = ''
    if return_type is not MISSING:
        # Annotate via an indirection so any object can be the annotation.
        locals['_return_type'] = return_type
        return_annotation = '->_return_type'
    args = ','.join(args)
    body = '\n'.join(f' {b}' for b in body)
    # Compute the text of the entire function.
    txt = f'def {name}({args}){return_annotation}:\n{body}'
    exec(txt, globals, locals)
    return locals[name]
def _field_assign(frozen, name, value, self_name):
# If we're a frozen class, then assign to our fields in __init__
# via object.__setattr__. Otherwise, just use a simple
# assignment.
# self_name is what "self" is called in this function: don't
# hard-code "self", since that might be a field name.
if frozen:
return f'object.__setattr__({self_name},{name!r},{value})'
return f'{self_name}.{name}={value}'
def _field_init(f, frozen, globals, self_name):
    """Return the __init__ source line that initializes field ``f``.

    Returns None when no assignment is needed (InitVar pseudo-fields,
    or init=False fields without a default factory).  May add default
    or factory objects to ``globals`` for the generated code to use.
    """
    # Return the text of the line in the body of __init__ that will
    # initialize this field.
    default_name = f'_dflt_{f.name}'
    if f.default_factory is not MISSING:
        if f.init:
            # This field has a default factory.  If a parameter is
            # given, use it.  If not, call the factory.
            globals[default_name] = f.default_factory
            value = (f'{default_name}() '
                     f'if {f.name} is _HAS_DEFAULT_FACTORY '
                     f'else {f.name}')
        else:
            # This is a field that's not in the __init__ params, but
            # has a default factory function.  It needs to be
            # initialized here by calling the factory function,
            # because there's no other way to initialize it.
            # For a field initialized with a default=defaultvalue, the
            # class dict just has the default value
            # (cls.fieldname=defaultvalue).  But that won't work for a
            # default factory, the factory must be called in __init__
            # and we must assign that to self.fieldname.  We can't
            # fall back to the class dict's value, both because it's
            # not set, and because it might be different per-class
            # (which, after all, is why we have a factory function!).
            globals[default_name] = f.default_factory
            value = f'{default_name}()'
    else:
        # No default factory.
        if f.init:
            if f.default is MISSING:
                # There's no default, just do an assignment.
                value = f.name
            elif f.default is not MISSING:
                globals[default_name] = f.default
                value = f.name
        else:
            # This field does not need initialization.  Signify that to
            # the caller by returning None.
            return None
    # Only test this now, so that we can create variables for the
    # default.  However, return None to signify that we're not going
    # to actually do the assignment statement for InitVars.
    if f._field_type == _FIELD_INITVAR:
        return None
    # Now, actually generate the field assignment.
    return _field_assign(frozen, f.name, value, self_name)
def _init_param(f):
    """Return the __init__ parameter text for field ``f``,
    e.g. "x:_type_x=_dflt_x" (types/defaults are indirected via names)."""
    # Return the __init__ parameter string for this field.
    # For example, the equivalent of 'x:int=3' (except instead of 'int',
    # reference a variable set to int, and instead of '3', reference a
    # variable set to 3).
    if f.default is MISSING and f.default_factory is MISSING:
        # There's no default, and no default_factory, just
        # output the variable name and type.
        default = ''
    elif f.default is not MISSING:
        # There's a default, this will be the name that's used to look it up.
        default = f'=_dflt_{f.name}'
    elif f.default_factory is not MISSING:
        # There's a factory function.  Set a marker.
        default = '=_HAS_DEFAULT_FACTORY'
    return f'{f.name}:_type_{f.name}{default}'
def _init_fn(fields, frozen, has_post_init, self_name):
    """Generate the __init__ method for a dataclass.

    ``fields`` contains both real fields and InitVar pseudo-fields
    (ClassVars are excluded by the caller).  ``self_name`` is the name
    used for the "self" parameter in the generated code.
    """
    # fields contains both real fields and InitVar pseudo-fields.
    # Make sure we don't have fields without defaults following fields
    # with defaults.  This actually would be caught when exec-ing the
    # function source code, but catching it here gives a better error
    # message, and future-proofs us in case we build up the function
    # using ast.
    seen_default = False
    for f in fields:
        # Only consider fields in the __init__ call.
        if f.init:
            if not (f.default is MISSING and f.default_factory is MISSING):
                seen_default = True
            elif seen_default:
                raise TypeError(f'non-default argument {f.name!r} '
                                'follows default argument')
    globals = {'MISSING': MISSING,
               '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY}
    body_lines = []
    for f in fields:
        line = _field_init(f, frozen, globals, self_name)
        # line is None means that this field doesn't require
        # initialization (it's a pseudo-field).  Just skip it.
        if line:
            body_lines.append(line)
    # Does this class have a post-init function?
    if has_post_init:
        params_str = ','.join(f.name for f in fields
                              if f._field_type is _FIELD_INITVAR)
        body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})')
    # If no body lines, use 'pass'.
    if not body_lines:
        body_lines = ['pass']
    locals = {f'_type_{f.name}': f.type for f in fields}
    return _create_fn('__init__',
                      [self_name] + [_init_param(f) for f in fields if f.init],
                      body_lines,
                      locals=locals,
                      globals=globals,
                      return_type=None)
def _repr_fn(fields):
    """Generate a __repr__ showing every repr-enabled field as name=value."""
    return _create_fn('__repr__',
                      ('self',),
                      ['return self.__class__.__qualname__ + f"(' +
                       ', '.join([f"{f.name}={{self.{f.name}!r}}"
                                  for f in fields]) +
                       ')"'])
def _frozen_get_del_attr(cls, fields):
    """Generate the (__setattr__, __delattr__) pair that enforces frozen-ness.

    Both raise FrozenInstanceError for the class's own fields (or for
    any attribute on the exact class), and defer to super() otherwise.
    """
    # XXX: globals is modified on the first call to _create_fn, then the
    # modified version is used in the second call.  Is this okay?
    globals = {'cls': cls,
               'FrozenInstanceError': FrozenInstanceError}
    if fields:
        fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)'
    else:
        # Special case for the zero-length tuple.
        fields_str = '()'
    return (_create_fn('__setattr__',
                      ('self', 'name', 'value'),
                      (f'if type(self) is cls or name in {fields_str}:',
                        ' raise FrozenInstanceError(f"cannot assign to field {name!r}")',
                       f'super(cls, self).__setattr__(name, value)'),
                       globals=globals),
            _create_fn('__delattr__',
                      ('self', 'name'),
                      (f'if type(self) is cls or name in {fields_str}:',
                        ' raise FrozenInstanceError(f"cannot delete field {name!r}")',
                       f'super(cls, self).__delattr__(name)'),
                       globals=globals),
            )
def _cmp_fn(name, op, self_tuple, other_tuple):
    """Generate a rich-comparison method comparing field tuples with ``op``.

    Returns NotImplemented for a mismatched class, per convention.
    """
    # Create a comparison function.  If the fields in the object are
    # named 'x' and 'y', then self_tuple is the string
    # '(self.x,self.y)' and other_tuple is the string
    # '(other.x,other.y)'.
    return _create_fn(name,
                      ('self', 'other'),
                      [ 'if other.__class__ is self.__class__:',
                       f' return {self_tuple}{op}{other_tuple}',
                        'return NotImplemented'])
def _hash_fn(fields):
    """Generate a __hash__ hashing the tuple of the given fields' values."""
    self_tuple = _tuple_str('self', fields)
    return _create_fn('__hash__',
                      ('self',),
                      [f'return hash({self_tuple})'])
def _get_field(cls, a_name, a_type):
    """Build and validate the Field for annotation ``a_name: a_type`` on ``cls``."""
    # Return a Field object for this field name and type.  ClassVars
    # and InitVars are also returned, but marked as such (see
    # f._field_type).
    # If the default value isn't derived from Field, then it's
    # only a normal default value.  Convert it to a Field().
    default = getattr(cls, a_name, MISSING)
    if isinstance(default, Field):
        f = default
    else:
        if isinstance(default, types.MemberDescriptorType):
            # This is a field in __slots__, so it has no default value.
            default = MISSING
        f = field(default=default)
    # Assume it's a normal field until proven otherwise.
    f._field_type = _FIELD
    # Only at this point do we know the name and the type.  Set them.
    f.name = a_name
    f.type = a_type
    # If typing has not been imported, then it's impossible for
    # any annotation to be a ClassVar.  So, only look for ClassVar
    # if typing has been imported.
    typing = sys.modules.get('typing')
    if typing is not None:
        # This test uses a typing internal class, but it's the best
        # way to test if this is a ClassVar.
        if ((sys.version_info[:2] >= (3, 7) and
             type(a_type) is typing._GenericAlias and
             a_type.__origin__ is typing.ClassVar) or
            (sys.version_info[:2] == (3, 6) and
             type(a_type) is typing._ClassVar)):
            # This field is a ClassVar, so it's not a field.
            f._field_type = _FIELD_CLASSVAR
    if f._field_type is _FIELD:
        # Check if this is an InitVar.
        if a_type is InitVar:
            # InitVars are not fields, either.
            f._field_type = _FIELD_INITVAR
    # Validations for fields.  This is delayed until now, instead of
    # in the Field() constructor, since only here do we know the field
    # name, which allows better error reporting.
    # Special restrictions for ClassVar and InitVar.
    if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR):
        if f.default_factory is not MISSING:
            raise TypeError(f'field {f.name} cannot have a '
                            'default factory')
        # Should I check for other field settings? default_factory
        # seems the most serious to check for.  Maybe add others.
        # For example, how about init=False (or really,
        # init=<not-the-default-init-value>)?  It makes no sense for
        # ClassVar and InitVar to specify init=<anything>.
    # For real fields, disallow mutable defaults for known types.
    if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
        raise ValueError(f'mutable default {type(f.default)} for field '
                         f'{f.name} is not allowed: use default_factory')
    return f
def _set_new_attribute(cls, name, value):
# Never overwrites an existing attribute. Returns True if the
# attribute already exists.
if name in cls.__dict__:
return True
setattr(cls, name, value)
return False
# Decide if/how we're going to create a hash function. Key is
# (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to
# take. The common case is to do nothing, so instead of providing a
# function that is a no-op, use None to signify that.
def _hash_set_none(cls, fields):
return None
def _hash_add(cls, fields):
    """Hash action: generate __hash__ from the hash-eligible fields."""
    # Per-field hash=None means "follow compare".
    flds = [f for f in fields if (f.compare if f.hash is None else f.hash)]
    return _hash_fn(flds)
def _hash_exception(cls, fields):
# Raise an exception.
raise TypeError(f'Cannot overwrite attribute __hash__ '
f'in class {cls.__name__}')
#
# +-------------------------------------- unsafe_hash?
# | +------------------------------- eq?
# | | +------------------------ frozen?
# | | | +---------------- has-explicit-hash?
# | | | |
# | | | | +------- action
# | | | | |
# v v v v v
# Key: (unsafe_hash, eq, frozen, has_explicit_hash) -> action(cls, fields)
# (None means: leave __hash__ untouched -- inherit from the base class.)
_hash_action = {(False, False, False, False): None,
                (False, False, False, True ): None,
                (False, False, True,  False): None,
                (False, False, True,  True ): None,
                (False, True,  False, False): _hash_set_none,
                (False, True,  False, True ): None,
                (False, True,  True,  False): _hash_add,
                (False, True,  True,  True ): None,
                (True,  False, False, False): _hash_add,
                (True,  False, False, True ): _hash_exception,
                (True,  False, True,  False): _hash_add,
                (True,  False, True,  True ): _hash_exception,
                (True,  True,  False, False): _hash_add,
                (True,  True,  False, True ): _hash_exception,
                (True,  True,  True,  False): _hash_add,
                (True,  True,  True,  True ): _hash_exception,
                }
# See https://bugs.python.org/issue32929#msg312829 for an if-statement
# version of this table.
def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
    """Add dataclass machinery (__init__, __repr__, __eq__, ...) to ``cls``.

    This is the worker behind the @dataclass decorator; the flags mirror
    the decorator's arguments.  Returns the same class object, mutated.
    """
    # Now that dicts retain insertion order, there's no reason to use
    # an ordered dict.  I am leveraging that ordering here, because
    # derived class fields overwrite base class fields, but the order
    # is defined by the base class, which is found first.
    fields = {}
    setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order,
                                           unsafe_hash, frozen))
    # Find our base classes in reverse MRO order, and exclude
    # ourselves.  In reversed order so that more derived classes
    # override earlier field definitions in base classes.
    # As long as we're iterating over them, see if any are frozen.
    any_frozen_base = False
    has_dataclass_bases = False
    for b in cls.__mro__[-1:0:-1]:
        # Only process classes that have been processed by our
        # decorator.  That is, they have a _FIELDS attribute.
        base_fields = getattr(b, _FIELDS, None)
        if base_fields:
            has_dataclass_bases = True
            for f in base_fields.values():
                fields[f.name] = f
            if getattr(b, _PARAMS).frozen:
                any_frozen_base = True
    # Annotations that are defined in this class (not in base
    # classes).  If __annotations__ isn't present, then this class
    # adds no new annotations.  We use this to compute fields that
    # are added by this class.
    # Fields are found from cls_annotations, which is guaranteed to be
    # ordered.  Default values are from class attributes, if a field
    # has a default.  If the default value is a Field(), then it
    # contains additional info beyond (and possibly including) the
    # actual default value.  Pseudo-fields ClassVars and InitVars are
    # included, despite the fact that they're not real fields.
    # That's dealt with later.
    cls_annotations = cls.__dict__.get('__annotations__', {})
    # Now find fields in our class.  While doing so, validate some
    # things, and set the default values (as class attributes)
    # where we can.
    cls_fields = [_get_field(cls, name, type)
                  for name, type in cls_annotations.items()]
    for f in cls_fields:
        fields[f.name] = f
        # If the class attribute (which is the default value for
        # this field) exists and is of type 'Field', replace it
        # with the real default.  This is so that normal class
        # introspection sees a real default value, not a Field.
        if isinstance(getattr(cls, f.name, None), Field):
            if f.default is MISSING:
                # If there's no default, delete the class attribute.
                # This happens if we specify field(repr=False), for
                # example (that is, we specified a field object, but
                # no default value).  Also if we're using a default
                # factory.  The class attribute should not be set at
                # all in the post-processed class.
                delattr(cls, f.name)
            else:
                setattr(cls, f.name, f.default)
    # Do we have any Field members that don't also have annotations?
    for name, value in cls.__dict__.items():
        if isinstance(value, Field) and not name in cls_annotations:
            raise TypeError(f'{name!r} is a field but has no type annotation')
    # Check rules that apply if we are derived from any dataclasses.
    if has_dataclass_bases:
        # Raise an exception if any of our bases are frozen, but we're not.
        if any_frozen_base and not frozen:
            raise TypeError('cannot inherit non-frozen dataclass from a '
                            'frozen one')
        # Raise an exception if we're frozen, but none of our bases are.
        if not any_frozen_base and frozen:
            raise TypeError('cannot inherit frozen dataclass from a '
                            'non-frozen one')
    # Remember all of the fields on our class (including bases).  This also
    # marks this class as being a dataclass.
    setattr(cls, _FIELDS, fields)
    # Was this class defined with an explicit __hash__?  Note that if
    # __eq__ is defined in this class, then python will automatically
    # set __hash__ to None.  This is a heuristic, as it's possible
    # that such a __hash__ == None was not auto-generated, but it
    # close enough.
    class_hash = cls.__dict__.get('__hash__', MISSING)
    has_explicit_hash = not (class_hash is MISSING or
                             (class_hash is None and '__eq__' in cls.__dict__))
    # If we're generating ordering methods, we must be generating
    # the eq methods.
    if order and not eq:
        raise ValueError('eq must be true if order is true')
    if init:
        # Does this class have a post-init function?
        has_post_init = hasattr(cls, _POST_INIT_NAME)
        # Include InitVars and regular fields (so, not ClassVars).
        flds = [f for f in fields.values()
                if f._field_type in (_FIELD, _FIELD_INITVAR)]
        _set_new_attribute(cls, '__init__',
                           _init_fn(flds,
                                    frozen,
                                    has_post_init,
                                    # The name to use for the "self" param
                                    # in __init__.  Use "self" if possible.
                                    '__dataclass_self__' if 'self' in fields
                                    else 'self',
                                    ))
    # Get the fields as a list, and include only real fields.  This is
    # used in all of the following methods.
    field_list = [f for f in fields.values() if f._field_type is _FIELD]
    if repr:
        flds = [f for f in field_list if f.repr]
        _set_new_attribute(cls, '__repr__', _repr_fn(flds))
    if eq:
        # Create _eq__ method.  There's no need for a __ne__ method,
        # since python will call __eq__ and negate it.
        flds = [f for f in field_list if f.compare]
        self_tuple = _tuple_str('self', flds)
        other_tuple = _tuple_str('other', flds)
        _set_new_attribute(cls, '__eq__',
                           _cmp_fn('__eq__', '==',
                                   self_tuple, other_tuple))
    if order:
        # Create and set the ordering methods.
        flds = [f for f in field_list if f.compare]
        self_tuple = _tuple_str('self', flds)
        other_tuple = _tuple_str('other', flds)
        for name, op in [('__lt__', '<'),
                         ('__le__', '<='),
                         ('__gt__', '>'),
                         ('__ge__', '>='),
                         ]:
            if _set_new_attribute(cls, name,
                                  _cmp_fn(name, op, self_tuple, other_tuple)):
                raise TypeError(f'Cannot overwrite attribute {name} '
                                f'in class {cls.__name__}. Consider using '
                                'functools.total_ordering')
    if frozen:
        for fn in _frozen_get_del_attr(cls, field_list):
            if _set_new_attribute(cls, fn.__name__, fn):
                raise TypeError(f'Cannot overwrite attribute {fn.__name__} '
                                f'in class {cls.__name__}')
    # Decide if/how we're going to create a hash function.
    hash_action = _hash_action[bool(unsafe_hash),
                               bool(eq),
                               bool(frozen),
                               has_explicit_hash]
    if hash_action:
        # No need to call _set_new_attribute here, since by the time
        # we're here the overwriting is unconditional.
        cls.__hash__ = hash_action(cls, field_list)
    if not getattr(cls, '__doc__'):
        # Create a class doc-string.
        cls.__doc__ = (cls.__name__ +
                       str(inspect.signature(cls)).replace(' -> None', ''))
    return cls
# _cls should never be specified by keyword, so start it with an
# underscore. The presence of _cls is used to detect if this
# decorator is being called with parameters or not.
def dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False,
              unsafe_hash=False, frozen=False):
    """Returns the same class as was passed in, with dunder methods
    added based on the fields defined in the class.

    Examines PEP 526 __annotations__ to determine fields.

    If init is true, an __init__() method is added to the class. If
    repr is true, a __repr__() method is added. If eq is true, an
    __eq__() method is added. If order is true, rich
    comparison dunder methods are added. If unsafe_hash is true, a
    __hash__() method function is added. If frozen is true, fields may
    not be assigned to after instance creation.
    """
    def wrap(cls):
        # All of the real work happens in _process_class.
        return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen)
    # See if we're being called as @dataclass or @dataclass().
    if _cls is None:
        # We're called with parens.
        return wrap
    # We're called as @dataclass without parens.
    return wrap(_cls)
def fields(class_or_instance):
    """Return a tuple describing the fields of this dataclass.

    Accepts a dataclass or an instance of one. Tuple elements are of
    type Field.  Raises TypeError for anything else.
    """
    # Might it be worth caching this, per class?
    try:
        fields = getattr(class_or_instance, _FIELDS)
    except AttributeError:
        # 'from None' suppresses the uninteresting chained
        # AttributeError in the traceback (matches later CPython).
        raise TypeError('must be called with a dataclass type or instance') from None
    # Exclude pseudo-fields.  Note that fields is sorted by insertion
    # order, so the order of the tuple is as the fields were defined.
    return tuple(f for f in fields.values() if f._field_type is _FIELD)
def _is_dataclass_instance(obj):
    """Returns True if obj is an instance of a dataclass."""
    # The class itself also carries _FIELDS, so exclude types explicitly.
    return not isinstance(obj, type) and hasattr(obj, _FIELDS)
def is_dataclass(obj):
    """Returns True if obj is a dataclass or an instance of a
    dataclass (both carry the _FIELDS attribute)."""
    return hasattr(obj, _FIELDS)
def asdict(obj, *, dict_factory=dict):
    """Return the fields of a dataclass instance as a new dictionary mapping
    field names to field values.

    Example usage:

      @dataclass
      class C:
          x: int
          y: int

      c = C(1, 2)
      assert asdict(c) == {'x': 1, 'y': 2}

    If given, 'dict_factory' will be used instead of built-in dict.
    The function applies recursively to field values that are
    dataclass instances. This will also look into built-in containers:
    tuples, lists, and dicts.

    Raises TypeError if obj is a class or not a dataclass instance.
    """
    if not _is_dataclass_instance(obj):
        raise TypeError("asdict() should be called on dataclass instances")
    return _asdict_inner(obj, dict_factory)
def _asdict_inner(obj, dict_factory):
    """Recursive worker for asdict().

    Dataclass instances become dicts (via dict_factory), containers are
    rebuilt with recursed elements, everything else is deep-copied.
    """
    if _is_dataclass_instance(obj):
        result = []
        for f in fields(obj):
            value = _asdict_inner(getattr(obj, f.name), dict_factory)
            result.append((f.name, value))
        return dict_factory(result)
    elif isinstance(obj, tuple) and hasattr(obj, '_fields'):
        # obj is a namedtuple: its constructor takes positional args,
        # not a single iterable, so rebuild it by unpacking the recursed
        # values (CPython bpo-34363 fix).
        return type(obj)(*[_asdict_inner(v, dict_factory) for v in obj])
    elif isinstance(obj, (list, tuple)):
        return type(obj)(_asdict_inner(v, dict_factory) for v in obj)
    elif isinstance(obj, dict):
        return type(obj)((_asdict_inner(k, dict_factory),
                          _asdict_inner(v, dict_factory))
                         for k, v in obj.items())
    else:
        return copy.deepcopy(obj)
def astuple(obj, *, tuple_factory=tuple):
    """Return the fields of a dataclass instance as a new tuple of field values.

    Example usage::

      @dataclass
      class C:
          x: int
          y: int

      c = C(1, 2)
      assert astuple(c) == (1, 2)

    If given, 'tuple_factory' will be used instead of built-in tuple.
    The function applies recursively to field values that are
    dataclass instances. This will also look into built-in containers:
    tuples, lists, and dicts.

    Raises TypeError if obj is a class or not a dataclass instance.
    """
    if not _is_dataclass_instance(obj):
        raise TypeError("astuple() should be called on dataclass instances")
    return _astuple_inner(obj, tuple_factory)
def _astuple_inner(obj, tuple_factory):
    """Recursive worker for astuple().

    Dataclass instances become tuples (via tuple_factory), containers
    are rebuilt with recursed elements, everything else is deep-copied.
    """
    if _is_dataclass_instance(obj):
        result = []
        for f in fields(obj):
            value = _astuple_inner(getattr(obj, f.name), tuple_factory)
            result.append(value)
        return tuple_factory(result)
    elif isinstance(obj, tuple) and hasattr(obj, '_fields'):
        # obj is a namedtuple: its constructor takes positional args,
        # not a single iterable, so rebuild it by unpacking the recursed
        # values (CPython bpo-34363 fix).
        return type(obj)(*[_astuple_inner(v, tuple_factory) for v in obj])
    elif isinstance(obj, (list, tuple)):
        return type(obj)(_astuple_inner(v, tuple_factory) for v in obj)
    elif isinstance(obj, dict):
        return type(obj)((_astuple_inner(k, tuple_factory),
                          _astuple_inner(v, tuple_factory))
                         for k, v in obj.items())
    else:
        return copy.deepcopy(obj)
def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
                   repr=True, eq=True, order=False, unsafe_hash=False,
                   frozen=False):
    """Return a new dynamically created dataclass.

    The dataclass name will be 'cls_name'.  'fields' is an iterable
    of either (name), (name, type) or (name, type, Field) objects. If type is
    omitted, use the string 'typing.Any'.  Field objects are created by
    the equivalent of calling 'field(name, type [, Field-info])'.

      C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,))

    is equivalent to:

      @dataclass
      class C(Base):
          x: 'typing.Any'
          y: int
          z: int = field(init=False)

    For the bases and namespace parameters, see the builtin type() function.

    The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to
    dataclass().
    """
    if namespace is None:
        namespace = {}
    else:
        # Copy namespace since we're going to mutate it.
        namespace = namespace.copy()
    anns = {}
    for item in fields:
        if isinstance(item, str):
            name = item
            tp = 'typing.Any'
        elif len(item) == 2:
            name, tp, = item
        elif len(item) == 3:
            name, tp, spec = item
            namespace[name] = spec
        else:
            # Previously a malformed item (e.g. a 1-tuple) fell through
            # and silently reused the previous iteration's name/tp, or
            # raised NameError on the first item.  Reject it explicitly,
            # matching later CPython.
            raise TypeError(f'Invalid field: {item!r}')
        anns[name] = tp
    namespace['__annotations__'] = anns
    cls = type(cls_name, bases, namespace)
    return dataclass(cls, init=init, repr=repr, eq=eq, order=order,
                     unsafe_hash=unsafe_hash, frozen=frozen)
def replace(obj, **changes):
    """Return a new object replacing specified fields with new values.

    This is especially useful for frozen classes.  Example usage:

      @dataclass(frozen=True)
      class C:
          x: int
          y: int

      c = C(1, 2)
      c1 = replace(c, x=3)
      assert c1.x == 3 and c1.y == 2
    """
    # We're going to mutate 'changes', but that's okay because it's a new
    # dict, even if called with 'replace(obj, **my_changes)'.
    if not _is_dataclass_instance(obj):
        raise TypeError("replace() should be called on dataclass instances")
    # It's an error to have init=False fields in 'changes'.
    # If a field is not in 'changes', read its value from the provided obj.
    # NOTE(review): InitVar pseudo-fields have init=True but are not stored
    # on the instance, so the getattr() below would raise AttributeError
    # for them when not supplied in 'changes'; later CPython raises a
    # ValueError for this case -- confirm callers never use replace() on
    # classes with InitVars.
    for f in getattr(obj, _FIELDS).values():
        if not f.init:
            # Error if this field is specified in changes.
            if f.name in changes:
                raise ValueError(f'field {f.name} is declared with '
                                 'init=False, it cannot be specified with '
                                 'replace()')
            continue
        if f.name not in changes:
            changes[f.name] = getattr(obj, f.name)
    # Create the new object, which calls __init__() and
    # __post_init__() (if defined), using all of the init fields
    # we've added and/or left in 'changes'.  If there are values
    # supplied in changes that aren't fields, this will correctly
    # raise a TypeError.
    return obj.__class__(**changes)
|
17e9fe10ee922b29b3d2bb9997c460f1b7a54d1d
|
2337351b228818e41be3002bd38f68f77c2aa074
|
/sa/migrations/0237_objectnotification_routes.py
|
1121cb602abeb2a2dce70f5b60fa227aa88a5422
|
[
"BSD-3-Clause"
] |
permissive
|
nocproject/noc
|
57d40c680a1499374463e472434f9595ed6d1374
|
6e6d71574e9b9d822bec572cc629a0ea73604a59
|
refs/heads/master
| 2023-08-31T01:11:33.544573
| 2023-08-30T17:31:11
| 2023-08-30T17:31:11
| 107,815,776
| 105
| 33
|
BSD-3-Clause
| 2023-07-31T07:57:45
| 2017-10-21T21:04:33
|
Python
|
UTF-8
|
Python
| false
| false
| 3,619
|
py
|
0237_objectnotification_routes.py
|
# ---------------------------------------------------------------------
# Migrate ObjectNotification to Route
# ---------------------------------------------------------------------
# Copyright (C) 2007-2022 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Third-party modules
import orjson
import bson
import hashlib
from pymongo import InsertOne
# NOC modules
from noc.core.migration.base import BaseMigration
class Migration(BaseMigration):
    """Convert legacy ObjectNotification rows into message Route documents.

    For every (watch condition, notification group) pair stored in the
    sa_objectnotification SQL table this creates:
      * a ``messageroutes`` Mongo document for the new message router, and
      * a matching ``ds_cfgmxroute`` datastream record carrying the same
        payload plus a change id and a short content hash.
    """

    # Must run after the default MX senders migration.
    depends_on = [("main", "0066_default_mx_senders")]
    def migrate(self):
        # Resource-group id -> human-readable name; used in route names and
        # in the noc::resourcegroup match labels below.
        rg_map = {str(rg["_id"]): rg["name"] for rg in self.mongo_db["resourcegroups"].find()}
        route_bulk = []
        cfgroute_bulk = []
        # Map each legacy boolean column of sa_objectnotification to the
        # corresponding new message type.
        for condition, message_type in [
            ("config_changed", "config_changed"),
            ("new", "object_new"),
            ("deleted", "object_deleted"),
            ("version_changed", "version_changed"),
            ("config_policy_violation", "config_policy_violation"),
        ]:
            # NOTE: 'condition' is interpolated into SQL, but it only ever
            # comes from the hard-coded whitelist above, so this is safe.
            for rg, ng_id in self.db.execute(
                f"SELECT resource_group, notification_group_id FROM sa_objectnotification WHERE {condition} = TRUE"
            ):
                mr_id = bson.ObjectId()
                rg_name = rg_map[rg]
                # ORM-side route document.
                route = {
                    "_id": mr_id,
                    "name": f"Notification {message_type} for {rg_name}: {ng_id}",
                    "is_active": True,
                    "description": f"Migrate from ObjectNotification for {rg_name}",
                    "order": 10,
                    "type": message_type,
                    "match": [
                        {
                            "labels": [f"noc::resourcegroup::{rg_name}::="],
                            "exclude_labels": [],
                            "administrative_domain": None,
                            "headers_match": [],
                        }
                    ],
                    "action": "notification",
                    "notification_group": ng_id,
                }
                route_bulk += [InsertOne(route)]
                change_id = bson.ObjectId()
                # Datastream payload: same route, serialized with string ids.
                # NOTE(review): this schema uses "headers" where the route
                # document uses "headers_match" — looks intentional (different
                # schemas), but worth confirming against the datastream model.
                data = orjson.dumps(
                    {
                        "id": str(mr_id),
                        "name": route["name"],
                        "type": route["type"],
                        "order": route["order"],
                        "action": route["action"],
                        "match": [
                            {
                                "labels": [f"noc::resourcegroup::{rg_name}::="],
                                "exclude_labels": [],
                                "administrative_domain": None,
                                "headers": [],
                            }
                        ],
                        "notification_group": str(ng_id),
                        "change_id": str(change_id),
                    }
                )
                cfgroute_bulk += [
                    InsertOne(
                        {
                            "_id": mr_id,
                            "change_id": change_id,
                            # Short content hash lets consumers detect changes
                            # without comparing the full payload.
                            "hash": hashlib.sha256(data).hexdigest()[:16],
                            "data": data.decode("utf-8"),
                        }
                    )
                ]
        mr_coll = self.mongo_db["messageroutes"]
        cfg_coll = self.mongo_db["ds_cfgmxroute"]
        # Bulk-insert everything at once; skip the round-trip when empty.
        if route_bulk:
            mr_coll.bulk_write(route_bulk)
        if cfgroute_bulk:
            cfg_coll.bulk_write(cfgroute_bulk)
|
a2a0adc60d7231ff1b7ee66dd8c1e05253835717
|
cf1cba50df334d63cdfae3f785c3d7bad1ed2c84
|
/imputed-v2-gwas/6_filter_gwas_variants.py
|
de34a6c31974a157a7b6ca66d397cb9309ffc0a7
|
[] |
no_license
|
Nealelab/UK_Biobank_GWAS
|
3fc34adfc7790a1d882b99de52eb3d36f1cb2c41
|
68b6d4c40e3961655037ff4822fb7917904b3f58
|
refs/heads/master
| 2023-08-16T01:14:15.974149
| 2023-06-07T14:51:08
| 2023-06-07T14:51:08
| 99,874,476
| 300
| 108
| null | 2020-02-04T17:55:04
| 2017-08-10T02:56:14
|
Python
|
UTF-8
|
Python
| false
| false
| 1,111
|
py
|
6_filter_gwas_variants.py
|
from __future__ import print_function
from pprint import pprint
from hail import *
hc = HailContext()
BUCKET = '...'
APPLICATION = 'test'
VARIANT_VDS = BUCKET + 'all_variants.vds'
GWAS_VARIANTS_VDS = BUCKET + 'gwas_variants.vds'
GWAS_VARIANTS_TSV = BUCKET + 'gwas_variants.tsv'
vds = (
hc
.read(VARIANT_VDS)
.filter_variants_expr("""
va.isHRC &&
va.info > 0.8 &&
va.qc.AF > 0.001 &&
va.qc.AF < 0.999 &&
va.qc.pHWE > 1e-10 &&
va.qc.callRate > 0.95
""")
)
vds.write(GWAS_VARIANTS_VDS, overwrite=True)
vds = hc.read(GWAS_VARIANTS_VDS)
n_variants = vds.count_variants()
print('')
print('nVariants: ', '{:,}'.format(n_variants))
pprint(vds.variant_schema)
(
vds
.export_variants(
output=GWAS_VARIANTS_TSV,
expr="""
variant = Variant(v.contig.replace("^0",""), v.start, v.ref, v.alt()),
chr = v.contig.replace("^0",""),
pos = v.start,
ref = v.ref,
alt = v.alt(),
rsid = va.rsid,
info = va.info,
va.qc.*
"""
)
)
|
088b340f4e5236cafcc52052b369073cb24a3b24
|
6189f34eff2831e3e727cd7c5e43bc5b591adffc
|
/WebMirror/management/rss_parser_funcs/feed_parse_extractTandQ.py
|
2cf9c2855e68897acf35be7b46925fd55e073dec
|
[
"BSD-3-Clause"
] |
permissive
|
fake-name/ReadableWebProxy
|
24603660b204a9e7965cfdd4a942ff62d7711e27
|
ca2e086818433abc08c014dd06bfd22d4985ea2a
|
refs/heads/master
| 2023-09-04T03:54:50.043051
| 2023-08-26T16:08:46
| 2023-08-26T16:08:46
| 39,611,770
| 207
| 20
|
BSD-3-Clause
| 2023-09-11T15:48:15
| 2015-07-24T04:30:43
|
Python
|
UTF-8
|
Python
| false
| false
| 4,349
|
py
|
feed_parse_extractTandQ.py
|
def extractTandQ(item):
    """
    T&Q

    Parse a feed item: returns a release message for a known series,
    None for previews / untagged / blacklisted items, False when the
    tags match no known series.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume numbering, and previews.
    has_numbering = bool(vol or chp or frag)
    if not has_numbering or 'preview' in item['title'].lower():
        return None

    # Tags that mark non-release content (news, dramas, trailers, ...).
    bad = [
        '#K-drama',
        'fashion',
        'C-Drama',
        '#Trending',
        'Feature',
        '#Trailer',
        '#Eng Sub',
        'Movies',
        'Status Updates/Post Tallies',
        'Learn Chinese',
        'Short Stories',
    ]
    if any(tag in item['tags'] for tag in bad):
        return None

    # (feed tag, canonical series name, translation type); first match wins.
    tagmap = [
        ('Three Kingdoms Online Overlord', 'Three Kingdoms Online Overlord', 'translated'),
        ('Three Kingdoms Online Overlord | 网游之三国超级领主', 'Three Kingdoms Online Overlord', 'translated'),
        ('Perfect Fiance', 'Perfect Fiancé', 'translated'),
        ('Perfect Fiancé | 完美未婚夫', 'Perfect Fiancé', 'translated'),
        ('Ten Years are not that Far', 'Ten Years are not that Far', 'translated'),
        ('#Les Interpretes', 'Les Interpretes', 'translated'),
        ('致我们终将逝去的青春', 'To Our Youth That is Fading Away', 'translated'),
        ('So Young | 致我们终将逝去的青春', 'To Our Youth That is Fading Away', 'translated'),
        ("Fleeting Midsummer (Beijing University's Weakest Student)", "Fleeting Midsummer (Beijing University's Weakest Student)", 'translated'),
        ("Fleeting Midsummer (Peking University's Weakest Student)", "Fleeting Midsummer (Peking University's Weakest Student)", 'translated'),
        ("Fleeting Midsummer (Peking University's Weakest Student)| 北大差生·", "Fleeting Midsummer (Peking University's Weakest Student)", 'translated'),
        ("Fleeting Midsummer (Peking University's Weakest Student)| 北大差生", "Fleeting Midsummer (Peking University's Weakest Student)", 'translated'),
        ('When A Snail Falls in Love| 如果蜗牛有爱情', 'When A Snail Falls in Love', 'translated'),
        ('The Rebirth of an Ill-Fated Consort | 重生之嫡女祸妃', 'The Rebirth of an Ill-Fated Consort', 'translated'),
        ('Siege in Fog | 迷雾围城', 'Siege in Fog', 'translated'),
        ('Pristine Darkness | 他来了请闭眼之暗粼', 'Pristine Darkness', 'translated'),
        ('Les Interpretes | 亲爱的翻译官', 'Les Interpretes', 'translated'),
        ('Les Interpretes | 情爱的翻译官', 'Les Interpretes', 'translated'),
        ('The Daily Record of Secretly Loving the Male Idol|男神暗恋日记', 'The Daily Record of Secretly Loving the Male Idol', 'translated'),
        ('Master Devil Don\'t Kiss Me', 'Master Devil Don\'t Kiss Me', 'translated'),
        ('Master Devil Don\'t Kiss Me! | 恶魔少爷别吻我', 'Master Devil Don\'t Kiss Me', 'translated'),
    ]
    matched = next(
        (entry for entry in tagmap if entry[0] in item['tags']),
        None,
    )
    if matched is None:
        return False
    _, series_name, tl_type = matched
    return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
|
f78ea6eddf55e373527d3cae118bec6252cf0539
|
da1500e0d3040497614d5327d2461a22e934b4d8
|
/third_party/blink/Tools/Scripts/webkitpy/tool/commands/rebaseline_server.py
|
00f97fd4b45c5e1f69c10bafcf92f9f3da932381
|
[
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"BSD-3-Clause",
"Apache-2.0",
"MIT"
] |
permissive
|
youtube/cobalt
|
34085fc93972ebe05b988b15410e99845efd1968
|
acefdaaadd3ef46f10f63d1acae2259e4024d383
|
refs/heads/main
| 2023-09-01T13:09:47.225174
| 2023-09-01T08:54:54
| 2023-09-01T08:54:54
| 50,049,789
| 169
| 80
|
BSD-3-Clause
| 2023-09-14T21:50:50
| 2016-01-20T18:11:34
| null |
UTF-8
|
Python
| false
| false
| 4,677
|
py
|
rebaseline_server.py
|
# Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Starts a local HTTP server which displays layout test failures (given a test
results directory), provides comparisons of expected and actual results (both
images and text) and allows one-click rebaselining of tests.
"""
from webkitpy.common.host import Host
from webkitpy.common.net.layout_test_results import LayoutTestResults
from webkitpy.layout_tests.layout_package import json_results_generator
from webkitpy.tool.commands.abstract_local_server_command import AbstractLocalServerCommand
from webkitpy.tool.servers.rebaseline_server import get_test_baselines, RebaselineHTTPServer, STATE_NEEDS_REBASELINE
class TestConfig(object):
    """Bundle of everything the rebaseline server needs: the port under
    test, where the layout tests and results live, the platform list, and
    cached host services (filesystem, git)."""

    def __init__(self, test_port, layout_tests_directory, results_directory, platforms, host):
        # Store the constructor arguments plus two convenience handles
        # pulled off the host so callers need not reach through .host.
        for attribute, value in (
                ('test_port', test_port),
                ('layout_tests_directory', layout_tests_directory),
                ('results_directory', results_directory),
                ('platforms', platforms),
                ('host', host),
                ('filesystem', host.filesystem),
                ('git', host.git())):
            setattr(self, attribute, value)
class RebaselineServer(AbstractLocalServerCommand):
    """webkit-patch command that serves the rebaseline web UI.

    NOTE(review): this module uses Python 2 print statements — it predates
    the Python 3 migration.
    """
    name = 'rebaseline-server'
    help_text = __doc__  # reuse the module docstring as the command help
    show_in_main_help = True
    argument_names = '/path/to/results/directory'
    server = RebaselineHTTPServer
    def __init__(self):
        super(RebaselineServer, self).__init__()
        # Populated by _prepare_config() before the server starts.
        self._test_config = None
    def _gather_baselines(self, results_json):
        """Flatten results and attach current baselines to each failure."""
        # Rebaseline server and it's associated JavaScript expected the tests subtree to
        # be key-value pairs instead of hierarchical.
        # FIXME: make the rebaseline server use the hierarchical tree.
        new_tests_subtree = {}
        def gather_baselines_for_test(result):
            # Only failing/unexpected results need rebaselining.
            if result.did_pass_or_run_as_expected():
                return
            result_dict = result.result_dict()
            result_dict['state'] = STATE_NEEDS_REBASELINE
            result_dict['baselines'] = get_test_baselines(result.test_name(), self._test_config)
            new_tests_subtree[result.test_name()] = result_dict
        LayoutTestResults(results_json).for_each_test(gather_baselines_for_test)
        # Replace the hierarchical tree with the flat test_name -> dict map.
        results_json['tests'] = new_tests_subtree
    def _prepare_config(self, options, args, tool):
        """Load full_results.json and build the config/context for the server."""
        results_directory = args[0]
        host = Host()
        print 'Parsing full_results.json...'
        results_json_path = host.filesystem.join(results_directory, 'full_results.json')
        results_json = json_results_generator.load_json(host.filesystem, results_json_path)
        port = tool.port_factory.get()
        layout_tests_directory = port.layout_tests_dir()
        # Each subdirectory of LayoutTests/platform is a supported platform.
        platforms = host.filesystem.listdir(host.filesystem.join(layout_tests_directory, 'platform'))
        self._test_config = TestConfig(port, layout_tests_directory, results_directory, platforms, host)
        print 'Gathering current baselines...'
        self._gather_baselines(results_json)
        return {
            'test_config': self._test_config,
            'results_json': results_json,
            'platforms_json': {
                'platforms': platforms,
                'defaultPlatform': port.name(),
            },
        }
|
bf6d95301852188982c94d0ec570937b44fa27c2
|
f509ab9825c542e09b0c6591d86ef1f9feb540a6
|
/pkgs/filetransferutils-pkg/src/genie/libs/filetransferutils/plugins/ios/sftp/fileutils.py
|
c8012c1929ecd5ae8a5826231b58751f764c2a68
|
[
"Apache-2.0"
] |
permissive
|
CiscoTestAutomation/genielibs
|
97f597117193aaa18028defeb69078ebb241173a
|
e42e51475cddcb10f5c7814d0fe892ac865742ba
|
refs/heads/master
| 2023-08-11T16:39:41.959947
| 2023-07-27T17:58:42
| 2023-07-27T17:58:42
| 130,717,047
| 109
| 60
|
Apache-2.0
| 2023-08-29T22:32:08
| 2018-04-23T15:21:56
|
Python
|
UTF-8
|
Python
| false
| false
| 162
|
py
|
fileutils.py
|
""" File utils base class for SFTP on IOS devices. """
from ...iosxe.sftp.fileutils import FileUtils as FileUtilsXEBase
class FileUtils(FileUtilsXEBase):
    """SFTP file utilities for classic IOS.

    IOS reuses the IOS-XE SFTP implementation unchanged; this empty
    subclass presumably just gives the plugin loader an ios-specific
    class to resolve — confirm against the plugin discovery mechanism.
    """
    pass
|
99c4d68af6a373b3984406f38e229ba6485a6265
|
1ab5036a95066a18d889aa1186fd1609ff4b5923
|
/petl/test/io/test_text_unicode.py
|
6721a854623da6cbd62c63d9331ea27190207e48
|
[
"MIT"
] |
permissive
|
petl-developers/petl
|
b52d6ee6e0ab16a1ba17f98a4aa2f791d93ba796
|
e829532e2ed350d00b96680d2d6774dec4a7f2e0
|
refs/heads/master
| 2023-08-30T02:36:09.816551
| 2023-08-22T15:07:19
| 2023-08-22T15:22:53
| 2,233,194
| 663
| 109
|
MIT
| 2023-08-22T15:22:55
| 2011-08-19T09:51:03
|
Python
|
UTF-8
|
Python
| false
| false
| 2,041
|
py
|
test_text_unicode.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import io
from tempfile import NamedTemporaryFile
from petl.test.helpers import ieq, eq_
from petl.io.text import fromtext, totext
def test_fromtext():
    """fromtext() should expose each line of a UTF-8 file as a 1-tuple row."""
    content = (
        u"name,id\n"
        u"Արամ Խաչատրյան,1\n"
        u"Johann Strauß,2\n"
        u"Вагиф Сәмәдоғлу,3\n"
        u"章子怡,4\n"
    )
    path = NamedTemporaryFile().name
    with io.open(path, encoding='utf-8', mode='wt') as handle:
        handle.write(content)
    actual = fromtext(path, encoding='utf-8')
    expect = ((u'lines',),
              (u'name,id',),
              (u'Արամ Խաչատրյան,1',),
              (u'Johann Strauß,2',),
              (u'Вагиф Сәмәдоғлу,3',),
              (u'章子怡,4',),
              )
    ieq(expect, actual)
    ieq(expect, actual)  # verify can iterate twice
def test_totext():
    """totext() should render prologue, per-row template, then epilogue."""
    table = ((u'name', u'id'),
             (u'Արամ Խաչատրյան', 1),
             (u'Johann Strauß', 2),
             (u'Вагиф Сәмәдоғлу', 3),
             (u'章子怡', 4),
             )
    prologue = (
        u"{| class='wikitable'\n"
        u"|-\n"
        u"! name\n"
        u"! id\n"
    )
    template = (
        u"|-\n"
        u"| {name}\n"
        u"| {id}\n"
    )
    epilogue = u"|}\n"
    path = NamedTemporaryFile().name
    # exercise function
    totext(table, path, template=template, prologue=prologue,
           epilogue=epilogue, encoding='utf-8')
    # read the file back and compare against the fully rendered document
    with io.open(path, encoding='utf-8', mode='rt') as handle:
        actual = handle.read()
    expect = (
        u"{| class='wikitable'\n"
        u"|-\n"
        u"! name\n"
        u"! id\n"
        u"|-\n"
        u"| Արամ Խաչատրյան\n"
        u"| 1\n"
        u"|-\n"
        u"| Johann Strauß\n"
        u"| 2\n"
        u"|-\n"
        u"| Вагиф Сәмәдоғлу\n"
        u"| 3\n"
        u"|-\n"
        u"| 章子怡\n"
        u"| 4\n"
        u"|}\n"
    )
    eq_(expect, actual)
|
96ac19bb8adcf38ce5b40b5669a45ad824f99dfe
|
323897ae1b6a1afadb8fe8a35da25c9260bfb778
|
/tests/test_docs.py
|
86ce2e49bc7d85a10a4bc2cabe272bef8b14417c
|
[
"MIT"
] |
permissive
|
koaning/doubtlab
|
33c7a95a6cf3f5f551be880982b6a8107c0ed8db
|
0dd59d4e1168b08db7bd08f9b0934133c7e6c33c
|
refs/heads/main
| 2022-12-05T21:23:28.519902
| 2022-11-25T16:06:50
| 2022-11-25T16:06:50
| 425,039,785
| 473
| 22
|
MIT
| 2022-11-25T16:06:51
| 2021-11-05T17:57:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,652
|
py
|
test_docs.py
|
import pytest
from mktestdocs import check_docstring, check_md_file, get_codeblock_members
from doubtlab.reason import (
ProbaReason,
RandomReason,
OutlierReason,
DisagreeReason,
LongConfidenceReason,
ShortConfidenceReason,
MarginConfidenceReason,
WrongPredictionReason,
AbsoluteDifferenceReason,
RelativeDifferenceReason,
CleanlabReason,
ShannonEntropyReason,
)
from doubtlab.ensemble import DoubtEnsemble
from doubtlab.benchmark import flip_labels, shuffle_labels
all_objects = [
ProbaReason,
RandomReason,
OutlierReason,
DisagreeReason,
LongConfidenceReason,
ShortConfidenceReason,
MarginConfidenceReason,
WrongPredictionReason,
AbsoluteDifferenceReason,
RelativeDifferenceReason,
CleanlabReason,
ShannonEntropyReason,
DoubtEnsemble,
]
def flatten(items):
    """Flattens a list (one level deep)."""
    flat = []
    for sublist in items:
        flat.extend(sublist)
    return flat
# This way we ensure that each item in `all_members` points to a method
# that could have a docstring.
all_members = flatten([get_codeblock_members(o) for o in all_objects]) + [
flip_labels,
shuffle_labels,
]
# One pytest case per collected member; __qualname__ keeps the test ids readable.
@pytest.mark.parametrize("func", all_members, ids=lambda d: d.__qualname__)
def test_function_docstrings(func):
    """Test the python example in each method in each object."""
    check_docstring(obj=func)
@pytest.mark.parametrize(
    "fpath",
    [
        "README.md",
        "docs/quickstart/benchmarks.md",
        "docs/examples/google-emotions.md",
    ],
)
def test_quickstart_docs_file(fpath):
    """Test the quickstart files."""
    # NOTE(review): memory=True presumably lets code blocks within one
    # markdown file share state — confirm against the mktestdocs docs.
    check_md_file(fpath, memory=True)
|
e970eec6431c05aa236e8d58719dca8c53a18321
|
5e9576c368e98927e2965bd2fb23bd35d9993d69
|
/featuretools/primitives/standard/transform/cumulative/cumulative_time_since_last_true.py
|
b237a5357bebc49cc69de314ef56d4e8a58b1ec0
|
[
"BSD-3-Clause"
] |
permissive
|
alteryx/featuretools
|
c6e319e063e8e84e7684bf232376f95dc5272160
|
c284c2d27a95b81e0bae913ac90df2b02c8f3b37
|
refs/heads/main
| 2023-08-25T12:21:33.945418
| 2023-08-23T16:30:25
| 2023-08-23T16:30:25
| 102,908,804
| 1,783
| 201
|
BSD-3-Clause
| 2023-09-07T18:53:19
| 2017-09-08T22:15:17
|
Python
|
UTF-8
|
Python
| false
| false
| 2,013
|
py
|
cumulative_time_since_last_true.py
|
import numpy as np
import pandas as pd
from woodwork.column_schema import ColumnSchema
from woodwork.logical_types import Boolean, Datetime, Double
from featuretools.primitives.base import TransformPrimitive
class CumulativeTimeSinceLastTrue(TransformPrimitive):
    """Determines the time (in seconds) since the last boolean was `True`
    given a datetime index column and boolean column

    Examples:
        >>> from datetime import datetime
        >>> cumulative_time_since_last_true = CumulativeTimeSinceLastTrue()
        >>> booleans = [False, True, False, True]
        >>> datetimes = [
        ...     datetime(2011, 4, 9, 10, 30, 0),
        ...     datetime(2011, 4, 9, 10, 30, 10),
        ...     datetime(2011, 4, 9, 10, 30, 15),
        ...     datetime(2011, 4, 9, 10, 30, 30)
        ... ]
        >>> cumulative_time_since_last_true(datetimes, booleans).tolist()
        [nan, 0.0, 5.0, 0.0]
    """

    name = "cumulative_time_since_last_true"
    input_types = [
        ColumnSchema(logical_type=Datetime, semantic_tags={"time_index"}),
        ColumnSchema(logical_type=Boolean),
    ]
    return_type = ColumnSchema(logical_type=Double, semantic_tags={"numeric"})

    def get_function(self):
        def time_since_previous_true(datetime_col, bool_col):
            # No usable booleans at all: return an all-NaN series of the
            # same length (rows before the first True are NaN by design).
            if bool_col.dropna().empty:
                return pd.Series([np.nan] * len(bool_col))
            df = pd.DataFrame(
                {
                    "datetime": datetime_col,
                    "last_true_datetime": datetime_col,
                    "bool": bool_col,
                },
            )
            # Blank out the timestamp wherever the boolean is False, then
            # forward-fill so each row carries the most recent True time.
            not_false_indices = df["bool"]
            df.loc[~not_false_indices, "last_true_datetime"] = np.nan
            # .ffill() replaces fillna(method="ffill"), which is deprecated
            # since pandas 2.1 and removed in pandas 3.0; behavior is identical.
            df["last_true_datetime"] = df["last_true_datetime"].ffill()
            total_seconds = (
                df["datetime"] - df["last_true_datetime"]
            ).dt.total_seconds()
            return pd.Series(total_seconds)

        return time_since_previous_true
|
f219cfe0fc7fe4f1cd6553f61416be72f067d840
|
444a9480bce2035565332d4d4654244c0b5cd47b
|
/research/cv/FaceNet/eval.py
|
bd83a0d6f71658d106857fe0030de37d461f4379
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] |
permissive
|
mindspore-ai/models
|
7ede9c6454e77e995e674628204e1c6e76bd7b27
|
eab643f51336dbf7d711f02d27e6516e5affee59
|
refs/heads/master
| 2023-07-20T01:49:34.614616
| 2023-07-17T11:43:18
| 2023-07-17T11:43:18
| 417,393,380
| 301
| 92
|
Apache-2.0
| 2023-05-17T11:22:28
| 2021-10-15T06:38:37
|
Python
|
UTF-8
|
Python
| false
| false
| 2,709
|
py
|
eval.py
|
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import argparse
import numpy as np
from mindspore import load_checkpoint, load_param_into_net
from mindspore.ops import stop_gradient
from mindspore.common import set_seed
from mindspore import context
from src.eval_metrics import evaluate
from src.LFWDataset import get_lfw_dataloader
from src.models import FaceNetModelwithLoss
set_seed(0)
parser = argparse.ArgumentParser(description='Face Recognition using Triplet Loss')
parser.add_argument("--ckpt", type=str, default="")
parser.add_argument("--eval_root_dir", type=str, default="")
parser.add_argument("--eval_pairs_path", type=str, default="")
parser.add_argument("--eval_batch_size", type=int, default=64)
args = parser.parse_args()
def validate_lfw(model_eval, lfw_dataloader):
    """Run pairwise face verification on LFW and print mean accuracy.

    model_eval: network exposing evaluate(img1, img2) -> pairwise distance.
    lfw_dataloader: MindSpore dataset yielding dicts with 'img1', 'img2',
        'issame' batches.
    """
    distances, labels = [], []
    print("Validating on LFW! ...")
    for data in lfw_dataloader.create_dict_iterator():
        distance = model_eval.evaluate(data['img1'], data['img2'])
        label = data['issame']
        # Detach from the autodiff graph — inference only.
        distance = stop_gradient(distance)
        label = stop_gradient(label)
        distances.append(distance.asnumpy())
        labels.append(label.asnumpy())
    # Flatten the per-batch arrays into single 1-D arrays.
    labels = np.array([sublabel for label in labels for sublabel in label])
    distances = np.array([subdist for distance in distances for subdist in distance])
    # evaluate() returns (tpr, fpr, accuracy, val, val_std, far);
    # only the per-fold accuracy array is reported here.
    _, _, accuracy, _, _, _ = evaluate(distances, labels)
    print(np.mean(accuracy))
if __name__ == "__main__":
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", save_graphs=False, device_id=1)
facenet = FaceNetModelwithLoss(num_classes=1001, margin=0.5, mode='eval', ckpt_path="")
state_dict = load_checkpoint(ckpt_file_name=args.ckpt, net=facenet)
print("Loading the trained models from ckpt")
load_param_into_net(facenet, state_dict)
lfwdataloader = get_lfw_dataloader(eval_root_dir=args.eval_root_dir,
eval_pairs_path=args.eval_pairs_path,
eval_batch_size=args.eval_batch_size)
validate_lfw(facenet, lfwdataloader)
|
c3902aeb2177129e0b066c9847479159d80574d8
|
f30fa69c3a568eda5665301265ecafca4f4aaabf
|
/skimage/util/tests/test_slice_along_axes.py
|
38c9a2cb7cd4d15a8a779d23308e4a28ea125b3a
|
[
"BSD-2-Clause",
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
scikit-image/scikit-image
|
7cf7f29424a7f98815d69ee9f5193e5fc64d829e
|
a45f2daf9fcac32e3e8d2c95ade807ac67351965
|
refs/heads/main
| 2023-09-05T07:08:10.970717
| 2023-08-26T19:38:11
| 2023-08-26T19:38:11
| 2,014,929
| 5,509
| 2,439
|
NOASSERTION
| 2023-09-14T10:46:45
| 2011-07-07T22:07:20
|
Python
|
UTF-8
|
Python
| false
| false
| 1,681
|
py
|
test_slice_along_axes.py
|
import numpy as np
import pytest
from skimage.util import slice_along_axes
rng = np.random.default_rng()
def test_2d_crop_0():
    """Cropping only axis 0 leaves the other axis untouched."""
    image = rng.random((50, 50))
    cropped = slice_along_axes(image, [(0, 25)])
    np.testing.assert_array_equal(cropped, image[:25, :])
def test_2d_crop_1():
    """Cropping both axes applies each (start, stop) to its own axis."""
    image = rng.random((50, 50))
    cropped = slice_along_axes(image, [(0, 25), (0, 10)])
    np.testing.assert_array_equal(cropped, image[:25, :10])
def test_2d_crop_2():
    """Explicit axes=[1, 0] maps each slice to the requested axis."""
    image = rng.random((50, 50))
    cropped = slice_along_axes(image, [(0, 25), (0, 30)], axes=[1, 0])
    np.testing.assert_array_equal(cropped, image[:30, :25])
def test_2d_negative():
    """Negative stop values count from the end, like Python slicing."""
    image = rng.random((50, 50))
    cropped = slice_along_axes(image, [(5, -5), (6, -6)])
    np.testing.assert_array_equal(cropped, image[5:-5, 6:-6])
def test_copy():
    """copy=False must return a view of the input; copy=True an independent array."""
    src = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    view = slice_along_axes(src, [(0, 3)], axes=[1], copy=False)
    dup = slice_along_axes(src, [(0, 3)], axes=[0], copy=True)
    assert view.base is src
    assert dup.base is not src
def test_nd_crop():
    """A single slice with axes=[2] crops only the last axis of a 3-D array."""
    volume = rng.random((50, 50, 50))
    cropped = slice_along_axes(volume, [(0, 25)], axes=[2])
    np.testing.assert_array_equal(cropped, volume[:, :, :25])
def test_axes_invalid():
    """An axis index beyond the array's ndim must raise ValueError."""
    image = np.empty((2, 3))
    with pytest.raises(ValueError):
        slice_along_axes(image, [(0, 3)], axes=[2])
def test_axes_limit_invalid():
    """A stop index past the axis length must raise ValueError."""
    image = np.empty((50, 50))
    with pytest.raises(ValueError):
        slice_along_axes(image, [(0, 51)], axes=[0])
def test_too_many_axes():
    """More slices than array dimensions must raise ValueError."""
    image = np.empty((10, 10))
    with pytest.raises(ValueError):
        slice_along_axes(image, [(0, 1), (0, 1), (0, 1)])
|
b8017c75d0ce7464a3f24e8b84ea0c5379c76bb4
|
671a2603d05d5033e98623c9fa857c1ff0dbaf65
|
/application-workloads/python/vmss-bottle-autoscale/workserver.py
|
259485b69b047d9a954b23403aac3780258a2344
|
[
"MIT"
] |
permissive
|
Azure/azure-quickstart-templates
|
ed26f54b289d7826c40e7ee768001d2826297168
|
e2b259b46ebef83fcd015188cbef01e276d0c1b4
|
refs/heads/master
| 2023-09-05T11:00:25.375647
| 2023-09-01T15:00:21
| 2023-09-01T15:00:22
| 34,407,651
| 14,694
| 19,074
|
MIT
| 2023-09-14T17:36:17
| 2015-04-22T18:19:03
|
Bicep
|
UTF-8
|
Python
| false
| false
| 1,699
|
py
|
workserver.py
|
# workserver.py - simple HTTP server with a do_work / stop_work API
# GET /do_work activates a worker thread which uses CPU
# GET /stop_work signals worker thread to stop
import math
import socket
import threading
import time
from bottle import route, run
hostname = socket.gethostname()
hostport = 9000
keepworking = False # boolean to switch worker thread on or off
# thread which maximizes CPU usage while the keepWorking global is True
def workerthread():
    """Busy-loop worker: burns CPU while the global ``keepworking`` flag is set."""
    # outer loop runs forever; the thread is never joined or stopped
    while (True):
        # main loop to thrash the CPU (repeated factorials are pure CPU work)
        while (keepworking == True):
            for x in range(1, 69):
                math.factorial(x)
        # flag is off: sleep so the idle thread costs (almost) nothing
        time.sleep(3)
# start the worker thread
worker_thread = threading.Thread(target=workerthread, args=())
worker_thread.start()
def writebody():
    """Render the status/usage HTML page, reflecting the worker-thread state."""
    chunks = [
        '<html><head><title>Work interface - build</title></head>',
        '<body><h2>Worker interface on ' + hostname + '</h2><ul><h3>',
    ]
    # Offer the opposite action to the current state.
    if keepworking == False:
        chunks.append('<br/>Worker thread is not running. <a href="./do_work">Start work</a><br/>')
    else:
        chunks.append('<br/>Worker thread is running. <a href="./stop_work">Stop work</a><br/>')
    chunks.append('<br/>Usage:<br/><br/>/do_work = start worker thread<br/>/stop_work = stop worker thread<br/>')
    chunks.append('</h3></ul></body></html>')
    return ''.join(chunks)
@route('/')
def root():
    # Landing page: same status page the action endpoints return.
    return writebody()
@route('/do_work')
def do_work():
    """Switch the worker thread on and return the status page."""
    global keepworking
    # start worker thread (the thread itself polls this flag)
    keepworking = True
    return writebody()
@route('/stop_work')
def stop_work():
    """Switch the worker thread off and return the status page."""
    global keepworking
    # stop worker thread (it falls back to its sleep loop)
    keepworking = False
    return writebody()
run(host=hostname, port=hostport)
|
9e04ac16057668835fb24c1de91976fa2ddc86e5
|
7400a2f7782a4b679f12cad8307ff7999a604c79
|
/puppetboard/errors.py
|
37c0cf7647a78aed682ee456c7156aa0f9070ff0
|
[
"Apache-2.0",
"CC-BY-3.0"
] |
permissive
|
voxpupuli/puppetboard
|
49494649c00573bac7ed24021dbb7f266da8add5
|
43c21f59b95bd3fe3f8462cb91a252768223ed11
|
refs/heads/master
| 2023-09-03T07:44:54.047732
| 2023-08-23T10:08:10
| 2023-08-23T10:08:10
| 11,746,441
| 396
| 229
|
Apache-2.0
| 2023-09-12T13:58:07
| 2013-07-29T18:22:09
|
Python
|
UTF-8
|
Python
| false
| false
| 955
|
py
|
errors.py
|
from flask import render_template
from werkzeug.exceptions import InternalServerError
from puppetboard.core import environments, get_app
app = get_app()
@app.errorhandler(400)
def bad_request(e):
    """Render the 400 (bad request) error page."""
    return render_template('400.html', envs=environments()), 400
@app.errorhandler(403)
def forbidden(e):
    """Render the 403 (forbidden) error page."""
    return render_template('403.html', envs=environments()), 403
@app.errorhandler(404)
def not_found(e):
    """Render the 404 (not found) error page."""
    return render_template('404.html', envs=environments()), 404
@app.errorhandler(412)
def precond_failed(e):
    """Render the 412 page.

    412 is (slightly abusively) used here to signal features that are
    missing from the queried API version.
    """
    return render_template('412.html', envs=environments()), 412
@app.errorhandler(500)
def server_error(e):
    """Render the 500 page; tolerate the environment lookup itself failing."""
    try:
        envs = environments()
    except InternalServerError:
        # environments() can itself fail during an outage; fall back to an
        # empty mapping so the error page can still render.
        envs = {}
    return render_template('500.html', envs=envs), 500
|
c4acefa7e70f445bd8b9c33afe8c8feb7a0a416a
|
fbbe424559f64e9a94116a07eaaa555a01b0a7bb
|
/Skimage_numpy/source/scipy/weave/vtk_spec.py
|
29afe6af6b4f684a40f4b723b10b79612215f82a
|
[
"MIT"
] |
permissive
|
ryfeus/lambda-packs
|
6544adb4dec19b8e71d75c24d8ed789b785b0369
|
cabf6e4f1970dc14302f87414f170de19944bac2
|
refs/heads/master
| 2022-12-07T16:18:52.475504
| 2022-11-29T13:35:35
| 2022-11-29T13:35:35
| 71,386,735
| 1,283
| 263
|
MIT
| 2022-11-26T05:02:14
| 2016-10-19T18:22:39
|
Python
|
UTF-8
|
Python
| false
| false
| 4,214
|
py
|
vtk_spec.py
|
"""
VTK type converter.
This module handles conversion between VTK C++ and VTK Python objects
so that one can write inline C++ code to manipulate VTK Python
objects. It requires that you have VTK and the VTK-Python wrappers
installed. It has been tested with VTK 4.0 and above. You will need
to call inline with include_dirs, library_dirs and often even
libraries appropriately set for this to work without errors.
Sometimes you might need to include additional headers.
Distributed under the SciPy License.
Authors:
Prabhu Ramachandran <prabhu@aero.iitm.ernet.in>
Eric Jones <eric@enthought.com>
"""
from __future__ import absolute_import, print_function
from .c_spec import common_base_converter
vtk_py_to_c_template = \
"""
class %(type_name)s_handler
{
public:
%(c_type)s convert_to_%(type_name)s(PyObject* py_obj, const char* name)
{
%(c_type)s vtk_ptr = (%(c_type)s) vtkPythonGetPointerFromObject(py_obj, "%(type_name)s");
if (!vtk_ptr)
handle_conversion_error(py_obj,"%(type_name)s", name);
%(inc_ref_count)s
return vtk_ptr;
}
%(c_type)s py_to_%(type_name)s(PyObject* py_obj, const char* name)
{
%(c_type)s vtk_ptr = (%(c_type)s) vtkPythonGetPointerFromObject(py_obj, "%(type_name)s");
if (!vtk_ptr)
handle_bad_type(py_obj,"%(type_name)s", name);
%(inc_ref_count)s
return vtk_ptr;
}
};
%(type_name)s_handler x__%(type_name)s_handler = %(type_name)s_handler();
#define convert_to_%(type_name)s(py_obj,name) \\
x__%(type_name)s_handler.convert_to_%(type_name)s(py_obj,name)
#define py_to_%(type_name)s(py_obj,name) \\
x__%(type_name)s_handler.py_to_%(type_name)s(py_obj,name)
"""
vtk_c_to_py_template = \
"""
PyObject* %(type_name)s_to_py(vtkObjectBase* obj)
{
return vtkPythonGetObjectFromPointer(obj);
}
"""
class vtk_converter(common_base_converter):
    """weave type converter for VTK objects.

    Generates the C++ glue (via the module-level templates) that converts
    between VTK Python wrappers and vtkObjectBase* pointers.

    NOTE(review): Python 2 legacy — uses cmp()/__cmp__, which do not exist
    in Python 3.
    """
    def __init__(self,class_name="undefined"):
        # "undefined" marks the prototype instance; real converters are
        # created per-class by type_spec().
        self.class_name = class_name
        common_base_converter.__init__(self)
    def init_info(self):
        """Fill in the per-class conversion/build metadata."""
        common_base_converter.init_info(self)
        # These are generated on the fly instead of defined at
        # the class level.
        self.type_name = self.class_name
        self.c_type = self.class_name + "*"
        self.return_type = self.c_type
        self.to_c_return = None # not used
        self.check_func = None # not used
        hdr = self.class_name + ".h"
        # Remember that you need both the quotes!
        self.headers.extend(['"vtkPythonUtil.h"', '"vtkObject.h"',
                             '"%s"' % hdr])
        # self.include_dirs.extend(vtk_inc)
        # self.define_macros.append(('SOME_VARIABLE', '1'))
        # self.library_dirs.extend(vtk_lib)
        self.libraries.extend(['vtkCommonPython', 'vtkCommon'])
        # self.support_code.append(common_info.swig_support_code)
    def type_match(self,value):
        """Return 1 if *value* is a VTK object (has IsA('vtkObject'))."""
        is_match = 0
        try:
            if value.IsA('vtkObject'):
                is_match = 1
        except AttributeError:
            # Non-VTK objects simply have no IsA() method.
            pass
        return is_match
    def generate_build_info(self):
        if self.class_name != "undefined":
            res = common_base_converter.generate_build_info(self)
        else:
            # Prototype instance: return empty build info so its
            # support_code is not included.
            from . import base_info
            res = base_info.base_info()
        return res
    def py_to_c_code(self):
        # Instantiate the module-level C++ template for this class.
        return vtk_py_to_c_template % self.template_vars()
    def c_to_py_code(self):
        return vtk_c_to_py_template % self.template_vars()
    def type_spec(self,name,value):
        # factory: build a converter specialized for value's VTK class
        class_name = value.__class__.__name__
        new_spec = self.__class__(class_name)
        new_spec.name = name
        return new_spec
    def __cmp__(self,other):
        # only works for equal (Python 2 only; res stays -1 on any error)
        # NOTE(review): the bare except silently swallows all failures,
        # not just missing attributes on 'other'.
        res = -1
        try:
            res = cmp(self.name,other.name) or \
                  cmp(self.__class__, other.__class__) or \
                  cmp(self.class_name, other.class_name) or \
                  cmp(self.type_name,other.type_name)
        except:
            pass
        return res
|
45b091d8eb7bdbec28d116137c44b76715cfdd3a
|
5e45ba79976ba805f6744f0bcb4ddfbde3e0a7a4
|
/alibi_detect/utils/tensorflow/misc.py
|
e3aed121abe3e143f1e075b1619eb28052f68383
|
[
"Apache-2.0"
] |
permissive
|
SeldonIO/alibi-detect
|
e3293baa0603acace6f79bfb14953cf953943b10
|
4a1b4f74a8590117965421e86c2295bff0f33e89
|
refs/heads/master
| 2023-08-25T05:47:14.038826
| 2023-08-08T14:12:47
| 2023-08-08T14:12:47
| 213,390,927
| 1,922
| 195
|
Apache-2.0
| 2023-09-12T07:07:13
| 2019-10-07T13:29:13
|
Python
|
UTF-8
|
Python
| false
| false
| 2,661
|
py
|
misc.py
|
import tensorflow as tf
def zero_diag(mat: tf.Tensor) -> tf.Tensor:
    """Return a copy of the 2D square matrix `mat` with its diagonal zeroed.

    Parameters
    ----------
    mat
        A 2D square matrix

    Returns
    -------
    A 2D square matrix with zeros along the diagonal
    """
    diagonal_part = tf.linalg.diag_part(mat)
    return mat - tf.linalg.diag(diagonal_part)
def quantile(sample: tf.Tensor, p: float, type: int = 7, sorted: bool = False) -> float:
    """
    Estimate a desired quantile of a univariate distribution from a vector of samples

    Parameters
    ----------
    sample
        A 1D vector of values
    p
        The desired quantile in (0,1)
    type
        The method for computing the quantile: one of 6, 7 or 8.
        See https://wikipedia.org/wiki/Quantile#Estimating_quantiles_from_a_sample
    sorted
        Whether or not the vector is already sorted into ascending order

    Returns
    -------
    An estimate of the quantile

    Raises
    ------
    ValueError
        If `sample` is not 1D, if `p` cannot be estimated from only N samples,
        or if `type` is not one of 6, 7 or 8.
    """
    N = len(sample)
    if len(sample.shape) != 1:
        raise ValueError("Quantile estimation only supports vectors of univariate samples.")
    if not 1/N <= p <= (N-1)/N:
        raise ValueError(f"The {p}-quantile should not be estimated using only {N} samples.")

    sorted_sample = sample if sorted else tf.sort(sample)

    # Position h of the quantile per the chosen estimator (Hyndman & Fan types 6-8).
    if type == 6:
        h = (N+1)*p
    elif type == 7:
        h = (N-1)*p + 1
    elif type == 8:
        h = (N+1/3)*p + 1/3
    else:
        # Previously any other `type` left `h` unbound and raised a confusing
        # UnboundLocalError below; fail fast with a clear message instead.
        raise ValueError(f"type must be one of 6, 7 or 8; got {type}.")

    # Linearly interpolate between the two nearest order statistics.
    h_floor = int(h)
    quantile = sorted_sample[h_floor-1]
    if h_floor != h:
        quantile += (h - h_floor)*(sorted_sample[h_floor]-sorted_sample[h_floor-1])
    return float(quantile)
def subset_matrix(mat: tf.Tensor, inds_0: tf.Tensor, inds_1: tf.Tensor) -> tf.Tensor:
    """Extract the submatrix of `mat` given by row and column index vectors.

    Parameters
    ----------
    mat
        A 2D matrix
    inds_0
        A vector of row indices
    inds_1
        A vector of column indices

    Returns
    -------
    A submatrix of shape (len(inds_0), len(inds_1))
    """
    if len(mat.shape) != 2:
        raise ValueError("Subsetting only supported for matrices (2D)")
    # Select rows first, then columns of the row-reduced matrix.
    selected_rows = tf.gather(mat, inds_0, axis=0)
    return tf.gather(selected_rows, inds_1, axis=1)
def clone_model(model: tf.keras.Model) -> tf.keras.Model:
    """Return a fresh copy of `model`.

    Sequential and functional models are cloned directly.  Subclassed models
    (for which `tf.keras.models.clone_model` raises ValueError) are rebuilt
    from their config, falling back to an empty config when the model does
    not implement `get_config`.
    """
    try:
        return tf.keras.models.clone_model(model)
    except ValueError:  # subclassed model
        try:
            cfg = model.get_config()
        except NotImplementedError:
            cfg = {}
        return type(model).from_config(cfg)
|
2401520d56dc308faf67afafbb6dec328f797907
|
9a0eb3e292d57b59198c7c66a994372ced9cfa5b
|
/nodes/2.x/python/Color.ToDynamoColor.py
|
5263e85d733bde897f5b3ed4cf1c0d46a2759bb8
|
[
"MIT"
] |
permissive
|
andydandy74/ClockworkForDynamo
|
544ddf0893f5c0072fca7934f4e128001771f767
|
528400c667c4c3f2b51814af84e85c8fab8a8059
|
refs/heads/master
| 2023-08-19T03:07:33.489926
| 2023-08-13T04:31:17
| 2023-08-13T04:31:17
| 15,043,988
| 184
| 100
|
MIT
| 2023-09-04T18:47:40
| 2013-12-09T10:11:01
|
Python
|
UTF-8
|
Python
| false
| false
| 420
|
py
|
Color.ToDynamoColor.py
|
import clr
clr.AddReference('RevitAPI')
from Autodesk.Revit.DB import *
def GetColorComponents(color):
    """Return the (red, green, blue) components of a Revit color object,
    or (None, None, None) when the object lacks the color attributes."""
    if all(hasattr(color, attr) for attr in ("Red", "Blue", "Green")):
        return color.Red, color.Green, color.Blue
    return None, None, None
# IN/OUT are the Dynamo node's input and output ports.
colors = UnwrapElement(IN[0])
# List input: transpose per-color (R, G, B) triples into three parallel
# lists (all reds, all greens, all blues).  Single input: one triple.
if isinstance(IN[0], list): OUT = map(list, zip(*[GetColorComponents(x) for x in colors]))
else: OUT = GetColorComponents(colors)
|
6b23052f84575dd2e3025396338cee04608252eb
|
965efc4d7a83c2b5592417aa7e0d25a51f5a8108
|
/backend/metering_billing/migrations/0068_rename_customer_historicalinvoice_old_customer_and_more.py
|
e111d1ca383d73a1341f80d54feb778de877804a
|
[
"MIT"
] |
permissive
|
uselotus/lotus
|
f4ee23bb828605215f18aacd1d6fcff8e0986c53
|
c065fb33ee1a870d72bbd2adfddc08d50ca049b6
|
refs/heads/main
| 2023-08-17T03:38:35.770580
| 2023-07-26T18:50:17
| 2023-07-26T18:50:17
| 516,192,901
| 1,447
| 100
|
MIT
| 2023-06-25T22:53:06
| 2022-07-21T02:06:46
|
Python
|
UTF-8
|
Python
| false
| false
| 1,248
|
py
|
0068_rename_customer_historicalinvoice_old_customer_and_more.py
|
# Generated by Django 4.0.5 on 2022-11-10 23:29
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the customer/organization/subscription fields on Invoice and
    its history model (HistoricalInvoice) to old_* counterparts,
    presumably freeing the original names for replacement fields added in a
    later migration -- confirm against subsequent migrations."""

    dependencies = [
        (
            "metering_billing",
            "0067_remove_historicalinvoice_external_payment_obj_and_more",
        ),
    ]

    operations = [
        migrations.RenameField(
            model_name="historicalinvoice",
            old_name="customer",
            new_name="old_customer",
        ),
        migrations.RenameField(
            model_name="historicalinvoice",
            old_name="organization",
            new_name="old_organization",
        ),
        migrations.RenameField(
            model_name="historicalinvoice",
            old_name="subscription",
            new_name="old_subscription",
        ),
        migrations.RenameField(
            model_name="invoice",
            old_name="customer",
            new_name="old_customer",
        ),
        migrations.RenameField(
            model_name="invoice",
            old_name="organization",
            new_name="old_organization",
        ),
        migrations.RenameField(
            model_name="invoice",
            old_name="subscription",
            new_name="old_subscription",
        ),
    ]
|
e1df1a68f98f0b648716d53e31d05fd0f4e68d40
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/response/AlipayOpenAppLocalitemQueryResponse.py
|
b2de48f385866a5cf6fe66214ec434c1b7a55291
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 8,326
|
py
|
AlipayOpenAppLocalitemQueryResponse.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.AppItemAttrVO import AppItemAttrVO
from alipay.aop.api.domain.PhoneStructVO import PhoneStructVO
from alipay.aop.api.domain.ItemRiskInfo import ItemRiskInfo
from alipay.aop.api.domain.ItemSceneRiskInfo import ItemSceneRiskInfo
from alipay.aop.api.domain.LocalItemSkuQueryVO import LocalItemSkuQueryVO
from alipay.aop.api.domain.TimeRangeStructVO import TimeRangeStructVO
class AlipayOpenAppLocalitemQueryResponse(AlipayResponse):
    """Response model for the alipay.open.app.localitem.query API.

    Generated-SDK-style wrapper: every response field is exposed as a
    property backed by a private attribute.  Setters for struct/list fields
    coerce plain dicts coming from the gateway JSON into their domain VO
    types via ``from_alipay_dict``; scalar setters store the value as-is.
    All fields stay None until ``parse_response_content`` populates them.
    """

    def __init__(self):
        super(AlipayOpenAppLocalitemQueryResponse, self).__init__()
        self._attrs = None
        self._category_id = None
        self._create_time = None
        self._customer_service_mobile = None
        self._head_img = None
        self._image_list = None
        self._is_online = None
        self._item_details_page_model = None
        self._item_id = None
        self._item_type = None
        self._merchant_name = None
        self._out_item_id = None
        self._path = None
        self._risk_info = None
        self._scene_risk_info = None
        self._skus = None
        self._sold_time = None
        self._spu_status = None
        self._stock_num = None
        self._title = None
        self._update_time = None

    @property
    def attrs(self):
        return self._attrs

    @attrs.setter
    def attrs(self, value):
        # Accepts a list of AppItemAttrVO instances or raw dicts to convert.
        if isinstance(value, list):
            self._attrs = list()
            for i in value:
                if isinstance(i, AppItemAttrVO):
                    self._attrs.append(i)
                else:
                    self._attrs.append(AppItemAttrVO.from_alipay_dict(i))

    @property
    def category_id(self):
        return self._category_id

    @category_id.setter
    def category_id(self, value):
        self._category_id = value

    @property
    def create_time(self):
        return self._create_time

    @create_time.setter
    def create_time(self, value):
        self._create_time = value

    @property
    def customer_service_mobile(self):
        return self._customer_service_mobile

    @customer_service_mobile.setter
    def customer_service_mobile(self, value):
        # Accepts a PhoneStructVO or a raw dict to convert.
        if isinstance(value, PhoneStructVO):
            self._customer_service_mobile = value
        else:
            self._customer_service_mobile = PhoneStructVO.from_alipay_dict(value)

    @property
    def head_img(self):
        return self._head_img

    @head_img.setter
    def head_img(self, value):
        self._head_img = value

    @property
    def image_list(self):
        return self._image_list

    @image_list.setter
    def image_list(self, value):
        # Plain list of values; copied element by element.
        if isinstance(value, list):
            self._image_list = list()
            for i in value:
                self._image_list.append(i)

    @property
    def is_online(self):
        return self._is_online

    @is_online.setter
    def is_online(self, value):
        self._is_online = value

    @property
    def item_details_page_model(self):
        return self._item_details_page_model

    @item_details_page_model.setter
    def item_details_page_model(self, value):
        self._item_details_page_model = value

    @property
    def item_id(self):
        return self._item_id

    @item_id.setter
    def item_id(self, value):
        self._item_id = value

    @property
    def item_type(self):
        return self._item_type

    @item_type.setter
    def item_type(self, value):
        self._item_type = value

    @property
    def merchant_name(self):
        return self._merchant_name

    @merchant_name.setter
    def merchant_name(self, value):
        self._merchant_name = value

    @property
    def out_item_id(self):
        return self._out_item_id

    @out_item_id.setter
    def out_item_id(self, value):
        self._out_item_id = value

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, value):
        self._path = value

    @property
    def risk_info(self):
        return self._risk_info

    @risk_info.setter
    def risk_info(self, value):
        # Accepts a list of ItemRiskInfo instances or raw dicts to convert.
        if isinstance(value, list):
            self._risk_info = list()
            for i in value:
                if isinstance(i, ItemRiskInfo):
                    self._risk_info.append(i)
                else:
                    self._risk_info.append(ItemRiskInfo.from_alipay_dict(i))

    @property
    def scene_risk_info(self):
        return self._scene_risk_info

    @scene_risk_info.setter
    def scene_risk_info(self, value):
        # Accepts a list of ItemSceneRiskInfo instances or raw dicts to convert.
        if isinstance(value, list):
            self._scene_risk_info = list()
            for i in value:
                if isinstance(i, ItemSceneRiskInfo):
                    self._scene_risk_info.append(i)
                else:
                    self._scene_risk_info.append(ItemSceneRiskInfo.from_alipay_dict(i))

    @property
    def skus(self):
        return self._skus

    @skus.setter
    def skus(self, value):
        # Accepts a list of LocalItemSkuQueryVO instances or raw dicts to convert.
        if isinstance(value, list):
            self._skus = list()
            for i in value:
                if isinstance(i, LocalItemSkuQueryVO):
                    self._skus.append(i)
                else:
                    self._skus.append(LocalItemSkuQueryVO.from_alipay_dict(i))

    @property
    def sold_time(self):
        return self._sold_time

    @sold_time.setter
    def sold_time(self, value):
        # Accepts a TimeRangeStructVO or a raw dict to convert.
        if isinstance(value, TimeRangeStructVO):
            self._sold_time = value
        else:
            self._sold_time = TimeRangeStructVO.from_alipay_dict(value)

    @property
    def spu_status(self):
        return self._spu_status

    @spu_status.setter
    def spu_status(self, value):
        self._spu_status = value

    @property
    def stock_num(self):
        return self._stock_num

    @stock_num.setter
    def stock_num(self, value):
        self._stock_num = value

    @property
    def title(self):
        return self._title

    @title.setter
    def title(self, value):
        self._title = value

    @property
    def update_time(self):
        return self._update_time

    @update_time.setter
    def update_time(self, value):
        self._update_time = value

    def parse_response_content(self, response_content):
        """Populate fields from the gateway JSON payload, assigning only keys
        that are present; each assignment routes through the property setters
        above so VO conversion happens automatically."""
        response = super(AlipayOpenAppLocalitemQueryResponse, self).parse_response_content(response_content)
        if 'attrs' in response:
            self.attrs = response['attrs']
        if 'category_id' in response:
            self.category_id = response['category_id']
        if 'create_time' in response:
            self.create_time = response['create_time']
        if 'customer_service_mobile' in response:
            self.customer_service_mobile = response['customer_service_mobile']
        if 'head_img' in response:
            self.head_img = response['head_img']
        if 'image_list' in response:
            self.image_list = response['image_list']
        if 'is_online' in response:
            self.is_online = response['is_online']
        if 'item_details_page_model' in response:
            self.item_details_page_model = response['item_details_page_model']
        if 'item_id' in response:
            self.item_id = response['item_id']
        if 'item_type' in response:
            self.item_type = response['item_type']
        if 'merchant_name' in response:
            self.merchant_name = response['merchant_name']
        if 'out_item_id' in response:
            self.out_item_id = response['out_item_id']
        if 'path' in response:
            self.path = response['path']
        if 'risk_info' in response:
            self.risk_info = response['risk_info']
        if 'scene_risk_info' in response:
            self.scene_risk_info = response['scene_risk_info']
        if 'skus' in response:
            self.skus = response['skus']
        if 'sold_time' in response:
            self.sold_time = response['sold_time']
        if 'spu_status' in response:
            self.spu_status = response['spu_status']
        if 'stock_num' in response:
            self.stock_num = response['stock_num']
        if 'title' in response:
            self.title = response['title']
        if 'update_time' in response:
            self.update_time = response['update_time']
|
1e7e9c51f6a3687af41e5c491c263a9c13bc7018
|
eb9f655206c43c12b497c667ba56a0d358b6bc3a
|
/python/testData/surround/SurroundWithTryExcept.py
|
6c333bb4172917d819a37e18a4080e33d53db96c
|
[
"Apache-2.0"
] |
permissive
|
JetBrains/intellij-community
|
2ed226e200ecc17c037dcddd4a006de56cd43941
|
05dbd4575d01a213f3f4d69aa4968473f2536142
|
refs/heads/master
| 2023-09-03T17:06:37.560889
| 2023-09-03T11:51:00
| 2023-09-03T12:12:27
| 2,489,216
| 16,288
| 6,635
|
Apache-2.0
| 2023-09-12T07:41:58
| 2011-09-30T13:33:05
| null |
UTF-8
|
Python
| false
| false
| 36
|
py
|
SurroundWithTryExcept.py
|
def foo():
pr<caret>int("hello")
|
779b6be7cb2254c156de223b17b9870514cf6bf1
|
bb33e6be8316f35decbb2b81badf2b6dcf7df515
|
/source/res/scripts/client/gui/shared/players_panel_items.py
|
75600faf2d2fe64b6784f22a8ebaa502a51e33a7
|
[] |
no_license
|
StranikS-Scan/WorldOfTanks-Decompiled
|
999c9567de38c32c760ab72c21c00ea7bc20990c
|
d2fe9c195825ececc728e87a02983908b7ea9199
|
refs/heads/1.18
| 2023-08-25T17:39:27.718097
| 2022-09-22T06:49:44
| 2022-09-22T06:49:44
| 148,696,315
| 103
| 39
| null | 2022-09-14T17:50:03
| 2018-09-13T20:49:11
|
Python
|
UTF-8
|
Python
| false
| false
| 4,221
|
py
|
players_panel_items.py
|
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/shared/players_panel_items.py
import logging
from enum import Enum
import BigWorld

# Module-level logger shared by the players-panel components below.
_logger = logging.getLogger(__name__)
class PlayersPanelItems(Enum):
    """Kinds of items shown in the players panel.

    NOTE(review): value 2 is missing from the sequence -- presumably a
    subtype defined (or removed) elsewhere; confirm before renumbering.
    """
    DEFAULT = 0
    CAMP = 1
    CAPTURED_BOMB = 3
    FREE_BOMB = 4
class IComponent(object):
    """Interface for players-panel item components."""
    __slots__ = ()

    def getType(self):
        """Return the item subtype name; must be overridden."""
        raise NotImplementedError

    def setValuesOnCreate(self, entity):
        """Populate state from a newly created entity; must be overridden."""
        raise NotImplementedError

    def setValuesOnDestroy(self, entity):
        """Update state when the entity is destroyed; must be overridden."""
        raise NotImplementedError
class Camp(IComponent):
    """Players-panel component describing a camp entity.

    State is captured from the entity's dynamic components on creation and
    the alive flag is cleared on destruction.
    """
    __slots__ = ('__campUdo', '__isAlive', '__campId')

    def __init__(self, *args, **kwargs):
        self.__campUdo = ''
        self.__isAlive = True
        self.__campId = 0

    @property
    def campUdo(self):
        """GUID of the UDO occupying this camp ('' until captured)."""
        return self.__campUdo

    @property
    def isAlive(self):
        """False once setValuesOnDestroy has run."""
        return self.__isAlive

    @property
    def campId(self):
        """Index of the camp entity (0 until captured)."""
        return self.__campId

    def getType(self):
        return PlayersPanelItems.CAMP.name

    def setValuesOnCreate(self, entity):
        """Capture camp properties from the entity's dynamic components.

        Returns False (after logging) when a required component is missing.
        """
        components = entity.dynamicComponents
        udo_occupier = components.get('udoOccupier')
        entity_info = components.get('entityInfo')
        if udo_occupier is None or entity_info is None:
            _logger.error('Some component is not found to get camp properties')
            return False
        self.__campUdo = udo_occupier.guid
        self.__isAlive = True
        self.__campId = entity_info.index
        return True

    def setValuesOnDestroy(self, entity):
        self.__isAlive = False
        return True
class _BaseTimer(IComponent):
    """Base players-panel timer component keyed by a subtype name."""
    __slots__ = ('_subtype', '_timerID', '_endTime', '_leftTime', '_totalTime')

    def __init__(self, subtype, *args, **kwargs):
        # subtype is a PlayersPanelItems name string returned by getType().
        self._subtype = subtype
        self._timerID = 0
        self._endTime = 0
        self._leftTime = 0
        self._totalTime = 0

    def getType(self):
        return self._subtype

    @property
    def timerID(self):
        return self._timerID

    @property
    def endTime(self):
        return self._endTime

    @property
    def leftTime(self):
        return self._leftTime

    @property
    def totalTime(self):
        return self._totalTime

    def setValuesOnCreate(self, entity):
        # Base timers carry no creation-time state; subclasses override.
        return False

    def setValuesOnDestroy(self, entity):
        # Freeze the timer at the moment of destruction.
        self._timerID = entity.id
        self._endTime = BigWorld.serverTime()
        self._leftTime = 0
        return True
class BombTimer(_BaseTimer):
    """Timer component for a bomb item (captured or free)."""
    __slots__ = ('_isPaused', '_factor', '_timerGUID')

    def __init__(self, subtype, *args, **kwargs):
        super(BombTimer, self).__init__(subtype, *args, **kwargs)
        self._factor = 1.0
        self._isPaused = False
        self._timerGUID = 0

    @property
    def isPaused(self):
        return self._isPaused

    @property
    def timerGUID(self):
        return self._timerGUID

    @property
    def factor(self):
        return self._factor

    def setValuesOnCreate(self, entity):
        """Capture timer state from the entity's stressTimer component.

        Returns False when the component is absent.
        """
        stress_timer = entity.dynamicComponents.get('stressTimer')
        if stress_timer is None:
            return False
        self._timerID = entity.id
        self._endTime = stress_timer.timerInfo.endTime
        self._leftTime = int(round(stress_timer.timeToDelete))
        self._factor = stress_timer.factor
        self._totalTime = stress_timer.lifetime
        self._isPaused = stress_timer.isPaused
        # NOTE(review): getGuiID is read as an attribute, not called --
        # presumably a property on the component; confirm upstream.
        self._timerGUID = stress_timer.getGuiID
        return True

    def setValuesOnDestroy(self, entity):
        super(BombTimer, self).setValuesOnDestroy(entity)
        return True
# Factory table: players-panel item subtype name -> component class.
_ITEMS_BY_TYPE = {PlayersPanelItems.CAMP.name: Camp,
                  PlayersPanelItems.CAPTURED_BOMB.name: BombTimer,
                  PlayersPanelItems.FREE_BOMB.name: BombTimer}
# Subtypes that need extra processing during replay playback.
_PROCESS_REPLAY_TYPES = (PlayersPanelItems.CAPTURED_BOMB.name,)
def getGuiItemType(itemSubtype):
    """Instantiate the component class registered for itemSubtype.

    Returns None (after logging an error) for unknown subtypes.
    """
    try:
        factory = _ITEMS_BY_TYPE[itemSubtype]
    except KeyError:
        _logger.error('Unknown type of the item for the players panel')
        return None
    return factory(itemSubtype)
def isProcessReplayNeeded(itemSubtype):
    """Tell whether this item subtype must be post-processed during replays."""
    needs_processing = itemSubtype in _PROCESS_REPLAY_TYPES
    return needs_processing
|
2538b160c17010465f0f2bbea85f3d893787beb2
|
172258e84111128f67351029b1418f3e48b9ad28
|
/test-registry/numpy24/f.py
|
0367eb8c2aaf4090f7ee29d9e4b0dad2df97fd9c
|
[
"Apache-2.0"
] |
permissive
|
open-lambda/open-lambda
|
41aff8e721d5df3298156dc34c20e98901d2b4ae
|
deaa1b6a8635c9cc789208a8beee17d8f07607b3
|
refs/heads/main
| 2023-08-30T21:56:32.180377
| 2023-08-27T02:06:28
| 2023-08-27T02:23:16
| 61,392,620
| 933
| 126
|
Apache-2.0
| 2023-09-07T17:08:45
| 2016-06-17T18:01:08
|
Go
|
UTF-8
|
Python
| false
| false
| 117
|
py
|
f.py
|
import numpy
def f(event):
    """Lambda handler: sum the numeric payload and report the numpy version."""
    total = int(numpy.array(event).sum())
    return {'result': total, 'numpy-version': numpy.__version__}
|
43b992d956f1844fbb74650c66fbb43d5dd303fe
|
e79cb86744b9cc5d46912f2f9acdb5ffd434f745
|
/src/ifp/test/init_floorplan7.py
|
8a2f7a6910d566c3cee99b895be6c660e95088fe
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"MPL-2.0"
] |
permissive
|
The-OpenROAD-Project/OpenROAD
|
555cbb00ec250bb09b9e4f9a7d1454e7ac7a01ab
|
1f6ccc9066e7df4509ed391d87b01eadb4b3b197
|
refs/heads/master
| 2023-08-31T05:35:25.363354
| 2023-08-31T05:04:27
| 2023-08-31T05:04:27
| 218,110,222
| 979
| 461
|
BSD-3-Clause
| 2023-09-14T21:51:36
| 2019-10-28T17:48:14
|
Verilog
|
UTF-8
|
Python
| false
| false
| 743
|
py
|
init_floorplan7.py
|
# Regression test: initFloorplan is deliberately called twice; the second
# call (die2/core2) must fully replace the first floorplan, and the written
# DEF is diffed against the golden init_floorplan7.defok.
from openroad import Tech, Design
import helpers

tech = Tech()
tech.readLef("Nangate45/Nangate45.lef")
tech.readLiberty("Nangate45/Nangate45_typ.lib")

design = Design(tech)
design.readVerilog("reg1.v")
design.link("top")

# Rect arguments are (design, llx, lly, urx, ury) -- units per helpers.make_rect
# (presumably microns; confirm in helpers).
die1 = helpers.make_rect(design, 0, 0, 1000, 1000)
core1 = helpers.make_rect(design, 100, 100, 900, 900)
die2 = helpers.make_rect(design, 100, 100, 1100, 1100)
core2 = helpers.make_rect(design, 200, 200, 800, 800)

floorplan = design.getFloorplan()
floorplan.initFloorplan(die1, core1)
floorplan.initFloorplan(die2, core2)  # second call overrides the first

def_file = helpers.make_result_file("init_floorplan7.def")
design.writeDef(def_file)
helpers.diff_files('init_floorplan7.defok', def_file)
|
80d2edecebcd71fc6719b3d7392826ff7d4b655f
|
ec7591c3f478c43e76257aaa500d8f6a2e763d74
|
/stanza/models/ner/data.py
|
13f9f806a7d464b841820ed795c1c731aa87b885
|
[
"Apache-2.0"
] |
permissive
|
stanfordnlp/stanza
|
5cc3dbe70a96dd565639b7dae1efde6b4fa76985
|
c530c9af647d521262b56b717bcc38b0cfc5f1b8
|
refs/heads/main
| 2023-09-01T12:01:38.980322
| 2023-03-14T16:10:05
| 2023-03-14T16:10:05
| 104,854,615
| 4,281
| 599
|
NOASSERTION
| 2023-09-10T00:31:36
| 2017-09-26T08:00:56
|
Python
|
UTF-8
|
Python
| false
| false
| 8,760
|
py
|
data.py
|
import random
import logging
import torch
from stanza.models.common.bert_embedding import filter_data
from stanza.models.common.data import map_to_ids, get_long_tensor, sort_all
from stanza.models.common.vocab import PAD_ID, VOCAB_PREFIX
from stanza.models.pos.vocab import CharVocab, WordVocab
from stanza.models.ner.vocab import TagVocab, MultiVocab
from stanza.models.common.doc import *
from stanza.models.ner.utils import process_tags
logger = logging.getLogger('stanza')
class DataLoader:
    """Batched data loader for NER training/evaluation.

    Loads (word, tag) pairs from a stanza Document, builds (or reuses) the
    vocabularies, converts sentences to index form and chunks them into
    fixed-size batches.  Iterating yields fully padded/sorted tensors ready
    for the tagger model.
    """

    def __init__(self, doc, batch_size, args, pretrain=None, vocab=None, evaluation=False, preprocess_tags=True, bert_tokenizer=None):
        self.batch_size = batch_size
        self.args = args
        self.eval = evaluation
        # Training data is shuffled; evaluation keeps document order.
        self.shuffled = not self.eval
        self.doc = doc
        self.preprocess_tags = preprocess_tags

        data = self.load_doc(self.doc)

        # filter out the long sentences if bert is used
        if self.args.get('bert_model', False):
            data = filter_data(self.args['bert_model'], data, bert_tokenizer)

        # Keep the gold tags (post-filtering) for later scoring.
        self.tags = [[w[1] for w in sent] for sent in data]

        # handle vocab
        self.pretrain = pretrain
        if vocab is None:
            self.vocab = self.init_vocab(data)
        else:
            self.vocab = vocab

        # filter and sample data
        if args.get('sample_train', 1.0) < 1.0 and not self.eval:
            keep = int(args['sample_train'] * len(data))
            data = random.sample(data, keep)
            logger.debug("Subsample training set with rate {:g}".format(args['sample_train']))

        data = self.preprocess(data, self.vocab, args)
        # shuffle for training
        if self.shuffled:
            random.shuffle(data)
        self.num_examples = len(data)

        # chunk into batches
        self.data = self.chunk_batches(data)
        logger.debug("{} batches created.".format(len(self.data)))

    def init_vocab(self, data):
        """Build char/word/delta/tag vocabularies from the training data.

        Only valid for training: evaluation must reuse an existing vocab.
        """
        def from_model(model_filename):
            """ Try loading vocab from charLM model file. """
            state_dict = torch.load(model_filename, lambda storage, loc: storage)
            if 'vocab' in state_dict:
                return state_dict['vocab']
            if 'model' in state_dict and 'vocab' in state_dict['model']:
                return state_dict['model']['vocab']
            raise ValueError("Cannot find vocab in charLM model file %s" % model_filename)

        if self.eval:
            raise AssertionError("Vocab must exist for evaluation.")
        # Char vocab comes from the charLM checkpoint when one is used, so
        # that char ids line up with the pretrained language model.
        if self.args['charlm']:
            charvocab = CharVocab.load_state_dict(from_model(self.args['charlm_forward_file']))
        else:
            charvocab = CharVocab(data, self.args['shorthand'])
        wordvocab = self.pretrain.vocab
        tagvocab = TagVocab(data, self.args['shorthand'], idx=1)
        ignore = None
        if self.args['emb_finetune_known_only']:
            # Restrict the finetuned "delta" vocab to words NOT already in
            # the pretrained embedding (those are ignored here).
            if self.args['lowercase']:
                ignore = set([w[0] for sent in data for w in sent if w[0] in wordvocab or w[0].lower() in wordvocab])
            else:
                ignore = set([w[0] for sent in data for w in sent if w[0] in wordvocab])
            logger.debug("Ignoring %d in the delta vocab as they did not appear in the original embedding", len(ignore))
        deltavocab = WordVocab(data, self.args['shorthand'], cutoff=1, lower=self.args['lowercase'], ignore=ignore)
        logger.debug("Creating delta vocab of size %s", len(deltavocab))
        vocab = MultiVocab({'char': charvocab,
                            'word': wordvocab,
                            'delta': deltavocab,
                            'tag': tagvocab})
        return vocab

    def preprocess(self, data, vocab, args):
        """Convert each sentence to [words, per-word char ids, tag ids]."""
        processed = []
        if args.get('char_lowercase', False):  # handle character case
            char_case = lambda x: x.lower()
        else:
            char_case = lambda x: x
        for sent in data:
            processed_sent = [[w[0] for w in sent]]
            processed_sent += [[vocab['char'].map([char_case(x) for x in w[0]]) for w in sent]]
            processed_sent += [vocab['tag'].map([w[1] for w in sent])]
            processed.append(processed_sent)
        return processed

    def __len__(self):
        # Number of batches, not number of sentences.
        return len(self.data)

    def __getitem__(self, key):
        """ Get a batch with index. """
        if not isinstance(key, int):
            raise TypeError
        if key < 0 or key >= len(self.data):
            raise IndexError
        batch = self.data[key]
        batch_size = len(batch)
        batch = list(zip(*batch))
        assert len(batch) == 3  # words: List[List[int]], chars: List[List[List[int]]], tags: List[List[int]]

        # sort sentences by lens for easy RNN operations
        sentlens = [len(x) for x in batch[0]]
        batch, orig_idx = sort_all(batch, sentlens)
        sentlens = [len(x) for x in batch[0]]

        # sort chars by lens for easy char-LM operations
        chars_forward, chars_backward, charoffsets_forward, charoffsets_backward, charlens = self.process_chars(batch[1])
        chars_sorted, char_orig_idx = sort_all([chars_forward, chars_backward, charoffsets_forward, charoffsets_backward], charlens)
        chars_forward, chars_backward, charoffsets_forward, charoffsets_backward = chars_sorted
        charlens = [len(sent) for sent in chars_forward]

        # sort words by lens for easy char-RNN operations
        batch_words = [w for sent in batch[1] for w in sent]
        wordlens = [len(x) for x in batch_words]
        batch_words, word_orig_idx = sort_all([batch_words], wordlens)
        batch_words = batch_words[0]
        wordlens = [len(x) for x in batch_words]

        words = batch[0]
        wordchars = get_long_tensor(batch_words, len(wordlens))
        wordchars_mask = torch.eq(wordchars, PAD_ID)
        chars_forward = get_long_tensor(chars_forward, batch_size, pad_id=self.vocab['char'].unit2id(' '))
        chars_backward = get_long_tensor(chars_backward, batch_size, pad_id=self.vocab['char'].unit2id(' '))
        chars = torch.cat([chars_forward.unsqueeze(0), chars_backward.unsqueeze(0)])  # padded forward and backward char idx
        charoffsets = [charoffsets_forward, charoffsets_backward]  # idx for forward and backward lm to get word representation
        tags = get_long_tensor(batch[2], batch_size)
        return words, wordchars, wordchars_mask, chars, tags, orig_idx, word_orig_idx, char_orig_idx, sentlens, wordlens, charlens, charoffsets

    def __iter__(self):
        # Yield batches in order; see __getitem__ for the tuple layout.
        for i in range(self.__len__()):
            yield self.__getitem__(i)

    def load_doc(self, doc):
        """Extract (text, ner-tag) pairs per sentence from the Document."""
        data = doc.get([TEXT, NER], as_sentences=True, from_token=True)
        if self.preprocess_tags:  # preprocess tags
            data = process_tags(data, self.args.get('scheme', 'bio'))
        return data

    def process_chars(self, sents):
        """Build charLM inputs: per-sentence char id streams for the forward
        and backward language models plus the offsets at which each word's
        representation should be read out."""
        start_id, end_id = self.vocab['char'].unit2id('\n'), self.vocab['char'].unit2id(' ')  # special token
        start_offset, end_offset = 1, 1
        chars_forward, chars_backward, charoffsets_forward, charoffsets_backward = [], [], [], []
        # get char representation for each sentence
        for sent in sents:
            chars_forward_sent, chars_backward_sent, charoffsets_forward_sent, charoffsets_backward_sent = [start_id], [start_id], [], []
            # forward lm
            for word in sent:
                chars_forward_sent += word
                charoffsets_forward_sent = charoffsets_forward_sent + [len(chars_forward_sent)]  # add each token offset in the last for forward lm
                chars_forward_sent += [end_id]
            # backward lm
            for word in sent[::-1]:
                chars_backward_sent += word[::-1]
                charoffsets_backward_sent = [len(chars_backward_sent)] + charoffsets_backward_sent  # add each offset in the first for backward lm
                chars_backward_sent += [end_id]
            # store each sentence
            chars_forward.append(chars_forward_sent)
            chars_backward.append(chars_backward_sent)
            charoffsets_forward.append(charoffsets_forward_sent)
            charoffsets_backward.append(charoffsets_backward_sent)
        charlens = [len(sent) for sent in chars_forward]  # forward lm and backward lm should have the same lengths
        return chars_forward, chars_backward, charoffsets_forward, charoffsets_backward, charlens

    def reshuffle(self):
        """Flatten all batches, reshuffle the sentences and rebatch."""
        data = [y for x in self.data for y in x]
        random.shuffle(data)
        self.data = self.chunk_batches(data)

    def chunk_batches(self, data):
        # Fixed-size chunks; the final batch may be smaller.
        data = [data[i:i+self.batch_size] for i in range(0, len(data), self.batch_size)]
        return data
|
40b93b2b7e82b643bf1970c97313a9532c1cd199
|
77044a7d5beabe7dbdaed4cbf0add6f877f3bf3c
|
/python/image-content-search/src/imageData/helper/search.py
|
00982e2f8e3661974e72ccd97ad574691daf784e
|
[
"Apache-2.0"
] |
permissive
|
aws-samples/aws-cdk-examples
|
1153f27ccbcc3b647e0a3f608a69279ebff1b27c
|
63caf4f3deddc8b00c9d2ae69c6886180143c3ee
|
refs/heads/master
| 2023-09-04T04:49:37.077412
| 2023-09-01T20:48:26
| 2023-09-01T20:48:26
| 168,772,474
| 4,366
| 2,083
|
Apache-2.0
| 2023-09-11T07:57:00
| 2019-02-01T23:16:42
|
Python
|
UTF-8
|
Python
| false
| false
| 1,778
|
py
|
search.py
|
import boto3
import botocore
import os
import logging
import json
from helper import execute_statement, logger # type: ignore
# Shared botocore client config: region from the environment, SigV4 signing,
# and a standard retry policy (single attempt unless overridden).
aws_config = botocore.config.Config(
    region_name = os.getenv('REGION'),
    signature_version = 'v4',
    retries = {
        'max_attempts': int(os.getenv('DEFAULT_MAX_CALL_ATTEMPTS') or '1'),
        'mode': 'standard'
    }
)

# NOTE(review): this rebinds the `logger` imported from helper above with the
# root logger -- presumably intentional, but worth confirming.
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# This module looks up images in the database by label, translating
# non-English keywords to English first.
def search_label(label, country = None, language = None):
    """Return image ids tagged with `label` as an HTTP-style response dict.

    Non-English labels are first translated to English; the lookup itself is
    a parameterized query against the tags table.  `country` is currently
    unused.
    """
    if language and language != 'en':
        translated_label = translate(language, label)
        logger.info("Translated label {} ({}) to {} (en).".format(label, language, translated_label))
        label = translated_label

    statement = "SELECT image_id FROM tags WHERE label=:label"
    parameters = [{'name': 'label', 'value': {'stringValue': label.lower()}}]
    result = execute_statement(statement, parameters)
    logger.info(result)

    response = [{"id": item["stringValue"]} for record in result["records"] for item in record]

    return {
        'statusCode': 200,
        'headers': {'Content-Type': 'application/json; charset=UTF-8'},
        'body': json.dumps(response)
    }
def translate(language, word):
    """Translate `word` from `language` into English via Amazon Translate."""
    client = boto3.client(service_name='translate', config=aws_config)
    response = client.translate_text(Text=word, SourceLanguageCode=language, TargetLanguageCode="en")
    return response.get('TranslatedText')
def get_http_params(body):
    """Parse an application/x-www-form-urlencoded request body into a dict.

    Each pair is split on the FIRST '=' only, so values that themselves
    contain '=' (e.g. base64 padding) no longer raise ValueError.  Values
    are not percent-decoded; use urllib.parse.parse_qs if decoding is needed.
    """
    params = {}
    for param in body.split('&'):
        key, value = param.split('=', 1)
        params[key] = value
    return params
|
85ce45667d5b825f517f7a8c00772aa192146df4
|
5f32d51807618bccf199f3942f5d92a45367b9a9
|
/examples/html-py-ever/tests/run_all.py
|
551350105ef0f693586ffffeb6bd3cf771e748e3
|
[
"MIT"
] |
permissive
|
PyO3/setuptools-rust
|
89b1ad2bc01da47f8ed90854dd190da2822e9c41
|
322d5f8a642e66d3168025e864e68d84d8c115ea
|
refs/heads/main
| 2023-09-03T03:00:53.325718
| 2023-09-01T13:39:15
| 2023-09-01T13:39:15
| 84,393,574
| 508
| 98
|
MIT
| 2023-09-05T02:55:05
| 2017-03-09T03:24:55
|
Python
|
UTF-8
|
Python
| false
| false
| 1,819
|
py
|
run_all.py
|
#!/usr/bin/env python3
import os
from glob import glob
from time import perf_counter
from typing import Tuple
import html_py_ever
from bs4 import BeautifulSoup
# lxml is optional: when available, the benchmark also measures
# BeautifulSoup's lxml backend in addition to html.parser.
try:
    import lxml

    HAVE_LXML = True
except ImportError:
    HAVE_LXML = False
def rust(filename: str) -> Tuple[int, float, float]:
    """Parse `filename` with the Rust html_py_ever extension.

    Returns (link_count, parse_seconds, select_seconds) where the counts and
    timings cover parsing the file and selecting all links with an href.
    """
    t0 = perf_counter()
    doc = html_py_ever.parse_file(filename)
    t1 = perf_counter()
    links = doc.select("a[href]")
    t2 = perf_counter()
    return len(links), t1 - t0, t2 - t1
def python(filename: str, parser: str) -> Tuple[int, float, float]:
    """Parse `filename` with BeautifulSoup using the given backend parser.

    Returns (link_count, parse_seconds, select_seconds), mirroring rust().
    """
    t0 = perf_counter()
    with open(filename, encoding="utf8") as fp:
        soup = BeautifulSoup(fp, parser)
    t1 = perf_counter()
    links = soup.select("a[href]")
    t2 = perf_counter()
    return len(links), t1 - t0, t2 - t1
def main():
    """Benchmark the Rust parser against BeautifulSoup on every bundled HTML file.

    For each ``*.html`` next to this script, parse with both backends, check
    they agree on the number of ``a[href]`` links, and print the Rust baseline
    plus each Python backend's slowdown factor.
    """
    files_glob = os.path.abspath(os.path.join(os.path.dirname(__file__), "*.html"))
    for filename in glob(files_glob):
        count_rs, parse_rs, select_rs = rust(filename)
        count_py, parse_py, select_py = python(filename, "html.parser")
        assert count_rs == count_py
        # The original baseline line printed a garbled "(unknown)" label;
        # print the file under test and the Rust timing instead.
        print(f"{filename}: {count_rs} links, parse rs {parse_rs:6f}s")
        print(f"Parse py   {parse_py:6f}s {parse_py/parse_rs:6.3f}x")
        print(f"Select py  {select_py:6f}s {select_py/select_rs:6.3f}x")
        if HAVE_LXML:
            count_lxml, parse_lxml, select_lxml = python(filename, "lxml")
            assert count_rs == count_lxml
            print(f"Parse lxml  {parse_lxml:6f}s {parse_lxml/parse_rs:6.3f}x")
            print(f"Select lxml {select_lxml:6f}s {select_lxml/select_rs:6.3f}x")
if __name__ == "__main__":
main()
|
5ac46a2a36957a1ec34396fdaf8e0dcde7f96895
|
a63d907ad63ba6705420a6fb2788196d1bd3763c
|
/src/dataflow/jobnavi/jobnavi-api-python/jobnavi/HAProxy.py
|
2b9511910b777ec57703627398010bc4dece6ae1
|
[
"MIT"
] |
permissive
|
Tencent/bk-base
|
a38461072811667dc2880a13a5232004fe771a4b
|
6d483b4df67739b26cc8ecaa56c1d76ab46bd7a2
|
refs/heads/master
| 2022-07-30T04:24:53.370661
| 2022-04-02T10:30:55
| 2022-04-02T10:30:55
| 381,257,882
| 101
| 51
|
NOASSERTION
| 2022-04-02T10:30:56
| 2021-06-29T06:10:01
|
Python
|
UTF-8
|
Python
| false
| false
| 3,401
|
py
|
HAProxy.py
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import math
import time
import jobnavi.http_util as http_util
import jobnavi.settings as settings
from jobnavi.jobnavi_logging import get_logger
logger = get_logger()
class HAProxy(object):
    """Client-side failover proxy over a list of jobnavi scheduler URLs.

    Requests are sent to the currently active URL; when a request yields no
    content, the proxy rotates to the next URL and retries with a capped
    exponential backoff (2**attempt / 10 seconds, at most 60s).
    """

    def init(self, config):
        """Load scheduler addresses and the retry policy from *config*."""
        self.config = config
        self.max_retry = int(config.get(settings.JOBNAVI_HA_FAILOVER_RETRY, settings.JOBNAVI_HA_FAILOVER_RETRY_DEFAULT))
        url_string = str(config.get(settings.JOBNAVI_SCHEDULER_ADDRESS))
        if not url_string:
            raise Exception("Param " + settings.JOBNAVI_SCHEDULER_ADDRESS + " must config.")
        self.urls = url_string.split(",")
        self.active_url_index = 0

    def send_request(self, url, method, param, header, retry_times):
        """Send *url* to the active scheduler, failing over on empty replies.

        Returns the response content on success; returns None after all
        attempts (retry_times, or the configured max when falsy) fail.
        """
        if not self.config:
            raise Exception("HAProxy may not init.")
        max_retry_interval = 60
        attempts = retry_times if retry_times else self.max_retry
        for attempt in range(1, attempts + 1):
            target = self.urls[self.active_url_index] + url
            if "GET" == method:
                content = http_util.http_get(target)
            else:
                content = http_util.http_post(target, param, header)
            if content:
                return content
            logger.error("http request error.")
            # Rotate to the next scheduler URL (wrapping around).
            self.active_url_index = (self.active_url_index + 1) % len(self.urls)
            logger.info("Failover to " + self.urls[self.active_url_index])
            backoff = min(math.pow(2, attempt) / 10, max_retry_interval)
            logger.info("job retry after " + str(backoff) + "s....")
            time.sleep(backoff)
# Module-level singleton shared by every importer of this module.
ha_proxy = HAProxy()


def init_ha_proxy(config):
    """Initialise the shared HAProxy singleton with *config*."""
    ha_proxy.init(config)


def get_ha_proxy():
    """Return the shared HAProxy singleton (call init_ha_proxy first)."""
    return ha_proxy
|
870511ae9ed605922864c756fa83dd329876f36c
|
fa1ad2e2ac7e376fc7cb3b3a6e1bb88eed3e80be
|
/dts/airbyte/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_client.py
|
a3d3691e8e8c8fb73ca7031f94095d3411be8e93
|
[
"MIT",
"Elastic-2.0",
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
alldatacenter/alldata
|
7bc7713c9f1d56ad6b8e59ea03206d1073b7e047
|
8d5f9a2d49ab8f9e85ccf058cb02c2fda287afc6
|
refs/heads/master
| 2023-08-05T07:32:25.442740
| 2023-08-03T13:17:24
| 2023-08-03T13:17:24
| 213,321,771
| 774
| 250
|
Apache-2.0
| 2023-09-06T17:35:32
| 2019-10-07T07:36:18
| null |
UTF-8
|
Python
| false
| false
| 6,502
|
py
|
test_client.py
|
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
import json
import pendulum
import pytest
from airbyte_cdk.models import SyncMode
from facebook_business import FacebookAdsApi, FacebookSession
from facebook_business.exceptions import FacebookRequestError
from source_facebook_marketing.streams import AdAccount, AdCreatives, Campaigns
FB_API_VERSION = FacebookAdsApi.API_VERSION
@pytest.fixture(name="fb_call_rate_response")
def fb_call_rate_response_fixture():
error = {
"message": (
"(#80000) There have been too many calls from this ad-account. Wait a bit and try again. "
"For more info, please refer to https://developers.facebook.com/docs/graph-api/overview/rate-limiting."
),
"type": "OAuthException",
"code": 80000,
"error_subcode": 2446079,
"fbtrace_id": "this_is_fake_response",
}
headers = {"x-app-usage": json.dumps({"call_count": 28, "total_time": 25, "total_cputime": 25})}
return {
"json": {
"error": error,
},
"status_code": 400,
"headers": headers,
}
@pytest.fixture(name="fb_call_amount_data_response")
def fb_call_amount_data_response_fixture():
error = {"message": "Please reduce the amount of data you're asking for, then retry your request", "code": 1}
return {
"json": {
"error": error,
},
"status_code": 500,
}
class TestBackoff:
    """Retry/backoff behaviour of the Facebook Marketing streams.

    Each test registers canned HTTP responses with requests_mock — an error
    response first, then (usually) a success — and asserts the stream either
    recovers transparently or degrades in the documented way.
    """

    def test_limit_reached(self, mocker, requests_mock, api, fb_call_rate_response, account_id):
        """Error once, check that we retry and not fail"""
        # turn Campaigns into non batch mode to test non batch logic
        mocker.patch.object(Campaigns, "use_batch", new_callable=mocker.PropertyMock, return_value=False)
        campaign_responses = [
            fb_call_rate_response,
            {
                "json": {"data": [{"id": 1, "updated_time": "2020-09-25T00:00:00Z"}, {"id": 2, "updated_time": "2020-09-25T00:00:00Z"}]},
                "status_code": 200,
            },
        ]
        requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/campaigns", campaign_responses)
        requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/1/", [{"status_code": 200}])
        requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/2/", [{"status_code": 200}])
        stream = Campaigns(api=api, start_date=pendulum.now(), end_date=pendulum.now(), include_deleted=False)
        try:
            records = list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}))
            assert records
        except FacebookRequestError:
            pytest.fail("Call rate error has not being handled")

    def test_batch_limit_reached(self, requests_mock, api, fb_call_rate_response, account_id):
        """Error once, check that we retry and not fail"""
        responses = [
            fb_call_rate_response,
            {
                "json": {
                    "data": [
                        {
                            "id": "123",
                            "object_type": "SHARE",
                            "status": "ACTIVE",
                        },
                        {
                            "id": "1234",
                            "object_type": "SHARE",
                            "status": "ACTIVE",
                        },
                    ],
                    "status_code": 200,
                }
            },
        ]
        # Batched creative fetches go through a single POST endpoint.
        batch_responses = [
            fb_call_rate_response,
            {
                "json": [
                    {"body": json.dumps({"name": "creative 1"}), "code": 200, "headers": {}},
                    {"body": json.dumps({"name": "creative 2"}), "code": 200, "headers": {}},
                ]
            },
        ]
        requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/adcreatives", responses)
        requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/", responses)
        requests_mock.register_uri("POST", FacebookSession.GRAPH + f"/{FB_API_VERSION}/", batch_responses)
        stream = AdCreatives(api=api, include_deleted=False)
        records = list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}))
        assert records == [{"name": "creative 1"}, {"name": "creative 2"}]

    @pytest.mark.parametrize(
        "error_response",
        [
            {"json": {"error": {}}, "status_code": 500},
            {"json": {"error": {"code": 104}}},
            {"json": {"error": {"code": 2}}, "status_code": 500},
        ],
        ids=["server_error", "connection_reset_error", "temporary_oauth_error"],
    )
    def test_common_error_retry(self, error_response, requests_mock, api, account_id):
        """Error once, check that we retry and not fail"""
        account_data = {"id": 1, "updated_time": "2020-09-25T00:00:00Z", "name": "Some name"}
        responses = [
            error_response,
            {
                "json": account_data,
                "status_code": 200,
            },
        ]
        requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/me/business_users", json={"data": []})
        requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/", responses)
        requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/{account_data['id']}/", responses)
        stream = AdAccount(api=api)
        accounts = list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}))
        assert accounts == [account_data]

    def test_limit_error_retry(self, fb_call_amount_data_response, requests_mock, api, account_id):
        """Error every time, check limit parameter decreases by 2 times every new call"""
        res = requests_mock.register_uri(
            "GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/campaigns", [fb_call_amount_data_response]
        )
        stream = Campaigns(api=api, start_date=pendulum.now(), end_date=pendulum.now(), include_deleted=False, page_size=100)
        try:
            list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}))
        except FacebookRequestError:
            # Starting from page_size=100, each retry halves the request limit.
            assert [x.qs.get("limit")[0] for x in res.request_history] == ["100", "50", "25", "12", "6"]
|
86c9e046ddd120e19ab6174b18709b004bc96fb0
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/AlipayCommerceMedicalEcodeOpenQueryModel.py
|
d8004e01bb613eca57a10d2bbff74d3f9994b7b7
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,406
|
py
|
AlipayCommerceMedicalEcodeOpenQueryModel.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayCommerceMedicalEcodeOpenQueryModel(object):
    """Request model for the medical e-code open query API.

    Carries two optional string-like fields, ``medical_code`` and ``org_no``,
    and converts to/from the plain-dict wire format used by the SDK.
    """

    # The two serialisable field names, in wire order.
    _FIELDS = ('medical_code', 'org_no')

    def __init__(self):
        self._medical_code = None
        self._org_no = None

    @property
    def medical_code(self):
        """The medical e-code identifier (or None when unset)."""
        return self._medical_code

    @medical_code.setter
    def medical_code(self, value):
        self._medical_code = value

    @property
    def org_no(self):
        """The organisation number (or None when unset)."""
        return self._org_no

    @org_no.setter
    def org_no(self, value):
        self._org_no = value

    def to_alipay_dict(self):
        """Serialise truthy fields into a plain dict.

        Nested values exposing ``to_alipay_dict`` are serialised recursively;
        falsy fields (None, empty string, ...) are omitted entirely.
        """
        params = dict()
        for key in self._FIELDS:
            value = getattr(self, key)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[key] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from dict *d*; returns None for a falsy input."""
        if not d:
            return None
        o = AlipayCommerceMedicalEcodeOpenQueryModel()
        for key in AlipayCommerceMedicalEcodeOpenQueryModel._FIELDS:
            if key in d:
                setattr(o, key, d[key])
        return o
|
884a5a365f7e3e5313e8d2452e59f1933e753500
|
fbbe424559f64e9a94116a07eaaa555a01b0a7bb
|
/Keras_tensorflow/source/tensorflow/contrib/distributions/python/ops/chi2.py
|
210cbe95c8222fe50aec96cb8e66282723d8f5da
|
[
"MIT"
] |
permissive
|
ryfeus/lambda-packs
|
6544adb4dec19b8e71d75c24d8ed789b785b0369
|
cabf6e4f1970dc14302f87414f170de19944bac2
|
refs/heads/master
| 2022-12-07T16:18:52.475504
| 2022-11-29T13:35:35
| 2022-11-29T13:35:35
| 71,386,735
| 1,283
| 263
|
MIT
| 2022-11-26T05:02:14
| 2016-10-19T18:22:39
|
Python
|
UTF-8
|
Python
| false
| false
| 3,839
|
py
|
chi2.py
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Chi2 distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.distributions.python.ops import gamma
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
class Chi2(gamma.Gamma):
  """The Chi2 distribution with degrees of freedom df.

  The PDF of this distribution is:

  ```pdf(x) = (x^(df/2 - 1)e^(-x/2))/(2^(df/2)Gamma(df/2)), x > 0```

  Note that the Chi2 distribution is a special case of the Gamma distribution,
  with Chi2(df) = Gamma(df/2, 1/2).
  """

  def __init__(self,
               df,
               validate_args=False,
               allow_nan_stats=True,
               name="Chi2"):
    """Construct Chi2 distributions with parameter `df`.

    Args:
      df: Floating point tensor, the degrees of freedom of the
        distribution(s). `df` must contain only positive values.
      validate_args: `Boolean`, default `False`. Whether to assert that
        `df > 0`, and that `x > 0` in the methods `prob(x)` and `log_prob(x)`.
        If `validate_args` is `False` and the inputs are invalid, correct
        behavior is not guaranteed.
      allow_nan_stats: `Boolean`, default `True`. If `False`, raise an
        exception if a statistic (e.g. mean/mode/etc...) is undefined for any
        batch member. If `True`, batch members with valid parameters leading to
        undefined statistics will return NaN for this statistic.
      name: The name to prepend to all ops created by this distribution.
    """
    parameters = locals()
    parameters.pop("self")
    # Even though all stats of chi2 are defined for valid parameters, this is
    # not true in the parent class "gamma." therefore, passing
    # allow_nan_stats=True
    # through to the parent class results in unnecessary asserts.
    with ops.name_scope(name, values=[df]) as ns:
      self._df = ops.convert_to_tensor(df, name="df")
      # Chi2(df) == Gamma(alpha=df/2, beta=1/2); delegate everything to Gamma.
      super(Chi2, self).__init__(
          alpha=0.5 * self._df,
          beta=constant_op.constant(0.5, dtype=self._df.dtype),
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          name=ns)
    self._parameters = parameters

  @staticmethod
  def _param_shapes(sample_shape):
    # The single parameter `df` has the same shape as a sample.
    return {"df": ops.convert_to_tensor(sample_shape, dtype=dtypes.int32)}

  @property
  def df(self):
    """Degrees-of-freedom tensor passed at construction time."""
    return self._df
class Chi2WithAbsDf(Chi2):
  """Chi2 with parameter transform `df = floor(abs(df))`."""

  def __init__(self,
               df,
               validate_args=False,
               allow_nan_stats=True,
               name="Chi2WithAbsDf"):
    """Construct a Chi2 distribution whose `df` is `floor(abs(df))`.

    Arguments mirror `Chi2.__init__`; `df` may contain negative or
    non-integral values, which are mapped through `floor(abs(df))`
    before being forwarded to the `Chi2` constructor.
    """
    parameters = locals()
    parameters.pop("self")
    with ops.name_scope(name, values=[df]) as ns:
      super(Chi2WithAbsDf, self).__init__(
          df=math_ops.floor(math_ops.abs(df, name="abs_df"),
                            name="floor_abs_df"),
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          name=ns)
    self._parameters = parameters
|
d3cb5d4adf5d0bb3bdbbc7b322db4d67f0b2a9c9
|
5ca308ba58f66e2bd5bec49c01a819c700a05527
|
/workspace/data_collection/data_collection.py
|
77bfb693485c447466a322bb2cf053aac8957b55
|
[] |
no_license
|
wfnian/posture_recognition
|
b1a3419695bd0ea57b9fb4b48a7024b6b754c3f4
|
95809a410ca5d7a956d9206b83c243f801354985
|
refs/heads/master
| 2022-05-31T10:35:52.800719
| 2022-05-08T08:09:27
| 2022-05-08T08:09:27
| 163,076,649
| 117
| 33
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,894
|
py
|
data_collection.py
|
import math
import os
import sys
import cv2
from PyQt5.QtCore import QTimer
from PyQt5.QtGui import QPixmap, QImage
from PyQt5.QtWidgets import QApplication, QMainWindow
from data_collection_window import *
picSN = 223  # running serial number used to name captured picture files

# ==================== import openpose =======================================
dir_path = os.path.dirname(os.path.realpath(__file__))
try:
    # NOTE: the Python bindings live under the openpose *build* directory,
    # not the repository root. If this import keeps failing, add the absolute
    # path of build/python (e.g. F:\\OPENPOSE\\openpose\\build\\python\\openpose\\Release)
    # to the PATH environment variable, or append it to sys.path as below.
    sys.path.append('/home/wfnian/OPENPOSE/openpose/build/python')
    from openpose import pyopenpose as op
    # Either the sys.path line above or the environment-variable route works;
    # only one of the two is required.
except ImportError as e:
    print('Did you enable `BUILD_PYTHON`')
    raise e
# ============================= openpose parameters ===========================
# See flags.hpp in the openpose sources for the full list of options.
params = dict()
params["camera"] = 1
params["model_folder"] = "/home/wfnian/OPENPOSE/openpose/models"
# Adjust the model path above to match the local installation.
params["number_people_max"] = 1  # detect at most one person
params["camera_resolution"] = "640x360"
params["disable_blending"] = True
params["render_threshold"] = 0.001
opWrapper = op.WrapperPython()
opWrapper.configure(params)
opWrapper.start()
# ==============================================================================
class Video:
    """Thin wrapper around a cv2 capture device for display in Qt.

    Keeps the most recently grabbed RGB frame so it can be converted to a
    QPixmap for a label widget.
    """

    def __init__(self, capture):
        # capture: an opened cv2.VideoCapture instance
        self.capture = capture

    def captureFrame(self):
        """
        capture frame and return captured frame
        """
        # NOTE(review): the `ret` success flag is ignored; on failure the
        # returned frame is None and callers must cope.
        ret, readFrame = self.capture.read()
        return readFrame

    def captureNextFrame(self):
        """
        capture frame and reverse RBG BGR and return opencv image
        """
        ret, readFrame = self.capture.read()
        if ret:
            # OpenCV delivers BGR; Qt expects RGB.
            self.currentFrame = cv2.cvtColor(readFrame, cv2.COLOR_BGR2RGB)

    def convertFrame(self):
        """
        converts frame to format suitable for QtGui
        """
        try:
            height, width = self.currentFrame.shape[:2]
            img = QImage(self.currentFrame, width, height, QtGui.QImage.Format_RGB888)
            img = QPixmap.fromImage(img)
            self.previousFrame = self.currentFrame
            return img
        except:
            # Bare except keeps the UI alive when no frame was grabbed yet
            # (currentFrame unset) or conversion fails; returns None instead.
            print("Convert error")
            return None
class mWindow(QMainWindow, Ui_MainWindow):
    """Main window: shows the live camera feed, captures labelled stills,
    runs openpose on them and appends bone features to the dataset file."""

    def __init__(self):
        super(mWindow, self).__init__()
        self.setupUi(self)
        self._timer1 = QTimer(self)
        self._timer1.timeout.connect(self.showCapture)
        self.video = Video(cv2.VideoCapture(0))
        self._timer1.start(30)  # refresh interval in milliseconds
        self.pushButton.clicked.connect(self.savePic)
        self.picPaths = "0"

    def showCapture(self):
        """Timer slot: grab the next camera frame and display it in `label`."""
        try:
            self.video.captureNextFrame()
            self.label.setPixmap(self.video.convertFrame())
        except TypeError:
            print("No frame")

    def savePic(self):
        """Button slot: snapshot the camera, save the still under a new
        serial number + pose label, then run pose processing on it."""
        self.video.captureNextFrame()
        frame = self.video.convertFrame()
        self.label_2.setPixmap(frame)
        # self.label_2.setScaledContents(False)  # scale picture to fit widget
        self.capturedFrame = self.video.captureFrame()
        pose = self.lineEdit.text()
        global picSN
        picSN += 1
        pictureName = str(picSN) + '_' + pose + ".jpg"
        self.picPaths = "../dataset/pic_background/" + pictureName
        cv2.imwrite(self.picPaths, self.capturedFrame)
        print('captured')
        try:
            self.processPic()
        except:
            print("线程错误")

    def processPic(self):
        """Run openpose on the last saved picture, store the rendered image
        and append distance/angle features plus the label to the dataset."""
        # ============================= run openpose ==========================
        datum = op.Datum()
        datum.cvInputData = cv2.imread(self.picPaths)  # input image
        opWrapper.emplaceAndPop(op.VectorDatum([datum]))  # output
        keyPoints = datum.poseKeypoints.tolist()
        dstPicPath = "../dataset/marked_pic/p_" + self.picPaths.split('/')[-1]  # rendered image
        cv2.imwrite(dstPicPath, datum.cvOutputData)
        # ============================= append bone features ==================
        with open("../dataset/bone_dataSet.data", "a+") as dataSet:
            dataSet.writelines(
                str(self.pointDistance(keyPoints[0]) + self.pointAngle(keyPoints[0]) + [int(self.lineEdit.text())]))
            dataSet.write("\n")
        # ============================= show skeleton image ===================
        bone_img = datum.cvOutputData
        height, width, channel = bone_img.shape
        pixmap = QPixmap.fromImage(QImage(
            bone_img.data, width, height, 3 * width, QImage.Format_RGB888).rgbSwapped())
        self.label_3.setPixmap(pixmap)
        # ============================= shown in label_3 ======================

    def pointDistance(self, keyPoint):
        """Return 15 squared euclidean distances between selected pairs of
        openpose keypoints for one person.

        :param keyPoint: one person's keypoint list ([x, y, score] per joint)
        :return: list of 15 squared distances
        """
        distance0 = (keyPoint[4][0] - keyPoint[9][0]) ** 2 + \
                    (keyPoint[4][1] - keyPoint[9][1]) ** 2
        distance1 = (keyPoint[7][0] - keyPoint[12][0]) ** 2 + \
                    (keyPoint[7][1] - keyPoint[12][1]) ** 2
        distance2 = (keyPoint[2][0] - keyPoint[4][0]) ** 2 + \
                    (keyPoint[2][1] - keyPoint[4][1]) ** 2
        distance3 = (keyPoint[5][0] - keyPoint[7][0]) ** 2 + \
                    (keyPoint[5][1] - keyPoint[7][1]) ** 2
        distance4 = (keyPoint[0][0] - keyPoint[4][0]) ** 2 + \
                    (keyPoint[0][1] - keyPoint[4][1]) ** 2
        distance5 = (keyPoint[0][0] - keyPoint[7][0]) ** 2 + \
                    (keyPoint[0][1] - keyPoint[7][1]) ** 2
        distance6 = (keyPoint[4][0] - keyPoint[10][0]) ** 2 + \
                    (keyPoint[4][1] - keyPoint[10][1]) ** 2
        distance7 = (keyPoint[7][0] - keyPoint[13][0]) ** 2 + \
                    (keyPoint[7][1] - keyPoint[13][1]) ** 2
        distance8 = (keyPoint[4][0] - keyPoint[7][0]) ** 2 + \
                    (keyPoint[4][1] - keyPoint[7][1]) ** 2
        distance9 = (keyPoint[11][0] - keyPoint[14][0]) ** 2 + \
                    (keyPoint[11][1] - keyPoint[14][1]) ** 2
        distance10 = (keyPoint[10][0] - keyPoint[13][0]
                      ) ** 2 + (keyPoint[10][1] - keyPoint[13][1]) ** 2
        distance11 = (keyPoint[6][0] - keyPoint[10][0]
                      ) ** 2 + (keyPoint[6][1] - keyPoint[10][1]) ** 2
        distance12 = (keyPoint[3][0] - keyPoint[13][0]
                      ) ** 2 + (keyPoint[3][1] - keyPoint[13][1]) ** 2
        distance13 = (keyPoint[4][0] - keyPoint[23][0]
                      ) ** 2 + (keyPoint[4][1] - keyPoint[23][1]) ** 2
        distance14 = (keyPoint[7][0] - keyPoint[20][0]
                      ) ** 2 + (keyPoint[7][1] - keyPoint[20][1]) ** 2
        return [distance0, distance1, distance2, distance3, distance4, distance5, distance6, distance7,
                distance8, distance9, distance10, distance11, distance12, distance13, distance14]

    def pointAngle(self, keyPoint):
        """Return 15 cosine values for angles at selected keypoint triples
        (vertex is the middle joint of each triple)."""
        angle0 = self.myAngle(keyPoint[2], keyPoint[3], keyPoint[4])
        angle1 = self.myAngle(keyPoint[5], keyPoint[6], keyPoint[7])
        angle2 = self.myAngle(keyPoint[9], keyPoint[10], keyPoint[11])
        angle3 = self.myAngle(keyPoint[12], keyPoint[13], keyPoint[14])
        angle4 = self.myAngle(keyPoint[3], keyPoint[2], keyPoint[1])
        angle5 = self.myAngle(keyPoint[6], keyPoint[5], keyPoint[1])
        angle6 = self.myAngle(keyPoint[10], keyPoint[8], keyPoint[13])
        angle7 = self.myAngle(keyPoint[7], keyPoint[12], keyPoint[13])
        angle8 = self.myAngle(keyPoint[4], keyPoint[9], keyPoint[10])
        angle9 = self.myAngle(keyPoint[4], keyPoint[0], keyPoint[7])
        angle10 = self.myAngle(keyPoint[4], keyPoint[8], keyPoint[7])
        angle11 = self.myAngle(keyPoint[1], keyPoint[8], keyPoint[13])
        angle12 = self.myAngle(keyPoint[1], keyPoint[8], keyPoint[10])
        angle13 = self.myAngle(keyPoint[4], keyPoint[1], keyPoint[8])
        angle14 = self.myAngle(keyPoint[7], keyPoint[1], keyPoint[8])
        return [angle0, angle1, angle2, angle3, angle4, angle5, angle6, angle7,
                angle8, angle9, angle10, angle11, angle12, angle13, angle14]

    def myAngle(self, A, B, C):
        """Cosine of the angle at vertex B of triangle ABC (law of cosines).

        Returns 0 when the triangle is degenerate (a side length of zero).
        """
        c = math.sqrt((A[0] - B[0]) ** 2 + (A[1] - B[1]) ** 2)
        a = math.sqrt((B[0] - C[0]) ** 2 + (B[1] - C[1]) ** 2)
        b = math.sqrt((A[0] - C[0]) ** 2 + (A[1] - C[1]) ** 2)
        if 2 * a * c != 0:
            return (a ** 2 + c ** 2 - b ** 2) / (2 * a * c)
        return 0
if __name__ == '__main__':
app = QApplication(sys.argv)
mwin = mWindow()
mwin.show()
sys.exit(app.exec_())
|
23445878cb86aeac2918888434d65aa641ca53d1
|
529e713a78e82de2ae5d44cfb8ef209e0894d72a
|
/docker-continuous-integration/web/test/conftest.py
|
4738e88c93763511598c8ee5a024d2bdd1407477
|
[
"MIT"
] |
permissive
|
realpython/materials
|
cd2f548276be2c82f134ca03eadb1cd279e0f26e
|
d2d62756d3854f54a12a767f2bf9470486c0ceef
|
refs/heads/master
| 2023-09-05T22:12:29.806738
| 2023-08-31T20:56:28
| 2023-08-31T20:56:28
| 132,374,697
| 4,678
| 6,482
|
MIT
| 2023-09-12T22:22:06
| 2018-05-06T20:46:18
|
HTML
|
UTF-8
|
Python
| false
| false
| 604
|
py
|
conftest.py
|
import pytest
import redis
from page_tracker.app import app
def pytest_addoption(parser):
    """Register --flask-url / --redis-url command-line options for the suite."""
    parser.addoption("--flask-url")
    parser.addoption("--redis-url")
@pytest.fixture(scope="session")
def flask_url(request):
return request.config.getoption("--flask-url")
@pytest.fixture(scope="session")
def redis_url(request):
return request.config.getoption("--redis-url")
@pytest.fixture
def http_client():
    """A fresh Flask test client for the page_tracker app (one per test)."""
    return app.test_client()
@pytest.fixture(scope="module")
def redis_client(redis_url):
if redis_url:
return redis.Redis.from_url(redis_url)
return redis.Redis()
|
5256100be937e7176993160e411739ae83f5ac46
|
36a094a44450d1353e9dfc8242a54e2bb70bb9b5
|
/examples/extensions/extension_model.py
|
b4078cd657251ac233cbb58eab58fdde2c05035a
|
[
"Apache-2.0"
] |
permissive
|
zyfra/ebonite
|
52843ce847a3fd28e4ba8ab64d986dcfb23671c0
|
b01b662c43709d152940f488574d78ff25f89ecf
|
refs/heads/master
| 2022-11-29T21:20:02.358797
| 2020-10-19T12:22:49
| 2020-10-19T12:22:49
| 221,721,146
| 275
| 18
|
Apache-2.0
| 2022-11-21T22:44:02
| 2019-11-14T14:49:47
|
Python
|
UTF-8
|
Python
| false
| false
| 896
|
py
|
extension_model.py
|
"""This module shows how to load extensions from local code"""
import ebonite
def main():
    """End-to-end demo: load a local ebonite extension, build a model with
    it, package the model into a docker image and run a container."""
    # load extension
    # you just use plain module name, if it's installed from pip
    # or, you can just directly import your classes
    # to automatically load extension on startup, set EBONITE_EXTENSIONS env variable
    ebonite.load_extensions('myext.extension_source')

    # set up client
    ebnt = ebonite.Ebonite.local(clear=True)

    # create a model using myext extension
    model = ebnt.create_model('my_extended_model', 'model', 1)

    # your extension code will be inside docker image in form of files if you have local files
    # or requirement if you installed it from pip
    image = ebnt.create_image(model, 'local_ext_model', builder_args={'force_overwrite': True})
    ebnt.create_instance(image, 'local_ext_model').run(detach=False)
if __name__ == '__main__':
main()
|
8e112852eeed190a1bf14239724966049d35a1bf
|
71acb7214efd91c0d327f6d8958e1798eadb4401
|
/locations/spiders/the_good_guys_au.py
|
2aa6bc6fa7874219572cd2b608b9d4569f4bffb2
|
[
"CC0-1.0",
"MIT"
] |
permissive
|
alltheplaces/alltheplaces
|
21b9f8b4ace1352e52ae7b8f8825a930d2cb033e
|
1bcbb55cfcf06f2c714465570711f6e83f205c22
|
refs/heads/master
| 2023-08-30T19:45:35.098658
| 2023-08-30T17:51:54
| 2023-08-30T17:51:54
| 61,166,935
| 453
| 176
|
NOASSERTION
| 2023-09-14T17:16:40
| 2016-06-15T01:09:18
|
Python
|
UTF-8
|
Python
| false
| false
| 1,772
|
py
|
the_good_guys_au.py
|
import chompjs
import scrapy
from locations.hours import DAYS_FULL
from locations.structured_data_spider import StructuredDataSpider
class TheGoodGuysAUSpider(StructuredDataSpider):
    """Scrapes The Good Guys (AU) store pages via their embedded linked data."""

    name = "the_good_guys_au"
    item_attributes = {"brand": "The Good Guys", "brand_wikidata": "Q7737217"}
    allowed_domains = ["www.thegoodguys.com.au"]
    start_urls = ["https://www.thegoodguys.com.au/store-locator"]
    time_format = "%I:%M%p"

    def parse(self, response):
        # The store locator page embeds a JS object listing every location;
        # follow each store URL and let StructuredDataSpider parse it.
        data_json = chompjs.parse_js_object(response.xpath('//div[@id="allStoreJson"]/text()').extract_first())
        for store in data_json["locations"]:
            yield scrapy.Request(store["url"], self.parse_sd)

    def pre_process_data(self, ld_data, **kwargs):
        # Linked data on the page deviates from specifications and
        # therefore needs correcting prior to being parsed.
        # "geo" arrives as a "lat, lon" string; rebuild a GeoCoordinates dict.
        coordinates = "".join(ld_data.pop("geo").split())
        ld_data["geo"] = {
            "@type": "GeoCoordinates",
            "latitude": coordinates.split(",")[0],
            "longitude": coordinates.split(",")[1],
        }
        # The key is wrongly capitalised upstream, and one entry uses the
        # literal day name "Today"; replace it with whichever weekday is
        # missing from the specification list.
        oh_spec = ld_data.pop("OpeningHoursSpecification", [])
        days_to_find = DAYS_FULL.copy()
        for day in oh_spec:
            day_name = day["dayOfWeek"].replace("http://schema.org/", "")
            if day_name in DAYS_FULL:
                days_to_find.remove(day_name)
        for day in oh_spec:
            if day["dayOfWeek"].replace("http://schema.org/", "") == "Today":
                day["dayOfWeek"] = "http://schema.org/" + days_to_find[0]
        ld_data["openingHoursSpecification"] = oh_spec

    def post_process_item(self, item, response, ld_data, **kwargs):
        # The structured data carries unusable facebook/image values; drop them.
        item.pop("facebook")
        item.pop("image")
        yield item
|
a37b3326b9e931c4ceb4577539c57061118b11f9
|
88ae8695987ada722184307301e221e1ba3cc2fa
|
/tools/gn/infra/recipe_modules/target/api.py
|
850c104e9baedbee07d25c55a52453120024ea24
|
[
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
iridium-browser/iridium-browser
|
71d9c5ff76e014e6900b825f67389ab0ccd01329
|
5ee297f53dc7f8e70183031cff62f37b0f19d25f
|
refs/heads/master
| 2023-08-03T16:44:16.844552
| 2023-07-20T15:17:00
| 2023-07-23T16:09:30
| 220,016,632
| 341
| 40
|
BSD-3-Clause
| 2021-08-13T13:54:45
| 2019-11-06T14:32:31
| null |
UTF-8
|
Python
| false
| false
| 2,291
|
py
|
api.py
|
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine import recipe_api
# Maps "<os>-<arch>" platform names to compiler target triples.
PLATFORM_TO_TRIPLE = {
    'fuchsia-amd64': 'x86_64-fuchsia',
    'fuchsia-arm64': 'aarch64-fuchsia',
    'linux-amd64': 'x86_64-linux-gnu',
    'linux-arm64': 'aarch64-linux-gnu',
    'mac-amd64': 'x86_64-apple-darwin',
    'mac-arm64': 'arm64-apple-darwin',
}
# NOTE(review): under Python 3 `.keys()` is a view, not a list — confirm
# callers only iterate / test membership.
PLATFORMS = PLATFORM_TO_TRIPLE.keys()
class Target(object):
  """A build target platform, identified by an (os, arch) pair."""

  def __init__(self, api, os, arch):
    self.m = api
    self._os = os
    self._arch = arch

  @property
  def is_win(self):
    """Returns True iff the target platform is Windows."""
    return self._os == 'windows'

  @property
  def is_mac(self):
    """Returns True iff the target platform is macOS."""
    return self._os == 'mac'

  @property
  def is_linux(self):
    """Returns True iff the target platform is Linux."""
    return self._os == 'linux'

  @property
  def is_host(self):
    """Returns True iff this target describes the host platform."""
    return self == self.m.host

  @property
  def os(self):
    """The target OS name: one of 'windows', 'mac' or 'linux'."""
    return self._os

  @property
  def arch(self):
    """The target CPU architecture."""
    return self._arch

  @property
  def platform(self):
    """The '<os>-<arch>' platform string."""
    return '{}-{}'.format(self._os, self._arch)

  @property
  def triple(self):
    """The compiler target triple for this platform."""
    return PLATFORM_TO_TRIPLE[self.platform]

  def __str__(self):
    return self.platform

  def __eq__(self, other):
    return (isinstance(other, Target)
            and self._os == other._os
            and self._arch == other._arch)

  def __ne__(self, other):
    return not self.__eq__(other)
class TargetApi(recipe_api.RecipeApi):
  """Recipe API for constructing Target objects."""

  def __call__(self, platform):
    """Parse an '<os>-<arch>' platform string into a Target."""
    return Target(self, *platform.split('-', 2))

  @property
  def host(self):
    """The Target describing the machine this recipe runs on."""
    # platform.name reports 'win'; Target expects 'windows'. The arch name
    # is derived from the (cpu family, bitness) pair.
    return Target(self, self.m.platform.name.replace('win', 'windows'), {
        'intel': {
            32: '386',
            64: 'amd64',
        },
        'arm': {
            32: 'armv6',
            64: 'arm64',
        },
    }[self.m.platform.arch][self.m.platform.bits])
|
4e3a167aacb663e71491ceae48b971be0d583d52
|
704976ea552111c6a5af9cd7cb62b9d9abaf3996
|
/pypy/module/pypyjit/test_pypy_c/test_call.py
|
42981add486f6f89da04234c35a83fc9686d3a6a
|
[
"BSD-3-Clause"
] |
permissive
|
mesalock-linux/mesapy
|
4f02c5819ce7f2f6e249d34840f1aa097577645d
|
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
|
refs/heads/mesapy2.7
| 2023-08-16T21:33:02.239581
| 2019-08-13T10:29:43
| 2019-08-13T18:06:45
| 136,080,721
| 396
| 33
|
NOASSERTION
| 2020-04-01T03:05:18
| 2018-06-04T20:45:17
|
Python
|
UTF-8
|
Python
| false
| false
| 21,830
|
py
|
test_call.py
|
import py
from pypy.module.pypyjit.test_pypy_c.test_00_model import BaseTestPyPyC
from pypy.module.pypyjit.test_pypy_c.model import OpMatcher
class TestCall(BaseTestPyPyC):
    def test_recursive_call(self):
        """A recursive helper aborted as 'trace too long' must end up compiled
        via call_assembler rather than being inlined."""
        def fn():
            def rec(n):
                if n == 0:
                    return 0
                return 1 + rec(n-1)
            #
            # this loop is traced and then aborted, because the trace is too
            # long. But then "rec" is marked as "don't inline". Since we
            # already traced function from the start (because of number),
            # now we can inline it as call assembler
            i = 0
            j = 0
            while i < 25:
                i += 1
                j += rec(100) # ID: call_rec
            return j
        #
        # NB. the parameters below are a bit ad-hoc. After 16 iterations,
        # the we trace from the "while" and reach a "trace too long". Then
        # in the next execution, we trace the "rec" function from start;
        # that's "functrace" below. Then after one or two extra iterations
        # we try again from "while", and this time we succeed.
        log = self.run(fn, [], threshold=20)
        functrace, loop = log.loops_by_filename(self.filepath)
        # The recursive call site must compile to a call_assembler_r op.
        assert loop.match_by_id('call_rec', """
            ...
            p53 = call_assembler_r(..., descr=...)
            guard_not_forced(descr=...)
            keepalive(...)
            guard_no_exception(descr=...)
            ...
        """)
def test_fib(self):
def fib(n):
if n == 0 or n == 1:
return 1
return fib(n - 1) + fib(n - 2) # ID: call_rec
log = self.run(fib, [7], function_threshold=15)
loop, = log.loops_by_filename(self.filepath, is_entry_bridge='*')
#assert loop.match_by_id('call_rec', '''
#...
#p1 = call_assembler(..., descr=...)
#...
#''')
def test_simple_call(self):
src = """
OFFSET = 0
def f(i):
return i + 1 + OFFSET # ID: add
def main(n):
i = 0
while i < n+OFFSET: # ID: cond
i = f(f(i)) # ID: call
a = 0
return i
"""
log = self.run(src, [1000])
assert log.result == 1000
# first, we test what is inside the entry bridge
# -----------------------------------------------
entry_bridge, = log.loops_by_id('call', is_entry_bridge=True)
# LOAD_GLOBAL of OFFSET
ops = entry_bridge.ops_by_id('cond', opcode='LOAD_GLOBAL')
assert log.opnames(ops) == ["guard_value",
"guard_not_invalidated"]
ops = entry_bridge.ops_by_id('add', opcode='LOAD_GLOBAL')
assert log.opnames(ops) == []
#
ops = entry_bridge.ops_by_id('call', opcode='LOAD_GLOBAL')
assert log.opnames(ops) == []
#
assert entry_bridge.match_by_id('call', """
p38 = call_r(ConstClass(_ll_1_threadlocalref_get__Ptr_GcStruct_objectLlT_Signed), #, descr=<Callr . i EF=1 OS=5>)
p39 = getfield_gc_r(p38, descr=<FieldP pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref .*>)
i40 = force_token()
p41 = getfield_gc_r(p38, descr=<FieldP pypy.interpreter.executioncontext.ExecutionContext.inst_w_tracefunc .*>)
guard_value(p41, ConstPtr(ptr42), descr=...)
i42 = getfield_gc_i(p38, descr=<FieldU pypy.interpreter.executioncontext.ExecutionContext.inst_profilefunc .*>)
i43 = int_is_zero(i42)
guard_true(i43, descr=...)
i50 = force_token()
""")
#
# then, we test the actual loop
# -----------------------------
loop, = log.loops_by_id('call')
assert loop.match("""
guard_not_invalidated(descr=...)
i9 = int_lt(i5, i6)
guard_true(i9, descr=...)
i10 = force_token()
i12 = int_add(i5, 1)
i13 = force_token()
i15 = int_add_ovf(i12, 1)
guard_no_overflow(descr=...)
--TICK--
jump(..., descr=...)
""")
def test_method_call(self):
def fn(n):
class A(object):
def __init__(self, a):
self.a = a
def f(self, i):
return self.a + i
i = 0
a = A(1)
while i < n:
x = a.f(i) # ID: meth1
i = a.f(x) # ID: meth2
return i
#
log = self.run(fn, [1000])
assert log.result == 1000
#
# first, we test the entry bridge
# -------------------------------
entry_bridge, = log.loops_by_filename(self.filepath, is_entry_bridge=True)
ops = entry_bridge.ops_by_id('meth1', opcode='LOOKUP_METHOD')
assert log.opnames(ops) == ['guard_value', 'getfield_gc_r',
'guard_value',
'guard_not_invalidated']
# the second LOOKUP_METHOD is folded away
assert list(entry_bridge.ops_by_id('meth2', opcode='LOOKUP_METHOD')) == []
#
# then, the actual loop
# ----------------------
loop, = log.loops_by_filename(self.filepath)
assert loop.match("""
i15 = int_lt(i6, i9)
guard_true(i15, descr=...)
guard_not_invalidated(descr=...)
i16 = force_token()
i17 = int_add_ovf(i10, i6)
guard_no_overflow(descr=...)
i18 = force_token()
i19 = int_add_ovf(i10, i17)
guard_no_overflow(descr=...)
--TICK--
jump(..., descr=...)
""")
def test_static_classmethod_call(self):
def fn(n):
class A(object):
@classmethod
def f(cls, i):
return i + (cls is A) + 1
@staticmethod
def g(i):
return i - 1
#
i = 0
a = A()
while i < n:
x = a.f(i)
i = a.g(x)
return i
#
log = self.run(fn, [1000])
assert log.result == 1000
loop, = log.loops_by_filename(self.filepath)
assert loop.match("""
i14 = int_lt(i6, i9)
guard_true(i14, descr=...)
guard_not_invalidated(descr=...)
i15 = force_token()
i17 = int_add_ovf(i8, 1)
guard_no_overflow(descr=...)
i18 = force_token()
--TICK--
jump(..., descr=...)
""")
def test_default_and_kw(self):
def main(n):
def f(i, j=1):
return i + j
#
i = 0
while i < n:
i = f(f(i), j=1) # ID: call
a = 0
return i
#
log = self.run(main, [1000])
assert log.result == 1000
loop, = log.loops_by_id('call')
assert loop.match_by_id('call', """
guard_not_invalidated?
i14 = force_token()
i16 = force_token()
""")
def test_kwargs_empty(self):
def main(x):
def g(**args):
return len(args) + 1
#
s = 0
d = {}
i = 0
while i < x:
s += g(**d) # ID: call
i += 1
return s
#
log = self.run(main, [1000])
assert log.result == 1000
loop, = log.loops_by_id('call')
ops = log.opnames(loop.ops_by_id('call'))
guards = [ops for ops in ops if ops.startswith('guard')]
assert guards == ["guard_not_invalidated", "guard_no_overflow"]
def test_kwargs(self):
# this is not a very precise test, could be improved
def main(x):
def g(**args):
return len(args)
#
s = 0
d = {"a": 1}
i = 0
while i < x:
s += g(**d) # ID: call
d[str(i)] = i
if i % 100 == 99:
d = {"a": 1}
i += 1
return s
#
log = self.run(main, [1000])
assert log.result == 50500
loop, = log.loops_by_id('call')
print loop.ops_by_id('call')
ops = log.opnames(loop.ops_by_id('call'))
guards = [ops for ops in ops if ops.startswith('guard')]
print guards
assert len(guards) <= 20
def test_stararg_virtual(self):
def main(x):
def g(*args):
return len(args)
def h(a, b, c):
return c
#
s = 0
i = 0
while i < x:
l = [i, x, 2]
s += g(*l) # ID: g1
s += h(*l) # ID: h1
s += g(i, x, 2) # ID: g2
a = 0
i += 1
i = 0
while i < x:
l = [x, 2]
g(*l)
s += g(i, *l) # ID: g3
s += h(i, *l) # ID: h2
a = 0
i += 1
return s
#
log = self.run(main, [1000])
assert log.result == 13000
loop0, = log.loops_by_id('g1')
assert loop0.match_by_id('g1', """
guard_not_invalidated?
i20 = force_token()
i22 = int_add_ovf(i8, 3)
guard_no_overflow(descr=...)
""")
assert loop0.match_by_id('h1', """
i20 = force_token()
i22 = int_add_ovf(i8, 2)
guard_no_overflow(descr=...)
""")
assert loop0.match_by_id('g2', """
i27 = force_token()
i29 = int_add_ovf(i26, 3)
guard_no_overflow(descr=...)
""")
#
loop1, = log.loops_by_id('g3')
assert loop1.match_by_id('g3', """
i21 = force_token()
i23 = int_add_ovf(i9, 3)
guard_no_overflow(descr=...)
""")
assert loop1.match_by_id('h2', """
i25 = force_token()
i27 = int_add_ovf(i23, 2)
guard_no_overflow(descr=...)
""")
def test_stararg(self):
def main(x):
def g(*args):
return args[-1]
def h(*args):
return len(args)
#
s = 0
l = []
i = 0
while i < x:
l.append(1)
s += g(*l) # ID: g
i = h(*l) # ID: h
a = 0
return s
#
log = self.run(main, [1000])
assert log.result == 1000
loop, = log.loops_by_id('g')
ops_g = log.opnames(loop.ops_by_id('g'))
ops_h = log.opnames(loop.ops_by_id('h'))
ops = ops_g + ops_h
assert 'new_with_vtable' not in ops
assert 'call_may_force' not in ops
def test_call_builtin_function(self):
def main(n):
i = 2
l = []
while i < n:
i += 1
l.append(i) # ID: append
a = 0
return i, len(l)
#
log = self.run(main, [1000])
assert log.result == (1000, 998)
loop, = log.loops_by_filename(self.filepath)
# the int strategy is used here
assert loop.match_by_id('append', """
guard_not_invalidated?
i15 = int_add(i13, 1)
i17 = arraylen_gc(p15, descr=<ArrayS .>)
i18 = int_lt(i17, i15)
# a cond call to _ll_list_resize_hint_really_look_inside_iff
cond_call(i18, _, p8, i15, 1, descr=<Callv 0 rii EF=5>)
guard_no_exception(descr=...)
p17 = getfield_gc_r(p8, descr=<FieldP list.items .*>)
setarrayitem_gc(p17, i13, i12, descr=<ArrayS .>)
""")
def test_blockstack_virtualizable(self):
def main(n):
from pypyjit import residual_call
l = len
i = 0
while i < n:
try:
residual_call(l, []) # ID: call
except:
pass
i += 1
return i
#
log = self.run(main, [500])
assert log.result == 500
loop, = log.loops_by_id('call')
assert loop.match_by_id('call', opcode='CALL_FUNCTION', expected_src="""
# make sure that the "block" is not allocated
...
p20 = force_token()
p22 = new_with_vtable(descr=<SizeDescr .*>)
p24 = new_array_clear(1, descr=<ArrayP .>)
p26 = new_with_vtable(descr=<SizeDescr .*>)
{{{
setfield_gc(p0, p20, descr=<FieldP .*PyFrame.vable_token .*>)
setfield_gc(p22, ConstPtr(null), descr=<FieldP pypy.interpreter.argument.Arguments.inst_keywords_w .*>)
setfield_gc(p22, ConstPtr(null), descr=<FieldP pypy.interpreter.argument.Arguments.inst_keywords .*>)
setfield_gc(p22, 1, descr=<FieldU pypy.interpreter.argument.Arguments.inst__jit_few_keywords .*>)
setfield_gc(p22, 0, descr=<FieldU pypy.interpreter.argument.Arguments.inst_methodcall .*>)
setfield_gc(p22, ConstPtr(null), descr=<FieldP pypy.interpreter.argument.Arguments.inst_keyword_names_w .*>)
setfield_gc(p26, ConstPtr(ptr22), descr=<FieldP pypy.objspace.std.listobject.W_ListObject.inst_strategy .*>)
setfield_gc(p26, ConstPtr(null), descr=<FieldP pypy.objspace.std.listobject.W_ListObject.inst_lstorage .*>)
setarrayitem_gc(p24, 0, p26, descr=<ArrayP .>)
setfield_gc(p22, p24, descr=<FieldP .*Arguments.inst_arguments_w .*>)
}}}
p32 = call_may_force_r(_, p18, p22, descr=<Callr . rr EF=7>)
...
""")
def test_func_defaults(self):
def main(n):
i = 1
while i < n:
i += len(xrange(i+1)) - i
return i
log = self.run(main, [10000])
assert log.result == 10000
loop, = log.loops_by_filename(self.filepath)
assert loop.match("""
i10 = int_lt(i5, i6)
guard_true(i10, descr=...)
guard_not_invalidated(descr=...)
i120 = int_add(i5, 1)
--TICK--
jump(..., descr=...)
""")
def test_global_closure_has_constant_cells(self):
log = self.run("""
def make_adder(n):
def add(x):
return x + n
return add
add5 = make_adder(5)
def main():
i = 0
while i < 5000:
i = add5(i) # ID: call
""", [])
loop, = log.loops_by_id('call', is_entry_bridge=True)
assert loop.match("""
guard_value(i4, 1, descr=...)
guard_isnull(p5, descr=...)
guard_nonnull_class(p12, ConstClass(W_IntObject), descr=...)
guard_value(p2, ConstPtr(ptr21), descr=...)
i22 = getfield_gc_i(p12, descr=<FieldS pypy.objspace.std.intobject.W_IntObject.inst_intval .*>)
i24 = int_lt(i22, 5000)
guard_true(i24, descr=...)
guard_not_invalidated(descr=...)
p29 = call_r(ConstClass(_ll_1_threadlocalref_get__Ptr_GcStruct_objectLlT_Signed), #, descr=<Callr . i EF=1 OS=5>)
p30 = getfield_gc_r(p29, descr=<FieldP pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref .*>)
p31 = force_token()
p32 = getfield_gc_r(p29, descr=<FieldP pypy.interpreter.executioncontext.ExecutionContext.inst_w_tracefunc .*>)
guard_value(p32, ConstPtr(ptr33), descr=...)
i34 = getfield_gc_i(p29, descr=<FieldU pypy.interpreter.executioncontext.ExecutionContext.inst_profilefunc .*>)
i35 = int_is_zero(i34)
guard_true(i35, descr=...)
p37 = getfield_gc_r(ConstPtr(ptr36), descr=<FieldP pypy.interpreter.nestedscope.Cell.inst_w_value .*>)
guard_nonnull_class(p37, ConstClass(W_IntObject), descr=...)
i39 = getfield_gc_i(p37, descr=<FieldS pypy.objspace.std.intobject.W_IntObject.inst_intval .*>)
i40 = int_add_ovf(i22, i39)
guard_no_overflow(descr=...)
--TICK--
""")
def test_local_closure_is_virtual(self):
log = self.run("""
def main():
i = 0
while i < 5000:
def add():
return i + 1
i = add() # ID: call
""", [])
loop, = log.loops_by_id('call')
assert loop.match("""
i8 = getfield_gc_i(p6, descr=<FieldS pypy.objspace.std.intobject.W_IntObject.inst_intval .*>)
i10 = int_lt(i8, 5000)
guard_true(i10, descr=...)
guard_not_invalidated?
i11 = force_token()
i13 = int_add(i8, 1)
--TICK--
p22 = new_with_vtable(descr=<SizeDescr .*>)
setfield_gc(p22, i13, descr=<FieldS pypy.objspace.std.intobject.W_IntObject.inst_intval .*>)
setfield_gc(p4, p22, descr=<FieldP pypy.interpreter.nestedscope.Cell.inst_w_value .*>)
jump(..., descr=...)
""")
def test_kwargs_virtual(self):
def main(n):
def g(**kwargs):
return kwargs["x"] + 1
i = 0
while i < n:
i = g(x=i)
return i
log = self.run(main, [500])
assert log.result == 500
loop, = log.loops_by_filename(self.filepath)
assert loop.match("""
i2 = int_lt(i0, i1)
guard_true(i2, descr=...)
guard_not_invalidated?
i3 = force_token()
i4 = int_add(i0, 1)
--TICK--
jump(..., descr=...)
""")
def test_kwargs_virtual2(self):
log = self.run("""
def f(*args, **kwargs):
kwargs['a'] = kwargs['z'] * 0
return g(1, *args, **kwargs)
def g(x, y, z=2, a=1):
return x - y + z + a
def main(stop):
res = 0
i = 0
while i < stop:
res = f(res, z=i) # ID: call
i += 1
return res""", [1000])
assert log.result == 500
loop, = log.loops_by_id('call')
assert loop.match("""
i65 = int_lt(i58, i29)
guard_true(i65, descr=...)
guard_not_invalidated(..., descr=...)
i66 = force_token()
i67 = force_token()
i69 = int_sub_ovf(1, i56)
guard_no_overflow(..., descr=...)
i70 = int_add_ovf(i69, i58)
guard_no_overflow(..., descr=...)
i71 = int_add(i58, 1)
--TICK--
jump(..., descr=...)
""")
def test_kwargs_virtual3(self):
log = self.run("""
def f(a, b, c):
pass
def main(stop):
i = 0
while i < stop:
d = {'a': 2, 'b': 3, 'c': 4}
f(**d) # ID: call
i += 1
return 13
""", [1000])
assert log.result == 13
loop, = log.loops_by_id('call')
allops = loop.allops()
calls = [op for op in allops if op.name.startswith('call')]
assert len(calls) == 0
assert len([op for op in allops if op.name.startswith('new')]) == 0
def test_kwargs_non_virtual(self):
log = self.run("""
def f(a, b, c):
pass
def main(stop):
d = {'a': 2, 'b': 3, 'c': 4}
i = 0
while i < stop:
f(**d) # ID: call
i += 1
return 13
""", [1000])
assert log.result == 13
loop, = log.loops_by_id('call')
allops = loop.allops()
calls = [op for op in allops if op.name.startswith('call')]
assert OpMatcher(calls).match('''
p93 = call_r(ConstClass(view_as_kwargs), p35, p12, descr=<.*>)
i103 = call_i(ConstClass(_match_keywords), ConstPtr(ptr52), 0, 0, p94, p98, 0, descr=<.*>)
''')
assert len([op for op in allops if op.name.startswith('new')]) == 1
# 1 alloc
def test_complex_case(self):
log = self.run("""
def f(x, y, a, b, c=3, d=4):
pass
def main(stop):
i = 0
while i < stop:
a = [1, 2]
d = {'a': 2, 'b': 3, 'd':4}
f(*a, **d) # ID: call
i += 1
return 13
""", [1000])
loop, = log.loops_by_id('call')
assert loop.match_by_id('call', '''
guard_not_invalidated?
i1 = force_token()
''')
def test_complex_case_global(self):
log = self.run("""
def f(x, y, a, b, c=3, d=4):
pass
a = [1, 2]
d = {'a': 2, 'b': 3, 'd':4}
def main(stop):
i = 0
while i < stop:
f(*a, **d) # ID: call
i += 1
return 13
""", [1000])
def test_complex_case_loopconst(self):
log = self.run("""
def f(x, y, a, b, c=3, d=4):
pass
def main(stop):
i = 0
a = [1, 2]
d = {'a': 2, 'b': 3, 'd':4}
while i < stop:
f(*a, **d) # ID: call
i += 1
return 13
""", [1000])
|
13099c271904366ae107c3258cee07515b360587
|
ac2f43c8e0d9649a7f063c59b3dffdfed9fd7ed7
|
/tests2/tests/wedge100/power_supply.py
|
a6a93c8415ad4566385261f6b2f260ff78e42cd2
|
[] |
no_license
|
facebook/openbmc
|
bef10604ced226288600f55248b7f1be9945aea4
|
32777c66a8410d767eae15baabf71c61a0bef13c
|
refs/heads/helium
| 2023-08-17T03:13:54.729494
| 2023-08-16T23:24:18
| 2023-08-16T23:24:18
| 31,917,712
| 684
| 331
| null | 2023-07-25T21:19:08
| 2015-03-09T19:18:35
|
C
|
UTF-8
|
Python
| false
| false
| 2,294
|
py
|
power_supply.py
|
#!/usr/bin/env python3
#
# Copyright 2021-present Facebook. All Rights Reserved.
#
# This program file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program in a file named COPYING; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA
#
import os
POWER_TYPE_CACHE = "/var/cache/detect_power_module_type.txt"
def match_power_type(dev, presence):
"""
check to see which power device is being used and
if it is being detected. Then we return the list
of the power device kernel driver modules that must
present in the image.
pem1 = 1 --> means pem with ltc4151 inserted
pem2 = 1 --> means pem with ltc4281 inserted
psu1 and psu2 = 1 -> means system with PSU inserted
"""
power_type = None
if dev == "pem1" and presence == 1:
power_type = "pem1"
elif dev == "pem2" and presence == 1:
power_type = "pem2"
elif dev == "psu1" or dev == "psu2":
if presence == 1:
power_type = "psu"
else:
raise Exception("file contains unknown module")
return power_type
def get_power_type():
"""
Read appropriate path that contains the presence
status of various power module option.
"""
power_type = None
if not os.path.exists(POWER_TYPE_CACHE):
raise Exception("Path for power type doesn't exist")
with open(POWER_TYPE_CACHE, "r") as fp:
lines = fp.readlines()
if lines:
for line in lines:
dev = line.split(": ")[0]
presence = int(line.split(": ")[1])
if presence == 1:
power_type = match_power_type(dev, presence)
break
else:
raise Exception("Power module file is empty")
return power_type
|
d122889fd87df3be01e3989b325e370d26dc4ce4
|
56d6257e932e1397ab03b1e7ccc6231378665b04
|
/Push/actions.py
|
62cf84a72eec0be8e35ca6a15036fc2bb1ce0081
|
[] |
no_license
|
gluon/AbletonLive10.1_MIDIRemoteScripts
|
e6c8dc4956cff9630aaa36f3667994387ad1d0cf
|
2468b51eba7e5082b06f9e381b3e72027c5f272c
|
refs/heads/master
| 2023-01-10T18:37:46.504180
| 2022-12-23T09:21:48
| 2022-12-23T09:21:48
| 213,423,555
| 205
| 59
| null | 2021-02-12T16:15:01
| 2019-10-07T15:44:52
|
Python
|
UTF-8
|
Python
| false
| false
| 5,495
|
py
|
actions.py
|
#Embedded file name: /Users/versonator/Jenkins/live/output/Live/mac_64_static/Release/python-bundle/MIDI Remote Scripts/Push/actions.py
from __future__ import absolute_import, print_function, unicode_literals
import Live
from ableton.v2.base import depends, listens, task
from ableton.v2.control_surface import Component
from ableton.v2.control_surface.mode import SetAttributeMode, ModesComponent
from pushbase.consts import MessageBoxText
from pushbase.device_chain_utils import is_empty_drum_pad
from pushbase.browser_modes import BrowserAddEffectMode
from pushbase.action_with_options_component import OptionsComponent
from pushbase.message_box_component import Messenger
class CreateDefaultTrackComponent(Component, Messenger):
@depends(selection=None)
def __init__(self, selection = None, *a, **k):
super(CreateDefaultTrackComponent, self).__init__(*a, **k)
self.options = OptionsComponent(parent=self)
self.options.selected_option = None
self.options.option_names = (u'Audio', u'Midi', u'Return')
self.options.labels = (u'Create track:', u'', u'', u'')
self.options.selected_color = u'Browser.Load'
self.options.unselected_color = u'Browser.Load'
self._on_option_selected.subject = self.options
self._selection = selection
@listens(u'selected_option')
def _on_option_selected(self, option):
if option is not None:
self.create_track()
self.options.selected_option = None
def create_track(self):
try:
song = self.song
selected_option = self.options.selected_option
if selected_option == 0:
song.create_audio_track()
elif selected_option == 1:
song.create_midi_track()
elif selected_option == 2:
song.create_return_track()
except Live.Base.LimitationError:
self.expect_dialog(MessageBoxText.TRACK_LIMIT_REACHED)
except RuntimeError:
self.expect_dialog(MessageBoxText.MAX_RETURN_TRACKS_REACHED)
def on_enabled_changed(self):
self.options.selected_option = None
class CreateInstrumentTrackComponent(Component, Messenger):
@depends(selection=None)
def __init__(self, selection = None, browser_mode = None, browser_component = None, browser_hotswap_mode = None, *a, **k):
super(CreateInstrumentTrackComponent, self).__init__(*a, **k)
self._selection = selection
self._with_browser_modes = ModesComponent(parent=self)
self._with_browser_modes.add_mode(u'create', [self._prepare_browser,
SetAttributeMode(self.application.browser, u'filter_type', Live.Browser.FilterType.instrument_hotswap),
SetAttributeMode(browser_component, u'do_load_item', self._do_browser_load_item),
browser_mode,
browser_component.reset_load_memory])
self._with_browser_modes.add_mode(u'hotswap', [browser_hotswap_mode, browser_mode])
self._go_to_hotswap_task = self._tasks.add(task.sequence(task.delay(1), task.run(self._go_to_hotswap)))
self._go_to_hotswap_task.kill()
def on_enabled_changed(self):
self._with_browser_modes.selected_mode = u'create' if self.is_enabled() else None
self._go_to_hotswap_task.kill()
def _prepare_browser(self):
self.application.browser.hotswap_target = None
def _do_browser_load_item(self, item):
song = self.song
try:
song.create_midi_track()
except Live.Base.LimitationError:
self.expect_dialog(MessageBoxText.TRACK_LIMIT_REACHED)
item.action()
self._go_to_hotswap_task.restart()
def _go_to_hotswap(self):
self._with_browser_modes.selected_mode = u'hotswap'
class CreateDeviceComponent(Component):
@depends(selection=None)
def __init__(self, selection = None, browser_component = None, browser_mode = None, browser_hotswap_mode = None, insert_left = False, *a, **k):
super(CreateDeviceComponent, self).__init__(*a, **k)
self._selection = selection
self._add_effect_mode = BrowserAddEffectMode(selection=selection, browser=self.application.browser, application_view=self.application.view, insert_left=insert_left)
self._create_device_modes = ModesComponent(parent=self)
self._create_device_modes.add_mode(u'create', [SetAttributeMode(browser_component, u'do_load_item', self._do_browser_load_item),
self._add_effect_mode,
browser_mode,
browser_component.reset_load_memory])
self._create_device_modes.add_mode(u'hotswap', [browser_hotswap_mode, browser_mode])
self._go_to_hotswap_task = self._tasks.add(task.sequence(task.delay(1), task.run(self._go_to_hotswap)))
self._go_to_hotswap_task.kill()
def on_enabled_changed(self):
self._go_to_hotswap_task.kill()
if self.is_enabled():
if is_empty_drum_pad(self._selection.selected_object):
self._create_device_modes.selected_mode = u'hotswap'
else:
self._create_device_modes.selected_mode = u'create'
def _go_to_hotswap(self):
self._create_device_modes.selected_mode = u'hotswap'
def _do_browser_load_item(self, item):
selection = self._add_effect_mode.get_selection_for_insert()
if selection:
self._selection.selected_object = selection
item.action()
self._go_to_hotswap_task.restart()
|
65045adcbebc38a21c52d9629e34123b8d82f233
|
313cdc3bd63a9d7f1a358758315f75bfe697720e
|
/Compiler/exceptions.py
|
fd9c6c8c751aace0efd594587396f6949b03109c
|
[
"BSD-2-Clause"
] |
permissive
|
KULeuven-COSIC/SCALE-MAMBA
|
1abf9151940b77e7d8919443fa5ca5cbb137b638
|
c111516e3ebc1efd12a2bd47dd2122b160e13d1e
|
refs/heads/master
| 2022-08-20T07:15:37.749282
| 2022-03-30T06:57:03
| 2022-03-30T06:57:03
| 131,836,493
| 241
| 102
|
NOASSERTION
| 2023-09-06T08:24:55
| 2018-05-02T10:40:57
|
Verilog
|
UTF-8
|
Python
| false
| false
| 604
|
py
|
exceptions.py
|
# Copyright (c) 2017, The University of Bristol, Senate House, Tyndall Avenue, Bristol, BS8 1TH, United Kingdom.
# Copyright (c) 2021, COSIC-KU Leuven, Kasteelpark Arenberg 10, bus 2452, B-3001 Leuven-Heverlee, Belgium.
class CompilerError(Exception):
"""Base class for compiler exceptions."""
pass
class RegisterOverflowError(CompilerError):
pass
class MemoryOverflowError(CompilerError):
pass
class ArgumentError(CompilerError):
""" Exception raised for errors in instruction argument parsing. """
def __init__(self, arg, msg):
self.arg = arg
self.msg = msg
|
797f5876776a1087c39dc4c55540ad4b405a75e3
|
3a2071c34e3c35847b2bcc2a5d3b3a74114daa0f
|
/subversion/tests/cmdline/pegrev_parse_tests.py
|
c8d9ca4076aa8f37063bfc699f59db3b4e404541
|
[
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-other-permissive",
"X11",
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"HPND-Markus-Kuhn",
"LicenseRef-scancode-unicode",
"Apache-2.0",
"FSFAP"
] |
permissive
|
apache/subversion
|
18a9142afe63f060ffc0814fe0c758c91ad8bd31
|
dd957c4991e61bde23cc60d13449ea8b65f80c43
|
refs/heads/trunk
| 2023-09-04T15:22:36.755177
| 2023-08-29T19:55:03
| 2023-08-29T19:55:03
| 454,263
| 520
| 207
|
Apache-2.0
| 2023-08-26T14:17:30
| 2009-12-31T09:00:10
|
C
|
UTF-8
|
Python
| false
| false
| 24,266
|
py
|
pegrev_parse_tests.py
|
#!/usr/bin/env python
#
# basic_tests.py: testing working-copy interactions with ra_local
#
# Subversion is a tool for revision control.
# See https://subversion.apache.org for more information.
#
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
######################################################################
# General modules
import shutil, stat, re, os, logging
logger = logging.getLogger()
# Our testing module
import svntest
from svntest import wc
from svntest import main
from svntest import actions
# (abbreviation)
Skip = svntest.testcase.Skip_deco
SkipUnless = svntest.testcase.SkipUnless_deco
XFail = svntest.testcase.XFail_deco
Issues = svntest.testcase.Issues_deco
Issue = svntest.testcase.Issue_deco
Wimp = svntest.testcase.Wimp_deco
Item = wc.StateItem
######################################################################
# Helper functions
# Most of our tests use absolute paths as parameters on the command line. But
# for these tests, it's important that we can use bare file names in the
# commands, because the parser may have (and as of this writing does have)
# edge-case bugs that we can only expose in this way. Therefore, these helpers
# ensure that we run 'svn' with the CWD at the root of the working copy.
def run_svn_at_wcdir(sbox, expected_status, expected_stderr, *varargs):
if expected_stderr is None:
expected_stderr = []
cwd = os.getcwd()
try:
os.chdir(sbox.wc_dir)
actions.run_and_verify_svn(None, expected_stderr, *varargs)
finally:
os.chdir(cwd)
if expected_status is not None:
actions.run_and_verify_status(sbox.wc_dir, expected_status)
def get_trojan_virginal_state(sbox):
return actions.get_virginal_state(sbox.wc_dir, '1', tree='trojan')
def build_trojan_sandbox(sbox, expected_stderr):
sbox.build(tree='trojan')
if expected_stderr is None:
return get_trojan_virginal_state(sbox)
return None
def build_empty_sandbox(sbox, expected_stderr):
sbox.build(empty=True)
if expected_stderr is None:
return svntest.wc.State(sbox.wc_dir, {
'': svntest.wc.StateItem(status=' ', wc_rev='0')
})
return None
def build_sandbox(sbox, empty_sandbox, expected_stderr):
if not empty_sandbox:
return build_trojan_sandbox(sbox, expected_stderr)
else:
return build_empty_sandbox(sbox, expected_stderr)
def do_add_file(sbox, dst, dst_cmdline,
expected_stderr=None, empty_sandbox=False):
expected_status = build_sandbox(sbox, empty_sandbox, expected_stderr)
if expected_status is not None:
expected_status.add({dst: Item(status='A ', wc_rev='-')})
main.file_write(sbox.ospath(dst), "This is file '" + dst + "'.")
run_svn_at_wcdir(sbox, expected_status, expected_stderr,
'add', dst_cmdline)
def do_add_file_e(sbox, dst, dst_cmdline, expected_stderr=None):
"like do_add_file() but with an empty sandbox"
return do_add_file(sbox, dst, dst_cmdline, expected_stderr, True)
def do_make_dir(sbox, dst, dst_cmdline,
expected_stderr=None, empty_sandbox=False):
expected_status = build_sandbox(sbox, empty_sandbox, expected_stderr)
if expected_status is not None:
expected_status.add({dst: Item(status='A ', wc_rev='-')})
run_svn_at_wcdir(sbox, expected_status, expected_stderr,
'mkdir', dst_cmdline)
def do_make_dir_e(sbox, dst, dst_cmdline, expected_stderr=None):
"like do_make_dir() but with an empty sandbox"
return do_make_dir(sbox, dst, dst_cmdline, expected_stderr, True)
def do_remove(sbox, dst, dst_cmdline, expected_stderr=None):
expected_status = build_trojan_sandbox(sbox, expected_stderr)
if expected_status is not None and dst in expected_status.desc:
expected_status.tweak(dst, status='D ')
run_svn_at_wcdir(sbox, expected_status, expected_stderr,
'remove', dst_cmdline)
def do_rename(sbox, src, src_cmdline, dst, dst_cmdline,
expected_stderr=None):
expected_status = build_trojan_sandbox(sbox, expected_stderr)
if expected_status is not None:
expected_status.tweak(src, status='D ', moved_to=dst)
expected_status.add({dst: Item(status='A ', copied='+',
moved_from=src, wc_rev='-')})
run_svn_at_wcdir(sbox, expected_status, expected_stderr,
'rename', src_cmdline, dst_cmdline)
######################################################################
# Tests
#
# Each test must return on success or raise on failure.
#=====================================================================
# Tests for 'svn add' in the current directory
def add_file_here_1_escape_peg(sbox):
"add file 'tau' with pegrev escape"
do_add_file_e(sbox, 'tau', 'tau@')
def add_file_here_2_escape_peg(sbox):
"add file '@tau' with pegrev escape"
do_add_file_e(sbox, '@tau', '@tau@')
def add_file_here_3_escape_peg(sbox):
"add file '_@tau' with pegrev escape"
do_add_file_e(sbox, '_@tau', '_@tau@')
def add_file_here_4_escape_peg(sbox):
"add file '.@tau' with pegrev escape"
do_add_file_e(sbox, '.@tau', '.@tau@')
def add_file_here_5_escape_peg(sbox):
"add file 'tau@' with pegrev escape"
do_add_file_e(sbox, 'tau@', 'tau@@')
def add_file_here_6_escape_peg(sbox):
"add file '@tau@' with pegrev escape"
do_add_file_e(sbox, '@tau@', '@tau@@')
def add_file_here_7_escape_peg(sbox):
"add file '@' with pegrev escape"
do_add_file_e(sbox, '@', '@@')
#---------------------------------------------------------------------
def add_file_here_1_no_escape_peg(sbox):
"add file 'tau' without pegrev escape"
do_add_file_e(sbox, 'tau', 'tau')
def add_file_here_2_no_escape_peg(sbox):
"add file '@tau' without pegrev escape"
do_add_file_e(sbox, '@tau', '@tau', "svn: E125001: '@tau'")
def add_file_here_3_no_escape_peg(sbox):
"add file '_@tau' without pegrev escape"
do_add_file_e(sbox, '_@tau', '_@tau', "svn: E200009: '_@tau'")
# 'svn add' tests for filenames containing '@', with and without the
# trailing-'@' peg-revision escape.  Helpers (do_add_file, do_add_file_e)
# and the @Wimp decorator are defined earlier in this file.
@Wimp("The error message mentions '@tau' instead of '.@tau'")
def add_file_here_4_no_escape_peg(sbox):
  "add file '.@tau' without pegrev escape"
  do_add_file_e(sbox, '.@tau', '.@tau', "svn: E200009: '.@tau'")

def add_file_here_5_no_escape_peg(sbox):
  "add file 'tau@' without pegrev escape"
  do_add_file_e(sbox, 'tau@', 'tau@', 'svn: E200009: ')

def add_file_here_6_no_escape_peg(sbox):
  "add file '@tau@' without pegrev escape"
  do_add_file_e(sbox, '@tau@', '@tau@', 'svn: E200009: ')

def add_file_here_7_no_escape_peg(sbox):
  "add file '@' without pegrev escape"
  do_add_file_e(sbox, '@', '@', "svn: E125001: '@'")

#=====================================================================
# Tests for 'svn add' in a subdirectory
def add_file_subdir_1_escape_peg(sbox):
  "add file 'E/tau' with pegrev escape"
  do_add_file(sbox, 'E/tau', 'E/tau@')

def add_file_subdir_2_escape_peg(sbox):
  "add file 'E/@tau' with pegrev escape"
  do_add_file(sbox, 'E/@tau', 'E/@tau@')

def add_file_subdir_3_escape_peg(sbox):
  "add file 'E/_@tau' with pegrev escape"
  do_add_file(sbox, 'E/_@tau', 'E/_@tau@')

def add_file_subdir_4_escape_peg(sbox):
  "add file 'E/.@tau' with pegrev escape"
  do_add_file(sbox, 'E/.@tau', 'E/.@tau@')

def add_file_subdir_5_escape_peg(sbox):
  "add file 'E/tau@' with pegrev escape"
  do_add_file(sbox, 'E/tau@', 'E/tau@@')

def add_file_subdir_6_escape_peg(sbox):
  "add file 'E/@tau@' with pegrev escape"
  do_add_file(sbox, 'E/@tau@', 'E/@tau@@')

def add_file_subdir_7_escape_peg(sbox):
  "add file 'E/@' with pegrev escape"
  do_add_file(sbox, 'E/@', 'E/@@')

#---------------------------------------------------------------------
def add_file_subdir_1_no_escape_peg(sbox):
  "add file 'E/tau' without pegrev escape"
  do_add_file(sbox, 'E/tau', 'E/tau')

@Wimp("The error message mentions 'E@tau' instead of 'E/@tau'")
@Wimp("The error message should be E125001")
def add_file_subdir_2_no_escape_peg(sbox):
  "add file 'E/@tau' without pegrev escape"
  do_add_file(sbox, 'E/@tau', 'E/@tau', r"svn: E200009: 'E[\\/]@tau'")

def add_file_subdir_3_no_escape_peg(sbox):
  "add file 'E/_@tau' without pegrev escape"
  do_add_file(sbox, 'E/_@tau', 'E/_@tau', r"svn: E200009: 'E[\\/]_@tau'")

@Wimp("The error message mentions 'E@tau' instead of 'E/.@tau'")
def add_file_subdir_4_no_escape_peg(sbox):
  "add file 'E/.@tau' without pegrev escape"
  do_add_file(sbox, 'E/.@tau', 'E/.@tau', r"svn: E200009: 'E[\\/].@tau'")

def add_file_subdir_5_no_escape_peg(sbox):
  "add file 'E/tau@' without pegrev escape"
  do_add_file(sbox, 'E/tau@', 'E/tau@', 'svn: E200009: ')

def add_file_subdir_6_no_escape_peg(sbox):
  "add file 'E/@tau@' without pegrev escape"
  do_add_file(sbox, 'E/@tau@', 'E/@tau@', 'svn: E200009: ')

@Wimp("The error message is E200009 but should be E125001")
def add_file_subdir_7_no_escape_peg(sbox):
  "add file 'E/@' without pegrev escape"
  do_add_file(sbox, 'E/@', 'E/@', r"svn: E125001: 'E[\\/]@'")
#=====================================================================
# Tests for 'svn mkdir' in the current directory
# 'svn mkdir' tests for directory names containing '@', with and without
# the trailing-'@' peg-revision escape.  Helpers (do_make_dir,
# do_make_dir_e) are defined earlier in this file.
def make_dir_here_1_escape_peg(sbox):
  "create directory 'T' with pegrev escape"
  do_make_dir_e(sbox, 'T', 'T@')

def make_dir_here_2_escape_peg(sbox):
  "create directory '@T' with pegrev escape"
  do_make_dir_e(sbox, '@T', '@T@')

def make_dir_here_3_escape_peg(sbox):
  "create directory '_@T' with pegrev escape"
  do_make_dir(sbox, '_@T', '_@T@')

def make_dir_here_4_escape_peg(sbox):
  "create directory '.@T' with pegrev escape"
  do_make_dir_e(sbox, '.@T', '.@T@')

def make_dir_here_5_escape_peg(sbox):
  "create directory 'T@' with pegrev escape"
  do_make_dir_e(sbox, 'T@', 'T@@')

def make_dir_here_6_escape_peg(sbox):
  "create directory '@T@' with pegrev escape"
  do_make_dir_e(sbox, '@T@', '@T@@')

def make_dir_here_7_escape_peg(sbox):
  "create directory '@' with pegrev escape"
  do_make_dir_e(sbox, '@', '@@')

#---------------------------------------------------------------------
def make_dir_here_1_no_escape_peg(sbox):
  "create directory 'T' without pegrev escape"
  do_make_dir_e(sbox, 'T', 'T')

def make_dir_here_2_no_escape_peg(sbox):
  "create directory '@T' without pegrev escape"
  do_make_dir_e(sbox, '@T', '@T', "svn: E125001: '@T'")

def make_dir_here_3_no_escape_peg(sbox):
  "create directory '_@T' without pegrev escape"
  do_make_dir_e(sbox, '_@T', '_@T', "svn: E200009: '_@T'")

@Wimp("The error message mentions '@T' instead of '.@T'")
def make_dir_here_4_no_escape_peg(sbox):
  "create directory '.@T' without pegrev escape"
  do_make_dir_e(sbox, '.@T', '.@T', "svn: E200009: '.@T'")

# Skip tests 5 and 6 that create a directory with a trailing @ in the name
# because it is correctly interpreted as a peg revision escape. This is
# already tested by:
#   - make_dir_here_5_escape_peg
#   - make_dir_here_6_escape_peg

def make_dir_here_7_no_escape_peg(sbox):
  "create directory '@' without pegrev escape"
  do_make_dir_e(sbox, '@', '@', "svn: E125001: '@'")

#=====================================================================
# Tests for 'svn add' in a subdirectory
def make_dir_subdir_1_escape_peg(sbox):
  "create directory 'E/T' with pegrev escape"
  do_make_dir(sbox, 'E/T', 'E/T@')

def make_dir_subdir_2_escape_peg(sbox):
  "create directory 'E/@T' with pegrev escape"
  do_make_dir(sbox, 'E/@T', 'E/@T@')

def make_dir_subdir_3_escape_peg(sbox):
  "create directory 'E/_@T' with pegrev escape"
  do_make_dir(sbox, 'E/_@T', 'E/_@T@')

def make_dir_subdir_4_escape_peg(sbox):
  "create directory 'E/.@T' with pegrev escape"
  do_make_dir(sbox, 'E/.@T', 'E/.@T@')

def make_dir_subdir_5_escape_peg(sbox):
  "create directory 'E/T@' with pegrev escape"
  do_make_dir(sbox, 'E/T@', 'E/T@@')

def make_dir_subdir_6_escape_peg(sbox):
  "create directory 'E/@T@' with pegrev escape"
  do_make_dir(sbox, 'E/@T@', 'E/@T@@')

def make_dir_subdir_7_escape_peg(sbox):
  "create directory 'E/@' with pegrev escape"
  do_make_dir(sbox, 'E/@', 'E/@@')

#---------------------------------------------------------------------
def make_dir_subdir_1_no_escape_peg(sbox):
  "create directory 'E/T' without pegrev escape"
  do_make_dir(sbox, 'E/T', 'E/T')

@Wimp("The error message mentions 'E@T' instead of 'E/@T'")
@Wimp("The error message should be E125001")
def make_dir_subdir_2_no_escape_peg(sbox):
  "create directory 'E/@T' without pegrev escape"
  do_make_dir(sbox, 'E/@T', 'E/@T', r"svn: E200009: 'E[\\/]@T'")

def make_dir_subdir_3_no_escape_peg(sbox):
  "create directory 'E/_@T' without pegrev escape"
  do_make_dir(sbox, 'E/_@T', 'E/_@T', r"svn: E200009: 'E[\\/]_@T'")

@Wimp("The error message mentions 'E@T' instead of 'E/.@T'")
def make_dir_subdir_4_no_escape_peg(sbox):
  "create directory 'E/.@T' without pegrev escape"
  do_make_dir(sbox, 'E/.@T', 'E/.@T', r"svn: E200009: 'E[\\/].@T'")

# Skip tests 5 and 6 that create a directory with a trailing @ in the name
# because it is correctly interpreted as a peg revision escape. This is
# already tested by:
#   - make_dir_subdir_5_escape_peg
#   - make_dir_subdir_6_escape_peg

@Wimp("Reports error that E exists but should be E125001 for E/@")
def make_dir_subdir_7_no_escape_peg(sbox):
  "create directory 'E/@' without pegrev escape"
  do_make_dir(sbox, 'E/@', 'E/@', r"svn: E125001: 'E[\\/]@'")
#=====================================================================
# Tests for 'svn remove' in the current directory
# 'svn remove' tests for node names containing '@', with and without the
# trailing-'@' peg-revision escape.  Helper (do_remove) is defined earlier
# in this file.
def remove_here_1_escape_peg(sbox):
  "remove 'iota' with pegrev escape"
  do_remove(sbox, 'iota', 'iota@')

def remove_here_2_escape_peg(sbox):
  "remove '@zeta' with pegrev escape"
  do_remove(sbox, '@zeta', '@zeta@')

def remove_here_3_escape_peg(sbox):
  "remove '_@theta' with pegrev escape"
  do_remove(sbox, '_@theta', '_@theta@')

def remove_here_4_escape_peg(sbox):
  "remove '.@kappa' with pegrev escape"
  do_remove(sbox, '.@kappa', '.@kappa@')

def remove_here_5_escape_peg(sbox):
  "remove 'lambda@' with pegrev escape"
  do_remove(sbox, 'lambda@', 'lambda@@')

def remove_here_6_escape_peg(sbox):
  "remove '@omicron@' with pegrev escape"
  do_remove(sbox, '@omicron@', '@omicron@@')

def remove_here_7_escape_peg(sbox):
  "remove '@' with pegrev escape"
  do_remove(sbox, '@', '@@')

#---------------------------------------------------------------------
def remove_here_1_no_escape_peg(sbox):
  "remove 'iota' without pegrev escape"
  do_remove(sbox, 'iota', 'iota')

def remove_here_2_no_escape_peg(sbox):
  "remove '@zeta' without pegrev escape"
  do_remove(sbox, '@zeta', '@zeta', "svn: E125001: '@zeta'")

def remove_here_3_no_escape_peg(sbox):
  "remove '_@theta' without pegrev escape"
  do_remove(sbox, '_@theta', '_@theta', "svn: E200009: '_@theta'")

@Wimp("The error message mentions '@kappa' instead of '.@kappa'")
def remove_here_4_no_escape_peg(sbox):
  "remove '.@kappa' without pegrev escape"
  do_remove(sbox, '.@kappa', '.@kappa', "svn: E200009: '.@kappa'")

def remove_here_5_no_escape_peg(sbox):
  "remove 'lambda@' without pegrev escape"
  do_remove(sbox, 'lambda@', 'lambda@', 'svn: E200005: ')

def remove_here_6_no_escape_peg(sbox):
  "remove '@omicron@' without pegrev escape"
  do_remove(sbox, '@omicron@', '@omicron@', 'svn: E200005: ')

def remove_here_7_no_escape_peg(sbox):
  "remove '@' without pegrev escape"
  do_remove(sbox, '@', '@', "svn: E125001: '@'")

#=====================================================================
# Tests for 'svn remove' in a subdirectory directory
def remove_subdir_1_escape_peg(sbox):
  "remove 'A/alpha' with pegrev escape"
  do_remove(sbox, 'A/alpha', 'A/alpha@')

def remove_subdir_2_escape_peg(sbox):
  "remove 'B/@beta' with pegrev escape"
  do_remove(sbox, 'B/@beta', 'B/@beta@')

def remove_subdir_3_escape_peg(sbox):
  "remove 'G/_@gamma' with pegrev escape"
  do_remove(sbox, 'G/_@gamma', 'G/_@gamma@')

def remove_subdir_4_escape_peg(sbox):
  "remove 'D/.@delta' with pegrev escape"
  do_remove(sbox, 'D/.@delta', 'D/.@delta@')

def remove_subdir_5_escape_peg(sbox):
  "remove 'B/pi@' with pegrev escape"
  do_remove(sbox, 'B/pi@', 'B/pi@@')

def remove_subdir_6_escape_peg(sbox):
  "remove 'A/@omega@' with pegrev escape"
  do_remove(sbox, 'A/@omega@', 'A/@omega@@')

def remove_subdir_7_escape_peg(sbox):
  "remove 'B/@' with pegrev escape"
  do_remove(sbox, 'B/@', 'B/@@')

def remove_subdir_7a_escape_peg(sbox):
  "remove missing 'E/@' with pegrev escape"
  do_remove(sbox, 'E/@', 'E/@@', r"svn: E200005: '.*[\\/]E[\\/]@'")

def remove_subdir_7b_escape_peg(sbox):
  "remove missing '@/@' with pegrev escape"
  do_remove(sbox, '@/@@', '@/@@', r"svn: E200005: '.*[\\/]@[\\/]@'")

#---------------------------------------------------------------------
def remove_subdir_1_no_escape_peg(sbox):
  "remove 'A/alpha' without pegrev escape"
  do_remove(sbox, 'A/alpha', 'A/alpha')

@Wimp("The error message mentions 'B@beta' instead of 'B/@beta'")
@Wimp("The error message should be E125001")
def remove_subdir_2_no_escape_peg(sbox):
  "remove 'B/@beta' without pegrev escape"
  do_remove(sbox, 'B/@beta', 'B/@beta', r"svn: E200009: 'B[\\/]@beta'")

def remove_subdir_3_no_escape_peg(sbox):
  "remove 'G/_@gamma' without pegrev escape"
  do_remove(sbox, 'G/_@gamma', 'G/_@gamma', r"svn: E200009: 'G[\\/]_@gamma'")

@Wimp("The error message mentions 'D@delta' instead of 'D/.@delta'")
def remove_subdir_4_no_escape_peg(sbox):
  "remove 'D/.@delta' without pegrev escape"
  do_remove(sbox, 'D/.@delta', 'D/.@delta', "svn: E200009: 'D/.@delta'")

# Skip tests 5 and 6 that remove a node with a trailing @ in the name
# because it is correctly interpreted as a peg revision escape. This is
# already tested by:
#   - remove_subdir_5_escape_peg
#   - remove_subdir_6_escape_peg

@Wimp("Removes B instead of reporting E125001 for B/@")
def remove_subdir_7_no_escape_peg(sbox):
  "remove 'B/@' without pegrev escape"
  do_remove(sbox, 'B/@', 'B/@') #, r"svn: E125001: 'B[\\/]@'")

@Wimp("Removes E instead of reporting ENOENT or E125001 for E/@")
def remove_subdir_7a_no_escape_peg(sbox):
  "remove missing 'E/@' without pegrev escape"
  do_remove(sbox, 'E/@', 'E/@') #, r"svn: E125001: 'E[\\/]@'")

@Wimp("Removes @ instead of reporting ENOENT or E125001 for @/@")
def remove_subdir_7b_no_escape_peg(sbox):
  "remove missing '@/@' without pegrev escape"
  do_remove(sbox, '@/@', '@/@') #, r"svn: E125001: '@[\\/]@'")
#=====================================================================
# Test for 'svn move' to a subdirectory
# Test for 'svn move' to a subdirectory, documenting known-broken pegrev
# handling of the rename target (issue #4530).
@Wimp("Rename creates 'E/@tau@' instead of '@/@tau'")
@Issue(4530)
def rename_to_subdir_2_dst_escape_peg(sbox):
  "rename 'iota' to 'E/@tau with pegrev escape"
  # NOTE: This rename succeeds, but creates E/@tau@ instead of E/@tau, even
  # though it should strip away the pegrev escape from the target.
  do_rename(sbox, 'iota', 'iota', 'E/@tau', 'E/@tau@')

#---------------------------------------------------------------------
@Wimp("Rename creates 'E@tau' instead of failing")
@Issue(4530)
def rename_to_subdir_2_no_dst_escape_peg(sbox):
  "rename 'iota' to 'E/@tau without pegrev escape"
  # NOTE: This rename succeeds, but creates E@tau in the current directory,
  # when instead it should fail with 'svn: E125001: ...'.
  do_rename(sbox, 'iota', 'iota', 'E/@tau', 'E/@tau') ### 'svn: E200009: '

########################################################################
# Run the tests

# list all tests here, starting with None:
test_list = [ None,
              add_file_here_1_escape_peg,
              add_file_here_2_escape_peg,
              add_file_here_3_escape_peg,
              add_file_here_4_escape_peg,
              add_file_here_5_escape_peg,
              add_file_here_6_escape_peg,
              add_file_here_7_escape_peg,
              add_file_here_1_no_escape_peg,
              add_file_here_2_no_escape_peg,
              add_file_here_3_no_escape_peg,
              add_file_here_4_no_escape_peg,
              add_file_here_5_no_escape_peg,
              add_file_here_6_no_escape_peg,
              add_file_here_7_no_escape_peg,
              add_file_subdir_1_escape_peg,
              add_file_subdir_2_escape_peg,
              add_file_subdir_3_escape_peg,
              add_file_subdir_4_escape_peg,
              add_file_subdir_5_escape_peg,
              add_file_subdir_6_escape_peg,
              add_file_subdir_7_escape_peg,
              add_file_subdir_1_no_escape_peg,
              add_file_subdir_2_no_escape_peg,
              add_file_subdir_3_no_escape_peg,
              add_file_subdir_4_no_escape_peg,
              add_file_subdir_5_no_escape_peg,
              add_file_subdir_6_no_escape_peg,
              add_file_subdir_7_no_escape_peg,
              make_dir_here_1_escape_peg,
              make_dir_here_2_escape_peg,
              make_dir_here_3_escape_peg,
              make_dir_here_4_escape_peg,
              make_dir_here_5_escape_peg,
              make_dir_here_6_escape_peg,
              make_dir_here_7_escape_peg,
              make_dir_here_1_no_escape_peg,
              make_dir_here_2_no_escape_peg,
              make_dir_here_3_no_escape_peg,
              make_dir_here_4_no_escape_peg,
              # skipped: make_dir_here_5_no_escape_peg
              # skipped: make_dir_here_6_no_escape_peg
              make_dir_here_7_no_escape_peg,
              make_dir_subdir_1_escape_peg,
              make_dir_subdir_2_escape_peg,
              make_dir_subdir_3_escape_peg,
              make_dir_subdir_4_escape_peg,
              make_dir_subdir_5_escape_peg,
              make_dir_subdir_6_escape_peg,
              make_dir_subdir_7_escape_peg,
              make_dir_subdir_1_no_escape_peg,
              make_dir_subdir_2_no_escape_peg,
              make_dir_subdir_3_no_escape_peg,
              make_dir_subdir_4_no_escape_peg,
              # skipped: make_dir_subdir_5_no_escape_peg
              # skipped: make_dir_subdir_6_no_escape_peg
              make_dir_subdir_7_no_escape_peg,
              remove_here_1_escape_peg,
              remove_here_2_escape_peg,
              remove_here_3_escape_peg,
              remove_here_4_escape_peg,
              remove_here_5_escape_peg,
              remove_here_6_escape_peg,
              remove_here_7_escape_peg,
              remove_here_1_no_escape_peg,
              remove_here_2_no_escape_peg,
              remove_here_3_no_escape_peg,
              remove_here_4_no_escape_peg,
              remove_here_5_no_escape_peg,
              remove_here_6_no_escape_peg,
              remove_here_7_no_escape_peg,
              remove_subdir_1_escape_peg,
              remove_subdir_2_escape_peg,
              remove_subdir_3_escape_peg,
              remove_subdir_4_escape_peg,
              remove_subdir_5_escape_peg,
              remove_subdir_6_escape_peg,
              remove_subdir_7_escape_peg,
              remove_subdir_7a_escape_peg,
              remove_subdir_7b_escape_peg,
              remove_subdir_1_no_escape_peg,
              remove_subdir_2_no_escape_peg,
              remove_subdir_3_no_escape_peg,
              remove_subdir_4_no_escape_peg,
              # skipped: remove_subdir_5_no_escape_peg,
              # skipped: remove_subdir_6_no_escape_peg,
              remove_subdir_7_no_escape_peg,
              remove_subdir_7a_no_escape_peg,
              remove_subdir_7b_no_escape_peg,
              rename_to_subdir_2_dst_escape_peg,
              rename_to_subdir_2_no_dst_escape_peg,
            ]

if __name__ == '__main__':
  svntest.main.run_tests(test_list)
  # NOTREACHED

### End of file.
|
c94ead1de990e4504070619b4961df633004d9aa
|
f8dee139258b7d971bd1cfa16bd16e356537bbac
|
/Contents/Libraries/Shared/guessit/rules/properties/date.py
|
0b6083bd7f459ec9bb31621b89acf83415b54b27
|
[
"MIT"
] |
permissive
|
pannal/Sub-Zero.bundle
|
79673016ae68d1f2e9886fd30b8763b73a8f6cf8
|
4ced7d8c8f9f5fb47d12410f87fa33d782e9f0f4
|
refs/heads/master
| 2023-07-27T23:04:32.925845
| 2023-07-09T13:07:38
| 2023-07-09T13:08:04
| 21,959,699
| 1,820
| 178
|
NOASSERTION
| 2022-11-28T03:23:13
| 2014-07-17T22:19:13
|
Python
|
UTF-8
|
Python
| false
| false
| 2,491
|
py
|
date.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
date and year properties
"""
from rebulk import Rebulk, RemoveMatch, Rule
from ..common.date import search_date, valid_year
from ..common.validators import seps_surround
def date():
    """
    Builder for rebulk object.

    :return: Created Rebulk object
    :rtype: Rebulk
    """
    rebulk = Rebulk().defaults(validator=seps_surround)

    # Bare 4-digit years: only kept when surrounded by separators and when
    # valid_year() accepts the numeric value as a plausible release year.
    rebulk.regex(r"\d{4}", name="year", formatter=int,
                 validator=lambda match: seps_surround(match) and valid_year(match.value))

    def date_functional(string, context):
        """
        Search for date in the string and retrieves match

        :param string:
        :return:
        """
        # context options let callers disambiguate e.g. 01-02-03 orderings.
        ret = search_date(string, context.get('date_year_first'), context.get('date_day_first'))
        if ret:
            return ret[0], ret[1], {'value': ret[2]}

    # Full dates yield to overlapping episode/season matches; any other
    # conflict falls back to rebulk's default resolution.
    rebulk.functional(date_functional, name="date", properties={'date': [None]},
                      conflict_solver=lambda match, other: other
                      if other.name in ['episode', 'season']
                      else '__default__')

    rebulk.rules(KeepMarkedYearInFilepart)

    return rebulk
class KeepMarkedYearInFilepart(Rule):
    """
    Keep first years marked with [](){} in filepart, or if no year is marked, ensure it won't override titles.
    """
    priority = 64
    consequence = RemoveMatch

    def when(self, matches, context):
        # Returns the list of surplus 'year' matches to be removed; only
        # fileparts containing more than one year candidate are examined.
        ret = []
        if len(matches.named('year')) > 1:
            for filepart in matches.markers.named('path'):
                years = matches.range(filepart.start, filepart.end, lambda match: match.name == 'year')
                if len(years) > 1:
                    # Partition years by whether they sit inside a []/(){}
                    # 'group' marker.
                    group_years = []
                    ungroup_years = []
                    for year in years:
                        if matches.markers.at_match(year, lambda marker: marker.name == 'group'):
                            group_years.append(year)
                        else:
                            ungroup_years.append(year)
                    if group_years and ungroup_years:
                        ret.extend(ungroup_years)
                        ret.extend(group_years[1:])  # Keep the first year in marker.
                    elif not group_years:
                        # NOTE(review): the first ungrouped year is added to
                        # the removal list, apparently so its text stays
                        # available as part of the title — confirm intent.
                        ret.append(ungroup_years[0])  # Keep first year for title.
                        if len(ungroup_years) > 2:
                            ret.extend(ungroup_years[2:])
        return ret
|
9ccbad1d28cd807ef0a40e1c0491e16cc0e032d8
|
7ce570cd7fc2e97204b8e9c013e6352891d9a9e5
|
/__init__.py
|
d92cd1c32b230c99cf68b29f46d14dc7144c2dd3
|
[
"MIT"
] |
permissive
|
SublimeLinter/SublimeLinter
|
e789aa5852514993620f0afc3638c404c2229811
|
33ab552b2683dea452ed83ee9d2a7d753b969853
|
refs/heads/master
| 2023-09-05T23:14:52.644954
| 2023-08-16T21:34:05
| 2023-08-16T21:34:05
| 13,782,029
| 745
| 160
|
MIT
| 2023-08-16T21:34:06
| 2013-10-22T18:27:36
|
Python
|
UTF-8
|
Python
| false
| false
| 101
|
py
|
__init__.py
|
"""Hi.
If you're looking for the public API,
it is over there: `SublimeLinter.lint.__init__.py`
"""
|
80dd18548060604dd8be6d756921fd1f0ea2f2f3
|
5821d864fb40417184cd37a3ee3c889895d39efb
|
/manuscript/img-src/prepie.py
|
d858570c4f7fd5f6c2e53ecd676c851f62c7b610
|
[
"WTFPL"
] |
permissive
|
ssloy/least-squares-course
|
9c86d8c54894248440fba78206ce253559f4257b
|
13692cdfd40a8005893fd33887d6cc743c5f01ec
|
refs/heads/master
| 2022-08-18T15:53:15.313071
| 2021-12-01T12:44:59
| 2021-12-01T12:44:59
| 222,901,933
| 162
| 18
|
WTFPL
| 2022-07-28T21:16:03
| 2019-11-20T09:38:37
|
TeX
|
UTF-8
|
Python
| false
| false
| 1,261
|
py
|
prepie.py
|
# Fit a discrete function f on [0, 2*pi] whose first differences match those
# of g(x) = sin(x) in the least-squares sense, subject to the boundary
# constraints f(x_0) = fa and f(x_{n-1}) = fb, then plot f against g.
import numpy as np

fa = 1  # left constraint
fb = 3  # right constraint
n = 40

x = np.linspace(0, 2*np.pi, n)
g = [np.sin(p) for p in x]

# Build the (n-1) x (n-2) finite-difference system A f = b for the interior
# unknowns f_1 .. f_{n-2}; the first and last rows fold the boundary values
# fa and fb into the right-hand side.
A = np.zeros((n-1, n-2))
b = np.zeros((n-1, 1))
A[0, 0] = 1
b[0, 0] = fa + g[1]-g[0]
A[n-2, n-3] = -1
b[n-2, 0] = -fb + g[-1]-g[-2]
for i in range(1, n-2):
    b[i, 0] = g[i]-g[i-1]
    A[i, i-1] = -1
    A[i, i] = 1

# Solve the normal equations A^T A f = A^T b of the overdetermined system.
# (np.linalg.solve replaces the deprecated np.matrix + explicit inverse.)
f = [fa] + np.linalg.solve(A.T @ A, A.T @ b).ravel().tolist() + [fb]

import matplotlib.pyplot as plt

plt.rcParams["font.family"] = "serif"
plt.rcParams["mathtext.fontset"] = "dejavuserif"
plt.rcParams['text.usetex'] = True
plt.rc('font', size=20)

fig, ax = plt.subplots(1, figsize=(6.40, 6.40), dpi=150)
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] +
             ax.get_xticklabels() + ax.get_yticklabels()):
    item.set_fontsize(20)

plt.plot(x, f, linewidth=3, label='$f(x)$')
# Raw string: '\s' in a plain literal is an invalid escape sequence
# (DeprecationWarning, and a SyntaxWarning on recent Pythons).
plt.plot(x, g, linewidth=3, label=r'$g(x) = \sin x$')
plt.scatter([x[0], x[-1]], [fa, fb], color='red', edgecolors='black', s=200)
plt.legend(frameon=False)
plt.tight_layout()
plt.gca().set_aspect('equal', adjustable='box')
plt.savefig('prepie.png', bbox_inches='tight')
plt.show()
|
ca2ff4798f65f1e5522acd434e7a1cb1aea2525a
|
20d4aac372684501ce6d1b35b8d0beeb20769341
|
/pyart/core/__init__.py
|
15788cac0014741a11dc1a7e6d213e88a7346e93
|
[
"BSD-3-Clause"
] |
permissive
|
ARM-DOE/pyart
|
c108dd28a2d6eb1eaaa414408757feefac9515c3
|
172bbcf1cf3bcdb953c76ebae72c27c95dc2e606
|
refs/heads/main
| 2023-08-18T07:41:28.137014
| 2023-08-17T15:58:24
| 2023-08-17T15:58:24
| 6,248,645
| 455
| 255
|
NOASSERTION
| 2023-09-14T20:09:51
| 2012-10-16T17:51:30
|
Python
|
UTF-8
|
Python
| false
| false
| 719
|
py
|
__init__.py
|
"""
Core Py-ART classes and function for interacting with weather radar data.
"""
from .grid import Grid # noqa
from .radar import Radar # noqa
from .radar_spectra import RadarSpectra # noqa
from .transforms import antenna_to_cartesian # noqa
from .transforms import antenna_vectors_to_cartesian # noqa
from .transforms import cartesian_to_geographic # noqa
from .transforms import cartesian_to_geographic_aeqd # noqa
from .transforms import cartesian_vectors_to_geographic # noqa
from .transforms import geographic_to_cartesian # noqa
from .transforms import geographic_to_cartesian_aeqd # noqa
from .wind_profile import HorizontalWindProfile # noqa
__all__ = [s for s in dir() if not s.startswith("_")]
|
305ecbd07d8ec32756c89ef1ab528b637684ece9
|
dcc25b784213b17015d2080a7623c772d474dc22
|
/sigs/dx/issue_analysis/scripts/event_classifier.py
|
3776f132ca4c32a5ecc4d9f50be0da569e22309c
|
[
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
mindspore-ai/community
|
930c9d9fdbead852e3597d522a72fe5b66bfc005
|
c72ce898482419117550ad16d93b38298f4306a1
|
refs/heads/master
| 2023-07-19T19:43:20.785198
| 2023-07-17T06:51:22
| 2023-07-17T06:51:22
| 250,693,100
| 193
| 10
|
Apache-2.0
| 2022-10-29T10:01:40
| 2020-03-28T02:00:02
|
Python
|
UTF-8
|
Python
| false
| false
| 1,886
|
py
|
event_classifier.py
|
def is_promoted(owner_id, issue_operate_logs, issue_comments):
    """Check whether the issue owner actively pushed the issue forward.

    Called while iterating an issue list; an issue counts as "promoted" when
    the owner performed at least one of three kinds of positive actions:
    labelling, assigning people, or other progress actions.

    Args:
        owner_id: id of the issue owner.
        issue_operate_logs: operation-log dicts, each with 'user' and 'icon'.
        issue_comments: comment dicts, each with 'user' and 'body'.

    Returns:
        Tuple of booleans ``(total_flag, label_flag, assign_flag, other_flag)``
        where ``total_flag`` is the OR of the other three.
    """
    # These could later become integers counting positive actions per issue
    # (e.g. multiple labels applied).
    label_flag = False
    assign_flag = False
    other_flag = False

    # First, scan the operation log for positive actions by the owner.
    for action in issue_operate_logs:
        if action['user']['id'] != owner_id:
            continue
        action_icon = action['icon']
        if action_icon == 'tag icon':
            # Applied a label.
            label_flag = True
        elif action_icon == 'add user icon':
            # Assigned an owner / collaborator.
            assign_flag = True
        else:
            # Any other progress action (e.g. setting a schedule/milestone).
            other_flag = True

    def _is_label_comment(issue_comment):
        # Label-by-comment commands start with '//'.  startswith() is safe
        # for empty or one-character bodies, unlike the original indexing
        # (body[0] / body[1]) which raised IndexError.
        return issue_comment['body'].startswith('//')

    # Then, scan the comments for positive actions by the owner.
    for comment in issue_comments:
        if comment['user']['id'] != owner_id:
            continue
        # Labelled via a '//...' command comment.
        if _is_label_comment(comment):
            label_flag = True
        # Replied under their own issue (excluding '/' bot commands).
        if not comment['body'].startswith('/'):
            other_flag = True

    total_flag = label_flag or assign_flag or other_flag
    return total_flag, label_flag, assign_flag, other_flag
|
c29bf5e02b9f7e22217a020fd55daadc6d94a301
|
33392bbfbc4abd42b0c67843c7c6ba9e0692f845
|
/dsp/L2/meta/vmc/vmc_stream_mixer.py
|
72959488d76f47090889a02a08d200f7ddc08861
|
[
"Apache-2.0",
"OFL-1.1",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"BSD-2-Clause",
"MIT"
] |
permissive
|
Xilinx/Vitis_Libraries
|
bad9474bf099ed288418430f695572418c87bc29
|
2e6c66f83ee6ad21a7c4f20d6456754c8e522995
|
refs/heads/main
| 2023-07-20T09:01:16.129113
| 2023-06-08T08:18:19
| 2023-06-08T08:18:19
| 210,433,135
| 785
| 371
|
Apache-2.0
| 2023-07-06T21:35:46
| 2019-09-23T19:13:46
|
C++
|
UTF-8
|
Python
| false
| false
| 769
|
py
|
vmc_stream_mixer.py
|
from dds_mixer import *
#### VMC validators ####
def vmc_validate_output_window_size(args):
    """Validate the VMC 'input_window_size' via the kernel's TP_WINDOW_VSIZE check."""
    kernel_args = {
        "TP_INPUT_WINDOW_VSIZE": args["input_window_size"],
        "TT_DATA": args["data_type"],
    }
    return validate_TP_WINDOW_VSIZE(kernel_args)
#### VMC graph generator ####
def vmc_generate_graph(name, args):
    """Map VMC parameter names onto the kernel template parameters and emit the graph."""
    kernel_params = {
        "TT_DATA": args["data_type"],
        "TP_MIXER_MODE": args["mixer_mode"],
        "TP_INPUT_WINDOW_VSIZE": args["input_window_size"],
        # Stream variant: single output port over the stream API.
        "TP_NUM_OUTPUTS": 1,
        "TP_SSR": args["ssr"],
        "TP_API": 1,
        "phaseInc": args["phase_increment"],
        "initialPhaseOffset": 0,
    }
    return generate_graph(name, kernel_params)
|
82a9833d7630c9a001275305e80a70f04e0cda68
|
7eb3b417635cefc78a5cbba4239e4114142dd739
|
/mamba/error.py
|
f10ac3fe175be4923b9a8022937cca5aebd77967
|
[
"MIT"
] |
permissive
|
nestorsalceda/mamba
|
0ff0f380cb3994991d8a34e7fff7541b8e0347fd
|
7b40daf79697710e6e269198e6496c2ff0d4a0b9
|
refs/heads/master
| 2023-08-11T17:10:12.547078
| 2020-11-16T12:31:58
| 2020-11-16T12:31:58
| 8,735,726
| 493
| 68
|
MIT
| 2022-12-08T10:54:51
| 2013-03-12T18:53:42
|
Python
|
UTF-8
|
Python
| false
| false
| 113
|
py
|
error.py
|
# -*- coding: utf-8 -*-

import collections

# Immutable record pairing a caught exception with its traceback.
Error = collections.namedtuple('Error', ('exception', 'traceback'))
|
00db054910842e2910511bdb95b8958d8544bfc4
|
e03bce53de6f88c0e09f56e4fe11c36af0f1161f
|
/runway/_cli/commands/_dismantle.py
|
928893bc9acae0ed5cf159391d38c6ebe61a458d
|
[
"Apache-2.0"
] |
permissive
|
onicagroup/runway
|
20c31df9cbc1a1ffc5c9aa468ce5cf7d6ac7899f
|
0763b06aee07d2cf3f037a49ca0cb81a048c5deb
|
refs/heads/master
| 2023-08-30T22:35:54.113981
| 2023-08-29T14:13:35
| 2023-08-29T14:13:35
| 122,529,924
| 156
| 79
|
Apache-2.0
| 2023-09-13T13:43:50
| 2018-02-22T20:12:55
|
Python
|
UTF-8
|
Python
| false
| false
| 796
|
py
|
_dismantle.py
|
"""``runway dismantle`` command."""
# docs: file://./../../../docs/source/commands.rst
import logging
from typing import TYPE_CHECKING, Any, cast

import click

from .. import options
from ._destroy import destroy

if TYPE_CHECKING:
    from ..._logging import RunwayLogger

# Normalize the logger name by collapsing private-module markers, e.g.
# "runway._cli.commands._dismantle" -> "runway.cli.commands.dismantle".
LOGGER = cast("RunwayLogger", logging.getLogger(__name__.replace("._", ".")))


@click.command("dismantle", short_help="alias of destroy")
@options.ci
@options.debug
@options.deploy_environment
@options.no_color
@options.tags
@options.verbose
@click.pass_context
def dismantle(ctx: click.Context, **kwargs: Any) -> None:
    """Alias of "runway destroy".

    For more information, refer to the output of "runway destroy --help".

    """
    # Re-dispatch to the real command with the already-parsed options.
    LOGGER.verbose("forwarding to destroy...")
    ctx.forward(destroy, **kwargs)
|
7fc755e503bfa9c330fed73be683429a2fb29b3f
|
0820f480ec00bbe3e7e75c38ee2c13adf1ace6ed
|
/buildutils/detect.py
|
7e4e3393091ef1ff08a3f0c7b9b22ca807a54608
|
[
"BSD-3-Clause",
"LGPL-3.0-only",
"LicenseRef-scancode-zeromq-exception-lgpl-3.0"
] |
permissive
|
zeromq/pyzmq
|
9f2258d3bf1211cec9b12b4b0272e9ccd85b2ac5
|
9bee18aa4112bb6351c226c2000c7a858db386ab
|
refs/heads/main
| 2023-08-31T08:20:21.445949
| 2023-08-10T09:28:41
| 2023-08-10T09:28:41
| 788,264
| 2,944
| 656
|
BSD-3-Clause
| 2023-09-05T06:32:54
| 2010-07-21T07:20:37
|
Python
|
UTF-8
|
Python
| false
| false
| 4,825
|
py
|
detect.py
|
"""Detect zmq version"""
# -----------------------------------------------------------------------------
# Copyright (C) PyZMQ Developers
#
# This file is part of pyzmq, copied and adapted from h5py.
# h5py source used under the New BSD license
#
# h5py: <http://code.google.com/p/h5py/>
#
# Distributed under the terms of the New BSD License. The full license is in
# the file LICENSE.BSD, distributed as part of this software.
# -----------------------------------------------------------------------------
import logging
import os
import platform
import shutil
import sys
from .misc import get_compiler, get_output_error
from .msg import info
from .patch import patch_lib_paths
pjoin = os.path.join
# -----------------------------------------------------------------------------
# Utility functions (adapted from h5py: https://www.h5py.org/)
# -----------------------------------------------------------------------------
def test_compilation(cfile, compiler, **compiler_attrs):
    """Test simple compilation with given settings.

    Compiles and links `cfile` into an executable and returns the path of the
    resulting binary (source path without its extension).

    Parameters
    ----------
    cfile : str
        Path of the C source file to compile.
    compiler : CCompiler
        A distutils-style compiler instance.
    **compiler_attrs : dict
        Extra compiler attributes; `extra_compile_args` and
        `extra_link_args` are honored here.
    """
    efile, ext = os.path.splitext(cfile)

    # BUGFIX: the original `cpreargs = lpreargs = []` aliased both names to
    # one list, so `extra_link_args` leaked into the *compile* preargs on
    # platforms that took neither of the special-case branches below.
    cpreargs = []
    lpreargs = []
    if sys.platform == 'darwin':
        # use appropriate arch for compiler
        if platform.architecture()[0] == '32bit':
            if platform.processor() == 'powerpc':
                cpu = 'ppc'
            else:
                cpu = 'i386'
            cpreargs = ['-arch', cpu]
            lpreargs = ['-arch', cpu, '-undefined', 'dynamic_lookup']
        else:
            # allow for missing UB arch, since it will still work:
            lpreargs = ['-undefined', 'dynamic_lookup']
    if sys.platform == 'sunos5':
        if platform.architecture()[0] == '32bit':
            lpreargs = ['-m32']
        else:
            lpreargs = ['-m64']
    extra = compiler_attrs.get('extra_compile_args', None)
    extra_link = compiler_attrs.get('extra_link_args', [])
    lpreargs.extend(extra_link)
    objs = compiler.compile([cfile], extra_preargs=cpreargs, extra_postargs=extra)
    compiler.link_executable(objs, efile, extra_preargs=lpreargs)
    return efile
def compile_and_forget(basedir, src, compiler, **compiler_attrs):
    """Make sure src compiles and links successfully.

    The resulting binary is deleted without being run, and the scratch
    directory `basedir` is removed afterwards even on failure.
    """
    if not os.path.exists(basedir):
        os.makedirs(basedir)
    cfile = pjoin(basedir, os.path.basename(src))
    shutil.copy(src, cfile)
    try:
        # The returned executable path is irrelevant here (dropped the unused
        # `efile =` assignment); we only care that compile + link succeed.
        test_compilation(cfile, compiler=compiler, **compiler_attrs)
    finally:
        shutil.rmtree(basedir)
def detect_zmq(basedir, compiler, **compiler_attrs):
    """Compile, link & execute a test program, in empty directory `basedir`.

    The C compiler will be updated with any keywords given via setattr.

    Parameters
    ----------
    basedir : path
        The location where the test program will be compiled and run
    compiler : str
        The distutils compiler key (e.g. 'unix', 'msvc', or 'mingw32')
    **compiler_attrs : dict
        Any extra compiler attributes, which will be set via ``setattr(cc)``.

    Returns
    -------
    A dict of properties for zmq compilation, with the following two keys:

    vers : tuple
        The ZMQ version as a tuple of ints, e.g. (2,2,0)
    settings : dict
        The compiler options used to compile the test function, e.g. `include_dirs`,
        `library_dirs`, `libs`, etc.
    """
    cfile = pjoin(basedir, 'vers.c')
    # vers.c (shipped next to this module) prints "key: value" lines.
    shutil.copy(pjoin(os.path.dirname(__file__), 'vers.c'), cfile)

    # check if we need to link against Realtime Extensions library
    if sys.platform.startswith('linux'):
        info("Checking for timer_create")
        info(
            "** Errors about missing timer_create are a normal part of this process **"
        )
        if not compiler.has_function('timer_create'):
            compiler_attrs['libraries'].append('rt')
            info(
                "** The above error about timer_create is normal and not a problem! **"
            )
            info("no timer_create, linking librt")

    cc = get_compiler(compiler=compiler, **compiler_attrs)
    efile = test_compilation(cfile, compiler=cc, **compiler_attrs)
    # Patch the binary's library search paths so it can locate libzmq at
    # run time (see buildutils.patch).
    patch_lib_paths(efile, cc.library_dirs)

    # add library dirs to %PATH% for windows
    env = os.environ.copy()
    if sys.platform.startswith("win"):
        env["PATH"] = os.pathsep.join([env["PATH"]] + cc.library_dirs)
    rc, so, se = get_output_error([efile], env=env)
    if rc:
        msg = f"Error running version detection script:\n{so}\n{se}"
        logging.error(msg)
        raise OSError(msg)

    # Parse the "key: value" lines printed by vers.c into typed properties.
    handlers = {'vers': lambda val: tuple(int(v) for v in val.split('.'))}

    props = {}
    for line in (x for x in so.split('\n') if x):
        key, val = line.split(':')
        props[key] = handlers[key](val)

    return props
|
9ba9015d61dd2bd34dc979586d1e685769bbb577
|
6416b746ee71d897789eab1e450000831674dbd0
|
/src/otx/algorithms/common/utils/ir.py
|
946ef9b40c0ab99f77fb8b6f7e35340b58325e58
|
[
"Apache-2.0"
] |
permissive
|
openvinotoolkit/training_extensions
|
c921f83ad52311af96ff45ae0b88d0aecddd855b
|
80454808b38727e358e8b880043eeac0f18152fb
|
refs/heads/develop
| 2023-08-31T06:29:07.229339
| 2023-08-31T01:57:26
| 2023-08-31T01:57:26
| 154,843,614
| 397
| 230
|
Apache-2.0
| 2023-09-14T06:17:01
| 2018-10-26T14:02:29
|
Python
|
UTF-8
|
Python
| false
| false
| 1,137
|
py
|
ir.py
|
"""Collections of IR-related utils for common OTX algorithms."""
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
from pathlib import Path
from typing import Any, Dict, Tuple
from openvino.runtime import Core, serialize
def check_if_quantized(model: Any) -> bool:
    """Checks if OpenVINO model is already quantized."""
    # A quantized model contains at least one FakeQuantize operation.
    return any(op.get_type_name() == "FakeQuantize" for op in model.get_ops())
def embed_ir_model_data(xml_file: str, data_items: Dict[Tuple[str, str], Any]) -> None:
    """Embeds serialized data to IR xml file.

    Args:
        xml_file : a path to IR xml file.
        data_items : a dict with tuple-keyworded serialized objects.
    """
    core = Core()
    model = core.read_model(xml_file)
    for k, data in data_items.items():
        # The tuple key is the rt_info path inside the IR metadata.
        model.set_rt_info(data, list(k))

    # workaround for CVS-110054: serialize to a temporary .xml/.bin pair,
    # move the temporary xml over the original, then delete the regenerated
    # .bin.  NOTE(review): this presumes the pre-existing .bin weights file
    # remains valid for the rewritten xml — confirm.
    tmp_xml_path = Path(Path(xml_file).parent) / "tmp.xml"
    serialize(model, str(tmp_xml_path))
    tmp_xml_path.rename(xml_file)
    Path(str(tmp_xml_path.parent / tmp_xml_path.stem) + ".bin").unlink()
|
35c57d3b1bf82f2cb66e5052def6e8b0fb6bacab
|
bb4e12f9155e7f67cb3974c8405253a0a5ffa516
|
/tests/test_parser.py
|
8c7acfcf37639f4505aba63388fe1f5262eb719b
|
[
"MIT",
"CC-BY-4.0"
] |
permissive
|
mido/mido
|
80c5c05e3eb38051253ccf40caf6ac7b917cc066
|
6970e045f1e66314ee266e8fb16432df75f6e87e
|
refs/heads/main
| 2023-08-15T09:47:38.967908
| 2023-08-07T18:46:25
| 2023-08-07T18:47:14
| 10,889,417
| 937
| 160
|
MIT
| 2023-08-08T07:11:05
| 2013-06-23T18:11:38
|
Python
|
UTF-8
|
Python
| false
| false
| 3,419
|
py
|
test_parser.py
|
# SPDX-FileCopyrightText: 2017 Ole Martin Bjorndalen <ombdalen@gmail.com>
#
# SPDX-License-Identifier: MIT
import random
from pytest import raises
from mido.messages import Message, specs
from mido.parser import Parser, parse, parse_all
def test_parse():
    """A parsed note_on message must equal one built with Message()."""
    expected = Message('note_on', channel=0, note=0x4c, velocity=0x20)
    assert parse(b'\x90\x4c\x20') == expected
def test_parse_stray_data():
    """Stray data bytes (no preceding status byte) yield no messages."""
    messages = parse_all(b'\x20\x30')
    assert messages == []
def test_parse_stray_status_bytes():
    """Status bytes without their data bytes yield no messages."""
    messages = parse_all(b'\x90\x90\xf0')
    assert messages == []
def test_encode_and_parse():
    """Round-tripping a message through bytes() and parse() preserves it."""
    original = Message('note_on')
    round_tripped = parse(original.bytes())
    assert round_tripped == original
def test_feed_byte():
    """feed_byte() accepts ints 0..255 and rejects everything else."""
    p = Parser()
    # Both ends of the valid byte range are accepted.
    for valid in (0, 255):
        p.feed_byte(valid)
    # Non-int input is a TypeError.
    with raises(TypeError):
        p.feed_byte([1, 2, 3])
    # Out-of-range ints are ValueErrors.
    for out_of_range in (-1, 256):
        with raises(ValueError):
            p.feed_byte(out_of_range)
def test_feed():
    """feed() accepts iterables of bytes and rejects non-iterables."""
    p = Parser()
    p.feed([])
    p.feed([1, 2, 3])
    # TODO: add more valid types.
    for bad in (1, None):
        with raises(TypeError):
            p.feed(bad)
    # Calling without an argument is also a TypeError.
    with raises(TypeError):
        p.feed()
def test_parse_random_bytes():
    """Feeding 10000 seeded pseudo-random bytes must not crash the parser."""
    rng = random.Random('a_random_seed')
    p = Parser()
    for _ in range(10000):
        p.feed_byte(rng.randrange(256))
def test_parse_channel():
    """Channel messages must keep the channel nibble of their status byte."""
    for status, channel in ((0x90, 0), (0x92, 2)):
        assert parse([status, 0x00, 0x00]).channel == channel
def test_one_byte_message():
    """A one-byte message (tune request) completes without data bytes."""
    parsed = parse_all([0xf6])
    assert [m.type for m in parsed] == ['tune_request']
def test_undefined_messages():
    """Undefined status bytes and a lone sysex_end are discarded."""
    parsed = parse_all([0xf4, 0xf5, 0xf7, 0xf9, 0xfd])
    assert parsed == []
def test_realtime_inside_sysex():
    """A realtime message interleaved in sysex data is delivered first."""
    parsed = parse_all([0xf0, 0, 0xfb, 0, 0xf7])
    assert [m.type for m in parsed] == ['continue', 'sysex']
def test_undefined_realtime_inside_sysex():
    """Undefined realtime bytes inside sysex are silently dropped."""
    parsed = parse_all([0xf0, 0, 0xf9, 0xfd, 0, 0xf7])
    assert [m.type for m in parsed] == ['sysex']
def test_encode_and_parse_all():
    """Encode and then parse every message type.

    This checks mostly for errors in the parser.
    """
    parser = Parser()
    for type_ in sorted(specs.SPEC_BY_TYPE.keys()):
        msg = Message(type_)
        parser.feed(msg.bytes())
        # BUG FIX: the result of this comparison was previously discarded
        # (`parser.get_message() == msg` with no assert), so the round-trip
        # check never actually ran — and it also left each message queued,
        # making the final emptiness check vacuous.
        assert parser.get_message() == msg
    assert parser.get_message() is None
def test_parser_ascii_text():
    """Plain 7-bit ASCII contains no status bytes, so no messages result."""
    text = b'7 bit ASCII should not produce any messages'
    assert parse_all(text) == []
|
a6699ef0e7889e9278ae1a2b313786b41127baf9
|
c69fc4000c675f4405bd2b00e749fadaf629d3b3
|
/examples/optimization_applications/hyperpara_optimize.py
|
7d414387ea9202e7fbd67757d420042ff5233821
|
[
"MIT"
] |
permissive
|
SimonBlanke/Hyperactive
|
dedf75e77bbd0c3020ce7b8f3d9382b7474f8590
|
23cc6adf36a13a9cac2b544117a41edd889563f0
|
refs/heads/master
| 2023-08-25T01:00:10.666237
| 2023-07-23T15:23:19
| 2023-07-23T15:23:19
| 155,687,643
| 473
| 45
|
MIT
| 2023-06-28T15:34:12
| 2018-11-01T08:53:30
|
Python
|
UTF-8
|
Python
| false
| false
| 1,180
|
py
|
hyperpara_optimize.py
|
"""
This example shows the original purpose of Hyperactive.
You can search for any number of hyperparameters and Hyperactive
will return the best one after the optimization run.
"""
import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.datasets import load_wine
from hyperactive import Hyperactive
data = load_wine()
X, y = data.data, data.target


def model(opt):
    """Hyperactive objective: mean 4-fold CV accuracy of a gradient-boosted
    classifier built from the sampled hyperparameters in ``opt``."""
    gbr = GradientBoostingClassifier(
        n_estimators=opt["n_estimators"],
        max_depth=opt["max_depth"],
        min_samples_split=opt["min_samples_split"],
        min_samples_leaf=opt["min_samples_leaf"],
        criterion=opt["criterion"],
        # BUG FIX: "subsample" was declared in the search space but never
        # passed to the estimator, so that dimension was searched for nothing.
        subsample=opt["subsample"],
    )
    scores = cross_val_score(gbr, X, y, cv=4)
    return scores.mean()


search_space = {
    "n_estimators": list(range(10, 150, 5)),
    "max_depth": list(range(2, 12)),
    "min_samples_split": list(range(2, 25)),
    "min_samples_leaf": list(range(1, 25)),
    # NOTE(review): "absolute_error" was removed — it is not an accepted
    # `criterion` for GradientBoostingClassifier in current scikit-learn
    # (only "friedman_mse" and "squared_error" are valid) and would raise.
    "criterion": ["friedman_mse", "squared_error"],
    # BUG FIX: subsample must lie in (0.0, 1.0]; the previous range
    # (np.arange(0.1, 3, 0.1)) went up to 2.9, which scikit-learn rejects.
    "subsample": [round(0.1 * i, 1) for i in range(1, 11)],
}

hyper = Hyperactive()
hyper.add_search(model, search_space, n_iter=40)
hyper.run()
|
4089c9bc559cab6d1dcd3006ace69dcb1de14e16
|
8358c8d86600703663eb8a8f30493c20704cf586
|
/test/core/gen2/test_progress.py
|
66260d97a149a51e0c259ce5e9433ccbc284792a
|
[
"MIT"
] |
permissive
|
dcs4cop/xcube
|
612ffeb416dfee4e6a32677a719eab1a26aee990
|
a5a4da14bdc2dba80e0dd7d89b221fb30d148b77
|
refs/heads/master
| 2023-08-17T06:36:57.207806
| 2023-08-08T15:16:09
| 2023-08-08T15:16:09
| 130,693,090
| 149
| 21
|
MIT
| 2023-09-14T07:38:55
| 2018-04-23T12:27:35
|
Python
|
UTF-8
|
Python
| false
| false
| 6,392
|
py
|
test_progress.py
|
import unittest
from unittest.mock import patch
import requests_mock
from xcube.core.gen2.request import CubeGeneratorRequest
from xcube.core.gen2.progress import ApiProgressCallbackObserver
from xcube.core.gen2.progress import TerminalProgressCallbackObserver
from xcube.core.gen2.progress import _ThreadedProgressObserver
from xcube.util.progress import ProgressState
class TestThreadedProgressObservers(unittest.TestCase):
    """Tests for the cube-generator progress observers: the API-posting
    variant (``ApiProgressCallbackObserver``) and the terminal variant."""

    # Minimal but complete generator request; only callback_config is
    # consumed by the observers under test.
    REQUEST = dict(input_config=dict(store_id='sentinelhub',
                                     data_id='S2L2A',
                                     ),
                   cube_config=dict(variable_names=['B01', 'B02', 'B03'],
                                    crs='WGS84',
                                    bbox=[12.2, 52.1, 13.9, 54.8],
                                    spatial_res=0.05,
                                    time_range=['2018-01-01', None],
                                    time_period='4D'),
                   output_config=dict(store_id='memory',
                                      data_id='CHL'),
                   callback_config=dict(api_uri='https://xcube-gen.test/api/v1/jobs/tomtom/iamajob/callback',
                                        access_token='dfsvdfsv'))

    def setUp(self) -> None:
        # Parse the raw dict once per test so each test gets a fresh,
        # mutable callback_config.
        self._request = CubeGeneratorRequest.from_dict(self.REQUEST)
        self._callback_config = self._request.callback_config

    @requests_mock.Mocker()
    def test_api_delegate(self, m):
        """on_begin() PUTs to the API and fails fast on incomplete config."""
        m.put('https://xcube-gen.test/api/v1/jobs/tomtom/iamajob/callback', json={})
        progress_state = ProgressState(label='test', total_work=0., super_work=10.)
        observer = ApiProgressCallbackObserver(self._callback_config)
        observer.on_begin(state_stack=[progress_state])
        # Clearing api_uri must raise ...
        with self.assertRaises(ValueError) as e:
            self._callback_config.api_uri = None
            observer = ApiProgressCallbackObserver(self._callback_config)
            observer.on_begin(state_stack=[progress_state])
        self.assertEqual('Both, api_uri and access_token must be given.', str(e.exception))
        # ... and so must clearing the access token.
        with self.assertRaises(ValueError) as e:
            self._callback_config.access_token = None
            observer = ApiProgressCallbackObserver(self._callback_config)
            observer.on_begin(state_stack=[progress_state])
        self.assertEqual('Both, api_uri and access_token must be given.', str(e.exception))

    @requests_mock.Mocker()
    def test_callback(self, m):
        """callback() serializes sender + state and rejects empty stacks."""
        expected_callback = {
            "sender": "on_begin",
            "state": {
                "label": "Test",
                "total_work": 100,
                "error": False,
                "progress": 0.0,
                "elapsed": 3.,
            }
        }
        m.put('https://xcube-gen.test/api/v1/jobs/tomtom/iamajob/callback', json=expected_callback)
        state_stack = ProgressState('Test', 100, 100)
        observer = ApiProgressCallbackObserver(self._callback_config)
        res = observer.callback("on_begin", 3., [state_stack])
        # The body actually sent to the API must match the payload above.
        self.assertDictEqual(expected_callback, res.request.json())
        with self.assertRaises(ValueError) as e:
            observer.callback("on_begin", 3., [])
        self.assertEqual("ProgressStates must be given", str(e.exception))
        # The terminal observer returns the rendered progress-bar line.
        observer = TerminalProgressCallbackObserver()
        res = observer.callback("on_begin", 3., [state_stack], False)
        self.assertIn('Test', res)
        self.assertIn('0% Completed', res)

    def test_threaded_progress_on_begin(self):
        """The observer timer must start only for a root (depth-1) state."""
        _mock_patch = patch('xcube.core.gen2.progress._ThreadedProgressObserver._start_timer')
        _mock = _mock_patch.start()
        observer = TerminalProgressCallbackObserver()
        state_stack = ProgressState('Test', 100, 100)
        observer.on_begin([state_stack])
        self.assertTrue(_mock.called)
        _mock.stop()
        _mock = _mock_patch.start()
        # A nested state stack (depth > 1) must not start the timer again.
        state_stack1 = ProgressState('Test', 100, 100)
        state_stack2 = ProgressState('Test', 100, 100)
        observer.on_begin([state_stack1, state_stack2])
        self.assertFalse(_mock.called)

    def test_threaded_progress_on_end(self):
        """The observer timer must stop only when the root state ends."""
        _mock_patch = patch('xcube.core.gen2.progress._ThreadedProgressObserver._stop_timer')
        _mock = _mock_patch.start()
        observer = TerminalProgressCallbackObserver()
        observer._running = True
        state_stack = ProgressState('Test', 100, 100)
        observer.on_end([state_stack])
        self.assertTrue(_mock.called)
        _mock.stop()
        _mock = _mock_patch.start()
        # A nested state stack (depth > 1) must not stop the timer.
        state_stack1 = ProgressState('Test', 100, 100)
        state_stack2 = ProgressState('Test', 100, 100)
        observer.on_end([state_stack1, state_stack2])
        self.assertFalse(_mock.called)

    # def test_running_progress(self):
    #     """
    #     Uncomment the lines below if you want to run and test the terminal progress bar output.
    #     """
    #     from time import sleep
    #     from xcube.util.progress import observe_progress
    #
    #     TerminalProgressCallbackObserver().activate()
    #
    #     with observe_progress('Generating cube', 100) as cm:
    #         dt = 1
    #         for i in range(1, 80):
    #             cm.will_work(1)
    #             sleep(dt)
    #             cm.worked(1)
    #
    #         cm.will_work(20)
    #         sleep(dt)
    #         cm.worked(20)
class TestThreadedProgressObserver(unittest.TestCase):
    """Argument-validation tests for ``_ThreadedProgressObserver``."""

    def test_threaded(self):
        # The constructor must reject negative minimum / dt values.
        for kwargs, message in (
                (dict(minimum=-1, dt=0), "The timer's minimum must be >=0"),
                (dict(minimum=0, dt=-1), "The timer's time step must be >=0"),
        ):
            with self.assertRaises(ValueError) as ctx:
                _ThreadedProgressObserver(**kwargs)
            self.assertEqual(message, str(ctx.exception))

        # Each lifecycle callback must reject an empty state stack.
        observer = _ThreadedProgressObserver(minimum=0, dt=0)
        for callback in (observer.on_begin, observer.on_update, observer.on_end):
            with self.assertRaises(ValueError) as ctx:
                callback(state_stack=[])
            self.assertEqual('state_stack must be given', str(ctx.exception))
|
f1fbfb2fe659387d0490bcf0a88b52b5cbe03781
|
8a3971656297d19c58d21721ad6d9310a81f21bb
|
/tensorflow_text/python/ops/greedy_constrained_sequence_op.py
|
693095f2a833d2b7905ac32efdb21878ec230186
|
[
"Apache-2.0"
] |
permissive
|
tensorflow/text
|
6646b72cd8f9f1935814febb638707470feaf10c
|
fd0cdcf0756b521ff538e45bca14074013335f6e
|
refs/heads/master
| 2023-08-31T11:45:49.547133
| 2023-08-28T19:36:21
| 2023-08-28T19:37:03
| 189,305,903
| 1,211
| 319
|
Apache-2.0
| 2023-09-12T18:34:15
| 2019-05-29T22:10:03
|
C++
|
UTF-8
|
Python
| false
| false
| 7,939
|
py
|
greedy_constrained_sequence_op.py
|
# coding=utf-8
# Copyright 2023 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bulk Greedy Constrained Sequence.
Constrains a set of predictions based on a set of legal transitions and/or a
set of transition weights, returning the legal sequence that maximizes the
product of the state scores and the transition weights at each step.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.framework import load_library
from tensorflow.python.platform import resource_loader
gen_constrained_sequence_op = load_library.load_op_library(resource_loader.get_path_to_datafile('_constrained_sequence_op.so'))
def greedy_constrained_sequence(scores,
                                sequence_length=None,
                                allowed_transitions=None,
                                transition_weights=None,
                                use_log_space=False,
                                use_start_and_end_states=False,
                                name=None):
  """Performs greedy constrained sequence on a batch of examples.

  Constrains a set of predictions based on a set of legal transitions
  and/or a set of transition weights, returning the legal sequence that
  maximizes the product or sum of the state scores and the transition weights
  at each step. If `use_log_space` is true, the sum is used; if false, the
  product is used.

  This op also takes a parameter `use_start_and_end_states`, which when true
  will add an implicit start and end state to each sequence. These implicit
  states allow the user to specify additional weights and permitted transitions
  to start and end a sequence (so, for instance, if you wanted to forbid your
  output from ending in a certain set of states you could do so).

  Inputs to this op can take one of three forms: a single TensorFlow tensor
  of scores with no sequence lengths, a TensorFlow tensor of scores along
  with a TensorFlow tensor of sequence lengths, or a RaggedTensor. If only the
  scores tensor is passed, this op will assume that the sequence lengths are
  equal to the size of the tensor (and so use all the data provided). If a
  scores tensor and sequence_lengths tensor is provided, the op will only
  use the data in the scores tensor as specified by the sequence_lengths tensor.
  Finally, if a RaggedTensor is provided, the sequence_lengths will be ignored
  and the variable length sequences in the RaggedTensor will be used.

  Args:
    scores: `<float32> [batch_size, num_steps, |num_states|]`
      A tensor of scores, where `scores[b, t, s]` is the predicted score for
      transitioning to state `s` at step `t` for batch `b`. The |num_states|
      dimension must correspond to the num_states attribute for this op. This
      input may be ragged; if it is ragged, the ragged tensor should have the
      same structure [b, t, s] and only axis 1 should be ragged.
    sequence_length: `<{int32, int64}>[batch_size]`
      A rank-1 tensor representing the length of the output sequence. If None,
      and the 'scores' input is not ragged, sequence lengths will be assumed
      to be the length of the score tensor.
    allowed_transitions:
      if use_start_and_end_states is TRUE:
        `<bool>[num_states+1, num_states+1]`
      if use_start_and_end_states is FALSE:
        `<bool>[num_states, num_states]`
      A rank-2 tensor representing allowed transitions.
      - allowed_transitions[i][j] is true if the transition from state i to
          state j is allowed for i and j in 0...(num_states).
      - allowed_transitions[num_states][num_states] is ignored.
      If use_start_and_end_states is TRUE:
        - allowed_transitions[num_states][j] is true if the sequence is allowed
            to start from state j.
        - allowed_transitions[i][num_states] is true if the sequence is allowed
            to end on state i.
      Default - An empty tensor. This allows all sequence states to transition
        to all other sequence states.
    transition_weights:
      if use_start_and_end_states is TRUE:
        `<float32>[num_states+1, num_states+1]`
      if use_start_and_end_states is FALSE:
        `<float32>[num_states, num_states]`
      A rank-2 tensor representing transition weights.
      - transition_weights[i][j] is the coefficient that a candidate transition
          score will be multiplied by if that transition is from state i to
          state j.
      - transition_weights[num_states][num_states] is ignored.
      If use_start_and_end_states is TRUE:
        - transition_weights[num_states][j] is the coefficient that will be used
            if the transition starts with state j.
        - transition_weights[i][num_states] is the coefficient that will be used
            if the final state in the sequence is state i.
      Default - An empty tensor. This assigns a weight of 1.0 to all
        transitions.
    use_log_space: Whether to use log space for the calculation. If false,
      calculations will be done in exp-space.
    use_start_and_end_states: If True, sequences will have an implicit start
      and end state added.
    name: The name scope within which this op should be constructed.

  Returns:
    An <int32>[batch_size, (num_steps)] ragged tensor containing the appropriate
    sequence of transitions. If a sequence is impossible, the value of the
    RaggedTensor for that and all following transitions in that sequence shall
    be '-1'.
  """
  with ops.name_scope(
      name, "BulkViterbiConstrainedSequence",
      [scores, sequence_length, allowed_transitions, transition_weights]):
    # NOTE(review): the scope name says "Viterbi" although this is the greedy
    # variant; kept as-is since changing it would rename graph ops.
    if allowed_transitions is None:
      allowed_transitions = []
    if transition_weights is None:
      transition_weights = []
    score_data = ragged_tensor.convert_to_tensor_or_ragged_tensor(
        scores, name="score_data")
    if isinstance(score_data, ragged_tensor.RaggedTensor):
      # The kernel only accepts dense input, so densify and recover the
      # true per-row lengths from the ragged structure.
      # TODO(momernick): Extend the generated op to support ragged tensors.
      dense_scores = score_data.to_tensor(default_value=0)
      sequence_lengths = score_data.row_lengths(axis=1)
    else:
      dense_scores = score_data
      # In this case, the core input was a dense tensor.
      if sequence_length is not None:
        sequence_lengths = ops.convert_to_tensor(sequence_length)
      else:
        # No lengths given: assume every batch item uses all time steps.
        batch_size = array_ops.shape(dense_scores)[0]
        dense_length = array_ops.shape(dense_scores)[-2]
        sequence_lengths = array_ops.ones([batch_size],
                                          dtype=dtypes.int32) * dense_length
    transition_weights = ops.convert_to_tensor(transition_weights)
    allowed_transitions = ops.convert_to_tensor(
        allowed_transitions, dtype=dtypes.bool)
    # use_viterbi=False selects the greedy decoding path in the kernel.
    output, output_splits = gen_constrained_sequence_op.constrained_sequence(
        scores=dense_scores,
        sequence_lengths=sequence_lengths,
        allowed_transitions=allowed_transitions,
        transition_weights=transition_weights,
        use_viterbi=False,
        use_log_space=use_log_space,
        use_start_and_end_states=use_start_and_end_states)
    return ragged_tensor.RaggedTensor.from_row_splits(
        values=output, row_splits=output_splits)
|
e444ee53d7241ce980d2f63c77c732b5acd933e9
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/AlipayOpenMiniMiniappServiceconfigModifyModel.py
|
984b79dac66a791f7615d8a40cef5a8be7c2d044
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,491
|
py
|
AlipayOpenMiniMiniappServiceconfigModifyModel.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOpenMiniMiniappServiceconfigModifyModel(object):
    """Request model with two optional fields: home_open and service_config."""

    # Field names shared by dict (de)serialization below.
    _FIELDS = ('home_open', 'service_config')

    def __init__(self):
        self._home_open = None
        self._service_config = None

    @property
    def home_open(self):
        return self._home_open

    @home_open.setter
    def home_open(self, value):
        self._home_open = value

    @property
    def service_config(self):
        return self._service_config

    @service_config.setter
    def service_config(self, value):
        self._service_config = value

    def to_alipay_dict(self):
        """Serialize truthy fields into a plain dict, delegating to nested
        objects' own to_alipay_dict() when available."""
        params = {}
        for field in self._FIELDS:
            value = getattr(self, field)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a response dict; returns None for falsy input."""
        if not d:
            return None
        o = AlipayOpenMiniMiniappServiceconfigModifyModel()
        for field in ('home_open', 'service_config'):
            if field in d:
                setattr(o, field, d[field])
        return o
|
7bf203ee69d8a68ac7e85e0aebbe7dd5f98575c0
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/TemplateRightsContentDTO.py
|
1210ba97e74e1c950ffdda456542bdee5da12280
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,723
|
py
|
TemplateRightsContentDTO.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class TemplateRightsContentDTO(object):
    """DTO with three optional fields: detail, logo_id and title."""

    # Field names shared by dict (de)serialization below.
    _FIELDS = ('detail', 'logo_id', 'title')

    def __init__(self):
        self._detail = None
        self._logo_id = None
        self._title = None

    @property
    def detail(self):
        return self._detail

    @detail.setter
    def detail(self, value):
        self._detail = value

    @property
    def logo_id(self):
        return self._logo_id

    @logo_id.setter
    def logo_id(self, value):
        self._logo_id = value

    @property
    def title(self):
        return self._title

    @title.setter
    def title(self, value):
        self._title = value

    def to_alipay_dict(self):
        """Serialize truthy fields into a plain dict, delegating to nested
        objects' own to_alipay_dict() when available."""
        params = {}
        for field in self._FIELDS:
            value = getattr(self, field)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a DTO from a response dict; returns None for falsy input."""
        if not d:
            return None
        o = TemplateRightsContentDTO()
        for field in ('detail', 'logo_id', 'title'):
            if field in d:
                setattr(o, field, d[field])
        return o
|
867943ca617e323ee8d5a5765628599d350438e1
|
7bea5adf7d6284fbad0131d665e957d58adfe7c7
|
/allauth/socialaccount/providers/evernote/tests.py
|
8c96be38e384c9beb31fa4ea6f8698303302bce9
|
[
"MIT"
] |
permissive
|
pennersr/django-allauth
|
50c9e71c3666785368e92ed9e19ea0f6a5438cd2
|
6b8911a5ebbabda0d446f2743bd4d00d250ed500
|
refs/heads/main
| 2023-09-03T16:48:10.988418
| 2023-09-02T08:00:53
| 2023-09-02T08:00:53
| 976,994
| 7,719
| 3,481
|
MIT
| 2023-09-14T15:06:57
| 2010-10-10T20:10:52
|
Python
|
UTF-8
|
Python
| false
| false
| 888
|
py
|
tests.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from allauth.socialaccount.tests import OAuthTestsMixin
from allauth.tests import MockedResponse, TestCase
from .provider import EvernoteProvider
class EvernoteTests(OAuthTestsMixin, TestCase):
    """OAuth1 login-flow tests for the Evernote provider."""

    provider_id = EvernoteProvider.id

    def get_mocked_response(self):
        # No extra profile-endpoint responses are needed for this provider.
        return []

    def get_access_token_response(self):
        # Canned Evernote access-token response; the payload is
        # form-encoded, hence the text/plain content type.
        token_payload = "oauth_token=S%3Ds1%3AU%3D9876%3AE%3D999999b0c50%3AC%3D14c1f89dd18%3AP%3D81%3AA%3Dpennersr%3AV%3D2%3AH%3Ddeadf00dd2d6aba7b519923987b4bf77&oauth_token_secret=&edam_shard=s1&edam_userId=591969&edam_expires=1457994271824&edam_noteStoreUrl=https%3A%2F%2Fsandbox.evernote.com%2Fshard%2Fs1%2Fnotestore&edam_webApiUrlPrefix=https%3A%2F%2Fsandbox.evernote.com%2Fshard%2Fs1%2F"  # noqa
        return MockedResponse(
            200,
            token_payload,
            {"content-type": "text/plain"},
        )
|
c391301a331c8070160543046171619543564127
|
6c8305ea1df9687df1c0d2b0ace56733516c6322
|
/readthedocs/api/v2/migrations/0001_initial.py
|
c3a95da5aae3d76fdc9c25da2fc8b2996c7dd4d6
|
[
"MIT"
] |
permissive
|
readthedocs/readthedocs.org
|
9806083aa744c2308267919480a692e1e003e45d
|
bf88ce6d1085d922322a5fadce63a22c5544c830
|
refs/heads/main
| 2023-09-05T20:22:34.281891
| 2023-09-05T12:41:52
| 2023-09-05T12:41:52
| 841,835
| 2,894
| 1,509
|
MIT
| 2023-09-14T20:36:00
| 2010-08-16T19:18:06
|
Python
|
UTF-8
|
Python
| false
| false
| 2,577
|
py
|
0001_initial.py
|
# Generated by Django 3.2.18 on 2023-05-31 20:40
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial migration for this app: creates the BuildAPIKey model.

    initial = True

    dependencies = [
        # Needs the Project model state as of this projects-app migration.
        ("projects", "0100_project_readthedocs_yaml_path"),
    ]

    operations = [
        migrations.CreateModel(
            name="BuildAPIKey",
            fields=[
                (
                    # Non-auto primary key; populated by application code
                    # (editable=False) rather than the database.
                    "id",
                    models.CharField(
                        editable=False,
                        max_length=150,
                        primary_key=True,
                        serialize=False,
                        unique=True,
                    ),
                ),
                # Short unique prefix plus a hashed key — presumably so the
                # full key is never stored in plaintext; TODO confirm against
                # the key-generation code.
                ("prefix", models.CharField(editable=False, max_length=8, unique=True)),
                ("hashed_key", models.CharField(editable=False, max_length=150)),
                ("created", models.DateTimeField(auto_now_add=True, db_index=True)),
                (
                    "name",
                    models.CharField(
                        default=None,
                        help_text="A free-form name for the API key. Need not be unique. 50 characters max.",
                        max_length=50,
                    ),
                ),
                (
                    "revoked",
                    models.BooleanField(
                        blank=True,
                        default=False,
                        help_text="If the API key is revoked, clients cannot use it anymore. (This cannot be undone.)",
                    ),
                ),
                (
                    "expiry_date",
                    models.DateTimeField(
                        blank=True,
                        help_text="Once API key expires, clients cannot use it anymore.",
                        null=True,
                        verbose_name="Expires",
                    ),
                ),
                (
                    # Keys are deleted together with their project (CASCADE).
                    "project",
                    models.ForeignKey(
                        help_text="Project that this API key grants access to",
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="build_api_keys",
                        to="projects.project",
                    ),
                ),
            ],
            options={
                "verbose_name": "Build API key",
                "verbose_name_plural": "Build API keys",
                "ordering": ("-created",),
                "abstract": False,
            },
        ),
    ]
|
1f5e4d06e8bd70a681cf2467b51b2b2ff228d1cd
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/PhysicsTools/Heppy/python/physicsobjects/Particle.py
|
4edc6640debf3cdad5ce0ed41e411e5a21183571
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143
| 2023-08-22T20:22:40
| 2023-08-22T20:22:40
| 10,969,551
| 1,006
| 3,696
|
Apache-2.0
| 2023-09-14T19:14:28
| 2013-06-26T14:09:07
|
C++
|
UTF-8
|
Python
| false
| false
| 452
|
py
|
Particle.py
|
class Particle(object):
    """Mixin giving physics objects a compact, aligned one-line repr.

    Subclasses are expected to provide pdgId(), pt(), eta(), phi() and
    mass() accessors.
    """

    def __str__(self):
        # Fixed-width fields keep columns aligned when printing particle lists.
        return (f'{self.__class__.__name__} : {self.pdgId():>3}, '
                f'pt={self.pt():5.1f}, eta={self.eta():5.2f}, '
                f'phi={self.phi():5.2f}, mass={self.mass():5.2f}')
|
03171bbcc496a0da916f5e4b585afe200d3a0261
|
45ce76f126ee8985e834d438413cccef437e0fa0
|
/discord/webhook/sync.py
|
7da6ada70818dc97eaaade33bbf73e6bb0580e15
|
[
"MIT"
] |
permissive
|
Rapptz/discord.py
|
929437812bc9f59d78c2c43c6a9f2c4bdb3f8146
|
f74eb14d722aa1bc90f9d0478199250d2eb4e81b
|
refs/heads/master
| 2023-08-29T22:04:47.095128
| 2023-08-25T19:43:04
| 2023-08-25T19:43:04
| 41,179,827
| 15,894
| 8,049
|
MIT
| 2023-09-14T18:57:12
| 2015-08-21T22:19:08
|
Python
|
UTF-8
|
Python
| false
| false
| 42,586
|
py
|
sync.py
|
"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
# If you're wondering why this is essentially copy pasted from the async_.py
# file, then it's due to needing two separate types to make the typing shenanigans
# a bit easier to write. It's an unfortunate design. Originally, these types were
# merged and an adapter was used to differentiate between the async and sync versions.
# However, this proved to be difficult to provide typings for, so here we are.
from __future__ import annotations
import threading
import logging
import json
import time
import re
from urllib.parse import quote as urlquote
from typing import Any, Dict, List, Literal, Optional, TYPE_CHECKING, Sequence, Tuple, Union, TypeVar, Type, overload
import weakref
from .. import utils
from ..errors import HTTPException, Forbidden, NotFound, DiscordServerError
from ..message import Message, MessageFlags
from ..http import Route, handle_message_parameters
from ..channel import PartialMessageable
from .async_ import BaseWebhook, _WebhookState
__all__ = (
'SyncWebhook',
'SyncWebhookMessage',
)
_log = logging.getLogger(__name__)
if TYPE_CHECKING:
from typing_extensions import Self
from types import TracebackType
from ..file import File
from ..embeds import Embed
from ..mentions import AllowedMentions
from ..message import Attachment
from ..abc import Snowflake
from ..state import ConnectionState
from ..types.webhook import (
Webhook as WebhookPayload,
)
from ..types.message import (
Message as MessagePayload,
)
BE = TypeVar('BE', bound=BaseException)
try:
from requests import Session, Response
except ModuleNotFoundError:
pass
MISSING: Any = utils.MISSING
class DeferredLock:
    """A context-managed lock that can postpone its release.

    ``delay_by()`` records a sleep duration; ``__exit__`` sleeps for it
    (if set and non-zero) before releasing the underlying lock — used to
    hold a rate-limit bucket closed for the required back-off time.
    """

    def __init__(self, lock: threading.Lock) -> None:
        self.lock: threading.Lock = lock
        self.delta: Optional[float] = None

    def __enter__(self) -> Self:
        self.lock.acquire()
        return self

    def delay_by(self, delta: float) -> None:
        """Request that the lock stays held for *delta* more seconds on exit."""
        self.delta = delta

    def __exit__(
        self,
        exc_type: Optional[Type[BE]],
        exc: Optional[BE],
        traceback: Optional[TracebackType],
    ) -> None:
        pending = self.delta
        if pending:
            time.sleep(pending)
        self.lock.release()
class WebhookAdapter:
def __init__(self):
self._locks: weakref.WeakValueDictionary[Any, threading.Lock] = weakref.WeakValueDictionary()
def request(
self,
route: Route,
session: Session,
*,
payload: Optional[Dict[str, Any]] = None,
multipart: Optional[List[Dict[str, Any]]] = None,
files: Optional[Sequence[File]] = None,
reason: Optional[str] = None,
auth_token: Optional[str] = None,
params: Optional[Dict[str, Any]] = None,
) -> Any:
headers: Dict[str, str] = {}
files = files or []
to_send: Optional[Union[str, bytes, Dict[str, Any]]] = None
bucket = (route.webhook_id, route.webhook_token)
try:
lock = self._locks[bucket]
except KeyError:
self._locks[bucket] = lock = threading.Lock()
if payload is not None:
headers['Content-Type'] = 'application/json; charset=utf-8'
to_send = utils._to_json(payload).encode('utf-8')
if auth_token is not None:
headers['Authorization'] = f'Bot {auth_token}'
if reason is not None:
headers['X-Audit-Log-Reason'] = urlquote(reason, safe='/ ')
response: Optional[Response] = None
data: Optional[Union[Dict[str, Any], str]] = None
file_data: Optional[Dict[str, Any]] = None
method = route.method
url = route.url
webhook_id = route.webhook_id
with DeferredLock(lock) as lock:
for attempt in range(5):
for file in files:
file.reset(seek=attempt)
if multipart:
file_data = {}
for p in multipart:
name = p['name']
if name == 'payload_json':
to_send = {'payload_json': p['value']}
else:
file_data[name] = (p['filename'], p['value'], p['content_type'])
try:
with session.request(
method, url, data=to_send, files=file_data, headers=headers, params=params
) as response:
_log.debug(
'Webhook ID %s with %s %s has returned status code %s',
webhook_id,
method,
url,
response.status_code,
)
response.encoding = 'utf-8'
# Compatibility with aiohttp
response.status = response.status_code # type: ignore
data = response.text or None
try:
if data and response.headers['Content-Type'] == 'application/json':
data = json.loads(data)
except KeyError:
pass
remaining = response.headers.get('X-Ratelimit-Remaining')
if remaining == '0' and response.status_code != 429:
delta = utils._parse_ratelimit_header(response)
_log.debug(
'Webhook ID %s has exhausted its rate limit bucket (retry: %s).',
webhook_id,
delta,
)
lock.delay_by(delta)
if 300 > response.status_code >= 200:
return data
if response.status_code == 429:
if not response.headers.get('Via'):
raise HTTPException(response, data)
fmt = 'Webhook ID %s is rate limited. Retrying in %.2f seconds.'
retry_after: float = data['retry_after'] # type: ignore
_log.warning(fmt, webhook_id, retry_after)
time.sleep(retry_after)
continue
if response.status_code >= 500:
time.sleep(1 + attempt * 2)
continue
if response.status_code == 403:
raise Forbidden(response, data)
elif response.status_code == 404:
raise NotFound(response, data)
else:
raise HTTPException(response, data)
except OSError as e:
if attempt < 4 and e.errno in (54, 10054):
time.sleep(1 + attempt * 2)
continue
raise
if response:
if response.status_code >= 500:
raise DiscordServerError(response, data)
raise HTTPException(response, data)
raise RuntimeError('Unreachable code in HTTP handling.')
def delete_webhook(
    self,
    webhook_id: int,
    *,
    token: Optional[str] = None,
    session: Session,
    reason: Optional[str] = None,
) -> None:
    """Delete a webhook by ID using bot-token authentication."""
    endpoint = Route('DELETE', '/webhooks/{webhook_id}', webhook_id=webhook_id)
    return self.request(endpoint, session, auth_token=token, reason=reason)
def delete_webhook_with_token(
    self,
    webhook_id: int,
    token: str,
    *,
    session: Session,
    reason: Optional[str] = None,
) -> None:
    """Delete a webhook using its own token (no bot authentication)."""
    endpoint = Route('DELETE', '/webhooks/{webhook_id}/{webhook_token}', webhook_id=webhook_id, webhook_token=token)
    return self.request(endpoint, session, reason=reason)
def edit_webhook(
    self,
    webhook_id: int,
    token: str,
    payload: Dict[str, Any],
    *,
    session: Session,
    reason: Optional[str] = None,
) -> WebhookPayload:
    """Patch a webhook's settings using bot-token authentication."""
    endpoint = Route('PATCH', '/webhooks/{webhook_id}', webhook_id=webhook_id)
    return self.request(endpoint, session, auth_token=token, payload=payload, reason=reason)
def edit_webhook_with_token(
    self,
    webhook_id: int,
    token: str,
    payload: Dict[str, Any],
    *,
    session: Session,
    reason: Optional[str] = None,
) -> WebhookPayload:
    """Patch a webhook's settings using the webhook's own token."""
    endpoint = Route('PATCH', '/webhooks/{webhook_id}/{webhook_token}', webhook_id=webhook_id, webhook_token=token)
    return self.request(endpoint, session, payload=payload, reason=reason)
def execute_webhook(
    self,
    webhook_id: int,
    token: str,
    *,
    session: Session,
    payload: Optional[Dict[str, Any]] = None,
    multipart: Optional[List[Dict[str, Any]]] = None,
    files: Optional[Sequence[File]] = None,
    thread_id: Optional[int] = None,
    wait: bool = False,
) -> MessagePayload:
    """Send a message through the webhook; ``wait`` asks Discord to return the created message."""
    query: Dict[str, Any] = {'wait': int(wait)}
    # Truthiness check kept deliberately: a falsy thread_id is simply omitted.
    if thread_id:
        query['thread_id'] = thread_id
    endpoint = Route('POST', '/webhooks/{webhook_id}/{webhook_token}', webhook_id=webhook_id, webhook_token=token)
    return self.request(endpoint, session, payload=payload, multipart=multipart, files=files, params=query)
def get_webhook_message(
    self,
    webhook_id: int,
    token: str,
    message_id: int,
    *,
    session: Session,
    thread_id: Optional[int] = None,
) -> MessagePayload:
    """Fetch a single message previously sent by this webhook."""
    endpoint = Route(
        'GET',
        '/webhooks/{webhook_id}/{webhook_token}/messages/{message_id}',
        webhook_id=webhook_id,
        webhook_token=token,
        message_id=message_id,
    )
    if thread_id is None:
        query = None
    else:
        query = {'thread_id': thread_id}
    return self.request(endpoint, session, params=query)
def edit_webhook_message(
    self,
    webhook_id: int,
    token: str,
    message_id: int,
    *,
    session: Session,
    payload: Optional[Dict[str, Any]] = None,
    multipart: Optional[List[Dict[str, Any]]] = None,
    files: Optional[Sequence[File]] = None,
    thread_id: Optional[int] = None,
) -> MessagePayload:
    """Edit a message previously sent by this webhook."""
    endpoint = Route(
        'PATCH',
        '/webhooks/{webhook_id}/{webhook_token}/messages/{message_id}',
        webhook_id=webhook_id,
        webhook_token=token,
        message_id=message_id,
    )
    if thread_id is None:
        query = None
    else:
        query = {'thread_id': thread_id}
    return self.request(endpoint, session, payload=payload, multipart=multipart, files=files, params=query)
def delete_webhook_message(
    self,
    webhook_id: int,
    token: str,
    message_id: int,
    *,
    session: Session,
    thread_id: Optional[int] = None,
) -> None:
    """Delete a message previously sent by this webhook."""
    endpoint = Route(
        'DELETE',
        '/webhooks/{webhook_id}/{webhook_token}/messages/{message_id}',
        webhook_id=webhook_id,
        webhook_token=token,
        message_id=message_id,
    )
    if thread_id is None:
        query = None
    else:
        query = {'thread_id': thread_id}
    return self.request(endpoint, session, params=query)
def fetch_webhook(
    self,
    webhook_id: int,
    token: str,
    *,
    session: Session,
) -> WebhookPayload:
    """Fetch a webhook by ID using bot-token authentication."""
    endpoint = Route('GET', '/webhooks/{webhook_id}', webhook_id=webhook_id)
    return self.request(endpoint, session=session, auth_token=token)
def fetch_webhook_with_token(
    self,
    webhook_id: int,
    token: str,
    *,
    session: Session,
) -> WebhookPayload:
    """Fetch a webhook using its own token (no bot authentication)."""
    endpoint = Route('GET', '/webhooks/{webhook_id}/{webhook_token}', webhook_id=webhook_id, webhook_token=token)
    return self.request(endpoint, session=session)
class _WebhookContext(threading.local):
    # Thread-local holder: each thread lazily receives its own adapter
    # reference via _get_webhook_adapter(), avoiding cross-thread sharing.
    adapter: Optional[WebhookAdapter] = None


# Module-wide thread-local store; ``adapter`` starts as None in every thread.
_context = _WebhookContext()
def _get_webhook_adapter() -> WebhookAdapter:
    """Return the calling thread's WebhookAdapter, creating it on first use."""
    adapter = _context.adapter
    if adapter is None:
        adapter = _context.adapter = WebhookAdapter()
    return adapter
class SyncWebhookMessage(Message):
    """A message that was sent by your :class:`SyncWebhook`.

    This subclass of :class:`discord.Message` overrides :meth:`edit` and
    :meth:`delete` so that they operate synchronously through the owning
    webhook.

    .. versionadded:: 2.0
    """

    _state: _WebhookState

    def edit(
        self,
        *,
        content: Optional[str] = MISSING,
        embeds: Sequence[Embed] = MISSING,
        embed: Optional[Embed] = MISSING,
        attachments: Sequence[Union[Attachment, File]] = MISSING,
        allowed_mentions: Optional[AllowedMentions] = None,
    ) -> SyncWebhookMessage:
        """Edit this message through the owning webhook.

        ``content``/``embed``/``embeds`` follow the usual send semantics;
        ``attachments`` is the full list of attachments to keep plus new
        files to upload (``[]`` removes them all).

        Raises
        -------
        HTTPException
            Editing the message failed.
        Forbidden
            Edited a message that is not yours.
        TypeError
            Both ``embed`` and ``embeds`` were specified.
        ValueError
            The length of ``embeds`` was invalid or the webhook has no token.

        Returns
        --------
        :class:`SyncWebhookMessage`
            The newly edited message.
        """
        webhook = self._state._webhook
        return webhook.edit_message(
            self.id,
            content=content,
            embeds=embeds,
            embed=embed,
            attachments=attachments,
            allowed_mentions=allowed_mentions,
            thread=self._state._thread,
        )

    def add_files(self, *files: File) -> SyncWebhookMessage:
        r"""Append ``\*files`` to this message's attachments and return the edited message.

        .. versionadded:: 2.0
        """
        combined = list(self.attachments)
        combined.extend(files)
        return self.edit(attachments=combined)

    def remove_attachments(self, *attachments: Attachment) -> SyncWebhookMessage:
        r"""Remove ``\*attachments`` from this message and return the edited message.

        .. versionadded:: 2.0
        """
        kept = [a for a in self.attachments if a not in attachments]
        return self.edit(attachments=kept)

    def delete(self, *, delay: Optional[float] = None) -> None:
        """Delete this message, optionally sleeping ``delay`` seconds first.

        The delay blocks the calling thread. Raises :exc:`Forbidden`,
        :exc:`NotFound` or :exc:`HTTPException` on failure.
        """
        if delay is not None:
            time.sleep(delay)
        self._state._webhook.delete_message(self.id, thread=self._state._thread)
class SyncWebhook(BaseWebhook):
    """Represents a synchronous Discord webhook.

    This is the blocking counterpart of :class:`Webhook`; requests are made
    through a ``requests`` session instead of an ``aiohttp`` one.

    .. container:: operations

        .. describe:: x == y

            Checks if two webhooks are equal.

        .. describe:: x != y

            Checks if two webhooks are not equal.

        .. describe:: hash(x)

            Returns the webhooks's hash.

    Attributes
    ------------
    id: :class:`int`
        The webhook's ID.
    type: :class:`WebhookType`
        The type of the webhook.
    token: Optional[:class:`str`]
        The webhook's authentication token; ``None`` means the webhook
        cannot be used to make requests.
    guild_id: Optional[:class:`int`]
        The guild ID this webhook belongs to.
    channel_id: Optional[:class:`int`]
        The channel ID this webhook posts to.
    user: Optional[:class:`abc.User`]
        The webhook's creator; ``None`` when received without authentication.
    name: Optional[:class:`str`]
        The webhook's default name.
    source_guild: Optional[:class:`PartialWebhookGuild`]
        The followed channel's guild (``WebhookType.channel_follower`` only).
    source_channel: Optional[:class:`PartialWebhookChannel`]
        The followed channel (``WebhookType.channel_follower`` only).
    """

    __slots__: Tuple[str, ...] = ('session',)

    def __init__(
        self,
        data: WebhookPayload,
        session: Session,
        token: Optional[str] = None,
        state: Optional[Union[ConnectionState, _WebhookState]] = None,
    ) -> None:
        super().__init__(data, token, state)
        self.session: Session = session

    def __repr__(self) -> str:
        return f'<Webhook id={self.id!r}>'

    @property
    def url(self) -> str:
        """:class:`str` : Returns the webhook's url."""
        return f'https://discord.com/api/webhooks/{self.id}/{self.token}'

    @classmethod
    def partial(cls, id: int, token: str, *, session: Session = MISSING, bot_token: Optional[str] = None) -> SyncWebhook:
        """Create a partial :class:`SyncWebhook` from an ID and token.

        When ``session`` is not given, the module-level ``requests``
        functions are used in place of a dedicated :class:`requests.Session`.
        ``bot_token`` enables authenticated requests involving the webhook.
        """
        # Imported lazily so requests is only required when webhooks are used.
        import requests

        if session is not MISSING:
            if not isinstance(session, requests.Session):
                raise TypeError(f'expected requests.Session not {session.__class__.__name__}')
        else:
            session = requests  # type: ignore

        payload: WebhookPayload = {
            'id': id,
            'type': 1,
            'token': token,
        }
        return cls(payload, session, token=bot_token)

    @classmethod
    def from_url(cls, url: str, *, session: Session = MISSING, bot_token: Optional[str] = None) -> SyncWebhook:
        """Create a partial :class:`SyncWebhook` from a webhook URL.

        Raises :exc:`ValueError` when ``url`` does not look like a Discord
        webhook URL. ``session`` and ``bot_token`` behave as in :meth:`partial`.
        """
        match = re.search(r'discord(?:app)?\.com/api/webhooks/(?P<id>[0-9]{17,20})/(?P<token>[A-Za-z0-9\.\-\_]{60,})', url)
        if match is None:
            raise ValueError('Invalid webhook URL given.')

        import requests

        if session is not MISSING:
            if not isinstance(session, requests.Session):
                raise TypeError(f'expected requests.Session not {session.__class__.__name__}')
        else:
            session = requests  # type: ignore

        fields: Dict[str, Any] = match.groupdict()
        fields['type'] = 1
        return cls(fields, session, token=bot_token)  # type: ignore

    def fetch(self, *, prefer_auth: bool = True) -> SyncWebhook:
        """Fetch the current webhook, e.g. to fill in a partial one.

        When fetched without authentication the result carries no user
        information. ``prefer_auth`` selects the bot token over the webhook
        token when both are available. Raises :exc:`HTTPException`,
        :exc:`NotFound`, or :exc:`ValueError` when no token is available.
        """
        adapter: WebhookAdapter = _get_webhook_adapter()

        if prefer_auth and self.auth_token:
            data = adapter.fetch_webhook(self.id, self.auth_token, session=self.session)
        elif self.token:
            data = adapter.fetch_webhook_with_token(self.id, self.token, session=self.session)
        else:
            raise ValueError('This webhook does not have a token associated with it')

        return SyncWebhook(data, self.session, token=self.auth_token, state=self._state)

    def delete(self, *, reason: Optional[str] = None, prefer_auth: bool = True) -> None:
        """Delete this webhook.

        ``reason`` shows up in the audit log; ``prefer_auth`` selects the
        bot token over the webhook token when both are available. Raises
        :exc:`HTTPException`, :exc:`NotFound`, :exc:`Forbidden`, or
        :exc:`ValueError` when no token is available.
        """
        if self.token is None and self.auth_token is None:
            raise ValueError('This webhook does not have a token associated with it')

        adapter: WebhookAdapter = _get_webhook_adapter()
        if prefer_auth and self.auth_token:
            adapter.delete_webhook(self.id, token=self.auth_token, session=self.session, reason=reason)
        elif self.token:
            adapter.delete_webhook_with_token(self.id, self.token, session=self.session, reason=reason)

    def edit(
        self,
        *,
        reason: Optional[str] = None,
        name: Optional[str] = MISSING,
        avatar: Optional[bytes] = MISSING,
        channel: Optional[Snowflake] = None,
        prefer_auth: bool = True,
    ) -> SyncWebhook:
        """Edit this webhook's name, avatar, or channel.

        Changing ``channel`` requires an authenticated (bot-token) webhook.
        Raises :exc:`HTTPException`, :exc:`NotFound`, or :exc:`ValueError`
        when no token is available or channel editing lacks authentication.
        Returns the newly edited :class:`SyncWebhook`.
        """
        if self.token is None and self.auth_token is None:
            raise ValueError('This webhook does not have a token associated with it')

        changes = {}
        if name is not MISSING:
            changes['name'] = str(name) if name is not None else None
        if avatar is not MISSING:
            changes['avatar'] = utils._bytes_to_base64_data(avatar) if avatar is not None else None

        adapter: WebhookAdapter = _get_webhook_adapter()
        data: Optional[WebhookPayload] = None
        # Moving the webhook to another channel always needs the authenticated endpoint.
        if channel is not None:
            if self.auth_token is None:
                raise ValueError('Editing channel requires authenticated webhook')
            changes['channel_id'] = channel.id
            data = adapter.edit_webhook(self.id, self.auth_token, payload=changes, session=self.session, reason=reason)
        elif prefer_auth and self.auth_token:
            data = adapter.edit_webhook(self.id, self.auth_token, payload=changes, session=self.session, reason=reason)
        elif self.token:
            data = adapter.edit_webhook_with_token(self.id, self.token, payload=changes, session=self.session, reason=reason)

        if data is None:
            raise RuntimeError('Unreachable code hit: data was not assigned')

        return SyncWebhook(data=data, session=self.session, token=self.auth_token, state=self._state)

    def _create_message(self, data: MessagePayload, *, thread: Snowflake = MISSING) -> SyncWebhookMessage:
        # Wrap the raw payload into a message bound to this webhook's (artificial) state.
        msg_state = _WebhookState(self, parent=self._state, thread=thread)
        # state may be artificial (unlikely at this point...)
        channel = self.channel or PartialMessageable(state=self._state, guild_id=self.guild_id, id=int(data['channel_id']))  # type: ignore
        # state is artificial
        return SyncWebhookMessage(data=data, state=msg_state, channel=channel)  # type: ignore

    @overload
    def send(
        self,
        content: str = MISSING,
        *,
        username: str = MISSING,
        avatar_url: Any = MISSING,
        tts: bool = MISSING,
        file: File = MISSING,
        files: Sequence[File] = MISSING,
        embed: Embed = MISSING,
        embeds: Sequence[Embed] = MISSING,
        allowed_mentions: AllowedMentions = MISSING,
        thread: Snowflake = MISSING,
        thread_name: str = MISSING,
        wait: Literal[True],
        suppress_embeds: bool = MISSING,
        silent: bool = MISSING,
    ) -> SyncWebhookMessage:
        ...

    @overload
    def send(
        self,
        content: str = MISSING,
        *,
        username: str = MISSING,
        avatar_url: Any = MISSING,
        tts: bool = MISSING,
        file: File = MISSING,
        files: Sequence[File] = MISSING,
        embed: Embed = MISSING,
        embeds: Sequence[Embed] = MISSING,
        allowed_mentions: AllowedMentions = MISSING,
        thread: Snowflake = MISSING,
        thread_name: str = MISSING,
        wait: Literal[False] = ...,
        suppress_embeds: bool = MISSING,
        silent: bool = MISSING,
    ) -> None:
        ...

    def send(
        self,
        content: str = MISSING,
        *,
        username: str = MISSING,
        avatar_url: Any = MISSING,
        tts: bool = False,
        file: File = MISSING,
        files: Sequence[File] = MISSING,
        embed: Embed = MISSING,
        embeds: Sequence[Embed] = MISSING,
        allowed_mentions: AllowedMentions = MISSING,
        thread: Snowflake = MISSING,
        thread_name: str = MISSING,
        wait: bool = False,
        suppress_embeds: bool = False,
        silent: bool = False,
    ) -> Optional[SyncWebhookMessage]:
        """Send a message using the webhook.

        ``content`` may be anything convertible via ``str``. ``file``/``files``
        and ``embed``/``embeds`` are mutually exclusive pairs, as are
        ``thread`` (send into an existing thread) and ``thread_name``
        (create a new forum thread). ``username``/``avatar_url`` override the
        webhook's defaults for this message only. When ``wait`` is ``True``
        the created :class:`SyncWebhookMessage` is returned, otherwise ``None``.

        Raises
        --------
        HTTPException
            Sending the message failed.
        NotFound
            This webhook was not found.
        Forbidden
            The authorization token for the webhook is incorrect.
        TypeError
            Mutually exclusive parameters were mixed.
        ValueError
            The length of ``embeds`` was invalid or the webhook has no token.
        """
        if self.token is None:
            raise ValueError('This webhook does not have a token associated with it')

        previous_mentions: Optional[AllowedMentions] = getattr(self._state, 'allowed_mentions', None)
        if content is None:
            content = MISSING

        if suppress_embeds or silent:
            flags = MessageFlags._from_value(0)
            flags.suppress_embeds = suppress_embeds
            flags.suppress_notifications = silent
        else:
            flags = MISSING

        if thread_name is not MISSING and thread is not MISSING:
            raise TypeError('Cannot mix thread_name and thread keyword arguments.')

        with handle_message_parameters(
            content=content,
            username=username,
            avatar_url=avatar_url,
            tts=tts,
            file=file,
            files=files,
            embed=embed,
            embeds=embeds,
            thread_name=thread_name,
            allowed_mentions=allowed_mentions,
            previous_allowed_mentions=previous_mentions,
            flags=flags,
        ) as params:
            adapter: WebhookAdapter = _get_webhook_adapter()
            thread_id: Optional[int] = None
            if thread is not MISSING:
                thread_id = thread.id
            data = adapter.execute_webhook(
                self.id,
                self.token,
                session=self.session,
                payload=params.payload,
                multipart=params.multipart,
                files=params.files,
                thread_id=thread_id,
                wait=wait,
            )

        if wait:
            return self._create_message(data, thread=thread)

    def fetch_message(self, id: int, /, *, thread: Snowflake = MISSING) -> SyncWebhookMessage:
        """Retrieve a single :class:`~discord.SyncWebhookMessage` owned by this webhook.

        ``thread`` restricts the lookup to that thread. Raises
        :exc:`~discord.NotFound`, :exc:`~discord.Forbidden`,
        :exc:`~discord.HTTPException`, or :exc:`ValueError` when the webhook
        has no token.

        .. versionadded:: 2.0
        """
        if self.token is None:
            raise ValueError('This webhook does not have a token associated with it')

        thread_id: Optional[int] = None
        if thread is not MISSING:
            thread_id = thread.id

        adapter: WebhookAdapter = _get_webhook_adapter()
        data = adapter.get_webhook_message(
            self.id,
            self.token,
            id,
            session=self.session,
            thread_id=thread_id,
        )
        return self._create_message(data, thread=thread)

    def edit_message(
        self,
        message_id: int,
        *,
        content: Optional[str] = MISSING,
        embeds: Sequence[Embed] = MISSING,
        embed: Optional[Embed] = MISSING,
        attachments: Sequence[Union[Attachment, File]] = MISSING,
        allowed_mentions: Optional[AllowedMentions] = None,
        thread: Snowflake = MISSING,
    ) -> SyncWebhookMessage:
        """Edit a message owned by this webhook, addressed by ID.

        Lower-level counterpart of :meth:`SyncWebhookMessage.edit`. The
        parameters mirror :meth:`send`; ``attachments`` replaces the full
        attachment list. Raises :exc:`HTTPException`, :exc:`Forbidden`,
        :exc:`TypeError` (``embed`` mixed with ``embeds``) or
        :exc:`ValueError` (bad ``embeds`` length / missing token).

        .. versionadded:: 1.6
        """
        if self.token is None:
            raise ValueError('This webhook does not have a token associated with it')

        previous_mentions: Optional[AllowedMentions] = getattr(self._state, 'allowed_mentions', None)
        with handle_message_parameters(
            content=content,
            attachments=attachments,
            embed=embed,
            embeds=embeds,
            allowed_mentions=allowed_mentions,
            previous_allowed_mentions=previous_mentions,
        ) as params:
            thread_id: Optional[int] = None
            if thread is not MISSING:
                thread_id = thread.id

            adapter: WebhookAdapter = _get_webhook_adapter()
            data = adapter.edit_webhook_message(
                self.id,
                self.token,
                message_id,
                session=self.session,
                payload=params.payload,
                multipart=params.multipart,
                files=params.files,
                thread_id=thread_id,
            )
            return self._create_message(data, thread=thread)

    def delete_message(self, message_id: int, /, *, thread: Snowflake = MISSING) -> None:
        """Delete a message owned by this webhook, addressed by ID.

        Lower-level counterpart of :meth:`SyncWebhookMessage.delete`.
        Raises :exc:`HTTPException`, :exc:`Forbidden`, or :exc:`ValueError`
        when the webhook has no token.

        .. versionadded:: 1.6
        """
        if self.token is None:
            raise ValueError('This webhook does not have a token associated with it')

        thread_id: Optional[int] = None
        if thread is not MISSING:
            thread_id = thread.id

        adapter: WebhookAdapter = _get_webhook_adapter()
        adapter.delete_webhook_message(
            self.id,
            self.token,
            message_id,
            session=self.session,
            thread_id=thread_id,
        )
|
510e8a99e9b28ce006651d720294f18f60440d8e
|
385c027fc4b9c09706a4d880bdb8aa5897d0ebca
|
/starry/_core/__init__.py
|
3d40bf22ba2e4ec2977143db8a0d6a7a86ffdcfa
|
[
"MIT"
] |
permissive
|
rodluger/starry
|
076d46324473a6ac634781a3382021d02a5f4fdd
|
b72dff08588532f96bd072f2f1005e227d8e4ed8
|
refs/heads/master
| 2023-05-23T16:37:07.835744
| 2022-07-14T15:38:11
| 2022-07-14T15:38:11
| 120,621,593
| 131
| 31
|
MIT
| 2021-11-16T16:48:10
| 2018-02-07T13:54:20
|
Python
|
UTF-8
|
Python
| false
| false
| 81
|
py
|
__init__.py
|
# -*- coding: utf-8 -*-
from . import core, ops, utils, math
from .core import *
|
0aa7dddb39f9d6b73782f6e6bf9cb146bc776623
|
749af8e81d5ccd2d8714a34434a9c77772df551b
|
/statsmodels/robust/tests/test_scale.py
|
ced234f09d9a77fd8fd83fef129b0ce83a1b2bc3
|
[
"BSD-3-Clause"
] |
permissive
|
statsmodels/statsmodels
|
98ca67192c08bcc611ed3a75edaded2c7181ab98
|
01b19d7d111b29c183f620ff0a949ef6391ff8ee
|
refs/heads/main
| 2023-09-05T13:05:49.497076
| 2023-09-01T10:54:50
| 2023-09-01T10:54:50
| 1,885,237
| 8,666
| 3,023
|
BSD-3-Clause
| 2023-09-13T17:51:48
| 2011-06-12T17:04:50
|
Python
|
UTF-8
|
Python
| false
| false
| 8,580
|
py
|
test_scale.py
|
"""
Test functions for models.robust.scale
"""
import numpy as np
from numpy.random import standard_normal
from numpy.testing import assert_almost_equal, assert_equal
import pytest
from scipy.stats import norm as Gaussian
import statsmodels.api as sm
import statsmodels.robust.scale as scale
from statsmodels.robust.scale import mad
# Example from Section 5.5, Venables & Ripley (2002)
DECIMAL = 4
# TODO: Can replicate these tests using stackloss data and R if this
# data is a problem
class TestChem:
    """Robust scale estimates for the chem data (Venables & Ripley 2002, §5.5)."""

    @classmethod
    def setup_class(cls):
        cls.chem = np.array([
            2.20, 2.20, 2.4, 2.4, 2.5, 2.7, 2.8, 2.9,
            3.03, 3.03, 3.10, 3.37, 3.4, 3.4, 3.4, 3.5,
            3.6, 3.7, 3.7, 3.7, 3.7, 3.77, 5.28, 28.95,
        ])

    def test_mean(self):
        assert_almost_equal(self.chem.mean(), 4.2804, DECIMAL)

    def test_median(self):
        assert_almost_equal(np.median(self.chem), 3.385, DECIMAL)

    def test_mad(self):
        assert_almost_equal(scale.mad(self.chem), 0.52632, DECIMAL)

    def test_iqr(self):
        assert_almost_equal(scale.iqr(self.chem), 0.68570, DECIMAL)

    def test_qn(self):
        assert_almost_equal(scale.qn_scale(self.chem), 0.73231, DECIMAL)

    def test_huber_scale(self):
        assert_almost_equal(scale.huber(self.chem)[0], 3.20549, DECIMAL)

    def test_huber_location(self):
        assert_almost_equal(scale.huber(self.chem)[1], 0.67365, DECIMAL)

    def test_huber_huberT(self):
        # The Huber class with an explicit HuberT norm (t=1.5) must match
        # the function interface's defaults.
        norm = scale.norms.HuberT()
        norm.t = 1.5
        huber_est = scale.Huber(norm=norm)
        expected = scale.huber(self.chem)
        actual = huber_est(self.chem)
        assert_almost_equal(expected[0], actual[0], DECIMAL)
        assert_almost_equal(expected[1], actual[1], DECIMAL)

    def test_huber_Hampel(self):
        hampel = scale.Huber(norm=scale.norms.Hampel())
        location, spread = hampel(self.chem)
        assert_almost_equal(location, 3.17434, DECIMAL)
        assert_almost_equal(spread, 0.66782, DECIMAL)
class TestMad:
    """Median absolute deviation on random normal data."""

    @classmethod
    def setup_class(cls):
        np.random.seed(54321)
        cls.X = standard_normal((40, 10))

    def test_mad(self):
        assert_equal(scale.mad(self.X).shape, (10,))

    def test_mad_empty(self):
        # Fully empty input yields NaN; empty axes yield empty outputs.
        assert np.isnan(scale.mad(np.empty(0)))
        assert_equal(scale.mad(np.empty((10, 100, 0)), axis=1), np.empty((10, 0)))
        assert_equal(scale.mad(np.empty((100, 100, 0, 0)), axis=-1), np.empty((100, 100, 0)))

    def test_mad_center(self):
        assert_equal(scale.mad(self.X, center=0).shape, (10,))
        with pytest.raises(TypeError):
            scale.mad(self.X, center=None)
        expected = np.median(np.abs(self.X - 1), axis=0) / Gaussian.ppf(3 / 4.0)
        assert_almost_equal(scale.mad(self.X, center=1), expected, DECIMAL)
class TestMadAxes:
    """Shape checks for mad() along each axis of a 3-d array."""

    @classmethod
    def setup_class(cls):
        np.random.seed(54321)
        cls.X = standard_normal((40, 10, 30))

    def test_axis0(self):
        assert_equal(scale.mad(self.X, axis=0).shape, (10, 30))

    def test_axis1(self):
        assert_equal(scale.mad(self.X, axis=1).shape, (40, 30))

    def test_axis2(self):
        assert_equal(scale.mad(self.X, axis=2).shape, (40, 10))

    def test_axisneg1(self):
        assert_equal(scale.mad(self.X, axis=-1).shape, (40, 10))
class TestIqr:
    """Interquartile-range scale estimator on random normal data."""

    @classmethod
    def setup_class(cls):
        np.random.seed(54321)
        cls.X = standard_normal((40, 10))

    def test_iqr(self):
        assert_equal(scale.iqr(self.X).shape, (10,))

    def test_iqr_empty(self):
        # Fully empty input yields NaN; empty axes yield empty outputs;
        # a 0-d array is rejected outright.
        assert np.isnan(scale.iqr(np.empty(0)))
        assert_equal(scale.iqr(np.empty((10, 100, 0)), axis=1), np.empty((10, 0)))
        assert_equal(scale.iqr(np.empty((100, 100, 0, 0)), axis=-1), np.empty((100, 100, 0)))
        with pytest.raises(ValueError):
            scale.iqr(np.empty(shape=()))
class TestIqrAxes:
    """Shape checks for iqr() along each axis of a 3-d array."""

    @classmethod
    def setup_class(cls):
        np.random.seed(54321)
        cls.X = standard_normal((40, 10, 30))

    def test_axis0(self):
        assert_equal(scale.iqr(self.X, axis=0).shape, (10, 30))

    def test_axis1(self):
        assert_equal(scale.iqr(self.X, axis=1).shape, (40, 30))

    def test_axis2(self):
        assert_equal(scale.iqr(self.X, axis=2).shape, (40, 10))

    def test_axisneg1(self):
        assert_equal(scale.iqr(self.X, axis=-1).shape, (40, 10))
class TestQn:
    """Tests for the Qn robust scale estimator (Rousseeuw & Croux)."""

    @classmethod
    def setup_class(cls):
        np.random.seed(54321)
        cls.normal = standard_normal(size=40)
        cls.range = np.arange(0, 40)
        cls.exponential = np.random.exponential(size=40)
        cls.stackloss = sm.datasets.stackloss.load_pandas().data
        cls.sunspot = sm.datasets.sunspots.load_pandas().data.SUNACTIVITY

    def test_qn_naive(self):
        # The fast implementation must agree with the naive O(n^2) reference.
        assert_almost_equal(
            scale.qn_scale(self.normal), scale._qn_naive(self.normal), DECIMAL
        )
        assert_almost_equal(
            scale.qn_scale(self.range), scale._qn_naive(self.range), DECIMAL
        )
        assert_almost_equal(
            scale.qn_scale(self.exponential),
            scale._qn_naive(self.exponential),
            DECIMAL,
        )

    def test_qn_robustbase(self):
        # Reference values from R's robustbase with finite.corr = FALSE.
        assert_almost_equal(scale.qn_scale(self.range), 13.3148, DECIMAL)
        assert_almost_equal(
            scale.qn_scale(self.stackloss),
            np.array([8.87656, 8.87656, 2.21914, 4.43828]),
            DECIMAL,
        )
        # sunspot.year from datasets in R only goes up to 289
        assert_almost_equal(
            scale.qn_scale(self.sunspot[0:289]), 33.50901, DECIMAL
        )

    def test_qn_empty(self):
        empty = np.empty(0)
        assert np.isnan(scale.qn_scale(empty))
        empty = np.empty((10, 100, 0))
        assert_equal(scale.qn_scale(empty, axis=1), np.empty((10, 0)))
        empty = np.empty((100, 100, 0, 0))
        assert_equal(scale.qn_scale(empty, axis=-1), np.empty((100, 100, 0)))
        empty = np.empty(shape=())
        with pytest.raises(ValueError):
            # Bug fix: this previously called scale.iqr(empty), copied from
            # TestIqr.test_iqr_empty, so qn_scale's own 0-d rejection was
            # never exercised. NOTE(review): assumes qn_scale raises
            # ValueError on 0-d input like iqr does -- confirm.
            scale.qn_scale(empty)
class TestQnAxes:
    """``scale.qn_scale`` must collapse exactly the requested axis."""

    @classmethod
    def setup_class(cls):
        # Deterministic (40, 10, 30) fixture shared by all axis checks.
        np.random.seed(54321)
        cls.X = standard_normal(size=(40, 10, 30))

    def test_axis0(self):
        assert_equal(scale.qn_scale(self.X, axis=0).shape, (10, 30))

    def test_axis1(self):
        assert_equal(scale.qn_scale(self.X, axis=1).shape, (40, 30))

    def test_axis2(self):
        assert_equal(scale.qn_scale(self.X, axis=2).shape, (40, 10))

    def test_axisneg1(self):
        assert_equal(scale.qn_scale(self.X, axis=-1).shape, (40, 10))
class TestHuber:
    """Shape check for the Huber joint location/scale estimator."""

    @classmethod
    def setup_class(cls):
        # Deterministic 2-D fixture: 40 observations of 10 variables.
        np.random.seed(54321)
        cls.X = standard_normal(size=(40, 10))

    def test_huber_result_shape(self):
        # The estimator returns (location, scale); one location per column.
        location, _scale_est = scale.Huber(maxiter=100)(self.X)
        assert_equal(location.shape, (10,))
class TestHuberAxes:
    """The Huber estimator must respect the requested reduction axis."""

    @classmethod
    def setup_class(cls):
        # Deterministic (40, 10, 30) fixture and a shared estimator instance.
        np.random.seed(54321)
        cls.X = standard_normal(size=(40, 10, 30))
        cls.h = scale.Huber(maxiter=1000, tol=1.0e-05)

    def test_default(self):
        location, _scale_est = self.h(self.X, axis=0)
        assert_equal(location.shape, (10, 30))

    def test_axis1(self):
        location, _scale_est = self.h(self.X, axis=1)
        assert_equal(location.shape, (40, 30))

    def test_axis2(self):
        location, _scale_est = self.h(self.X, axis=2)
        assert_equal(location.shape, (40, 10))

    def test_axisneg1(self):
        location, _scale_est = self.h(self.X, axis=-1)
        assert_equal(location.shape, (40, 10))
def test_mad_axis_none():
    """axis=None must match a flattened axis=0 call and accept a callable
    center (GH 7027)."""
    data = np.array([[0, 1, 2], [2, 3, 2]])
    whole_array = mad(a=data, axis=None)
    callable_center = mad(a=data, axis=None, center=lambda x: np.median(x))
    flattened = mad(a=data.ravel(), axis=0)
    np.testing.assert_allclose(whole_array, callable_center)
    np.testing.assert_allclose(whole_array, flattened)
|
b79cc328f68ef273685c857fdcaf2c8ca031cb0c
|
a63d907ad63ba6705420a6fb2788196d1bd3763c
|
/src/api/dataflow/component/handlers/cluster_config_handler.py
|
d0d3656e562204ce47edb0a5cab41d239032bb50
|
[
"MIT"
] |
permissive
|
Tencent/bk-base
|
a38461072811667dc2880a13a5232004fe771a4b
|
6d483b4df67739b26cc8ecaa56c1d76ab46bd7a2
|
refs/heads/master
| 2022-07-30T04:24:53.370661
| 2022-04-02T10:30:55
| 2022-04-02T10:30:55
| 381,257,882
| 101
| 51
|
NOASSERTION
| 2022-04-02T10:30:56
| 2021-06-29T06:10:01
|
Python
|
UTF-8
|
Python
| false
| false
| 4,709
|
py
|
cluster_config_handler.py
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from django.forms import model_to_dict
from dataflow.shared.handlers import processing_cluster_config
from dataflow.shared.log import component_logger as logger
def create_cluster_config(params):
    """Create or update a processing cluster config.

    The config is keyed by (geog_area_code, component_type, cluster_name):
    an existing row is updated in place, otherwise a new one is saved.
    """
    geog_area_code = params.get("geog_area_code")
    component_type = params.get("component_type")
    cluster_name = params.get("cluster_name")
    # Attributes shared by both the update and the create paths.
    attrs = {
        key: params.get(key)
        for key in (
            "cluster_domain",
            "cluster_group",
            "cluster_label",
            "priority",
            "version",
            "belong",
            "description",
        )
    }
    exists = processing_cluster_config.where(
        geog_area_code=geog_area_code,
        component_type=component_type,
        cluster_name=cluster_name,
    ).exists()
    if exists:
        logger.info(
            "update cluster config, cluster_name(%s), component_type(%s), geog_area_code(%s)"
            % (cluster_name, component_type, geog_area_code)
        )
        processing_cluster_config.update(
            geog_area_code=geog_area_code,
            cluster_name=cluster_name,
            component_type=component_type,
            **attrs
        )
    else:
        logger.info(
            "create cluster config, cluster_name(%s), component_type(%s), geog_area_code(%s)"
            % (cluster_name, component_type, geog_area_code)
        )
        # New rows additionally carry the geog area code as their tag.
        processing_cluster_config.save(
            tag=geog_area_code,
            cluster_name=cluster_name,
            component_type=component_type,
            geog_area_code=geog_area_code,
            **attrs
        )
def destroy_cluster_config(params, cluster_name):
    """Delete the cluster config identified by name, component type and region."""
    selector = {
        "cluster_name": cluster_name,
        "component_type": params.get("component_type"),
        "geog_area_code": params.get("geog_area_code"),
    }
    processing_cluster_config.delete(**selector)
    logger.info(
        "destroy cluster config, cluster_name(%s), component_type(%s), geog_area_code(%s)"
        % (
            selector["cluster_name"],
            selector["component_type"],
            selector["geog_area_code"],
        )
    )
def retrieve_cluster_config(params, cluster_name):
    """Fetch cluster configs matching name/component/region as plain dicts."""
    component_type = params.get("component_type")
    geog_area_code = params.get("geog_area_code")
    matches = processing_cluster_config.where(
        cluster_name=cluster_name,
        component_type=component_type,
        geog_area_code=geog_area_code,
    )
    logger.info(
        "get cluster config(%s), cluster_name(%s), component_type(%s), geog_area_code(%s)"
        % (matches.count(), cluster_name, component_type, geog_area_code)
    )
    if not matches:
        return []
    return [model_to_dict(row) for row in matches]
|
7228c8fc139f3d3b3cbde0fbc17f20f96d7f4cc6
|
6d652aa802d90571a640ac0b538ff3055d0e34c5
|
/i3pystatus/updates/__init__.py
|
d0b93e75f6a44c4263121b1f51e9f97c01e96d10
|
[
"MIT"
] |
permissive
|
enkore/i3pystatus
|
38eaea8203ed309ff90e1717bd3a9075f12590b0
|
0820dd4e3d479dddec7797b2ea9a83da0f62b7cf
|
refs/heads/current
| 2023-08-18T11:36:18.296269
| 2023-04-25T20:56:08
| 2023-04-25T20:56:08
| 8,130,605
| 438
| 244
|
MIT
| 2023-08-13T12:13:33
| 2013-02-11T01:01:15
|
Python
|
UTF-8
|
Python
| false
| false
| 5,263
|
py
|
__init__.py
|
import threading
from i3pystatus import SettingsBase, Module, formatp
from i3pystatus.core.util import internet, require
from i3pystatus.core.desktop import DesktopNotification
class Backend(SettingsBase):
    """Base class for update-count backends."""
    settings = ()
    # Default update count. NOTE(review): Updates.check_updates unpacks
    # ``backend.updates`` as ``updates, notif_body``, so concrete backends
    # appear to override this with a 2-tuple — confirm against backends.
    updates = 0
class Updates(Module):
    """
    Generic update checker.

    To use select appropriate backend(s) for your system.
    For list of all available backends see :ref:`updatebackends`.

    Left clicking on the module will refresh the count of upgradeable packages.
    This may be used to dismiss the notification after updating your system.

    Right clicking shows a desktop notification with a summary count and a list
    of available updates.

    .. rubric:: Available formatters

    * `{count}` — Sum of all available updates from all backends.
    * For each backend registered there is one formatter named after the
      backend, multiple identical backends do not accumulate, but overwrite
      each other.
    * For example, `{Cower}` (note capital C) is the number of updates
      reported by the cower backend, assuming it has been registered.

    .. rubric:: Usage example

    ::

        from i3pystatus import Status
        from i3pystatus.updates import pacman, cower

        status = Status()

        status.register("updates",
                        format = "Updates: {count}",
                        format_no_updates = "No updates",
                        backends = [pacman.Pacman(), cower.Cower()])

        status.run()
    """
    # Re-check interval in seconds (default one hour).
    interval = 3600
    settings = (
        ("backends", "Required list of backends used to check for updates."),
        ("format", "Format used when updates are available. "
         "May contain formatters."),
        ("format_no_updates", "String that is shown if no updates are "
         "available. If not set the module will be hidden if no updates "
         "are available."),
        ("format_working", "Format used while update queries are run. By "
         "default the same as ``format``."),
        ("format_summary", "Format for the summary line of notifications. By "
         "default the same as ``format``."),
        ("notification_icon", "Icon shown when reporting the list of updates. "
         "Default is ``software-update-available``, and can be "
         "None for no icon."),
        "color",
        "color_no_updates",
        "color_working",
        ("interval", "Default interval is set to one hour."),
    )
    required = ("backends",)
    # Defaults for the settings declared above.
    backends = None
    format = "Updates: {count}"
    format_no_updates = None
    format_working = None
    format_summary = None
    notification_icon = "software-update-available"
    color = "#00DD00"
    color_no_updates = None
    color_working = None
    # Mouse bindings: left click refreshes, right click shows a notification.
    on_leftclick = "run"
    on_rightclick = "report"

    def init(self):
        """Normalize settings and start the background polling thread."""
        # Allow a single backend to be passed without wrapping it in a list.
        if not isinstance(self.backends, list):
            self.backends = [self.backends]
        if self.format_working is None:  # we want to allow an empty format
            self.format_working = self.format
        if self.format_summary is None:  # we want to allow an empty format
            self.format_summary = self.format
        self.color_working = self.color_working or self.color
        # Formatter values; per-backend counts are added in check_updates().
        self.data = {
            "count": 0
        }
        # Per-backend notification text, keyed by backend class name.
        self.notif_body = {}
        # The condition variable doubles as an interruptible sleep timer:
        # run() notifies it to force an immediate refresh.
        self.condition = threading.Condition()
        self.thread = threading.Thread(target=self.update_thread, daemon=True)
        self.thread.start()

    def update_thread(self):
        """Poll the backends forever, waking early when run() notifies."""
        self.check_updates()
        while True:
            with self.condition:
                self.condition.wait(self.interval)
            self.check_updates()

    @require(internet)
    def check_updates(self):
        """Query every backend and rebuild the status-bar output."""
        # Seed placeholder values so formatting works before results arrive.
        for backend in self.backends:
            key = backend.__class__.__name__
            if key not in self.data:
                self.data[key] = "?"
            if key not in self.notif_body:
                self.notif_body[key] = ""
        # Show the "working" format while the (possibly slow) queries run.
        self.output = {
            "full_text": formatp(self.format_working, **self.data).strip(),
            "color": self.color_working,
        }
        updates_count = 0
        for backend in self.backends:
            name = backend.__class__.__name__
            updates, notif_body = backend.updates
            try:
                updates_count += updates
            except TypeError:
                # A backend may report a non-numeric count; skip it in the sum.
                pass
            self.data[name] = updates
            self.notif_body[name] = notif_body or ""
        if updates_count == 0:
            # Hide the module entirely unless format_no_updates is set.
            self.output = {} if not self.format_no_updates else {
                "full_text": self.format_no_updates,
                "color": self.color_no_updates,
            }
            return
        self.data["count"] = updates_count
        self.output = {
            "full_text": formatp(self.format, **self.data).strip(),
            "color": self.color,
        }

    def run(self):
        """Trigger an immediate refresh (bound to left click)."""
        with self.condition:
            self.condition.notify()

    def report(self):
        """Show a desktop notification with the update summary (right click)."""
        DesktopNotification(
            title=formatp(self.format_summary, **self.data).strip(),
            body="\n".join(self.notif_body.values()),
            icon=self.notification_icon,
            urgency=1,
            timeout=0,
        ).display()
|
9c7953c5fd9cf4d20379343699f2aee55994101e
|
32f2d6ccfdcbd94e9e022aaaaddc16a7251d0634
|
/augur/application/db/models/augur_operations_old.py
|
898b6f77f2dca551c9e7f6b3b03ba65eef8f2209
|
[
"MIT"
] |
permissive
|
chaoss/augur
|
13565ba6094cd0b224ea458a7b36ed916fd3e330
|
03e5cf8bef4cd6fd5c3458393d1ae839d7bcc3c3
|
refs/heads/main
| 2023-08-24T20:27:17.200466
| 2023-08-13T21:15:24
| 2023-08-13T21:15:24
| 78,134,122
| 580
| 1,027
|
MIT
| 2023-09-14T14:56:36
| 2017-01-05T17:34:54
|
Python
|
UTF-8
|
Python
| false
| false
| 3,974
|
py
|
augur_operations_old.py
|
# from augur.application.db.models.base import Base
# from sqlalchemy import (
# Index,
# Column,
# Integer,
# String,
# UniqueConstraint,
# BigInteger,
# TIMESTAMP,
# PrimaryKeyConstraint,
# func,
# text,
# )
# # Start of Augur Operations tablespoon
# class All(Base):
# all_id = Column(BigInteger, primary_key=True)
# Name = Column(String())
# Bytes = Column(String())
# Lines = Column(String())
# Code = Column(String())
# Comment = Column(String())
# Blank = Column(String())
# Complexity = Column(String())
# Count = Column(String())
# WeightedComplexity = Column(String())
# Files = Column(String())
# __tablename__ = "all"
# __table_args__ = {"schema": "augur_operations"}
# class AugurSettings(Base):
# id = Column(BigInteger)
# setting = Column(String())
# value = Column(String())
# last_modified = Column(TIMESTAMP(), server_default=func.current_timestamp())
# __tablename__ = "augur_settings"
# __table_args__ = (
# PrimaryKeyConstraint("id"),
# UniqueConstraint("setting", name="setting-unique"),
# {"schema": "augur_operations"},
# )
# class ReposFetchLog(Base):
# repos_fetch_log_id = Column(BigInteger)
# repos_id = Column(Integer, nullable=False)
# status = Column(String(), nullable=False)
# date = Column(TIMESTAMP(), nullable=False, server_default=func.current_timestamp())
# __tablename__ = "repos_fetch_log"
# __table_args__ = (
# PrimaryKeyConstraint("repos_fetch_log_id"),
# Index("repos_id,statusops", repos_id, status),
# {"schema": "augur_operations"},
# )
# # TODO: Add foreign key to Repo table
# class WorkerHistory(Base):
# history_id = Column(BigInteger)
# repo_id = Column(BigInteger)
# worker = Column(String(), nullable=False)
# job_model = Column(String(), nullable=False)
# oauth_id = Column(Integer)
# timestamp = Column(TIMESTAMP(), nullable=False)
# status = Column(String(), nullable=False)
# total_results = Column(Integer)
# __tablename__ = "worker_history"
# __table_args__ = (
# PrimaryKeyConstraint("history_id", name="history_pkey"),
# {"schema": "augur_operations"},
# )
# class WorkerJob(Base):
# job_model = Column(String())
# state = Column(Integer, nullable=False, server_default=text("0"))
# zombie_head = Column(Integer)
# since_id_str = Column(String(), nullable=False, server_default="0")
# description = Column(String(), server_default="None")
# last_count = Column(Integer)
# last_run = Column(TIMESTAMP())
# analysis_state = Column(Integer, server_default=text("0"))
# oauth_id = Column(Integer, nullable=False)
# __tablename__ = "worker_job"
# __table_args__ = (
# PrimaryKeyConstraint("job_model", name="job_pkey"),
# {"schema": "augur_operations"},
# )
# class WorkerOauth(Base):
# oauth_id = Column(BigInteger)
# name = Column(String(), nullable=False)
# consumer_key = Column(String(), nullable=False)
# consumer_secret = Column(String(), nullable=False)
# access_token = Column(String(), nullable=False)
# access_token_secret = Column(String(), nullable=False)
# repo_directory = Column(String())
# platform = Column(String(), server_default="github")
# __tablename__ = "worker_oauth"
# __table_args__ = (PrimaryKeyConstraint("oauth_id"), {"schema": "augur_operations"})
# class WorkerSettingsFacade(Base):
# id = Column(Integer)
# setting = Column(String(), nullable=False)
# value = Column(String(), nullable=False)
# last_modified = Column(
# TIMESTAMP(), nullable=False, server_default=func.current_timestamp()
# )
# __tablename__ = "worker_settings_facade"
# __table_args__ = (
# PrimaryKeyConstraint("id", name="settings_pkey"),
# {"schema": "augur_operations"},
# )
|
c94571982d873660c4a88bbb781ab453ed8acb49
|
4ae34a5179d7adf1037eb9a3cb249f9a5c06684e
|
/examples/v1beta1/trial-images/enas-cnn-cifar10/op_library.py
|
ab02ed4a7a0e742528d51d98d2f90f15fc1928b8
|
[
"Apache-2.0"
] |
permissive
|
kubeflow/katib
|
367373c0452d49a7a115b86893f4dab9e1f278ea
|
e3e0aa24aeea1edfab0fd42f55392af651d2b3ae
|
refs/heads/master
| 2023-09-04T05:02:05.752156
| 2023-08-24T22:40:54
| 2023-08-24T22:40:54
| 127,941,481
| 1,385
| 422
|
Apache-2.0
| 2023-09-14T13:17:29
| 2018-04-03T17:07:12
|
Go
|
UTF-8
|
Python
| false
| false
| 4,686
|
py
|
op_library.py
|
# Copyright 2022 The Kubeflow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from keras import backend as K
from keras.layers import Input, Conv2D, ZeroPadding2D, concatenate, MaxPooling2D, \
AveragePooling2D, Dense, Activation, BatchNormalization, GlobalAveragePooling2D, \
SeparableConv2D, DepthwiseConv2D
def concat(inputs):
    """Concatenate feature maps along channels, zero-padding to align sizes.

    Inputs whose second dimension is smaller than the largest one are
    symmetrically zero-padded (with one extra row/column on the far side
    when the difference is odd) before channel-wise concatenation.
    """
    if len(inputs) == 1:
        return inputs[0]
    shapes = np.asarray([K.int_shape(tensor) for tensor in inputs])
    target = max(shapes[:, 1])
    aligned = []
    for tensor, shape in zip(inputs, shapes):
        gap = target - shape[1]
        if gap <= 0:
            aligned.append(tensor)
            continue
        half = int(gap / 2)
        if gap % 2 == 0:
            aligned.append(ZeroPadding2D(padding=(half, half))(tensor))
        else:
            aligned.append(
                ZeroPadding2D(padding=((half, half + 1),
                                       (half, half + 1)))(tensor))
    return concatenate(inputs=aligned, axis=-1)
def conv(x, config):
    """ReLU -> Conv2D -> BatchNorm block; sizes overridable via ``config``."""
    defaults = {
        "num_filter": 64,
        "filter_size": 3,
        "stride": 1,
    }
    # Overlay any integer overrides supplied in the search-space config.
    opts = {k: int(config[k]) if k in config else v for k, v in defaults.items()}
    out = Activation('relu')(x)
    out = Conv2D(
        filters=opts['num_filter'],
        kernel_size=opts['filter_size'],
        strides=opts['stride'],
        padding='same')(out)
    return BatchNormalization()(out)
def sp_conv(x, config):
    """ReLU -> SeparableConv2D -> BatchNorm block; sizes via ``config``."""
    defaults = {
        "num_filter": 64,
        "filter_size": 3,
        "stride": 1,
        "depth_multiplier": 1,
    }
    # Overlay any integer overrides supplied in the search-space config.
    opts = {k: int(config[k]) if k in config else v for k, v in defaults.items()}
    out = Activation('relu')(x)
    out = SeparableConv2D(
        filters=opts['num_filter'],
        kernel_size=opts['filter_size'],
        strides=opts['stride'],
        depth_multiplier=opts['depth_multiplier'],
        padding='same')(out)
    return BatchNormalization()(out)
def dw_conv(x, config):
    """ReLU -> DepthwiseConv2D -> BatchNorm block; sizes via ``config``."""
    defaults = {
        "filter_size": 3,
        "stride": 1,
        "depth_multiplier": 1,
    }
    # Overlay any integer overrides supplied in the search-space config.
    opts = {k: int(config[k]) if k in config else v for k, v in defaults.items()}
    out = Activation('relu')(x)
    out = DepthwiseConv2D(
        kernel_size=opts['filter_size'],
        strides=opts['stride'],
        depth_multiplier=opts['depth_multiplier'],
        padding='same')(out)
    return BatchNormalization()(out)
def reduction(x, config):
    """Apply a pooling reduction selected by ``config``.

    Args:
        x: 4-D input tensor (batch, height, width, channels).
        config: Dict that may override 'reduction_type' ('max_pooling' or
            'avg_pooling'), 'pool_size' and 'stride'.

    Returns:
        The pooled tensor, or ``x`` unchanged when a spatial dimension is
        already 1 and cannot be reduced further.

    Raises:
        ValueError: If 'reduction_type' names an unsupported pooling type.
    """
    # Handle the extreme case where the input has dimension 1 by 1 and is
    # not reducible: fall back to an identity layer instead.
    dim = K.int_shape(x)
    if dim[1] == 1 or dim[2] == 1:
        print("WARNING: One or more dimensions of the input of the reduction layer is 1. It cannot be further reduced. A identity layer will be used instead.")
        return x
    parameters = {
        'reduction_type': "max_pooling",
        'pool_size': 2,
        'stride': None,
    }
    if 'reduction_type' in config:
        parameters['reduction_type'] = config['reduction_type']
    if 'pool_size' in config:
        parameters['pool_size'] = int(config['pool_size'])
    if 'stride' in config:
        parameters['stride'] = int(config['stride'])
    if parameters['reduction_type'] == 'max_pooling':
        return MaxPooling2D(
            pool_size=parameters['pool_size'],
            strides=parameters['stride']
        )(x)
    if parameters['reduction_type'] == 'avg_pooling':
        return AveragePooling2D(
            pool_size=parameters['pool_size'],
            strides=parameters['stride']
        )(x)
    # Previously an unknown type fell through and raised an opaque NameError
    # on the unbound `result`; fail fast with a clear message instead.
    raise ValueError(
        "Unsupported reduction_type: %r" % parameters['reduction_type'])
|
810c244b44d371edd8dfbee439d460085d0315b6
|
2b5ffa18e7198e45fa77674b96dac8d91159fed7
|
/setup.py
|
75485712d4b40f13a0a1137dd4946d72fd96f67a
|
[
"BSD-3-Clause"
] |
permissive
|
potatolondon/djangae
|
73681d0c8302ac216f74bc00b980de368e8d4280
|
bef308632790bb6f87e71bb91183f57bad6bd149
|
refs/heads/master
| 2023-09-01T15:27:51.995232
| 2023-08-30T14:40:48
| 2023-08-30T14:40:48
| 10,217,788
| 474
| 155
|
BSD-3-Clause
| 2023-02-08T01:05:31
| 2013-05-22T10:55:55
|
Python
|
UTF-8
|
Python
| false
| false
| 1,810
|
py
|
setup.py
|
import os
from setuptools import setup, find_packages

# Package metadata.
NAME = 'djangae'
PACKAGES = find_packages()
DESCRIPTION = 'Django integration with Google App Engine'
URL = "https://gitlab.com/potato-oss/djangae/djangae"
# Read the long description with a context manager so the file handle is
# closed, and with an explicit encoding so installs don't depend on the
# platform default codec.
with open(os.path.join(os.path.dirname(__file__), 'README.md'),
          encoding='utf-8') as readme:
    LONG_DESCRIPTION = readme.read()
AUTHOR = 'Potato London Ltd.'
EXTRAS = {
    "test": ["beautifulsoup4"],
}
setup(
    name=NAME,
    version='2.0.0rc3',
    packages=PACKAGES,
    # metadata for upload to PyPI
    author=AUTHOR,
    author_email='mail@p.ota.to',
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    long_description_content_type='text/markdown',
    keywords=["django", "Google App Engine", "GAE"],
    url=URL,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Framework :: Django',
        'Framework :: Django :: 2.2',
        'Framework :: Django :: 3.2',
        'Framework :: Django :: 4.1',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ],
    include_package_data=True,
    # dependencies
    install_requires=[
        'django>=2.2,<5.0',
        'django-gcloud-connectors>=0.3.5',
        'google-api-python-client>=2.27.0',
        'google-cloud-tasks>=1.5.0,<2.0.0',
        'google-cloud-logging>=3.0.0,<4.0.0',
        'psutil>=5.7.3',
        # requests required by cloud storage file backend
        'requests>=2.22.0',
        # required for iap backend
        'cryptography==3.4.6',
        'python-jose[cryptography]==3.2.0',
        'google-cloud-storage==1.43.0',
        # required minimum version for oauth backend
        'google-auth>=2.3.2,<3.0dev',
    ],
    extras_require=EXTRAS,
    tests_require=EXTRAS['test'],
)
|
5109bfc575f41c98d8ff96dc2125f4f4d7e64f5f
|
511144d69b15df3f4bad6dc2fbee2ab6adc457d4
|
/tests/data/simple_cases/one_element_subscript.py
|
39205ba9f7aae25cafd20b132920bcb676420a57
|
[
"MIT"
] |
permissive
|
psf/black
|
7345769d7932d474cb05ca62a73a940608728794
|
47676bf5939ae5c8e670d947917bc8af4732eab6
|
refs/heads/main
| 2023-09-02T12:44:37.458104
| 2023-08-26T13:44:17
| 2023-08-26T13:44:17
| 125,266,328
| 23,453
| 2,531
|
MIT
| 2023-09-13T07:40:43
| 2018-03-14T19:54:45
|
Python
|
UTF-8
|
Python
| false
| false
| 674
|
py
|
one_element_subscript.py
|
# We should not treat the trailing comma
# in a single-element subscript.
a: tuple[int,]
b = tuple[int,]
# The magic comma still applies to multi-element subscripts.
c: tuple[int, int,]
d = tuple[int, int,]
# Magic commas still work as expected for non-subscripts.
small_list = [1,]
list_of_types = [tuple[int,],]
# output
# We should not treat the trailing comma
# in a single-element subscript.
a: tuple[int,]
b = tuple[int,]
# The magic comma still applies to multi-element subscripts.
c: tuple[
int,
int,
]
d = tuple[
int,
int,
]
# Magic commas still work as expected for non-subscripts.
small_list = [
1,
]
list_of_types = [
tuple[int,],
]
|
e87a804b1a69edd34c383b2b2c8c575fe1458ac3
|
7a15271c7cddd199f43555469a67d26ce0f60836
|
/experimental/one_vs_all/main_cifar10.py
|
3950277c6e59edd5b3216cb25b60e6cd4349d113
|
[
"Apache-2.0"
] |
permissive
|
google/uncertainty-baselines
|
b2c339d918bf3949ee066f9eafa6b51232a2ac3d
|
f5f6f50f82bd441339c9d9efbef3f09e72c5fef6
|
refs/heads/main
| 2023-09-02T13:59:26.355288
| 2023-08-14T16:35:22
| 2023-08-14T16:36:11
| 280,026,201
| 1,235
| 198
|
Apache-2.0
| 2023-09-11T22:21:48
| 2020-07-16T01:54:32
|
Python
|
UTF-8
|
Python
| false
| false
| 7,354
|
py
|
main_cifar10.py
|
# coding=utf-8
# Copyright 2023 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Wide ResNet 28-10 on CIFAR-10/100 trained with one-vs-all classifiers.
"""
import os
from absl import app
from absl import flags
from absl import logging
import numpy as np
import tensorflow as tf
import uncertainty_baselines as ub
import losses as loss_lib # local file import from experimental.one_vs_all
import models as models_lib # local file import from experimental.one_vs_all
# Flags relating to hyperparameters.
flags.DEFINE_integer('batch_size', 512, 'The training batch size.')
flags.DEFINE_integer('eval_batch_size', 100, 'The evaluation batch size.')
flags.DEFINE_string('optimizer', 'adam', 'The optimizer to train with.')
flags.DEFINE_float('learning_rate', 0.01, 'The learning rate.')
flags.DEFINE_float('weight_decay', None,
                   'The model decoupled weight decay rate.')
flags.DEFINE_integer('lr_warmup_epochs', 1,
                     'Number of epochs for a linear warmup to the initial '
                     'learning rate. Use 0 to do no warmup.')
flags.DEFINE_float('lr_decay_ratio', 0.2, 'Amount to decay learning rate.')
flags.DEFINE_list('lr_decay_epochs', ['60', '120', '160'],
                  'Epochs to decay learning rate by.')
# Flags relating to setting up the job.
flags.DEFINE_bool('use_tpu', False, 'Whether to run on CPU or TPU.')
# Flags relating to the training/eval loop.
flags.DEFINE_integer('eval_frequency', 100,
                     'How many steps between evaluating on the (validation and)'
                     'test set.')
flags.DEFINE_integer('train_steps', 2000, 'How many steps to train for.')
flags.DEFINE_integer('seed', 1337, 'Random seed.')
flags.DEFINE_enum('dataset', 'cifar10',
                  enum_values=['cifar10', 'cifar100'],
                  help='Dataset.')
# Misc flags
flags.DEFINE_integer('num_bins', 15, 'Number of bins for ECE.')
flags.DEFINE_string('output_dir', '/tmp/cifar', 'Output directory.')
# Accelerator flags.
flags.DEFINE_bool('use_gpu', False, 'Whether to run on GPU or otherwise TPU.')
flags.DEFINE_bool('use_bfloat16', False, 'Whether to use mixed precision.')
flags.DEFINE_integer('num_cores', 8, 'Number of TPU cores or number of GPUs.')
flags.DEFINE_string('tpu', None,
                    'Name of the TPU. Only used if use_gpu is False.')
# Loss-specific flags.
# NOTE(review): run() reads FLAGS.focal_gamma, but no 'focal_gamma' flag is
# defined in this file. If it is not defined by an imported module (e.g. the
# losses module), accessing it will fail at runtime — confirm, and add a
# DEFINE_float('focal_gamma', ...) here if needed.
flags.DEFINE_float('dm_alpha', 1.0, 'DM Alpha parameter.')
flags.DEFINE_bool('distance_logits', False,
                  'Whether to use a distance-based last layer.')
flags.DEFINE_enum('loss_name', 'crossentropy',
                  enum_values=['crossentropy', 'dm_loss', 'one_vs_all',
                               'focal_loss'],
                  help='Loss function')
# Model flags.
flags.DEFINE_enum('model_name', 'wide_resnet',
                  enum_values=['resnet20', 'wide_resnet'],
                  help='Model to use for training.')
FLAGS = flags.FLAGS
def _check_batch_replica_divisible(
total_batch_size: int,
strategy: tf.distribute.Strategy):
"""Ensure the batch size is evenly divisible by the number of replicas."""
if total_batch_size % strategy.num_replicas_in_sync != 0:
raise ValueError(
'Batch size must be evenly divisible by the number of replicas in the '
'job. Total batch size: {}, num replicas: {}'.format(
total_batch_size, strategy.num_replicas_in_sync))
def _ds_as_tuple(ds):
return ds.map(lambda d: (d['features'], d['labels']))
def run(trial_dir: str):
  """Run the experiment: build datasets, train, evaluate, checkpoint.

  Args:
    trial_dir: Directory to save the final checkpoint into (skipped if empty).
  """
  tf.random.set_seed(FLAGS.seed)
  np.random.seed(FLAGS.seed)
  strategy = ub.strategy_utils.get_strategy(FLAGS.tpu, FLAGS.use_tpu)
  with strategy.scope():
    # Setup CIFAR-10 tf.data.Dataset splits.
    # Use 5000 validation images.
    train_dataset_builder = ub.datasets.Cifar10Dataset(
        split='train', validation_percent=0.1)
    train_dataset = train_dataset_builder.load(batch_size=FLAGS.batch_size)
    train_dataset = _ds_as_tuple(train_dataset)
    train_dataset = strategy.experimental_distribute_dataset(train_dataset)
    val_dataset_builder = ub.datasets.Cifar10Dataset(
        split='validation', validation_percent=0.1)
    val_dataset = val_dataset_builder.load(batch_size=FLAGS.eval_batch_size)
    val_dataset = _ds_as_tuple(val_dataset)
    val_dataset = strategy.experimental_distribute_dataset(val_dataset)
    test_dataset_builder = ub.datasets.Cifar10Dataset(split='test')
    test_dataset = test_dataset_builder.load(batch_size=FLAGS.eval_batch_size)
    test_dataset = _ds_as_tuple(test_dataset)
    test_dataset = strategy.experimental_distribute_dataset(test_dataset)
    # Setup optimizer.
    # Batch sizes must divide evenly across replicas for distribution.
    _check_batch_replica_divisible(FLAGS.batch_size, strategy)
    _check_batch_replica_divisible(FLAGS.eval_batch_size, strategy)
    optimizer = ub.optimizers.get(
        optimizer_name=FLAGS.optimizer,
        learning_rate_schedule='constant',
        learning_rate=FLAGS.learning_rate,
        weight_decay=FLAGS.weight_decay)
    # Setup model.
    # TODO(shreyaspadhy): How does one get the number of classes in dataset
    model = models_lib.create_model(
        batch_size=FLAGS.batch_size,
        l2_weight=None,
        num_classes=10,
        distance_logits=FLAGS.distance_logits)
    # NOTE(review): FLAGS.focal_gamma is read here but no 'focal_gamma' flag
    # is defined in this file — confirm it is defined by an imported module,
    # otherwise this line fails at runtime.
    loss_fn = loss_lib.get(
        loss_name=FLAGS.loss_name,
        from_logits=True,
        dm_alpha=FLAGS.dm_alpha,
        focal_gamma=FLAGS.focal_gamma)
    model.compile(
        optimizer=optimizer,
        loss=loss_fn,
        metrics=['sparse_categorical_accuracy'])
    # Train and eval.
    steps_per_epoch = train_dataset_builder.num_examples // FLAGS.batch_size
    validation_steps = (
        val_dataset_builder.num_examples // FLAGS.eval_batch_size)
    # NOTE(review): Keras `validation_freq` counts epochs, while the
    # 'eval_frequency' flag is documented in steps — verify the intent.
    history = model.fit(
        x=train_dataset,
        batch_size=FLAGS.batch_size,
        epochs=FLAGS.train_steps // steps_per_epoch,
        steps_per_epoch=steps_per_epoch,
        validation_data=val_dataset,
        validation_steps=validation_steps,
        validation_freq=FLAGS.eval_frequency,
        shuffle=False)
    logging.info(history)
    test_steps = test_dataset_builder.num_examples // FLAGS.eval_batch_size
    test_result = model.evaluate(
        x=test_dataset,
        batch_size=FLAGS.eval_batch_size,
        steps=test_steps)
    logging.info(test_result)
    # Save a checkpoint after training.
    if trial_dir:
      model.save_weights(
          os.path.join(trial_dir, 'model.ckpt-{}'.format(FLAGS.train_steps)))
def main(argv):
  """Create the trial directory and launch the experiment."""
  del argv  # Unused.
  logging.info('Starting CIFAR-10 ResNet-20 experiment!')
  trial_dir = os.path.join(FLAGS.output_dir, '0')
  logging.info('Saving to dir: %s', trial_dir)
  gfile = tf.io.gfile
  if not gfile.exists(trial_dir):
    gfile.makedirs(trial_dir)
  return run(trial_dir)
if __name__ == '__main__':
app.run(main)
|
29e512c40f683a70bfd97f1f6bc7eb059cd3b8af
|
c058f51b99f91faebf27183b2b579e9f96e0d8f5
|
/botorch/sampling/pathwise/features/generators.py
|
42fd30c8d29bbfaf08225055a84be0636ffb89a3
|
[
"MIT"
] |
permissive
|
pytorch/botorch
|
255d62f698cc615c750e9343c278a63c7e96a586
|
4cc5ed59b2e8a9c780f786830c548e05cc74d53c
|
refs/heads/main
| 2023-08-22T15:23:51.071048
| 2023-08-22T05:30:38
| 2023-08-22T05:30:38
| 142,940,093
| 2,891
| 373
|
MIT
| 2023-09-13T00:16:13
| 2018-07-30T23:59:57
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 6,229
|
py
|
generators.py
|
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
r"""
.. [rahimi2007random]
A. Rahimi and B. Recht. Random features for large-scale kernel machines.
Advances in Neural Information Processing Systems 20 (2007).
.. [sutherland2015error]
D. J. Sutherland and J. Schneider. On the error of random Fourier features.
arXiv preprint arXiv:1506.02785 (2015).
"""
from __future__ import annotations
from typing import Any, Callable
import torch
from botorch.exceptions.errors import UnsupportedError
from botorch.sampling.pathwise.features.maps import KernelFeatureMap
from botorch.sampling.pathwise.utils import (
ChainedTransform,
FeatureSelector,
InverseLengthscaleTransform,
OutputscaleTransform,
SineCosineTransform,
)
from botorch.utils.dispatcher import Dispatcher
from botorch.utils.sampling import draw_sobol_normal_samples
from gpytorch import kernels
from gpytorch.kernels.kernel import Kernel
from torch import Size, Tensor
from torch.distributions import Gamma
# Signature of a generator that builds a finite-dimensional feature map for a
# kernel, given the number of input features and of output (basis) features.
TKernelFeatureMapGenerator = Callable[[Kernel, int, int], KernelFeatureMap]

# Dispatcher routing `gen_kernel_features` to a kernel-type-specific handler
# (see the `@GenKernelFeatures.register(...)` implementations below).
GenKernelFeatures = Dispatcher("gen_kernel_features")
def gen_kernel_features(
    kernel: kernels.Kernel,
    num_inputs: int,
    num_outputs: int,
    **kwargs: Any,
) -> KernelFeatureMap:
    r"""Build a finite-dimensional feature map approximating a kernel.

    Produces :math:`\phi: \mathcal{X} \to \mathbb{R}^{n}` such that
    :math:`k(x, x') ≈ \phi(x)^{T} \phi(x')`. For stationary kernels :math:`k`
    this defaults to the method of random Fourier features; see
    [rahimi2007random]_ and [sutherland2015error]_ for details.

    Args:
        kernel: The kernel :math:`k` to be represented via a finite-dim basis.
        num_inputs: The number of input features.
        num_outputs: The number of kernel features.
    """
    # Delegate to the handler registered for this kernel's concrete type.
    return GenKernelFeatures(
        kernel, num_inputs=num_inputs, num_outputs=num_outputs, **kwargs
    )
def _gen_fourier_features(
    kernel: kernels.Kernel,
    weight_generator: Callable[[Size], Tensor],
    num_inputs: int,
    num_outputs: int,
) -> KernelFeatureMap:
    r"""Generate a feature map :math:`\phi: \mathcal{X} \to \mathbb{R}^{2l}` that
    approximates a stationary kernel so that :math:`k(x, x') ≈ \phi(x)^\top \phi(x')`.

    Following [sutherland2015error]_, we represent complex exponentials by pairs of
    basis functions :math:`\phi_{i}(x) = \sin(x^\top w_{i})` and
    :math:`\phi_{i + l}(x) = \cos(x^\top w_{i})`.

    Args:
        kernel: A stationary kernel :math:`k(x, x') = k(x - x')`.
        weight_generator: A callable used to generate weight vectors :math:`w`.
        num_inputs: The number of input features.
        num_outputs: The number of Fourier features.
    """
    # Sine/cosine pairs are emitted per weight vector, so the output
    # dimension must be even.
    if num_outputs % 2:
        raise UnsupportedError(
            f"Expected an even number of output features, but received {num_outputs=}."
        )

    # Scale inputs by the inverse lengthscale so weights can be drawn from
    # the kernel's canonical (unit-lengthscale) spectral density.
    input_transform = InverseLengthscaleTransform(kernel)
    if kernel.active_dims is not None:
        # Restrict the map to the kernel's active input dimensions.
        num_inputs = len(kernel.active_dims)
        input_transform = ChainedTransform(
            input_transform, FeatureSelector(indices=kernel.active_dims)
        )

    # Draw num_outputs // 2 weight vectors per batch element in one flat
    # call, then fold the result back into the kernel's batch shape.
    weight = weight_generator(
        Size([kernel.batch_shape.numel() * num_outputs // 2, num_inputs])
    ).reshape(*kernel.batch_shape, num_outputs // 2, num_inputs)

    # sqrt(2 / num_outputs) normalization from the Monte Carlo estimator of
    # the kernel via random Fourier features.
    output_transform = SineCosineTransform(
        torch.tensor((2 / num_outputs) ** 0.5, device=kernel.device, dtype=kernel.dtype)
    )
    return KernelFeatureMap(
        kernel=kernel,
        weight=weight,
        input_transform=input_transform,
        output_transform=output_transform,
    )
@GenKernelFeatures.register(kernels.RBFKernel)
def _gen_kernel_features_rbf(
    kernel: kernels.RBFKernel,
    *,
    num_inputs: int,
    num_outputs: int,
) -> KernelFeatureMap:
    """Random Fourier features for an RBF kernel.

    The RBF kernel's spectral density is standard normal, so weight vectors
    are quasi-random draws from N(0, I).
    """

    def _weight_generator(shape: Size) -> Tensor:
        if len(shape) != 2:
            raise UnsupportedError(
                f"Expected `shape` to be 2-dimensional, but {len(shape)=}."
            )
        num_weights, num_dims = shape
        return draw_sobol_normal_samples(
            n=num_weights,
            d=num_dims,
            device=kernel.lengthscale.device,
            dtype=kernel.lengthscale.dtype,
        )

    return _gen_fourier_features(
        kernel=kernel,
        weight_generator=_weight_generator,
        num_inputs=num_inputs,
        num_outputs=num_outputs,
    )
@GenKernelFeatures.register(kernels.MaternKernel)
def _gen_kernel_features_matern(
    kernel: kernels.MaternKernel,
    *,
    num_inputs: int,
    num_outputs: int,
) -> KernelFeatureMap:
    """Random Fourier features for a Matern kernel."""

    def _weight_generator(shape: Size) -> Tensor:
        if len(shape) != 2:
            raise UnsupportedError(
                f"Expected `shape` to be 2-dimensional, but {len(shape)=}."
            )
        num_weights, num_dims = shape
        dtype = kernel.lengthscale.dtype
        device = kernel.lengthscale.device
        nu = torch.tensor(kernel.nu, device=device, dtype=dtype)
        # Quasi-random normals first, then per-row Gamma scales: dividing a
        # normal by the root of a Gamma(nu, nu) draw yields weights from the
        # Matern kernel's spectral density (a multivariate-t distribution).
        normals = draw_sobol_normal_samples(
            n=num_weights, d=num_dims, device=device, dtype=dtype
        )
        return Gamma(nu, nu).rsample((num_weights, 1)).rsqrt() * normals

    return _gen_fourier_features(
        kernel=kernel,
        weight_generator=_weight_generator,
        num_inputs=num_inputs,
        num_outputs=num_outputs,
    )
@GenKernelFeatures.register(kernels.ScaleKernel)
def _gen_kernel_features_scale(
    kernel: kernels.ScaleKernel,
    *,
    num_inputs: int,
    num_outputs: int,
) -> KernelFeatureMap:
    """Feature map for a ScaleKernel: the base kernel's map wrapped with the
    kernel's outputscale (and, if needed, an active-dims selector)."""
    active_dims = kernel.active_dims
    if active_dims is None:
        base_num_inputs = num_inputs
    else:
        base_num_inputs = len(active_dims)

    feature_map = gen_kernel_features(
        kernel.base_kernel,
        num_inputs=base_num_inputs,
        num_outputs=num_outputs,
    )

    # Prepend a feature selector unless the base kernel already applies the
    # very same active-dims restriction (identity comparison on purpose).
    if active_dims is not None and active_dims is not kernel.base_kernel.active_dims:
        feature_map.input_transform = ChainedTransform(
            feature_map.input_transform, FeatureSelector(indices=active_dims)
        )

    # Apply the outputscale on the way out.
    feature_map.output_transform = ChainedTransform(
        OutputscaleTransform(kernel), feature_map.output_transform
    )
    return feature_map
|
a04b8f19cbe209b38f3d80186b27e799cf3c08f5
|
965efc4d7a83c2b5592417aa7e0d25a51f5a8108
|
/backend/metering_billing/management/commands/wait_for_db.py
|
ac0468cd334e65114f64ebaa6764f43443ad9b42
|
[
"MIT"
] |
permissive
|
uselotus/lotus
|
f4ee23bb828605215f18aacd1d6fcff8e0986c53
|
c065fb33ee1a870d72bbd2adfddc08d50ca049b6
|
refs/heads/main
| 2023-08-17T03:38:35.770580
| 2023-07-26T18:50:17
| 2023-07-26T18:50:17
| 516,192,901
| 1,447
| 100
|
MIT
| 2023-06-25T22:53:06
| 2022-07-21T02:06:46
|
Python
|
UTF-8
|
Python
| false
| false
| 495
|
py
|
wait_for_db.py
|
import time
from django.core.management.base import BaseCommand
from django.db import connections
class Command(BaseCommand):
    """Django command to pause execution until the database is available."""

    def handle(self, *args, **options):
        """Poll the default database once per second until it accepts connections.

        Note: merely evaluating ``connections["default"]`` returns a lazy
        wrapper and never touches the database, so the original loop exited
        immediately even when the DB was down. ``ensure_connection()`` actually
        opens (or validates) a connection and raises while it is unreachable.
        """
        while True:
            try:
                connections["default"].ensure_connection()
                break
            except Exception:
                # Broad on purpose: any failure means "not available yet".
                time.sleep(1)
|
9a399db603029238d50ca7a0bf9c7995bca2b673
|
dbab4ef8ab69e46beed07e06523b920bdf4f0bfd
|
/fluffy/__init__.py
|
346176a0782d342df97aaf0a89e3dd1be6e0ae0d
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
chriskuehl/fluffy
|
9322f240d98576169f5a4e0fe4e4cd06e381745c
|
d1831685fa80e105d88d8e970d54ba51e502dcc9
|
refs/heads/main
| 2023-08-06T21:40:24.443183
| 2023-05-14T04:34:29
| 2023-05-14T04:34:29
| 15,094,183
| 158
| 25
|
NOASSERTION
| 2023-06-13T06:27:57
| 2013-12-11T00:49:44
|
Python
|
UTF-8
|
Python
| false
| false
| 19
|
py
|
__init__.py
|
# Version string for this package.
version = '1.39.2'
|
cd6693b5afac51b8f4a76851577c8880ddd50150
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/Documentation/ReferenceManualScripts/python/TableParser.py
|
1d64a6080b0b8edb2420b56718a0d050a0ebd0bb
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143
| 2023-08-22T20:22:40
| 2023-08-22T20:22:40
| 10,969,551
| 1,006
| 3,696
|
Apache-2.0
| 2023-09-14T19:14:28
| 2013-06-26T14:09:07
|
C++
|
UTF-8
|
Python
| false
| false
| 5,423
|
py
|
TableParser.py
|
from __future__ import print_function
from __future__ import absolute_import
# email: cmsdoxy@cern.ch, ali.mehmet.altundag@cern.ch
# please have a look at the namespaces.html (namespace list) and annotated.html
# (~class list) html files to understand the tags/attributes that we use in
# this script.
from bs4 import BeautifulSoup
import sys, os, copy
# Module-level state: populated in the __main__ block below and read by the
# extract/generate helpers.
htmlFullPath = None  # full path to the input html file (argv[1])
htmlFilePath = None  # directory part of htmlFullPath
htmlFileName = None  # file-name part of htmlFullPath
fileNameTemplate = None # html file name template
htmlPage = None  # parsed BeautifulSoup document
tableClassName = 'directory'  # css class of the class/namespace table
# load rows from the table in [C]lass and [N]amespace list pages and prepare
# pages in the following structure: pages = {'A' : [...], 'B' : [...]}
def extractPages(configFileFlag = False):
    """Group table rows by the first letter of the entity name.

    Hidden (child) rows carry a ``style`` attribute and must be appended to
    the page of the last visible parent row. When ``configFileFlag`` is set,
    only rows whose link targets _cff/_cfi/_cfg files are kept.
    """
    # initial page, A; also the fallback letter should the table start with
    # a hidden row (the original code left firstLetter unbound in that case)
    pages = {'A': []}
    firstLetter = 'A'
    # find all class/namespace table rows.
    table = htmlPage.find('table', {'class': tableClassName})
    for row in table.findAll('tr'):
        # Only hidden (child) rows have a style attribute. NOTE: the original
        # `'style' in row` tests the tag's *contents*, not its attributes, so
        # it was always False; has_attr() performs the intended check.
        hidden = row.has_attr('style')
        # change the first letter only for visible (parent) rows, so hidden
        # children land on their parent's page
        if not hidden:
            firstLetter = row.findAll('td')[0].text[0].upper()
        # if pages dict doesn't have the page yet..
        if firstLetter not in pages:
            pages[firstLetter] = []
        # insert the row into the related page
        if configFileFlag:
            url = row.find('a')['href']
            if '_cff' in url or '_cfi' in url or '_cfg' in url:
                pages[firstLetter].append(row)
        else:
            pages[firstLetter].append(row)
    return pages
# load rows from the package documentation page. output structure:
# pages = {'PackageA' : [..], 'PackageB' : [...]}
def extractPagesForPackage():
    """Group package-documentation table rows by package name."""
    pages = {}
    table = htmlPage.find('table', {'class': tableClassName})
    for row in table.findAll('tr'):
        # first cell contains name of the package...
        cellText = row.findAll('td')[0].text
        # parse package name --please have a look at the pages.html file
        packageName = cellText[cellText.find(' '):cellText.find('/')].strip()
        # create the page on first sight, then append
        pages.setdefault(packageName, []).append(row)
    return pages
# generate alphabetic tab for html pages that will be generated by this script
def generateTab(items, curr, tabClass = 'tabs3'):
    """Return an html tab bar with one entry per item, highlighting `curr`.

    Args:
        items: iterable of page names (letters or package names).
        curr: the currently active page name (gets class="current").
        tabClass: css class of the enclosing div.
    """
    # (removed unused local `itemTagMap` from the original)
    tab = ''
    for item in items:
        fn = fileNameTemplate % item.replace(' ', '')  # generate file name
        if item != curr:
            tab += '<li><a href="%s">%s</a></li>' % (fn, item)
        else:
            tab += '<li class="current"><a href="%s">%s</a></li>' % (fn, item)
    return '<div class="%s"><ul class="tablist">%s</ul></div>' % (tabClass, tab)
if __name__ == "__main__":
    if len(sys.argv) < 2:
        sys.stderr.write("not enough parameter!\n")
        sys.exit(1)

    # initialize module-level variables used by the helpers above
    htmlFullPath = sys.argv[1]
    htmlFilePath = os.path.split(htmlFullPath)[0]
    htmlFileName = os.path.split(htmlFullPath)[1]
    fileNameTemplate = htmlFileName.replace('.html', '_%s.html')

    # load the html page
    with open(htmlFullPath) as f:
        htmlPage = f.read()
    htmlPage = BeautifulSoup(htmlPage)

    # The tab we extend ('index tab') has a different css class per page:
    # 'tabs' on the package documentation page, 'tabs2' on namespaces.html
    # (namespace list) and annotated.html (~class list), so pick the
    # destination tab class depending on the html file name.
    if htmlFileName == 'packageDocumentation.html':
        pages = extractPagesForPackage()
        destTabClassName = 'tabs'
    elif htmlFileName == 'configfiles.html':
        pages = extractPages(configFileFlag = True)
        destTabClassName = 'tabs2'
    else:
        pages = extractPages()
        destTabClassName = 'tabs2'

    # Build an 'All' page aggregating every row in alphabetical page order.
    # sorted() instead of `pages.keys(); .sort()`: dict.keys() is a view in
    # Python 3 and has no sort() method.
    pageNames = sorted(pages)
    allRows = []
    for page in pageNames:
        allRows = allRows + pages[page]
    pages['All'] = allRows
    pageNames.append('All')

    # prepare the template: clean the whole table content
    table = htmlPage.find('table', {'class': tableClassName})
    for row in table.findAll('tr'):
        row.extract()

    # generate pages
    for page in pageNames:
        print('generating %s...' % (fileNameTemplate % page))
        temp = BeautifulSoup(str(htmlPage))
        table = temp.find('table', {'class': tableClassName})
        oldTab = temp.find('div', {'class': destTabClassName})
        newTab = generateTab(pageNames, page)
        oldTab.replaceWith(BeautifulSoup(oldTab.prettify() + str(newTab)))
        for row in pages[page]:
            table.append(row)
        # replace blank character with '_'. Please notice that you will not
        # be able to use the original page name after this line.
        page = page.replace(' ', '_')
        with open('%s/%s' % (htmlFilePath, fileNameTemplate % page), 'w') as f:
            f.write(str(temp))
|
04f319f558802e4961adbbf79d7571b896207cf4
|
66d89bffa3c0aa598a3d4023536a5f7873cb206d
|
/requests_mock/response.py
|
58555392731bd9e34778e71935e5a320f3d9c653
|
[
"Apache-2.0"
] |
permissive
|
jamielennox/requests-mock
|
45daaa1f0d4023b2cb3284e84b75e63d5406a48a
|
15df8e2b85ae71afea1013b5bf9fd010623acd39
|
refs/heads/master
| 2023-07-09T16:12:16.655445
| 2023-05-15T16:38:31
| 2023-06-08T13:33:22
| 28,104,322
| 396
| 81
|
Apache-2.0
| 2023-06-22T06:29:04
| 2014-12-16T20:06:51
|
Python
|
UTF-8
|
Python
| false
| false
| 11,181
|
py
|
response.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json as jsonutils
from requests.adapters import HTTPAdapter
from requests.cookies import MockRequest, MockResponse
from requests.cookies import RequestsCookieJar
from requests.cookies import merge_cookies, cookiejar_from_dict
from requests.packages.urllib3.response import HTTPResponse
from requests.utils import get_encoding_from_headers
import six
from requests_mock import compat
from requests_mock import exceptions
# kwargs that specify a response body; mutually exclusive (see
# _check_body_arguments below).
_BODY_ARGS = frozenset(['raw', 'body', 'content', 'text', 'json'])
# kwargs that configure the http envelope rather than the body.
_HTTP_ARGS = frozenset([
    'status_code',
    'reason',
    'headers',
    'cookies',
    'json_encoder',
])

# default response status when none is supplied.
_DEFAULT_STATUS = 200
# shared adapter, used only to build requests.Response objects from raw
# urllib3 responses in create_response.
_http_adapter = HTTPAdapter()
class CookieJar(RequestsCookieJar):

    def set(self, name, value, **kwargs):
        """Add a cookie to the Jar.

        :param str name: cookie name/key.
        :param str value: cookie value.
        :param int version: Integer or None. Netscape cookies have version 0.
            RFC 2965 and RFC 2109 cookies have a version cookie-attribute of
            1. However, note that cookielib may 'downgrade' RFC 2109 cookies
            to Netscape cookies, in which case version is 0.
        :param str port: String representing a port or a set of ports
            (eg. '80', or '80,8080'),
        :param str domain: The domain the cookie should apply to.
        :param str path: Cookie path (a string, eg. '/acme/rocket_launchers').
        :param bool secure: True if cookie should only be returned over a
            secure connection.
        :param int expires: Integer expiry date in seconds since epoch or None.
        :param bool discard: True if this is a session cookie.
        :param str comment: String comment from the server explaining the
            function of this cookie.
        :param str comment_url: URL linking to a comment from the server
            explaining the function of this cookie.
        """
        # Overridden purely to attach the documentation above; the behavior
        # is entirely the parent class's.
        return super(CookieJar, self).set(name, value, **kwargs)
def _check_body_arguments(**kwargs):
    """Validate response kwargs: at most one body source, no unknown keys.

    Pops every body argument it finds out of ``kwargs`` as a side effect.
    """
    # mutual exclusion: only one of the body kwargs may be provided
    body_args = [name for name in _BODY_ARGS if kwargs.pop(name, None) is not None]
    if len(body_args) > 1:
        raise RuntimeError('You may only supply one body element. You '
                           'supplied %s' % ', '.join(body_args))

    # whatever remains must be a recognized http-envelope kwarg
    unknown = [name for name in kwargs if name not in _HTTP_ARGS]
    if unknown:
        raise TypeError('Too many arguments provided. Unexpected '
                        'arguments %s.' % ', '.join(unknown))
class _FakeConnection(object):
"""An object that can mock the necessary parts of a socket interface."""
def send(self, request, **kwargs):
msg = 'This response was created without a connection. You are ' \
'therefore unable to make a request directly on that connection.'
raise exceptions.InvalidRequest(msg)
def close(self):
pass
def _extract_cookies(request, response, cookies):
    """Add cookies to the response.

    requests normally extracts cookies from the headers of the
    original_response httplib.HTTPMessage, which we never create — so this
    step has to be performed manually here.
    """
    # Pick up cookies set via the Set-Cookie or Set-Cookie2 header; this
    # path only allows a single cookie to be set.
    fake_message = compat._FakeHTTPMessage(response.headers)
    response.cookies.extract_cookies(MockResponse(fake_message),
                                     MockRequest(request))

    # A CookieJar or dict passed to request_uri or create_response directly;
    # this path allows more than one cookie to be set.
    if cookies:
        merge_cookies(response.cookies, cookies)
class _IOReader(six.BytesIO):
    """A reader that makes a BytesIO look like a HTTPResponse.

    A HTTPResponse returns an empty string when read after the socket has
    been closed, whereas a BytesIO raises ValueError. For compatibility we
    mirror the HTTPResponse behavior.
    """

    def read(self, *args, **kwargs):
        if self.closed:
            return six.b('')

        # A zero-byte read on an open file yields empty output without
        # closing the stream.
        if args and args[0] == 0:
            return six.b('')

        # explicit base-class call: not a new-style object in python 2
        data = six.BytesIO.read(self, *args, **kwargs)

        # resp.iter_content(None) takes a urllib3 path that checks whether
        # the object is marked closed instead of the return value; mark
        # exhaustion by closing. see gh124.
        if data == six.b(''):
            self.close()
        return data
def create_response(request, **kwargs):
    """
    :param int status_code: The status code to return upon a successful
        match. Defaults to 200.
    :param HTTPResponse raw: A HTTPResponse object to return upon a
        successful match.
    :param io.IOBase body: An IO object with a read() method that can
        return a body on successful match.
    :param bytes content: A byte string to return upon a successful match.
    :param unicode text: A text string to return upon a successful match.
    :param object json: A python object to be converted to a JSON string
        and returned upon a successful match.
    :param class json_encoder: Encoder object to use for JSON.
    :param dict headers: A dictionary object containing headers that are
        returned upon a successful match.
    :param CookieJar cookies: A cookie jar with cookies to set on the
        response.
    :returns requests.Response: A response object that can
        be returned to requests.
    """
    connection = kwargs.pop('connection', _FakeConnection())

    # Enforces mutual exclusion of body kwargs and pops them from kwargs.
    _check_body_arguments(**kwargs)

    raw = kwargs.pop('raw', None)
    body = kwargs.pop('body', None)
    content = kwargs.pop('content', None)
    text = kwargs.pop('text', None)
    json = kwargs.pop('json', None)
    headers = kwargs.pop('headers', {})
    encoding = None

    if content is not None and not isinstance(content, six.binary_type):
        raise TypeError('Content should be binary data')
    if text is not None and not isinstance(text, six.string_types):
        raise TypeError('Text should be string data')

    # Normalize down the chain: json -> text -> content -> body (IO-like).
    if json is not None:
        encoder = kwargs.pop('json_encoder', None) or jsonutils.JSONEncoder
        text = jsonutils.dumps(json, cls=encoder)
    if text is not None:
        encoding = get_encoding_from_headers(headers) or 'utf-8'
        content = text.encode(encoding)
    if content is not None:
        body = _IOReader(content)
    if not raw:
        # Build a urllib3 HTTPResponse around the body, defaulting the
        # reason phrase from the standard status-code table.
        status = kwargs.get('status_code', _DEFAULT_STATUS)
        reason = kwargs.get('reason',
                            six.moves.http_client.responses.get(status))

        raw = HTTPResponse(status=status,
                           reason=reason,
                           headers=headers,
                           body=body or _IOReader(six.b('')),
                           decode_content=False,
                           enforce_content_length=False,
                           preload_content=False,
                           original_response=None)

    # Let requests' own adapter turn the raw response into a
    # requests.Response, then attach our fake connection.
    response = _http_adapter.build_response(request, raw)
    response.connection = connection

    if encoding and not response.encoding:
        response.encoding = encoding

    _extract_cookies(request, response, kwargs.get('cookies'))

    return response
class _Context(object):
"""Stores the data being used to process a current URL match."""
def __init__(self, headers, status_code, reason, cookies):
self.headers = headers
self.status_code = status_code
self.reason = reason
self.cookies = cookies
class _MatcherResponse(object):
    """Validated response specification for a registered matcher.

    Stores the kwargs given at registration time and builds a concrete
    requests.Response (or raises the configured exception) per request.
    """

    def __init__(self, **kwargs):
        self._exc = kwargs.pop('exc', None)

        # If the user is asking for an exception to be thrown then prevent them
        # specifying any sort of body or status response as it won't be used.
        # This may be protecting the user too much but can be removed later.
        if self._exc and kwargs:
            raise TypeError('Cannot provide other arguments with exc.')

        _check_body_arguments(**kwargs)
        self._params = kwargs

        # whilst in general you shouldn't do type checking in python this
        # makes sure we don't end up with differences between the way types
        # are handled between python 2 and 3.
        content = self._params.get('content')
        text = self._params.get('text')

        if content is not None and not (callable(content) or
                                        isinstance(content, six.binary_type)):
            raise TypeError('Content should be a callback or binary data')
        if text is not None and not (callable(text) or
                                     isinstance(text, six.string_types)):
            raise TypeError('Text should be a callback or string data')

    def get_response(self, request):
        """Build the response (or raise the configured exception) for `request`."""
        # if an error was requested then raise that instead of doing response
        if self._exc:
            raise self._exc

        # If a cookie dict is passed convert it into a CookieJar so that the
        # cookies object available in a callback context is always a jar.
        cookies = self._params.get('cookies', CookieJar())
        if isinstance(cookies, dict):
            cookies = cookiejar_from_dict(cookies, CookieJar())

        # Context object callbacks may mutate before the response is built.
        context = _Context(self._params.get('headers', {}).copy(),
                           self._params.get('status_code', _DEFAULT_STATUS),
                           self._params.get('reason'),
                           cookies)

        # if a body element is a callback then execute it
        def _call(f, *args, **kwargs):
            return f(request, context, *args, **kwargs) if callable(f) else f

        return create_response(request,
                               json=_call(self._params.get('json')),
                               text=_call(self._params.get('text')),
                               content=_call(self._params.get('content')),
                               body=_call(self._params.get('body')),
                               raw=self._params.get('raw'),
                               json_encoder=self._params.get('json_encoder'),
                               status_code=context.status_code,
                               reason=context.reason,
                               headers=context.headers,
                               cookies=context.cookies)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.